diff --git a/lib/PyWin32.chm b/lib/PyWin32.chm deleted file mode 100644 index 7606f82b..00000000 Binary files a/lib/PyWin32.chm and /dev/null differ diff --git a/lib/adodbapi/__init__.py b/lib/adodbapi/__init__.py deleted file mode 100644 index 0d769e05..00000000 --- a/lib/adodbapi/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole -* http://sourceforge.net/projects/adodbapi -""" -import sys -import time - -from .adodbapi import Connection, Cursor, __version__, connect, dateconverter -from .apibase import ( - BINARY, - DATETIME, - NUMBER, - ROWID, - STRING, - DatabaseError, - DataError, - Error, - FetchFailedError, - IntegrityError, - InterfaceError, - InternalError, - NotSupportedError, - OperationalError, - ProgrammingError, - Warning, - apilevel, - paramstyle, - threadsafety, -) - - -def Binary(aString): - """This function constructs an object capable of holding a binary (long) string value.""" - return bytes(aString) - - -def Date(year, month, day): - "This function constructs an object holding a date value." - return dateconverter.Date(year, month, day) - - -def Time(hour, minute, second): - "This function constructs an object holding a time value." - return dateconverter.Time(hour, minute, second) - - -def Timestamp(year, month, day, hour, minute, second): - "This function constructs an object holding a time stamp value." - return dateconverter.Timestamp(year, month, day, hour, minute, second) - - -def DateFromTicks(ticks): - """This function constructs an object holding a date value from the given ticks value - (number of seconds since the epoch; see the documentation of the standard Python time module for details). 
- """ - return Date(*time.gmtime(ticks)[:3]) - - -def TimeFromTicks(ticks): - """This function constructs an object holding a time value from the given ticks value - (number of seconds since the epoch; see the documentation of the standard Python time module for details). - """ - return Time(*time.gmtime(ticks)[3:6]) - - -def TimestampFromTicks(ticks): - """This function constructs an object holding a time stamp value from the given - ticks value (number of seconds since the epoch; - see the documentation of the standard Python time module for details).""" - return Timestamp(*time.gmtime(ticks)[:6]) - - -version = "adodbapi v" + __version__ diff --git a/lib/adodbapi/ado_consts.py b/lib/adodbapi/ado_consts.py deleted file mode 100644 index ecb2147d..00000000 --- a/lib/adodbapi/ado_consts.py +++ /dev/null @@ -1,281 +0,0 @@ -# ADO enumerated constants documented on MSDN: -# http://msdn.microsoft.com/en-us/library/ms678353(VS.85).aspx - -# IsolationLevelEnum -adXactUnspecified = -1 -adXactBrowse = 0x100 -adXactChaos = 0x10 -adXactCursorStability = 0x1000 -adXactIsolated = 0x100000 -adXactReadCommitted = 0x1000 -adXactReadUncommitted = 0x100 -adXactRepeatableRead = 0x10000 -adXactSerializable = 0x100000 - -# CursorLocationEnum -adUseClient = 3 -adUseServer = 2 - -# CursorTypeEnum -adOpenDynamic = 2 -adOpenForwardOnly = 0 -adOpenKeyset = 1 -adOpenStatic = 3 -adOpenUnspecified = -1 - -# CommandTypeEnum -adCmdText = 1 -adCmdStoredProc = 4 -adSchemaTables = 20 - -# ParameterDirectionEnum -adParamInput = 1 -adParamInputOutput = 3 -adParamOutput = 2 -adParamReturnValue = 4 -adParamUnknown = 0 -directions = { - 0: "Unknown", - 1: "Input", - 2: "Output", - 3: "InputOutput", - 4: "Return", -} - - -def ado_direction_name(ado_dir): - try: - return "adParam" + directions[ado_dir] - except: - return "unknown direction (" + str(ado_dir) + ")" - - -# ObjectStateEnum -adStateClosed = 0 -adStateOpen = 1 -adStateConnecting = 2 -adStateExecuting = 4 -adStateFetching = 8 - -# 
FieldAttributeEnum -adFldMayBeNull = 0x40 - -# ConnectModeEnum -adModeUnknown = 0 -adModeRead = 1 -adModeWrite = 2 -adModeReadWrite = 3 -adModeShareDenyRead = 4 -adModeShareDenyWrite = 8 -adModeShareExclusive = 12 -adModeShareDenyNone = 16 -adModeRecursive = 0x400000 - -# XactAttributeEnum -adXactCommitRetaining = 131072 -adXactAbortRetaining = 262144 - -ado_error_TIMEOUT = -2147217871 - -# DataTypeEnum - ADO Data types documented at: -# http://msdn2.microsoft.com/en-us/library/ms675318.aspx -adArray = 0x2000 -adEmpty = 0x0 -adBSTR = 0x8 -adBigInt = 0x14 -adBinary = 0x80 -adBoolean = 0xB -adChapter = 0x88 -adChar = 0x81 -adCurrency = 0x6 -adDBDate = 0x85 -adDBTime = 0x86 -adDBTimeStamp = 0x87 -adDate = 0x7 -adDecimal = 0xE -adDouble = 0x5 -adError = 0xA -adFileTime = 0x40 -adGUID = 0x48 -adIDispatch = 0x9 -adIUnknown = 0xD -adInteger = 0x3 -adLongVarBinary = 0xCD -adLongVarChar = 0xC9 -adLongVarWChar = 0xCB -adNumeric = 0x83 -adPropVariant = 0x8A -adSingle = 0x4 -adSmallInt = 0x2 -adTinyInt = 0x10 -adUnsignedBigInt = 0x15 -adUnsignedInt = 0x13 -adUnsignedSmallInt = 0x12 -adUnsignedTinyInt = 0x11 -adUserDefined = 0x84 -adVarBinary = 0xCC -adVarChar = 0xC8 -adVarNumeric = 0x8B -adVarWChar = 0xCA -adVariant = 0xC -adWChar = 0x82 -# Additional constants used by introspection but not ADO itself -AUTO_FIELD_MARKER = -1000 - -adTypeNames = { - adBSTR: "adBSTR", - adBigInt: "adBigInt", - adBinary: "adBinary", - adBoolean: "adBoolean", - adChapter: "adChapter", - adChar: "adChar", - adCurrency: "adCurrency", - adDBDate: "adDBDate", - adDBTime: "adDBTime", - adDBTimeStamp: "adDBTimeStamp", - adDate: "adDate", - adDecimal: "adDecimal", - adDouble: "adDouble", - adEmpty: "adEmpty", - adError: "adError", - adFileTime: "adFileTime", - adGUID: "adGUID", - adIDispatch: "adIDispatch", - adIUnknown: "adIUnknown", - adInteger: "adInteger", - adLongVarBinary: "adLongVarBinary", - adLongVarChar: "adLongVarChar", - adLongVarWChar: "adLongVarWChar", - adNumeric: "adNumeric", - 
adPropVariant: "adPropVariant", - adSingle: "adSingle", - adSmallInt: "adSmallInt", - adTinyInt: "adTinyInt", - adUnsignedBigInt: "adUnsignedBigInt", - adUnsignedInt: "adUnsignedInt", - adUnsignedSmallInt: "adUnsignedSmallInt", - adUnsignedTinyInt: "adUnsignedTinyInt", - adUserDefined: "adUserDefined", - adVarBinary: "adVarBinary", - adVarChar: "adVarChar", - adVarNumeric: "adVarNumeric", - adVarWChar: "adVarWChar", - adVariant: "adVariant", - adWChar: "adWChar", -} - - -def ado_type_name(ado_type): - return adTypeNames.get(ado_type, "unknown type (" + str(ado_type) + ")") - - -# here in decimal, sorted by value -# adEmpty 0 Specifies no value (DBTYPE_EMPTY). -# adSmallInt 2 Indicates a two-byte signed integer (DBTYPE_I2). -# adInteger 3 Indicates a four-byte signed integer (DBTYPE_I4). -# adSingle 4 Indicates a single-precision floating-point value (DBTYPE_R4). -# adDouble 5 Indicates a double-precision floating-point value (DBTYPE_R8). -# adCurrency 6 Indicates a currency value (DBTYPE_CY). Currency is a fixed-point number -# with four digits to the right of the decimal point. It is stored in an eight-byte signed integer scaled by 10,000. -# adDate 7 Indicates a date value (DBTYPE_DATE). A date is stored as a double, the whole part of which is -# the number of days since December 30, 1899, and the fractional part of which is the fraction of a day. -# adBSTR 8 Indicates a null-terminated character string (Unicode) (DBTYPE_BSTR). -# adIDispatch 9 Indicates a pointer to an IDispatch interface on a COM object (DBTYPE_IDISPATCH). -# adError 10 Indicates a 32-bit error code (DBTYPE_ERROR). -# adBoolean 11 Indicates a boolean value (DBTYPE_BOOL). -# adVariant 12 Indicates an Automation Variant (DBTYPE_VARIANT). -# adIUnknown 13 Indicates a pointer to an IUnknown interface on a COM object (DBTYPE_IUNKNOWN). -# adDecimal 14 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_DECIMAL). -# adTinyInt 16 Indicates a one-byte signed integer (DBTYPE_I1). 
-# adUnsignedTinyInt 17 Indicates a one-byte unsigned integer (DBTYPE_UI1). -# adUnsignedSmallInt 18 Indicates a two-byte unsigned integer (DBTYPE_UI2). -# adUnsignedInt 19 Indicates a four-byte unsigned integer (DBTYPE_UI4). -# adBigInt 20 Indicates an eight-byte signed integer (DBTYPE_I8). -# adUnsignedBigInt 21 Indicates an eight-byte unsigned integer (DBTYPE_UI8). -# adFileTime 64 Indicates a 64-bit value representing the number of 100-nanosecond intervals since -# January 1, 1601 (DBTYPE_FILETIME). -# adGUID 72 Indicates a globally unique identifier (GUID) (DBTYPE_GUID). -# adBinary 128 Indicates a binary value (DBTYPE_BYTES). -# adChar 129 Indicates a string value (DBTYPE_STR). -# adWChar 130 Indicates a null-terminated Unicode character string (DBTYPE_WSTR). -# adNumeric 131 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_NUMERIC). -# adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT). -# adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT). -# adDBDate 133 Indicates a date value (yyyymmdd) (DBTYPE_DBDATE). -# adDBTime 134 Indicates a time value (hhmmss) (DBTYPE_DBTIME). -# adDBTimeStamp 135 Indicates a date/time stamp (yyyymmddhhmmss plus a fraction in billionths) (DBTYPE_DBTIMESTAMP). -# adChapter 136 Indicates a four-byte chapter value that identifies rows in a child rowset (DBTYPE_HCHAPTER). -# adPropVariant 138 Indicates an Automation PROPVARIANT (DBTYPE_PROP_VARIANT). -# adVarNumeric 139 Indicates a numeric value (Parameter object only). -# adVarChar 200 Indicates a string value (Parameter object only). -# adLongVarChar 201 Indicates a long string value (Parameter object only). -# adVarWChar 202 Indicates a null-terminated Unicode character string (Parameter object only). -# adLongVarWChar 203 Indicates a long null-terminated Unicode string value (Parameter object only). -# adVarBinary 204 Indicates a binary value (Parameter object only). 
-# adLongVarBinary 205 Indicates a long binary value (Parameter object only). -# adArray (Does not apply to ADOX.) 0x2000 A flag value, always combined with another data type constant, -# that indicates an array of that other data type. - -# Error codes to names -adoErrors = { - 0xE7B: "adErrBoundToCommand", - 0xE94: "adErrCannotComplete", - 0xEA4: "adErrCantChangeConnection", - 0xC94: "adErrCantChangeProvider", - 0xE8C: "adErrCantConvertvalue", - 0xE8D: "adErrCantCreate", - 0xEA3: "adErrCatalogNotSet", - 0xE8E: "adErrColumnNotOnThisRow", - 0xD5D: "adErrDataConversion", - 0xE89: "adErrDataOverflow", - 0xE9A: "adErrDelResOutOfScope", - 0xEA6: "adErrDenyNotSupported", - 0xEA7: "adErrDenyTypeNotSupported", - 0xCB3: "adErrFeatureNotAvailable", - 0xEA5: "adErrFieldsUpdateFailed", - 0xC93: "adErrIllegalOperation", - 0xCAE: "adErrInTransaction", - 0xE87: "adErrIntegrityViolation", - 0xBB9: "adErrInvalidArgument", - 0xE7D: "adErrInvalidConnection", - 0xE7C: "adErrInvalidParamInfo", - 0xE82: "adErrInvalidTransaction", - 0xE91: "adErrInvalidURL", - 0xCC1: "adErrItemNotFound", - 0xBCD: "adErrNoCurrentRecord", - 0xE83: "adErrNotExecuting", - 0xE7E: "adErrNotReentrant", - 0xE78: "adErrObjectClosed", - 0xD27: "adErrObjectInCollection", - 0xD5C: "adErrObjectNotSet", - 0xE79: "adErrObjectOpen", - 0xBBA: "adErrOpeningFile", - 0xE80: "adErrOperationCancelled", - 0xE96: "adErrOutOfSpace", - 0xE88: "adErrPermissionDenied", - 0xE9E: "adErrPropConflicting", - 0xE9B: "adErrPropInvalidColumn", - 0xE9C: "adErrPropInvalidOption", - 0xE9D: "adErrPropInvalidValue", - 0xE9F: "adErrPropNotAllSettable", - 0xEA0: "adErrPropNotSet", - 0xEA1: "adErrPropNotSettable", - 0xEA2: "adErrPropNotSupported", - 0xBB8: "adErrProviderFailed", - 0xE7A: "adErrProviderNotFound", - 0xBBB: "adErrReadFile", - 0xE93: "adErrResourceExists", - 0xE92: "adErrResourceLocked", - 0xE97: "adErrResourceOutOfScope", - 0xE8A: "adErrSchemaViolation", - 0xE8B: "adErrSignMismatch", - 0xE81: "adErrStillConnecting", - 0xE7F: 
"adErrStillExecuting", - 0xE90: "adErrTreePermissionDenied", - 0xE8F: "adErrURLDoesNotExist", - 0xE99: "adErrURLNamedRowDoesNotExist", - 0xE98: "adErrUnavailable", - 0xE84: "adErrUnsafeOperation", - 0xE95: "adErrVolumeNotFound", - 0xBBC: "adErrWriteFile", -} diff --git a/lib/adodbapi/adodbapi.py b/lib/adodbapi/adodbapi.py deleted file mode 100644 index 8f7c045e..00000000 --- a/lib/adodbapi/adodbapi.py +++ /dev/null @@ -1,1223 +0,0 @@ -"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, versions 2.1 and later by Vernon Cole -* http://sourceforge.net/projects/pywin32 -* https://github.com/mhammond/pywin32 -* http://sourceforge.net/projects/adodbapi - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - django adaptations and refactoring by Adam Vandenberg - -DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/ - -This module source should run correctly in CPython versions 2.7 and later, -or IronPython version 2.7 and later, -or, after running through 2to3.py, CPython 3.4 or later. -""" - -__version__ = "2.6.2.0" -version = "adodbapi v" + __version__ - -import copy -import decimal -import os -import sys -import weakref - -from . 
import ado_consts as adc, apibase as api, process_connect_string - -try: - verbose = int(os.environ["ADODBAPI_VERBOSE"]) -except: - verbose = False -if verbose: - print(version) - -# --- define objects to smooth out IronPython <-> CPython differences -onWin32 = False # assume the worst -if api.onIronPython: - from clr import Reference - from System import ( - Activator, - Array, - Byte, - DateTime, - DBNull, - Decimal as SystemDecimal, - Type, - ) - - def Dispatch(dispatch): - type = Type.GetTypeFromProgID(dispatch) - return Activator.CreateInstance(type) - - def getIndexedValue(obj, index): - return obj.Item[index] - -else: # try pywin32 - try: - import pythoncom - import pywintypes - import win32com.client - - onWin32 = True - - def Dispatch(dispatch): - return win32com.client.Dispatch(dispatch) - - except ImportError: - import warnings - - warnings.warn( - "pywin32 package (or IronPython) required for adodbapi.", ImportWarning - ) - - def getIndexedValue(obj, index): - return obj(index) - - -from collections.abc import Mapping - -# --- define objects to smooth out Python3000 <-> Python 2.x differences -unicodeType = str -longType = int -StringTypes = str -maxint = sys.maxsize - - -# ----------------- The .connect method ----------------- -def make_COM_connecter(): - try: - if onWin32: - pythoncom.CoInitialize() # v2.1 Paj - c = Dispatch("ADODB.Connection") # connect _after_ CoIninialize v2.1.1 adamvan - except: - raise api.InterfaceError( - "Windows COM Error: Dispatch('ADODB.Connection') failed." - ) - return c - - -def connect(*args, **kwargs): # --> a db-api connection object - """Connect to a database. 
- - call using: - :connection_string -- An ADODB formatted connection string, see: - * http://www.connectionstrings.com - * http://www.asp101.com/articles/john/connstring/default.asp - :timeout -- A command timeout value, in seconds (default 30 seconds) - """ - co = Connection() # make an empty connection object - - kwargs = process_connect_string.process(args, kwargs, True) - - try: # connect to the database, using the connection information in kwargs - co.connect(kwargs) - return co - except Exception as e: - message = 'Error opening connection to "%s"' % co.connection_string - raise api.OperationalError(e, message) - - -# so you could use something like: -# myConnection.paramstyle = 'named' -# The programmer may also change the default. -# For example, if I were using django, I would say: -# import adodbapi as Database -# Database.adodbapi.paramstyle = 'format' - -# ------- other module level defaults -------- -defaultIsolationLevel = adc.adXactReadCommitted -# Set defaultIsolationLevel on module level before creating the connection. -# For example: -# import adodbapi, ado_consts -# adodbapi.adodbapi.defaultIsolationLevel=ado_consts.adXactBrowse" -# -# Set defaultCursorLocation on module level before creating the connection. -# It may be one of the "adUse..." consts. -defaultCursorLocation = adc.adUseClient # changed from adUseServer as of v 2.3.0 - -dateconverter = api.pythonDateTimeConverter() # default - - -def format_parameters(ADOparameters, show_value=False): - """Format a collection of ADO Command Parameters. - - Used by error reporting in _execute_command. 
- """ - try: - if show_value: - desc = [ - 'Name: %s, Dir.: %s, Type: %s, Size: %s, Value: "%s", Precision: %s, NumericScale: %s' - % ( - p.Name, - adc.directions[p.Direction], - adc.adTypeNames.get(p.Type, str(p.Type) + " (unknown type)"), - p.Size, - p.Value, - p.Precision, - p.NumericScale, - ) - for p in ADOparameters - ] - else: - desc = [ - "Name: %s, Dir.: %s, Type: %s, Size: %s, Precision: %s, NumericScale: %s" - % ( - p.Name, - adc.directions[p.Direction], - adc.adTypeNames.get(p.Type, str(p.Type) + " (unknown type)"), - p.Size, - p.Precision, - p.NumericScale, - ) - for p in ADOparameters - ] - return "[" + "\n".join(desc) + "]" - except: - return "[]" - - -def _configure_parameter(p, value, adotype, settings_known): - """Configure the given ADO Parameter 'p' with the Python 'value'.""" - - if adotype in api.adoBinaryTypes: - p.Size = len(value) - p.AppendChunk(value) - - elif isinstance(value, StringTypes): # v2.1 Jevon - L = len(value) - if adotype in api.adoStringTypes: # v2.2.1 Cole - if settings_known: - L = min(L, p.Size) # v2.1 Cole limit data to defined size - p.Value = value[:L] # v2.1 Jevon & v2.1 Cole - else: - p.Value = value # dont limit if db column is numeric - if L > 0: # v2.1 Cole something does not like p.Size as Zero - p.Size = L # v2.1 Jevon - - elif isinstance(value, decimal.Decimal): - if api.onIronPython: - s = str(value) - p.Value = s - p.Size = len(s) - else: - p.Value = value - exponent = value.as_tuple()[2] - digit_count = len(value.as_tuple()[1]) - p.Precision = digit_count - if exponent == 0: - p.NumericScale = 0 - elif exponent < 0: - p.NumericScale = -exponent - if p.Precision < p.NumericScale: - p.Precision = p.NumericScale - else: # exponent > 0: - p.NumericScale = 0 - p.Precision = digit_count + exponent - - elif type(value) in dateconverter.types: - if settings_known and adotype in api.adoDateTimeTypes: - p.Value = dateconverter.COMDate(value) - else: # probably a string - # provide the date as a string in the format 
'YYYY-MM-dd' - s = dateconverter.DateObjectToIsoFormatString(value) - p.Value = s - p.Size = len(s) - - elif api.onIronPython and isinstance(value, longType): # Iron Python Long - s = str(value) # feature workaround for IPy 2.0 - p.Value = s - - elif adotype == adc.adEmpty: # ADO will not let you specify a null column - p.Type = ( - adc.adInteger - ) # so we will fake it to be an integer (just to have something) - p.Value = None # and pass in a Null *value* - - # For any other type, set the value and let pythoncom do the right thing. - else: - p.Value = value - - -# # # # # ----- the Class that defines a connection ----- # # # # # -class Connection(object): - # include connection attributes as class attributes required by api definition. - Warning = api.Warning - Error = api.Error - InterfaceError = api.InterfaceError - DataError = api.DataError - DatabaseError = api.DatabaseError - OperationalError = api.OperationalError - IntegrityError = api.IntegrityError - InternalError = api.InternalError - NotSupportedError = api.NotSupportedError - ProgrammingError = api.ProgrammingError - FetchFailedError = api.FetchFailedError # (special for django) - # ...class attributes... (can be overridden by instance attributes) - verbose = api.verbose - - @property - def dbapi(self): # a proposed db-api version 3 extension. - "Return a reference to the DBAPI module for this Connection." 
- return api - - def __init__(self): # now define the instance attributes - self.connector = None - self.paramstyle = api.paramstyle - self.supportsTransactions = False - self.connection_string = "" - self.cursors = weakref.WeakValueDictionary() - self.dbms_name = "" - self.dbms_version = "" - self.errorhandler = None # use the standard error handler for this instance - self.transaction_level = 0 # 0 == Not in a transaction, at the top level - self._autocommit = False - - def connect(self, kwargs, connection_maker=make_COM_connecter): - if verbose > 9: - print("kwargs=", repr(kwargs)) - try: - self.connection_string = ( - kwargs["connection_string"] % kwargs - ) # insert keyword arguments - except Exception as e: - self._raiseConnectionError( - KeyError, "Python string format error in connection string->" - ) - self.timeout = kwargs.get("timeout", 30) - self.mode = kwargs.get("mode", adc.adModeUnknown) - self.kwargs = kwargs - if verbose: - print('%s attempting: "%s"' % (version, self.connection_string)) - self.connector = connection_maker() - self.connector.ConnectionTimeout = self.timeout - self.connector.ConnectionString = self.connection_string - self.connector.Mode = self.mode - - try: - self.connector.Open() # Open the ADO connection - except api.Error: - self._raiseConnectionError( - api.DatabaseError, - "ADO error trying to Open=%s" % self.connection_string, - ) - - try: # Stefan Fuchs; support WINCCOLEDBProvider - if getIndexedValue(self.connector.Properties, "Transaction DDL").Value != 0: - self.supportsTransactions = True - except pywintypes.com_error: - pass # Stefan Fuchs - self.dbms_name = getIndexedValue(self.connector.Properties, "DBMS Name").Value - try: # Stefan Fuchs - self.dbms_version = getIndexedValue( - self.connector.Properties, "DBMS Version" - ).Value - except pywintypes.com_error: - pass # Stefan Fuchs - self.connector.CursorLocation = defaultCursorLocation # v2.1 Rose - if self.supportsTransactions: - self.connector.IsolationLevel = 
defaultIsolationLevel - self._autocommit = bool(kwargs.get("autocommit", False)) - if not self._autocommit: - self.transaction_level = ( - self.connector.BeginTrans() - ) # Disables autocommit & inits transaction_level - else: - self._autocommit = True - if "paramstyle" in kwargs: - self.paramstyle = kwargs["paramstyle"] # let setattr do the error checking - self.messages = [] - if verbose: - print("adodbapi New connection at %X" % id(self)) - - def _raiseConnectionError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self, None, errorclass, errorvalue) - - def _closeAdoConnection(self): # all v2.1 Rose - """close the underlying ADO Connection object, - rolling it back first if it supports transactions.""" - if self.connector is None: - return - if not self._autocommit: - if self.transaction_level: - try: - self.connector.RollbackTrans() - except: - pass - self.connector.Close() - if verbose: - print("adodbapi Closed connection at %X" % id(self)) - - def close(self): - """Close the connection now (rather than whenever __del__ is called). - - The connection will be unusable from this point forward; - an Error (or subclass) exception will be raised if any operation is attempted with the connection. - The same applies to all cursor objects trying to use the connection. - """ - for crsr in list(self.cursors.values())[ - : - ]: # copy the list, then close each one - crsr.close(dont_tell_me=True) # close without back-link clearing - self.messages = [] - try: - self._closeAdoConnection() # v2.1 Rose - except Exception as e: - self._raiseConnectionError(sys.exc_info()[0], sys.exc_info()[1]) - - self.connector = None # v2.4.2.2 fix subtle timeout bug - # per M.Hammond: "I expect the benefits of uninitializing are probably fairly small, - # so never uninitializing will probably not cause any problems." - - def commit(self): - """Commit any pending transaction to the database. 
- - Note that if the database supports an auto-commit feature, - this must be initially off. An interface method may be provided to turn it back on. - Database modules that do not support transactions should implement this method with void functionality. - """ - self.messages = [] - if not self.supportsTransactions: - return - - try: - self.transaction_level = self.connector.CommitTrans() - if verbose > 1: - print("commit done on connection at %X" % id(self)) - if not ( - self._autocommit - or (self.connector.Attributes & adc.adXactAbortRetaining) - ): - # If attributes has adXactCommitRetaining it performs retaining commits that is, - # calling CommitTrans automatically starts a new transaction. Not all providers support this. - # If not, we will have to start a new transaction by this command: - self.transaction_level = self.connector.BeginTrans() - except Exception as e: - self._raiseConnectionError(api.ProgrammingError, e) - - def _rollback(self): - """In case a database does provide transactions this method causes the the database to roll back to - the start of any pending transaction. Closing a connection without committing the changes first will - cause an implicit rollback to be performed. - - If the database does not support the functionality required by the method, the interface should - throw an exception in case the method is used. - The preferred approach is to not implement the method and thus have Python generate - an AttributeError in case the method is requested. This allows the programmer to check for database - capabilities using the standard hasattr() function. - - For some dynamically configured interfaces it may not be appropriate to require dynamically making - the method available. These interfaces should then raise a NotSupportedError to indicate the - non-ability to perform the roll back when the method is invoked. 
- """ - self.messages = [] - if ( - self.transaction_level - ): # trying to roll back with no open transaction causes an error - try: - self.transaction_level = self.connector.RollbackTrans() - if verbose > 1: - print("rollback done on connection at %X" % id(self)) - if not self._autocommit and not ( - self.connector.Attributes & adc.adXactAbortRetaining - ): - # If attributes has adXactAbortRetaining it performs retaining aborts that is, - # calling RollbackTrans automatically starts a new transaction. Not all providers support this. - # If not, we will have to start a new transaction by this command: - if ( - not self.transaction_level - ): # if self.transaction_level == 0 or self.transaction_level is None: - self.transaction_level = self.connector.BeginTrans() - except Exception as e: - self._raiseConnectionError(api.ProgrammingError, e) - - def __setattr__(self, name, value): - if name == "autocommit": # extension: allow user to turn autocommit on or off - if self.supportsTransactions: - object.__setattr__(self, "_autocommit", bool(value)) - try: - self._rollback() # must clear any outstanding transactions - except: - pass - return - elif name == "paramstyle": - if value not in api.accepted_paramstyles: - self._raiseConnectionError( - api.NotSupportedError, - 'paramstyle="%s" not in:%s' - % (value, repr(api.accepted_paramstyles)), - ) - elif name == "variantConversions": - value = copy.copy( - value - ) # make a new copy -- no changes in the default, please - object.__setattr__(self, name, value) - - def __getattr__(self, item): - if ( - item == "rollback" - ): # the rollback method only appears if the database supports transactions - if self.supportsTransactions: - return ( - self._rollback - ) # return the rollback method so the caller can execute it. 
- else: - raise AttributeError("this data provider does not support Rollback") - elif item == "autocommit": - return self._autocommit - else: - raise AttributeError( - 'no such attribute in ADO connection object as="%s"' % item - ) - - def cursor(self): - "Return a new Cursor Object using the connection." - self.messages = [] - c = Cursor(self) - return c - - def _i_am_here(self, crsr): - "message from a new cursor proclaiming its existence" - oid = id(crsr) - self.cursors[oid] = crsr - - def _i_am_closing(self, crsr): - "message from a cursor giving connection a chance to clean up" - try: - del self.cursors[id(crsr)] - except: - pass - - def printADOerrors(self): - j = self.connector.Errors.Count - if j: - print("ADO Errors:(%i)" % j) - for e in self.connector.Errors: - print("Description: %s" % e.Description) - print("Error: %s %s " % (e.Number, adc.adoErrors.get(e.Number, "unknown"))) - if e.Number == adc.ado_error_TIMEOUT: - print( - "Timeout Error: Try using adodbpi.connect(constr,timeout=Nseconds)" - ) - print("Source: %s" % e.Source) - print("NativeError: %s" % e.NativeError) - print("SQL State: %s" % e.SQLState) - - def _suggest_error_class(self): - """Introspect the current ADO Errors and determine an appropriate error class. - - Error.SQLState is a SQL-defined error condition, per the SQL specification: - http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt - - The 23000 class of errors are integrity errors. - Error 40002 is a transactional integrity error. 
- """ - if self.connector is not None: - for e in self.connector.Errors: - state = str(e.SQLState) - if state.startswith("23") or state == "40002": - return api.IntegrityError - return api.DatabaseError - - def __del__(self): - try: - self._closeAdoConnection() # v2.1 Rose - except: - pass - self.connector = None - - def __enter__(self): # Connections are context managers - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type: - self._rollback() # automatic rollback on errors - else: - self.commit() - - def get_table_names(self): - schema = self.connector.OpenSchema(20) # constant = adSchemaTables - - tables = [] - while not schema.EOF: - name = getIndexedValue(schema.Fields, "TABLE_NAME").Value - tables.append(name) - schema.MoveNext() - del schema - return tables - - -# # # # # ----- the Class that defines a cursor ----- # # # # # -class Cursor(object): - ## ** api required attributes: - ## description... - ## This read-only attribute is a sequence of 7-item sequences. - ## Each of these sequences contains information describing one result column: - ## (name, type_code, display_size, internal_size, precision, scale, null_ok). - ## This attribute will be None for operations that do not return rows or if the - ## cursor has not had an operation invoked via the executeXXX() method yet. - ## The type_code can be interpreted by comparing it to the Type Objects specified in the section below. - ## rowcount... - ## This read-only attribute specifies the number of rows that the last executeXXX() produced - ## (for DQL statements like select) or affected (for DML statements like update or insert). - ## The attribute is -1 in case no executeXXX() has been performed on the cursor or - ## the rowcount of the last operation is not determinable by the interface.[7] - ## arraysize... - ## This read/write attribute specifies the number of rows to fetch at a time with fetchmany(). - ## It defaults to 1 meaning to fetch a single row at a time. 
- ## Implementations must observe this value with respect to the fetchmany() method, - ## but are free to interact with the database a single row at a time. - ## It may also be used in the implementation of executemany(). - ## ** extension attributes: - ## paramstyle... - ## allows the programmer to override the connection's default paramstyle - ## errorhandler... - ## allows the programmer to override the connection's default error handler - - def __init__(self, connection): - self.command = None - self._ado_prepared = False - self.messages = [] - self.connection = connection - self.paramstyle = connection.paramstyle # used for overriding the paramstyle - self._parameter_names = [] - self.recordset_is_remote = False - self.rs = None # the ADO recordset for this cursor - self.converters = [] # conversion function for each column - self.columnNames = {} # names of columns {lowercase name : number,...} - self.numberOfColumns = 0 - self._description = None - self.rowcount = -1 - self.errorhandler = connection.errorhandler - self.arraysize = 1 - connection._i_am_here(self) - if verbose: - print( - "%s New cursor at %X on conn %X" - % (version, id(self), id(self.connection)) - ) - - def __iter__(self): # [2.1 Zamarev] - return iter(self.fetchone, None) # [2.1 Zamarev] - - def prepare(self, operation): - self.command = operation - self._description = None - self._ado_prepared = "setup" - - def __next__(self): - r = self.fetchone() - if r: - return r - raise StopIteration - - def __enter__(self): - "Allow database cursors to be used with context managers." - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - "Allow database cursors to be used with context managers." 
- self.close() - - def _raiseCursorError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self.connection, self, errorclass, errorvalue) - - def build_column_info(self, recordset): - self.converters = [] # convertion function for each column - self.columnNames = {} # names of columns {lowercase name : number,...} - self._description = None - - # if EOF and BOF are true at the same time, there are no records in the recordset - if (recordset is None) or (recordset.State == adc.adStateClosed): - self.rs = None - self.numberOfColumns = 0 - return - self.rs = recordset # v2.1.1 bkline - self.recordset_format = api.RS_ARRAY if api.onIronPython else api.RS_WIN_32 - self.numberOfColumns = recordset.Fields.Count - try: - varCon = self.connection.variantConversions - except AttributeError: - varCon = api.variantConversions - for i in range(self.numberOfColumns): - f = getIndexedValue(self.rs.Fields, i) - try: - self.converters.append( - varCon[f.Type] - ) # conversion function for this column - except KeyError: - self._raiseCursorError( - api.InternalError, "Data column of Unknown ADO type=%s" % f.Type - ) - self.columnNames[f.Name.lower()] = i # columnNames lookup - - def _makeDescriptionFromRS(self): - # Abort if closed or no recordset. - if self.rs is None: - self._description = None - return - desc = [] - for i in range(self.numberOfColumns): - f = getIndexedValue(self.rs.Fields, i) - if self.rs.EOF or self.rs.BOF: - display_size = None - else: - display_size = ( - f.ActualSize - ) # TODO: Is this the correct defintion according to the DB API 2 Spec ? 
- null_ok = bool(f.Attributes & adc.adFldMayBeNull) # v2.1 Cole - desc.append( - ( - f.Name, - f.Type, - display_size, - f.DefinedSize, - f.Precision, - f.NumericScale, - null_ok, - ) - ) - self._description = desc - - def get_description(self): - if not self._description: - self._makeDescriptionFromRS() - return self._description - - def __getattr__(self, item): - if item == "description": - return self.get_description() - object.__getattribute__( - self, item - ) # may get here on Remote attribute calls for existing attributes - - def format_description(self, d): - """Format db_api description tuple for printing.""" - if self.description is None: - self._makeDescriptionFromRS() - if isinstance(d, int): - d = self.description[d] - desc = ( - "Name= %s, Type= %s, DispSize= %s, IntSize= %s, Precision= %s, Scale= %s NullOK=%s" - % ( - d[0], - adc.adTypeNames.get(d[1], str(d[1]) + " (unknown type)"), - d[2], - d[3], - d[4], - d[5], - d[6], - ) - ) - return desc - - def close(self, dont_tell_me=False): - """Close the cursor now (rather than whenever __del__ is called). - The cursor will be unusable from this point forward; an Error (or subclass) - exception will be raised if any operation is attempted with the cursor. 
- """ - if self.connection is None: - return - self.messages = [] - if ( - self.rs and self.rs.State != adc.adStateClosed - ): # rs exists and is open #v2.1 Rose - self.rs.Close() # v2.1 Rose - self.rs = None # let go of the recordset so ADO will let it be disposed #v2.1 Rose - if not dont_tell_me: - self.connection._i_am_closing( - self - ) # take me off the connection's cursors list - self.connection = ( - None # this will make all future method calls on me throw an exception - ) - if verbose: - print("adodbapi Closed cursor at %X" % id(self)) - - def __del__(self): - try: - self.close() - except: - pass - - def _new_command(self, command_type=adc.adCmdText): - self.cmd = None - self.messages = [] - - if self.connection is None: - self._raiseCursorError(api.InterfaceError, None) - return - try: - self.cmd = Dispatch("ADODB.Command") - self.cmd.ActiveConnection = self.connection.connector - self.cmd.CommandTimeout = self.connection.timeout - self.cmd.CommandType = command_type - self.cmd.CommandText = self.commandText - self.cmd.Prepared = bool(self._ado_prepared) - except: - self._raiseCursorError( - api.DatabaseError, - 'Error creating new ADODB.Command object for "%s"' - % repr(self.commandText), - ) - - def _execute_command(self): - # Stored procedures may have an integer return value - self.return_value = None - recordset = None - count = -1 # default value - if verbose: - print('Executing command="%s"' % self.commandText) - try: - # ----- the actual SQL is executed here --- - if api.onIronPython: - ra = Reference[int]() - recordset = self.cmd.Execute(ra) - count = ra.Value - else: # pywin32 - recordset, count = self.cmd.Execute() - # ----- ------------------------------- --- - except Exception as e: - _message = "" - if hasattr(e, "args"): - _message += str(e.args) + "\n" - _message += "Command:\n%s\nParameters:\n%s" % ( - self.commandText, - format_parameters(self.cmd.Parameters, True), - ) - klass = self.connection._suggest_error_class() - 
self._raiseCursorError(klass, _message) - try: - self.rowcount = recordset.RecordCount - except: - self.rowcount = count - self.build_column_info(recordset) - - # The ADO documentation hints that obtaining the recordcount may be timeconsuming - # "If the Recordset object does not support approximate positioning, this property - # may be a significant drain on resources # [ekelund] - # Therefore, COM will not return rowcount for server-side cursors. [Cole] - # Client-side cursors (the default since v2.8) will force a static - # cursor, and rowcount will then be set accurately [Cole] - - def get_rowcount(self): - return self.rowcount - - def get_returned_parameters(self): - """with some providers, returned parameters and the .return_value are not available until - after the last recordset has been read. In that case, you must coll nextset() until it - returns None, then call this method to get your returned information.""" - - retLst = ( - [] - ) # store procedures may return altered parameters, including an added "return value" item - for p in tuple(self.cmd.Parameters): - if verbose > 2: - print( - 'Returned=Name: %s, Dir.: %s, Type: %s, Size: %s, Value: "%s",' - " Precision: %s, NumericScale: %s" - % ( - p.Name, - adc.directions[p.Direction], - adc.adTypeNames.get(p.Type, str(p.Type) + " (unknown type)"), - p.Size, - p.Value, - p.Precision, - p.NumericScale, - ) - ) - pyObject = api.convert_to_python(p.Value, api.variantConversions[p.Type]) - if p.Direction == adc.adParamReturnValue: - self.returnValue = ( - pyObject # also load the undocumented attribute (Vernon's Error!) - ) - self.return_value = pyObject - else: - retLst.append(pyObject) - return retLst # return the parameter list to the caller - - def callproc(self, procname, parameters=None): - """Call a stored database procedure with the given name. - The sequence of parameters must contain one entry for each - argument that the sproc expects. 
The result of the - call is returned as modified copy of the input - sequence. Input parameters are left untouched, output and - input/output parameters replaced with possibly new values. - - The sproc may also provide a result set as output, - which is available through the standard .fetch*() methods. - Extension: A "return_value" property may be set on the - cursor if the sproc defines an integer return value. - """ - self._parameter_names = [] - self.commandText = procname - self._new_command(command_type=adc.adCmdStoredProc) - self._buildADOparameterList(parameters, sproc=True) - if verbose > 2: - print( - "Calling Stored Proc with Params=", - format_parameters(self.cmd.Parameters, True), - ) - self._execute_command() - return self.get_returned_parameters() - - def _reformat_operation(self, operation, parameters): - if self.paramstyle in ("format", "pyformat"): # convert %s to ? - operation, self._parameter_names = api.changeFormatToQmark(operation) - elif self.paramstyle == "named" or ( - self.paramstyle == "dynamic" and isinstance(parameters, Mapping) - ): - operation, self._parameter_names = api.changeNamedToQmark( - operation - ) # convert :name to ? - return operation - - def _buildADOparameterList(self, parameters, sproc=False): - self.parameters = parameters - if parameters is None: - parameters = [] - - # Note: ADO does not preserve the parameter list, even if "Prepared" is True, so we must build every time. 
- parameters_known = False - if sproc: # needed only if we are calling a stored procedure - try: # attempt to use ADO's parameter list - self.cmd.Parameters.Refresh() - if verbose > 2: - print( - "ADO detected Params=", - format_parameters(self.cmd.Parameters, True), - ) - print("Program Parameters=", repr(parameters)) - parameters_known = True - except api.Error: - if verbose: - print("ADO Parameter Refresh failed") - pass - else: - if len(parameters) != self.cmd.Parameters.Count - 1: - raise api.ProgrammingError( - "You must supply %d parameters for this stored procedure" - % (self.cmd.Parameters.Count - 1) - ) - if sproc or parameters != []: - i = 0 - if parameters_known: # use ado parameter list - if self._parameter_names: # named parameters - for i, pm_name in enumerate(self._parameter_names): - p = getIndexedValue(self.cmd.Parameters, i) - try: - _configure_parameter( - p, parameters[pm_name], p.Type, parameters_known - ) - except Exception as e: - _message = ( - "Error Converting Parameter %s: %s, %s <- %s\n" - % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(parameters[pm_name]), - ) - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - else: # regular sequence of parameters - for value in parameters: - p = getIndexedValue(self.cmd.Parameters, i) - if ( - p.Direction == adc.adParamReturnValue - ): # this is an extra parameter added by ADO - i += 1 # skip the extra - p = getIndexedValue(self.cmd.Parameters, i) - try: - _configure_parameter(p, value, p.Type, parameters_known) - except Exception as e: - _message = ( - "Error Converting Parameter %s: %s, %s <- %s\n" - % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(value), - ) - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - i += 1 - else: # -- build own parameter list - if ( - self._parameter_names - ): # we expect a dictionary of parameters, this is the list of expected names - for parm_name in self._parameter_names: - 
elem = parameters[parm_name] - adotype = api.pyTypeToADOType(elem) - p = self.cmd.CreateParameter( - parm_name, adotype, adc.adParamInput - ) - _configure_parameter(p, elem, adotype, parameters_known) - try: - self.cmd.Parameters.Append(p) - except Exception as e: - _message = "Error Building Parameter %s: %s, %s <- %s\n" % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(elem), - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - else: # expecting the usual sequence of parameters - if sproc: - p = self.cmd.CreateParameter( - "@RETURN_VALUE", adc.adInteger, adc.adParamReturnValue - ) - self.cmd.Parameters.Append(p) - - for elem in parameters: - name = "p%i" % i - adotype = api.pyTypeToADOType(elem) - p = self.cmd.CreateParameter( - name, adotype, adc.adParamInput - ) # Name, Type, Direction, Size, Value - _configure_parameter(p, elem, adotype, parameters_known) - try: - self.cmd.Parameters.Append(p) - except Exception as e: - _message = "Error Building Parameter %s: %s, %s <- %s\n" % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(elem), - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - i += 1 - if self._ado_prepared == "setup": - self._ado_prepared = ( - True # parameters will be "known" by ADO next loop - ) - - def execute(self, operation, parameters=None): - """Prepare and execute a database operation (query or command). - - Parameters may be provided as sequence or mapping and will be bound to variables in the operation. - Variables are specified in a database-specific notation - (see the module's paramstyle attribute for details). [5] - A reference to the operation will be retained by the cursor. - If the same operation object is passed in again, then the cursor - can optimize its behavior. This is most effective for algorithms - where the same operation is used, but different parameters are bound to it (many times). 
- - For maximum efficiency when reusing an operation, it is best to use - the setinputsizes() method to specify the parameter types and sizes ahead of time. - It is legal for a parameter to not match the predefined information; - the implementation should compensate, possibly with a loss of efficiency. - - The parameters may also be specified as list of tuples to e.g. insert multiple rows in - a single operation, but this kind of usage is depreciated: executemany() should be used instead. - - Return value is not defined. - - [5] The module will use the __getitem__ method of the parameters object to map either positions - (integers) or names (strings) to parameter values. This allows for both sequences and mappings - to be used as input. - The term "bound" refers to the process of binding an input value to a database execution buffer. - In practical terms, this means that the input value is directly used as a value in the operation. - The client should not be required to "escape" the value so that it can be used -- the value - should be equal to the actual database value.""" - if ( - self.command is not operation - or self._ado_prepared == "setup" - or not hasattr(self, "commandText") - ): - if self.command is not operation: - self._ado_prepared = False - self.command = operation - self._parameter_names = [] - self.commandText = ( - operation - if (self.paramstyle == "qmark" or not parameters) - else self._reformat_operation(operation, parameters) - ) - self._new_command() - self._buildADOparameterList(parameters) - if verbose > 3: - print("Params=", format_parameters(self.cmd.Parameters, True)) - self._execute_command() - - def executemany(self, operation, seq_of_parameters): - """Prepare a database operation (query or command) - and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters. - - Return values are not defined. 
- """ - self.messages = list() - total_recordcount = 0 - - self.prepare(operation) - for params in seq_of_parameters: - self.execute(self.command, params) - if self.rowcount == -1: - total_recordcount = -1 - if total_recordcount != -1: - total_recordcount += self.rowcount - self.rowcount = total_recordcount - - def _fetch(self, limit=None): - """Fetch rows from the current recordset. - - limit -- Number of rows to fetch, or None (default) to fetch all rows. - """ - if self.connection is None or self.rs is None: - self._raiseCursorError( - api.FetchFailedError, "fetch() on closed connection or empty query set" - ) - return - - if self.rs.State == adc.adStateClosed or self.rs.BOF or self.rs.EOF: - return list() - if limit: # limit number of rows retrieved - ado_results = self.rs.GetRows(limit) - else: # get all rows - ado_results = self.rs.GetRows() - if ( - self.recordset_format == api.RS_ARRAY - ): # result of GetRows is a two-dimension array - length = ( - len(ado_results) // self.numberOfColumns - ) # length of first dimension - else: # pywin32 - length = len(ado_results[0]) # result of GetRows is tuples in a tuple - fetchObject = api.SQLrows( - ado_results, length, self - ) # new object to hold the results of the fetch - return fetchObject - - def fetchone(self): - """Fetch the next row of a query result set, returning a single sequence, - or None when no more data is available. - - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. - """ - self.messages = [] - result = self._fetch(1) - if result: # return record (not list of records) - return result[0] - return None - - def fetchmany(self, size=None): - """Fetch the next set of rows of a query result, returning a list of tuples. An empty sequence is returned when no more rows are available. - - The number of rows to fetch per call is specified by the parameter. 
- If it is not given, the cursor's arraysize determines the number of rows to be fetched. - The method should try to fetch as many rows as indicated by the size parameter. - If this is not possible due to the specified number of rows not being available, - fewer rows may be returned. - - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. - - Note there are performance considerations involved with the size parameter. - For optimal performance, it is usually best to use the arraysize attribute. - If the size parameter is used, then it is best for it to retain the same value from - one fetchmany() call to the next. - """ - self.messages = [] - if size is None: - size = self.arraysize - return self._fetch(size) - - def fetchall(self): - """Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). - - Note that the cursor's arraysize attribute - can affect the performance of this operation. - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. - """ - self.messages = [] - return self._fetch() - - def nextset(self): - """Skip to the next available recordset, discarding any remaining rows from the current recordset. - - If there are no more sets, the method returns None. Otherwise, it returns a true - value and subsequent calls to the fetch methods will return rows from the next result set. - - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. 
- """ - self.messages = [] - if self.connection is None or self.rs is None: - self._raiseCursorError( - api.OperationalError, - ("nextset() on closed connection or empty query set"), - ) - return None - - if api.onIronPython: - try: - recordset = self.rs.NextRecordset() - except TypeError: - recordset = None - except api.Error as exc: - self._raiseCursorError(api.NotSupportedError, exc.args) - else: # pywin32 - try: # [begin 2.1 ekelund] - rsTuple = self.rs.NextRecordset() # - except pywintypes.com_error as exc: # return appropriate error - self._raiseCursorError( - api.NotSupportedError, exc.args - ) # [end 2.1 ekelund] - recordset = rsTuple[0] - if recordset is None: - return None - self.build_column_info(recordset) - return True - - def setinputsizes(self, sizes): - pass - - def setoutputsize(self, size, column=None): - pass - - def _last_query(self): # let the programmer see what query we actually used - try: - if self.parameters == None: - ret = self.commandText - else: - ret = "%s,parameters=%s" % (self.commandText, repr(self.parameters)) - except: - ret = None - return ret - - query = property(_last_query, None, None, "returns the last query executed") - - -if __name__ == "__main__": - raise api.ProgrammingError(version + " cannot be run as a main program.") diff --git a/lib/adodbapi/apibase.py b/lib/adodbapi/apibase.py deleted file mode 100644 index a56cd4b6..00000000 --- a/lib/adodbapi/apibase.py +++ /dev/null @@ -1,794 +0,0 @@ -"""adodbapi.apibase - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole -* http://sourceforge.net/projects/pywin32 -* http://sourceforge.net/projects/adodbapi -""" - -import datetime -import decimal -import numbers -import sys -import time - -# noinspection PyUnresolvedReferences -from . 
import ado_consts as adc - -verbose = False # debugging flag - -onIronPython = sys.platform == "cli" -if onIronPython: # we need type definitions for odd data we may need to convert - # noinspection PyUnresolvedReferences - from System import DateTime, DBNull - - NullTypes = (type(None), DBNull) -else: - DateTime = type(NotImplemented) # should never be seen on win32 - NullTypes = type(None) - -# --- define objects to smooth out Python3 <-> Python 2.x differences -unicodeType = str -longType = int -StringTypes = str -makeByteBuffer = bytes -memoryViewType = memoryview -_BaseException = Exception - -try: # jdhardy -- handle bytes under IronPython & Py3 - bytes -except NameError: - bytes = str # define it for old Pythons - - -# ------- Error handlers ------ -def standardErrorHandler(connection, cursor, errorclass, errorvalue): - err = (errorclass, errorvalue) - try: - connection.messages.append(err) - except: - pass - if cursor is not None: - try: - cursor.messages.append(err) - except: - pass - raise errorclass(errorvalue) - - -# Note: _BaseException is defined differently between Python 2.x and 3.x -class Error(_BaseException): - pass # Exception that is the base class of all other error - # exceptions. You can use this to catch all errors with one - # single 'except' statement. Warnings are not considered - # errors and thus should not use this class as base. It must - # be a subclass of the Python StandardError (defined in the - # module exceptions). 
- - -class Warning(_BaseException): - pass - - -class InterfaceError(Error): - pass - - -class DatabaseError(Error): - pass - - -class InternalError(DatabaseError): - pass - - -class OperationalError(DatabaseError): - pass - - -class ProgrammingError(DatabaseError): - pass - - -class IntegrityError(DatabaseError): - pass - - -class DataError(DatabaseError): - pass - - -class NotSupportedError(DatabaseError): - pass - - -class FetchFailedError(OperationalError): - """ - Error is used by RawStoredProcedureQuerySet to determine when a fetch - failed due to a connection being closed or there is no record set - returned. (Non-standard, added especially for django) - """ - - pass - - -# # # # # ----- Type Objects and Constructors ----- # # # # # -# Many databases need to have the input in a particular format for binding to an operation's input parameters. -# For example, if an input is destined for a DATE column, then it must be bound to the database in a particular -# string format. Similar problems exist for "Row ID" columns or large binary items (e.g. blobs or RAW columns). -# This presents problems for Python since the parameters to the executeXXX() method are untyped. -# When the database module sees a Python string object, it doesn't know if it should be bound as a simple CHAR -# column, as a raw BINARY item, or as a DATE. -# -# To overcome this problem, a module must provide the constructors defined below to create objects that can -# hold special values. When passed to the cursor methods, the module can then detect the proper type of -# the input parameter and bind it accordingly. - -# A Cursor Object's description attribute returns information about each of the result columns of a query. -# The type_code must compare equal to one of Type Objects defined below. Type Objects may be equal to more than -# one type code (e.g. DATETIME could be equal to the type codes for date, time and timestamp columns; -# see the Implementation Hints below for details). 
- -# SQL NULL values are represented by the Python None singleton on input and output. - -# Note: Usage of Unix ticks for database interfacing can cause troubles because of the limited date range they cover. - - -# def Date(year,month,day): -# "This function constructs an object holding a date value. " -# return dateconverter.date(year,month,day) #dateconverter.Date(year,month,day) -# -# def Time(hour,minute,second): -# "This function constructs an object holding a time value. " -# return dateconverter.time(hour, minute, second) # dateconverter.Time(hour,minute,second) -# -# def Timestamp(year,month,day,hour,minute,second): -# "This function constructs an object holding a time stamp value. " -# return dateconverter.datetime(year,month,day,hour,minute,second) -# -# def DateFromTicks(ticks): -# """This function constructs an object holding a date value from the given ticks value -# (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ -# return Date(*time.gmtime(ticks)[:3]) -# -# def TimeFromTicks(ticks): -# """This function constructs an object holding a time value from the given ticks value -# (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ -# return Time(*time.gmtime(ticks)[3:6]) -# -# def TimestampFromTicks(ticks): -# """This function constructs an object holding a time stamp value from the given -# ticks value (number of seconds since the epoch; -# see the documentation of the standard Python time module for details). """ -# return Timestamp(*time.gmtime(ticks)[:6]) -# -# def Binary(aString): -# """This function constructs an object capable of holding a binary (long) string value. 
""" -# b = makeByteBuffer(aString) -# return b -# ----- Time converters ---------------------------------------------- -class TimeConverter(object): # this is a generic time converter skeleton - def __init__(self): # the details will be filled in by instances - self._ordinal_1899_12_31 = datetime.date(1899, 12, 31).toordinal() - 1 - # Use cls.types to compare if an input parameter is a datetime - self.types = { - type(self.Date(2000, 1, 1)), - type(self.Time(12, 1, 1)), - type(self.Timestamp(2000, 1, 1, 12, 1, 1)), - datetime.datetime, - datetime.time, - datetime.date, - } - - def COMDate(self, obj): - """Returns a ComDate from a date-time""" - try: # most likely a datetime - tt = obj.timetuple() - - try: - ms = obj.microsecond - except: - ms = 0 - return self.ComDateFromTuple(tt, ms) - except: # might be a tuple - try: - return self.ComDateFromTuple(obj) - except: # try an mxdate - try: - return obj.COMDate() - except: - raise ValueError('Cannot convert "%s" to COMdate.' % repr(obj)) - - def ComDateFromTuple(self, t, microseconds=0): - d = datetime.date(t[0], t[1], t[2]) - integerPart = d.toordinal() - self._ordinal_1899_12_31 - ms = (t[3] * 3600 + t[4] * 60 + t[5]) * 1000000 + microseconds - fractPart = float(ms) / 86400000000.0 - return integerPart + fractPart - - def DateObjectFromCOMDate(self, comDate): - "Returns an object of the wanted type from a ComDate" - raise NotImplementedError # "Abstract class" - - def Date(self, year, month, day): - "This function constructs an object holding a date value." - raise NotImplementedError # "Abstract class" - - def Time(self, hour, minute, second): - "This function constructs an object holding a time value." - raise NotImplementedError # "Abstract class" - - def Timestamp(self, year, month, day, hour, minute, second): - "This function constructs an object holding a time stamp value." 
- raise NotImplementedError # "Abstract class" - # all purpose date to ISO format converter - - def DateObjectToIsoFormatString(self, obj): - "This function should return a string in the format 'YYYY-MM-dd HH:MM:SS:ms' (ms optional)" - try: # most likely, a datetime.datetime - s = obj.isoformat(" ") - except (TypeError, AttributeError): - if isinstance(obj, datetime.date): - s = obj.isoformat() + " 00:00:00" # return exact midnight - else: - try: # maybe it has a strftime method, like mx - s = obj.strftime("%Y-%m-%d %H:%M:%S") - except AttributeError: - try: # but may be time.struct_time - s = time.strftime("%Y-%m-%d %H:%M:%S", obj) - except: - raise ValueError('Cannot convert "%s" to isoformat' % repr(obj)) - return s - - -# -- Optional: if mx extensions are installed you may use mxDateTime ---- -try: - import mx.DateTime - - mxDateTime = True -except: - mxDateTime = False -if mxDateTime: - - class mxDateTimeConverter(TimeConverter): # used optionally if installed - def __init__(self): - TimeConverter.__init__(self) - self.types.add(type(mx.DateTime)) - - def DateObjectFromCOMDate(self, comDate): - return mx.DateTime.DateTimeFromCOMDate(comDate) - - def Date(self, year, month, day): - return mx.DateTime.Date(year, month, day) - - def Time(self, hour, minute, second): - return mx.DateTime.Time(hour, minute, second) - - def Timestamp(self, year, month, day, hour, minute, second): - return mx.DateTime.Timestamp(year, month, day, hour, minute, second) - -else: - - class mxDateTimeConverter(TimeConverter): - pass # if no mx is installed - - -class pythonDateTimeConverter(TimeConverter): # standard since Python 2.3 - def __init__(self): - TimeConverter.__init__(self) - - def DateObjectFromCOMDate(self, comDate): - if isinstance(comDate, datetime.datetime): - odn = comDate.toordinal() - tim = comDate.time() - new = datetime.datetime.combine(datetime.datetime.fromordinal(odn), tim) - return new - # return comDate.replace(tzinfo=None) # make non aware - elif 
isinstance(comDate, DateTime): - fComDate = comDate.ToOADate() # ironPython clr Date/Time - else: - fComDate = float(comDate) # ComDate is number of days since 1899-12-31 - integerPart = int(fComDate) - floatpart = fComDate - integerPart - ##if floatpart == 0.0: - ## return datetime.date.fromordinal(integerPart + self._ordinal_1899_12_31) - dte = datetime.datetime.fromordinal( - integerPart + self._ordinal_1899_12_31 - ) + datetime.timedelta(milliseconds=floatpart * 86400000) - # millisecondsperday=86400000 # 24*60*60*1000 - return dte - - def Date(self, year, month, day): - return datetime.date(year, month, day) - - def Time(self, hour, minute, second): - return datetime.time(hour, minute, second) - - def Timestamp(self, year, month, day, hour, minute, second): - return datetime.datetime(year, month, day, hour, minute, second) - - -class pythonTimeConverter(TimeConverter): # the old, ?nix type date and time - def __init__(self): # caution: this Class gets confised by timezones and DST - TimeConverter.__init__(self) - self.types.add(time.struct_time) - - def DateObjectFromCOMDate(self, comDate): - "Returns ticks since 1970" - if isinstance(comDate, datetime.datetime): - return comDate.timetuple() - elif isinstance(comDate, DateTime): # ironPython clr date/time - fcomDate = comDate.ToOADate() - else: - fcomDate = float(comDate) - secondsperday = 86400 # 24*60*60 - # ComDate is number of days since 1899-12-31, gmtime epoch is 1970-1-1 = 25569 days - t = time.gmtime(secondsperday * (fcomDate - 25569.0)) - return t # year,month,day,hour,minute,second,weekday,julianday,daylightsaving=t - - def Date(self, year, month, day): - return self.Timestamp(year, month, day, 0, 0, 0) - - def Time(self, hour, minute, second): - return time.gmtime((hour * 60 + minute) * 60 + second) - - def Timestamp(self, year, month, day, hour, minute, second): - return time.localtime( - time.mktime((year, month, day, hour, minute, second, 0, 0, -1)) - ) - - -base_dateconverter = 
pythonDateTimeConverter() - -# ------ DB API required module attributes --------------------- -threadsafety = 1 # TODO -- find out whether this module is actually BETTER than 1. - -apilevel = "2.0" # String constant stating the supported DB API level. - -paramstyle = "qmark" # the default parameter style - -# ------ control for an extension which may become part of DB API 3.0 --- -accepted_paramstyles = ("qmark", "named", "format", "pyformat", "dynamic") - -# ------------------------------------------------------------------------------------------ -# define similar types for generic conversion routines -adoIntegerTypes = ( - adc.adInteger, - adc.adSmallInt, - adc.adTinyInt, - adc.adUnsignedInt, - adc.adUnsignedSmallInt, - adc.adUnsignedTinyInt, - adc.adBoolean, - adc.adError, -) # max 32 bits -adoRowIdTypes = (adc.adChapter,) # v2.1 Rose -adoLongTypes = (adc.adBigInt, adc.adFileTime, adc.adUnsignedBigInt) -adoExactNumericTypes = ( - adc.adDecimal, - adc.adNumeric, - adc.adVarNumeric, - adc.adCurrency, -) # v2.3 Cole -adoApproximateNumericTypes = (adc.adDouble, adc.adSingle) # v2.1 Cole -adoStringTypes = ( - adc.adBSTR, - adc.adChar, - adc.adLongVarChar, - adc.adLongVarWChar, - adc.adVarChar, - adc.adVarWChar, - adc.adWChar, -) -adoBinaryTypes = (adc.adBinary, adc.adLongVarBinary, adc.adVarBinary) -adoDateTimeTypes = (adc.adDBTime, adc.adDBTimeStamp, adc.adDate, adc.adDBDate) -adoRemainingTypes = ( - adc.adEmpty, - adc.adIDispatch, - adc.adIUnknown, - adc.adPropVariant, - adc.adArray, - adc.adUserDefined, - adc.adVariant, - adc.adGUID, -) - - -# this class is a trick to determine whether a type is a member of a related group of types. see PEP notes -class DBAPITypeObject(object): - def __init__(self, valuesTuple): - self.values = frozenset(valuesTuple) - - def __eq__(self, other): - return other in self.values - - def __ne__(self, other): - return other not in self.values - - -"""This type object is used to describe columns in a database that are string-based (e.g. 
CHAR). """ -STRING = DBAPITypeObject(adoStringTypes) - -"""This type object is used to describe (long) binary columns in a database (e.g. LONG, RAW, BLOBs). """ -BINARY = DBAPITypeObject(adoBinaryTypes) - -"""This type object is used to describe numeric columns in a database. """ -NUMBER = DBAPITypeObject( - adoIntegerTypes + adoLongTypes + adoExactNumericTypes + adoApproximateNumericTypes -) - -"""This type object is used to describe date/time columns in a database. """ - -DATETIME = DBAPITypeObject(adoDateTimeTypes) -"""This type object is used to describe the "Row ID" column in a database. """ -ROWID = DBAPITypeObject(adoRowIdTypes) - -OTHER = DBAPITypeObject(adoRemainingTypes) - -# ------- utilities for translating python data types to ADO data types --------------------------------- -typeMap = { - memoryViewType: adc.adVarBinary, - float: adc.adDouble, - type(None): adc.adEmpty, - str: adc.adBSTR, - bool: adc.adBoolean, # v2.1 Cole - decimal.Decimal: adc.adDecimal, - int: adc.adBigInt, - bytes: adc.adVarBinary, -} - - -def pyTypeToADOType(d): - tp = type(d) - try: - return typeMap[tp] - except KeyError: # The type was not defined in the pre-computed Type table - from . import dateconverter - - if ( - tp in dateconverter.types - ): # maybe it is one of our supported Date/Time types - return adc.adDate - # otherwise, attempt to discern the type by probing the data object itself -- to handle duck typing - if isinstance(d, StringTypes): - return adc.adBSTR - if isinstance(d, numbers.Integral): - return adc.adBigInt - if isinstance(d, numbers.Real): - return adc.adDouble - raise DataError('cannot convert "%s" (type=%s) to ADO' % (repr(d), tp)) - - -# # # # # # # # # # # # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# functions to convert database values to Python objects -# ------------------------------------------------------------------------ -# variant type : function converting variant to Python value -def variantConvertDate(v): - from . 
import dateconverter # this function only called when adodbapi is running - - return dateconverter.DateObjectFromCOMDate(v) - - -def cvtString(variant): # use to get old action of adodbapi v1 if desired - if onIronPython: - try: - return variant.ToString() - except: - pass - return str(variant) - - -def cvtDecimal(variant): # better name - return _convertNumberWithCulture(variant, decimal.Decimal) - - -def cvtNumeric(variant): # older name - don't break old code - return cvtDecimal(variant) - - -def cvtFloat(variant): - return _convertNumberWithCulture(variant, float) - - -def _convertNumberWithCulture(variant, f): - try: - return f(variant) - except (ValueError, TypeError, decimal.InvalidOperation): - try: - europeVsUS = str(variant).replace(",", ".") - return f(europeVsUS) - except (ValueError, TypeError, decimal.InvalidOperation): - pass - - -def cvtInt(variant): - return int(variant) - - -def cvtLong(variant): # only important in old versions where long and int differ - return int(variant) - - -def cvtBuffer(variant): - return bytes(variant) - - -def cvtUnicode(variant): - return str(variant) - - -def identity(x): - return x - - -def cvtUnusual(variant): - if verbose > 1: - sys.stderr.write("Conversion called for Unusual data=%s\n" % repr(variant)) - if isinstance(variant, DateTime): # COMdate or System.Date - from .adodbapi import ( # this will only be called when adodbapi is in use, and very rarely - dateconverter, - ) - - return dateconverter.DateObjectFromCOMDate(variant) - return variant # cannot find conversion function -- just give the data to the user - - -def convert_to_python(variant, func): # convert DB value into Python value - if isinstance(variant, NullTypes): # IronPython Null or None - return None - return func(variant) # call the appropriate conversion function - - -class MultiMap(dict): # builds a dictionary from {(sequence,of,keys) : function} - """A dictionary of ado.type : function -- but you can set multiple items by passing a sequence of 
keys""" - - # useful for defining conversion functions for groups of similar data types. - def __init__(self, aDict): - for k, v in list(aDict.items()): - self[k] = v # we must call __setitem__ - - def __setitem__(self, adoType, cvtFn): - "set a single item, or a whole sequence of items" - try: # user passed us a sequence, set them individually - for type in adoType: - dict.__setitem__(self, type, cvtFn) - except TypeError: # a single value fails attempt to iterate - dict.__setitem__(self, adoType, cvtFn) - - -# initialize variantConversions dictionary used to convert SQL to Python -# this is the dictionary of default conversion functions, built by the class above. -# this becomes a class attribute for the Connection, and that attribute is used -# to build the list of column conversion functions for the Cursor -variantConversions = MultiMap( - { - adoDateTimeTypes: variantConvertDate, - adoApproximateNumericTypes: cvtFloat, - adoExactNumericTypes: cvtDecimal, # use to force decimal rather than unicode - adoLongTypes: cvtLong, - adoIntegerTypes: cvtInt, - adoRowIdTypes: cvtInt, - adoStringTypes: identity, - adoBinaryTypes: cvtBuffer, - adoRemainingTypes: cvtUnusual, - } -) - -# # # # # classes to emulate the result of cursor.fetchxxx() as a sequence of sequences # # # # # -# "an ENUM of how my low level records are laid out" -RS_WIN_32, RS_ARRAY, RS_REMOTE = list(range(1, 4)) - - -class SQLrow(object): # a single database row - # class to emulate a sequence, so that a column may be retrieved by either number or name - def __init__(self, rows, index): # "rows" is an _SQLrows object, index is which row - self.rows = rows # parent 'fetch' container object - self.index = index # my row number within parent - - def __getattr__(self, name): # used for row.columnName type of value access - try: - return self._getValue(self.rows.columnNames[name.lower()]) - except KeyError: - raise AttributeError('Unknown column name "{}"'.format(name)) - - def _getValue(self, key): # key 
must be an integer - if ( - self.rows.recordset_format == RS_ARRAY - ): # retrieve from two-dimensional array - v = self.rows.ado_results[key, self.index] - elif self.rows.recordset_format == RS_REMOTE: - v = self.rows.ado_results[self.index][key] - else: # pywin32 - retrieve from tuple of tuples - v = self.rows.ado_results[key][self.index] - if self.rows.converters is NotImplemented: - return v - return convert_to_python(v, self.rows.converters[key]) - - def __len__(self): - return self.rows.numberOfColumns - - def __getitem__(self, key): # used for row[key] type of value access - if isinstance(key, int): # normal row[1] designation - try: - return self._getValue(key) - except IndexError: - raise - if isinstance(key, slice): - indices = key.indices(self.rows.numberOfColumns) - vl = [self._getValue(i) for i in range(*indices)] - return tuple(vl) - try: - return self._getValue( - self.rows.columnNames[key.lower()] - ) # extension row[columnName] designation - except (KeyError, TypeError): - er, st, tr = sys.exc_info() - raise er( - 'No such key as "%s" in %s' % (repr(key), self.__repr__()) - ).with_traceback(tr) - - def __iter__(self): - return iter(self.__next__()) - - def __next__(self): - for n in range(self.rows.numberOfColumns): - yield self._getValue(n) - - def __repr__(self): # create a human readable representation - taglist = sorted(list(self.rows.columnNames.items()), key=lambda x: x[1]) - s = "" - - def __str__(self): # create a pretty human readable representation - return str( - tuple(str(self._getValue(i)) for i in range(self.rows.numberOfColumns)) - ) - - # TO-DO implement pickling an SQLrow directly - # def __getstate__(self): return self.__dict__ - # def __setstate__(self, d): self.__dict__.update(d) - # which basically tell pickle to treat your class just like a normal one, - # taking self.__dict__ as representing the whole of the instance state, - # despite the existence of the __getattr__. 
- # # # # - - -class SQLrows(object): - # class to emulate a sequence for multiple rows using a container object - def __init__(self, ado_results, numberOfRows, cursor): - self.ado_results = ado_results # raw result of SQL get - try: - self.recordset_format = cursor.recordset_format - self.numberOfColumns = cursor.numberOfColumns - self.converters = cursor.converters - self.columnNames = cursor.columnNames - except AttributeError: - self.recordset_format = RS_ARRAY - self.numberOfColumns = 0 - self.converters = [] - self.columnNames = {} - self.numberOfRows = numberOfRows - - def __len__(self): - return self.numberOfRows - - def __getitem__(self, item): # used for row or row,column access - if not self.ado_results: - return [] - if isinstance(item, slice): # will return a list of row objects - indices = item.indices(self.numberOfRows) - return [SQLrow(self, k) for k in range(*indices)] - elif isinstance(item, tuple) and len(item) == 2: - # d = some_rowsObject[i,j] will return a datum from a two-dimension address - i, j = item - if not isinstance(j, int): - try: - j = self.columnNames[j.lower()] # convert named column to numeric - except KeyError: - raise KeyError('adodbapi: no such column name as "%s"' % repr(j)) - if self.recordset_format == RS_ARRAY: # retrieve from two-dimensional array - v = self.ado_results[j, i] - elif self.recordset_format == RS_REMOTE: - v = self.ado_results[i][j] - else: # pywin32 - retrieve from tuple of tuples - v = self.ado_results[j][i] - if self.converters is NotImplemented: - return v - return convert_to_python(v, self.converters[j]) - else: - row = SQLrow(self, item) # new row descriptor - return row - - def __iter__(self): - return iter(self.__next__()) - - def __next__(self): - for n in range(self.numberOfRows): - row = SQLrow(self, n) - yield row - # # # # # - - # # # # # functions to re-format SQL requests to other paramstyle requirements # # # # # # # # # # - - -def changeNamedToQmark( - op, -): # convert from 'named' 
paramstyle to ADO required '?'mark parameters - outOp = "" - outparms = [] - chunks = op.split( - "'" - ) # quote all literals -- odd numbered list results are literals. - inQuotes = False - for chunk in chunks: - if inQuotes: # this is inside a quote - if chunk == "": # double apostrophe to quote one apostrophe - outOp = outOp[:-1] # so take one away - else: - outOp += "'" + chunk + "'" # else pass the quoted string as is. - else: # is SQL code -- look for a :namedParameter - while chunk: # some SQL string remains - sp = chunk.split(":", 1) - outOp += sp[0] # concat the part up to the : - s = "" - try: - chunk = sp[1] - except IndexError: - chunk = None - if chunk: # there was a parameter - parse it out - i = 0 - c = chunk[0] - while c.isalnum() or c == "_": - i += 1 - try: - c = chunk[i] - except IndexError: - break - s = chunk[:i] - chunk = chunk[i:] - if s: - outparms.append(s) # list the parameters in order - outOp += "?" # put in the Qmark - inQuotes = not inQuotes - return outOp, outparms - - -def changeFormatToQmark( - op, -): # convert from 'format' paramstyle to ADO required '?'mark parameters - outOp = "" - outparams = [] - chunks = op.split( - "'" - ) # quote all literals -- odd numbered list results are literals. - inQuotes = False - for chunk in chunks: - if inQuotes: - if ( - outOp != "" and chunk == "" - ): # he used a double apostrophe to quote one apostrophe - outOp = outOp[:-1] # so take one away - else: - outOp += "'" + chunk + "'" # else pass the quoted string as is. - else: # is SQL code -- look for a %s parameter - if "%(" in chunk: # ugh! pyformat! - while chunk: # some SQL string remains - sp = chunk.split("%(", 1) - outOp += sp[0] # concat the part up to the % - if len(sp) > 1: - try: - s, chunk = sp[1].split(")s", 1) # find the ')s' - except ValueError: - raise ProgrammingError( - 'Pyformat SQL has incorrect format near "%s"' % chunk - ) - outparams.append(s) - outOp += "?" 
# put in the Qmark - else: - chunk = None - else: # proper '%s' format - sp = chunk.split("%s") # make each %s - outOp += "?".join(sp) # into ? - inQuotes = not inQuotes # every other chunk is a quoted string - return outOp, outparams diff --git a/lib/adodbapi/examples/db_print.py b/lib/adodbapi/examples/db_print.py deleted file mode 100644 index 3f5f9d5b..00000000 --- a/lib/adodbapi/examples/db_print.py +++ /dev/null @@ -1,72 +0,0 @@ -""" db_print.py -- a simple demo for ADO database reads.""" - -import sys - -import adodbapi.ado_consts as adc - -cmd_args = ("filename", "table_name") -if "help" in sys.argv: - print("possible settings keywords are:", cmd_args) - sys.exit() - -kw_args = {} # pick up filename and proxy address from command line (optionally) -for arg in sys.argv: - s = arg.split("=") - if len(s) > 1: - if s[0] in cmd_args: - kw_args[s[0]] = s[1] - -kw_args.setdefault( - "filename", "test.mdb" -) # assumes server is running from examples folder -kw_args.setdefault("table_name", "Products") # the name of the demo table - -# the server needs to select the provider based on his Python installation -provider_switch = ["provider", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"] - -# ------------------------ START HERE ------------------------------------- -# create the connection -constr = "Provider=%(provider)s;Data Source=%(filename)s" -import adodbapi as db - -con = db.connect(constr, kw_args, macro_is64bit=provider_switch) - -if kw_args["table_name"] == "?": - print("The tables in your database are:") - for name in con.get_table_names(): - print(name) -else: - # make a cursor on the connection - with con.cursor() as c: - # run an SQL statement on the cursor - sql = "select * from %s" % kw_args["table_name"] - print('performing query="%s"' % sql) - c.execute(sql) - - # check the results - print( - 'result rowcount shows as= %d. 
(Note: -1 means "not known")' % (c.rowcount,) - ) - print("") - print("result data description is:") - print(" NAME Type DispSize IntrnlSz Prec Scale Null?") - for d in c.description: - print( - ("%16s %-12s %8s %8d %4d %5d %s") - % (d[0], adc.adTypeNames[d[1]], d[2], d[3], d[4], d[5], bool(d[6])) - ) - print("") - print("str() of first five records are...") - - # get the results - db = c.fetchmany(5) - - # print them - for rec in db: - print(rec) - - print("") - print("repr() of next row is...") - print(repr(c.fetchone())) - print("") -con.close() diff --git a/lib/adodbapi/examples/db_table_names.py b/lib/adodbapi/examples/db_table_names.py deleted file mode 100644 index eb512a33..00000000 --- a/lib/adodbapi/examples/db_table_names.py +++ /dev/null @@ -1,20 +0,0 @@ -""" db_table_names.py -- a simple demo for ADO database table listing.""" -import sys - -import adodbapi - -try: - databasename = sys.argv[1] -except IndexError: - databasename = "test.mdb" - -provider = ["prv", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"] -constr = "Provider=%(prv)s;Data Source=%(db)s" - -# create the connection -con = adodbapi.connect(constr, db=databasename, macro_is64bit=provider) - -print("Table names in= %s" % databasename) - -for table in con.get_table_names(): - print(table) diff --git a/lib/adodbapi/examples/xls_read.py b/lib/adodbapi/examples/xls_read.py deleted file mode 100644 index 45e0d277..00000000 --- a/lib/adodbapi/examples/xls_read.py +++ /dev/null @@ -1,41 +0,0 @@ -import sys - -import adodbapi - -try: - import adodbapi.is64bit as is64bit - - is64 = is64bit.Python() -except ImportError: - is64 = False - -if is64: - driver = "Microsoft.ACE.OLEDB.12.0" -else: - driver = "Microsoft.Jet.OLEDB.4.0" -extended = 'Extended Properties="Excel 8.0;HDR=Yes;IMEX=1;"' - -try: # first command line argument will be xls file name -- default to the one written by xls_write.py - filename = sys.argv[1] -except IndexError: - filename = "xx.xls" - -constr = "Provider=%s;Data 
Source=%s;%s" % (driver, filename, extended) - -conn = adodbapi.connect(constr) - -try: # second command line argument will be worksheet name -- default to first worksheet - sheet = sys.argv[2] -except IndexError: - # use ADO feature to get the name of the first worksheet - sheet = conn.get_table_names()[0] - -print("Shreadsheet=%s Worksheet=%s" % (filename, sheet)) -print("------------------------------------------------------------") -crsr = conn.cursor() -sql = "SELECT * from [%s]" % sheet -crsr.execute(sql) -for row in crsr.fetchmany(10): - print(repr(row)) -crsr.close() -conn.close() diff --git a/lib/adodbapi/examples/xls_write.py b/lib/adodbapi/examples/xls_write.py deleted file mode 100644 index 9d1d3114..00000000 --- a/lib/adodbapi/examples/xls_write.py +++ /dev/null @@ -1,41 +0,0 @@ -import datetime - -import adodbapi - -try: - import adodbapi.is64bit as is64bit - - is64 = is64bit.Python() -except ImportError: - is64 = False # in case the user has an old version of adodbapi -if is64: - driver = "Microsoft.ACE.OLEDB.12.0" -else: - driver = "Microsoft.Jet.OLEDB.4.0" -filename = "xx.xls" # file will be created if it does not exist -extended = 'Extended Properties="Excel 8.0;Readonly=False;"' - -constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended) - -conn = adodbapi.connect(constr) -with conn: # will auto commit if no errors - with conn.cursor() as crsr: - try: - crsr.execute("drop table SheetOne") - except: - pass # just is case there is one already there - - # create the sheet and the header row and set the types for the columns - crsr.execute( - "create table SheetOne (Name varchar, Rank varchar, SrvcNum integer, Weight float, Birth date)" - ) - - sql = "INSERT INTO SheetOne (name, rank , srvcnum, weight, birth) values (?,?,?,?,?)" - - data = ("Mike Murphy", "SSG", 123456789, 167.8, datetime.date(1922, 12, 27)) - crsr.execute(sql, data) # write the first row of data - crsr.execute( - sql, ["John Jones", "Pvt", 987654321, 140.0, 
datetime.date(1921, 7, 4)] - ) # another row of data -conn.close() -print("Created spreadsheet=%s worksheet=%s" % (filename, "SheetOne")) diff --git a/lib/adodbapi/is64bit.py b/lib/adodbapi/is64bit.py deleted file mode 100644 index bba12b43..00000000 --- a/lib/adodbapi/is64bit.py +++ /dev/null @@ -1,41 +0,0 @@ -"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version""" -import sys - - -def Python(): - if sys.platform == "cli": # IronPython - import System - - return System.IntPtr.Size == 8 - else: - try: - return sys.maxsize > 2147483647 - except AttributeError: - return sys.maxint > 2147483647 - - -def os(): - import platform - - pm = platform.machine() - if pm != ".." and pm.endswith("64"): # recent Python (not Iron) - return True - else: - import os - - if "PROCESSOR_ARCHITEW6432" in os.environ: - return True # 32 bit program running on 64 bit Windows - try: - return os.environ["PROCESSOR_ARCHITECTURE"].endswith( - "64" - ) # 64 bit Windows 64 bit program - except (IndexError, KeyError): - pass # not Windows - try: - return "64" in platform.architecture()[0] # this often works in Linux - except: - return False # is an older version of Python, assume also an older os (best we can guess) - - -if __name__ == "__main__": - print("is64bit.Python() =", Python(), "is64bit.os() =", os()) diff --git a/lib/adodbapi/license.txt b/lib/adodbapi/license.txt deleted file mode 100644 index c255f4aa..00000000 --- a/lib/adodbapi/license.txt +++ /dev/null @@ -1,506 +0,0 @@ - GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1, February 1999 - - Copyright (C) 1991, 1999 Free Software Foundation, Inc. - 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - -[This is the first released version of the Lesser GPL. 
It also counts - as the successor of the GNU Library Public License, version 2, hence - the version number 2.1.] - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -Licenses are intended to guarantee your freedom to share and change -free software--to make sure the software is free for all its users. - - This license, the Lesser General Public License, applies to some -specially designated software packages--typically libraries--of the -Free Software Foundation and other authors who decide to use it. You -can use it too, but we suggest you first think carefully about whether -this license or the ordinary General Public License is the better -strategy to use in any particular case, based on the explanations below. - - When we speak of free software, we are referring to freedom of use, -not price. Our General Public Licenses are designed to make sure that -you have the freedom to distribute copies of free software (and charge -for this service if you wish); that you receive source code or can get -it if you want it; that you can change the software and use pieces of -it in new free programs; and that you are informed that you can do -these things. - - To protect your rights, we need to make restrictions that forbid -distributors to deny you these rights or to ask you to surrender these -rights. These restrictions translate to certain responsibilities for -you if you distribute copies of the library or if you modify it. - - For example, if you distribute copies of the library, whether gratis -or for a fee, you must give the recipients all the rights that we gave -you. You must make sure that they, too, receive or can get the source -code. If you link other code with the library, you must provide -complete object files to the recipients, so that they can relink them -with the library after making changes to the library and recompiling -it. 
And you must show them these terms so they know their rights. - - We protect your rights with a two-step method: (1) we copyright the -library, and (2) we offer you this license, which gives you legal -permission to copy, distribute and/or modify the library. - - To protect each distributor, we want to make it very clear that -there is no warranty for the free library. Also, if the library is -modified by someone else and passed on, the recipients should know -that what they have is not the original version, so that the original -author's reputation will not be affected by problems that might be -introduced by others. - - - - Finally, software patents pose a constant threat to the existence of -any free program. We wish to make sure that a company cannot -effectively restrict the users of a free program by obtaining a -restrictive license from a patent holder. Therefore, we insist that -any patent license obtained for a version of the library must be -consistent with the full freedom of use specified in this license. - - Most GNU software, including some libraries, is covered by the -ordinary GNU General Public License. This license, the GNU Lesser -General Public License, applies to certain designated libraries, and -is quite different from the ordinary General Public License. We use -this license for certain libraries in order to permit linking those -libraries into non-free programs. - - When a program is linked with a library, whether statically or using -a shared library, the combination of the two is legally speaking a -combined work, a derivative of the original library. The ordinary -General Public License therefore permits such linking only if the -entire combination fits its criteria of freedom. The Lesser General -Public License permits more lax criteria for linking other code with -the library. - - We call this license the "Lesser" General Public License because it -does Less to protect the user's freedom than the ordinary General -Public License. 
It also provides other free software developers Less -of an advantage over competing non-free programs. These disadvantages -are the reason we use the ordinary General Public License for many -libraries. However, the Lesser license provides advantages in certain -special circumstances. - - For example, on rare occasions, there may be a special need to -encourage the widest possible use of a certain library, so that it becomes -a de-facto standard. To achieve this, non-free programs must be -allowed to use the library. A more frequent case is that a free -library does the same job as widely used non-free libraries. In this -case, there is little to gain by limiting the free library to free -software only, so we use the Lesser General Public License. - - In other cases, permission to use a particular library in non-free -programs enables a greater number of people to use a large body of -free software. For example, permission to use the GNU C Library in -non-free programs enables many more people to use the whole GNU -operating system, as well as its variant, the GNU/Linux operating -system. - - Although the Lesser General Public License is Less protective of the -users' freedom, it does ensure that the user of a program that is -linked with the Library has the freedom and the wherewithal to run -that program using a modified version of the Library. - - The precise terms and conditions for copying, distribution and -modification follow. Pay close attention to the difference between a -"work based on the library" and a "work that uses the library". The -former contains code derived from the library, whereas the latter must -be combined with the library in order to run. - - - - GNU LESSER GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. 
This License Agreement applies to any software library or other -program which contains a notice placed by the copyright holder or -other authorized party saying it may be distributed under the terms of -this Lesser General Public License (also called "this License"). -Each licensee is addressed as "you". - - A "library" means a collection of software functions and/or data -prepared so as to be conveniently linked with application programs -(which use some of those functions and data) to form executables. - - The "Library", below, refers to any such software library or work -which has been distributed under these terms. A "work based on the -Library" means either the Library or any derivative work under -copyright law: that is to say, a work containing the Library or a -portion of it, either verbatim or with modifications and/or translated -straightforwardly into another language. (Hereinafter, translation is -included without limitation in the term "modification".) - - "Source code" for a work means the preferred form of the work for -making modifications to it. For a library, complete source code means -all the source code for all modules it contains, plus any associated -interface definition files, plus the scripts used to control compilation -and installation of the library. - - Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running a program using the Library is not restricted, and output from -such a program is covered only if its contents constitute a work based -on the Library (independent of the use of the Library in a tool for -writing it). Whether that is true depends on what the Library does -and what the program that uses the Library does. - - 1. 
You may copy and distribute verbatim copies of the Library's -complete source code as you receive it, in any medium, provided that -you conspicuously and appropriately publish on each copy an -appropriate copyright notice and disclaimer of warranty; keep intact -all the notices that refer to this License and to the absence of any -warranty; and distribute a copy of this License along with the -Library. - You may charge a fee for the physical act of transferring a copy, -and you may at your option offer warranty protection in exchange for a -fee. - - 2. You may modify your copy or copies of the Library or any portion -of it, thus forming a work based on the Library, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) The modified work must itself be a software library. - - b) You must cause the files modified to carry prominent notices - stating that you changed the files and the date of any change. - - c) You must cause the whole of the work to be licensed at no - charge to all third parties under the terms of this License. - - d) If a facility in the modified Library refers to a function or a - table of data to be supplied by an application program that uses - the facility, other than as an argument passed when the facility - is invoked, then you must make a good faith effort to ensure that, - in the event an application does not supply such function or - table, the facility still operates, and performs whatever part of - its purpose remains meaningful. - - (For example, a function in a library to compute square roots has - a purpose that is entirely well-defined independent of the - application. Therefore, Subsection 2d requires that any - application-supplied function or table used by this function must - be optional: if the application does not supply it, the square - root function must still compute square roots.) 
- -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Library, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Library, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote -it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Library. - -In addition, mere aggregation of another work not based on the Library -with the Library (or with a work based on the Library) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may opt to apply the terms of the ordinary GNU General Public -License instead of this License to a given copy of the Library. To do -this, you must alter all the notices that refer to this License, so -that they refer to the ordinary GNU General Public License, version 2, -instead of to this License. (If a newer version than version 2 of the -ordinary GNU General Public License has appeared, then you can specify -that version instead if you wish.) Do not make any other change in -these notices. - - Once this change is made in a given copy, it is irreversible for -that copy, so the ordinary GNU General Public License applies to all -subsequent copies and derivative works made from that copy. - - This option is useful when you wish to copy part of the code of -the Library into a program that is not a library. - - 4. 
You may copy and distribute the Library (or a portion or -derivative of it, under Section 2) in object code or executable form -under the terms of Sections 1 and 2 above provided that you accompany -it with the complete corresponding machine-readable source code, which -must be distributed under the terms of Sections 1 and 2 above on a -medium customarily used for software interchange. - - If distribution of object code is made by offering access to copy -from a designated place, then offering equivalent access to copy the -source code from the same place satisfies the requirement to -distribute the source code, even though third parties are not -compelled to copy the source along with the object code. - - 5. A program that contains no derivative of any portion of the -Library, but is designed to work with the Library by being compiled or -linked with it, is called a "work that uses the Library". Such a -work, in isolation, is not a derivative work of the Library, and -therefore falls outside the scope of this License. - - However, linking a "work that uses the Library" with the Library -creates an executable that is a derivative of the Library (because it -contains portions of the Library), rather than a "work that uses the -library". The executable is therefore covered by this License. -Section 6 states terms for distribution of such executables. - - When a "work that uses the Library" uses material from a header file -that is part of the Library, the object code for the work may be a -derivative work of the Library even though the source code is not. -Whether this is true is especially significant if the work can be -linked without the Library, or if the work is itself a library. The -threshold for this to be true is not precisely defined by law. 
- - If such an object file uses only numerical parameters, data -structure layouts and accessors, and small macros and small inline -functions (ten lines or less in length), then the use of the object -file is unrestricted, regardless of whether it is legally a derivative -work. (Executables containing this object code plus portions of the -Library will still fall under Section 6.) - - Otherwise, if the work is a derivative of the Library, you may -distribute the object code for the work under the terms of Section 6. -Any executables containing that work also fall under Section 6, -whether or not they are linked directly with the Library itself. - - 6. As an exception to the Sections above, you may also combine or -link a "work that uses the Library" with the Library to produce a -work containing portions of the Library, and distribute that work -under terms of your choice, provided that the terms permit -modification of the work for the customer's own use and reverse -engineering for debugging such modifications. - - You must give prominent notice with each copy of the work that the -Library is used in it and that the Library and its use are covered by -this License. You must supply a copy of this License. If the work -during execution displays copyright notices, you must include the -copyright notice for the Library among them, as well as a reference -directing the user to the copy of this License. Also, you must do one -of these things: - - a) Accompany the work with the complete corresponding - machine-readable source code for the Library including whatever - changes were used in the work (which must be distributed under - Sections 1 and 2 above); and, if the work is an executable linked - with the Library, with the complete machine-readable "work that - uses the Library", as object code and/or source code, so that the - user can modify the Library and then relink to produce a modified - executable containing the modified Library. 
(It is understood - that the user who changes the contents of definitions files in the - Library will not necessarily be able to recompile the application - to use the modified definitions.) - - b) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (1) uses at run time a - copy of the library already present on the user's computer system, - rather than copying library functions into the executable, and (2) - will operate properly with a modified version of the library, if - the user installs one, as long as the modified version is - interface-compatible with the version that the work was made with. - - c) Accompany the work with a written offer, valid for at - least three years, to give the same user the materials - specified in Subsection 6a, above, for a charge no more - than the cost of performing this distribution. - - d) If distribution of the work is made by offering access to copy - from a designated place, offer equivalent access to copy the above - specified materials from the same place. - - e) Verify that the user has already received a copy of these - materials or that you have already sent this user a copy. - - For an executable, the required form of the "work that uses the -Library" must include any data and utility programs needed for -reproducing the executable from it. However, as a special exception, -the materials to be distributed need not include anything that is -normally distributed (in either source or binary form) with the major -components (compiler, kernel, and so on) of the operating system on -which the executable runs, unless that component itself accompanies -the executable. - - It may happen that this requirement contradicts the license -restrictions of other proprietary libraries that do not normally -accompany the operating system. Such a contradiction means you cannot -use both them and the Library together in an executable that you -distribute. - - 7. 
You may place library facilities that are a work based on the -Library side-by-side in a single library together with other library -facilities not covered by this License, and distribute such a combined -library, provided that the separate distribution of the work based on -the Library and of the other library facilities is otherwise -permitted, and provided that you do these two things: - - a) Accompany the combined library with a copy of the same work - based on the Library, uncombined with any other library - facilities. This must be distributed under the terms of the - Sections above. - - b) Give prominent notice with the combined library of the fact - that part of it is a work based on the Library, and explaining - where to find the accompanying uncombined form of the same work. - - 8. You may not copy, modify, sublicense, link with, or distribute -the Library except as expressly provided under this License. Any -attempt otherwise to copy, modify, sublicense, link with, or -distribute the Library is void, and will automatically terminate your -rights under this License. However, parties who have received copies, -or rights, from you under this License will not have their licenses -terminated so long as such parties remain in full compliance. - - 9. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Library or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Library (or any work based on the -Library), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Library or works based on it. - - 10. 
Each time you redistribute the Library (or any work based on the -Library), the recipient automatically receives a license from the -original licensor to copy, distribute, link with or modify the Library -subject to these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties with -this License. - - 11. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Library at all. For example, if a patent -license would not permit royalty-free redistribution of the Library by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Library. - -If any portion of this section is held invalid or unenforceable under any -particular circumstance, the balance of the section is intended to apply, -and the section as a whole is intended to apply in other circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system which is -implemented by public license practices. 
Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 12. If the distribution and/or use of the Library is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Library under this License may add -an explicit geographical distribution limitation excluding those countries, -so that distribution is permitted only in or among countries not thus -excluded. In such case, this License incorporates the limitation as if -written in the body of this License. - - 13. The Free Software Foundation may publish revised and/or new -versions of the Lesser General Public License from time to time. -Such new versions will be similar in spirit to the present version, -but may differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Library -specifies a version number of this License which applies to it and -"any later version", you have the option of following the terms and -conditions either of that version or of any later version published by -the Free Software Foundation. If the Library does not specify a -license version number, you may choose any version ever published by -the Free Software Foundation. - - 14. If you wish to incorporate parts of the Library into other free -programs whose distribution conditions are incompatible with these, -write to the author to ask for permission. For software which is -copyrighted by the Free Software Foundation, write to the Free -Software Foundation; we sometimes make exceptions for this. 
Our -decision will be guided by the two goals of preserving the free status -of all derivatives of our free software and of promoting the sharing -and reuse of software generally. - - NO WARRANTY - - 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO -WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. -EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR -OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY -KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE -LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME -THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN -WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY -AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU -FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR -CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE -LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING -RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A -FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF -SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Libraries - - If you develop a new library, and you want it to be of the greatest -possible use to the public, we recommend making it free software that -everyone can redistribute and change. You can do so by permitting -redistribution under these terms (or, alternatively, under the terms of the -ordinary General Public License). - - To apply these terms, attach the following notices to the library. 
It is -safest to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least the -"copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -Also add information on how to contact you by electronic and paper mail. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the library, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the - library `Frob' (a library for tweaking knobs) written by James Random Hacker. - - , 1 April 1990 - Ty Coon, President of Vice - -That's all there is to it! - diff --git a/lib/adodbapi/process_connect_string.py b/lib/adodbapi/process_connect_string.py deleted file mode 100644 index fa34d23a..00000000 --- a/lib/adodbapi/process_connect_string.py +++ /dev/null @@ -1,144 +0,0 @@ -""" a clumsy attempt at a macro language to let the programmer execute code on the server (ex: determine 64bit)""" -from . 
import is64bit as is64bit - - -def macro_call(macro_name, args, kwargs): - """allow the programmer to perform limited processing on the server by passing macro names and args - - :new_key - the key name the macro will create - :args[0] - macro name - :args[1:] - any arguments - :code - the value of the keyword item - :kwargs - the connection keyword dictionary. ??key has been removed - --> the value to put in for kwargs['name'] = value - """ - if isinstance(args, (str, str)): - args = [ - args - ] # the user forgot to pass a sequence, so make a string into args[0] - new_key = args[0] - try: - if macro_name == "is64bit": - if is64bit.Python(): # if on 64 bit Python - return new_key, args[1] # return first argument - else: - try: - return new_key, args[2] # else return second argument (if defined) - except IndexError: - return new_key, "" # else return blank - - elif ( - macro_name == "getuser" - ): # get the name of the user the server is logged in under - if not new_key in kwargs: - import getpass - - return new_key, getpass.getuser() - - elif macro_name == "getnode": # get the name of the computer running the server - import platform - - try: - return new_key, args[1] % platform.node() - except IndexError: - return new_key, platform.node() - - elif macro_name == "getenv": # expand the server's environment variable args[1] - try: - dflt = args[2] # if not found, default from args[2] - except IndexError: # or blank - dflt = "" - return new_key, os.environ.get(args[1], dflt) - - elif macro_name == "auto_security": - if ( - not "user" in kwargs or not kwargs["user"] - ): # missing, blank, or Null username - return new_key, "Integrated Security=SSPI" - return new_key, "User ID=%(user)s; Password=%(password)s" % kwargs - - elif ( - macro_name == "find_temp_test_path" - ): # helper function for testing ado operation -- undocumented - import os - import tempfile - - return new_key, os.path.join( - tempfile.gettempdir(), "adodbapi_test", args[1] - ) - - raise 
ValueError("Unknown connect string macro=%s" % macro_name) - except: - raise ValueError("Error in macro processing %s %s" % (macro_name, repr(args))) - - -def process( - args, kwargs, expand_macros=False -): # --> connection string with keyword arguments processed. - """attempts to inject arguments into a connection string using Python "%" operator for strings - - co: adodbapi connection object - args: positional parameters from the .connect() call - kvargs: keyword arguments from the .connect() call - """ - try: - dsn = args[0] - except IndexError: - dsn = None - if isinstance( - dsn, dict - ): # as a convenience the first argument may be django settings - kwargs.update(dsn) - elif ( - dsn - ): # the connection string is passed to the connection as part of the keyword dictionary - kwargs["connection_string"] = dsn - try: - a1 = args[1] - except IndexError: - a1 = None - # historically, the second positional argument might be a timeout value - if isinstance(a1, int): - kwargs["timeout"] = a1 - # if the second positional argument is a string, then it is user - elif isinstance(a1, str): - kwargs["user"] = a1 - # if the second positional argument is a dictionary, use it as keyword arguments, too - elif isinstance(a1, dict): - kwargs.update(a1) - try: - kwargs["password"] = args[2] # the third positional argument is password - kwargs["host"] = args[3] # the fourth positional argument is host name - kwargs["database"] = args[4] # the fifth positional argument is database name - except IndexError: - pass - - # make sure connection string is defined somehow - if not "connection_string" in kwargs: - try: # perhaps 'dsn' was defined - kwargs["connection_string"] = kwargs["dsn"] - except KeyError: - try: # as a last effort, use the "host" keyword - kwargs["connection_string"] = kwargs["host"] - except KeyError: - raise TypeError("Must define 'connection_string' for ado connections") - if expand_macros: - for kwarg in list(kwargs.keys()): - if kwarg.startswith("macro_"): # If 
a key defines a macro - macro_name = kwarg[6:] # name without the "macro_" - macro_code = kwargs.pop( - kwarg - ) # we remove the macro_key and get the code to execute - new_key, rslt = macro_call( - macro_name, macro_code, kwargs - ) # run the code in the local context - kwargs[new_key] = rslt # put the result back in the keywords dict - # special processing for PyRO IPv6 host address - try: - s = kwargs["proxy_host"] - if ":" in s: # it is an IPv6 address - if s[0] != "[": # is not surrounded by brackets - kwargs["proxy_host"] = s.join(("[", "]")) # put it in brackets - except KeyError: - pass - return kwargs diff --git a/lib/adodbapi/readme.txt b/lib/adodbapi/readme.txt deleted file mode 100644 index cf591905..00000000 --- a/lib/adodbapi/readme.txt +++ /dev/null @@ -1,92 +0,0 @@ -Project -------- -adodbapi - -A Python DB-API 2.0 (PEP-249) module that makes it easy to use Microsoft ADO -for connecting with databases and other data sources -using either CPython or IronPython. - -Home page: - -Features: -* 100% DB-API 2.0 (PEP-249) compliant (including most extensions and recommendations). -* Includes pyunit testcases that describe how to use the module. -* Fully implemented in Python. -- runs in Python 2.5+ Python 3.0+ and IronPython 2.6+ -* Licensed under the LGPL license, which means that it can be used freely even in commercial programs subject to certain restrictions. -* The user can choose between paramstyles: 'qmark' 'named' 'format' 'pyformat' 'dynamic' -* Supports data retrieval by column name e.g.: - for row in myCurser.execute("select name,age from students"): - print("Student", row.name, "is", row.age, "years old.") -* Supports user-definable system-to-Python data conversion functions (selected by ADO data type, or by column) - -Prerequisites: -* C Python 2.7 or 3.5 or higher - and pywin32 (Mark Hammond's python for windows extensions.) -or - Iron Python 2.7 or higher. 
(works in IPy2.0 for all data types except BUFFER) - -Installation: -* (C-Python on Windows): Install pywin32 ("pip install pywin32") which includes adodbapi. -* (IronPython on Windows): Download adodbapi from http://sf.net/projects/adodbapi. Unpack the zip. - Open a command window as an administrator. CD to the folder containing the unzipped files. - Run "setup.py install" using the IronPython of your choice. - -NOTE: ........... -If you do not like the new default operation of returning Numeric columns as decimal.Decimal, -you can select other options by the user defined conversion feature. -Try: - adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtString -or: - adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtFloat -or: - adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = write_your_own_convertion_function - ............ -notes for 2.6.2: - The definitive source has been moved to https://github.com/mhammond/pywin32/tree/master/adodbapi. - Remote has proven too hard to configure and test with Pyro4. I am moving it to unsupported status - until I can change to a different connection method. -whats new in version 2.6 - A cursor.prepare() method and support for prepared SQL statements. - Lots of refactoring, especially of the Remote and Server modules (still to be treated as Beta code). - The quick start document 'quick_reference.odt' will export as a nice-looking pdf. - Added paramstyles 'pyformat' and 'dynamic'. If your 'paramstyle' is 'named' you _must_ pass a dictionary of - parameters to your .execute() method. If your 'paramstyle' is 'format' 'pyformat' or 'dynamic', you _may_ - pass a dictionary of parameters -- provided your SQL operation string is formatted correctly. - -whats new in version 2.5 - Remote module: (works on Linux!) allows a Windows computer to serve ADO databases via PyRO - Server module: PyRO server for ADO. 
Run using a command like= C:>python -m adodbapi.server - (server has simple connection string macros: is64bit, getuser, sql_provider, auto_security) - Brief documentation included. See adodbapi/examples folder adodbapi.rtf - New connection method conn.get_table_names() --> list of names of tables in database - - Vastly refactored. Data conversion things have been moved to the new adodbapi.apibase module. - Many former module-level attributes are now class attributes. (Should be more thread-safe) - Connection objects are now context managers for transactions and will commit or rollback. - Cursor objects are context managers and will automatically close themselves. - Autocommit can be switched on and off. - Keyword and positional arguments on the connect() method work as documented in PEP 249. - Keyword arguments from the connect call can be formatted into the connection string. - New keyword arguments defined, such as: autocommit, paramstyle, remote_proxy, remote_port. - *** Breaking change: variantConversion lookups are simplified: the following will raise KeyError: - oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes] - Refactor as: oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes[0]] - -License -------- -LGPL, see http://www.opensource.org/licenses/lgpl-license.php - -Documentation -------------- - -Look at adodbapi/quick_reference.md -http://www.python.org/topics/database/DatabaseAPI-2.0.html -read the examples in adodbapi/examples -and look at the test cases in adodbapi/test directory. - -Mailing lists -------------- -The adodbapi mailing lists have been deactivated. Submit comments to the -pywin32 or IronPython mailing lists. - -- the bug tracker on sourceforge.net/projects/adodbapi may be checked, (infrequently). 
- -- please use: https://github.com/mhammond/pywin32/issues diff --git a/lib/adodbapi/remote.py b/lib/adodbapi/remote.py deleted file mode 100644 index ae22b5a7..00000000 --- a/lib/adodbapi/remote.py +++ /dev/null @@ -1,634 +0,0 @@ -"""adodbapi.remote - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole -* http://sourceforge.net/projects/pywin32 -* http://sourceforge.net/projects/adodbapi - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - django adaptations and refactoring thanks to Adam Vandenberg - -DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/ - -This module source should run correctly in CPython versions 2.5 and later, -or IronPython version 2.7 and later, -or, after running through 2to3.py, CPython 3.0 or later. -""" - -__version__ = "2.6.0.4" -version = "adodbapi.remote v" + __version__ - -import array -import datetime -import os -import sys -import time - -# Pyro4 is required for server and remote operation --> https://pypi.python.org/pypi/Pyro4/ -try: - import Pyro4 -except ImportError: - print('* * * Sorry, server operation requires Pyro4. 
Please "pip import" it.') - exit(11) - -import adodbapi -import adodbapi.apibase as api -import adodbapi.process_connect_string -from adodbapi.apibase import ProgrammingError - -_BaseException = api._BaseException - -sys.excepthook = Pyro4.util.excepthook -Pyro4.config.PREFER_IP_VERSION = 0 # allow system to prefer IPv6 -Pyro4.config.COMMTIMEOUT = 40.0 # a bit longer than the default SQL server Gtimeout -Pyro4.config.SERIALIZER = "pickle" - -try: - verbose = int(os.environ["ADODBAPI_VERBOSE"]) -except: - verbose = False -if verbose: - print(version) - -# --- define objects to smooth out Python3 <-> Python 2.x differences -unicodeType = str # this line will be altered by 2to3.py to '= str' -longType = int # this line will be altered by 2to3.py to '= int' -StringTypes = str -makeByteBuffer = bytes -memoryViewType = memoryview - -# ----------------------------------------------------------- -# conversion functions mandated by PEP 249 -Binary = makeByteBuffer # override the function from apibase.py - - -def Date(year, month, day): - return datetime.date(year, month, day) # dateconverter.Date(year,month,day) - - -def Time(hour, minute, second): - return datetime.time(hour, minute, second) # dateconverter.Time(hour,minute,second) - - -def Timestamp(year, month, day, hour, minute, second): - return datetime.datetime(year, month, day, hour, minute, second) - - -def DateFromTicks(ticks): - return Date(*time.gmtime(ticks)[:3]) - - -def TimeFromTicks(ticks): - return Time(*time.gmtime(ticks)[3:6]) - - -def TimestampFromTicks(ticks): - return Timestamp(*time.gmtime(ticks)[:6]) - - -def connect(*args, **kwargs): # --> a remote db-api connection object - """Create and open a remote db-api database connection object""" - # process the argument list the programmer gave us - kwargs = adodbapi.process_connect_string.process(args, kwargs) - # the "proxy_xxx" keys tell us where to find the PyRO proxy server - kwargs.setdefault( - "pyro_connection", 
"PYRO:ado.connection@%(proxy_host)s:%(proxy_port)s" - ) - if not "proxy_port" in kwargs: - try: - pport = os.environ["PROXY_PORT"] - except KeyError: - pport = 9099 - kwargs["proxy_port"] = pport - if not "proxy_host" in kwargs or not kwargs["proxy_host"]: - try: - phost = os.environ["PROXY_HOST"] - except KeyError: - phost = "[::1]" # '127.0.0.1' - kwargs["proxy_host"] = phost - ado_uri = kwargs["pyro_connection"] % kwargs - # ask PyRO make us a remote connection object - auto_retry = 3 - while auto_retry: - try: - dispatcher = Pyro4.Proxy(ado_uri) - if "comm_timeout" in kwargs: - dispatcher._pyroTimeout = float(kwargs["comm_timeout"]) - uri = dispatcher.make_connection() - break - except Pyro4.core.errors.PyroError: - auto_retry -= 1 - if auto_retry: - time.sleep(1) - else: - raise api.DatabaseError("Cannot create connection to=%s" % ado_uri) - - conn_uri = fix_uri(uri, kwargs) # get a host connection from the proxy server - while auto_retry: - try: - host_conn = Pyro4.Proxy( - conn_uri - ) # bring up an exclusive Pyro connection for my ADO connection - break - except Pyro4.core.errors.PyroError: - auto_retry -= 1 - if auto_retry: - time.sleep(1) - else: - raise api.DatabaseError( - "Cannot create ADO connection object using=%s" % conn_uri - ) - if "comm_timeout" in kwargs: - host_conn._pyroTimeout = float(kwargs["comm_timeout"]) - # make a local clone - myConn = Connection() - while auto_retry: - try: - myConn.connect( - kwargs, host_conn - ) # call my connect method -- hand him the host connection - break - except Pyro4.core.errors.PyroError: - auto_retry -= 1 - if auto_retry: - time.sleep(1) - else: - raise api.DatabaseError( - "Pyro error creating connection to/thru=%s" % repr(kwargs) - ) - except _BaseException as e: - raise api.DatabaseError( - "Error creating remote connection to=%s, e=%s, %s" - % (repr(kwargs), repr(e), sys.exc_info()[2]) - ) - return myConn - - -def fix_uri(uri, kwargs): - """convert a generic pyro uri with '0.0.0.0' into the address we 
actually called""" - u = uri.asString() - s = u.split("[::0]") # IPv6 generic address - if len(s) == 1: # did not find one - s = u.split("0.0.0.0") # IPv4 generic address - if len(s) > 1: # found a generic - return kwargs["proxy_host"].join(s) # fill in our address for the host - return uri - - -# # # # # ----- the Class that defines a connection ----- # # # # # -class Connection(object): - # include connection attributes required by api definition. - Warning = api.Warning - Error = api.Error - InterfaceError = api.InterfaceError - DataError = api.DataError - DatabaseError = api.DatabaseError - OperationalError = api.OperationalError - IntegrityError = api.IntegrityError - InternalError = api.InternalError - NotSupportedError = api.NotSupportedError - ProgrammingError = api.ProgrammingError - # set up some class attributes - paramstyle = api.paramstyle - - @property - def dbapi(self): # a proposed db-api version 3 extension. - "Return a reference to the DBAPI module for this Connection." 
- return api - - def __init__(self): - self.proxy = None - self.kwargs = {} - self.errorhandler = None - self.supportsTransactions = False - self.paramstyle = api.paramstyle - self.timeout = 30 - self.cursors = {} - - def connect(self, kwargs, connection_maker): - self.kwargs = kwargs - if verbose: - print('%s attempting: "%s"' % (version, repr(kwargs))) - self.proxy = connection_maker - ##try: - ret = self.proxy.connect(kwargs) # ask the server to hook us up - ##except ImportError, e: # Pyro is trying to import pywinTypes.comerrer - ## self._raiseConnectionError(api.DatabaseError, 'Proxy cannot connect using=%s' % repr(kwargs)) - if ret is not True: - self._raiseConnectionError( - api.OperationalError, "Proxy returns error message=%s" % repr(ret) - ) - - self.supportsTransactions = self.getIndexedValue("supportsTransactions") - self.paramstyle = self.getIndexedValue("paramstyle") - self.timeout = self.getIndexedValue("timeout") - if verbose: - print("adodbapi.remote New connection at %X" % id(self)) - - def _raiseConnectionError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self, None, errorclass, errorvalue) - - def close(self): - """Close the connection now (rather than whenever __del__ is called). - - The connection will be unusable from this point forward; - an Error (or subclass) exception will be raised if any operation is attempted with the connection. - The same applies to all cursor objects trying to use the connection. 
- """ - for crsr in list(self.cursors.values())[ - : - ]: # copy the list, then close each one - crsr.close() - try: - """close the underlying remote Connection object""" - self.proxy.close() - if verbose: - print("adodbapi.remote Closed connection at %X" % id(self)) - object.__delattr__( - self, "proxy" - ) # future attempts to use closed cursor will be caught by __getattr__ - except Exception: - pass - - def __del__(self): - try: - self.proxy.close() - except: - pass - - def commit(self): - """Commit any pending transaction to the database. - - Note that if the database supports an auto-commit feature, - this must be initially off. An interface method may be provided to turn it back on. - Database modules that do not support transactions should implement this method with void functionality. - """ - if not self.supportsTransactions: - return - result = self.proxy.commit() - if result: - self._raiseConnectionError( - api.OperationalError, "Error during commit: %s" % result - ) - - def _rollback(self): - """In case a database does provide transactions this method causes the the database to roll back to - the start of any pending transaction. Closing a connection without committing the changes first will - cause an implicit rollback to be performed. - """ - result = self.proxy.rollback() - if result: - self._raiseConnectionError( - api.OperationalError, "Error during rollback: %s" % result - ) - - def __setattr__(self, name, value): - if name in ("paramstyle", "timeout", "autocommit"): - if self.proxy: - self.proxy.send_attribute_to_host(name, value) - object.__setattr__(self, name, value) # store attribute locally (too) - - def __getattr__(self, item): - if ( - item == "rollback" - ): # the rollback method only appears if the database supports transactions - if self.supportsTransactions: - return ( - self._rollback - ) # return the rollback method so the caller can execute it. 
- else: - raise self.ProgrammingError( - "this data provider does not support Rollback" - ) - elif item in ( - "dbms_name", - "dbms_version", - "connection_string", - "autocommit", - ): # 'messages' ): - return self.getIndexedValue(item) - elif item == "proxy": - raise self.ProgrammingError("Attempting to use closed connection") - else: - raise self.ProgrammingError('No remote access for attribute="%s"' % item) - - def getIndexedValue(self, index): - r = self.proxy.get_attribute_for_remote(index) - return r - - def cursor(self): - "Return a new Cursor Object using the connection." - myCursor = Cursor(self) - return myCursor - - def _i_am_here(self, crsr): - "message from a new cursor proclaiming its existence" - self.cursors[crsr.id] = crsr - - def _i_am_closing(self, crsr): - "message from a cursor giving connection a chance to clean up" - try: - del self.cursors[crsr.id] - except: - pass - - def __enter__(self): # Connections are context managers - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type: - self._rollback() # automatic rollback on errors - else: - self.commit() - - def get_table_names(self): - return self.proxy.get_table_names() - - -def fixpickle(x): - """pickle barfs on buffer(x) so we pass as array.array(x) then restore to original form for .execute()""" - if x is None: - return None - if isinstance(x, dict): - # for 'named' paramstyle user will pass a mapping - newargs = {} - for arg, val in list(x.items()): - if isinstance(val, memoryViewType): - newval = array.array("B") - newval.fromstring(val) - newargs[arg] = newval - else: - newargs[arg] = val - return newargs - # if not a mapping, then a sequence - newargs = [] - for arg in x: - if isinstance(arg, memoryViewType): - newarg = array.array("B") - newarg.fromstring(arg) - newargs.append(newarg) - else: - newargs.append(arg) - return newargs - - -class Cursor(object): - def __init__(self, connection): - self.command = None - self.errorhandler = None ## was: 
connection.errorhandler - self.connection = connection - self.proxy = self.connection.proxy - self.rs = None # the fetchable data for this cursor - self.converters = NotImplemented - self.id = connection.proxy.build_cursor() - connection._i_am_here(self) - self.recordset_format = api.RS_REMOTE - if verbose: - print( - "%s New cursor at %X on conn %X" - % (version, id(self), id(self.connection)) - ) - - def prepare(self, operation): - self.command = operation - try: - del self.description - except AttributeError: - pass - self.proxy.crsr_prepare(self.id, operation) - - def __iter__(self): # [2.1 Zamarev] - return iter(self.fetchone, None) # [2.1 Zamarev] - - def __next__(self): - r = self.fetchone() - if r: - return r - raise StopIteration - - def __enter__(self): - "Allow database cursors to be used with context managers." - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - "Allow database cursors to be used with context managers." - self.close() - - def __getattr__(self, key): - if key == "numberOfColumns": - try: - return len(self.rs[0]) - except: - return 0 - if key == "description": - try: - self.description = self.proxy.crsr_get_description(self.id)[:] - return self.description - except TypeError: - return None - if key == "columnNames": - try: - r = dict( - self.proxy.crsr_get_columnNames(self.id) - ) # copy the remote columns - - except TypeError: - r = {} - self.columnNames = r - return r - - if key == "remote_cursor": - raise api.OperationalError - try: - return self.proxy.crsr_get_attribute_for_remote(self.id, key) - except AttributeError: - raise api.InternalError( - 'Failure getting attribute "%s" from proxy cursor.' 
% key - ) - - def __setattr__(self, key, value): - if key == "arraysize": - self.proxy.crsr_set_arraysize(self.id, value) - if key == "paramstyle": - if value in api.accepted_paramstyles: - self.proxy.crsr_set_paramstyle(self.id, value) - else: - self._raiseCursorError( - api.ProgrammingError, 'invalid paramstyle ="%s"' % value - ) - object.__setattr__(self, key, value) - - def _raiseCursorError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self.connection, self, errorclass, errorvalue) - - def execute(self, operation, parameters=None): - if self.connection is None: - self._raiseCursorError( - ProgrammingError, "Attempted operation on closed cursor" - ) - self.command = operation - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - fp = fixpickle(parameters) - if verbose > 2: - print( - ( - '%s executing "%s" with params=%s' - % (version, operation, repr(parameters)) - ) - ) - result = self.proxy.crsr_execute(self.id, operation, fp) - if result: # an exception was triggered - self._raiseCursorError(result[0], result[1]) - - def executemany(self, operation, seq_of_parameters): - if self.connection is None: - self._raiseCursorError( - ProgrammingError, "Attempted operation on closed cursor" - ) - self.command = operation - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - sq = [fixpickle(x) for x in seq_of_parameters] - if verbose > 2: - print( - ( - '%s executemany "%s" with params=%s' - % (version, operation, repr(seq_of_parameters)) - ) - ) - self.proxy.crsr_executemany(self.id, operation, sq) - - def nextset(self): - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - if verbose > 2: - print(("%s nextset" % version)) - return self.proxy.crsr_nextset(self.id) - - def callproc(self, 
procname, parameters=None): - if self.connection is None: - self._raiseCursorError( - ProgrammingError, "Attempted operation on closed cursor" - ) - self.command = procname - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - fp = fixpickle(parameters) - if verbose > 2: - print( - ( - '%s callproc "%s" with params=%s' - % (version, procname, repr(parameters)) - ) - ) - return self.proxy.crsr_callproc(self.id, procname, fp) - - def fetchone(self): - try: - f1 = self.proxy.crsr_fetchone(self.id) - except _BaseException as e: - self._raiseCursorError(api.DatabaseError, e) - else: - if f1 is None: - return None - self.rs = [f1] - return api.SQLrows(self.rs, 1, self)[ - 0 - ] # new object to hold the results of the fetch - - def fetchmany(self, size=None): - try: - self.rs = self.proxy.crsr_fetchmany(self.id, size) - if not self.rs: - return [] - r = api.SQLrows(self.rs, len(self.rs), self) - return r - except Exception as e: - self._raiseCursorError(api.DatabaseError, e) - - def fetchall(self): - try: - self.rs = self.proxy.crsr_fetchall(self.id) - if not self.rs: - return [] - return api.SQLrows(self.rs, len(self.rs), self) - except Exception as e: - self._raiseCursorError(api.DatabaseError, e) - - def close(self): - if self.connection is None: - return - self.connection._i_am_closing(self) # take me off the connection's cursors list - try: - self.proxy.crsr_close(self.id) - except: - pass - try: - del self.description - except: - pass - try: - del self.rs # let go of the recordset - except: - pass - self.connection = ( - None # this will make all future method calls on me throw an exception - ) - self.proxy = None - if verbose: - print("adodbapi.remote Closed cursor at %X" % id(self)) - - def __del__(self): - try: - self.close() - except: - pass - - def setinputsizes(self, sizes): - pass - - def setoutputsize(self, size, column=None): - pass diff --git a/lib/adodbapi/schema_table.py 
b/lib/adodbapi/schema_table.py deleted file mode 100644 index 8621830e..00000000 --- a/lib/adodbapi/schema_table.py +++ /dev/null @@ -1,15 +0,0 @@ -"""call using an open ADO connection --> list of table names""" -from . import adodbapi - - -def names(connection_object): - ado = connection_object.adoConn - schema = ado.OpenSchema(20) # constant = adSchemaTables - - tables = [] - while not schema.EOF: - name = adodbapi.getIndexedValue(schema.Fields, "TABLE_NAME").Value - tables.append(name) - schema.MoveNext() - del schema - return tables diff --git a/lib/adodbapi/setup.py b/lib/adodbapi/setup.py deleted file mode 100644 index d25869ad..00000000 --- a/lib/adodbapi/setup.py +++ /dev/null @@ -1,70 +0,0 @@ -"""adodbapi -- a pure Python PEP 249 DB-API package using Microsoft ADO - -Adodbapi can be run on CPython 3.5 and later. -or IronPython version 2.6 and later (in theory, possibly no longer in practice!) -""" -CLASSIFIERS = """\ -Development Status :: 5 - Production/Stable -Intended Audience :: Developers -License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) -Operating System :: Microsoft :: Windows -Operating System :: POSIX :: Linux -Programming Language :: Python -Programming Language :: Python :: 3 -Programming Language :: SQL -Topic :: Software Development -Topic :: Software Development :: Libraries :: Python Modules -Topic :: Database -""" - -NAME = "adodbapi" -MAINTAINER = "Vernon Cole" -MAINTAINER_EMAIL = "vernondcole@gmail.com" -DESCRIPTION = ( - """A pure Python package implementing PEP 249 DB-API using Microsoft ADO.""" -) -URL = "http://sourceforge.net/projects/adodbapi" -LICENSE = "LGPL" -CLASSIFIERS = filter(None, CLASSIFIERS.split("\n")) -AUTHOR = "Henrik Ekelund, Vernon Cole, et.al." 
-AUTHOR_EMAIL = "vernondcole@gmail.com" -PLATFORMS = ["Windows", "Linux"] - -VERSION = None # in case searching for version fails -a = open("adodbapi.py") # find the version string in the source code -for line in a: - if "__version__" in line: - VERSION = line.split("'")[1] - print('adodbapi version="%s"' % VERSION) - break -a.close() - - -def setup_package(): - from distutils.command.build_py import build_py - from distutils.core import setup - - setup( - cmdclass={"build_py": build_py}, - name=NAME, - maintainer=MAINTAINER, - maintainer_email=MAINTAINER_EMAIL, - description=DESCRIPTION, - url=URL, - keywords="database ado odbc dbapi db-api Microsoft SQL", - ## download_url=DOWNLOAD_URL, - long_description=open("README.txt").read(), - license=LICENSE, - classifiers=CLASSIFIERS, - author=AUTHOR, - author_email=AUTHOR_EMAIL, - platforms=PLATFORMS, - version=VERSION, - package_dir={"adodbapi": ""}, - packages=["adodbapi"], - ) - return - - -if __name__ == "__main__": - setup_package() diff --git a/lib/adodbapi/test/adodbapitest.py b/lib/adodbapi/test/adodbapitest.py deleted file mode 100644 index e5b3dc19..00000000 --- a/lib/adodbapi/test/adodbapitest.py +++ /dev/null @@ -1,1692 +0,0 @@ -""" Unit tests version 2.6.1.0 for adodbapi""" -""" - adodbapi - A python DB API 2.0 interface to Microsoft ADO - - Copyright (C) 2002 Henrik Ekelund - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Updates by Vernon Cole -""" - -import copy -import datetime -import decimal -import random -import string -import sys -import unittest - -try: - import win32com.client - - win32 = True -except ImportError: - win32 = False - -# run the configuration module. -import adodbapitestconfig as config # will set sys.path to find correct version of adodbapi - -# in our code below, all our switches are from config.whatever -import tryconnection - -import adodbapi -import adodbapi.apibase as api - -try: - import adodbapi.ado_consts as ado_consts -except ImportError: # we are doing a shortcut import as a module -- so - try: - import ado_consts - except ImportError: - from adodbapi import ado_consts - - -def str2bytes(sval): - return sval.encode("latin1") - - -long = int - - -def randomstring(length): - return "".join([random.choice(string.ascii_letters) for n in range(32)]) - - -class CommonDBTests(unittest.TestCase): - "Self contained super-simple tests in easy syntax, should work on everything between mySQL and Oracle" - - def setUp(self): - self.engine = "unknown" - - def getEngine(self): - return self.engine - - def getConnection(self): - raise NotImplementedError # "This method must be overriden by a subclass" - - def getCursor(self): - return self.getConnection().cursor() - - def testConnection(self): - crsr = self.getCursor() - assert crsr.__class__.__name__ == "Cursor" - - def testErrorHandlerInherits(self): - if not self.remote: - conn = self.getConnection() - mycallable = lambda connection, cursor, errorclass, errorvalue: 1 - conn.errorhandler = mycallable - crsr = conn.cursor() - assert ( - crsr.errorhandler == mycallable - ), "Error handler on crsr should be same as on connection" - - def testDefaultErrorHandlerConnection(self): - if not self.remote: - 
conn = self.getConnection() - del conn.messages[:] - try: - conn.close() - conn.commit() # Should not be able to use connection after it is closed - except: - assert len(conn.messages) == 1 - assert len(conn.messages[0]) == 2 - assert conn.messages[0][0] == api.ProgrammingError - - def testOwnErrorHandlerConnection(self): - if self.remote: # ToDo: use "skip" - return - mycallable = ( - lambda connection, cursor, errorclass, errorvalue: 1 - ) # does not raise anything - conn = self.getConnection() - conn.errorhandler = mycallable - conn.close() - conn.commit() # Should not be able to use connection after it is closed - assert len(conn.messages) == 0 - - conn.errorhandler = None # This should bring back the standard error handler - try: - conn.close() - conn.commit() # Should not be able to use connection after it is closed - except: - pass - # The Standard errorhandler appends error to messages attribute - assert ( - len(conn.messages) > 0 - ), "Setting errorhandler to none should bring back the standard error handler" - - def testDefaultErrorHandlerCursor(self): - crsr = self.getConnection().cursor() - if not self.remote: - del crsr.messages[:] - try: - crsr.execute("SELECT abbtytddrf FROM dasdasd") - except: - assert len(crsr.messages) == 1 - assert len(crsr.messages[0]) == 2 - assert crsr.messages[0][0] == api.DatabaseError - - def testOwnErrorHandlerCursor(self): - if self.remote: # ToDo: should be a "skip" - return - mycallable = ( - lambda connection, cursor, errorclass, errorvalue: 1 - ) # does not raise anything - crsr = self.getConnection().cursor() - crsr.errorhandler = mycallable - crsr.execute("SELECT abbtytddrf FROM dasdasd") - assert len(crsr.messages) == 0 - - crsr.errorhandler = None # This should bring back the standard error handler - try: - crsr.execute("SELECT abbtytddrf FROM dasdasd") - except: - pass - # The Standard errorhandler appends error to messages attribute - assert ( - len(crsr.messages) > 0 - ), "Setting errorhandler to none should 
bring back the standard error handler" - - def testUserDefinedConversions(self): - if self.remote: ## Todo: should be a "skip" - return - try: - duplicatingConverter = lambda aStringField: aStringField * 2 - assert duplicatingConverter("gabba") == "gabbagabba" - - self.helpForceDropOnTblTemp() - conn = self.getConnection() - # the variantConversions attribute should not exist on a normal connection object - self.assertRaises(AttributeError, lambda x: conn.variantConversions[x], [2]) - if not self.remote: - # create a variantConversions attribute on the connection - conn.variantConversions = copy.copy(api.variantConversions) - crsr = conn.cursor() - tabdef = ( - "CREATE TABLE xx_%s (fldData VARCHAR(100) NOT NULL, fld2 VARCHAR(20))" - % config.tmp - ) - crsr.execute(tabdef) - crsr.execute( - "INSERT INTO xx_%s(fldData,fld2) VALUES('gabba','booga')" - % config.tmp - ) - crsr.execute( - "INSERT INTO xx_%s(fldData,fld2) VALUES('hey','yo')" % config.tmp - ) - # change converter for ALL adoStringTypes columns - conn.variantConversions[api.adoStringTypes] = duplicatingConverter - crsr.execute( - "SELECT fldData,fld2 FROM xx_%s ORDER BY fldData" % config.tmp - ) - - rows = crsr.fetchall() - row = rows[0] - self.assertEqual(row[0], "gabbagabba") - row = rows[1] - self.assertEqual(row[0], "heyhey") - self.assertEqual(row[1], "yoyo") - - upcaseConverter = lambda aStringField: aStringField.upper() - assert upcaseConverter("upThis") == "UPTHIS" - - # now use a single column converter - rows.converters[1] = upcaseConverter # convert second column - self.assertEqual(row[0], "heyhey") # first will be unchanged - self.assertEqual(row[1], "YO") # second will convert to upper case - - finally: - try: - del conn.variantConversions # Restore the default - except: - pass - self.helpRollbackTblTemp() - - def testUserDefinedConversionForExactNumericTypes(self): - # variantConversions is a dictionary of conversion functions - # held internally in adodbapi.apibase - # - # !!! 
this test intentionally alters the value of what should be constant in the module - # !!! no new code should use this example, to is only a test to see that the - # !!! deprecated way of doing this still works. (use connection.variantConversions) - # - if not self.remote and sys.version_info < (3, 0): ### Py3 need different test - oldconverter = adodbapi.variantConversions[ - ado_consts.adNumeric - ] # keep old function to restore later - # By default decimal and "numbers" are returned as decimals. - # Instead, make numbers return as floats - try: - adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtFloat - self.helpTestDataType( - "decimal(18,2)", "NUMBER", 3.45, compareAlmostEqual=1 - ) - self.helpTestDataType( - "numeric(18,2)", "NUMBER", 3.45, compareAlmostEqual=1 - ) - # now return strings - adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtString - self.helpTestDataType("numeric(18,2)", "NUMBER", "3.45") - # now a completly weird user defined convertion - adodbapi.variantConversions[ado_consts.adNumeric] = ( - lambda x: "!!This function returns a funny unicode string %s!!" % x - ) - self.helpTestDataType( - "numeric(18,2)", - "NUMBER", - "3.45", - allowedReturnValues=[ - "!!This function returns a funny unicode string 3.45!!" 
- ], - ) - finally: - # now reset the converter to its original function - adodbapi.variantConversions[ - ado_consts.adNumeric - ] = oldconverter # Restore the original convertion function - - def helpTestDataType( - self, - sqlDataTypeString, - DBAPIDataTypeString, - pyData, - pyDataInputAlternatives=None, - compareAlmostEqual=None, - allowedReturnValues=None, - ): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData """ - % config.tmp - + sqlDataTypeString - + ")\n" - ) - - crsr.execute(tabdef) - - # Test Null values mapped to None - crsr.execute("INSERT INTO xx_%s (fldId) VALUES (1)" % config.tmp) - - crsr.execute("SELECT fldId,fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchone() - self.assertEqual(rs[1], None) # Null should be mapped to None - assert rs[0] == 1 - - # Test description related - descTuple = crsr.description[1] - assert descTuple[0] in ["fldData", "flddata"], 'was "%s" expected "%s"' % ( - descTuple[0], - "fldData", - ) - - if DBAPIDataTypeString == "STRING": - assert descTuple[1] == api.STRING, 'was "%s" expected "%s"' % ( - descTuple[1], - api.STRING.values, - ) - elif DBAPIDataTypeString == "NUMBER": - assert descTuple[1] == api.NUMBER, 'was "%s" expected "%s"' % ( - descTuple[1], - api.NUMBER.values, - ) - elif DBAPIDataTypeString == "BINARY": - assert descTuple[1] == api.BINARY, 'was "%s" expected "%s"' % ( - descTuple[1], - api.BINARY.values, - ) - elif DBAPIDataTypeString == "DATETIME": - assert descTuple[1] == api.DATETIME, 'was "%s" expected "%s"' % ( - descTuple[1], - api.DATETIME.values, - ) - elif DBAPIDataTypeString == "ROWID": - assert descTuple[1] == api.ROWID, 'was "%s" expected "%s"' % ( - descTuple[1], - api.ROWID.values, - ) - elif DBAPIDataTypeString == "UUID": - assert descTuple[1] == api.OTHER, 'was "%s" expected "%s"' % ( - descTuple[1], - api.OTHER.values, - ) - else: - raise NotImplementedError # 
"DBAPIDataTypeString not provided" - - # Test data binding - inputs = [pyData] - if pyDataInputAlternatives: - inputs.extend(pyDataInputAlternatives) - inputs = set(inputs) # removes redundant string==unicode tests - fldId = 1 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (?,?)" % config.tmp, - (fldId, inParam), - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE ?=fldID" % config.tmp, [fldId] - ) - rs = crsr.fetchone() - if allowedReturnValues: - allowedTypes = tuple([type(aRV) for aRV in allowedReturnValues]) - assert isinstance( - rs[0], allowedTypes - ), 'result type "%s" must be one of %s' % (type(rs[0]), allowedTypes) - else: - assert isinstance( - rs[0], type(pyData) - ), 'result type "%s" must be instance of %s' % ( - type(rs[0]), - type(pyData), - ) - - if compareAlmostEqual and DBAPIDataTypeString == "DATETIME": - iso1 = adodbapi.dateconverter.DateObjectToIsoFormatString(rs[0]) - iso2 = adodbapi.dateconverter.DateObjectToIsoFormatString(pyData) - self.assertEqual(iso1, iso2) - elif compareAlmostEqual: - s = float(pyData) - v = float(rs[0]) - assert ( - abs(v - s) / s < 0.00001 - ), "Values not almost equal recvd=%s, expected=%f" % (rs[0], s) - else: - if allowedReturnValues: - ok = False - self.assertTrue( - rs[0] in allowedReturnValues, - 'Value "%s" not in %s' % (repr(rs[0]), allowedReturnValues), - ) - else: - self.assertEqual( - rs[0], - pyData, - 'Values are not equal recvd="%s", expected="%s"' - % (rs[0], pyData), - ) - - def testDataTypeFloat(self): - self.helpTestDataType("real", "NUMBER", 3.45, compareAlmostEqual=True) - self.helpTestDataType("float", "NUMBER", 1.79e37, compareAlmostEqual=True) - - def testDataTypeDecmal(self): - self.helpTestDataType( - "decimal(18,2)", - "NUMBER", - 3.45, - allowedReturnValues=["3.45", "3,45", decimal.Decimal("3.45")], - ) - 
self.helpTestDataType( - "numeric(18,2)", - "NUMBER", - 3.45, - allowedReturnValues=["3.45", "3,45", decimal.Decimal("3.45")], - ) - self.helpTestDataType( - "decimal(20,2)", - "NUMBER", - 444444444444444444, - allowedReturnValues=[ - "444444444444444444.00", - "444444444444444444,00", - decimal.Decimal("444444444444444444"), - ], - ) - if self.getEngine() == "MSSQL": - self.helpTestDataType( - "uniqueidentifier", - "UUID", - "{71A4F49E-39F3-42B1-A41E-48FF154996E6}", - allowedReturnValues=["{71A4F49E-39F3-42B1-A41E-48FF154996E6}"], - ) - - def testDataTypeMoney(self): # v2.1 Cole -- use decimal for money - if self.getEngine() == "MySQL": - self.helpTestDataType( - "DECIMAL(20,4)", "NUMBER", decimal.Decimal("-922337203685477.5808") - ) - elif self.getEngine() == "PostgreSQL": - self.helpTestDataType( - "money", - "NUMBER", - decimal.Decimal("-922337203685477.5808"), - compareAlmostEqual=True, - allowedReturnValues=[ - -922337203685477.5808, - decimal.Decimal("-922337203685477.5808"), - ], - ) - else: - self.helpTestDataType("smallmoney", "NUMBER", decimal.Decimal("214748.02")) - self.helpTestDataType( - "money", "NUMBER", decimal.Decimal("-922337203685477.5808") - ) - - def testDataTypeInt(self): - if self.getEngine() != "PostgreSQL": - self.helpTestDataType("tinyint", "NUMBER", 115) - self.helpTestDataType("smallint", "NUMBER", -32768) - if self.getEngine() not in ["ACCESS", "PostgreSQL"]: - self.helpTestDataType( - "bit", "NUMBER", 1 - ) # Does not work correctly with access - if self.getEngine() in ["MSSQL", "PostgreSQL"]: - self.helpTestDataType( - "bigint", - "NUMBER", - 3000000000, - allowedReturnValues=[3000000000, int(3000000000)], - ) - self.helpTestDataType("int", "NUMBER", 2147483647) - - def testDataTypeChar(self): - for sqlDataType in ("char(6)", "nchar(6)"): - self.helpTestDataType( - sqlDataType, - "STRING", - "spam ", - allowedReturnValues=["spam", "spam", "spam ", "spam "], - ) - - def testDataTypeVarChar(self): - if self.getEngine() == "MySQL": - 
stringKinds = ["varchar(10)", "text"] - elif self.getEngine() == "PostgreSQL": - stringKinds = ["varchar(10)", "text", "character varying"] - else: - stringKinds = [ - "varchar(10)", - "nvarchar(10)", - "text", - "ntext", - ] # ,"varchar(max)"] - - for sqlDataType in stringKinds: - self.helpTestDataType(sqlDataType, "STRING", "spam", ["spam"]) - - def testDataTypeDate(self): - if self.getEngine() == "PostgreSQL": - dt = "timestamp" - else: - dt = "datetime" - self.helpTestDataType( - dt, "DATETIME", adodbapi.Date(2002, 10, 28), compareAlmostEqual=True - ) - if self.getEngine() not in ["MySQL", "PostgreSQL"]: - self.helpTestDataType( - "smalldatetime", - "DATETIME", - adodbapi.Date(2002, 10, 28), - compareAlmostEqual=True, - ) - if tag != "pythontime" and self.getEngine() not in [ - "MySQL", - "PostgreSQL", - ]: # fails when using pythonTime - self.helpTestDataType( - dt, - "DATETIME", - adodbapi.Timestamp(2002, 10, 28, 12, 15, 1), - compareAlmostEqual=True, - ) - - def testDataTypeBinary(self): - binfld = str2bytes("\x07\x00\xE2\x40*") - arv = [binfld, adodbapi.Binary(binfld), bytes(binfld)] - if self.getEngine() == "PostgreSQL": - self.helpTestDataType( - "bytea", "BINARY", adodbapi.Binary(binfld), allowedReturnValues=arv - ) - else: - self.helpTestDataType( - "binary(5)", "BINARY", adodbapi.Binary(binfld), allowedReturnValues=arv - ) - self.helpTestDataType( - "varbinary(100)", - "BINARY", - adodbapi.Binary(binfld), - allowedReturnValues=arv, - ) - if self.getEngine() != "MySQL": - self.helpTestDataType( - "image", "BINARY", adodbapi.Binary(binfld), allowedReturnValues=arv - ) - - def helpRollbackTblTemp(self): - self.helpForceDropOnTblTemp() - - def helpForceDropOnTblTemp(self): - conn = self.getConnection() - with conn.cursor() as crsr: - try: - crsr.execute("DROP TABLE xx_%s" % config.tmp) - if not conn.autocommit: - conn.commit() - except: - pass - - def helpCreateAndPopulateTableTemp(self, crsr): - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldData INTEGER - 
) - """ - % config.tmp - ) - try: # EAFP - crsr.execute(tabdef) - except api.DatabaseError: # was not dropped before - self.helpForceDropOnTblTemp() # so drop it now - crsr.execute(tabdef) - for i in range(9): # note: this poor SQL code, but a valid test - crsr.execute("INSERT INTO xx_%s (fldData) VALUES (%i)" % (config.tmp, i)) - # NOTE: building the test table without using parameter substitution - - def testFetchAll(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchall() - assert len(rs) == 9 - # test slice of rows - i = 3 - for row in rs[3:-2]: # should have rowid 3..6 - assert row[0] == i - i += 1 - self.helpRollbackTblTemp() - - def testPreparedStatement(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.prepare("SELECT fldData FROM xx_%s" % config.tmp) - crsr.execute(crsr.command) # remember the one that was prepared - rs = crsr.fetchall() - assert len(rs) == 9 - assert rs[2][0] == 2 - self.helpRollbackTblTemp() - - def testWrongPreparedStatement(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.prepare("SELECT * FROM nowhere") - crsr.execute( - "SELECT fldData FROM xx_%s" % config.tmp - ) # should execute this one, not the prepared one - rs = crsr.fetchall() - assert len(rs) == 9 - assert rs[2][0] == 2 - self.helpRollbackTblTemp() - - def testIterator(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - for i, row in enumerate( - crsr - ): # using cursor as an iterator, rather than fetchxxx - assert row[0] == i - self.helpRollbackTblTemp() - - def testExecuteMany(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - seq_of_values = [(111,), (222,)] - crsr.executemany( - "INSERT INTO xx_%s (fldData) VALUES (?)" % config.tmp, seq_of_values - ) - if crsr.rowcount == -1: - print( - 
self.getEngine() - + " Provider does not support rowcount (on .executemany())" - ) - else: - self.assertEqual(crsr.rowcount, 2) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchall() - assert len(rs) == 11 - self.helpRollbackTblTemp() - - def testRowCount(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - if crsr.rowcount == -1: - # print("provider does not support rowcount on select") - pass - else: - self.assertEqual(crsr.rowcount, 9) - self.helpRollbackTblTemp() - - def testRowCountNoRecordset(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("DELETE FROM xx_%s WHERE fldData >= 5" % config.tmp) - if crsr.rowcount == -1: - print(self.getEngine() + " Provider does not support rowcount (on DELETE)") - else: - self.assertEqual(crsr.rowcount, 4) - self.helpRollbackTblTemp() - - def testFetchMany(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchmany(3) - assert len(rs) == 3 - rs = crsr.fetchmany(5) - assert len(rs) == 5 - rs = crsr.fetchmany(5) - assert len(rs) == 1 # Asked for five, but there is only one left - self.helpRollbackTblTemp() - - def testFetchManyWithArraySize(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchmany() - assert len(rs) == 1 # arraysize Defaults to one - crsr.arraysize = 4 - rs = crsr.fetchmany() - assert len(rs) == 4 - rs = crsr.fetchmany() - assert len(rs) == 4 - rs = crsr.fetchmany() - assert len(rs) == 0 - self.helpRollbackTblTemp() - - def testErrorConnect(self): - conn = self.getConnection() - kw = {} - if "proxy_host" in conn.kwargs: - kw["proxy_host"] = conn.kwargs["proxy_host"] - conn.close() - self.assertRaises(api.DatabaseError, self.db, "not a valid connect string", kw) - - def 
testRowIterator(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldTwo integer, - fldThree integer, - fldFour integer) - """ - % config.tmp - ) - crsr.execute(tabdef) - - inputs = [(2, 3, 4), (102, 103, 104)] - fldId = 1 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldTwo,fldThree,fldFour) VALUES (?,?,?,?)" - % config.tmp, - (fldId, inParam[0], inParam[1], inParam[2]), - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldTwo,fldThree,fldFour FROM xx_%s WHERE ?=fldID" % config.tmp, - [fldId], - ) - rec = crsr.fetchone() - # check that stepping through an emulated row works - for j in range(len(inParam)): - assert ( - rec[j] == inParam[j] - ), 'returned value:"%s" != test value:"%s"' % (rec[j], inParam[j]) - # check that we can get a complete tuple from a row - assert tuple(rec) == inParam, 'returned value:"%s" != test value:"%s"' % ( - repr(rec), - repr(inParam), - ) - # test that slices of rows work - slice1 = tuple(rec[:-1]) - slice2 = tuple(inParam[0:2]) - assert slice1 == slice2, 'returned value:"%s" != test value:"%s"' % ( - repr(slice1), - repr(slice2), - ) - # now test named column retrieval - assert rec["fldTwo"] == inParam[0] - assert rec.fldThree == inParam[1] - assert rec.fldFour == inParam[2] - # test array operation - # note that the fields vv vv vv are out of order - crsr.execute("select fldThree,fldFour,fldTwo from xx_%s" % config.tmp) - recs = crsr.fetchall() - assert recs[1][0] == 103 - assert recs[0][1] == 4 - assert recs[1]["fldFour"] == 104 - assert recs[0, 0] == 3 - assert recs[0, "fldTwo"] == 2 - assert recs[1, 2] == 102 - for i in range(1): - for j in range(2): - assert recs[i][j] == recs[i, j] - - def testFormatParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - 
conn.paramstyle = "format" # test nonstandard use of paramstyle - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10), - fldConst varchar(30)) - """ - % config.tmp - ) - crsr.execute(tabdef) - - inputs = ["one", "two", "three"] - fldId = 2 - for inParam in inputs: - fldId += 1 - sql = ( - "INSERT INTO xx_" - + config.tmp - + " (fldId,fldConst,fldData) VALUES (%s,'thi%s :may cause? trouble', %s)" - ) - try: - crsr.execute(sql, (fldId, inParam)) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE %s=fldID", - [fldId], - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - self.assertEqual(rec[1], "thi%s :may cause? trouble") - - # now try an operation with a "%s" as part of a literal - sel = ( - "insert into xx_" + config.tmp + " (fldId,fldData) VALUES (%s,'four%sfive')" - ) - params = (20,) - crsr.execute(sel, params) - - # test the .query implementation - assert "(?," in crsr.query, 'expected:"%s" in "%s"' % ("(?,", crsr.query) - # test the .command attribute - assert crsr.command == sel, 'expected:"%s" but found "%s"' % (sel, crsr.command) - - # test the .parameters attribute - if not self.remote: # parameter list will be altered in transit - self.assertEqual(crsr.parameters, params) - # now make sure the data made it - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=20" % config.tmp) - rec = crsr.fetchone() - self.assertEqual(rec[0], "four%sfive") - - def testNamedParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - crsr.paramstyle = "named" # test nonstandard use of paramstyle - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10)) - """ - % config.tmp - ) - crsr.execute(tabdef) - - inputs = 
["four", "five", "six"] - fldId = 10 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" - % config.tmp, - {"f_Val": inParam, "Id": fldId}, - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE fldID=:Id" % config.tmp, {"Id": fldId} - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - # now a test with a ":" as part of a literal - crsr.execute( - "insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp, - {"xyz": 30}, - ) - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) - rec = crsr.fetchone() - self.assertEqual(rec[0], "six:five") - - def testPyformatParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - crsr.paramstyle = "pyformat" # test nonstandard use of paramstyle - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10)) - """ - % config.tmp - ) - crsr.execute(tabdef) - - inputs = ["four", "five", "six"] - fldId = 10 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (%%(Id)s,%%(f_Val)s)" - % config.tmp, - {"f_Val": inParam, "Id": fldId}, - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE fldID=%%(Id)s" % config.tmp, - {"Id": fldId}, - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - # now a test with a "%" as part of a literal - crsr.execute( - "insert into xx_%s (fldId,fldData) VALUES (%%(xyz)s,'six%%five')" - % config.tmp, - {"xyz": 30}, - ) - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) 
- rec = crsr.fetchone() - self.assertEqual(rec[0], "six%five") - - def testAutomaticParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - conn.paramstyle = "dynamic" # test nonstandard use of paramstyle - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10), - fldConst varchar(30)) - """ - % config.tmp - ) - crsr.execute(tabdef) - inputs = ["one", "two", "three"] - fldId = 2 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_" - + config.tmp - + " (fldId,fldConst,fldData) VALUES (?,'thi%s :may cause? troub:1e', ?)", - (fldId, inParam), - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - trouble = "thi%s :may cause? troub:1e" - crsr.execute( - "SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE ?=fldID", - [fldId], - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - self.assertEqual(rec[1], trouble) - # inputs = [u'four',u'five',u'six'] - fldId = 10 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" - % config.tmp, - {"f_Val": inParam, "Id": fldId}, - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE :Id=fldID" % config.tmp, {"Id": fldId} - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - # now a test with a ":" as part of a literal -- and use a prepared query - ppdcmd = ( - "insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp - ) - crsr.prepare(ppdcmd) - crsr.execute(ppdcmd, {"xyz": 30}) - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) - rec = crsr.fetchone() - 
self.assertEqual(rec[0], "six:five") - - def testRollBack(self): - conn = self.getConnection() - crsr = conn.cursor() - assert not crsr.connection.autocommit, "Unexpected beginning condition" - self.helpCreateAndPopulateTableTemp(crsr) - crsr.connection.commit() # commit the first bunch - - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - - selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - self.conn.rollback() - crsr.execute(selectSql) - assert ( - crsr.fetchone() == None - ), "cursor.fetchone should return None if a query retrieves no rows" - crsr.execute("SELECT fldData from xx_%s" % config.tmp) - rs = crsr.fetchall() - assert len(rs) == 9, "the original records should still be present" - self.helpRollbackTblTemp() - - def testCommit(self): - try: - con2 = self.getAnotherConnection() - except NotImplementedError: - return # should be "SKIP" for ACCESS - assert not con2.autocommit, "default should be manual commit" - crsr = con2.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - con2.commit() - - selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - crsr.close() - con2.close() - conn = self.getConnection() - crsr = self.getCursor() - with conn.cursor() as crsr: - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - assert rs[0][0] == 100 - self.helpRollbackTblTemp() - - def testAutoRollback(self): - try: - con2 = self.getAnotherConnection() - except NotImplementedError: - return # should be "SKIP" for ACCESS - assert not con2.autocommit, "unexpected beginning condition" - crsr = con2.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % 
config.tmp - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - crsr.close() - con2.close() - crsr = self.getCursor() - try: - crsr.execute( - selectSql - ) # closing the connection should have forced rollback - row = crsr.fetchone() - except api.DatabaseError: - row = None # if the entire table disappeared the rollback was perfect and the test passed - assert row == None, ( - "cursor.fetchone should return None if a query retrieves no rows. Got %s" - % repr(row) - ) - self.helpRollbackTblTemp() - - def testAutoCommit(self): - try: - ac_conn = self.getAnotherConnection({"autocommit": True}) - except NotImplementedError: - return # should be "SKIP" for ACCESS - crsr = ac_conn.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - crsr.close() - with self.getCursor() as crsr: - selectSql = "SELECT fldData from xx_%s" % config.tmp - crsr.execute( - selectSql - ) # closing the connection should _not_ have forced rollback - rs = crsr.fetchall() - assert len(rs) == 10, "all records should still be present" - ac_conn.close() - self.helpRollbackTblTemp() - - def testSwitchedAutoCommit(self): - try: - ac_conn = self.getAnotherConnection() - except NotImplementedError: - return # should be "SKIP" for ACCESS - ac_conn.autocommit = True - crsr = ac_conn.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - crsr.close() - conn = self.getConnection() - ac_conn.close() - with self.getCursor() as crsr: - selectSql = "SELECT fldData from xx_%s" % config.tmp - crsr.execute( - selectSql - ) # closing the connection should _not_ have forced rollback - rs = crsr.fetchall() - assert len(rs) == 10, "all records should still be present" - self.helpRollbackTblTemp() - - def testExtendedTypeHandling(self): - class XtendString(str): - pass - - class XtendInt(int): - pass - - class XtendFloat(float): - pass - - xs = 
XtendString(randomstring(30)) - xi = XtendInt(random.randint(-100, 500)) - xf = XtendFloat(random.random()) - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - s VARCHAR(40) NOT NULL, - i INTEGER NOT NULL, - f REAL NOT NULL)""" - % config.tmp - ) - crsr.execute(tabdef) - crsr.execute( - "INSERT INTO xx_%s (s, i, f) VALUES (?, ?, ?)" % config.tmp, (xs, xi, xf) - ) - crsr.close() - conn = self.getConnection() - with self.getCursor() as crsr: - selectSql = "SELECT s, i, f from xx_%s" % config.tmp - crsr.execute( - selectSql - ) # closing the connection should _not_ have forced rollback - row = crsr.fetchone() - self.assertEqual(row.s, xs) - self.assertEqual(row.i, xi) - self.assertAlmostEqual(row.f, xf) - self.helpRollbackTblTemp() - - -class TestADOwithSQLServer(CommonDBTests): - def setUp(self): - self.conn = config.dbSqlServerconnect( - *config.connStrSQLServer[0], **config.connStrSQLServer[1] - ) - self.conn.timeout = 30 # turn timeout back up - self.engine = "MSSQL" - self.db = config.dbSqlServerconnect - self.remote = config.connStrSQLServer[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - return self.conn - - def getAnotherConnection(self, addkeys=None): - keys = dict(config.connStrSQLServer[1]) - if addkeys: - keys.update(addkeys) - return config.dbSqlServerconnect(*config.connStrSQLServer[0], **keys) - - def testVariableReturningStoredProcedure(self): - crsr = self.conn.cursor() - spdef = """ - CREATE PROCEDURE sp_DeleteMeOnlyForTesting - @theInput varchar(50), - @theOtherInput varchar(50), - @theOutput varchar(100) OUTPUT - AS - SET @theOutput=@theInput+@theOtherInput - """ - try: - crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - retvalues = crsr.callproc( - 
"sp_DeleteMeOnlyForTesting", ("Dodsworth", "Anne", " ") - ) - assert retvalues[0] == "Dodsworth", '%s is not "Dodsworth"' % repr(retvalues[0]) - assert retvalues[1] == "Anne", '%s is not "Anne"' % repr(retvalues[1]) - assert retvalues[2] == "DodsworthAnne", '%s is not "DodsworthAnne"' % repr( - retvalues[2] - ) - self.conn.rollback() - - def testMultipleSetReturn(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - - spdef = """ - CREATE PROCEDURE sp_DeleteMe_OnlyForTesting - AS - SELECT fldData FROM xx_%s ORDER BY fldData ASC - SELECT fldData From xx_%s where fldData = -9999 - SELECT fldData FROM xx_%s ORDER BY fldData DESC - """ % ( - config.tmp, - config.tmp, - config.tmp, - ) - try: - crsr.execute("DROP PROCEDURE sp_DeleteMe_OnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - retvalues = crsr.callproc("sp_DeleteMe_OnlyForTesting") - row = crsr.fetchone() - self.assertEqual(row[0], 0) - assert crsr.nextset() == True, "Operation should succeed" - assert not crsr.fetchall(), "Should be an empty second set" - assert crsr.nextset() == True, "third set should be present" - rowdesc = crsr.fetchall() - self.assertEqual(rowdesc[0][0], 8) - assert crsr.nextset() == None, "No more return sets, should return None" - - self.helpRollbackTblTemp() - - def testDatetimeProcedureParameter(self): - crsr = self.conn.cursor() - spdef = """ - CREATE PROCEDURE sp_DeleteMeOnlyForTesting - @theInput DATETIME, - @theOtherInput varchar(50), - @theOutput varchar(100) OUTPUT - AS - SET @theOutput = CONVERT(CHARACTER(20), @theInput, 0) + @theOtherInput - """ - try: - crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - result = crsr.callproc( - "sp_DeleteMeOnlyForTesting", - [adodbapi.Timestamp(2014, 12, 25, 0, 1, 0), "Beep", " " * 30], - ) - - assert result[2] == "Dec 25 2014 12:01AM Beep", 'value was="%s"' % result[2] - 
self.conn.rollback() - - def testIncorrectStoredProcedureParameter(self): - crsr = self.conn.cursor() - spdef = """ - CREATE PROCEDURE sp_DeleteMeOnlyForTesting - @theInput DATETIME, - @theOtherInput varchar(50), - @theOutput varchar(100) OUTPUT - AS - SET @theOutput = CONVERT(CHARACTER(20), @theInput) + @theOtherInput - """ - try: - crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - # calling the sproc with a string for the first parameter where a DateTime is expected - result = tryconnection.try_operation_with_expected_exception( - (api.DataError, api.DatabaseError), - crsr.callproc, - ["sp_DeleteMeOnlyForTesting"], - {"parameters": ["this is wrong", "Anne", "not Alice"]}, - ) - if result[0]: # the expected exception was raised - assert "@theInput" in str(result[1]) or "DatabaseError" in str( - result - ), "Identifies the wrong erroneous parameter" - else: - assert result[0], result[1] # incorrect or no exception - self.conn.rollback() - - -class TestADOwithAccessDB(CommonDBTests): - def setUp(self): - self.conn = config.dbAccessconnect( - *config.connStrAccess[0], **config.connStrAccess[1] - ) - self.conn.timeout = 30 # turn timeout back up - self.engine = "ACCESS" - self.db = config.dbAccessconnect - self.remote = config.connStrAccess[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - return self.conn - - def getAnotherConnection(self, addkeys=None): - raise NotImplementedError("Jet cannot use a second connection to the database") - - def testOkConnect(self): - c = self.db(*config.connStrAccess[0], **config.connStrAccess[1]) - assert c != None - c.close() - - -class TestADOwithMySql(CommonDBTests): - def setUp(self): - self.conn = config.dbMySqlconnect( - *config.connStrMySql[0], **config.connStrMySql[1] - ) - self.conn.timeout = 30 # turn timeout back 
up - self.engine = "MySQL" - self.db = config.dbMySqlconnect - self.remote = config.connStrMySql[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - return self.conn - - def getAnotherConnection(self, addkeys=None): - keys = dict(config.connStrMySql[1]) - if addkeys: - keys.update(addkeys) - return config.dbMySqlconnect(*config.connStrMySql[0], **keys) - - def testOkConnect(self): - c = self.db(*config.connStrMySql[0], **config.connStrMySql[1]) - assert c != None - - # def testStoredProcedure(self): - # crsr=self.conn.cursor() - # try: - # crsr.execute("DROP PROCEDURE DeleteMeOnlyForTesting") - # self.conn.commit() - # except: #Make sure it is empty - # pass - # spdef= """ - # DELIMITER $$ - # CREATE PROCEDURE DeleteMeOnlyForTesting (onein CHAR(10), twoin CHAR(10), OUT theout CHAR(20)) - # DETERMINISTIC - # BEGIN - # SET theout = onein //|| twoin; - # /* (SELECT 'a small string' as result; */ - # END $$ - # """ - # - # crsr.execute(spdef) - # - # retvalues=crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' ')) - # print 'return value (mysql)=',repr(crsr.returnValue) ### - # assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0]) - # assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1]) - # assert retvalues[2]=='DodsworthAnne','%s is not "DodsworthAnne"'%repr(retvalues[2]) - # - # try: - # crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting") - # self.conn.commit() - # except: #Make sure it is empty - # pass - - -class TestADOwithPostgres(CommonDBTests): - def setUp(self): - self.conn = config.dbPostgresConnect( - *config.connStrPostgres[0], **config.connStrPostgres[1] - ) - self.conn.timeout = 30 # turn timeout back up - self.engine = "PostgreSQL" - self.db = config.dbPostgresConnect - self.remote = config.connStrPostgres[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - 
self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - return self.conn - - def getAnotherConnection(self, addkeys=None): - keys = dict(config.connStrPostgres[1]) - if addkeys: - keys.update(addkeys) - return config.dbPostgresConnect(*config.connStrPostgres[0], **keys) - - def testOkConnect(self): - c = self.db(*config.connStrPostgres[0], **config.connStrPostgres[1]) - assert c != None - - # def testStoredProcedure(self): - # crsr=self.conn.cursor() - # spdef= """ - # CREATE OR REPLACE FUNCTION DeleteMeOnlyForTesting (text, text) - # RETURNS text AS $funk$ - # BEGIN - # RETURN $1 || $2; - # END; - # $funk$ - # LANGUAGE SQL; - # """ - # - # crsr.execute(spdef) - # retvalues = crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' ')) - # ### print 'return value (pg)=',repr(crsr.returnValue) ### - # assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0]) - # assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1]) - # assert retvalues[2]=='Dodsworth Anne','%s is not "Dodsworth Anne"'%repr(retvalues[2]) - # self.conn.rollback() - # try: - # crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting") - # self.conn.commit() - # except: #Make sure it is empty - # pass - - -class TimeConverterInterfaceTest(unittest.TestCase): - def testIDate(self): - assert self.tc.Date(1990, 2, 2) - - def testITime(self): - assert self.tc.Time(13, 2, 2) - - def testITimestamp(self): - assert self.tc.Timestamp(1990, 2, 2, 13, 2, 1) - - def testIDateObjectFromCOMDate(self): - assert self.tc.DateObjectFromCOMDate(37435.7604282) - - def testICOMDate(self): - assert hasattr(self.tc, "COMDate") - - def testExactDate(self): - d = self.tc.Date(1994, 11, 15) - comDate = self.tc.COMDate(d) - correct = 34653.0 - assert comDate == correct, comDate - - def testExactTimestamp(self): - d = self.tc.Timestamp(1994, 11, 15, 12, 0, 0) - comDate = self.tc.COMDate(d) - correct = 34653.5 - self.assertEqual(comDate, correct) - - d = 
self.tc.Timestamp(2003, 5, 6, 14, 15, 17) - comDate = self.tc.COMDate(d) - correct = 37747.593946759262 - self.assertEqual(comDate, correct) - - def testIsoFormat(self): - d = self.tc.Timestamp(1994, 11, 15, 12, 3, 10) - iso = self.tc.DateObjectToIsoFormatString(d) - self.assertEqual(str(iso[:19]), "1994-11-15 12:03:10") - - dt = self.tc.Date(2003, 5, 2) - iso = self.tc.DateObjectToIsoFormatString(dt) - self.assertEqual(str(iso[:10]), "2003-05-02") - - -if config.doMxDateTimeTest: - import mx.DateTime - - -class TestMXDateTimeConverter(TimeConverterInterfaceTest): - def setUp(self): - self.tc = api.mxDateTimeConverter() - - def testCOMDate(self): - t = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 2) - cmd = self.tc.COMDate(t) - assert cmd == t.COMDate() - - def testDateObjectFromCOMDate(self): - cmd = self.tc.DateObjectFromCOMDate(37435.7604282) - t = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 0) - t2 = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 2) - assert t2 > cmd > t - - def testDate(self): - assert mx.DateTime.Date(1980, 11, 4) == self.tc.Date(1980, 11, 4) - - def testTime(self): - assert mx.DateTime.Time(13, 11, 4) == self.tc.Time(13, 11, 4) - - def testTimestamp(self): - t = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 1) - obj = self.tc.Timestamp(2002, 6, 28, 18, 15, 1) - assert t == obj - - -import time - - -class TestPythonTimeConverter(TimeConverterInterfaceTest): - def setUp(self): - self.tc = api.pythonTimeConverter() - - def testCOMDate(self): - mk = time.mktime((2002, 6, 28, 18, 15, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - t = time.localtime(mk) - # Fri, 28 Jun 2002 18:15:01 +0000 - cmd = self.tc.COMDate(t) - assert abs(cmd - 37435.7604282) < 1.0 / 24, "%f more than an hour wrong" % cmd - - def testDateObjectFromCOMDate(self): - cmd = self.tc.DateObjectFromCOMDate(37435.7604282) - t1 = time.gmtime( - time.mktime((2002, 6, 28, 0, 14, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - # there are errors in the implementation of gmtime which we ignore - t2 = 
time.gmtime( - time.mktime((2002, 6, 29, 12, 14, 2, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - assert t1 < cmd < t2, '"%s" should be about 2002-6-28 12:15:01' % repr(cmd) - - def testDate(self): - t1 = time.mktime((2002, 6, 28, 18, 15, 1, 4, 31 + 28 + 31 + 30 + 31 + 30, 0)) - t2 = time.mktime((2002, 6, 30, 18, 15, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, 0)) - obj = self.tc.Date(2002, 6, 29) - assert t1 < time.mktime(obj) < t2, obj - - def testTime(self): - self.assertEqual( - self.tc.Time(18, 15, 2), time.gmtime(18 * 60 * 60 + 15 * 60 + 2) - ) - - def testTimestamp(self): - t1 = time.localtime( - time.mktime((2002, 6, 28, 18, 14, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - t2 = time.localtime( - time.mktime((2002, 6, 28, 18, 16, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - obj = self.tc.Timestamp(2002, 6, 28, 18, 15, 2) - assert t1 < obj < t2, obj - - -class TestPythonDateTimeConverter(TimeConverterInterfaceTest): - def setUp(self): - self.tc = api.pythonDateTimeConverter() - - def testCOMDate(self): - t = datetime.datetime(2002, 6, 28, 18, 15, 1) - # Fri, 28 Jun 2002 18:15:01 +0000 - cmd = self.tc.COMDate(t) - assert abs(cmd - 37435.7604282) < 1.0 / 24, "more than an hour wrong" - - def testDateObjectFromCOMDate(self): - cmd = self.tc.DateObjectFromCOMDate(37435.7604282) - t1 = datetime.datetime(2002, 6, 28, 18, 14, 1) - t2 = datetime.datetime(2002, 6, 28, 18, 16, 1) - assert t1 < cmd < t2, cmd - - tx = datetime.datetime( - 2002, 6, 28, 18, 14, 1, 900000 - ) # testing that microseconds don't become milliseconds - c1 = self.tc.DateObjectFromCOMDate(self.tc.COMDate(tx)) - assert t1 < c1 < t2, c1 - - def testDate(self): - t1 = datetime.date(2002, 6, 28) - t2 = datetime.date(2002, 6, 30) - obj = self.tc.Date(2002, 6, 29) - assert t1 < obj < t2, obj - - def testTime(self): - self.assertEqual(self.tc.Time(18, 15, 2).isoformat()[:8], "18:15:02") - - def testTimestamp(self): - t1 = datetime.datetime(2002, 6, 28, 18, 14, 1) - t2 = datetime.datetime(2002, 6, 28, 18, 16, 1) - 
obj = self.tc.Timestamp(2002, 6, 28, 18, 15, 2) - assert t1 < obj < t2, obj - - -suites = [] -suites.append(unittest.makeSuite(TestPythonDateTimeConverter, "test")) -if config.doMxDateTimeTest: - suites.append(unittest.makeSuite(TestMXDateTimeConverter, "test")) -if config.doTimeTest: - suites.append(unittest.makeSuite(TestPythonTimeConverter, "test")) - -if config.doAccessTest: - suites.append(unittest.makeSuite(TestADOwithAccessDB, "test")) -if config.doSqlServerTest: - suites.append(unittest.makeSuite(TestADOwithSQLServer, "test")) -if config.doMySqlTest: - suites.append(unittest.makeSuite(TestADOwithMySql, "test")) -if config.doPostgresTest: - suites.append(unittest.makeSuite(TestADOwithPostgres, "test")) - - -class cleanup_manager(object): - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_val, exc_tb): - config.cleanup(config.testfolder, config.mdb_name) - - -suite = unittest.TestSuite(suites) -if __name__ == "__main__": - mysuite = copy.deepcopy(suite) - with cleanup_manager(): - defaultDateConverter = adodbapi.dateconverter - print(__doc__) - print("Default Date Converter is %s" % (defaultDateConverter,)) - dateconverter = defaultDateConverter - tag = "datetime" - unittest.TextTestRunner().run(mysuite) - - if config.iterateOverTimeTests: - for test, dateconverter, tag in ( - (config.doTimeTest, api.pythonTimeConverter, "pythontime"), - (config.doMxDateTimeTest, api.mxDateTimeConverter, "mx"), - ): - if test: - mysuite = copy.deepcopy( - suite - ) # work around a side effect of unittest.TextTestRunner - adodbapi.adodbapi.dateconverter = dateconverter() - print("Changed dateconverter to ") - print(adodbapi.adodbapi.dateconverter) - unittest.TextTestRunner().run(mysuite) diff --git a/lib/adodbapi/test/adodbapitestconfig.py b/lib/adodbapi/test/adodbapitestconfig.py deleted file mode 100644 index 98f25444..00000000 --- a/lib/adodbapi/test/adodbapitestconfig.py +++ /dev/null @@ -1,221 +0,0 @@ -# Configure this to _YOUR_ environment in order to run 
the testcases. -"testADOdbapiConfig.py v 2.6.2.B00" - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# # -# # TESTERS: -# # -# # You will need to make numerous modifications to this file -# # to adapt it to your own testing environment. -# # -# # Skip down to the next "# #" line -- -# # -- the things you need to change are below it. -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -import platform -import random -import sys - -import is64bit -import setuptestframework -import tryconnection - -print("\nPython", sys.version) -node = platform.node() -try: - print( - "node=%s, is64bit.os()= %s, is64bit.Python()= %s" - % (node, is64bit.os(), is64bit.Python()) - ) -except: - pass - -if "--help" in sys.argv: - print( - """Valid command-line switches are: - --package - create a temporary test package, run 2to3 if needed. - --all - run all possible tests - --time - loop over time format tests (including mxdatetime if present) - --nojet - do not test against an ACCESS database file - --mssql - test against Microsoft SQL server - --pg - test against PostgreSQL - --mysql - test against MariaDB - --remote= - test unsing remote server at= (experimental) - """ - ) - exit() -try: - onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python -except: - onWindows = False - -# create a random name for temporary table names -_alphabet = ( - "PYFGCRLAOEUIDHTNSQJKXBMWVZ" # why, yes, I do happen to use a dvorak keyboard -) -tmp = "".join([random.choice(_alphabet) for x in range(9)]) -mdb_name = "xx_" + tmp + ".mdb" # generate a non-colliding name for the temporary .mdb -testfolder = setuptestframework.maketemp() - -if "--package" in sys.argv: - # create a new adodbapi module -- running 2to3 if needed. 
- pth = setuptestframework.makeadopackage(testfolder) -else: - # use the adodbapi module in which this file appears - pth = setuptestframework.find_ado_path() -if pth not in sys.path: - # look here _first_ to find modules - sys.path.insert(1, pth) - -proxy_host = None -for arg in sys.argv: - if arg.startswith("--remote="): - proxy_host = arg.split("=")[1] - import adodbapi.remote as remote - - break - - -# function to clean up the temporary folder -- calling program must run this function before exit. -cleanup = setuptestframework.getcleanupfunction() -try: - import adodbapi # will (hopefully) be imported using the "pth" discovered above -except SyntaxError: - print( - '\n* * * Are you trying to run Python2 code using Python3? Re-run this test using the "--package" switch.' - ) - sys.exit(11) -try: - print(adodbapi.version) # show version -except: - print('"adodbapi.version" not present or not working.') -print(__doc__) - -verbose = False -for a in sys.argv: - if a.startswith("--verbose"): - arg = True - try: - arg = int(a.split("=")[1]) - except IndexError: - pass - adodbapi.adodbapi.verbose = arg - verbose = arg - -doAllTests = "--all" in sys.argv -doAccessTest = not ("--nojet" in sys.argv) -doSqlServerTest = "--mssql" in sys.argv or doAllTests -doMySqlTest = "--mysql" in sys.argv or doAllTests -doPostgresTest = "--pg" in sys.argv or doAllTests -iterateOverTimeTests = ("--time" in sys.argv or doAllTests) and onWindows - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# # start your environment setup here v v v -SQL_HOST_NODE = "testsql.2txt.us,1430" - -try: # If mx extensions are installed, use mxDateTime - import mx.DateTime - - doMxDateTimeTest = True -except: - doMxDateTimeTest = False # Requires eGenixMXExtensions - -doTimeTest = True # obsolete python time format - -if doAccessTest: - if proxy_host: # determine the (probably remote) database file folder - c = {"macro_find_temp_test_path": ["mdb", mdb_name], 
"proxy_host": proxy_host} - else: - c = {"mdb": setuptestframework.makemdb(testfolder, mdb_name)} - - # macro definition for keyword "provider" using macro "is64bit" -- see documentation - # is64bit will return true for 64 bit versions of Python, so the macro will select the ACE provider - # (If running a remote ADO service, this will test the 64-bitedness of the ADO server.) - c["macro_is64bit"] = [ - "provider", - "Microsoft.ACE.OLEDB.12.0", # 64 bit provider - "Microsoft.Jet.OLEDB.4.0", - ] # 32 bit provider - connStrAccess = "Provider=%(provider)s;Data Source=%(mdb)s" # ;Mode=ReadWrite;Persist Security Info=False;Jet OLEDB:Bypass UserInfo Validation=True" - print( - " ...Testing ACCESS connection to {} file...".format( - c.get("mdb", "remote .mdb") - ) - ) - doAccessTest, connStrAccess, dbAccessconnect = tryconnection.try_connection( - verbose, connStrAccess, 10, **c - ) - -if doSqlServerTest: - c = { - "host": SQL_HOST_NODE, # name of computer with SQL Server - "database": "adotest", - "user": "adotestuser", # None implies Windows security - "password": "Sq1234567", - # macro definition for keyword "security" using macro "auto_security" - "macro_auto_security": "security", - "provider": "MSOLEDBSQL; MARS Connection=True", - } - if proxy_host: - c["proxy_host"] = proxy_host - connStr = "Provider=%(provider)s; Initial Catalog=%(database)s; Data Source=%(host)s; %(security)s;" - print(" ...Testing MS-SQL login to {}...".format(c["host"])) - ( - doSqlServerTest, - connStrSQLServer, - dbSqlServerconnect, - ) = tryconnection.try_connection(verbose, connStr, 30, **c) - -if doMySqlTest: - c = { - "host": "testmysql.2txt.us", - "database": "adodbapitest", - "user": "adotest", - "password": "12345678", - "port": "3330", # note the nonstandard port for obfuscation - "driver": "MySQL ODBC 5.1 Driver", - } # or _driver="MySQL ODBC 3.51 Driver - if proxy_host: - c["proxy_host"] = proxy_host - c["macro_is64bit"] = [ - "provider", - "Provider=MSDASQL;", - ] # turn on the 64 
bit ODBC adapter only if needed - cs = ( - "%(provider)sDriver={%(driver)s};Server=%(host)s;Port=3330;" - + "Database=%(database)s;user=%(user)s;password=%(password)s;Option=3;" - ) - print(" ...Testing MySql login to {}...".format(c["host"])) - doMySqlTest, connStrMySql, dbMySqlconnect = tryconnection.try_connection( - verbose, cs, 5, **c - ) - - -if doPostgresTest: - _computername = "testpg.2txt.us" - _databasename = "adotest" - _username = "adotestuser" - _password = "12345678" - kws = {"timeout": 4} - kws["macro_is64bit"] = [ - "prov_drv", - "Provider=MSDASQL;Driver={PostgreSQL Unicode(x64)}", - "Driver=PostgreSQL Unicode", - ] - # get driver from http://www.postgresql.org/ftp/odbc/versions/ - # test using positional and keyword arguments (bad example for real code) - if proxy_host: - kws["proxy_host"] = proxy_host - print(" ...Testing PostgreSQL login to {}...".format(_computername)) - doPostgresTest, connStrPostgres, dbPostgresConnect = tryconnection.try_connection( - verbose, - "%(prov_drv)s;Server=%(host)s;Database=%(database)s;uid=%(user)s;pwd=%(password)s;port=5430;", # note nonstandard port - _username, - _password, - _computername, - _databasename, - **kws - ) - -assert ( - doAccessTest or doSqlServerTest or doMySqlTest or doPostgresTest -), "No database engine found for testing" diff --git a/lib/adodbapi/test/dbapi20.py b/lib/adodbapi/test/dbapi20.py deleted file mode 100644 index e378b194..00000000 --- a/lib/adodbapi/test/dbapi20.py +++ /dev/null @@ -1,939 +0,0 @@ -#!/usr/bin/env python -""" Python DB API 2.0 driver compliance unit test suite. - - This software is Public Domain and may be used without restrictions. - - "Now we have booze and barflies entering the discussion, plus rumours of - DBAs on drugs... and I won't tell you what flashes through my mind each - time I read the subject line with 'Anal Compliance' in it. All around - this is turning out to be a thoroughly unwholesome unit test." 
- - -- Ian Bicking -""" - -__version__ = "$Revision: 1.15.0 $"[11:-2] -__author__ = "Stuart Bishop " - -import sys -import time -import unittest - -if sys.version[0] >= "3": # python 3.x - _BaseException = Exception - - def _failUnless(self, expr, msg=None): - self.assertTrue(expr, msg) - -else: # python 2.x - from exceptions import Exception as _BaseException - - def _failUnless(self, expr, msg=None): - self.failUnless(expr, msg) ## deprecated since Python 2.6 - - -# set this to "True" to follow API 2.0 to the letter -TEST_FOR_NON_IDEMPOTENT_CLOSE = False - -# Revision 1.15 2019/11/22 00:50:00 kf7xm -# Make Turn off IDEMPOTENT_CLOSE a proper skipTest - -# Revision 1.14 2013/05/20 11:02:05 kf7xm -# Add a literal string to the format insertion test to catch trivial re-format algorithms - -# Revision 1.13 2013/05/08 14:31:50 kf7xm -# Quick switch to Turn off IDEMPOTENT_CLOSE test. Also: Silence teardown failure - - -# Revision 1.12 2009/02/06 03:35:11 kf7xm -# Tested okay with Python 3.0, includes last minute patches from Mark H. -# -# Revision 1.1.1.1.2.1 2008/09/20 19:54:59 rupole -# Include latest changes from main branch -# Updates for py3k -# -# Revision 1.11 2005/01/02 02:41:01 zenzen -# Update author email address -# -# Revision 1.10 2003/10/09 03:14:14 zenzen -# Add test for DB API 2.0 optional extension, where database exceptions -# are exposed as attributes on the Connection object. -# -# Revision 1.9 2003/08/13 01:16:36 zenzen -# Minor tweak from Stefan Fleiter -# -# Revision 1.8 2003/04/10 00:13:25 zenzen -# Changes, as per suggestions by M.-A. 
Lemburg -# - Add a table prefix, to ensure namespace collisions can always be avoided -# -# Revision 1.7 2003/02/26 23:33:37 zenzen -# Break out DDL into helper functions, as per request by David Rushby -# -# Revision 1.6 2003/02/21 03:04:33 zenzen -# Stuff from Henrik Ekelund: -# added test_None -# added test_nextset & hooks -# -# Revision 1.5 2003/02/17 22:08:43 zenzen -# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize -# defaults to 1 & generic cursor.callproc test added -# -# Revision 1.4 2003/02/15 00:16:33 zenzen -# Changes, as per suggestions and bug reports by M.-A. Lemburg, -# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar -# - Class renamed -# - Now a subclass of TestCase, to avoid requiring the driver stub -# to use multiple inheritance -# - Reversed the polarity of buggy test in test_description -# - Test exception heirarchy correctly -# - self.populate is now self._populate(), so if a driver stub -# overrides self.ddl1 this change propogates -# - VARCHAR columns now have a width, which will hopefully make the -# DDL even more portible (this will be reversed if it causes more problems) -# - cursor.rowcount being checked after various execute and fetchXXX methods -# - Check for fetchall and fetchmany returning empty lists after results -# are exhausted (already checking for empty lists if select retrieved -# nothing -# - Fix bugs in test_setoutputsize_basic and test_setinputsizes -# -def str2bytes(sval): - if sys.version_info < (3, 0) and isinstance(sval, str): - sval = sval.decode("latin1") - return sval.encode("latin1") # python 3 make unicode into bytes - - -class DatabaseAPI20Test(unittest.TestCase): - """Test a database self.driver for DB API 2.0 compatibility. - This implementation tests Gadfly, but the TestCase - is structured so that other self.drivers can subclass this - test case to ensure compiliance with the DB-API. 
It is - expected that this TestCase may be expanded in the future - if ambiguities or edge conditions are discovered. - - The 'Optional Extensions' are not yet being tested. - - self.drivers should subclass this test, overriding setUp, tearDown, - self.driver, connect_args and connect_kw_args. Class specification - should be as follows: - - import dbapi20 - class mytest(dbapi20.DatabaseAPI20Test): - [...] - - Don't 'import DatabaseAPI20Test from dbapi20', or you will - confuse the unit tester - just 'import dbapi20'. - """ - - # The self.driver module. This should be the module where the 'connect' - # method is to be found - driver = None - connect_args = () # List of arguments to pass to connect - connect_kw_args = {} # Keyword arguments for connect - table_prefix = "dbapi20test_" # If you need to specify a prefix for tables - - ddl1 = "create table %sbooze (name varchar(20))" % table_prefix - ddl2 = "create table %sbarflys (name varchar(20), drink varchar(30))" % table_prefix - xddl1 = "drop table %sbooze" % table_prefix - xddl2 = "drop table %sbarflys" % table_prefix - - lowerfunc = "lower" # Name of stored procedure to convert string->lowercase - - # Some drivers may need to override these helpers, for example adding - # a 'commit' after the execute. - def executeDDL1(self, cursor): - cursor.execute(self.ddl1) - - def executeDDL2(self, cursor): - cursor.execute(self.ddl2) - - def setUp(self): - """self.drivers should override this method to perform required setup - if any is necessary, such as creating the database. - """ - pass - - def tearDown(self): - """self.drivers should override this method to perform required cleanup - if any is necessary, such as deleting the test database. - The default drops the tables that may be created. - """ - try: - con = self._connect() - try: - cur = con.cursor() - for ddl in (self.xddl1, self.xddl2): - try: - cur.execute(ddl) - con.commit() - except self.driver.Error: - # Assume table didn't exist. 
Other tests will check if - # execute is busted. - pass - finally: - con.close() - except _BaseException: - pass - - def _connect(self): - try: - r = self.driver.connect(*self.connect_args, **self.connect_kw_args) - except AttributeError: - self.fail("No connect method found in self.driver module") - return r - - def test_connect(self): - con = self._connect() - con.close() - - def test_apilevel(self): - try: - # Must exist - apilevel = self.driver.apilevel - # Must equal 2.0 - self.assertEqual(apilevel, "2.0") - except AttributeError: - self.fail("Driver doesn't define apilevel") - - def test_threadsafety(self): - try: - # Must exist - threadsafety = self.driver.threadsafety - # Must be a valid value - _failUnless(self, threadsafety in (0, 1, 2, 3)) - except AttributeError: - self.fail("Driver doesn't define threadsafety") - - def test_paramstyle(self): - try: - # Must exist - paramstyle = self.driver.paramstyle - # Must be a valid value - _failUnless( - self, paramstyle in ("qmark", "numeric", "named", "format", "pyformat") - ) - except AttributeError: - self.fail("Driver doesn't define paramstyle") - - def test_Exceptions(self): - # Make sure required exceptions exist, and are in the - # defined heirarchy. 
- if sys.version[0] == "3": # under Python 3 StardardError no longer exists - self.assertTrue(issubclass(self.driver.Warning, Exception)) - self.assertTrue(issubclass(self.driver.Error, Exception)) - else: - self.failUnless(issubclass(self.driver.Warning, Exception)) - self.failUnless(issubclass(self.driver.Error, Exception)) - - _failUnless(self, issubclass(self.driver.InterfaceError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.DatabaseError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.OperationalError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.IntegrityError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.InternalError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.ProgrammingError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.NotSupportedError, self.driver.Error)) - - def test_ExceptionsAsConnectionAttributes(self): - # OPTIONAL EXTENSION - # Test for the optional DB API 2.0 extension, where the exceptions - # are exposed as attributes on the Connection object - # I figure this optional extension will be implemented by any - # driver author who is using this test suite, so it is enabled - # by default. 
- con = self._connect() - drv = self.driver - _failUnless(self, con.Warning is drv.Warning) - _failUnless(self, con.Error is drv.Error) - _failUnless(self, con.InterfaceError is drv.InterfaceError) - _failUnless(self, con.DatabaseError is drv.DatabaseError) - _failUnless(self, con.OperationalError is drv.OperationalError) - _failUnless(self, con.IntegrityError is drv.IntegrityError) - _failUnless(self, con.InternalError is drv.InternalError) - _failUnless(self, con.ProgrammingError is drv.ProgrammingError) - _failUnless(self, con.NotSupportedError is drv.NotSupportedError) - - def test_commit(self): - con = self._connect() - try: - # Commit must work, even if it doesn't do anything - con.commit() - finally: - con.close() - - def test_rollback(self): - con = self._connect() - # If rollback is defined, it should either work or throw - # the documented exception - if hasattr(con, "rollback"): - try: - con.rollback() - except self.driver.NotSupportedError: - pass - - def test_cursor(self): - con = self._connect() - try: - cur = con.cursor() - finally: - con.close() - - def test_cursor_isolation(self): - con = self._connect() - try: - # Make sure cursors created from the same connection have - # the documented transaction isolation level - cur1 = con.cursor() - cur2 = con.cursor() - self.executeDDL1(cur1) - cur1.execute( - "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix) - ) - cur2.execute("select name from %sbooze" % self.table_prefix) - booze = cur2.fetchall() - self.assertEqual(len(booze), 1) - self.assertEqual(len(booze[0]), 1) - self.assertEqual(booze[0][0], "Victoria Bitter") - finally: - con.close() - - def test_description(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - self.assertEqual( - cur.description, - None, - "cursor.description should be none after executing a " - "statement that can return no rows (such as DDL)", - ) - cur.execute("select name from %sbooze" % self.table_prefix) - 
self.assertEqual( - len(cur.description), 1, "cursor.description describes too many columns" - ) - self.assertEqual( - len(cur.description[0]), - 7, - "cursor.description[x] tuples must have 7 elements", - ) - self.assertEqual( - cur.description[0][0].lower(), - "name", - "cursor.description[x][0] must return column name", - ) - self.assertEqual( - cur.description[0][1], - self.driver.STRING, - "cursor.description[x][1] must return column type. Got %r" - % cur.description[0][1], - ) - - # Make sure self.description gets reset - self.executeDDL2(cur) - self.assertEqual( - cur.description, - None, - "cursor.description not being set to None when executing " - "no-result statements (eg. DDL)", - ) - finally: - con.close() - - def test_rowcount(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - _failUnless( - self, - cur.rowcount in (-1, 0), # Bug #543885 - "cursor.rowcount should be -1 or 0 after executing no-result " - "statements", - ) - cur.execute( - "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix) - ) - _failUnless( - self, - cur.rowcount in (-1, 1), - "cursor.rowcount should == number or rows inserted, or " - "set to -1 after executing an insert statement", - ) - cur.execute("select name from %sbooze" % self.table_prefix) - _failUnless( - self, - cur.rowcount in (-1, 1), - "cursor.rowcount should == number of rows returned, or " - "set to -1 after executing a select statement", - ) - self.executeDDL2(cur) - self.assertEqual( - cur.rowcount, - -1, - "cursor.rowcount not being reset to -1 after executing " - "no-result statements", - ) - finally: - con.close() - - lower_func = "lower" - - def test_callproc(self): - con = self._connect() - try: - cur = con.cursor() - if self.lower_func and hasattr(cur, "callproc"): - r = cur.callproc(self.lower_func, ("FOO",)) - self.assertEqual(len(r), 1) - self.assertEqual(r[0], "FOO") - r = cur.fetchall() - self.assertEqual(len(r), 1, "callproc produced no result set") - 
self.assertEqual(len(r[0]), 1, "callproc produced invalid result set") - self.assertEqual(r[0][0], "foo", "callproc produced invalid results") - finally: - con.close() - - def test_close(self): - con = self._connect() - try: - cur = con.cursor() - finally: - con.close() - - # cursor.execute should raise an Error if called after connection - # closed - self.assertRaises(self.driver.Error, self.executeDDL1, cur) - - # connection.commit should raise an Error if called after connection' - # closed.' - self.assertRaises(self.driver.Error, con.commit) - - # connection.close should raise an Error if called more than once - #!!! reasonable persons differ about the usefulness of this test and this feature !!! - if TEST_FOR_NON_IDEMPOTENT_CLOSE: - self.assertRaises(self.driver.Error, con.close) - else: - self.skipTest( - "Non-idempotent close is considered a bad thing by some people." - ) - - def test_execute(self): - con = self._connect() - try: - cur = con.cursor() - self._paraminsert(cur) - finally: - con.close() - - def _paraminsert(self, cur): - self.executeDDL2(cur) - cur.execute( - "insert into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')" - % (self.table_prefix) - ) - _failUnless(self, cur.rowcount in (-1, 1)) - - if self.driver.paramstyle == "qmark": - cur.execute( - "insert into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - ("Cooper's",), - ) - elif self.driver.paramstyle == "numeric": - cur.execute( - "insert into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - ("Cooper's",), - ) - elif self.driver.paramstyle == "named": - cur.execute( - "insert into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - {"beer": "Cooper's"}, - ) - elif self.driver.paramstyle == "format": - cur.execute( - "insert into %sbarflys values (%%s, 'thi%%s :may ca%%(u)se? 
troub:1e')" - % self.table_prefix, - ("Cooper's",), - ) - elif self.driver.paramstyle == "pyformat": - cur.execute( - "insert into %sbarflys values (%%(beer)s, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - {"beer": "Cooper's"}, - ) - else: - self.fail("Invalid paramstyle") - _failUnless(self, cur.rowcount in (-1, 1)) - - cur.execute("select name, drink from %sbarflys" % self.table_prefix) - res = cur.fetchall() - self.assertEqual(len(res), 2, "cursor.fetchall returned too few rows") - beers = [res[0][0], res[1][0]] - beers.sort() - self.assertEqual( - beers[0], - "Cooper's", - "cursor.fetchall retrieved incorrect data, or data inserted " "incorrectly", - ) - self.assertEqual( - beers[1], - "Victoria Bitter", - "cursor.fetchall retrieved incorrect data, or data inserted " "incorrectly", - ) - trouble = "thi%s :may ca%(u)se? troub:1e" - self.assertEqual( - res[0][1], - trouble, - "cursor.fetchall retrieved incorrect data, or data inserted " - "incorrectly. Got=%s, Expected=%s" % (repr(res[0][1]), repr(trouble)), - ) - self.assertEqual( - res[1][1], - trouble, - "cursor.fetchall retrieved incorrect data, or data inserted " - "incorrectly. 
Got=%s, Expected=%s" % (repr(res[1][1]), repr(trouble)), - ) - - def test_executemany(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - largs = [("Cooper's",), ("Boag's",)] - margs = [{"beer": "Cooper's"}, {"beer": "Boag's"}] - if self.driver.paramstyle == "qmark": - cur.executemany( - "insert into %sbooze values (?)" % self.table_prefix, largs - ) - elif self.driver.paramstyle == "numeric": - cur.executemany( - "insert into %sbooze values (:1)" % self.table_prefix, largs - ) - elif self.driver.paramstyle == "named": - cur.executemany( - "insert into %sbooze values (:beer)" % self.table_prefix, margs - ) - elif self.driver.paramstyle == "format": - cur.executemany( - "insert into %sbooze values (%%s)" % self.table_prefix, largs - ) - elif self.driver.paramstyle == "pyformat": - cur.executemany( - "insert into %sbooze values (%%(beer)s)" % (self.table_prefix), - margs, - ) - else: - self.fail("Unknown paramstyle") - _failUnless( - self, - cur.rowcount in (-1, 2), - "insert using cursor.executemany set cursor.rowcount to " - "incorrect value %r" % cur.rowcount, - ) - cur.execute("select name from %sbooze" % self.table_prefix) - res = cur.fetchall() - self.assertEqual( - len(res), 2, "cursor.fetchall retrieved incorrect number of rows" - ) - beers = [res[0][0], res[1][0]] - beers.sort() - self.assertEqual( - beers[0], "Boag's", 'incorrect data "%s" retrieved' % beers[0] - ) - self.assertEqual(beers[1], "Cooper's", "incorrect data retrieved") - finally: - con.close() - - def test_fetchone(self): - con = self._connect() - try: - cur = con.cursor() - - # cursor.fetchone should raise an Error if called before - # executing a select-type query - self.assertRaises(self.driver.Error, cur.fetchone) - - # cursor.fetchone should raise an Error if called after - # executing a query that cannnot return rows - self.executeDDL1(cur) - self.assertRaises(self.driver.Error, cur.fetchone) - - cur.execute("select name from %sbooze" % self.table_prefix) 
- self.assertEqual( - cur.fetchone(), - None, - "cursor.fetchone should return None if a query retrieves " "no rows", - ) - _failUnless(self, cur.rowcount in (-1, 0)) - - # cursor.fetchone should raise an Error if called after - # executing a query that cannnot return rows - cur.execute( - "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix) - ) - self.assertRaises(self.driver.Error, cur.fetchone) - - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchone() - self.assertEqual( - len(r), 1, "cursor.fetchone should have retrieved a single row" - ) - self.assertEqual( - r[0], "Victoria Bitter", "cursor.fetchone retrieved incorrect data" - ) - self.assertEqual( - cur.fetchone(), - None, - "cursor.fetchone should return None if no more rows available", - ) - _failUnless(self, cur.rowcount in (-1, 1)) - finally: - con.close() - - samples = [ - "Carlton Cold", - "Carlton Draft", - "Mountain Goat", - "Redback", - "Victoria Bitter", - "XXXX", - ] - - def _populate(self): - """Return a list of sql commands to setup the DB for the fetch - tests. 
- """ - populate = [ - "insert into %sbooze values ('%s')" % (self.table_prefix, s) - for s in self.samples - ] - return populate - - def test_fetchmany(self): - con = self._connect() - try: - cur = con.cursor() - - # cursor.fetchmany should raise an Error if called without - # issuing a query - self.assertRaises(self.driver.Error, cur.fetchmany, 4) - - self.executeDDL1(cur) - for sql in self._populate(): - cur.execute(sql) - - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchmany() - self.assertEqual( - len(r), - 1, - "cursor.fetchmany retrieved incorrect number of rows, " - "default of arraysize is one.", - ) - cur.arraysize = 10 - r = cur.fetchmany(3) # Should get 3 rows - self.assertEqual( - len(r), 3, "cursor.fetchmany retrieved incorrect number of rows" - ) - r = cur.fetchmany(4) # Should get 2 more - self.assertEqual( - len(r), 2, "cursor.fetchmany retrieved incorrect number of rows" - ) - r = cur.fetchmany(4) # Should be an empty sequence - self.assertEqual( - len(r), - 0, - "cursor.fetchmany should return an empty sequence after " - "results are exhausted", - ) - _failUnless(self, cur.rowcount in (-1, 6)) - - # Same as above, using cursor.arraysize - cur.arraysize = 4 - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchmany() # Should get 4 rows - self.assertEqual( - len(r), 4, "cursor.arraysize not being honoured by fetchmany" - ) - r = cur.fetchmany() # Should get 2 more - self.assertEqual(len(r), 2) - r = cur.fetchmany() # Should be an empty sequence - self.assertEqual(len(r), 0) - _failUnless(self, cur.rowcount in (-1, 6)) - - cur.arraysize = 6 - cur.execute("select name from %sbooze" % self.table_prefix) - rows = cur.fetchmany() # Should get all rows - _failUnless(self, cur.rowcount in (-1, 6)) - self.assertEqual(len(rows), 6) - self.assertEqual(len(rows), 6) - rows = [r[0] for r in rows] - rows.sort() - - # Make sure we get the right data back out - for i in range(0, 6): - self.assertEqual( - 
rows[i], - self.samples[i], - "incorrect data retrieved by cursor.fetchmany", - ) - - rows = cur.fetchmany() # Should return an empty list - self.assertEqual( - len(rows), - 0, - "cursor.fetchmany should return an empty sequence if " - "called after the whole result set has been fetched", - ) - _failUnless(self, cur.rowcount in (-1, 6)) - - self.executeDDL2(cur) - cur.execute("select name from %sbarflys" % self.table_prefix) - r = cur.fetchmany() # Should get empty sequence - self.assertEqual( - len(r), - 0, - "cursor.fetchmany should return an empty sequence if " - "query retrieved no rows", - ) - _failUnless(self, cur.rowcount in (-1, 0)) - - finally: - con.close() - - def test_fetchall(self): - con = self._connect() - try: - cur = con.cursor() - # cursor.fetchall should raise an Error if called - # without executing a query that may return rows (such - # as a select) - self.assertRaises(self.driver.Error, cur.fetchall) - - self.executeDDL1(cur) - for sql in self._populate(): - cur.execute(sql) - - # cursor.fetchall should raise an Error if called - # after executing a a statement that cannot return rows - self.assertRaises(self.driver.Error, cur.fetchall) - - cur.execute("select name from %sbooze" % self.table_prefix) - rows = cur.fetchall() - _failUnless(self, cur.rowcount in (-1, len(self.samples))) - self.assertEqual( - len(rows), - len(self.samples), - "cursor.fetchall did not retrieve all rows", - ) - rows = [r[0] for r in rows] - rows.sort() - for i in range(0, len(self.samples)): - self.assertEqual( - rows[i], self.samples[i], "cursor.fetchall retrieved incorrect rows" - ) - rows = cur.fetchall() - self.assertEqual( - len(rows), - 0, - "cursor.fetchall should return an empty list if called " - "after the whole result set has been fetched", - ) - _failUnless(self, cur.rowcount in (-1, len(self.samples))) - - self.executeDDL2(cur) - cur.execute("select name from %sbarflys" % self.table_prefix) - rows = cur.fetchall() - _failUnless(self, cur.rowcount in (-1, 
0)) - self.assertEqual( - len(rows), - 0, - "cursor.fetchall should return an empty list if " - "a select query returns no rows", - ) - - finally: - con.close() - - def test_mixedfetch(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - for sql in self._populate(): - cur.execute(sql) - - cur.execute("select name from %sbooze" % self.table_prefix) - rows1 = cur.fetchone() - rows23 = cur.fetchmany(2) - rows4 = cur.fetchone() - rows56 = cur.fetchall() - _failUnless(self, cur.rowcount in (-1, 6)) - self.assertEqual( - len(rows23), 2, "fetchmany returned incorrect number of rows" - ) - self.assertEqual( - len(rows56), 2, "fetchall returned incorrect number of rows" - ) - - rows = [rows1[0]] - rows.extend([rows23[0][0], rows23[1][0]]) - rows.append(rows4[0]) - rows.extend([rows56[0][0], rows56[1][0]]) - rows.sort() - for i in range(0, len(self.samples)): - self.assertEqual( - rows[i], self.samples[i], "incorrect data retrieved or inserted" - ) - finally: - con.close() - - def help_nextset_setUp(self, cur): - """Should create a procedure called deleteme - that returns two result sets, first the - number of rows in booze then "name from booze" - """ - raise NotImplementedError("Helper not implemented") - # sql=""" - # create procedure deleteme as - # begin - # select count(*) from booze - # select name from booze - # end - # """ - # cur.execute(sql) - - def help_nextset_tearDown(self, cur): - "If cleaning up is needed after nextSetTest" - raise NotImplementedError("Helper not implemented") - # cur.execute("drop procedure deleteme") - - def test_nextset(self): - con = self._connect() - try: - cur = con.cursor() - if not hasattr(cur, "nextset"): - return - - try: - self.executeDDL1(cur) - sql = self._populate() - for sql in self._populate(): - cur.execute(sql) - - self.help_nextset_setUp(cur) - - cur.callproc("deleteme") - numberofrows = cur.fetchone() - assert numberofrows[0] == len(self.samples) - assert cur.nextset() - names = 
cur.fetchall() - assert len(names) == len(self.samples) - s = cur.nextset() - assert s == None, "No more return sets, should return None" - finally: - self.help_nextset_tearDown(cur) - - finally: - con.close() - - def test_nextset(self): - raise NotImplementedError("Drivers need to override this test") - - def test_arraysize(self): - # Not much here - rest of the tests for this are in test_fetchmany - con = self._connect() - try: - cur = con.cursor() - _failUnless( - self, hasattr(cur, "arraysize"), "cursor.arraysize must be defined" - ) - finally: - con.close() - - def test_setinputsizes(self): - con = self._connect() - try: - cur = con.cursor() - cur.setinputsizes((25,)) - self._paraminsert(cur) # Make sure cursor still works - finally: - con.close() - - def test_setoutputsize_basic(self): - # Basic test is to make sure setoutputsize doesn't blow up - con = self._connect() - try: - cur = con.cursor() - cur.setoutputsize(1000) - cur.setoutputsize(2000, 0) - self._paraminsert(cur) # Make sure the cursor still works - finally: - con.close() - - def test_setoutputsize(self): - # Real test for setoutputsize is driver dependant - raise NotImplementedError("Driver needed to override this test") - - def test_None(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - cur.execute("insert into %sbooze values (NULL)" % self.table_prefix) - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchall() - self.assertEqual(len(r), 1) - self.assertEqual(len(r[0]), 1) - self.assertEqual(r[0][0], None, "NULL value not returned as None") - finally: - con.close() - - def test_Date(self): - d1 = self.driver.Date(2002, 12, 25) - d2 = self.driver.DateFromTicks(time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0))) - # Can we assume this? 
API doesn't specify, but it seems implied - # self.assertEqual(str(d1),str(d2)) - - def test_Time(self): - t1 = self.driver.Time(13, 45, 30) - t2 = self.driver.TimeFromTicks(time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0))) - # Can we assume this? API doesn't specify, but it seems implied - # self.assertEqual(str(t1),str(t2)) - - def test_Timestamp(self): - t1 = self.driver.Timestamp(2002, 12, 25, 13, 45, 30) - t2 = self.driver.TimestampFromTicks( - time.mktime((2002, 12, 25, 13, 45, 30, 0, 0, 0)) - ) - # Can we assume this? API doesn't specify, but it seems implied - # self.assertEqual(str(t1),str(t2)) - - def test_Binary(self): - b = self.driver.Binary(str2bytes("Something")) - b = self.driver.Binary(str2bytes("")) - - def test_STRING(self): - _failUnless( - self, hasattr(self.driver, "STRING"), "module.STRING must be defined" - ) - - def test_BINARY(self): - _failUnless( - self, hasattr(self.driver, "BINARY"), "module.BINARY must be defined." - ) - - def test_NUMBER(self): - _failUnless( - self, hasattr(self.driver, "NUMBER"), "module.NUMBER must be defined." - ) - - def test_DATETIME(self): - _failUnless( - self, hasattr(self.driver, "DATETIME"), "module.DATETIME must be defined." - ) - - def test_ROWID(self): - _failUnless( - self, hasattr(self.driver, "ROWID"), "module.ROWID must be defined." - ) diff --git a/lib/adodbapi/test/is64bit.py b/lib/adodbapi/test/is64bit.py deleted file mode 100644 index 39834540..00000000 --- a/lib/adodbapi/test/is64bit.py +++ /dev/null @@ -1,41 +0,0 @@ -"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version""" -import sys - - -def Python(): - if sys.platform == "cli": # IronPython - import System - - return System.IntPtr.Size == 8 - else: - try: - return sys.maxsize > 2147483647 - except AttributeError: - return sys.maxint > 2147483647 - - -def os(): - import platform - - pm = platform.machine() - if pm != ".." 
and pm.endswith("64"): # recent Python (not Iron) - return True - else: - import os - - if "PROCESSOR_ARCHITEW6432" in os.environ: - return True # 32 bit program running on 64 bit Windows - try: - return os.environ["PROCESSOR_ARCHITECTURE"].endswith( - "64" - ) # 64 bit Windows 64 bit program - except IndexError: - pass # not Windows - try: - return "64" in platform.architecture()[0] # this often works in Linux - except: - return False # is an older version of Python, assume also an older os (best we can guess) - - -if __name__ == "__main__": - print("is64bit.Python() =", Python(), "is64bit.os() =", os()) diff --git a/lib/adodbapi/test/setuptestframework.py b/lib/adodbapi/test/setuptestframework.py deleted file mode 100644 index fcaaf1ae..00000000 --- a/lib/adodbapi/test/setuptestframework.py +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/python2 -# Configure this in order to run the testcases. -"setuptestframework.py v 2.6.0.8" -import os -import shutil -import sys -import tempfile - -try: - OSErrors = (WindowsError, OSError) -except NameError: # not running on Windows - OSErrors = OSError - - -def maketemp(): - temphome = tempfile.gettempdir() - tempdir = os.path.join(temphome, "adodbapi_test") - try: - os.mkdir(tempdir) - except: - pass - return tempdir - - -def _cleanup_function(testfolder, mdb_name): - try: - os.unlink(os.path.join(testfolder, mdb_name)) - except: - pass # mdb database not present - try: - shutil.rmtree(testfolder) - print(" cleaned up folder", testfolder) - except: - pass # test package not present - - -def getcleanupfunction(): - return _cleanup_function - - -def find_ado_path(): - adoName = os.path.normpath(os.getcwd() + "/../../adodbapi.py") - adoPackage = os.path.dirname(adoName) - return adoPackage - - -# make a new package directory for the test copy of ado -def makeadopackage(testfolder): - adoName = os.path.normpath(os.getcwd() + "/../adodbapi.py") - adoPath = os.path.dirname(adoName) - if os.path.exists(adoName): - newpackage = 
os.path.join(testfolder, "adodbapi") - try: - os.mkdir(newpackage) - except OSErrors: - print( - "*Note: temporary adodbapi package already exists: may be two versions running?" - ) - for f in os.listdir(adoPath): - if f.endswith(".py"): - shutil.copy(os.path.join(adoPath, f), newpackage) - if sys.version_info >= (3, 0): # only when running Py3.n - save = sys.stdout - sys.stdout = None - from lib2to3.main import main # use 2to3 to make test package - - main("lib2to3.fixes", args=["-n", "-w", newpackage]) - sys.stdout = save - return testfolder - else: - raise EnvironmentError("Connot find source of adodbapi to test.") - - -def makemdb(testfolder, mdb_name): - # following setup code borrowed from pywin32 odbc test suite - # kindly contributed by Frank Millman. - import os - - _accessdatasource = os.path.join(testfolder, mdb_name) - if os.path.isfile(_accessdatasource): - print("using JET database=", _accessdatasource) - else: - try: - from win32com.client import constants - from win32com.client.gencache import EnsureDispatch - - win32 = True - except ImportError: # perhaps we are running IronPython - win32 = False # iron Python - try: - from System import Activator, Type - except: - pass - - # Create a brand-new database - what is the story with these? 
- dbe = None - for suffix in (".36", ".35", ".30"): - try: - if win32: - dbe = EnsureDispatch("DAO.DBEngine" + suffix) - else: - type = Type.GetTypeFromProgID("DAO.DBEngine" + suffix) - dbe = Activator.CreateInstance(type) - break - except: - pass - if dbe: - print(" ...Creating ACCESS db at " + _accessdatasource) - if win32: - workspace = dbe.Workspaces(0) - newdb = workspace.CreateDatabase( - _accessdatasource, constants.dbLangGeneral, constants.dbVersion40 - ) - else: - newdb = dbe.CreateDatabase( - _accessdatasource, ";LANGID=0x0409;CP=1252;COUNTRY=0" - ) - newdb.Close() - else: - print(" ...copying test ACCESS db to " + _accessdatasource) - mdbName = os.path.abspath( - os.path.join(os.path.dirname(__file__), "..", "examples", "test.mdb") - ) - import shutil - - shutil.copy(mdbName, _accessdatasource) - - return _accessdatasource - - -if __name__ == "__main__": - print("Setting up a Jet database for server to use for remote testing...") - temp = maketemp() - makemdb(temp, "server_test.mdb") diff --git a/lib/adodbapi/test/test_adodbapi_dbapi20.py b/lib/adodbapi/test/test_adodbapi_dbapi20.py deleted file mode 100644 index f8986484..00000000 --- a/lib/adodbapi/test/test_adodbapi_dbapi20.py +++ /dev/null @@ -1,200 +0,0 @@ -print("This module depends on the dbapi20 compliance tests created by Stuart Bishop") -print("(see db-sig mailing list history for info)") -import platform -import sys -import unittest - -import dbapi20 -import setuptestframework - -testfolder = setuptestframework.maketemp() -if "--package" in sys.argv: - pth = setuptestframework.makeadopackage(testfolder) - sys.argv.remove("--package") -else: - pth = setuptestframework.find_ado_path() -if pth not in sys.path: - sys.path.insert(1, pth) -# function to clean up the temporary folder -- calling program must run this function before exit. 
-cleanup = setuptestframework.getcleanupfunction() - -import adodbapi -import adodbapi.is64bit as is64bit - -db = adodbapi - -if "--verbose" in sys.argv: - db.adodbapi.verbose = 3 - -print(adodbapi.version) -print("Tested with dbapi20 %s" % dbapi20.__version__) - -try: - onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python -except: - onWindows = False - -node = platform.node() - -conn_kws = {} -host = "testsql.2txt.us,1430" # if None, will use macro to fill in node name -instance = r"%s\SQLEXPRESS" -conn_kws["name"] = "adotest" - -conn_kws["user"] = "adotestuser" # None implies Windows security -conn_kws["password"] = "Sq1234567" -# macro definition for keyword "security" using macro "auto_security" -conn_kws["macro_auto_security"] = "security" - -if host is None: - conn_kws["macro_getnode"] = ["host", instance] -else: - conn_kws["host"] = host - -conn_kws[ - "provider" -] = "Provider=MSOLEDBSQL;DataTypeCompatibility=80;MARS Connection=True;" -connStr = "%(provider)s; %(security)s; Initial Catalog=%(name)s;Data Source=%(host)s" - -if onWindows and node != "z-PC": - pass # default should make a local SQL Server connection -elif node == "xxx": # try Postgres database - _computername = "25.223.161.222" - _databasename = "adotest" - _username = "adotestuser" - _password = "12345678" - _driver = "PostgreSQL Unicode" - _provider = "" - connStr = "%sDriver={%s};Server=%s;Database=%s;uid=%s;pwd=%s;" % ( - _provider, - _driver, - _computername, - _databasename, - _username, - _password, - ) -elif node == "yyy": # ACCESS data base is known to fail some tests. 
- if is64bit.Python(): - driver = "Microsoft.ACE.OLEDB.12.0" - else: - driver = "Microsoft.Jet.OLEDB.4.0" - testmdb = setuptestframework.makemdb(testfolder) - connStr = r"Provider=%s;Data Source=%s" % (driver, testmdb) -else: # try a remote connection to an SQL server - conn_kws["proxy_host"] = "25.44.77.176" - import adodbapi.remote - - db = adodbapi.remote - -print("Using Connection String like=%s" % connStr) -print("Keywords=%s" % repr(conn_kws)) - - -class test_adodbapi(dbapi20.DatabaseAPI20Test): - driver = db - connect_args = (connStr,) - connect_kw_args = conn_kws - - def __init__(self, arg): - dbapi20.DatabaseAPI20Test.__init__(self, arg) - - def getTestMethodName(self): - return self.id().split(".")[-1] - - def setUp(self): - # Call superclass setUp In case this does something in the - # future - dbapi20.DatabaseAPI20Test.setUp(self) - if self.getTestMethodName() == "test_callproc": - con = self._connect() - engine = con.dbms_name - ## print('Using database Engine=%s' % engine) ## - if engine != "MS Jet": - sql = """ - create procedure templower - @theData varchar(50) - as - select lower(@theData) - """ - else: # Jet - sql = """ - create procedure templower - (theData varchar(50)) - as - select lower(theData); - """ - cur = con.cursor() - try: - cur.execute(sql) - con.commit() - except: - pass - cur.close() - con.close() - self.lower_func = "templower" - - def tearDown(self): - if self.getTestMethodName() == "test_callproc": - con = self._connect() - cur = con.cursor() - try: - cur.execute("drop procedure templower") - except: - pass - con.commit() - dbapi20.DatabaseAPI20Test.tearDown(self) - - def help_nextset_setUp(self, cur): - "Should create a procedure called deleteme" - 'that returns two result sets, first the number of rows in booze then "name from booze"' - sql = """ - create procedure deleteme as - begin - select count(*) from %sbooze - select name from %sbooze - end - """ % ( - self.table_prefix, - self.table_prefix, - ) - cur.execute(sql) - - 
def help_nextset_tearDown(self, cur): - "If cleaning up is needed after nextSetTest" - try: - cur.execute("drop procedure deleteme") - except: - pass - - def test_nextset(self): - con = self._connect() - try: - cur = con.cursor() - - stmts = [self.ddl1] + self._populate() - for sql in stmts: - cur.execute(sql) - - self.help_nextset_setUp(cur) - - cur.callproc("deleteme") - numberofrows = cur.fetchone() - assert numberofrows[0] == 6 - assert cur.nextset() - names = cur.fetchall() - assert len(names) == len(self.samples) - s = cur.nextset() - assert s == None, "No more return sets, should return None" - finally: - try: - self.help_nextset_tearDown(cur) - finally: - con.close() - - def test_setoutputsize(self): - pass - - -if __name__ == "__main__": - unittest.main() - cleanup(testfolder, None) diff --git a/lib/adodbapi/test/tryconnection.py b/lib/adodbapi/test/tryconnection.py deleted file mode 100644 index 9d3901a8..00000000 --- a/lib/adodbapi/test/tryconnection.py +++ /dev/null @@ -1,33 +0,0 @@ -remote = False # automatic testing of remote access has been removed here - - -def try_connection(verbose, *args, **kwargs): - import adodbapi - - dbconnect = adodbapi.connect - try: - s = dbconnect(*args, **kwargs) # connect to server - if verbose: - print("Connected to:", s.connection_string) - print("which has tables:", s.get_table_names()) - s.close() # thanks, it worked, goodbye - except adodbapi.DatabaseError as inst: - print(inst.args[0]) # should be the error message - print("***Failed getting connection using=", repr(args), repr(kwargs)) - return False, (args, kwargs), None - - print(" (successful)") - - return True, (args, kwargs, remote), dbconnect - - -def try_operation_with_expected_exception( - expected_exception_list, some_function, *args, **kwargs -): - try: - some_function(*args, **kwargs) - except expected_exception_list as e: - return True, e - except: - raise # an exception other than the expected occurred - return False, "The expected exception did not 
occur" diff --git a/lib/annotated_types/__init__.py b/lib/annotated_types/__init__.py deleted file mode 100644 index 2f989504..00000000 --- a/lib/annotated_types/__init__.py +++ /dev/null @@ -1,396 +0,0 @@ -import math -import sys -from dataclasses import dataclass -from datetime import timezone -from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union - -if sys.version_info < (3, 8): - from typing_extensions import Protocol, runtime_checkable -else: - from typing import Protocol, runtime_checkable - -if sys.version_info < (3, 9): - from typing_extensions import Annotated, Literal -else: - from typing import Annotated, Literal - -if sys.version_info < (3, 10): - EllipsisType = type(Ellipsis) - KW_ONLY = {} - SLOTS = {} -else: - from types import EllipsisType - - KW_ONLY = {"kw_only": True} - SLOTS = {"slots": True} - - -__all__ = ( - 'BaseMetadata', - 'GroupedMetadata', - 'Gt', - 'Ge', - 'Lt', - 'Le', - 'Interval', - 'MultipleOf', - 'MinLen', - 'MaxLen', - 'Len', - 'Timezone', - 'Predicate', - 'LowerCase', - 'UpperCase', - 'IsDigits', - 'IsFinite', - 'IsNotFinite', - 'IsNan', - 'IsNotNan', - 'IsInfinite', - 'IsNotInfinite', - 'doc', - 'DocInfo', - '__version__', -) - -__version__ = '0.6.0' - - -T = TypeVar('T') - - -# arguments that start with __ are considered -# positional only -# see https://peps.python.org/pep-0484/#positional-only-arguments - - -class SupportsGt(Protocol): - def __gt__(self: T, __other: T) -> bool: - ... - - -class SupportsGe(Protocol): - def __ge__(self: T, __other: T) -> bool: - ... - - -class SupportsLt(Protocol): - def __lt__(self: T, __other: T) -> bool: - ... - - -class SupportsLe(Protocol): - def __le__(self: T, __other: T) -> bool: - ... - - -class SupportsMod(Protocol): - def __mod__(self: T, __other: T) -> T: - ... - - -class SupportsDiv(Protocol): - def __div__(self: T, __other: T) -> T: - ... - - -class BaseMetadata: - """Base class for all metadata. 
- - This exists mainly so that implementers - can do `isinstance(..., BaseMetadata)` while traversing field annotations. - """ - - __slots__ = () - - -@dataclass(frozen=True, **SLOTS) -class Gt(BaseMetadata): - """Gt(gt=x) implies that the value must be greater than x. - - It can be used with any type that supports the ``>`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - gt: SupportsGt - - -@dataclass(frozen=True, **SLOTS) -class Ge(BaseMetadata): - """Ge(ge=x) implies that the value must be greater than or equal to x. - - It can be used with any type that supports the ``>=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - ge: SupportsGe - - -@dataclass(frozen=True, **SLOTS) -class Lt(BaseMetadata): - """Lt(lt=x) implies that the value must be less than x. - - It can be used with any type that supports the ``<`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - lt: SupportsLt - - -@dataclass(frozen=True, **SLOTS) -class Le(BaseMetadata): - """Le(le=x) implies that the value must be less than or equal to x. - - It can be used with any type that supports the ``<=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - le: SupportsLe - - -@runtime_checkable -class GroupedMetadata(Protocol): - """A grouping of multiple BaseMetadata objects. - - `GroupedMetadata` on its own is not metadata and has no meaning. - All it the the constraint and metadata should be fully expressable - in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`. - - Concrete implementations should override `GroupedMetadata.__iter__()` - to add their own metadata. - For example: - - >>> @dataclass - >>> class Field(GroupedMetadata): - >>> gt: float | None = None - >>> description: str | None = None - ... 
- >>> def __iter__(self) -> Iterable[BaseMetadata]: - >>> if self.gt is not None: - >>> yield Gt(self.gt) - >>> if self.description is not None: - >>> yield Description(self.gt) - - Also see the implementation of `Interval` below for an example. - - Parsers should recognize this and unpack it so that it can be used - both with and without unpacking: - - - `Annotated[int, Field(...)]` (parser must unpack Field) - - `Annotated[int, *Field(...)]` (PEP-646) - """ # noqa: trailing-whitespace - - @property - def __is_annotated_types_grouped_metadata__(self) -> Literal[True]: - return True - - def __iter__(self) -> Iterator[BaseMetadata]: - ... - - if not TYPE_CHECKING: - __slots__ = () # allow subclasses to use slots - - def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None: - # Basic ABC like functionality without the complexity of an ABC - super().__init_subclass__(*args, **kwargs) - if cls.__iter__ is GroupedMetadata.__iter__: - raise TypeError("Can't subclass GroupedMetadata without implementing __iter__") - - def __iter__(self) -> Iterator[BaseMetadata]: # noqa: F811 - raise NotImplementedError # more helpful than "None has no attribute..." type errors - - -@dataclass(frozen=True, **KW_ONLY, **SLOTS) -class Interval(GroupedMetadata): - """Interval can express inclusive or exclusive bounds with a single object. - - It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which - are interpreted the same way as the single-bound constraints. 
- """ - - gt: Union[SupportsGt, None] = None - ge: Union[SupportsGe, None] = None - lt: Union[SupportsLt, None] = None - le: Union[SupportsLe, None] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack an Interval into zero or more single-bounds.""" - if self.gt is not None: - yield Gt(self.gt) - if self.ge is not None: - yield Ge(self.ge) - if self.lt is not None: - yield Lt(self.lt) - if self.le is not None: - yield Le(self.le) - - -@dataclass(frozen=True, **SLOTS) -class MultipleOf(BaseMetadata): - """MultipleOf(multiple_of=x) might be interpreted in two ways: - - 1. Python semantics, implying ``value % multiple_of == 0``, or - 2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of`` - - We encourage users to be aware of these two common interpretations, - and libraries to carefully document which they implement. - """ - - multiple_of: Union[SupportsDiv, SupportsMod] - - -@dataclass(frozen=True, **SLOTS) -class MinLen(BaseMetadata): - """ - MinLen() implies minimum inclusive length, - e.g. ``len(value) >= min_length``. - """ - - min_length: Annotated[int, Ge(0)] - - -@dataclass(frozen=True, **SLOTS) -class MaxLen(BaseMetadata): - """ - MaxLen() implies maximum inclusive length, - e.g. ``len(value) <= max_length``. - """ - - max_length: Annotated[int, Ge(0)] - - -@dataclass(frozen=True, **SLOTS) -class Len(GroupedMetadata): - """ - Len() implies that ``min_length <= len(value) <= max_length``. - - Upper bound may be omitted or ``None`` to indicate no upper length bound. - """ - - min_length: Annotated[int, Ge(0)] = 0 - max_length: Optional[Annotated[int, Ge(0)]] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack a Len into zone or more single-bounds.""" - if self.min_length > 0: - yield MinLen(self.min_length) - if self.max_length is not None: - yield MaxLen(self.max_length) - - -@dataclass(frozen=True, **SLOTS) -class Timezone(BaseMetadata): - """Timezone(tz=...) 
requires a datetime to be aware (or ``tz=None``, naive). - - ``Annotated[datetime, Timezone(None)]`` must be a naive datetime. - ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be - tz-aware but any timezone is allowed. - - You may also pass a specific timezone string or timezone object such as - ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that - you only allow a specific timezone, though we note that this is often - a symptom of poor design. - """ - - tz: Union[str, timezone, EllipsisType, None] - - -@dataclass(frozen=True, **SLOTS) -class Predicate(BaseMetadata): - """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values. - - Users should prefer statically inspectable metadata, but if you need the full - power and flexibility of arbitrary runtime predicates... here it is. - - We provide a few predefined predicates for common string constraints: - ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and - ``IsDigit = Predicate(str.isdigit)``. Users are encouraged to use methods which - can be given special handling, and avoid indirection like ``lambda s: s.lower()``. - - Some libraries might have special logic to handle certain predicates, e.g. by - checking for `str.isdigit` and using its presence to both call custom logic to - enforce digit-only strings, and customise some generated external schema. - - We do not specify what behaviour should be expected for predicates that raise - an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently - skip invalid constraints, or statically raise an error; or it might try calling it - and then propogate or discard the resulting exception. 
- """ - - func: Callable[[Any], bool] - - -@dataclass -class Not: - func: Callable[[Any], bool] - - def __call__(self, __v: Any) -> bool: - return not self.func(__v) - - -_StrType = TypeVar("_StrType", bound=str) - -LowerCase = Annotated[_StrType, Predicate(str.islower)] -""" -Return True if the string is a lowercase string, False otherwise. - -A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string. -""" # noqa: E501 -UpperCase = Annotated[_StrType, Predicate(str.isupper)] -""" -Return True if the string is an uppercase string, False otherwise. - -A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string. -""" # noqa: E501 -IsDigits = Annotated[_StrType, Predicate(str.isdigit)] -""" -Return True if the string is a digit string, False otherwise. - -A string is a digit string if all characters in the string are digits and there is at least one character in the string. -""" # noqa: E501 -IsAscii = Annotated[_StrType, Predicate(str.isascii)] -""" -Return True if all characters in the string are ASCII, False otherwise. - -ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too. 
-""" - -_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex]) -IsFinite = Annotated[_NumericType, Predicate(math.isfinite)] -"""Return True if x is neither an infinity nor a NaN, and False otherwise.""" -IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))] -"""Return True if x is one of infinity or NaN, and False otherwise""" -IsNan = Annotated[_NumericType, Predicate(math.isnan)] -"""Return True if x is a NaN (not a number), and False otherwise.""" -IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))] -"""Return True if x is anything but NaN (not a number), and False otherwise.""" -IsInfinite = Annotated[_NumericType, Predicate(math.isinf)] -"""Return True if x is a positive or negative infinity, and False otherwise.""" -IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))] -"""Return True if x is neither a positive or negative infinity, and False otherwise.""" - -try: - from typing_extensions import DocInfo, doc # type: ignore [attr-defined] -except ImportError: - - @dataclass(frozen=True, **SLOTS) - class DocInfo: # type: ignore [no-redef] - """ " - The return value of doc(), mainly to be used by tools that want to extract the - Annotated documentation at runtime. - """ - - documentation: str - """The documentation string passed to doc().""" - - def doc( - documentation: str, - ) -> DocInfo: - """ - Add documentation to a type annotation inside of Annotated. - - For example: - - >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ... 
- """ - return DocInfo(documentation) diff --git a/lib/annotated_types/py.typed b/lib/annotated_types/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/annotated_types/test_cases.py b/lib/annotated_types/test_cases.py deleted file mode 100644 index f54df700..00000000 --- a/lib/annotated_types/test_cases.py +++ /dev/null @@ -1,147 +0,0 @@ -import math -import sys -from datetime import date, datetime, timedelta, timezone -from decimal import Decimal -from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple - -if sys.version_info < (3, 9): - from typing_extensions import Annotated -else: - from typing import Annotated - -import annotated_types as at - - -class Case(NamedTuple): - """ - A test case for `annotated_types`. - """ - - annotation: Any - valid_cases: Iterable[Any] - invalid_cases: Iterable[Any] - - -def cases() -> Iterable[Case]: - # Gt, Ge, Lt, Le - yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Gt(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(date(2000, 1, 1))], - [date(2000, 1, 2), date(2000, 1, 3)], - [date(2000, 1, 1), date(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(Decimal('1.123'))], - [Decimal('1.1231'), Decimal('123')], - [Decimal('1.123'), Decimal('0')], - ) - - yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) - yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Ge(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(1998, 1, 1), datetime(1999, 12, 31)], - ) - - yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) - yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 
0.9)) - yield Case( - Annotated[datetime, at.Lt(datetime(2000, 1, 1))], - [datetime(1999, 12, 31), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) - yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Le(datetime(2000, 1, 1))], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - # Interval - yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) - yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) - yield Case( - Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(2000, 1, 4)], - ) - - yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) - yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) - - # lengths - - yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - - yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - - yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 
'x' * 10)) - yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) - - yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) - yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) - yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) - - # Timezone - - yield Case( - Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] - ) - yield Case( - Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] - ) - yield Case( - Annotated[datetime, at.Timezone(timezone.utc)], - [datetime(2000, 1, 1, tzinfo=timezone.utc)], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - yield Case( - Annotated[datetime, at.Timezone('Europe/London')], - [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - - # predicate types - - yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) - yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) - yield Case(at.IsDigits[str], ['123'], ['', 'ab', 'a1b2']) - yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) - - yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) - - yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf]) - yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23]) - yield Case(at.IsNan[float], [math.nan], [1.23, math.inf]) - yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan]) - yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23]) - yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf]) - - # check stacked predicates - yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], 
[math.inf], [-math.inf, 1.23, math.nan]) - - # doc - yield Case(Annotated[int, at.doc("A number")], [1, 2], []) - - # custom GroupedMetadata - class MyCustomGroupedMetadata(at.GroupedMetadata): - def __iter__(self) -> Iterator[at.Predicate]: - yield at.Predicate(lambda x: float(x).is_integer()) - - yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/lib/autocommand/autoasync.py b/lib/autocommand/autoasync.py index 688f7e05..3c8ebdcf 100644 --- a/lib/autocommand/autoasync.py +++ b/lib/autocommand/autoasync.py @@ -20,7 +20,7 @@ from functools import wraps from inspect import signature -async def _run_forever_coro(coro, args, kwargs, loop): +def _launch_forever_coro(coro, args, kwargs, loop): ''' This helper function launches an async main function that was tagged with forever=True. There are two possibilities: @@ -48,7 +48,7 @@ async def _run_forever_coro(coro, args, kwargs, loop): # forever=True feature from autoasync at some point in the future. thing = coro(*args, **kwargs) if iscoroutine(thing): - await thing + loop.create_task(thing) def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False): @@ -127,9 +127,7 @@ def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False): args, kwargs = bound_args.args, bound_args.kwargs if forever: - local_loop.create_task(_run_forever_coro( - coro, args, kwargs, local_loop - )) + _launch_forever_coro(coro, args, kwargs, local_loop) local_loop.run_forever() else: return local_loop.run_until_complete(coro(*args, **kwargs)) diff --git a/lib/cherrypy/_cplogging.py b/lib/cherrypy/_cplogging.py index bce1c87b..151d3b40 100644 --- a/lib/cherrypy/_cplogging.py +++ b/lib/cherrypy/_cplogging.py @@ -452,6 +452,6 @@ class WSGIErrorHandler(logging.Handler): class LazyRfc3339UtcTime(object): def __str__(self): - """Return utcnow() in RFC3339 UTC Format.""" - iso_formatted_now = datetime.datetime.utcnow().isoformat('T') - return f'{iso_formatted_now!s}Z' + """Return now() in 
RFC3339 UTC Format.""" + now = datetime.datetime.now() + return now.isoformat('T') + 'Z' diff --git a/lib/cherrypy/lib/cptools.py b/lib/cherrypy/lib/cptools.py index 13b4c567..613a8995 100644 --- a/lib/cherrypy/lib/cptools.py +++ b/lib/cherrypy/lib/cptools.py @@ -622,15 +622,13 @@ def autovary(ignore=None, debug=False): def convert_params(exception=ValueError, error=400): - """Convert request params based on function annotations. + """Convert request params based on function annotations, with error handling. - This function also processes errors that are subclasses of ``exception``. + exception + Exception class to catch. - :param BaseException exception: Exception class to catch. - :type exception: BaseException - - :param error: The HTTP status code to return to the client on failure. - :type error: int + status + The HTTP error code to return to the client on failure. """ request = cherrypy.serving.request types = request.handler.callable.__annotations__ diff --git a/lib/cherrypy/lib/profiler.py b/lib/cherrypy/lib/profiler.py index 7182278a..fccf2eb8 100644 --- a/lib/cherrypy/lib/profiler.py +++ b/lib/cherrypy/lib/profiler.py @@ -47,9 +47,7 @@ try: import pstats def new_func_strip_path(func_name): - """Add ``__init__`` modules' parents. - - This makes the profiler output more readable. 
+ """Make profiler output more readable by adding `__init__` modules' parents """ filename, line, name = func_name if filename.endswith('__init__.py'): diff --git a/lib/cherrypy/lib/reprconf.py b/lib/cherrypy/lib/reprconf.py index 536b9417..76381d7b 100644 --- a/lib/cherrypy/lib/reprconf.py +++ b/lib/cherrypy/lib/reprconf.py @@ -188,7 +188,7 @@ class Parser(configparser.ConfigParser): def dict_from_file(self, file): if hasattr(file, 'read'): - self.read_file(file) + self.readfp(file) else: self.read(file) return self.as_dict() diff --git a/lib/cherrypy/lib/static.py b/lib/cherrypy/lib/static.py index c1ad95f3..66a5a947 100644 --- a/lib/cherrypy/lib/static.py +++ b/lib/cherrypy/lib/static.py @@ -1,18 +1,19 @@ """Module with helpers for serving static files.""" -import mimetypes import os import platform import re import stat -import unicodedata +import mimetypes import urllib.parse +import unicodedata + from email.generator import _make_boundary as make_boundary from io import UnsupportedOperation import cherrypy from cherrypy._cpcompat import ntob -from cherrypy.lib import cptools, file_generator_limited, httputil +from cherrypy.lib import cptools, httputil, file_generator_limited def _setup_mimetypes(): @@ -184,10 +185,7 @@ def serve_fileobj(fileobj, content_type=None, disposition=None, name=None, def _serve_fileobj(fileobj, content_type, content_length, debug=False): - """Set ``response.body`` to the given file object, perhaps ranged. - - Internal helper. - """ + """Internal. Set response.body to the given file object, perhaps ranged.""" response = cherrypy.serving.response # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code diff --git a/lib/cherrypy/process/wspbus.py b/lib/cherrypy/process/wspbus.py index a60cd51e..1d2789b1 100644 --- a/lib/cherrypy/process/wspbus.py +++ b/lib/cherrypy/process/wspbus.py @@ -494,7 +494,7 @@ class Bus(object): "Cannot reconstruct command from '-c'. 
" 'Ref: https://github.com/cherrypy/cherrypy/issues/1545') except AttributeError: - """It looks Py_GetArgcArgv's completely absent in some environments + """It looks Py_GetArgcArgv is completely absent in some environments It is known, that there's no Py_GetArgcArgv in MS Windows and ``ctypes`` module is completely absent in Google AppEngine diff --git a/lib/cherrypy/test/test_http.py b/lib/cherrypy/test/test_http.py index 9a7e9331..a955be43 100644 --- a/lib/cherrypy/test/test_http.py +++ b/lib/cherrypy/test/test_http.py @@ -136,9 +136,6 @@ class HTTPTests(helper.CPWebCase): self.assertStatus(200) self.assertBody(b'Hello world!') - response.close() - c.close() - # Now send a message that has no Content-Length, but does send a body. # Verify that CP times out the socket and responds # with 411 Length Required. @@ -162,9 +159,6 @@ class HTTPTests(helper.CPWebCase): self.status = str(response.status) self.assertStatus(411) - response.close() - c.close() - def test_post_multipart(self): alphabet = 'abcdefghijklmnopqrstuvwxyz' # generate file contents for a large post @@ -190,9 +184,6 @@ class HTTPTests(helper.CPWebCase): parts = ['%s * 65536' % ch for ch in alphabet] self.assertBody(', '.join(parts)) - response.close() - c.close() - def test_post_filename_with_special_characters(self): """Testing that we can handle filenames with special characters. @@ -226,9 +217,6 @@ class HTTPTests(helper.CPWebCase): self.assertStatus(200) self.assertBody(fname) - response.close() - c.close() - def test_malformed_request_line(self): if getattr(cherrypy.server, 'using_apache', False): return self.skip('skipped due to known Apache differences...') @@ -276,9 +264,6 @@ class HTTPTests(helper.CPWebCase): self.body = response.fp.read(20) self.assertBody('Illegal header line.') - response.close() - c.close() - def test_http_over_https(self): if self.scheme != 'https': return self.skip('skipped (not running HTTPS)... 
') diff --git a/lib/cherrypy/test/test_iterator.py b/lib/cherrypy/test/test_iterator.py index 5bad59be..6600a78d 100644 --- a/lib/cherrypy/test/test_iterator.py +++ b/lib/cherrypy/test/test_iterator.py @@ -150,8 +150,6 @@ class IteratorTest(helper.CPWebCase): self.assertStatus(200) self.assertBody('0') - itr_conn.close() - # Now we do the same check with streaming - some classes will # be automatically closed, while others cannot. stream_counts = {} diff --git a/lib/cherrypy/test/test_logging.py b/lib/cherrypy/test/test_logging.py index 49d41d0a..2d4aa56f 100644 --- a/lib/cherrypy/test/test_logging.py +++ b/lib/cherrypy/test/test_logging.py @@ -1,6 +1,5 @@ """Basic tests for the CherryPy core: request handling.""" -import datetime import logging from cheroot.test import webtest @@ -198,33 +197,6 @@ def test_custom_log_format(log_tracker, monkeypatch, server): ) -def test_utc_in_timez(monkeypatch): - """Test that ``LazyRfc3339UtcTime`` is rendered as ``str`` using UTC timestamp.""" - utcoffset8_local_time_in_naive_utc = ( - datetime.datetime( - year=2020, - month=1, - day=1, - hour=1, - minute=23, - second=45, - tzinfo=datetime.timezone(datetime.timedelta(hours=8)), - ) - .astimezone(datetime.timezone.utc) - .replace(tzinfo=None) - ) - - class mock_datetime: - @classmethod - def utcnow(cls): - return utcoffset8_local_time_in_naive_utc - - monkeypatch.setattr('datetime.datetime', mock_datetime) - rfc3339_utc_time = str(cherrypy._cplogging.LazyRfc3339UtcTime()) - expected_time = '2019-12-31T17:23:45Z' - assert rfc3339_utc_time == expected_time - - def test_timez_log_format(log_tracker, monkeypatch, server): """Test a customized access_log_format string, which is a feature of _cplogging.LogManager.access().""" diff --git a/lib/inflect/__init__.py b/lib/inflect/__init__.py index b638c6b8..78d2e33c 100644 --- a/lib/inflect/__init__.py +++ b/lib/inflect/__init__.py @@ -3,6 +3,8 @@ inflect: english language inflection - correctly generate plurals, ordinals, indefinite 
articles - convert numbers to words +Copyright (C) 2010 Paul Dyson + Based upon the Perl module `Lingua::EN::Inflect `_. @@ -68,16 +70,11 @@ from typing import ( cast, Any, ) -from typing_extensions import Literal from numbers import Number -from pydantic import Field -from typing_extensions import Annotated - - -from .compat.pydantic1 import validate_call -from .compat.pydantic import same_method +from pydantic import Field, validate_arguments +from pydantic.typing import Annotated class UnknownClassicalModeError(Exception): @@ -108,6 +105,14 @@ class BadGenderError(Exception): pass +STDOUT_ON = False + + +def print3(txt: str) -> None: + if STDOUT_ON: + print(txt) + + def enclose(s: str) -> str: return f"(?:{s})" @@ -1722,44 +1727,66 @@ plverb_irregular_pres = { "is": "are", "was": "were", "were": "were", + "was": "were", + "have": "have", "have": "have", "has": "have", "do": "do", + "do": "do", "does": "do", } plverb_ambiguous_pres = { + "act": "act", "act": "act", "acts": "act", "blame": "blame", + "blame": "blame", "blames": "blame", "can": "can", + "can": "can", + "can": "can", "must": "must", + "must": "must", + "must": "must", + "fly": "fly", "fly": "fly", "flies": "fly", "copy": "copy", + "copy": "copy", "copies": "copy", "drink": "drink", + "drink": "drink", "drinks": "drink", "fight": "fight", + "fight": "fight", "fights": "fight", "fire": "fire", + "fire": "fire", "fires": "fire", "like": "like", + "like": "like", "likes": "like", "look": "look", + "look": "look", "looks": "look", "make": "make", + "make": "make", "makes": "make", "reach": "reach", + "reach": "reach", "reaches": "reach", "run": "run", + "run": "run", "runs": "run", "sink": "sink", + "sink": "sink", "sinks": "sink", "sleep": "sleep", + "sleep": "sleep", "sleeps": "sleep", "view": "view", + "view": "view", "views": "view", } @@ -1827,7 +1854,7 @@ pl_adj_poss_keys = re.compile(fr"^({enclose('|'.join(pl_adj_poss))})$", re.IGNOR A_abbrev = re.compile( r""" -^(?! FJO | [HLMNS]Y. 
| RY[EO] | SQU +(?! FJO | [HLMNS]Y. | RY[EO] | SQU | ( F[LR]? | [HL] | MN? | N | RH? | S[CHKLMNPTVW]? | X(YL)?) [AEIOU]) [FHLMNRSX][A-Z] """, @@ -2026,14 +2053,15 @@ Falsish = Any # ideally, falsish would only validate on bool(value) is False class engine: def __init__(self) -> None: + self.classical_dict = def_classical.copy() self.persistent_count: Optional[int] = None self.mill_count = 0 - self.pl_sb_user_defined: List[Optional[Word]] = [] - self.pl_v_user_defined: List[Optional[Word]] = [] - self.pl_adj_user_defined: List[Optional[Word]] = [] - self.si_sb_user_defined: List[Optional[Word]] = [] - self.A_a_user_defined: List[Optional[Word]] = [] + self.pl_sb_user_defined: List[str] = [] + self.pl_v_user_defined: List[str] = [] + self.pl_adj_user_defined: List[str] = [] + self.si_sb_user_defined: List[str] = [] + self.A_a_user_defined: List[str] = [] self.thegender = "neuter" self.__number_args: Optional[Dict[str, str]] = None @@ -2045,8 +2073,28 @@ class engine: def _number_args(self, val): self.__number_args = val - @validate_call - def defnoun(self, singular: Optional[Word], plural: Optional[Word]) -> int: + deprecated_methods = dict( + pl="plural", + plnoun="plural_noun", + plverb="plural_verb", + pladj="plural_adj", + sinoun="single_noun", + prespart="present_participle", + numwords="number_to_words", + plequal="compare", + plnounequal="compare_nouns", + plverbequal="compare_verbs", + pladjequal="compare_adjs", + wordlist="join", + ) + + def __getattr__(self, meth): + if meth in self.deprecated_methods: + print3(f"{meth}() deprecated, use {self.deprecated_methods[meth]}()") + raise DeprecationWarning + raise AttributeError + + def defnoun(self, singular: str, plural: str) -> int: """ Set the noun plural of singular to plural. 
@@ -2057,16 +2105,7 @@ class engine: self.si_sb_user_defined.extend((plural, singular)) return 1 - @validate_call - def defverb( - self, - s1: Optional[Word], - p1: Optional[Word], - s2: Optional[Word], - p2: Optional[Word], - s3: Optional[Word], - p3: Optional[Word], - ) -> int: + def defverb(self, s1: str, p1: str, s2: str, p2: str, s3: str, p3: str) -> int: """ Set the verb plurals for s1, s2 and s3 to p1, p2 and p3 respectively. @@ -2082,8 +2121,7 @@ class engine: self.pl_v_user_defined.extend((s1, p1, s2, p2, s3, p3)) return 1 - @validate_call - def defadj(self, singular: Optional[Word], plural: Optional[Word]) -> int: + def defadj(self, singular: str, plural: str) -> int: """ Set the adjective plural of singular to plural. @@ -2093,8 +2131,7 @@ class engine: self.pl_adj_user_defined.extend((singular, plural)) return 1 - @validate_call - def defa(self, pattern: Optional[Word]) -> int: + def defa(self, pattern: str) -> int: """ Define the indefinite article as 'a' for words matching pattern. @@ -2103,8 +2140,7 @@ class engine: self.A_a_user_defined.extend((pattern, "a")) return 1 - @validate_call - def defan(self, pattern: Optional[Word]) -> int: + def defan(self, pattern: str) -> int: """ Define the indefinite article as 'an' for words matching pattern. 
@@ -2113,7 +2149,7 @@ class engine: self.A_a_user_defined.extend((pattern, "an")) return 1 - def checkpat(self, pattern: Optional[Word]) -> None: + def checkpat(self, pattern: Optional[str]) -> None: """ check for errors in a regex pattern """ @@ -2122,15 +2158,16 @@ class engine: try: re.match(pattern, "") except re.error: - raise BadUserDefinedPatternError(pattern) + print3(f"\nBad user-defined singular pattern:\n\t{pattern}\n") + raise BadUserDefinedPatternError - def checkpatplural(self, pattern: Optional[Word]) -> None: + def checkpatplural(self, pattern: str) -> None: """ check for errors in a regex replace pattern """ return - @validate_call + @validate_arguments def ud_match(self, word: Word, wordlist: Sequence[Optional[Word]]) -> Optional[str]: for i in range(len(wordlist) - 2, -2, -2): # backwards through even elements mo = re.search(fr"^{wordlist[i]}$", word, re.IGNORECASE) @@ -2270,7 +2307,7 @@ class engine: # 0. PERFORM GENERAL INFLECTIONS IN A STRING - @validate_call + @validate_arguments def inflect(self, text: Word) -> str: """ Perform inflections in a string. @@ -2347,7 +2384,7 @@ class engine: else: return "", "", "" - @validate_call + @validate_arguments def plural(self, text: Word, count: Optional[Union[str, int, Any]] = None) -> str: """ Return the plural of text. 
@@ -2371,7 +2408,7 @@ class engine: ) return f"{pre}{plural}{post}" - @validate_call + @validate_arguments def plural_noun( self, text: Word, count: Optional[Union[str, int, Any]] = None ) -> str: @@ -2392,7 +2429,7 @@ class engine: plural = self.postprocess(word, self._plnoun(word, count)) return f"{pre}{plural}{post}" - @validate_call + @validate_arguments def plural_verb( self, text: Word, count: Optional[Union[str, int, Any]] = None ) -> str: @@ -2416,7 +2453,7 @@ class engine: ) return f"{pre}{plural}{post}" - @validate_call + @validate_arguments def plural_adj( self, text: Word, count: Optional[Union[str, int, Any]] = None ) -> str: @@ -2437,7 +2474,7 @@ class engine: plural = self.postprocess(word, self._pl_special_adjective(word, count) or word) return f"{pre}{plural}{post}" - @validate_call + @validate_arguments def compare(self, word1: Word, word2: Word) -> Union[str, bool]: """ compare word1 and word2 for equality regardless of plurality @@ -2460,15 +2497,15 @@ class engine: >>> compare('egg', '') Traceback (most recent call last): ... - pydantic...ValidationError: ... - ... - ...at least 1 characters... + pydantic.error_wrappers.ValidationError: 1 validation error for Compare + word2 + ensure this value has at least 1 characters... 
""" norms = self.plural_noun, self.plural_verb, self.plural_adj results = (self._plequal(word1, word2, norm) for norm in norms) return next(filter(None, results), False) - @validate_call + @validate_arguments def compare_nouns(self, word1: Word, word2: Word) -> Union[str, bool]: """ compare word1 and word2 for equality regardless of plurality @@ -2484,7 +2521,7 @@ class engine: """ return self._plequal(word1, word2, self.plural_noun) - @validate_call + @validate_arguments def compare_verbs(self, word1: Word, word2: Word) -> Union[str, bool]: """ compare word1 and word2 for equality regardless of plurality @@ -2500,7 +2537,7 @@ class engine: """ return self._plequal(word1, word2, self.plural_verb) - @validate_call + @validate_arguments def compare_adjs(self, word1: Word, word2: Word) -> Union[str, bool]: """ compare word1 and word2 for equality regardless of plurality @@ -2516,13 +2553,13 @@ class engine: """ return self._plequal(word1, word2, self.plural_adj) - @validate_call + @validate_arguments def singular_noun( self, text: Word, count: Optional[Union[int, str, Any]] = None, gender: Optional[str] = None, - ) -> Union[str, Literal[False]]: + ) -> Union[str, bool]: """ Return the singular of text, where text is a plural noun. 
@@ -2574,12 +2611,12 @@ class engine: return "s:p" self.classical_dict = classval.copy() - if same_method(pl, self.plural) or same_method(pl, self.plural_noun): + if pl == self.plural or pl == self.plural_noun: if self._pl_check_plurals_N(word1, word2): return "p:p" if self._pl_check_plurals_N(word2, word1): return "p:p" - if same_method(pl, self.plural) or same_method(pl, self.plural_adj): + if pl == self.plural or pl == self.plural_adj: if self._pl_check_plurals_adj(word1, word2): return "p:p" return False @@ -3229,11 +3266,11 @@ class engine: if words.last in si_sb_irregular_caps: llen = len(words.last) - return f"{word[:-llen]}{si_sb_irregular_caps[words.last]}" + return "{}{}".format(word[:-llen], si_sb_irregular_caps[words.last]) if words.last.lower() in si_sb_irregular: llen = len(words.last.lower()) - return f"{word[:-llen]}{si_sb_irregular[words.last.lower()]}" + return "{}{}".format(word[:-llen], si_sb_irregular[words.last.lower()]) dash_split = words.lowered.split("-") if (" ".join(dash_split[-2:])).lower() in si_sb_irregular_compound: @@ -3304,6 +3341,7 @@ class engine: # HANDLE INCOMPLETELY ASSIMILATED IMPORTS if self.classical_dict["ancient"]: + if words.lowered[-6:] == "trices": return word[:-3] + "x" if words.lowered[-4:] in ("eaux", "ieux"): @@ -3421,6 +3459,7 @@ class engine: # HANDLE ...o if words.lowered[-2:] == "os": + if words.last.lower() in si_sb_U_o_os_complete: return word[:-1] @@ -3450,7 +3489,7 @@ class engine: # ADJECTIVES - @validate_call + @validate_arguments def a(self, text: Word, count: Optional[Union[int, str, Any]] = 1) -> str: """ Return the appropriate indefinite article followed by text. @@ -3531,7 +3570,7 @@ class engine: # 2. 
TRANSLATE ZERO-QUANTIFIED $word TO "no plural($word)" - @validate_call + @validate_arguments def no(self, text: Word, count: Optional[Union[int, str]] = None) -> str: """ If count is 0, no, zero or nil, return 'no' followed by the plural @@ -3569,7 +3608,7 @@ class engine: # PARTICIPLES - @validate_call + @validate_arguments def present_participle(self, word: Word) -> str: """ Return the present participle for word. @@ -3588,31 +3627,31 @@ class engine: # NUMERICAL INFLECTIONS - @validate_call(config=dict(arbitrary_types_allowed=True)) - def ordinal(self, num: Union[Number, Word]) -> str: + @validate_arguments + def ordinal(self, num: Union[int, Word]) -> str: # noqa: C901 """ Return the ordinal of num. - >>> ordinal = engine().ordinal - >>> ordinal(1) - '1st' - >>> ordinal('one') - 'first' + num can be an integer or text + + e.g. ordinal(1) returns '1st' + ordinal('one') returns 'first' + """ if DIGIT.match(str(num)): - if isinstance(num, (float, int)) and int(num) == num: + if isinstance(num, (int, float)): n = int(num) else: if "." in str(num): try: # numbers after decimal, # so only need last one for ordinal - n = int(str(num)[-1]) + n = int(num[-1]) except ValueError: # ends with '.', so need to use whole string - n = int(str(num)[:-1]) + n = int(num[:-1]) else: - n = int(num) # type: ignore + n = int(num) try: post = nth[n % 100] except KeyError: @@ -3621,7 +3660,7 @@ class engine: else: # Mad props to Damian Conway (?) 
whose ordinal() # algorithm is type-bendy enough to foil MyPy - str_num: str = num # type: ignore[assignment] + str_num: str = num # type: ignore[assignment] mo = ordinal_suff.search(str_num) if mo: post = ordinal[mo.group(1)] @@ -3632,6 +3671,7 @@ class engine: def millfn(self, ind: int = 0) -> str: if ind > len(mill) - 1: + print3("number out of range") raise NumOutOfRangeError return mill[ind] @@ -3747,7 +3787,7 @@ class engine: num = ONE_DIGIT_WORD.sub(self.unitsub, num, 1) return num - @validate_call(config=dict(arbitrary_types_allowed=True)) # noqa: C901 + @validate_arguments(config=dict(arbitrary_types_allowed=True)) # noqa: C901 def number_to_words( # noqa: C901 self, num: Union[Number, Word], @@ -3899,7 +3939,7 @@ class engine: # Join words with commas and a trailing 'and' (when appropriate)... - @validate_call + @validate_arguments def join( self, words: Optional[Sequence[Word]], diff --git a/lib/inflect/compat/pydantic.py b/lib/inflect/compat/pydantic.py deleted file mode 100644 index d777564a..00000000 --- a/lib/inflect/compat/pydantic.py +++ /dev/null @@ -1,19 +0,0 @@ -class ValidateCallWrapperWrapper: - def __init__(self, wrapped): - self.orig = wrapped - - def __eq__(self, other): - return self.raw_function == other.raw_function - - @property - def raw_function(self): - return getattr(self.orig, 'raw_function') or self.orig - - -def same_method(m1, m2) -> bool: - """ - Return whether m1 and m2 are the same method. - - Workaround for pydantic/pydantic#6390. 
- """ - return ValidateCallWrapperWrapper(m1) == ValidateCallWrapperWrapper(m2) diff --git a/lib/inflect/compat/pydantic1.py b/lib/inflect/compat/pydantic1.py deleted file mode 100644 index 8262fdcf..00000000 --- a/lib/inflect/compat/pydantic1.py +++ /dev/null @@ -1,8 +0,0 @@ -try: - from pydantic import validate_call # type: ignore -except ImportError: - # Pydantic 1 - from pydantic import validate_arguments as validate_call # type: ignore - - -__all__ = ['validate_call'] diff --git a/lib/isapi/README.txt b/lib/isapi/README.txt deleted file mode 100644 index dc528624..00000000 --- a/lib/isapi/README.txt +++ /dev/null @@ -1,7 +0,0 @@ -A Python ISAPI extension. Contributed by Phillip Frantz, and is -Copyright 2002-2003 by Blackdog Software Pty Ltd. - -See the 'samples' directory, and particularly samples\README.txt - -You can find documentation in the PyWin32.chm file that comes with pywin32 - -you can open this from Pythonwin->Help, or from the start menu. \ No newline at end of file diff --git a/lib/isapi/__init__.py b/lib/isapi/__init__.py deleted file mode 100644 index 71823616..00000000 --- a/lib/isapi/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -# The Python ISAPI package. - - -# Exceptions thrown by the DLL framework. -class ISAPIError(Exception): - def __init__(self, errno, strerror=None, funcname=None): - # named attributes match IOError etc. - self.errno = errno - self.strerror = strerror - self.funcname = funcname - Exception.__init__(self, errno, strerror, funcname) - - def __str__(self): - if self.strerror is None: - try: - import win32api - - self.strerror = win32api.FormatMessage(self.errno).strip() - except: - self.strerror = "no error message is available" - # str() looks like a win32api error. 
- return str((self.errno, self.strerror, self.funcname)) - - -class FilterError(ISAPIError): - pass - - -class ExtensionError(ISAPIError): - pass - - -# A little development aid - a filter or extension callback function can -# raise one of these exceptions, and the handler module will be reloaded. -# This means you can change your code without restarting IIS. -# After a reload, your filter/extension will have the GetFilterVersion/ -# GetExtensionVersion function called, but with None as the first arg. -class InternalReloadException(Exception): - pass diff --git a/lib/isapi/doc/isapi.html b/lib/isapi/doc/isapi.html deleted file mode 100644 index 03001a1b..00000000 --- a/lib/isapi/doc/isapi.html +++ /dev/null @@ -1,92 +0,0 @@ - - - -Introduction to Python ISAPI support - -

Introduction to Python ISAPI support

- -

See also

- -

Note: if you are viewing this documentation directly from disk, -most links in this document will fail - you can also find this document in the -CHM file that comes with pywin32, where the links will work - -

Introduction

-This documents Python support for hosting ISAPI exensions and filters inside -Microsoft Internet Information Server (IIS). It assumes a basic understanding -of the ISAPI filter and extension mechanism. -

-In summary, to implement a filter or extension, you provide a Python module -which defines a Filter and/or Extension class. Once your class has been -loaded, IIS/ISAPI will, via an extension DLL, call methods on your class. -

-A filter and a class instance need only provide 3 methods - for filters they -are called GetFilterVersion, HttpFilterProc and -TerminateFilter. For extensions they -are named GetExtensionVersion, HttpExtensionProc and -TerminateExtension. If you are familiar with writing ISAPI -extensions in C/C++, these names and their purpose will be familiar. -

-Most of the work is done in the HttpFilterProc and -HttpExtensionProc methods. These both take a single -parameter - an HTTP_FILTER_CONTEXT and -EXTENSION_CONTROL_BLOCK -object respectively. -

-In addition to these components, there is an 'isapi' package, containing -support facilities (base-classes, exceptions, etc) which can be leveraged -by the extension. - -

Base classes

-There are a number of base classes provided to make writing extensions a little -simpler. Of particular note is isapi.threaded_extension.ThreadPoolExtension. -This implements a thread-pool and informs IIS that the request is progressing -in the background. Your sub-class need only provide a Dispatch -method, which is called on one of the worker threads rather than the thread -that the request came in on. -

-There is base-class for a filter in isapi.simple, but there is no -equivilent threaded filter - filters work under a different model, where -background processing is not possible. -

Samples

-Please see the isapi/samples directory for some sample filters -and extensions. - -

Implementation

-A Python ISAPI filter extension consists of 2 main components: -
    -
  • A DLL used by ISAPI to interface with Python.
  • -
  • A Python script used by that DLL to implement the filter or extension -functionality
  • -
- -

Extension DLL

-The DLL is usually managed automatically by the isapi.install module. As the -Python script for the extension is installed, a generic DLL provided with -the isapi package is installed next to the script, and IIS configured to -use this DLL. -

-The name of the DLL always has the same base name as the Python script, but -with a leading underscore (_), and an extension of .dll. For example, the -sample "redirector.py" will, when installed, have "_redirector.dll" created -in the same directory. -

-The Python script may provide 2 entry points - methods named __FilterFactory__ -and __ExtensionFactory__, both taking no arguments and returning a filter or -extension object. - -

Using py2exe and the isapi package

-You can instruct py2exe to create a 'frozen' Python ISAPI filter/extension. -In this case, py2exe will create a package with everything you need in one -directory, and the Python source file embedded in the .zip file. -

-In general, you will want to build a seperate installation executable along -with the ISAPI extension. This executable will be built from the same script. -See the ISAPI sample in the py2exe distribution. diff --git a/lib/isapi/install.py b/lib/isapi/install.py deleted file mode 100644 index 154f82af..00000000 --- a/lib/isapi/install.py +++ /dev/null @@ -1,815 +0,0 @@ -"""Installation utilities for Python ISAPI filters and extensions.""" - -# this code adapted from "Tomcat JK2 ISAPI redirector", part of Apache -# Created July 2004, Mark Hammond. -import imp -import os -import shutil -import stat -import sys -import traceback - -import pythoncom -import win32api -import winerror -from win32com.client import Dispatch, GetObject -from win32com.client.gencache import EnsureDispatch, EnsureModule - -_APP_INPROC = 0 -_APP_OUTPROC = 1 -_APP_POOLED = 2 -_IIS_OBJECT = "IIS://LocalHost/W3SVC" -_IIS_SERVER = "IIsWebServer" -_IIS_WEBDIR = "IIsWebDirectory" -_IIS_WEBVIRTUALDIR = "IIsWebVirtualDir" -_IIS_FILTERS = "IIsFilters" -_IIS_FILTER = "IIsFilter" - -_DEFAULT_SERVER_NAME = "Default Web Site" -_DEFAULT_HEADERS = "X-Powered-By: Python" -_DEFAULT_PROTECTION = _APP_POOLED - -# Default is for 'execute' only access - ie, only the extension -# can be used. This can be overridden via your install script. -_DEFAULT_ACCESS_EXECUTE = True -_DEFAULT_ACCESS_READ = False -_DEFAULT_ACCESS_WRITE = False -_DEFAULT_ACCESS_SCRIPT = False -_DEFAULT_CONTENT_INDEXED = False -_DEFAULT_ENABLE_DIR_BROWSING = False -_DEFAULT_ENABLE_DEFAULT_DOC = False - -_extensions = [ext for ext, _, _ in imp.get_suffixes()] -is_debug_build = "_d.pyd" in _extensions - -this_dir = os.path.abspath(os.path.dirname(__file__)) - - -class FilterParameters: - Name = None - Description = None - Path = None - Server = None - # Params that control if/how AddExtensionFile is called. 
- AddExtensionFile = True - AddExtensionFile_Enabled = True - AddExtensionFile_GroupID = None # defaults to Name - AddExtensionFile_CanDelete = True - AddExtensionFile_Description = None # defaults to Description. - - def __init__(self, **kw): - self.__dict__.update(kw) - - -class VirtualDirParameters: - Name = None # Must be provided. - Description = None # defaults to Name - AppProtection = _DEFAULT_PROTECTION - Headers = _DEFAULT_HEADERS - Path = None # defaults to WWW root. - Type = _IIS_WEBVIRTUALDIR - AccessExecute = _DEFAULT_ACCESS_EXECUTE - AccessRead = _DEFAULT_ACCESS_READ - AccessWrite = _DEFAULT_ACCESS_WRITE - AccessScript = _DEFAULT_ACCESS_SCRIPT - ContentIndexed = _DEFAULT_CONTENT_INDEXED - EnableDirBrowsing = _DEFAULT_ENABLE_DIR_BROWSING - EnableDefaultDoc = _DEFAULT_ENABLE_DEFAULT_DOC - DefaultDoc = None # Only set in IIS if not None - ScriptMaps = [] - ScriptMapUpdate = "end" # can be 'start', 'end', 'replace' - Server = None - - def __init__(self, **kw): - self.__dict__.update(kw) - - def is_root(self): - "This virtual directory is a root directory if parent and name are blank" - parent, name = self.split_path() - return not parent and not name - - def split_path(self): - return split_path(self.Name) - - -class ScriptMapParams: - Extension = None - Module = None - Flags = 5 - Verbs = "" - # Params that control if/how AddExtensionFile is called. - AddExtensionFile = True - AddExtensionFile_Enabled = True - AddExtensionFile_GroupID = None # defaults to Name - AddExtensionFile_CanDelete = True - AddExtensionFile_Description = None # defaults to Description. 
- - def __init__(self, **kw): - self.__dict__.update(kw) - - def __str__(self): - "Format this parameter suitable for IIS" - items = [self.Extension, self.Module, self.Flags] - # IIS gets upset if there is a trailing verb comma, but no verbs - if self.Verbs: - items.append(self.Verbs) - items = [str(item) for item in items] - return ",".join(items) - - -class ISAPIParameters: - ServerName = _DEFAULT_SERVER_NAME - # Description = None - Filters = [] - VirtualDirs = [] - - def __init__(self, **kw): - self.__dict__.update(kw) - - -verbose = 1 # The level - 0 is quiet. - - -def log(level, what): - if verbose >= level: - print(what) - - -# Convert an ADSI COM exception to the Win32 error code embedded in it. -def _GetWin32ErrorCode(com_exc): - hr = com_exc.hresult - # If we have more details in the 'excepinfo' struct, use it. - if com_exc.excepinfo: - hr = com_exc.excepinfo[-1] - if winerror.HRESULT_FACILITY(hr) != winerror.FACILITY_WIN32: - raise - return winerror.SCODE_CODE(hr) - - -class InstallationError(Exception): - pass - - -class ItemNotFound(InstallationError): - pass - - -class ConfigurationError(InstallationError): - pass - - -def FindPath(options, server, name): - if name.lower().startswith("iis://"): - return name - else: - if name and name[0] != "/": - name = "/" + name - return FindWebServer(options, server) + "/ROOT" + name - - -def LocateWebServerPath(description): - """ - Find an IIS web server whose name or comment matches the provided - description (case-insensitive). - - >>> LocateWebServerPath('Default Web Site') # doctest: +SKIP - - or - - >>> LocateWebServerPath('1') #doctest: +SKIP - """ - assert len(description) >= 1, "Server name or comment is required" - iis = GetObject(_IIS_OBJECT) - description = description.lower().strip() - for site in iis: - # Name is generally a number, but no need to assume that. 
- site_attributes = [ - getattr(site, attr, "").lower().strip() - for attr in ("Name", "ServerComment") - ] - if description in site_attributes: - return site.AdsPath - msg = "No web sites match the description '%s'" % description - raise ItemNotFound(msg) - - -def GetWebServer(description=None): - """ - Load the web server instance (COM object) for a given instance - or description. - If None is specified, the default website is retrieved (indicated - by the identifier 1. - """ - description = description or "1" - path = LocateWebServerPath(description) - server = LoadWebServer(path) - return server - - -def LoadWebServer(path): - try: - server = GetObject(path) - except pythoncom.com_error as details: - msg = details.strerror - if exc.excepinfo and exc.excepinfo[2]: - msg = exc.excepinfo[2] - msg = "WebServer %s: %s" % (path, msg) - raise ItemNotFound(msg) - return server - - -def FindWebServer(options, server_desc): - """ - Legacy function to allow options to define a .server property - to override the other parameter. Use GetWebServer instead. - """ - # options takes precedence - server_desc = options.server or server_desc - # make sure server_desc is unicode (could be mbcs if passed in - # sys.argv). - if server_desc and not isinstance(server_desc, str): - server_desc = server_desc.decode("mbcs") - - # get the server (if server_desc is None, the default site is acquired) - server = GetWebServer(server_desc) - return server.adsPath - - -def split_path(path): - """ - Get the parent path and basename. - - >>> split_path('/') - ['', ''] - - >>> split_path('') - ['', ''] - - >>> split_path('foo') - ['', 'foo'] - - >>> split_path('/foo') - ['', 'foo'] - - >>> split_path('/foo/bar') - ['/foo', 'bar'] - - >>> split_path('foo/bar') - ['/foo', 'bar'] - """ - - if not path.startswith("/"): - path = "/" + path - return path.rsplit("/", 1) - - -def _CreateDirectory(iis_dir, name, params): - # We used to go to lengths to keep an existing virtual directory - # in place. 
However, in some cases the existing directories got - # into a bad state, and an update failed to get them working. - # So we nuke it first. If this is a problem, we could consider adding - # a --keep-existing option. - try: - # Also seen the Class change to a generic IISObject - so nuke - # *any* existing object, regardless of Class - assert name.strip("/"), "mustn't delete the root!" - iis_dir.Delete("", name) - log(2, "Deleted old directory '%s'" % (name,)) - except pythoncom.com_error: - pass - - newDir = iis_dir.Create(params.Type, name) - log(2, "Creating new directory '%s' in %s..." % (name, iis_dir.Name)) - - friendly = params.Description or params.Name - newDir.AppFriendlyName = friendly - - # Note that the new directory won't be visible in the IIS UI - # unless the directory exists on the filesystem. - try: - path = params.Path or iis_dir.Path - newDir.Path = path - except AttributeError: - # If params.Type is IIS_WEBDIRECTORY, an exception is thrown - pass - newDir.AppCreate2(params.AppProtection) - # XXX - note that these Headers only work in IIS6 and earlier. IIS7 - # only supports them on the w3svc node - not even on individial sites, - # let alone individual extensions in the site! 
- if params.Headers: - newDir.HttpCustomHeaders = params.Headers - - log(2, "Setting directory options...") - newDir.AccessExecute = params.AccessExecute - newDir.AccessRead = params.AccessRead - newDir.AccessWrite = params.AccessWrite - newDir.AccessScript = params.AccessScript - newDir.ContentIndexed = params.ContentIndexed - newDir.EnableDirBrowsing = params.EnableDirBrowsing - newDir.EnableDefaultDoc = params.EnableDefaultDoc - if params.DefaultDoc is not None: - newDir.DefaultDoc = params.DefaultDoc - newDir.SetInfo() - return newDir - - -def CreateDirectory(params, options): - _CallHook(params, "PreInstall", options) - if not params.Name: - raise ConfigurationError("No Name param") - parent, name = params.split_path() - target_dir = GetObject(FindPath(options, params.Server, parent)) - - if not params.is_root(): - target_dir = _CreateDirectory(target_dir, name, params) - - AssignScriptMaps(params.ScriptMaps, target_dir, params.ScriptMapUpdate) - - _CallHook(params, "PostInstall", options, target_dir) - log(1, "Configured Virtual Directory: %s" % (params.Name,)) - return target_dir - - -def AssignScriptMaps(script_maps, target, update="replace"): - """Updates IIS with the supplied script map information. 
- - script_maps is a list of ScriptMapParameter objects - - target is an IIS Virtual Directory to assign the script maps to - - update is a string indicating how to update the maps, one of ('start', - 'end', or 'replace') - """ - # determine which function to use to assign script maps - script_map_func = "_AssignScriptMaps" + update.capitalize() - try: - script_map_func = eval(script_map_func) - except NameError: - msg = "Unknown ScriptMapUpdate option '%s'" % update - raise ConfigurationError(msg) - # use the str method to format the script maps for IIS - script_maps = [str(s) for s in script_maps] - # call the correct function - script_map_func(target, script_maps) - target.SetInfo() - - -def get_unique_items(sequence, reference): - "Return items in sequence that can't be found in reference." - return tuple([item for item in sequence if item not in reference]) - - -def _AssignScriptMapsReplace(target, script_maps): - target.ScriptMaps = script_maps - - -def _AssignScriptMapsEnd(target, script_maps): - unique_new_maps = get_unique_items(script_maps, target.ScriptMaps) - target.ScriptMaps = target.ScriptMaps + unique_new_maps - - -def _AssignScriptMapsStart(target, script_maps): - unique_new_maps = get_unique_items(script_maps, target.ScriptMaps) - target.ScriptMaps = unique_new_maps + target.ScriptMaps - - -def CreateISAPIFilter(filterParams, options): - server = FindWebServer(options, filterParams.Server) - _CallHook(filterParams, "PreInstall", options) - try: - filters = GetObject(server + "/Filters") - except pythoncom.com_error as exc: - # Brand new sites don't have the '/Filters' collection - create it. - # Any errors other than 'not found' we shouldn't ignore. 
- if ( - winerror.HRESULT_FACILITY(exc.hresult) != winerror.FACILITY_WIN32 - or winerror.HRESULT_CODE(exc.hresult) != winerror.ERROR_PATH_NOT_FOUND - ): - raise - server_ob = GetObject(server) - filters = server_ob.Create(_IIS_FILTERS, "Filters") - filters.FilterLoadOrder = "" - filters.SetInfo() - - # As for VirtualDir, delete an existing one. - assert filterParams.Name.strip("/"), "mustn't delete the root!" - try: - filters.Delete(_IIS_FILTER, filterParams.Name) - log(2, "Deleted old filter '%s'" % (filterParams.Name,)) - except pythoncom.com_error: - pass - newFilter = filters.Create(_IIS_FILTER, filterParams.Name) - log(2, "Created new ISAPI filter...") - assert os.path.isfile(filterParams.Path) - newFilter.FilterPath = filterParams.Path - newFilter.FilterDescription = filterParams.Description - newFilter.SetInfo() - load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b] - if filterParams.Name not in load_order: - load_order.append(filterParams.Name) - filters.FilterLoadOrder = ",".join(load_order) - filters.SetInfo() - _CallHook(filterParams, "PostInstall", options, newFilter) - log(1, "Configured Filter: %s" % (filterParams.Name,)) - return newFilter - - -def DeleteISAPIFilter(filterParams, options): - _CallHook(filterParams, "PreRemove", options) - server = FindWebServer(options, filterParams.Server) - ob_path = server + "/Filters" - try: - filters = GetObject(ob_path) - except pythoncom.com_error as details: - # failure to open the filters just means a totally clean IIS install - # (IIS5 at least has no 'Filters' key when freshly installed). - log(2, "ISAPI filter path '%s' did not exist." % (ob_path,)) - return - try: - assert filterParams.Name.strip("/"), "mustn't delete the root!" 
- filters.Delete(_IIS_FILTER, filterParams.Name) - log(2, "Deleted ISAPI filter '%s'" % (filterParams.Name,)) - except pythoncom.com_error as details: - rc = _GetWin32ErrorCode(details) - if rc != winerror.ERROR_PATH_NOT_FOUND: - raise - log(2, "ISAPI filter '%s' did not exist." % (filterParams.Name,)) - # Remove from the load order - load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b] - if filterParams.Name in load_order: - load_order.remove(filterParams.Name) - filters.FilterLoadOrder = ",".join(load_order) - filters.SetInfo() - _CallHook(filterParams, "PostRemove", options) - log(1, "Deleted Filter: %s" % (filterParams.Name,)) - - -def _AddExtensionFile(module, def_groupid, def_desc, params, options): - group_id = params.AddExtensionFile_GroupID or def_groupid - desc = params.AddExtensionFile_Description or def_desc - try: - ob = GetObject(_IIS_OBJECT) - ob.AddExtensionFile( - module, - params.AddExtensionFile_Enabled, - group_id, - params.AddExtensionFile_CanDelete, - desc, - ) - log(2, "Added extension file '%s' (%s)" % (module, desc)) - except (pythoncom.com_error, AttributeError) as details: - # IIS5 always fails. Probably should upgrade this to - # complain more loudly if IIS6 fails. - log(2, "Failed to add extension file '%s': %s" % (module, details)) - - -def AddExtensionFiles(params, options): - """Register the modules used by the filters/extensions as a trusted - 'extension module' - required by the default IIS6 security settings.""" - # Add each module only once. 
- added = {} - for vd in params.VirtualDirs: - for smp in vd.ScriptMaps: - if smp.Module not in added and smp.AddExtensionFile: - _AddExtensionFile(smp.Module, vd.Name, vd.Description, smp, options) - added[smp.Module] = True - - for fd in params.Filters: - if fd.Path not in added and fd.AddExtensionFile: - _AddExtensionFile(fd.Path, fd.Name, fd.Description, fd, options) - added[fd.Path] = True - - -def _DeleteExtensionFileRecord(module, options): - try: - ob = GetObject(_IIS_OBJECT) - ob.DeleteExtensionFileRecord(module) - log(2, "Deleted extension file record for '%s'" % module) - except (pythoncom.com_error, AttributeError) as details: - log(2, "Failed to remove extension file '%s': %s" % (module, details)) - - -def DeleteExtensionFileRecords(params, options): - deleted = {} # only remove each .dll once. - for vd in params.VirtualDirs: - for smp in vd.ScriptMaps: - if smp.Module not in deleted and smp.AddExtensionFile: - _DeleteExtensionFileRecord(smp.Module, options) - deleted[smp.Module] = True - - for filter_def in params.Filters: - if filter_def.Path not in deleted and filter_def.AddExtensionFile: - _DeleteExtensionFileRecord(filter_def.Path, options) - deleted[filter_def.Path] = True - - -def CheckLoaderModule(dll_name): - suffix = "" - if is_debug_build: - suffix = "_d" - template = os.path.join(this_dir, "PyISAPI_loader" + suffix + ".dll") - if not os.path.isfile(template): - raise ConfigurationError("Template loader '%s' does not exist" % (template,)) - # We can't do a simple "is newer" check, as the DLL is specific to the - # Python version. So we check the date-time and size are identical, - # and skip the copy in that case. 
- src_stat = os.stat(template) - try: - dest_stat = os.stat(dll_name) - except os.error: - same = 0 - else: - same = ( - src_stat[stat.ST_SIZE] == dest_stat[stat.ST_SIZE] - and src_stat[stat.ST_MTIME] == dest_stat[stat.ST_MTIME] - ) - if not same: - log(2, "Updating %s->%s" % (template, dll_name)) - shutil.copyfile(template, dll_name) - shutil.copystat(template, dll_name) - else: - log(2, "%s is up to date." % (dll_name,)) - - -def _CallHook(ob, hook_name, options, *extra_args): - func = getattr(ob, hook_name, None) - if func is not None: - args = (ob, options) + extra_args - func(*args) - - -def Install(params, options): - _CallHook(params, "PreInstall", options) - for vd in params.VirtualDirs: - CreateDirectory(vd, options) - - for filter_def in params.Filters: - CreateISAPIFilter(filter_def, options) - - AddExtensionFiles(params, options) - - _CallHook(params, "PostInstall", options) - - -def RemoveDirectory(params, options): - if params.is_root(): - return - try: - directory = GetObject(FindPath(options, params.Server, params.Name)) - except pythoncom.com_error as details: - rc = _GetWin32ErrorCode(details) - if rc != winerror.ERROR_PATH_NOT_FOUND: - raise - log(2, "VirtualDirectory '%s' did not exist" % params.Name) - directory = None - if directory is not None: - # Be robust should IIS get upset about unloading. - try: - directory.AppUnLoad() - except: - exc_val = sys.exc_info()[1] - log(2, "AppUnLoad() for %s failed: %s" % (params.Name, exc_val)) - # Continue trying to delete it. 
- try: - parent = GetObject(directory.Parent) - parent.Delete(directory.Class, directory.Name) - log(1, "Deleted Virtual Directory: %s" % (params.Name,)) - except: - exc_val = sys.exc_info()[1] - log(1, "Failed to remove directory %s: %s" % (params.Name, exc_val)) - - -def RemoveScriptMaps(vd_params, options): - "Remove script maps from the already installed virtual directory" - parent, name = vd_params.split_path() - target_dir = GetObject(FindPath(options, vd_params.Server, parent)) - installed_maps = list(target_dir.ScriptMaps) - for _map in map(str, vd_params.ScriptMaps): - if _map in installed_maps: - installed_maps.remove(_map) - target_dir.ScriptMaps = installed_maps - target_dir.SetInfo() - - -def Uninstall(params, options): - _CallHook(params, "PreRemove", options) - - DeleteExtensionFileRecords(params, options) - - for vd in params.VirtualDirs: - _CallHook(vd, "PreRemove", options) - - RemoveDirectory(vd, options) - if vd.is_root(): - # if this is installed to the root virtual directory, we can't delete it - # so remove the script maps. - RemoveScriptMaps(vd, options) - - _CallHook(vd, "PostRemove", options) - - for filter_def in params.Filters: - DeleteISAPIFilter(filter_def, options) - _CallHook(params, "PostRemove", options) - - -# Patch up any missing module names in the params, replacing them with -# the DLL name that hosts this extension/filter. -def _PatchParamsModule(params, dll_name, file_must_exist=True): - if file_must_exist: - if not os.path.isfile(dll_name): - raise ConfigurationError("%s does not exist" % (dll_name,)) - - # Patch up all references to the DLL. - for f in params.Filters: - if f.Path is None: - f.Path = dll_name - for d in params.VirtualDirs: - for sm in d.ScriptMaps: - if sm.Module is None: - sm.Module = dll_name - - -def GetLoaderModuleName(mod_name, check_module=None): - # find the name of the DLL hosting us. - # By default, this is "_{module_base_name}.dll" - if hasattr(sys, "frozen"): - # What to do? 
The .dll knows its name, but this is likely to be - # executed via a .exe, which does not know. - base, ext = os.path.splitext(mod_name) - path, base = os.path.split(base) - # handle the common case of 'foo.exe'/'foow.exe' - if base.endswith("w"): - base = base[:-1] - # For py2exe, we have '_foo.dll' as the standard pyisapi loader - but - # 'foo.dll' is what we use (it just delegates). - # So no leading '_' on the installed name. - dll_name = os.path.abspath(os.path.join(path, base + ".dll")) - else: - base, ext = os.path.splitext(mod_name) - path, base = os.path.split(base) - dll_name = os.path.abspath(os.path.join(path, "_" + base + ".dll")) - # Check we actually have it. - if check_module is None: - check_module = not hasattr(sys, "frozen") - if check_module: - CheckLoaderModule(dll_name) - return dll_name - - -# Note the 'log' params to these 'builtin' args - old versions of pywin32 -# didn't log at all in this function (by intent; anyone calling this was -# responsible). So existing code that calls this function with the old -# signature (ie, without a 'log' param) still gets the same behaviour as -# before... 
- - -def InstallModule(conf_module_name, params, options, log=lambda *args: None): - "Install the extension" - if not hasattr(sys, "frozen"): - conf_module_name = os.path.abspath(conf_module_name) - if not os.path.isfile(conf_module_name): - raise ConfigurationError("%s does not exist" % (conf_module_name,)) - - loader_dll = GetLoaderModuleName(conf_module_name) - _PatchParamsModule(params, loader_dll) - Install(params, options) - log(1, "Installation complete.") - - -def UninstallModule(conf_module_name, params, options, log=lambda *args: None): - "Remove the extension" - loader_dll = GetLoaderModuleName(conf_module_name, False) - _PatchParamsModule(params, loader_dll, False) - Uninstall(params, options) - log(1, "Uninstallation complete.") - - -standard_arguments = { - "install": InstallModule, - "remove": UninstallModule, -} - - -def build_usage(handler_map): - docstrings = [handler.__doc__ for handler in handler_map.values()] - all_args = dict(zip(iter(handler_map.keys()), docstrings)) - arg_names = "|".join(iter(all_args.keys())) - usage_string = "%prog [options] [" + arg_names + "]\n" - usage_string += "commands:\n" - for arg, desc in all_args.items(): - usage_string += " %-10s: %s" % (arg, desc) + "\n" - return usage_string[:-1] - - -def MergeStandardOptions(options, params): - """ - Take an options object generated by the command line and merge - the values into the IISParameters object. - """ - pass - - -# We support 2 ways of extending our command-line/install support. -# * Many of the installation items allow you to specify "PreInstall", -# "PostInstall", "PreRemove" and "PostRemove" hooks -# All hooks are called with the 'params' object being operated on, and -# the 'optparser' options for this session (ie, the command-line options) -# PostInstall for VirtualDirectories and Filters both have an additional -# param - the ADSI object just created. 
-# * You can pass your own option parser for us to use, and/or define a map -# with your own custom arg handlers. It is a map of 'arg'->function. -# The function is called with (options, log_fn, arg). The function's -# docstring is used in the usage output. -def HandleCommandLine( - params, - argv=None, - conf_module_name=None, - default_arg="install", - opt_parser=None, - custom_arg_handlers={}, -): - """Perform installation or removal of an ISAPI filter or extension. - - This module handles standard command-line options and configuration - information, and installs, removes or updates the configuration of an - ISAPI filter or extension. - - You must pass your configuration information in params - all other - arguments are optional, and allow you to configure the installation - process. - """ - global verbose - from optparse import OptionParser - - argv = argv or sys.argv - if not conf_module_name: - conf_module_name = sys.argv[0] - # convert to a long name so that if we were somehow registered with - # the "short" version but unregistered with the "long" version we - # still work (that will depend on exactly how the installer was - # started) - try: - conf_module_name = win32api.GetLongPathName(conf_module_name) - except win32api.error as exc: - log( - 2, - "Couldn't determine the long name for %r: %s" % (conf_module_name, exc), - ) - - if opt_parser is None: - # Build our own parser. - parser = OptionParser(usage="") - else: - # The caller is providing their own filter, presumably with their - # own options all setup. - parser = opt_parser - - # build a usage string if we don't have one. - if not parser.get_usage(): - all_handlers = standard_arguments.copy() - all_handlers.update(custom_arg_handlers) - parser.set_usage(build_usage(all_handlers)) - - # allow the user to use uninstall as a synonym for remove if it wasn't - # defined by the custom arg handlers. 
- all_handlers.setdefault("uninstall", all_handlers["remove"]) - - parser.add_option( - "-q", - "--quiet", - action="store_false", - dest="verbose", - default=True, - help="don't print status messages to stdout", - ) - parser.add_option( - "-v", - "--verbosity", - action="count", - dest="verbose", - default=1, - help="increase the verbosity of status messages", - ) - parser.add_option( - "", - "--server", - action="store", - help="Specifies the IIS server to install/uninstall on." - " Default is '%s/1'" % (_IIS_OBJECT,), - ) - - (options, args) = parser.parse_args(argv[1:]) - MergeStandardOptions(options, params) - verbose = options.verbose - if not args: - args = [default_arg] - try: - for arg in args: - handler = all_handlers[arg] - handler(conf_module_name, params, options, log) - except (ItemNotFound, InstallationError) as details: - if options.verbose > 1: - traceback.print_exc() - print("%s: %s" % (details.__class__.__name__, details)) - except KeyError: - parser.error("Invalid arg '%s'" % arg) diff --git a/lib/isapi/isapicon.py b/lib/isapi/isapicon.py deleted file mode 100644 index 20de1a44..00000000 --- a/lib/isapi/isapicon.py +++ /dev/null @@ -1,120 +0,0 @@ -"""Constants needed by ISAPI filters and extensions.""" -# ====================================================================== -# Copyright 2002-2003 by Blackdog Software Pty Ltd. -# -# All Rights Reserved -# -# Permission to use, copy, modify, and distribute this software and -# its documentation for any purpose and without fee is hereby -# granted, provided that the above copyright notice appear in all -# copies and that both that copyright notice and this permission -# notice appear in supporting documentation, and that the name of -# Blackdog Software not be used in advertising or publicity pertaining to -# distribution of the software without specific, written prior -# permission. 
-# -# BLACKDOG SOFTWARE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, -# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN -# NO EVENT SHALL BLACKDOG SOFTWARE BE LIABLE FOR ANY SPECIAL, INDIRECT OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN -# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -# ====================================================================== - -# HTTP reply codes - -HTTP_CONTINUE = 100 -HTTP_SWITCHING_PROTOCOLS = 101 -HTTP_PROCESSING = 102 -HTTP_OK = 200 -HTTP_CREATED = 201 -HTTP_ACCEPTED = 202 -HTTP_NON_AUTHORITATIVE = 203 -HTTP_NO_CONTENT = 204 -HTTP_RESET_CONTENT = 205 -HTTP_PARTIAL_CONTENT = 206 -HTTP_MULTI_STATUS = 207 -HTTP_MULTIPLE_CHOICES = 300 -HTTP_MOVED_PERMANENTLY = 301 -HTTP_MOVED_TEMPORARILY = 302 -HTTP_SEE_OTHER = 303 -HTTP_NOT_MODIFIED = 304 -HTTP_USE_PROXY = 305 -HTTP_TEMPORARY_REDIRECT = 307 -HTTP_BAD_REQUEST = 400 -HTTP_UNAUTHORIZED = 401 -HTTP_PAYMENT_REQUIRED = 402 -HTTP_FORBIDDEN = 403 -HTTP_NOT_FOUND = 404 -HTTP_METHOD_NOT_ALLOWED = 405 -HTTP_NOT_ACCEPTABLE = 406 -HTTP_PROXY_AUTHENTICATION_REQUIRED = 407 -HTTP_REQUEST_TIME_OUT = 408 -HTTP_CONFLICT = 409 -HTTP_GONE = 410 -HTTP_LENGTH_REQUIRED = 411 -HTTP_PRECONDITION_FAILED = 412 -HTTP_REQUEST_ENTITY_TOO_LARGE = 413 -HTTP_REQUEST_URI_TOO_LARGE = 414 -HTTP_UNSUPPORTED_MEDIA_TYPE = 415 -HTTP_RANGE_NOT_SATISFIABLE = 416 -HTTP_EXPECTATION_FAILED = 417 -HTTP_UNPROCESSABLE_ENTITY = 422 -HTTP_INTERNAL_SERVER_ERROR = 500 -HTTP_NOT_IMPLEMENTED = 501 -HTTP_BAD_GATEWAY = 502 -HTTP_SERVICE_UNAVAILABLE = 503 -HTTP_GATEWAY_TIME_OUT = 504 -HTTP_VERSION_NOT_SUPPORTED = 505 -HTTP_VARIANT_ALSO_VARIES = 506 - -HSE_STATUS_SUCCESS = 1 -HSE_STATUS_SUCCESS_AND_KEEP_CONN = 2 -HSE_STATUS_PENDING = 3 -HSE_STATUS_ERROR = 4 - -SF_NOTIFY_SECURE_PORT = 0x00000001 -SF_NOTIFY_NONSECURE_PORT = 0x00000002 
-SF_NOTIFY_READ_RAW_DATA = 0x00008000 -SF_NOTIFY_PREPROC_HEADERS = 0x00004000 -SF_NOTIFY_AUTHENTICATION = 0x00002000 -SF_NOTIFY_URL_MAP = 0x00001000 -SF_NOTIFY_ACCESS_DENIED = 0x00000800 -SF_NOTIFY_SEND_RESPONSE = 0x00000040 -SF_NOTIFY_SEND_RAW_DATA = 0x00000400 -SF_NOTIFY_LOG = 0x00000200 -SF_NOTIFY_END_OF_REQUEST = 0x00000080 -SF_NOTIFY_END_OF_NET_SESSION = 0x00000100 - -SF_NOTIFY_ORDER_HIGH = 0x00080000 -SF_NOTIFY_ORDER_MEDIUM = 0x00040000 -SF_NOTIFY_ORDER_LOW = 0x00020000 -SF_NOTIFY_ORDER_DEFAULT = SF_NOTIFY_ORDER_LOW - -SF_NOTIFY_ORDER_MASK = ( - SF_NOTIFY_ORDER_HIGH | SF_NOTIFY_ORDER_MEDIUM | SF_NOTIFY_ORDER_LOW -) - -SF_STATUS_REQ_FINISHED = 134217728 # 0x8000000 -SF_STATUS_REQ_FINISHED_KEEP_CONN = 134217728 + 1 -SF_STATUS_REQ_NEXT_NOTIFICATION = 134217728 + 2 -SF_STATUS_REQ_HANDLED_NOTIFICATION = 134217728 + 3 -SF_STATUS_REQ_ERROR = 134217728 + 4 -SF_STATUS_REQ_READ_NEXT = 134217728 + 5 - -HSE_IO_SYNC = 0x00000001 # for WriteClient -HSE_IO_ASYNC = 0x00000002 # for WriteClient/TF/EU -HSE_IO_DISCONNECT_AFTER_SEND = 0x00000004 # for TF -HSE_IO_SEND_HEADERS = 0x00000008 # for TF -HSE_IO_NODELAY = 0x00001000 # turn off nagling -# These two are only used by VectorSend -HSE_IO_FINAL_SEND = 0x00000010 -HSE_IO_CACHE_RESPONSE = 0x00000020 - -HSE_EXEC_URL_NO_HEADERS = 0x02 -HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR = 0x04 -HSE_EXEC_URL_IGNORE_VALIDATION_AND_RANGE = 0x10 -HSE_EXEC_URL_DISABLE_CUSTOM_ERROR = 0x20 -HSE_EXEC_URL_SSI_CMD = 0x40 -HSE_EXEC_URL_HTTP_CACHE_ELIGIBLE = 0x80 diff --git a/lib/isapi/samples/README.txt b/lib/isapi/samples/README.txt deleted file mode 100644 index cff87587..00000000 --- a/lib/isapi/samples/README.txt +++ /dev/null @@ -1,20 +0,0 @@ -In this directory you will find examples of ISAPI filters and extensions. - -The filter loading mechanism works like this: -* IIS loads the special Python "loader" DLL. This DLL will generally have a - leading underscore as part of its name. 
-* This loader DLL looks for a Python module, by removing the first letter of - the DLL base name. - -This means that an ISAPI extension module consists of 2 key files - the loader -DLL (eg, "_MyIISModule.dll", and a Python module (which for this example -would be "MyIISModule.py") - -When you install an ISAPI extension, the installation code checks to see if -there is a loader DLL for your implementation file - if one does not exist, -or the standard loader is different, it is copied and renamed accordingly. - -We use this mechanism to provide the maximum separation between different -Python extensions installed on the same server - otherwise filter order and -other tricky IIS semantics would need to be replicated. Also, each filter -gets its own thread-pool, etc. diff --git a/lib/isapi/samples/advanced.py b/lib/isapi/samples/advanced.py deleted file mode 100644 index c10d0c80..00000000 --- a/lib/isapi/samples/advanced.py +++ /dev/null @@ -1,218 +0,0 @@ -# This extension demonstrates some advanced features of the Python ISAPI -# framework. -# We demonstrate: -# * Reloading your Python module without shutting down IIS (eg, when your -# .py implementation file changes.) -# * Custom command-line handling - both additional options and commands. -# * Using a query string - any part of the URL after a '?' is assumed to -# be "variable names" separated by '&' - we will print the values of -# these server variables. -# * If the tail portion of the URL is "ReportUnhealthy", IIS will be -# notified we are unhealthy via a HSE_REQ_REPORT_UNHEALTHY request. -# Whether this is acted upon depends on if the IIS health-checking -# tools are installed, but you should always see the reason written -# to the Windows event log - see the IIS documentation for more. 
- -import os -import stat -import sys - -from isapi import isapicon -from isapi.simple import SimpleExtension - -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# Notes on reloading -# If your HttpFilterProc or HttpExtensionProc functions raises -# 'isapi.InternalReloadException', the framework will not treat it -# as an error but instead will terminate your extension, reload your -# extension module, re-initialize the instance, and re-issue the request. -# The Initialize functions are called with None as their param. The -# return code from the terminate function is ignored. -# -# This is all the framework does to help you. It is up to your code -# when you raise this exception. This sample uses a Win32 "find -# notification". Whenever windows tells us one of the files in the -# directory has changed, we check if the time of our source-file has -# changed, and set a flag. Next imcoming request, we check the flag and -# raise the special exception if set. -# -# The end result is that the module is automatically reloaded whenever -# the source-file changes - you need take no further action to see your -# changes reflected in the running server. - -# The framework only reloads your module - if you have libraries you -# depend on and also want reloaded, you must arrange for this yourself. -# One way of doing this would be to special case the import of these -# modules. Eg: -# -- -# try: -# my_module = reload(my_module) # module already imported - reload it -# except NameError: -# import my_module # first time around - import it. -# -- -# When your module is imported for the first time, the NameError will -# be raised, and the module imported. When the ISAPI framework reloads -# your module, the existing module will avoid the NameError, and allow -# you to reload that module. 
- -import threading - -import win32con -import win32event -import win32file -import winerror - -from isapi import InternalReloadException - -try: - reload_counter += 1 -except NameError: - reload_counter = 0 - - -# A watcher thread that checks for __file__ changing. -# When it detects it, it simply sets "change_detected" to true. -class ReloadWatcherThread(threading.Thread): - def __init__(self): - self.change_detected = False - self.filename = __file__ - if self.filename.endswith("c") or self.filename.endswith("o"): - self.filename = self.filename[:-1] - self.handle = win32file.FindFirstChangeNotification( - os.path.dirname(self.filename), - False, # watch tree? - win32con.FILE_NOTIFY_CHANGE_LAST_WRITE, - ) - threading.Thread.__init__(self) - - def run(self): - last_time = os.stat(self.filename)[stat.ST_MTIME] - while 1: - try: - rc = win32event.WaitForSingleObject(self.handle, win32event.INFINITE) - win32file.FindNextChangeNotification(self.handle) - except win32event.error as details: - # handle closed - thread should terminate. - if details.winerror != winerror.ERROR_INVALID_HANDLE: - raise - break - this_time = os.stat(self.filename)[stat.ST_MTIME] - if this_time != last_time: - print("Detected file change - flagging for reload.") - self.change_detected = True - last_time = this_time - - def stop(self): - win32file.FindCloseChangeNotification(self.handle) - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. -class Extension(SimpleExtension): - "Python advanced sample Extension" - - def __init__(self): - self.reload_watcher = ReloadWatcherThread() - self.reload_watcher.start() - - def HttpExtensionProc(self, ecb): - # NOTE: If you use a ThreadPoolExtension, you must still perform - # this check in HttpExtensionProc - raising the exception from - # The "Dispatch" method will just cause the exception to be - # rendered to the browser. 
- if self.reload_watcher.change_detected: - print("Doing reload") - raise InternalReloadException - - url = ecb.GetServerVariable("UNICODE_URL") - if url.endswith("ReportUnhealthy"): - ecb.ReportUnhealthy("I'm a little sick") - - ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0) - print("", file=ecb) - - qs = ecb.GetServerVariable("QUERY_STRING") - if qs: - queries = qs.split("&") - print("

", file=ecb)
-            for q in queries:
-                val = ecb.GetServerVariable(q, "<no such variable>")
-                print("%s=%r" % (q, val), file=ecb)
-            print("

", file=ecb) - - print("This module has been imported", file=ecb) - print("%d times" % (reload_counter,), file=ecb) - print("", file=ecb) - ecb.close() - return isapicon.HSE_STATUS_SUCCESS - - def TerminateExtension(self, status): - self.reload_watcher.stop() - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -# Our special command line customization. -# Pre-install hook for our virtual directory. -def PreInstallDirectory(params, options): - # If the user used our special '--description' option, - # then we override our default. - if options.description: - params.Description = options.description - - -# Post install hook for our entire script -def PostInstall(params, options): - print() - print("The sample has been installed.") - print("Point your browser to /AdvancedPythonSample") - print("If you modify the source file and reload the page,") - print("you should see the reload counter increment") - - -# Handler for our custom 'status' argument. -def status_handler(options, log, arg): - "Query the status of something" - print("Everything seems to be fine!") - - -custom_arg_handlers = {"status": status_handler} - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters(PostInstall=PostInstall) - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="AdvancedPythonSample", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - # specify the pre-install hook. - PreInstall=PreInstallDirectory, - ) - params.VirtualDirs = [vd] - # Setup our custom option parser. - from optparse import OptionParser - - parser = OptionParser("") # blank usage, so isapi sets it. 
- parser.add_option( - "", - "--description", - action="store", - help="custom description to use for the virtual directory", - ) - - HandleCommandLine( - params, opt_parser=parser, custom_arg_handlers=custom_arg_handlers - ) diff --git a/lib/isapi/samples/redirector.py b/lib/isapi/samples/redirector.py deleted file mode 100644 index 40698bb2..00000000 --- a/lib/isapi/samples/redirector.py +++ /dev/null @@ -1,125 +0,0 @@ -# This is a sample ISAPI extension written in Python. -# -# Please see README.txt in this directory, and specifically the -# information about the "loader" DLL - installing this sample will create -# "_redirector.dll" in the current directory. The readme explains this. - -# Executing this script (or any server config script) will install the extension -# into your web server. As the server executes, the PyISAPI framework will load -# this module and create your Extension and Filter objects. - -# This is the simplest possible redirector (or proxy) we can write. The -# extension installs with a mask of '*' in the root of the site. -# As an added bonus though, we optionally show how, on IIS6 and later, we -# can use HSE_ERQ_EXEC_URL to ignore certain requests - in IIS5 and earlier -# we can only do this with an ISAPI filter - see redirector_with_filter for -# an example. If this sample is run on IIS5 or earlier it simply ignores -# any excludes. - -import sys - -from isapi import isapicon, threaded_extension - -try: - from urllib.request import urlopen -except ImportError: - # py3k spelling... - from urllib.request import urlopen - -import win32api - -# sys.isapidllhandle will exist when we are loaded by the IIS framework. -# In this case we redirect our output to the win32traceutil collector. -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# The site we are proxying. -proxy = "http://www.python.org" - -# Urls we exclude (ie, allow IIS to handle itself) - all are lowered, -# and these entries exist by default on Vista... 
-excludes = ["/iisstart.htm", "/welcome.png"] - - -# An "io completion" function, called when ecb.ExecURL completes... -def io_callback(ecb, url, cbIO, errcode): - # Get the status of our ExecURL - httpstatus, substatus, win32 = ecb.GetExecURLStatus() - print( - "ExecURL of %r finished with http status %d.%d, win32 status %d (%s)" - % (url, httpstatus, substatus, win32, win32api.FormatMessage(win32).strip()) - ) - # nothing more to do! - ecb.DoneWithSession() - - -# The ISAPI extension - handles all requests in the site. -class Extension(threaded_extension.ThreadPoolExtension): - "Python sample Extension" - - def Dispatch(self, ecb): - # Note that our ThreadPoolExtension base class will catch exceptions - # in our Dispatch method, and write the traceback to the client. - # That is perfect for this sample, so we don't catch our own. - # print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),) - url = ecb.GetServerVariable("URL").decode("ascii") - for exclude in excludes: - if url.lower().startswith(exclude): - print("excluding %s" % url) - if ecb.Version < 0x60000: - print("(but this is IIS5 or earlier - can't do 'excludes')") - else: - ecb.IOCompletion(io_callback, url) - ecb.ExecURL( - None, - None, - None, - None, - None, - isapicon.HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR, - ) - return isapicon.HSE_STATUS_PENDING - - new_url = proxy + url - print("Opening %s" % new_url) - fp = urlopen(new_url) - headers = fp.info() - # subtle py3k breakage: in py3k, str(headers) has normalized \r\n - # back to \n and also stuck an extra \n term. py2k leaves the - # \r\n from the server in tact and finishes with a single term. - if sys.version_info < (3, 0): - header_text = str(headers) + "\r\n" - else: - # take *all* trailing \n off, replace remaining with - # \r\n, then add the 2 trailing \r\n. 
- header_text = str(headers).rstrip("\n").replace("\n", "\r\n") + "\r\n\r\n" - ecb.SendResponseHeaders("200 OK", header_text, False) - ecb.WriteClient(fp.read()) - ecb.DoneWithSession() - print("Returned data from '%s'" % (new_url,)) - return isapicon.HSE_STATUS_SUCCESS - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters() - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="/", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/lib/isapi/samples/redirector_asynch.py b/lib/isapi/samples/redirector_asynch.py deleted file mode 100644 index 3c4b5e4f..00000000 --- a/lib/isapi/samples/redirector_asynch.py +++ /dev/null @@ -1,85 +0,0 @@ -# This is a sample ISAPI extension written in Python. - -# This is like the other 'redirector' samples, but uses asnch IO when writing -# back to the client (it does *not* use asynch io talking to the remote -# server!) - -import sys -import urllib.error -import urllib.parse -import urllib.request - -from isapi import isapicon, threaded_extension - -# sys.isapidllhandle will exist when we are loaded by the IIS framework. -# In this case we redirect our output to the win32traceutil collector. -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# The site we are proxying. -proxy = "http://www.python.org" - -# We synchronously read chunks of this size then asynchronously write them. -CHUNK_SIZE = 8192 - - -# The callback made when IIS completes the asynch write. 
-def io_callback(ecb, fp, cbIO, errcode): - print("IO callback", ecb, fp, cbIO, errcode) - chunk = fp.read(CHUNK_SIZE) - if chunk: - ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC) - # and wait for the next callback to say this chunk is done. - else: - # eof - say we are complete. - fp.close() - ecb.DoneWithSession() - - -# The ISAPI extension - handles all requests in the site. -class Extension(threaded_extension.ThreadPoolExtension): - "Python sample proxy server - asynch version." - - def Dispatch(self, ecb): - print('IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),)) - url = ecb.GetServerVariable("URL") - - new_url = proxy + url - print("Opening %s" % new_url) - fp = urllib.request.urlopen(new_url) - headers = fp.info() - ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False) - # now send the first chunk asynchronously - ecb.ReqIOCompletion(io_callback, fp) - chunk = fp.read(CHUNK_SIZE) - if chunk: - ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC) - return isapicon.HSE_STATUS_PENDING - # no data - just close things now. - ecb.DoneWithSession() - return isapicon.HSE_STATUS_SUCCESS - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters() - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. 
- sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="/", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/lib/isapi/samples/redirector_with_filter.py b/lib/isapi/samples/redirector_with_filter.py deleted file mode 100644 index a63b1db1..00000000 --- a/lib/isapi/samples/redirector_with_filter.py +++ /dev/null @@ -1,161 +0,0 @@ -# This is a sample configuration file for an ISAPI filter and extension -# written in Python. -# -# Please see README.txt in this directory, and specifically the -# information about the "loader" DLL - installing this sample will create -# "_redirector_with_filter.dll" in the current directory. The readme explains -# this. - -# Executing this script (or any server config script) will install the extension -# into your web server. As the server executes, the PyISAPI framework will load -# this module and create your Extension and Filter objects. - -# This sample provides sample redirector: -# It is implemented by a filter and an extension, so that some requests can -# be ignored. Compare with 'redirector_simple' which avoids the filter, but -# is unable to selectively ignore certain requests. -# The process is sample uses is: -# * The filter is installed globally, as all filters are. -# * A Virtual Directory named "python" is setup. This dir has our ISAPI -# extension as the only application, mapped to file-extension '*'. Thus, our -# extension handles *all* requests in this directory. -# The basic process is that the filter does URL rewriting, redirecting every -# URL to our Virtual Directory. Our extension then handles this request, -# forwarding the data from the proxied site. -# For example: -# * URL of "index.html" comes in. -# * Filter rewrites this to "/python/index.html" -# * Our extension sees the full "/python/index.html", removes the leading -# portion, and opens and forwards the remote URL. 
- - -# This sample is very small - it avoid most error handling, etc. It is for -# demonstration purposes only. - -import sys -import urllib.error -import urllib.parse -import urllib.request - -from isapi import isapicon, threaded_extension -from isapi.simple import SimpleFilter - -# sys.isapidllhandle will exist when we are loaded by the IIS framework. -# In this case we redirect our output to the win32traceutil collector. -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# The site we are proxying. -proxy = "http://www.python.org" -# The name of the virtual directory we install in, and redirect from. -virtualdir = "/python" - -# The key feature of this redirector over the simple redirector is that it -# can choose to ignore certain responses by having the filter not rewrite them -# to our virtual dir. For this sample, we just exclude the IIS help directory. - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. -class Extension(threaded_extension.ThreadPoolExtension): - "Python sample Extension" - - def Dispatch(self, ecb): - # Note that our ThreadPoolExtension base class will catch exceptions - # in our Dispatch method, and write the traceback to the client. - # That is perfect for this sample, so we don't catch our own. - # print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),) - url = ecb.GetServerVariable("URL") - if url.startswith(virtualdir): - new_url = proxy + url[len(virtualdir) :] - print("Opening", new_url) - fp = urllib.request.urlopen(new_url) - headers = fp.info() - ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False) - ecb.WriteClient(fp.read()) - ecb.DoneWithSession() - print("Returned data from '%s'!" % (new_url,)) - else: - # this should never happen - we should only see requests that - # start with our virtual directory name. - print("Not proxying '%s'" % (url,)) - - -# The ISAPI filter. 
-class Filter(SimpleFilter): - "Sample Python Redirector" - filter_flags = isapicon.SF_NOTIFY_PREPROC_HEADERS | isapicon.SF_NOTIFY_ORDER_DEFAULT - - def HttpFilterProc(self, fc): - # print "Filter Dispatch" - nt = fc.NotificationType - if nt != isapicon.SF_NOTIFY_PREPROC_HEADERS: - return isapicon.SF_STATUS_REQ_NEXT_NOTIFICATION - - pp = fc.GetData() - url = pp.GetHeader("url") - # print "URL is '%s'" % (url,) - prefix = virtualdir - if not url.startswith(prefix): - new_url = prefix + url - print("New proxied URL is '%s'" % (new_url,)) - pp.SetHeader("url", new_url) - # For the sake of demonstration, show how the FilterContext - # attribute is used. It always starts out life as None, and - # any assignments made are automatically decref'd by the - # framework during a SF_NOTIFY_END_OF_NET_SESSION notification. - if fc.FilterContext is None: - fc.FilterContext = 0 - fc.FilterContext += 1 - print("This is request number %d on this connection" % fc.FilterContext) - return isapicon.SF_STATUS_REQ_HANDLED_NOTIFICATION - else: - print("Filter ignoring URL '%s'" % (url,)) - - # Some older code that handled SF_NOTIFY_URL_MAP. - # ~ print "Have URL_MAP notify" - # ~ urlmap = fc.GetData() - # ~ print "URI is", urlmap.URL - # ~ print "Path is", urlmap.PhysicalPath - # ~ if urlmap.URL.startswith("/UC/"): - # ~ # Find the /UC/ in the physical path, and nuke it (except - # ~ # as the path is physical, it is \) - # ~ p = urlmap.PhysicalPath - # ~ pos = p.index("\\UC\\") - # ~ p = p[:pos] + p[pos+3:] - # ~ p = r"E:\src\pyisapi\webroot\PyTest\formTest.htm" - # ~ print "New path is", p - # ~ urlmap.PhysicalPath = p - - -# The entry points for the ISAPI extension. -def __FilterFactory__(): - return Filter() - - -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters() - # Setup all filters - these are global to the site. 
- params.Filters = [ - FilterParameters(Name="PythonRedirector", Description=Filter.__doc__), - ] - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name=virtualdir[1:], - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/lib/isapi/samples/test.py b/lib/isapi/samples/test.py deleted file mode 100644 index 5e4d899b..00000000 --- a/lib/isapi/samples/test.py +++ /dev/null @@ -1,195 +0,0 @@ -# This extension is used mainly for testing purposes - it is not -# designed to be a simple sample, but instead is a hotch-potch of things -# that attempts to exercise the framework. - -import os -import stat -import sys - -from isapi import isapicon -from isapi.simple import SimpleExtension - -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# We use the same reload support as 'advanced.py' demonstrates. -import threading - -import win32con -import win32event -import win32file -import winerror - -from isapi import InternalReloadException - - -# A watcher thread that checks for __file__ changing. -# When it detects it, it simply sets "change_detected" to true. -class ReloadWatcherThread(threading.Thread): - def __init__(self): - self.change_detected = False - self.filename = __file__ - if self.filename.endswith("c") or self.filename.endswith("o"): - self.filename = self.filename[:-1] - self.handle = win32file.FindFirstChangeNotification( - os.path.dirname(self.filename), - False, # watch tree? 
- win32con.FILE_NOTIFY_CHANGE_LAST_WRITE, - ) - threading.Thread.__init__(self) - - def run(self): - last_time = os.stat(self.filename)[stat.ST_MTIME] - while 1: - try: - rc = win32event.WaitForSingleObject(self.handle, win32event.INFINITE) - win32file.FindNextChangeNotification(self.handle) - except win32event.error as details: - # handle closed - thread should terminate. - if details.winerror != winerror.ERROR_INVALID_HANDLE: - raise - break - this_time = os.stat(self.filename)[stat.ST_MTIME] - if this_time != last_time: - print("Detected file change - flagging for reload.") - self.change_detected = True - last_time = this_time - - def stop(self): - win32file.FindCloseChangeNotification(self.handle) - - -def TransmitFileCallback(ecb, hFile, cbIO, errCode): - print("Transmit complete!") - ecb.close() - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. -class Extension(SimpleExtension): - "Python test Extension" - - def __init__(self): - self.reload_watcher = ReloadWatcherThread() - self.reload_watcher.start() - - def HttpExtensionProc(self, ecb): - # NOTE: If you use a ThreadPoolExtension, you must still perform - # this check in HttpExtensionProc - raising the exception from - # The "Dispatch" method will just cause the exception to be - # rendered to the browser. 
- if self.reload_watcher.change_detected: - print("Doing reload") - raise InternalReloadException - - if ecb.GetServerVariable("UNICODE_URL").endswith("test.py"): - file_flags = ( - win32con.FILE_FLAG_SEQUENTIAL_SCAN | win32con.FILE_FLAG_OVERLAPPED - ) - hfile = win32file.CreateFile( - __file__, - win32con.GENERIC_READ, - 0, - None, - win32con.OPEN_EXISTING, - file_flags, - None, - ) - flags = ( - isapicon.HSE_IO_ASYNC - | isapicon.HSE_IO_DISCONNECT_AFTER_SEND - | isapicon.HSE_IO_SEND_HEADERS - ) - # We pass hFile to the callback simply as a way of keeping it alive - # for the duration of the transmission - try: - ecb.TransmitFile( - TransmitFileCallback, - hfile, - int(hfile), - "200 OK", - 0, - 0, - None, - None, - flags, - ) - except: - # Errors keep this source file open! - hfile.Close() - raise - else: - # default response - ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0) - print("", file=ecb) - print("The root of this site is at", ecb.MapURLToPath("/"), file=ecb) - print("", file=ecb) - ecb.close() - return isapicon.HSE_STATUS_SUCCESS - - def TerminateExtension(self, status): - self.reload_watcher.stop() - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -# Our special command line customization. -# Pre-install hook for our virtual directory. -def PreInstallDirectory(params, options): - # If the user used our special '--description' option, - # then we override our default. - if options.description: - params.Description = options.description - - -# Post install hook for our entire script -def PostInstall(params, options): - print() - print("The sample has been installed.") - print("Point your browser to /PyISAPITest") - - -# Handler for our custom 'status' argument. 
-def status_handler(options, log, arg): - "Query the status of something" - print("Everything seems to be fine!") - - -custom_arg_handlers = {"status": status_handler} - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters(PostInstall=PostInstall) - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="PyISAPITest", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - # specify the pre-install hook. - PreInstall=PreInstallDirectory, - ) - params.VirtualDirs = [vd] - # Setup our custom option parser. - from optparse import OptionParser - - parser = OptionParser("") # blank usage, so isapi sets it. - parser.add_option( - "", - "--description", - action="store", - help="custom description to use for the virtual directory", - ) - - HandleCommandLine( - params, opt_parser=parser, custom_arg_handlers=custom_arg_handlers - ) diff --git a/lib/isapi/simple.py b/lib/isapi/simple.py deleted file mode 100644 index b453bbae..00000000 --- a/lib/isapi/simple.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Simple base-classes for extensions and filters. - -None of the filter and extension functions are considered 'optional' by the -framework. These base-classes provide simple implementations for the -Initialize and Terminate functions, allowing you to omit them, - -It is not necessary to use these base-classes - but if you don't, you -must ensure each of the required methods are implemented. 
-""" - - -class SimpleExtension: - "Base class for a simple ISAPI extension" - - def __init__(self): - pass - - def GetExtensionVersion(self, vi): - """Called by the ISAPI framework to get the extension version - - The default implementation uses the classes docstring to - set the extension description.""" - # nod to our reload capability - vi is None when we are reloaded. - if vi is not None: - vi.ExtensionDesc = self.__doc__ - - def HttpExtensionProc(self, control_block): - """Called by the ISAPI framework for each extension request. - - sub-classes must provide an implementation for this method. - """ - raise NotImplementedError("sub-classes should override HttpExtensionProc") - - def TerminateExtension(self, status): - """Called by the ISAPI framework as the extension terminates.""" - pass - - -class SimpleFilter: - "Base class for a a simple ISAPI filter" - filter_flags = None - - def __init__(self): - pass - - def GetFilterVersion(self, fv): - """Called by the ISAPI framework to get the extension version - - The default implementation uses the classes docstring to - set the extension description, and uses the classes - filter_flags attribute to set the ISAPI filter flags - you - must specify filter_flags in your class. - """ - if self.filter_flags is None: - raise RuntimeError("You must specify the filter flags") - # nod to our reload capability - fv is None when we are reloaded. - if fv is not None: - fv.Flags = self.filter_flags - fv.FilterDesc = self.__doc__ - - def HttpFilterProc(self, fc): - """Called by the ISAPI framework for each filter request. - - sub-classes must provide an implementation for this method. 
- """ - raise NotImplementedError("sub-classes should override HttpExtensionProc") - - def TerminateFilter(self, status): - """Called by the ISAPI framework as the filter terminates.""" - pass diff --git a/lib/isapi/test/README.txt b/lib/isapi/test/README.txt deleted file mode 100644 index 18643dd7..00000000 --- a/lib/isapi/test/README.txt +++ /dev/null @@ -1,3 +0,0 @@ -This is a directory for tests of the PyISAPI framework. - -For demos, please see the pyisapi 'samples' directory. \ No newline at end of file diff --git a/lib/isapi/test/extension_simple.py b/lib/isapi/test/extension_simple.py deleted file mode 100644 index 64bd71fd..00000000 --- a/lib/isapi/test/extension_simple.py +++ /dev/null @@ -1,119 +0,0 @@ -# This is an ISAPI extension purely for testing purposes. It is NOT -# a 'demo' (even though it may be useful!) -# -# Install this extension, then point your browser to: -# "http://localhost/pyisapi_test/test1" -# This will execute the method 'test1' below. See below for the list of -# test methods that are acceptable. - -import urllib.error -import urllib.parse -import urllib.request - -# If we have no console (eg, am running from inside IIS), redirect output -# somewhere useful - in this case, the standard win32 trace collector. -import win32api -import winerror - -from isapi import ExtensionError, isapicon, threaded_extension -from isapi.simple import SimpleFilter - -try: - win32api.GetConsoleTitle() -except win32api.error: - # No console - redirect - import win32traceutil - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. 
-class Extension(threaded_extension.ThreadPoolExtension): - "Python ISAPI Tester" - - def Dispatch(self, ecb): - print('Tester dispatching "%s"' % (ecb.GetServerVariable("URL"),)) - url = ecb.GetServerVariable("URL") - test_name = url.split("/")[-1] - meth = getattr(self, test_name, None) - if meth is None: - raise AttributeError("No test named '%s'" % (test_name,)) - result = meth(ecb) - if result is None: - # This means the test finalized everything - return - ecb.SendResponseHeaders("200 OK", "Content-type: text/html\r\n\r\n", False) - print("Finished running test ", test_name, "", file=ecb) - print("

", file=ecb)
-        print(result, file=ecb)
-        print("
", file=ecb) - print("", file=ecb) - ecb.DoneWithSession() - - def test1(self, ecb): - try: - ecb.GetServerVariable("foo bar") - raise RuntimeError("should have failed!") - except ExtensionError as err: - assert err.errno == winerror.ERROR_INVALID_INDEX, err - return "worked!" - - def test_long_vars(self, ecb): - qs = ecb.GetServerVariable("QUERY_STRING") - # Our implementation has a default buffer size of 8k - so we test - # the code that handles an overflow by ensuring there are more - # than 8k worth of chars in the URL. - expected_query = "x" * 8500 - if len(qs) == 0: - # Just the URL with no query part - redirect to myself, but with - # a huge query portion. - me = ecb.GetServerVariable("URL") - headers = "Location: " + me + "?" + expected_query + "\r\n\r\n" - ecb.SendResponseHeaders("301 Moved", headers) - ecb.DoneWithSession() - return None - if qs == expected_query: - return "Total length of variable is %d - test worked!" % (len(qs),) - else: - return "Unexpected query portion! Got %d chars, expected %d" % ( - len(qs), - len(expected_query), - ) - - def test_unicode_vars(self, ecb): - # We need to check that we are running IIS6! This seems the only - # effective way from an extension. - ver = float(ecb.GetServerVariable("SERVER_SOFTWARE").split("/")[1]) - if ver < 6.0: - return "This is IIS version %g - unicode only works in IIS6 and later" % ver - - us = ecb.GetServerVariable("UNICODE_SERVER_NAME") - if not isinstance(us, str): - raise RuntimeError("unexpected type!") - if us != str(ecb.GetServerVariable("SERVER_NAME")): - raise RuntimeError("Unicode and non-unicode values were not the same") - return "worked!" - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. 
- from isapi.install import * - - params = ISAPIParameters() - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="pyisapi_test", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/lib/isapi/threaded_extension.py b/lib/isapi/threaded_extension.py deleted file mode 100644 index b31c8c9e..00000000 --- a/lib/isapi/threaded_extension.py +++ /dev/null @@ -1,189 +0,0 @@ -"""An ISAPI extension base class implemented using a thread-pool.""" -# $Id$ - -import sys -import threading -import time -import traceback - -from pywintypes import OVERLAPPED -from win32event import INFINITE -from win32file import ( - CloseHandle, - CreateIoCompletionPort, - GetQueuedCompletionStatus, - PostQueuedCompletionStatus, -) -from win32security import SetThreadToken - -import isapi.simple -from isapi import ExtensionError, isapicon - -ISAPI_REQUEST = 1 -ISAPI_SHUTDOWN = 2 - - -class WorkerThread(threading.Thread): - def __init__(self, extension, io_req_port): - self.running = False - self.io_req_port = io_req_port - self.extension = extension - threading.Thread.__init__(self) - # We wait 15 seconds for a thread to terminate, but if it fails to, - # we don't want the process to hang at exit waiting for it... - self.setDaemon(True) - - def run(self): - self.running = True - while self.running: - errCode, bytes, key, overlapped = GetQueuedCompletionStatus( - self.io_req_port, INFINITE - ) - if key == ISAPI_SHUTDOWN and overlapped is None: - break - - # Let the parent extension handle the command. 
- dispatcher = self.extension.dispatch_map.get(key) - if dispatcher is None: - raise RuntimeError("Bad request '%s'" % (key,)) - - dispatcher(errCode, bytes, key, overlapped) - - def call_handler(self, cblock): - self.extension.Dispatch(cblock) - - -# A generic thread-pool based extension, using IO Completion Ports. -# Sub-classes can override one method to implement a simple extension, or -# may leverage the CompletionPort to queue their own requests, and implement a -# fully asynch extension. -class ThreadPoolExtension(isapi.simple.SimpleExtension): - "Base class for an ISAPI extension based around a thread-pool" - max_workers = 20 - worker_shutdown_wait = 15000 # 15 seconds for workers to quit... - - def __init__(self): - self.workers = [] - # extensible dispatch map, for sub-classes that need to post their - # own requests to the completion port. - # Each of these functions is called with the result of - # GetQueuedCompletionStatus for our port. - self.dispatch_map = { - ISAPI_REQUEST: self.DispatchConnection, - } - - def GetExtensionVersion(self, vi): - isapi.simple.SimpleExtension.GetExtensionVersion(self, vi) - # As per Q192800, the CompletionPort should be created with the number - # of processors, even if the number of worker threads is much larger. - # Passing 0 means the system picks the number. 
- self.io_req_port = CreateIoCompletionPort(-1, None, 0, 0) - # start up the workers - self.workers = [] - for i in range(self.max_workers): - worker = WorkerThread(self, self.io_req_port) - worker.start() - self.workers.append(worker) - - def HttpExtensionProc(self, control_block): - overlapped = OVERLAPPED() - overlapped.object = control_block - PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_REQUEST, overlapped) - return isapicon.HSE_STATUS_PENDING - - def TerminateExtension(self, status): - for worker in self.workers: - worker.running = False - for worker in self.workers: - PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_SHUTDOWN, None) - # wait for them to terminate - pity we aren't using 'native' threads - # as then we could do a smart wait - but now we need to poll.... - end_time = time.time() + self.worker_shutdown_wait / 1000 - alive = self.workers - while alive: - if time.time() > end_time: - # xxx - might be nice to log something here. - break - time.sleep(0.2) - alive = [w for w in alive if w.is_alive()] - self.dispatch_map = {} # break circles - CloseHandle(self.io_req_port) - - # This is the one operation the base class supports - a simple - # Connection request. We setup the thread-token, and dispatch to the - # sub-class's 'Dispatch' method. - def DispatchConnection(self, errCode, bytes, key, overlapped): - control_block = overlapped.object - # setup the correct user for this request - hRequestToken = control_block.GetImpersonationToken() - SetThreadToken(None, hRequestToken) - try: - try: - self.Dispatch(control_block) - except: - self.HandleDispatchError(control_block) - finally: - # reset the security context - SetThreadToken(None, None) - - def Dispatch(self, ecb): - """Overridden by the sub-class to handle connection requests. - - This class creates a thread-pool using a Windows completion port, - and dispatches requests via this port. 
Sub-classes can generally - implement each connection request using blocking reads and writes, and - the thread-pool will still provide decent response to the end user. - - The sub-class can set a max_workers attribute (default is 20). Note - that this generally does *not* mean 20 threads will all be concurrently - running, via the magic of Windows completion ports. - - There is no default implementation - sub-classes must implement this. - """ - raise NotImplementedError("sub-classes should override Dispatch") - - def HandleDispatchError(self, ecb): - """Handles errors in the Dispatch method. - - When a Dispatch method call fails, this method is called to handle - the exception. The default implementation formats the traceback - in the browser. - """ - ecb.HttpStatusCode = isapicon.HSE_STATUS_ERROR - # control_block.LogData = "we failed!" - exc_typ, exc_val, exc_tb = sys.exc_info() - limit = None - try: - try: - import cgi - - ecb.SendResponseHeaders( - "200 OK", "Content-type: text/html\r\n\r\n", False - ) - print(file=ecb) - print("

Traceback (most recent call last):

", file=ecb) - list = traceback.format_tb( - exc_tb, limit - ) + traceback.format_exception_only(exc_typ, exc_val) - print( - "
%s%s
" - % ( - cgi.escape("".join(list[:-1])), - cgi.escape(list[-1]), - ), - file=ecb, - ) - except ExtensionError: - # The client disconnected without reading the error body - - # its probably not a real browser at the other end, ignore it. - pass - except: - print("FAILED to render the error message!") - traceback.print_exc() - print("ORIGINAL extension error:") - traceback.print_exception(exc_typ, exc_val, exc_tb) - finally: - # holding tracebacks in a local of a frame that may itself be - # part of a traceback used to be evil and cause leaks! - exc_tb = None - ecb.DoneWithSession() diff --git a/lib/inflect/compat/__init__.py b/lib/jaraco/classes/__init__.py similarity index 100% rename from lib/inflect/compat/__init__.py rename to lib/jaraco/classes/__init__.py diff --git a/lib/jaraco/collections/__init__.py b/lib/jaraco/collections.py similarity index 88% rename from lib/jaraco/collections/__init__.py rename to lib/jaraco/collections.py index abedf002..db89b122 100644 --- a/lib/jaraco/collections/__init__.py +++ b/lib/jaraco/collections.py @@ -5,49 +5,23 @@ import itertools import copy import functools import random -from collections.abc import Container, Iterable, Mapping -from typing import Callable, Union +from jaraco.classes.properties import NonDataProperty import jaraco.text -_Matchable = Union[Callable, Container, Iterable, re.Pattern] - - -def _dispatch(obj: _Matchable) -> Callable: - # can't rely on singledispatch for Union[Container, Iterable] - # due to ambiguity - # (https://peps.python.org/pep-0443/#abstract-base-classes). 
- if isinstance(obj, re.Pattern): - return obj.fullmatch - if not isinstance(obj, Callable): # type: ignore - if not isinstance(obj, Container): - obj = set(obj) # type: ignore - obj = obj.__contains__ - return obj # type: ignore - - class Projection(collections.abc.Mapping): """ Project a set of keys over a mapping >>> sample = {'a': 1, 'b': 2, 'c': 3} >>> prj = Projection(['a', 'c', 'd'], sample) - >>> dict(prj) - {'a': 1, 'c': 3} - - Projection also accepts an iterable or callable or pattern. - - >>> iter_prj = Projection(iter('acd'), sample) - >>> call_prj = Projection(lambda k: ord(k) in (97, 99, 100), sample) - >>> pat_prj = Projection(re.compile(r'[acd]'), sample) - >>> prj == iter_prj == call_prj == pat_prj + >>> prj == {'a': 1, 'c': 3} True Keys should only appear if they were specified and exist in the space. - Order is retained. - >>> list(prj) + >>> sorted(list(prj.keys())) ['a', 'c'] Attempting to access a key not in the projection @@ -62,58 +36,119 @@ class Projection(collections.abc.Mapping): >>> target = {'a': 2, 'b': 2} >>> target.update(prj) - >>> target - {'a': 1, 'b': 2, 'c': 3} + >>> target == {'a': 1, 'b': 2, 'c': 3} + True - Projection keeps a reference to the original dict, so - modifying the original dict may modify the Projection. + Also note that Projection keeps a reference to the original dict, so + if you modify the original dict, that could modify the Projection. 
>>> del sample['a'] >>> dict(prj) {'c': 3} """ - def __init__(self, keys: _Matchable, space: Mapping): - self._match = _dispatch(keys) + def __init__(self, keys, space): + self._keys = tuple(keys) self._space = space def __getitem__(self, key): - if not self._match(key): + if key not in self._keys: raise KeyError(key) return self._space[key] - def _keys_resolved(self): - return filter(self._match, self._space) - def __iter__(self): - return self._keys_resolved() + return iter(set(self._keys).intersection(self._space)) def __len__(self): - return len(tuple(self._keys_resolved())) + return len(tuple(iter(self))) -class Mask(Projection): +class DictFilter(collections.abc.Mapping): """ - The inverse of a :class:`Projection`, masking out keys. + Takes a dict, and simulates a sub-dict based on the keys. >>> sample = {'a': 1, 'b': 2, 'c': 3} - >>> msk = Mask(['a', 'c', 'd'], sample) - >>> dict(msk) + >>> filtered = DictFilter(sample, ['a', 'c']) + >>> filtered == {'a': 1, 'c': 3} + True + >>> set(filtered.values()) == {1, 3} + True + >>> set(filtered.items()) == {('a', 1), ('c', 3)} + True + + One can also filter by a regular expression pattern + + >>> sample['d'] = 4 + >>> sample['ef'] = 5 + + Here we filter for only single-character keys + + >>> filtered = DictFilter(sample, include_pattern='.$') + >>> filtered == {'a': 1, 'b': 2, 'c': 3, 'd': 4} + True + + >>> filtered['e'] + Traceback (most recent call last): + ... + KeyError: 'e' + + >>> 'e' in filtered + False + + Pattern is useful for excluding keys with a prefix. + + >>> filtered = DictFilter(sample, include_pattern=r'(?![ace])') + >>> dict(filtered) + {'b': 2, 'd': 4} + + Also note that DictFilter keeps a reference to the original dict, so + if you modify the original dict, that could modify the filtered dict. 
+ + >>> del sample['d'] + >>> dict(filtered) {'b': 2} """ - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - # self._match = compose(operator.not_, self._match) - self._match = lambda key, orig=self._match: not orig(key) + def __init__(self, dict, include_keys=[], include_pattern=None): + self.dict = dict + self.specified_keys = set(include_keys) + if include_pattern is not None: + self.include_pattern = re.compile(include_pattern) + else: + # for performance, replace the pattern_keys property + self.pattern_keys = set() + + def get_pattern_keys(self): + keys = filter(self.include_pattern.match, self.dict.keys()) + return set(keys) + + pattern_keys = NonDataProperty(get_pattern_keys) + + @property + def include_keys(self): + return self.specified_keys | self.pattern_keys + + def __getitem__(self, i): + if i not in self.include_keys: + raise KeyError(i) + return self.dict[i] + + def __iter__(self): + return filter(self.include_keys.__contains__, self.dict.keys()) + + def __len__(self): + return len(list(self)) def dict_map(function, dictionary): """ - Return a new dict with function applied to values of dictionary. + dict_map is much like the built-in function map. It takes a dictionary + and applys a function to the values of that dictionary, returning a + new dictionary with the mapped values in the original keys. - >>> dict_map(lambda x: x+1, dict(a=1, b=2)) - {'a': 2, 'b': 3} + >>> d = dict_map(lambda x:x+1, dict(a=1, b=2)) + >>> d == dict(a=2,b=3) + True """ return dict((key, function(value)) for key, value in dictionary.items()) @@ -129,7 +164,7 @@ class RangeMap(dict): One may supply keyword parameters to be passed to the sort function used to sort keys (i.e. key, reverse) as sort_params. 
- Create a map that maps 1-3 -> 'a', 4-6 -> 'b' + Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b' >>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy >>> r[1], r[2], r[3], r[4], r[5], r[6] @@ -141,7 +176,7 @@ class RangeMap(dict): >>> r[4.5] 'b' - Notice that the way rangemap is defined, it must be open-ended + But you'll notice that the way rangemap is defined, it must be open-ended on one side. >>> r[0] @@ -244,7 +279,7 @@ class RangeMap(dict): return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item]) # some special values for the RangeMap - undefined_value = type('RangeValueUndefined', (), {})() + undefined_value = type(str('RangeValueUndefined'), (), {})() class Item(int): "RangeMap Item" @@ -259,7 +294,7 @@ def __identity(x): def sorted_items(d, key=__identity, reverse=False): """ - Return the items of the dictionary sorted by the keys. + Return the items of the dictionary sorted by the keys >>> sample = dict(foo=20, bar=42, baz=10) >>> tuple(sorted_items(sample)) @@ -272,7 +307,6 @@ def sorted_items(d, key=__identity, reverse=False): >>> tuple(sorted_items(sample, reverse=True)) (('foo', 20), ('baz', 10), ('bar', 42)) """ - # wrap the key func so it operates on the first element of each item def pairkey_key(item): return key(item[0]) @@ -441,7 +475,7 @@ class ItemsAsAttributes: Mix-in class to enable a mapping object to provide items as attributes. - >>> C = type('C', (dict, ItemsAsAttributes), dict()) + >>> C = type(str('C'), (dict, ItemsAsAttributes), dict()) >>> i = C() >>> i['foo'] = 'bar' >>> i.foo @@ -470,7 +504,7 @@ class ItemsAsAttributes: >>> missing_func = lambda self, key: 'missing item' >>> C = type( - ... 'C', + ... str('C'), ... (dict, ItemsAsAttributes), ... dict(__missing__ = missing_func), ... 
) diff --git a/lib/jaraco/collections/py.typed b/lib/jaraco/collections/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/jaraco/context.py b/lib/jaraco/context.py index b0d1ef37..87a4e3dc 100644 --- a/lib/jaraco/context.py +++ b/lib/jaraco/context.py @@ -5,18 +5,10 @@ import functools import tempfile import shutil import operator -import warnings @contextlib.contextmanager def pushd(dir): - """ - >>> tmp_path = getfixture('tmp_path') - >>> with pushd(tmp_path): - ... assert os.getcwd() == os.fspath(tmp_path) - >>> assert os.getcwd() != os.fspath(tmp_path) - """ - orig = os.getcwd() os.chdir(dir) try: @@ -37,8 +29,6 @@ def tarball_context(url, target_dir=None, runner=None, pushd=pushd): target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '') if runner is None: runner = functools.partial(subprocess.check_call, shell=True) - else: - warnings.warn("runner parameter is deprecated", DeprecationWarning) # In the tar command, use --strip-components=1 to strip the first path and # then # use -C to cause the files to be extracted to {target_dir}. This ensures @@ -58,15 +48,6 @@ def tarball_context(url, target_dir=None, runner=None, pushd=pushd): def infer_compression(url): """ Given a URL or filename, infer the compression code for tar. - - >>> infer_compression('http://foo/bar.tar.gz') - 'z' - >>> infer_compression('http://foo/bar.tgz') - 'z' - >>> infer_compression('file.bz') - 'j' - >>> infer_compression('file.xz') - 'J' """ # cheat and just assume it's the last two characters compression_indicator = url[-2:] @@ -80,12 +61,6 @@ def temp_dir(remover=shutil.rmtree): """ Create a temporary directory context. Pass a custom remover to override the removal behavior. - - >>> import pathlib - >>> with temp_dir() as the_dir: - ... assert os.path.isdir(the_dir) - ... 
_ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents') - >>> assert not os.path.exists(the_dir) """ temp_dir = tempfile.mkdtemp() try: @@ -115,12 +90,6 @@ def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir): @contextlib.contextmanager def null(): - """ - A null context suitable to stand in for a meaningful context. - - >>> with null() as value: - ... assert value is None - """ yield @@ -143,10 +112,6 @@ class ExceptionTrap: ... raise ValueError("1 + 1 is not 3") >>> bool(trap) True - >>> trap.value - ValueError('1 + 1 is not 3') - >>> trap.tb - >>> with ExceptionTrap(ValueError) as trap: ... raise Exception() @@ -246,43 +211,3 @@ class suppress(contextlib.suppress, contextlib.ContextDecorator): ... {}[''] >>> key_error() """ - - -class on_interrupt(contextlib.ContextDecorator): - """ - Replace a KeyboardInterrupt with SystemExit(1) - - >>> def do_interrupt(): - ... raise KeyboardInterrupt() - >>> on_interrupt('error')(do_interrupt)() - Traceback (most recent call last): - ... - SystemExit: 1 - >>> on_interrupt('error', code=255)(do_interrupt)() - Traceback (most recent call last): - ... - SystemExit: 255 - >>> on_interrupt('suppress')(do_interrupt)() - >>> with __import__('pytest').raises(KeyboardInterrupt): - ... 
on_interrupt('ignore')(do_interrupt)() - """ - - def __init__( - self, - action='error', - # py3.7 compat - # /, - code=1, - ): - self.action = action - self.code = code - - def __enter__(self): - return self - - def __exit__(self, exctype, excinst, exctb): - if exctype is not KeyboardInterrupt or self.action == 'ignore': - return - elif self.action == 'error': - raise SystemExit(self.code) from excinst - return self.action == 'suppress' diff --git a/lib/jaraco/functools/__init__.py b/lib/jaraco/functools.py similarity index 84% rename from lib/jaraco/functools/__init__.py rename to lib/jaraco/functools.py index ca6c22fa..43c009f9 100644 --- a/lib/jaraco/functools/__init__.py +++ b/lib/jaraco/functools.py @@ -1,4 +1,4 @@ -import collections.abc +import collections import functools import inspect import itertools @@ -9,6 +9,11 @@ import warnings import more_itertools +from typing import Callable, TypeVar + + +CallableT = TypeVar("CallableT", bound=Callable[..., object]) + def compose(*funcs): """ @@ -34,6 +39,24 @@ def compose(*funcs): return functools.reduce(compose_two, funcs) +def method_caller(method_name, *args, **kwargs): + """ + Return a function that will call a named method on the + target object with optional positional and keyword + arguments. + + >>> lower = method_caller('lower') + >>> lower('MyString') + 'mystring' + """ + + def call_method(target): + func = getattr(target, method_name) + return func(*args, **kwargs) + + return call_method + + def once(func): """ Decorate func so it's only ever called the first time. @@ -76,7 +99,12 @@ def once(func): return wrapper -def method_cache(method, cache_wrapper=functools.lru_cache()): +def method_cache( + method: CallableT, + cache_wrapper: Callable[ + [CallableT], CallableT + ] = functools.lru_cache(), # type: ignore[assignment] +) -> CallableT: """ Wrap lru_cache to support storing the cache data in the object instances. 
@@ -144,17 +172,22 @@ def method_cache(method, cache_wrapper=functools.lru_cache()): for another implementation and additional justification. """ - def wrapper(self, *args, **kwargs): + def wrapper(self: object, *args: object, **kwargs: object) -> object: # it's the first call, replace the method with a cached, bound method - bound_method = types.MethodType(method, self) + bound_method: CallableT = types.MethodType( # type: ignore[assignment] + method, self + ) cached_method = cache_wrapper(bound_method) setattr(self, method.__name__, cached_method) return cached_method(*args, **kwargs) # Support cache clear even before cache has been created. - wrapper.cache_clear = lambda: None + wrapper.cache_clear = lambda: None # type: ignore[attr-defined] - return _special_method_cache(method, cache_wrapper) or wrapper + return ( + _special_method_cache(method, cache_wrapper) # type: ignore[return-value] + or wrapper + ) def _special_method_cache(method, cache_wrapper): @@ -170,13 +203,12 @@ def _special_method_cache(method, cache_wrapper): """ name = method.__name__ special_names = '__getattr__', '__getitem__' - if name not in special_names: - return None + return wrapper_name = '__cached' + name - def proxy(self, /, *args, **kwargs): + def proxy(self, *args, **kwargs): if wrapper_name not in vars(self): bound = types.MethodType(method, self) cache = cache_wrapper(bound) @@ -213,7 +245,7 @@ def result_invoke(action): r""" Decorate a function with an action function that is invoked on the results returned from the decorated - function (for its side effect), then return the original + function (for its side-effect), then return the original result. >>> @result_invoke(print) @@ -237,7 +269,7 @@ def result_invoke(action): return wrap -def invoke(f, /, *args, **kwargs): +def invoke(f, *args, **kwargs): """ Call a function for its side effect after initialization. 
@@ -272,15 +304,25 @@ def invoke(f, /, *args, **kwargs): Use functools.partial to pass parameters to the initial call >>> @functools.partial(invoke, name='bingo') - ... def func(name): print('called with', name) + ... def func(name): print("called with", name) called with bingo """ f(*args, **kwargs) return f +def call_aside(*args, **kwargs): + """ + Deprecated name for invoke. + """ + warnings.warn("call_aside is deprecated, use invoke", DeprecationWarning) + return invoke(*args, **kwargs) + + class Throttler: - """Rate-limit a function (or other callable).""" + """ + Rate-limit a function (or other callable) + """ def __init__(self, func, max_rate=float('Inf')): if isinstance(func, Throttler): @@ -297,20 +339,20 @@ class Throttler: return self.func(*args, **kwargs) def _wait(self): - """Ensure at least 1/max_rate seconds from last call.""" + "ensure at least 1/max_rate seconds from last call" elapsed = time.time() - self.last_called must_wait = 1 / self.max_rate - elapsed time.sleep(max(0, must_wait)) self.last_called = time.time() - def __get__(self, obj, owner=None): + def __get__(self, obj, type=None): return first_invoke(self._wait, functools.partial(self.func, obj)) def first_invoke(func1, func2): """ Return a function that when invoked will invoke func1 without - any parameters (for its side effect) and then invoke func2 + any parameters (for its side-effect) and then invoke func2 with whatever parameters were passed, returning its result. """ @@ -321,17 +363,6 @@ def first_invoke(func1, func2): return wrapper -method_caller = first_invoke( - lambda: warnings.warn( - '`jaraco.functools.method_caller` is deprecated, ' - 'use `operator.methodcaller` instead', - DeprecationWarning, - stacklevel=3, - ), - operator.methodcaller, -) - - def retry_call(func, cleanup=lambda: None, retries=0, trap=()): """ Given a callable func, trap the indicated exceptions @@ -340,7 +371,7 @@ def retry_call(func, cleanup=lambda: None, retries=0, trap=()): to propagate. 
""" attempts = itertools.count() if retries == float('inf') else range(retries) - for _ in attempts: + for attempt in attempts: try: return func() except trap: @@ -377,7 +408,7 @@ def retry(*r_args, **r_kwargs): def print_yielded(func): """ - Convert a generator into a function that prints all yielded elements. + Convert a generator into a function that prints all yielded elements >>> @print_yielded ... def x(): @@ -393,7 +424,7 @@ def print_yielded(func): def pass_none(func): """ - Wrap func so it's not called if its first param is None. + Wrap func so it's not called if its first param is None >>> print_text = pass_none(print) >>> print_text('text') @@ -402,10 +433,9 @@ def pass_none(func): """ @functools.wraps(func) - def wrapper(param, /, *args, **kwargs): + def wrapper(param, *args, **kwargs): if param is not None: return func(param, *args, **kwargs) - return None return wrapper @@ -479,7 +509,7 @@ def save_method_args(method): args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs') @functools.wraps(method) - def wrapper(self, /, *args, **kwargs): + def wrapper(self, *args, **kwargs): attr_name = '_saved_' + method.__name__ attr = args_and_kwargs(args, kwargs) setattr(self, attr_name, attr) @@ -529,13 +559,6 @@ def except_(*exceptions, replace=None, use=None): def identity(x): - """ - Return the argument. 
- - >>> o = object() - >>> identity(o) is o - True - """ return x @@ -557,7 +580,7 @@ def bypass_when(check, *, _op=identity): def decorate(func): @functools.wraps(func) - def wrapper(param, /): + def wrapper(param): return param if _op(check) else func(param) return wrapper @@ -581,53 +604,3 @@ def bypass_unless(check): 2 """ return bypass_when(check, _op=operator.not_) - - -@functools.singledispatch -def _splat_inner(args, func): - """Splat args to func.""" - return func(*args) - - -@_splat_inner.register -def _(args: collections.abc.Mapping, func): - """Splat kargs to func as kwargs.""" - return func(**args) - - -def splat(func): - """ - Wrap func to expect its parameters to be passed positionally in a tuple. - - Has a similar effect to that of ``itertools.starmap`` over - simple ``map``. - - >>> pairs = [(-1, 1), (0, 2)] - >>> more_itertools.consume(itertools.starmap(print, pairs)) - -1 1 - 0 2 - >>> more_itertools.consume(map(splat(print), pairs)) - -1 1 - 0 2 - - The approach generalizes to other iterators that don't have a "star" - equivalent, such as a "starfilter". - - >>> list(filter(splat(operator.add), pairs)) - [(0, 2)] - - Splat also accepts a mapping argument. - - >>> def is_nice(msg, code): - ... return "smile" in msg or code == 0 - >>> msgs = [ - ... dict(msg='smile!', code=20), - ... dict(msg='error :(', code=1), - ... dict(msg='unknown', code=0), - ... ] - >>> for msg in filter(splat(is_nice), msgs): - ... 
print(msg) - {'msg': 'smile!', 'code': 20} - {'msg': 'unknown', 'code': 0} - """ - return functools.wraps(func)(functools.partial(_splat_inner, func=func)) diff --git a/lib/jaraco/functools/__init__.pyi b/lib/jaraco/functools/__init__.pyi deleted file mode 100644 index c2b9ab17..00000000 --- a/lib/jaraco/functools/__init__.pyi +++ /dev/null @@ -1,128 +0,0 @@ -from collections.abc import Callable, Hashable, Iterator -from functools import partial -from operator import methodcaller -import sys -from typing import ( - Any, - Generic, - Protocol, - TypeVar, - overload, -) - -if sys.version_info >= (3, 10): - from typing import Concatenate, ParamSpec -else: - from typing_extensions import Concatenate, ParamSpec - -_P = ParamSpec('_P') -_R = TypeVar('_R') -_T = TypeVar('_T') -_R1 = TypeVar('_R1') -_R2 = TypeVar('_R2') -_V = TypeVar('_V') -_S = TypeVar('_S') -_R_co = TypeVar('_R_co', covariant=True) - -class _OnceCallable(Protocol[_P, _R]): - saved_result: _R - reset: Callable[[], None] - def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... - -class _ProxyMethodCacheWrapper(Protocol[_R_co]): - cache_clear: Callable[[], None] - def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ... - -class _MethodCacheWrapper(Protocol[_R_co]): - def cache_clear(self) -> None: ... - def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ... - -# `compose()` overloads below will cover most use cases. - -@overload -def compose( - __func1: Callable[[_R], _T], - __func2: Callable[_P, _R], - /, -) -> Callable[_P, _T]: ... -@overload -def compose( - __func1: Callable[[_R], _T], - __func2: Callable[[_R1], _R], - __func3: Callable[_P, _R1], - /, -) -> Callable[_P, _T]: ... -@overload -def compose( - __func1: Callable[[_R], _T], - __func2: Callable[[_R2], _R], - __func3: Callable[[_R1], _R2], - __func4: Callable[_P, _R1], - /, -) -> Callable[_P, _T]: ... -def once(func: Callable[_P, _R]) -> _OnceCallable[_P, _R]: ... 
-def method_cache( - method: Callable[..., _R], - cache_wrapper: Callable[[Callable[..., _R]], _MethodCacheWrapper[_R]] = ..., -) -> _MethodCacheWrapper[_R] | _ProxyMethodCacheWrapper[_R]: ... -def apply( - transform: Callable[[_R], _T] -) -> Callable[[Callable[_P, _R]], Callable[_P, _T]]: ... -def result_invoke( - action: Callable[[_R], Any] -) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ... -def invoke( - f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs -) -> Callable[_P, _R]: ... -def call_aside( - f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs -) -> Callable[_P, _R]: ... - -class Throttler(Generic[_R]): - last_called: float - func: Callable[..., _R] - max_rate: float - def __init__( - self, func: Callable[..., _R] | Throttler[_R], max_rate: float = ... - ) -> None: ... - def reset(self) -> None: ... - def __call__(self, *args: Any, **kwargs: Any) -> _R: ... - def __get__(self, obj: Any, owner: type[Any] | None = ...) -> Callable[..., _R]: ... - -def first_invoke( - func1: Callable[..., Any], func2: Callable[_P, _R] -) -> Callable[_P, _R]: ... - -method_caller: Callable[..., methodcaller] - -def retry_call( - func: Callable[..., _R], - cleanup: Callable[..., None] = ..., - retries: int | float = ..., - trap: type[BaseException] | tuple[type[BaseException], ...] = ..., -) -> _R: ... -def retry( - cleanup: Callable[..., None] = ..., - retries: int | float = ..., - trap: type[BaseException] | tuple[type[BaseException], ...] = ..., -) -> Callable[[Callable[..., _R]], Callable[..., _R]]: ... -def print_yielded(func: Callable[_P, Iterator[Any]]) -> Callable[_P, None]: ... -def pass_none( - func: Callable[Concatenate[_T, _P], _R] -) -> Callable[Concatenate[_T, _P], _R]: ... -def assign_params( - func: Callable[..., _R], namespace: dict[str, Any] -) -> partial[_R]: ... -def save_method_args( - method: Callable[Concatenate[_S, _P], _R] -) -> Callable[Concatenate[_S, _P], _R]: ... 
-def except_( - *exceptions: type[BaseException], replace: Any = ..., use: Any = ... -) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ... -def identity(x: _T) -> _T: ... -def bypass_when( - check: _V, *, _op: Callable[[_V], Any] = ... -) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ... -def bypass_unless( - check: Any, -) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ... diff --git a/lib/jaraco/functools/py.typed b/lib/jaraco/functools/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/jaraco/text/__init__.py b/lib/jaraco/text/__init__.py index 0fabd0c3..e51101c2 100644 --- a/lib/jaraco/text/__init__.py +++ b/lib/jaraco/text/__init__.py @@ -227,12 +227,10 @@ def unwrap(s): return '\n'.join(cleaned) -lorem_ipsum: str = ( - files(__name__).joinpath('Lorem ipsum.txt').read_text(encoding='utf-8') -) +lorem_ipsum: str = files(__name__).joinpath('Lorem ipsum.txt').read_text() -class Splitter: +class Splitter(object): """object that will split a string with the given arguments for each call >>> s = Splitter(',') @@ -369,7 +367,7 @@ class WordSet(tuple): return self.trim_left(item).trim_right(item) def __getitem__(self, item): - result = super().__getitem__(item) + result = super(WordSet, self).__getitem__(item) if isinstance(item, slice): result = WordSet(result) return result @@ -584,7 +582,7 @@ def join_continuation(lines): ['foobarbaz'] Not sure why, but... - The character preceding the backslash is also elided. + The character preceeding the backslash is also elided. 
>>> list(join_continuation(['goo\\', 'dly'])) ['godly'] @@ -609,16 +607,16 @@ def read_newlines(filename, limit=1024): r""" >>> tmp_path = getfixture('tmp_path') >>> filename = tmp_path / 'out.txt' - >>> _ = filename.write_text('foo\n', newline='', encoding='utf-8') + >>> _ = filename.write_text('foo\n', newline='') >>> read_newlines(filename) '\n' - >>> _ = filename.write_text('foo\r\n', newline='', encoding='utf-8') + >>> _ = filename.write_text('foo\r\n', newline='') >>> read_newlines(filename) '\r\n' - >>> _ = filename.write_text('foo\r\nbar\nbing\r', newline='', encoding='utf-8') + >>> _ = filename.write_text('foo\r\nbar\nbing\r', newline='') >>> read_newlines(filename) ('\r', '\n', '\r\n') """ - with open(filename, encoding='utf-8') as fp: + with open(filename) as fp: fp.read(limit) return fp.newlines diff --git a/lib/jaraco/text/show-newlines.py b/lib/jaraco/text/show-newlines.py index e11d1ba4..2ba32062 100644 --- a/lib/jaraco/text/show-newlines.py +++ b/lib/jaraco/text/show-newlines.py @@ -12,11 +12,11 @@ def report_newlines(filename): >>> tmp_path = getfixture('tmp_path') >>> filename = tmp_path / 'out.txt' - >>> _ = filename.write_text('foo\nbar\n', newline='', encoding='utf-8') + >>> _ = filename.write_text('foo\nbar\n', newline='') >>> report_newlines(filename) newline is '\n' >>> filename = tmp_path / 'out.txt' - >>> _ = filename.write_text('foo\nbar\r\n', newline='', encoding='utf-8') + >>> _ = filename.write_text('foo\nbar\r\n', newline='') >>> report_newlines(filename) newlines are ('\n', '\r\n') """ diff --git a/lib/jaraco/text/strip-prefix.py b/lib/jaraco/text/strip-prefix.py deleted file mode 100644 index 761717a9..00000000 --- a/lib/jaraco/text/strip-prefix.py +++ /dev/null @@ -1,21 +0,0 @@ -import sys - -import autocommand - -from jaraco.text import Stripper - - -def strip_prefix(): - r""" - Strip any common prefix from stdin. 
- - >>> import io, pytest - >>> getfixture('monkeypatch').setattr('sys.stdin', io.StringIO('abcdef\nabc123')) - >>> strip_prefix() - def - 123 - """ - sys.stdout.writelines(Stripper.strip_prefix(sys.stdin).lines) - - -autocommand.autocommand(__name__)(strip_prefix) diff --git a/lib/more_itertools/__init__.py b/lib/more_itertools/__init__.py index aff94a9a..28ffadcf 100644 --- a/lib/more_itertools/__init__.py +++ b/lib/more_itertools/__init__.py @@ -3,4 +3,4 @@ from .more import * # noqa from .recipes import * # noqa -__version__ = '10.2.0' +__version__ = '10.1.0' diff --git a/lib/more_itertools/more.py b/lib/more_itertools/more.py index dd711a47..59c2f1a4 100755 --- a/lib/more_itertools/more.py +++ b/lib/more_itertools/more.py @@ -19,7 +19,7 @@ from itertools import ( zip_longest, product, ) -from math import exp, factorial, floor, log, perm, comb +from math import exp, factorial, floor, log from queue import Empty, Queue from random import random, randrange, uniform from operator import itemgetter, mul, sub, gt, lt, ge, le @@ -68,10 +68,8 @@ __all__ = [ 'divide', 'duplicates_everseen', 'duplicates_justseen', - 'classify_unique', 'exactly_n', 'filter_except', - 'filter_map', 'first', 'gray_product', 'groupby_transform', @@ -85,7 +83,6 @@ __all__ = [ 'is_sorted', 'islice_extended', 'iterate', - 'iter_suppress', 'last', 'locate', 'longest_common_prefix', @@ -201,14 +198,15 @@ def first(iterable, default=_marker): ``next(iter(iterable), default)``. """ - for item in iterable: - return item - if default is _marker: - raise ValueError( - 'first() was called on an empty iterable, and no ' - 'default value was provided.' - ) - return default + try: + return next(iter(iterable)) + except StopIteration as e: + if default is _marker: + raise ValueError( + 'first() was called on an empty iterable, and no ' + 'default value was provided.' 
+ ) from e + return default def last(iterable, default=_marker): @@ -584,9 +582,6 @@ def strictly_n(iterable, n, too_short=None, too_long=None): >>> list(strictly_n(iterable, n)) ['a', 'b', 'c', 'd'] - Note that the returned iterable must be consumed in order for the check to - be made. - By default, *too_short* and *too_long* are functions that raise ``ValueError``. @@ -924,7 +919,7 @@ def substrings_indexes(seq, reverse=False): class bucket: - """Wrap *iterable* and return an object that buckets the iterable into + """Wrap *iterable* and return an object that buckets it iterable into child iterables based on a *key* function. >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] @@ -3227,8 +3222,6 @@ class time_limited: stops if the time elapsed is greater than *limit_seconds*. If your time limit is 1 second, but it takes 2 seconds to generate the first item from the iterable, the function will run for 2 seconds and not yield anything. - As a special case, when *limit_seconds* is zero, the iterator never - returns anything. """ @@ -3244,9 +3237,6 @@ class time_limited: return self def __next__(self): - if self.limit_seconds == 0: - self.timed_out = True - raise StopIteration item = next(self._iterable) if monotonic() - self._start_time > self.limit_seconds: self.timed_out = True @@ -3366,7 +3356,7 @@ def iequals(*iterables): >>> iequals("abc", "acb") False - Not to be confused with :func:`all_equal`, which checks whether all + Not to be confused with :func:`all_equals`, which checks whether all elements of iterable are equal to each other. 
""" @@ -3863,7 +3853,7 @@ def nth_permutation(iterable, r, index): elif not 0 <= r < n: raise ValueError else: - c = perm(n, r) + c = factorial(n) // factorial(n - r) if index < 0: index += c @@ -3908,7 +3898,7 @@ def nth_combination_with_replacement(iterable, r, index): if (r < 0) or (r > n): raise ValueError - c = comb(n + r - 1, r) + c = factorial(n + r - 1) // (factorial(r) * factorial(n - 1)) if index < 0: index += c @@ -3921,7 +3911,9 @@ def nth_combination_with_replacement(iterable, r, index): while r: r -= 1 while n >= 0: - num_combs = comb(n + r - 1, r) + num_combs = factorial(n + r - 1) // ( + factorial(r) * factorial(n - 1) + ) if index < num_combs: break n -= 1 @@ -4023,9 +4015,9 @@ def combination_index(element, iterable): for i, j in enumerate(reversed(indexes), start=1): j = n - j if i <= j: - index += comb(j, i) + index += factorial(j) // (factorial(i) * factorial(j - i)) - return comb(n + 1, k + 1) - index + return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index def combination_with_replacement_index(element, iterable): @@ -4065,7 +4057,7 @@ def combination_with_replacement_index(element, iterable): break else: raise ValueError( - 'element is not a combination with replacement of iterable' + 'element is not a combination with replacment of iterable' ) n = len(pool) @@ -4074,13 +4066,11 @@ def combination_with_replacement_index(element, iterable): occupations[p] += 1 index = 0 - cumulative_sum = 0 for k in range(1, n): - cumulative_sum += occupations[k - 1] - j = l + n - 1 - k - cumulative_sum + j = l + n - 1 - k - sum(occupations[:k]) i = n - k if i <= j: - index += comb(j, i) + index += factorial(j) // (factorial(i) * factorial(j - i)) return index @@ -4306,7 +4296,7 @@ def duplicates_everseen(iterable, key=None): >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower)) ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a'] - This function is analogous to :func:`unique_everseen` and is subject to + This function is analagous to 
:func:`unique_everseen` and is subject to the same performance considerations. """ @@ -4336,54 +4326,12 @@ def duplicates_justseen(iterable, key=None): >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower)) ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a'] - This function is analogous to :func:`unique_justseen`. + This function is analagous to :func:`unique_justseen`. """ return flatten(g for _, g in groupby(iterable, key) for _ in g) -def classify_unique(iterable, key=None): - """Classify each element in terms of its uniqueness. - - For each element in the input iterable, return a 3-tuple consisting of: - - 1. The element itself - 2. ``False`` if the element is equal to the one preceding it in the input, - ``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`) - 3. ``False`` if this element has been seen anywhere in the input before, - ``True`` otherwise (i.e. the equivalent of :func:`unique_everseen`) - - >>> list(classify_unique('otto')) # doctest: +NORMALIZE_WHITESPACE - [('o', True, True), - ('t', True, True), - ('t', False, False), - ('o', True, False)] - - This function is analogous to :func:`unique_everseen` and is subject to - the same performance considerations. - - """ - seen_set = set() - seen_list = [] - use_key = key is not None - previous = None - - for i, element in enumerate(iterable): - k = key(element) if use_key else element - is_unique_justseen = not i or previous != k - previous = k - is_unique_everseen = False - try: - if k not in seen_set: - seen_set.add(k) - is_unique_everseen = True - except TypeError: - if k not in seen_list: - seen_list.append(k) - is_unique_everseen = True - yield element, is_unique_justseen, is_unique_everseen - - def minmax(iterable_or_value, *others, key=None, default=_marker): """Returns both the smallest and largest items in an iterable or the largest of two or more arguments. @@ -4581,8 +4529,10 @@ def takewhile_inclusive(predicate, iterable): :func:`takewhile` would return ``[1, 4]``. 
""" for x in iterable: - yield x - if not predicate(x): + if predicate(x): + yield x + else: + yield x break @@ -4617,40 +4567,3 @@ def outer_product(func, xs, ys, *args, **kwargs): starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)), n=len(ys), ) - - -def iter_suppress(iterable, *exceptions): - """Yield each of the items from *iterable*. If the iteration raises one of - the specified *exceptions*, that exception will be suppressed and iteration - will stop. - - >>> from itertools import chain - >>> def breaks_at_five(x): - ... while True: - ... if x >= 5: - ... raise RuntimeError - ... yield x - ... x += 1 - >>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError) - >>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError) - >>> list(chain(it_1, it_2)) - [1, 2, 3, 4, 2, 3, 4] - """ - try: - yield from iterable - except exceptions: - return - - -def filter_map(func, iterable): - """Apply *func* to every element of *iterable*, yielding only those which - are not ``None``. - - >>> elems = ['1', 'a', '2', 'b', '3'] - >>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems)) - [1, 2, 3] - """ - for x in iterable: - y = func(x) - if y is not None: - yield y diff --git a/lib/more_itertools/more.pyi b/lib/more_itertools/more.pyi index 9a5fc911..07bfc155 100644 --- a/lib/more_itertools/more.pyi +++ b/lib/more_itertools/more.pyi @@ -29,7 +29,7 @@ _U = TypeVar('_U') _V = TypeVar('_V') _W = TypeVar('_W') _T_co = TypeVar('_T_co', covariant=True) -_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]]) +_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]]) _Raisable = BaseException | Type[BaseException] @type_check_only @@ -74,7 +74,7 @@ class peekable(Generic[_T], Iterator[_T]): def __getitem__(self, index: slice) -> list[_T]: ... def consumer(func: _GenFn) -> _GenFn: ... -def ilen(iterable: Iterable[_T]) -> int: ... +def ilen(iterable: Iterable[object]) -> int: ... 
def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ... def with_iter( context_manager: ContextManager[Iterable[_T]], @@ -116,7 +116,7 @@ class bucket(Generic[_T, _U], Container[_U]): self, iterable: Iterable[_T], key: Callable[[_T], _U], - validator: Callable[[_U], object] | None = ..., + validator: Callable[[object], object] | None = ..., ) -> None: ... def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_U]: ... @@ -383,7 +383,7 @@ def mark_ends( iterable: Iterable[_T], ) -> Iterable[tuple[bool, bool, _T]]: ... def locate( - iterable: Iterable[_T], + iterable: Iterable[object], pred: Callable[..., Any] = ..., window_size: int | None = ..., ) -> Iterator[int]: ... @@ -618,9 +618,6 @@ def duplicates_everseen( def duplicates_justseen( iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... ) -> Iterator[_T]: ... -def classify_unique( - iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... -) -> Iterator[tuple[_T, bool, bool]]: ... class _SupportsLessThan(Protocol): def __lt__(self, __other: Any) -> bool: ... @@ -665,9 +662,9 @@ def minmax( def longest_common_prefix( iterables: Iterable[Iterable[_T]], ) -> Iterator[_T]: ... -def iequals(*iterables: Iterable[Any]) -> bool: ... +def iequals(*iterables: Iterable[object]) -> bool: ... def constrained_batches( - iterable: Iterable[_T], + iterable: Iterable[object], max_size: int, max_count: int | None = ..., get_len: Callable[[_T], object] = ..., @@ -685,11 +682,3 @@ def outer_product( *args: Any, **kwargs: Any, ) -> Iterator[tuple[_V, ...]]: ... -def iter_suppress( - iterable: Iterable[_T], - *exceptions: Type[BaseException], -) -> Iterator[_T]: ... -def filter_map( - func: Callable[[_T], _V | None], - iterable: Iterable[_T], -) -> Iterator[_V]: ... 
diff --git a/lib/more_itertools/recipes.py b/lib/more_itertools/recipes.py index 145e3cb5..a0bdbece 100644 --- a/lib/more_itertools/recipes.py +++ b/lib/more_itertools/recipes.py @@ -28,7 +28,6 @@ from itertools import ( zip_longest, ) from random import randrange, sample, choice -from sys import hexversion __all__ = [ 'all_equal', @@ -57,7 +56,6 @@ __all__ = [ 'powerset', 'prepend', 'quantify', - 'reshape', 'random_combination_with_replacement', 'random_combination', 'random_permutation', @@ -71,7 +69,6 @@ __all__ = [ 'tabulate', 'tail', 'take', - 'totient', 'transpose', 'triplewise', 'unique_everseen', @@ -495,7 +492,7 @@ def unique_everseen(iterable, key=None): >>> list(unique_everseen(iterable, key=tuple)) # Faster [[1, 2], [2, 3]] - Similarly, you may want to convert unhashable ``set`` objects with + Similary, you may want to convert unhashable ``set`` objects with ``key=frozenset``. For ``dict`` objects, ``key=lambda x: frozenset(x.items())`` can be used. @@ -527,9 +524,6 @@ def unique_justseen(iterable, key=None): ['A', 'B', 'C', 'A', 'D'] """ - if key is None: - return map(operator.itemgetter(0), groupby(iterable)) - return map(next, map(operator.itemgetter(1), groupby(iterable, key))) @@ -823,34 +817,35 @@ def polynomial_from_roots(roots): return list(reduce(convolve, factors, [1])) -def iter_index(iterable, value, start=0, stop=None): +def iter_index(iterable, value, start=0): """Yield the index of each place in *iterable* that *value* occurs, - beginning with index *start* and ending before index *stop*. + beginning with index *start*. See :func:`locate` for a more general means of finding the indexes associated with particular values. 
>>> list(iter_index('AABCADEAF', 'A')) [0, 1, 4, 7] - >>> list(iter_index('AABCADEAF', 'A', 1)) # start index is inclusive - [1, 4, 7] - >>> list(iter_index('AABCADEAF', 'A', 1, 7)) # stop index is not inclusive - [1, 4] """ - seq_index = getattr(iterable, 'index', None) - if seq_index is None: + try: + seq_index = iterable.index + except AttributeError: # Slow path for general iterables - it = islice(iterable, start, stop) - for i, element in enumerate(it, start): - if element is value or element == value: - yield i - else: - # Fast path for sequences - stop = len(iterable) if stop is None else stop + it = islice(iterable, start, None) i = start - 1 try: while True: - yield (i := seq_index(value, i + 1, stop)) + i = i + operator.indexOf(it, value) + 1 + yield i + except ValueError: + pass + else: + # Fast path for sequences + i = start - 1 + try: + while True: + i = seq_index(value, i + 1) + yield i except ValueError: pass @@ -861,52 +856,47 @@ def sieve(n): >>> list(sieve(30)) [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] """ - if n > 2: - yield 2 - start = 3 data = bytearray((0, 1)) * (n // 2) + data[:3] = 0, 0, 0 limit = math.isqrt(n) + 1 - for p in iter_index(data, 1, start, limit): - yield from iter_index(data, 1, start, p * p) + for p in compress(range(limit), data): data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p))) - start = p * p - yield from iter_index(data, 1, start) + data[2] = 1 + return iter_index(data, 1) if n > 2 else iter([]) -def _batched(iterable, n, *, strict=False): - """Batch data into tuples of length *n*. If the number of items in - *iterable* is not divisible by *n*: - * The last batch will be shorter if *strict* is ``False``. - * :exc:`ValueError` will be raised if *strict* is ``True``. +def _batched(iterable, n): + """Batch data into lists of length *n*. The last batch may be shorter. >>> list(batched('ABCDEFG', 3)) [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)] - On Python 3.13 and above, this is an alias for :func:`itertools.batched`. 
+ On Python 3.12 and above, this is an alias for :func:`itertools.batched`. """ if n < 1: raise ValueError('n must be at least one') it = iter(iterable) - while batch := tuple(islice(it, n)): - if strict and len(batch) != n: - raise ValueError('batched(): incomplete batch') + while True: + batch = tuple(islice(it, n)) + if not batch: + break yield batch -if hexversion >= 0x30D00A2: +try: from itertools import batched as itertools_batched - - def batched(iterable, n, *, strict=False): - return itertools_batched(iterable, n, strict=strict) - -else: +except ImportError: batched = _batched +else: + + def batched(iterable, n): + return itertools_batched(iterable, n) batched.__doc__ = _batched.__doc__ def transpose(it): - """Swap the rows and columns of the input matrix. + """Swap the rows and columns of the input. >>> list(transpose([(1, 2, 3), (11, 22, 33)])) [(1, 11), (2, 22), (3, 33)] @@ -917,20 +907,8 @@ def transpose(it): return _zip_strict(*it) -def reshape(matrix, cols): - """Reshape the 2-D input *matrix* to have a column count given by *cols*. - - >>> matrix = [(0, 1), (2, 3), (4, 5)] - >>> cols = 3 - >>> list(reshape(matrix, cols)) - [(0, 1, 2), (3, 4, 5)] - """ - return batched(chain.from_iterable(matrix), cols) - - def matmul(m1, m2): """Multiply two matrices. - >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)])) [(49, 80), (41, 60)] @@ -943,12 +921,13 @@ def matmul(m1, m2): def factor(n): """Yield the prime factors of n. - >>> list(factor(360)) [2, 2, 2, 3, 3, 5] """ for prime in sieve(math.isqrt(n) + 1): - while not n % prime: + while True: + if n % prime: + break yield prime n //= prime if n == 1: @@ -996,17 +975,3 @@ def polynomial_derivative(coefficients): n = len(coefficients) powers = reversed(range(1, n)) return list(map(operator.mul, coefficients, powers)) - - -def totient(n): - """Return the count of natural numbers up to *n* that are coprime with *n*. 
- - >>> totient(9) - 6 - >>> totient(12) - 4 - """ - for p in unique_justseen(factor(n)): - n = n // p * (p - 1) - - return n diff --git a/lib/more_itertools/recipes.pyi b/lib/more_itertools/recipes.pyi index ed4c19db..ef883864 100644 --- a/lib/more_itertools/recipes.pyi +++ b/lib/more_itertools/recipes.pyi @@ -14,8 +14,6 @@ from typing import ( # Type and type variable definitions _T = TypeVar('_T') -_T1 = TypeVar('_T1') -_T2 = TypeVar('_T2') _U = TypeVar('_U') def take(n: int, iterable: Iterable[_T]) -> list[_T]: ... @@ -28,14 +26,14 @@ def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ... def nth(iterable: Iterable[_T], n: int) -> _T | None: ... @overload def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ... -def all_equal(iterable: Iterable[_T]) -> bool: ... +def all_equal(iterable: Iterable[object]) -> bool: ... def quantify( iterable: Iterable[_T], pred: Callable[[_T], bool] = ... ) -> int: ... def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ... def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ... def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ... -def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ... +def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ... def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ... def repeatfunc( func: Callable[..., _U], times: int | None = ..., *args: Any @@ -105,24 +103,20 @@ def sliding_window( def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ... def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ... def iter_index( - iterable: Iterable[_T], + iterable: Iterable[object], value: Any, start: int | None = ..., - stop: int | None = ..., ) -> Iterator[int]: ... def sieve(n: int) -> Iterator[int]: ... def batched( - iterable: Iterable[_T], n: int, *, strict: bool = False + iterable: Iterable[_T], + n: int, ) -> Iterator[tuple[_T]]: ... 
def transpose( it: Iterable[Iterable[_T]], ) -> Iterator[tuple[_T, ...]]: ... -def reshape( - matrix: Iterable[Iterable[_T]], cols: int -) -> Iterator[tuple[_T, ...]]: ... def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ... def factor(n: int) -> Iterator[int]: ... def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ... def sum_of_squares(it: Iterable[_T]) -> _T: ... def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ... -def totient(n: int) -> int: ... diff --git a/lib/pydantic/__init__.py b/lib/pydantic/__init__.py index 85a8c18b..3bf1418f 100644 --- a/lib/pydantic/__init__.py +++ b/lib/pydantic/__init__.py @@ -1,114 +1,56 @@ -import typing - -from ._migration import getattr_migration -from .version import VERSION - -if typing.TYPE_CHECKING: - # import of virtually everything is supported via `__getattr__` below, - # but we need them here for type checking and IDE support - import pydantic_core - from pydantic_core.core_schema import ( - FieldSerializationInfo, - SerializationInfo, - SerializerFunctionWrapHandler, - ValidationInfo, - ValidatorFunctionWrapHandler, - ) - - from . 
import dataclasses - from ._internal._generate_schema import GenerateSchema as GenerateSchema - from .aliases import AliasChoices, AliasGenerator, AliasPath - from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler - from .config import ConfigDict - from .errors import * - from .fields import Field, PrivateAttr, computed_field - from .functional_serializers import ( - PlainSerializer, - SerializeAsAny, - WrapSerializer, - field_serializer, - model_serializer, - ) - from .functional_validators import ( - AfterValidator, - BeforeValidator, - InstanceOf, - PlainValidator, - SkipValidation, - WrapValidator, - field_validator, - model_validator, - ) - from .json_schema import WithJsonSchema - from .main import * - from .networks import * - from .type_adapter import TypeAdapter - from .types import * - from .validate_call_decorator import validate_call - from .warnings import PydanticDeprecatedSince20, PydanticDeprecatedSince26, PydanticDeprecationWarning - - # this encourages pycharm to import `ValidationError` from here, not pydantic_core - ValidationError = pydantic_core.ValidationError - from .deprecated.class_validators import root_validator, validator - from .deprecated.config import BaseConfig, Extra - from .deprecated.tools import * - from .root_model import RootModel +# flake8: noqa +from . 
import dataclasses +from .annotated_types import create_model_from_namedtuple, create_model_from_typeddict +from .class_validators import root_validator, validator +from .config import BaseConfig, ConfigDict, Extra +from .decorator import validate_arguments +from .env_settings import BaseSettings +from .error_wrappers import ValidationError +from .errors import * +from .fields import Field, PrivateAttr, Required +from .main import * +from .networks import * +from .parse import Protocol +from .tools import * +from .types import * +from .version import VERSION, compiled __version__ = VERSION -__all__ = ( + +# WARNING __all__ from .errors is not included here, it will be removed as an export here in v2 +# please use "from pydantic.errors import ..." instead +__all__ = [ + # annotated types utils + 'create_model_from_namedtuple', + 'create_model_from_typeddict', # dataclasses 'dataclasses', - # functional validators - 'field_validator', - 'model_validator', - 'AfterValidator', - 'BeforeValidator', - 'PlainValidator', - 'WrapValidator', - 'SkipValidation', - 'InstanceOf', - # JSON Schema - 'WithJsonSchema', - # deprecated V1 functional validators, these are imported via `__getattr__` below + # class_validators 'root_validator', 'validator', - # functional serializers - 'field_serializer', - 'model_serializer', - 'PlainSerializer', - 'SerializeAsAny', - 'WrapSerializer', # config - 'ConfigDict', - # deprecated V1 config, these are imported via `__getattr__` below 'BaseConfig', + 'ConfigDict', 'Extra', - # validate_call - 'validate_call', - # errors - 'PydanticErrorCodes', - 'PydanticUserError', - 'PydanticSchemaGenerationError', - 'PydanticImportError', - 'PydanticUndefinedAnnotation', - 'PydanticInvalidForJsonSchema', + # decorator + 'validate_arguments', + # env_settings + 'BaseSettings', + # error_wrappers + 'ValidationError', # fields 'Field', - 'computed_field', - 'PrivateAttr', - # alias - 'AliasChoices', - 'AliasGenerator', - 'AliasPath', + 'Required', # main 
'BaseModel', 'create_model', + 'validate_model', # network 'AnyUrl', 'AnyHttpUrl', 'FileUrl', 'HttpUrl', - 'UrlConstraints', + 'stricturl', 'EmailStr', 'NameEmail', 'IPvAnyAddress', @@ -120,38 +62,48 @@ __all__ = ( 'RedisDsn', 'MongoDsn', 'KafkaDsn', - 'NatsDsn', - 'MySQLDsn', - 'MariaDBDsn', 'validate_email', - # root_model - 'RootModel', - # deprecated tools, these are imported via `__getattr__` below + # parse + 'Protocol', + # tools + 'parse_file_as', 'parse_obj_as', + 'parse_raw_as', 'schema_of', 'schema_json_of', # types - 'Strict', + 'NoneStr', + 'NoneBytes', + 'StrBytes', + 'NoneStrBytes', 'StrictStr', + 'ConstrainedBytes', 'conbytes', + 'ConstrainedList', 'conlist', + 'ConstrainedSet', 'conset', + 'ConstrainedFrozenSet', 'confrozenset', + 'ConstrainedStr', 'constr', - 'StringConstraints', - 'ImportString', + 'PyObject', + 'ConstrainedInt', 'conint', 'PositiveInt', 'NegativeInt', 'NonNegativeInt', 'NonPositiveInt', + 'ConstrainedFloat', 'confloat', 'PositiveFloat', 'NegativeFloat', 'NonNegativeFloat', 'NonPositiveFloat', 'FiniteFloat', + 'ConstrainedDecimal', 'condecimal', + 'ConstrainedDate', 'condate', 'UUID1', 'UUID3', @@ -159,8 +111,9 @@ __all__ = ( 'UUID5', 'FilePath', 'DirectoryPath', - 'NewPath', 'Json', + 'JsonWrapper', + 'SecretField', 'SecretStr', 'SecretBytes', 'StrictBool', @@ -168,221 +121,11 @@ __all__ = ( 'StrictInt', 'StrictFloat', 'PaymentCardNumber', + 'PrivateAttr', 'ByteSize', 'PastDate', 'FutureDate', - 'PastDatetime', - 'FutureDatetime', - 'AwareDatetime', - 'NaiveDatetime', - 'AllowInfNan', - 'EncoderProtocol', - 'EncodedBytes', - 'EncodedStr', - 'Base64Encoder', - 'Base64Bytes', - 'Base64Str', - 'Base64UrlBytes', - 'Base64UrlStr', - 'GetPydanticSchema', - 'Tag', - 'Discriminator', - 'JsonValue', - # type_adapter - 'TypeAdapter', # version - '__version__', + 'compiled', 'VERSION', - # warnings - 'PydanticDeprecatedSince20', - 'PydanticDeprecatedSince26', - 'PydanticDeprecationWarning', - # annotated handlers - 'GetCoreSchemaHandler', 
- 'GetJsonSchemaHandler', - # generate schema from ._internal - 'GenerateSchema', - # pydantic_core - 'ValidationError', - 'ValidationInfo', - 'SerializationInfo', - 'ValidatorFunctionWrapHandler', - 'FieldSerializationInfo', - 'SerializerFunctionWrapHandler', - 'OnErrorOmit', -) - -# A mapping of {: (package, )} defining dynamic imports -_dynamic_imports: 'dict[str, tuple[str, str]]' = { - 'dataclasses': (__package__, '__module__'), - # functional validators - 'field_validator': (__package__, '.functional_validators'), - 'model_validator': (__package__, '.functional_validators'), - 'AfterValidator': (__package__, '.functional_validators'), - 'BeforeValidator': (__package__, '.functional_validators'), - 'PlainValidator': (__package__, '.functional_validators'), - 'WrapValidator': (__package__, '.functional_validators'), - 'SkipValidation': (__package__, '.functional_validators'), - 'InstanceOf': (__package__, '.functional_validators'), - # JSON Schema - 'WithJsonSchema': (__package__, '.json_schema'), - # functional serializers - 'field_serializer': (__package__, '.functional_serializers'), - 'model_serializer': (__package__, '.functional_serializers'), - 'PlainSerializer': (__package__, '.functional_serializers'), - 'SerializeAsAny': (__package__, '.functional_serializers'), - 'WrapSerializer': (__package__, '.functional_serializers'), - # config - 'ConfigDict': (__package__, '.config'), - # validate call - 'validate_call': (__package__, '.validate_call_decorator'), - # errors - 'PydanticErrorCodes': (__package__, '.errors'), - 'PydanticUserError': (__package__, '.errors'), - 'PydanticSchemaGenerationError': (__package__, '.errors'), - 'PydanticImportError': (__package__, '.errors'), - 'PydanticUndefinedAnnotation': (__package__, '.errors'), - 'PydanticInvalidForJsonSchema': (__package__, '.errors'), - # fields - 'Field': (__package__, '.fields'), - 'computed_field': (__package__, '.fields'), - 'PrivateAttr': (__package__, '.fields'), - # alias - 'AliasChoices': 
(__package__, '.aliases'), - 'AliasGenerator': (__package__, '.aliases'), - 'AliasPath': (__package__, '.aliases'), - # main - 'BaseModel': (__package__, '.main'), - 'create_model': (__package__, '.main'), - # network - 'AnyUrl': (__package__, '.networks'), - 'AnyHttpUrl': (__package__, '.networks'), - 'FileUrl': (__package__, '.networks'), - 'HttpUrl': (__package__, '.networks'), - 'UrlConstraints': (__package__, '.networks'), - 'EmailStr': (__package__, '.networks'), - 'NameEmail': (__package__, '.networks'), - 'IPvAnyAddress': (__package__, '.networks'), - 'IPvAnyInterface': (__package__, '.networks'), - 'IPvAnyNetwork': (__package__, '.networks'), - 'PostgresDsn': (__package__, '.networks'), - 'CockroachDsn': (__package__, '.networks'), - 'AmqpDsn': (__package__, '.networks'), - 'RedisDsn': (__package__, '.networks'), - 'MongoDsn': (__package__, '.networks'), - 'KafkaDsn': (__package__, '.networks'), - 'NatsDsn': (__package__, '.networks'), - 'MySQLDsn': (__package__, '.networks'), - 'MariaDBDsn': (__package__, '.networks'), - 'validate_email': (__package__, '.networks'), - # root_model - 'RootModel': (__package__, '.root_model'), - # types - 'Strict': (__package__, '.types'), - 'StrictStr': (__package__, '.types'), - 'conbytes': (__package__, '.types'), - 'conlist': (__package__, '.types'), - 'conset': (__package__, '.types'), - 'confrozenset': (__package__, '.types'), - 'constr': (__package__, '.types'), - 'StringConstraints': (__package__, '.types'), - 'ImportString': (__package__, '.types'), - 'conint': (__package__, '.types'), - 'PositiveInt': (__package__, '.types'), - 'NegativeInt': (__package__, '.types'), - 'NonNegativeInt': (__package__, '.types'), - 'NonPositiveInt': (__package__, '.types'), - 'confloat': (__package__, '.types'), - 'PositiveFloat': (__package__, '.types'), - 'NegativeFloat': (__package__, '.types'), - 'NonNegativeFloat': (__package__, '.types'), - 'NonPositiveFloat': (__package__, '.types'), - 'FiniteFloat': (__package__, '.types'), 
- 'condecimal': (__package__, '.types'), - 'condate': (__package__, '.types'), - 'UUID1': (__package__, '.types'), - 'UUID3': (__package__, '.types'), - 'UUID4': (__package__, '.types'), - 'UUID5': (__package__, '.types'), - 'FilePath': (__package__, '.types'), - 'DirectoryPath': (__package__, '.types'), - 'NewPath': (__package__, '.types'), - 'Json': (__package__, '.types'), - 'SecretStr': (__package__, '.types'), - 'SecretBytes': (__package__, '.types'), - 'StrictBool': (__package__, '.types'), - 'StrictBytes': (__package__, '.types'), - 'StrictInt': (__package__, '.types'), - 'StrictFloat': (__package__, '.types'), - 'PaymentCardNumber': (__package__, '.types'), - 'ByteSize': (__package__, '.types'), - 'PastDate': (__package__, '.types'), - 'FutureDate': (__package__, '.types'), - 'PastDatetime': (__package__, '.types'), - 'FutureDatetime': (__package__, '.types'), - 'AwareDatetime': (__package__, '.types'), - 'NaiveDatetime': (__package__, '.types'), - 'AllowInfNan': (__package__, '.types'), - 'EncoderProtocol': (__package__, '.types'), - 'EncodedBytes': (__package__, '.types'), - 'EncodedStr': (__package__, '.types'), - 'Base64Encoder': (__package__, '.types'), - 'Base64Bytes': (__package__, '.types'), - 'Base64Str': (__package__, '.types'), - 'Base64UrlBytes': (__package__, '.types'), - 'Base64UrlStr': (__package__, '.types'), - 'GetPydanticSchema': (__package__, '.types'), - 'Tag': (__package__, '.types'), - 'Discriminator': (__package__, '.types'), - 'JsonValue': (__package__, '.types'), - 'OnErrorOmit': (__package__, '.types'), - # type_adapter - 'TypeAdapter': (__package__, '.type_adapter'), - # warnings - 'PydanticDeprecatedSince20': (__package__, '.warnings'), - 'PydanticDeprecatedSince26': (__package__, '.warnings'), - 'PydanticDeprecationWarning': (__package__, '.warnings'), - # annotated handlers - 'GetCoreSchemaHandler': (__package__, '.annotated_handlers'), - 'GetJsonSchemaHandler': (__package__, '.annotated_handlers'), - # generate schema from 
._internal - 'GenerateSchema': (__package__, '._internal._generate_schema'), - # pydantic_core stuff - 'ValidationError': ('pydantic_core', '.'), - 'ValidationInfo': ('pydantic_core', '.core_schema'), - 'SerializationInfo': ('pydantic_core', '.core_schema'), - 'ValidatorFunctionWrapHandler': ('pydantic_core', '.core_schema'), - 'FieldSerializationInfo': ('pydantic_core', '.core_schema'), - 'SerializerFunctionWrapHandler': ('pydantic_core', '.core_schema'), - # deprecated, mostly not included in __all__ - 'root_validator': (__package__, '.deprecated.class_validators'), - 'validator': (__package__, '.deprecated.class_validators'), - 'BaseConfig': (__package__, '.deprecated.config'), - 'Extra': (__package__, '.deprecated.config'), - 'parse_obj_as': (__package__, '.deprecated.tools'), - 'schema_of': (__package__, '.deprecated.tools'), - 'schema_json_of': (__package__, '.deprecated.tools'), - 'FieldValidationInfo': ('pydantic_core', '.core_schema'), -} - -_getattr_migration = getattr_migration(__name__) - - -def __getattr__(attr_name: str) -> object: - dynamic_attr = _dynamic_imports.get(attr_name) - if dynamic_attr is None: - return _getattr_migration(attr_name) - - package, module_name = dynamic_attr - - from importlib import import_module - - if module_name == '__module__': - return import_module(f'.{attr_name}', package=package) - else: - module = import_module(module_name, package=package) - return getattr(module, attr_name) - - -def __dir__() -> 'list[str]': - return list(__all__) +] diff --git a/lib/pydantic/v1/_hypothesis_plugin.py b/lib/pydantic/_hypothesis_plugin.py similarity index 97% rename from lib/pydantic/v1/_hypothesis_plugin.py rename to lib/pydantic/_hypothesis_plugin.py index 0c529620..a56d2b98 100644 --- a/lib/pydantic/v1/_hypothesis_plugin.py +++ b/lib/pydantic/_hypothesis_plugin.py @@ -10,7 +10,7 @@ Pydantic is installed. 
See also: https://hypothesis.readthedocs.io/en/latest/strategies.html#registering-strategies-via-setuptools-entry-points https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.register_type_strategy https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov -https://docs.pydantic.dev/usage/types/#pydantic-types +https://pydantic-docs.helpmanual.io/usage/types/#pydantic-types Note that because our motivation is to *improve user experience*, the strategies are always sound (never generate invalid data) but sacrifice completeness for @@ -46,7 +46,7 @@ from pydantic.utils import lenient_issubclass # # conlist() and conset() are unsupported for now, because the workarounds for # Cython and Hypothesis to handle parametrized generic types are incompatible. -# We are rethinking Hypothesis compatibility in Pydantic v2. +# Once Cython can support 'normal' generics we'll revisit this. # Emails try: @@ -168,11 +168,6 @@ st.register_type_strategy(pydantic.StrictBool, st.booleans()) st.register_type_strategy(pydantic.StrictStr, st.text()) -# FutureDate, PastDate -st.register_type_strategy(pydantic.FutureDate, st.dates(min_value=datetime.date.today() + datetime.timedelta(days=1))) -st.register_type_strategy(pydantic.PastDate, st.dates(max_value=datetime.date.today() - datetime.timedelta(days=1))) - - # Constrained-type resolver functions # # For these ones, we actually want to inspect the type in order to work out a diff --git a/lib/pydantic/_internal/__init__.py b/lib/pydantic/_internal/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pydantic/_internal/_config.py b/lib/pydantic/_internal/_config.py deleted file mode 100644 index 52c4cc42..00000000 --- a/lib/pydantic/_internal/_config.py +++ /dev/null @@ -1,322 +0,0 @@ -from __future__ import annotations as _annotations - -import warnings -from contextlib import contextmanager -from typing import ( - TYPE_CHECKING, - Any, - Callable, - cast, -) - 
-from pydantic_core import core_schema -from typing_extensions import ( - Literal, - Self, -) - -from ..aliases import AliasGenerator -from ..config import ConfigDict, ExtraValues, JsonDict, JsonEncoder, JsonSchemaExtraCallable -from ..errors import PydanticUserError -from ..warnings import PydanticDeprecatedSince20 - -if not TYPE_CHECKING: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - -if TYPE_CHECKING: - from .._internal._schema_generation_shared import GenerateSchema - -DEPRECATION_MESSAGE = 'Support for class-based `config` is deprecated, use ConfigDict instead.' - - -class ConfigWrapper: - """Internal wrapper for Config which exposes ConfigDict items as attributes.""" - - __slots__ = ('config_dict',) - - config_dict: ConfigDict - - # all annotations are copied directly from ConfigDict, and should be kept up to date, a test will fail if they - # stop matching - title: str | None - str_to_lower: bool - str_to_upper: bool - str_strip_whitespace: bool - str_min_length: int - str_max_length: int | None - extra: ExtraValues | None - frozen: bool - populate_by_name: bool - use_enum_values: bool - validate_assignment: bool - arbitrary_types_allowed: bool - from_attributes: bool - # whether to use the actual key provided in the data (e.g. alias or first alias for "field required" errors) instead of field_names - # to construct error `loc`s, default `True` - loc_by_alias: bool - alias_generator: Callable[[str], str] | AliasGenerator | None - ignored_types: tuple[type, ...] 
- allow_inf_nan: bool - json_schema_extra: JsonDict | JsonSchemaExtraCallable | None - json_encoders: dict[type[object], JsonEncoder] | None - - # new in V2 - strict: bool - # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never' - revalidate_instances: Literal['always', 'never', 'subclass-instances'] - ser_json_timedelta: Literal['iso8601', 'float'] - ser_json_bytes: Literal['utf8', 'base64'] - ser_json_inf_nan: Literal['null', 'constants'] - # whether to validate default values during validation, default False - validate_default: bool - validate_return: bool - protected_namespaces: tuple[str, ...] - hide_input_in_errors: bool - defer_build: bool - plugin_settings: dict[str, object] | None - schema_generator: type[GenerateSchema] | None - json_schema_serialization_defaults_required: bool - json_schema_mode_override: Literal['validation', 'serialization', None] - coerce_numbers_to_str: bool - regex_engine: Literal['rust-regex', 'python-re'] - validation_error_cause: bool - - def __init__(self, config: ConfigDict | dict[str, Any] | type[Any] | None, *, check: bool = True): - if check: - self.config_dict = prepare_config(config) - else: - self.config_dict = cast(ConfigDict, config) - - @classmethod - def for_model(cls, bases: tuple[type[Any], ...], namespace: dict[str, Any], kwargs: dict[str, Any]) -> Self: - """Build a new `ConfigWrapper` instance for a `BaseModel`. - - The config wrapper built based on (in descending order of priority): - - options from `kwargs` - - options from the `namespace` - - options from the base classes (`bases`) - - Args: - bases: A tuple of base classes. - namespace: The namespace of the class being created. - kwargs: The kwargs passed to the class being created. - - Returns: - A `ConfigWrapper` instance for `BaseModel`. 
- """ - config_new = ConfigDict() - for base in bases: - config = getattr(base, 'model_config', None) - if config: - config_new.update(config.copy()) - - config_class_from_namespace = namespace.get('Config') - config_dict_from_namespace = namespace.get('model_config') - - if config_class_from_namespace and config_dict_from_namespace: - raise PydanticUserError('"Config" and "model_config" cannot be used together', code='config-both') - - config_from_namespace = config_dict_from_namespace or prepare_config(config_class_from_namespace) - - config_new.update(config_from_namespace) - - for k in list(kwargs.keys()): - if k in config_keys: - config_new[k] = kwargs.pop(k) - - return cls(config_new) - - # we don't show `__getattr__` to type checkers so missing attributes cause errors - if not TYPE_CHECKING: # pragma: no branch - - def __getattr__(self, name: str) -> Any: - try: - return self.config_dict[name] - except KeyError: - try: - return config_defaults[name] - except KeyError: - raise AttributeError(f'Config has no attribute {name!r}') from None - - def core_config(self, obj: Any) -> core_schema.CoreConfig: - """Create a pydantic-core config, `obj` is just used to populate `title` if not set in config. - - Pass `obj=None` if you do not want to attempt to infer the `title`. - - We don't use getattr here since we don't want to populate with defaults. - - Args: - obj: An object used to populate `title` if not set in config. - - Returns: - A `CoreConfig` object created from config. 
- """ - - def dict_not_none(**kwargs: Any) -> Any: - return {k: v for k, v in kwargs.items() if v is not None} - - core_config = core_schema.CoreConfig( - **dict_not_none( - title=self.config_dict.get('title') or (obj and obj.__name__), - extra_fields_behavior=self.config_dict.get('extra'), - allow_inf_nan=self.config_dict.get('allow_inf_nan'), - populate_by_name=self.config_dict.get('populate_by_name'), - str_strip_whitespace=self.config_dict.get('str_strip_whitespace'), - str_to_lower=self.config_dict.get('str_to_lower'), - str_to_upper=self.config_dict.get('str_to_upper'), - strict=self.config_dict.get('strict'), - ser_json_timedelta=self.config_dict.get('ser_json_timedelta'), - ser_json_bytes=self.config_dict.get('ser_json_bytes'), - ser_json_inf_nan=self.config_dict.get('ser_json_inf_nan'), - from_attributes=self.config_dict.get('from_attributes'), - loc_by_alias=self.config_dict.get('loc_by_alias'), - revalidate_instances=self.config_dict.get('revalidate_instances'), - validate_default=self.config_dict.get('validate_default'), - str_max_length=self.config_dict.get('str_max_length'), - str_min_length=self.config_dict.get('str_min_length'), - hide_input_in_errors=self.config_dict.get('hide_input_in_errors'), - coerce_numbers_to_str=self.config_dict.get('coerce_numbers_to_str'), - regex_engine=self.config_dict.get('regex_engine'), - validation_error_cause=self.config_dict.get('validation_error_cause'), - ) - ) - return core_config - - def __repr__(self): - c = ', '.join(f'{k}={v!r}' for k, v in self.config_dict.items()) - return f'ConfigWrapper({c})' - - -class ConfigWrapperStack: - """A stack of `ConfigWrapper` instances.""" - - def __init__(self, config_wrapper: ConfigWrapper): - self._config_wrapper_stack: list[ConfigWrapper] = [config_wrapper] - - @property - def tail(self) -> ConfigWrapper: - return self._config_wrapper_stack[-1] - - @contextmanager - def push(self, config_wrapper: ConfigWrapper | ConfigDict | None): - if config_wrapper is None: - yield - 
return - - if not isinstance(config_wrapper, ConfigWrapper): - config_wrapper = ConfigWrapper(config_wrapper, check=False) - - self._config_wrapper_stack.append(config_wrapper) - try: - yield - finally: - self._config_wrapper_stack.pop() - - -config_defaults = ConfigDict( - title=None, - str_to_lower=False, - str_to_upper=False, - str_strip_whitespace=False, - str_min_length=0, - str_max_length=None, - # let the model / dataclass decide how to handle it - extra=None, - frozen=False, - populate_by_name=False, - use_enum_values=False, - validate_assignment=False, - arbitrary_types_allowed=False, - from_attributes=False, - loc_by_alias=True, - alias_generator=None, - ignored_types=(), - allow_inf_nan=True, - json_schema_extra=None, - strict=False, - revalidate_instances='never', - ser_json_timedelta='iso8601', - ser_json_bytes='utf8', - ser_json_inf_nan='null', - validate_default=False, - validate_return=False, - protected_namespaces=('model_',), - hide_input_in_errors=False, - json_encoders=None, - defer_build=False, - plugin_settings=None, - schema_generator=None, - json_schema_serialization_defaults_required=False, - json_schema_mode_override=None, - coerce_numbers_to_str=False, - regex_engine='rust-regex', - validation_error_cause=False, -) - - -def prepare_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict: - """Create a `ConfigDict` instance from an existing dict, a class (e.g. old class-based config) or None. - - Args: - config: The input config. - - Returns: - A ConfigDict object created from config. 
- """ - if config is None: - return ConfigDict() - - if not isinstance(config, dict): - warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning) - config = {k: getattr(config, k) for k in dir(config) if not k.startswith('__')} - - config_dict = cast(ConfigDict, config) - check_deprecated(config_dict) - return config_dict - - -config_keys = set(ConfigDict.__annotations__.keys()) - - -V2_REMOVED_KEYS = { - 'allow_mutation', - 'error_msg_templates', - 'fields', - 'getter_dict', - 'smart_union', - 'underscore_attrs_are_private', - 'json_loads', - 'json_dumps', - 'copy_on_model_validation', - 'post_init_call', -} -V2_RENAMED_KEYS = { - 'allow_population_by_field_name': 'populate_by_name', - 'anystr_lower': 'str_to_lower', - 'anystr_strip_whitespace': 'str_strip_whitespace', - 'anystr_upper': 'str_to_upper', - 'keep_untouched': 'ignored_types', - 'max_anystr_length': 'str_max_length', - 'min_anystr_length': 'str_min_length', - 'orm_mode': 'from_attributes', - 'schema_extra': 'json_schema_extra', - 'validate_all': 'validate_default', -} - - -def check_deprecated(config_dict: ConfigDict) -> None: - """Check for deprecated config keys and warn the user. - - Args: - config_dict: The input config. 
- """ - deprecated_removed_keys = V2_REMOVED_KEYS & config_dict.keys() - deprecated_renamed_keys = V2_RENAMED_KEYS.keys() & config_dict.keys() - if deprecated_removed_keys or deprecated_renamed_keys: - renamings = {k: V2_RENAMED_KEYS[k] for k in sorted(deprecated_renamed_keys)} - renamed_bullets = [f'* {k!r} has been renamed to {v!r}' for k, v in renamings.items()] - removed_bullets = [f'* {k!r} has been removed' for k in sorted(deprecated_removed_keys)] - message = '\n'.join(['Valid config keys have changed in V2:'] + renamed_bullets + removed_bullets) - warnings.warn(message, UserWarning) diff --git a/lib/pydantic/_internal/_core_metadata.py b/lib/pydantic/_internal/_core_metadata.py deleted file mode 100644 index 296d49f5..00000000 --- a/lib/pydantic/_internal/_core_metadata.py +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import annotations as _annotations - -import typing -from typing import Any - -import typing_extensions - -if typing.TYPE_CHECKING: - from ._schema_generation_shared import ( - CoreSchemaOrField as CoreSchemaOrField, - ) - from ._schema_generation_shared import ( - GetJsonSchemaFunction, - ) - - -class CoreMetadata(typing_extensions.TypedDict, total=False): - """A `TypedDict` for holding the metadata dict of the schema. - - Attributes: - pydantic_js_functions: List of JSON schema functions. - pydantic_js_prefer_positional_arguments: Whether JSON schema generator will - prefer positional over keyword arguments for an 'arguments' schema. - """ - - pydantic_js_functions: list[GetJsonSchemaFunction] - pydantic_js_annotation_functions: list[GetJsonSchemaFunction] - - # If `pydantic_js_prefer_positional_arguments` is True, the JSON schema generator will - # prefer positional over keyword arguments for an 'arguments' schema. 
- pydantic_js_prefer_positional_arguments: bool | None - - pydantic_typed_dict_cls: type[Any] | None # TODO: Consider moving this into the pydantic-core TypedDictSchema - - -class CoreMetadataHandler: - """Because the metadata field in pydantic_core is of type `Any`, we can't assume much about its contents. - - This class is used to interact with the metadata field on a CoreSchema object in a consistent - way throughout pydantic. - """ - - __slots__ = ('_schema',) - - def __init__(self, schema: CoreSchemaOrField): - self._schema = schema - - metadata = schema.get('metadata') - if metadata is None: - schema['metadata'] = CoreMetadata() - elif not isinstance(metadata, dict): - raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.') - - @property - def metadata(self) -> CoreMetadata: - """Retrieves the metadata dict from the schema, initializing it to a dict if it is None - and raises an error if it is not a dict. - """ - metadata = self._schema.get('metadata') - if metadata is None: - self._schema['metadata'] = metadata = CoreMetadata() - if not isinstance(metadata, dict): - raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.') - return metadata - - -def build_metadata_dict( - *, # force keyword arguments to make it easier to modify this signature in a backwards-compatible way - js_functions: list[GetJsonSchemaFunction] | None = None, - js_annotation_functions: list[GetJsonSchemaFunction] | None = None, - js_prefer_positional_arguments: bool | None = None, - typed_dict_cls: type[Any] | None = None, - initial_metadata: Any | None = None, -) -> Any: - """Builds a dict to use as the metadata field of a CoreSchema object in a manner that is consistent - with the CoreMetadataHandler class. 
- """ - if initial_metadata is not None and not isinstance(initial_metadata, dict): - raise TypeError(f'CoreSchema metadata should be a dict; got {initial_metadata!r}.') - - metadata = CoreMetadata( - pydantic_js_functions=js_functions or [], - pydantic_js_annotation_functions=js_annotation_functions or [], - pydantic_js_prefer_positional_arguments=js_prefer_positional_arguments, - pydantic_typed_dict_cls=typed_dict_cls, - ) - metadata = {k: v for k, v in metadata.items() if v is not None} - - if initial_metadata is not None: - metadata = {**initial_metadata, **metadata} - - return metadata diff --git a/lib/pydantic/_internal/_core_utils.py b/lib/pydantic/_internal/_core_utils.py deleted file mode 100644 index e74c74ac..00000000 --- a/lib/pydantic/_internal/_core_utils.py +++ /dev/null @@ -1,570 +0,0 @@ -from __future__ import annotations - -import os -from collections import defaultdict -from typing import ( - Any, - Callable, - Hashable, - TypeVar, - Union, -) - -from pydantic_core import CoreSchema, core_schema -from pydantic_core import validate_core_schema as _validate_core_schema -from typing_extensions import TypeAliasType, TypeGuard, get_args, get_origin - -from . 
import _repr -from ._typing_extra import is_generic_alias - -AnyFunctionSchema = Union[ - core_schema.AfterValidatorFunctionSchema, - core_schema.BeforeValidatorFunctionSchema, - core_schema.WrapValidatorFunctionSchema, - core_schema.PlainValidatorFunctionSchema, -] - - -FunctionSchemaWithInnerSchema = Union[ - core_schema.AfterValidatorFunctionSchema, - core_schema.BeforeValidatorFunctionSchema, - core_schema.WrapValidatorFunctionSchema, -] - -CoreSchemaField = Union[ - core_schema.ModelField, core_schema.DataclassField, core_schema.TypedDictField, core_schema.ComputedField -] -CoreSchemaOrField = Union[core_schema.CoreSchema, CoreSchemaField] - -_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field', 'model-field', 'computed-field'} -_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after', 'function-wrap'} -_LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES = {'list', 'set', 'frozenset'} - -_DEFINITIONS_CACHE_METADATA_KEY = 'pydantic.definitions_cache' - -TAGGED_UNION_TAG_KEY = 'pydantic.internal.tagged_union_tag' -""" -Used in a `Tag` schema to specify the tag used for a discriminated union. -""" -HAS_INVALID_SCHEMAS_METADATA_KEY = 'pydantic.internal.invalid' -"""Used to mark a schema that is invalid because it refers to a definition that was not yet defined when the -schema was first encountered. 
-""" - - -def is_core_schema( - schema: CoreSchemaOrField, -) -> TypeGuard[CoreSchema]: - return schema['type'] not in _CORE_SCHEMA_FIELD_TYPES - - -def is_core_schema_field( - schema: CoreSchemaOrField, -) -> TypeGuard[CoreSchemaField]: - return schema['type'] in _CORE_SCHEMA_FIELD_TYPES - - -def is_function_with_inner_schema( - schema: CoreSchemaOrField, -) -> TypeGuard[FunctionSchemaWithInnerSchema]: - return schema['type'] in _FUNCTION_WITH_INNER_SCHEMA_TYPES - - -def is_list_like_schema_with_items_schema( - schema: CoreSchema, -) -> TypeGuard[core_schema.ListSchema | core_schema.SetSchema | core_schema.FrozenSetSchema]: - return schema['type'] in _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES - - -def get_type_ref(type_: type[Any], args_override: tuple[type[Any], ...] | None = None) -> str: - """Produces the ref to be used for this type by pydantic_core's core schemas. - - This `args_override` argument was added for the purpose of creating valid recursive references - when creating generic models without needing to create a concrete class. - """ - origin = get_origin(type_) or type_ - - args = get_args(type_) if is_generic_alias(type_) else (args_override or ()) - generic_metadata = getattr(type_, '__pydantic_generic_metadata__', None) - if generic_metadata: - origin = generic_metadata['origin'] or origin - args = generic_metadata['args'] or args - - module_name = getattr(origin, '__module__', '') - if isinstance(origin, TypeAliasType): - type_ref = f'{module_name}.{origin.__name__}:{id(origin)}' - else: - try: - qualname = getattr(origin, '__qualname__', f'') - except Exception: - qualname = getattr(origin, '__qualname__', '') - type_ref = f'{module_name}.{qualname}:{id(origin)}' - - arg_refs: list[str] = [] - for arg in args: - if isinstance(arg, str): - # Handle string literals as a special case; we may be able to remove this special handling if we - # wrap them in a ForwardRef at some point. 
- arg_ref = f'{arg}:str-{id(arg)}' - else: - arg_ref = f'{_repr.display_as_type(arg)}:{id(arg)}' - arg_refs.append(arg_ref) - if arg_refs: - type_ref = f'{type_ref}[{",".join(arg_refs)}]' - return type_ref - - -def get_ref(s: core_schema.CoreSchema) -> None | str: - """Get the ref from the schema if it has one. - This exists just for type checking to work correctly. - """ - return s.get('ref', None) - - -def collect_definitions(schema: core_schema.CoreSchema) -> dict[str, core_schema.CoreSchema]: - defs: dict[str, CoreSchema] = {} - - def _record_valid_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema: - ref = get_ref(s) - if ref: - defs[ref] = s - return recurse(s, _record_valid_refs) - - walk_core_schema(schema, _record_valid_refs) - - return defs - - -def define_expected_missing_refs( - schema: core_schema.CoreSchema, allowed_missing_refs: set[str] -) -> core_schema.CoreSchema | None: - if not allowed_missing_refs: - # in this case, there are no missing refs to potentially substitute, so there's no need to walk the schema - # this is a common case (will be hit for all non-generic models), so it's worth optimizing for - return None - - refs = collect_definitions(schema).keys() - - expected_missing_refs = allowed_missing_refs.difference(refs) - if expected_missing_refs: - definitions: list[core_schema.CoreSchema] = [ - # TODO: Replace this with a (new) CoreSchema that, if present at any level, makes validation fail - # Issue: https://github.com/pydantic/pydantic-core/issues/619 - core_schema.none_schema(ref=ref, metadata={HAS_INVALID_SCHEMAS_METADATA_KEY: True}) - for ref in expected_missing_refs - ] - return core_schema.definitions_schema(schema, definitions) - return None - - -def collect_invalid_schemas(schema: core_schema.CoreSchema) -> bool: - invalid = False - - def _is_schema_valid(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema: - nonlocal invalid - if 'metadata' in s: - metadata = s['metadata'] - if 
HAS_INVALID_SCHEMAS_METADATA_KEY in metadata: - invalid = metadata[HAS_INVALID_SCHEMAS_METADATA_KEY] - return s - return recurse(s, _is_schema_valid) - - walk_core_schema(schema, _is_schema_valid) - return invalid - - -T = TypeVar('T') - - -Recurse = Callable[[core_schema.CoreSchema, 'Walk'], core_schema.CoreSchema] -Walk = Callable[[core_schema.CoreSchema, Recurse], core_schema.CoreSchema] - -# TODO: Should we move _WalkCoreSchema into pydantic_core proper? -# Issue: https://github.com/pydantic/pydantic-core/issues/615 - - -class _WalkCoreSchema: - def __init__(self): - self._schema_type_to_method = self._build_schema_type_to_method() - - def _build_schema_type_to_method(self) -> dict[core_schema.CoreSchemaType, Recurse]: - mapping: dict[core_schema.CoreSchemaType, Recurse] = {} - key: core_schema.CoreSchemaType - for key in get_args(core_schema.CoreSchemaType): - method_name = f"handle_{key.replace('-', '_')}_schema" - mapping[key] = getattr(self, method_name, self._handle_other_schemas) - return mapping - - def walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema: - return f(schema, self._walk) - - def _walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema: - schema = self._schema_type_to_method[schema['type']](schema.copy(), f) - ser_schema: core_schema.SerSchema | None = schema.get('serialization') # type: ignore - if ser_schema: - schema['serialization'] = self._handle_ser_schemas(ser_schema, f) - return schema - - def _handle_other_schemas(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema: - sub_schema = schema.get('schema', None) - if sub_schema is not None: - schema['schema'] = self.walk(sub_schema, f) # type: ignore - return schema - - def _handle_ser_schemas(self, ser_schema: core_schema.SerSchema, f: Walk) -> core_schema.SerSchema: - schema: core_schema.CoreSchema | None = ser_schema.get('schema', None) - if schema is not None: - ser_schema['schema'] = self.walk(schema, f) # type: 
ignore - return_schema: core_schema.CoreSchema | None = ser_schema.get('return_schema', None) - if return_schema is not None: - ser_schema['return_schema'] = self.walk(return_schema, f) # type: ignore - return ser_schema - - def handle_definitions_schema(self, schema: core_schema.DefinitionsSchema, f: Walk) -> core_schema.CoreSchema: - new_definitions: list[core_schema.CoreSchema] = [] - for definition in schema['definitions']: - if 'schema_ref' in definition and 'ref' in definition: - # This indicates a purposely indirect reference - # We want to keep such references around for implications related to JSON schema, etc.: - new_definitions.append(definition) - # However, we still need to walk the referenced definition: - self.walk(definition, f) - continue - - updated_definition = self.walk(definition, f) - if 'ref' in updated_definition: - # If the updated definition schema doesn't have a 'ref', it shouldn't go in the definitions - # This is most likely to happen due to replacing something with a definition reference, in - # which case it should certainly not go in the definitions list - new_definitions.append(updated_definition) - new_inner_schema = self.walk(schema['schema'], f) - - if not new_definitions and len(schema) == 3: - # This means we'd be returning a "trivial" definitions schema that just wrapped the inner schema - return new_inner_schema - - new_schema = schema.copy() - new_schema['schema'] = new_inner_schema - new_schema['definitions'] = new_definitions - return new_schema - - def handle_list_schema(self, schema: core_schema.ListSchema, f: Walk) -> core_schema.CoreSchema: - items_schema = schema.get('items_schema') - if items_schema is not None: - schema['items_schema'] = self.walk(items_schema, f) - return schema - - def handle_set_schema(self, schema: core_schema.SetSchema, f: Walk) -> core_schema.CoreSchema: - items_schema = schema.get('items_schema') - if items_schema is not None: - schema['items_schema'] = self.walk(items_schema, f) - return 
schema - - def handle_frozenset_schema(self, schema: core_schema.FrozenSetSchema, f: Walk) -> core_schema.CoreSchema: - items_schema = schema.get('items_schema') - if items_schema is not None: - schema['items_schema'] = self.walk(items_schema, f) - return schema - - def handle_generator_schema(self, schema: core_schema.GeneratorSchema, f: Walk) -> core_schema.CoreSchema: - items_schema = schema.get('items_schema') - if items_schema is not None: - schema['items_schema'] = self.walk(items_schema, f) - return schema - - def handle_tuple_schema(self, schema: core_schema.TupleSchema, f: Walk) -> core_schema.CoreSchema: - schema['items_schema'] = [self.walk(v, f) for v in schema['items_schema']] - return schema - - def handle_dict_schema(self, schema: core_schema.DictSchema, f: Walk) -> core_schema.CoreSchema: - keys_schema = schema.get('keys_schema') - if keys_schema is not None: - schema['keys_schema'] = self.walk(keys_schema, f) - values_schema = schema.get('values_schema') - if values_schema: - schema['values_schema'] = self.walk(values_schema, f) - return schema - - def handle_function_schema(self, schema: AnyFunctionSchema, f: Walk) -> core_schema.CoreSchema: - if not is_function_with_inner_schema(schema): - return schema - schema['schema'] = self.walk(schema['schema'], f) - return schema - - def handle_union_schema(self, schema: core_schema.UnionSchema, f: Walk) -> core_schema.CoreSchema: - new_choices: list[CoreSchema | tuple[CoreSchema, str]] = [] - for v in schema['choices']: - if isinstance(v, tuple): - new_choices.append((self.walk(v[0], f), v[1])) - else: - new_choices.append(self.walk(v, f)) - schema['choices'] = new_choices - return schema - - def handle_tagged_union_schema(self, schema: core_schema.TaggedUnionSchema, f: Walk) -> core_schema.CoreSchema: - new_choices: dict[Hashable, core_schema.CoreSchema] = {} - for k, v in schema['choices'].items(): - new_choices[k] = v if isinstance(v, (str, int)) else self.walk(v, f) - schema['choices'] = new_choices - 
return schema - - def handle_chain_schema(self, schema: core_schema.ChainSchema, f: Walk) -> core_schema.CoreSchema: - schema['steps'] = [self.walk(v, f) for v in schema['steps']] - return schema - - def handle_lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema, f: Walk) -> core_schema.CoreSchema: - schema['lax_schema'] = self.walk(schema['lax_schema'], f) - schema['strict_schema'] = self.walk(schema['strict_schema'], f) - return schema - - def handle_json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema, f: Walk) -> core_schema.CoreSchema: - schema['json_schema'] = self.walk(schema['json_schema'], f) - schema['python_schema'] = self.walk(schema['python_schema'], f) - return schema - - def handle_model_fields_schema(self, schema: core_schema.ModelFieldsSchema, f: Walk) -> core_schema.CoreSchema: - extras_schema = schema.get('extras_schema') - if extras_schema is not None: - schema['extras_schema'] = self.walk(extras_schema, f) - replaced_fields: dict[str, core_schema.ModelField] = {} - replaced_computed_fields: list[core_schema.ComputedField] = [] - for computed_field in schema.get('computed_fields', ()): - replaced_field = computed_field.copy() - replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f) - replaced_computed_fields.append(replaced_field) - if replaced_computed_fields: - schema['computed_fields'] = replaced_computed_fields - for k, v in schema['fields'].items(): - replaced_field = v.copy() - replaced_field['schema'] = self.walk(v['schema'], f) - replaced_fields[k] = replaced_field - schema['fields'] = replaced_fields - return schema - - def handle_typed_dict_schema(self, schema: core_schema.TypedDictSchema, f: Walk) -> core_schema.CoreSchema: - extras_schema = schema.get('extras_schema') - if extras_schema is not None: - schema['extras_schema'] = self.walk(extras_schema, f) - replaced_computed_fields: list[core_schema.ComputedField] = [] - for computed_field in schema.get('computed_fields', ()): - 
replaced_field = computed_field.copy() - replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f) - replaced_computed_fields.append(replaced_field) - if replaced_computed_fields: - schema['computed_fields'] = replaced_computed_fields - replaced_fields: dict[str, core_schema.TypedDictField] = {} - for k, v in schema['fields'].items(): - replaced_field = v.copy() - replaced_field['schema'] = self.walk(v['schema'], f) - replaced_fields[k] = replaced_field - schema['fields'] = replaced_fields - return schema - - def handle_dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema, f: Walk) -> core_schema.CoreSchema: - replaced_fields: list[core_schema.DataclassField] = [] - replaced_computed_fields: list[core_schema.ComputedField] = [] - for computed_field in schema.get('computed_fields', ()): - replaced_field = computed_field.copy() - replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f) - replaced_computed_fields.append(replaced_field) - if replaced_computed_fields: - schema['computed_fields'] = replaced_computed_fields - for field in schema['fields']: - replaced_field = field.copy() - replaced_field['schema'] = self.walk(field['schema'], f) - replaced_fields.append(replaced_field) - schema['fields'] = replaced_fields - return schema - - def handle_arguments_schema(self, schema: core_schema.ArgumentsSchema, f: Walk) -> core_schema.CoreSchema: - replaced_arguments_schema: list[core_schema.ArgumentsParameter] = [] - for param in schema['arguments_schema']: - replaced_param = param.copy() - replaced_param['schema'] = self.walk(param['schema'], f) - replaced_arguments_schema.append(replaced_param) - schema['arguments_schema'] = replaced_arguments_schema - if 'var_args_schema' in schema: - schema['var_args_schema'] = self.walk(schema['var_args_schema'], f) - if 'var_kwargs_schema' in schema: - schema['var_kwargs_schema'] = self.walk(schema['var_kwargs_schema'], f) - return schema - - def handle_call_schema(self, 
schema: core_schema.CallSchema, f: Walk) -> core_schema.CoreSchema: - schema['arguments_schema'] = self.walk(schema['arguments_schema'], f) - if 'return_schema' in schema: - schema['return_schema'] = self.walk(schema['return_schema'], f) - return schema - - -_dispatch = _WalkCoreSchema().walk - - -def walk_core_schema(schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema: - """Recursively traverse a CoreSchema. - - Args: - schema (core_schema.CoreSchema): The CoreSchema to process, it will not be modified. - f (Walk): A function to apply. This function takes two arguments: - 1. The current CoreSchema that is being processed - (not the same one you passed into this function, one level down). - 2. The "next" `f` to call. This lets you for example use `f=functools.partial(some_method, some_context)` - to pass data down the recursive calls without using globals or other mutable state. - - Returns: - core_schema.CoreSchema: A processed CoreSchema. - """ - return f(schema.copy(), _dispatch) - - -def simplify_schema_references(schema: core_schema.CoreSchema) -> core_schema.CoreSchema: # noqa: C901 - definitions: dict[str, core_schema.CoreSchema] = {} - ref_counts: dict[str, int] = defaultdict(int) - involved_in_recursion: dict[str, bool] = {} - current_recursion_ref_count: dict[str, int] = defaultdict(int) - - def collect_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema: - if s['type'] == 'definitions': - for definition in s['definitions']: - ref = get_ref(definition) - assert ref is not None - if ref not in definitions: - definitions[ref] = definition - recurse(definition, collect_refs) - return recurse(s['schema'], collect_refs) - else: - ref = get_ref(s) - if ref is not None: - new = recurse(s, collect_refs) - new_ref = get_ref(new) - if new_ref: - definitions[new_ref] = new - return core_schema.definition_reference_schema(schema_ref=ref) - else: - return recurse(s, collect_refs) - - schema = walk_core_schema(schema, 
collect_refs) - - def count_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema: - if s['type'] != 'definition-ref': - return recurse(s, count_refs) - ref = s['schema_ref'] - ref_counts[ref] += 1 - - if ref_counts[ref] >= 2: - # If this model is involved in a recursion this should be detected - # on its second encounter, we can safely stop the walk here. - if current_recursion_ref_count[ref] != 0: - involved_in_recursion[ref] = True - return s - - current_recursion_ref_count[ref] += 1 - recurse(definitions[ref], count_refs) - current_recursion_ref_count[ref] -= 1 - return s - - schema = walk_core_schema(schema, count_refs) - - assert all(c == 0 for c in current_recursion_ref_count.values()), 'this is a bug! please report it' - - def can_be_inlined(s: core_schema.DefinitionReferenceSchema, ref: str) -> bool: - if ref_counts[ref] > 1: - return False - if involved_in_recursion.get(ref, False): - return False - if 'serialization' in s: - return False - if 'metadata' in s: - metadata = s['metadata'] - for k in ( - 'pydantic_js_functions', - 'pydantic_js_annotation_functions', - 'pydantic.internal.union_discriminator', - ): - if k in metadata: - # we need to keep this as a ref - return False - return True - - def inline_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema: - if s['type'] == 'definition-ref': - ref = s['schema_ref'] - # Check if the reference is only used once, not involved in recursion and does not have - # any extra keys (like 'serialization') - if can_be_inlined(s, ref): - # Inline the reference by replacing the reference with the actual schema - new = definitions.pop(ref) - ref_counts[ref] -= 1 # because we just replaced it! 
- # put all other keys that were on the def-ref schema into the inlined version - # in particular this is needed for `serialization` - if 'serialization' in s: - new['serialization'] = s['serialization'] - s = recurse(new, inline_refs) - return s - else: - return recurse(s, inline_refs) - else: - return recurse(s, inline_refs) - - schema = walk_core_schema(schema, inline_refs) - - def_values = [v for v in definitions.values() if ref_counts[v['ref']] > 0] # type: ignore - - if def_values: - schema = core_schema.definitions_schema(schema=schema, definitions=def_values) - return schema - - -def _strip_metadata(schema: CoreSchema) -> CoreSchema: - def strip_metadata(s: CoreSchema, recurse: Recurse) -> CoreSchema: - s = s.copy() - s.pop('metadata', None) - if s['type'] == 'model-fields': - s = s.copy() - s['fields'] = {k: v.copy() for k, v in s['fields'].items()} - for field_name, field_schema in s['fields'].items(): - field_schema.pop('metadata', None) - s['fields'][field_name] = field_schema - computed_fields = s.get('computed_fields', None) - if computed_fields: - s['computed_fields'] = [cf.copy() for cf in computed_fields] - for cf in computed_fields: - cf.pop('metadata', None) - else: - s.pop('computed_fields', None) - elif s['type'] == 'model': - # remove some defaults - if s.get('custom_init', True) is False: - s.pop('custom_init') - if s.get('root_model', True) is False: - s.pop('root_model') - if {'title'}.issuperset(s.get('config', {}).keys()): - s.pop('config', None) - - return recurse(s, strip_metadata) - - return walk_core_schema(schema, strip_metadata) - - -def pretty_print_core_schema( - schema: CoreSchema, - include_metadata: bool = False, -) -> None: - """Pretty print a CoreSchema using rich. - This is intended for debugging purposes. - - Args: - schema: The CoreSchema to print. - include_metadata: Whether to include metadata in the output. Defaults to `False`. 
- """ - from rich import print # type: ignore # install it manually in your dev env - - if not include_metadata: - schema = _strip_metadata(schema) - - return print(schema) - - -def validate_core_schema(schema: CoreSchema) -> CoreSchema: - if 'PYDANTIC_SKIP_VALIDATING_CORE_SCHEMAS' in os.environ: - return schema - return _validate_core_schema(schema) diff --git a/lib/pydantic/_internal/_dataclasses.py b/lib/pydantic/_internal/_dataclasses.py deleted file mode 100644 index 1ec23044..00000000 --- a/lib/pydantic/_internal/_dataclasses.py +++ /dev/null @@ -1,225 +0,0 @@ -"""Private logic for creating pydantic dataclasses.""" -from __future__ import annotations as _annotations - -import dataclasses -import typing -import warnings -from functools import partial, wraps -from typing import Any, Callable, ClassVar - -from pydantic_core import ( - ArgsKwargs, - SchemaSerializer, - SchemaValidator, - core_schema, -) -from typing_extensions import TypeGuard - -from ..errors import PydanticUndefinedAnnotation -from ..fields import FieldInfo -from ..plugin._schema_validator import create_schema_validator -from ..warnings import PydanticDeprecatedSince20 -from . 
import _config, _decorators, _typing_extra -from ._fields import collect_dataclass_fields -from ._generate_schema import GenerateSchema -from ._generics import get_standard_typevars_map -from ._mock_val_ser import set_dataclass_mocks -from ._schema_generation_shared import CallbackGetCoreSchemaHandler -from ._signature import generate_pydantic_signature - -if typing.TYPE_CHECKING: - from ..config import ConfigDict - - class StandardDataclass(typing.Protocol): - __dataclass_fields__: ClassVar[dict[str, Any]] - __dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams` - __post_init__: ClassVar[Callable[..., None]] - - def __init__(self, *args: object, **kwargs: object) -> None: - pass - - class PydanticDataclass(StandardDataclass, typing.Protocol): - """A protocol containing attributes only available once a class has been decorated as a Pydantic dataclass. - - Attributes: - __pydantic_config__: Pydantic-specific configuration settings for the dataclass. - __pydantic_complete__: Whether dataclass building is completed, or if there are still undefined fields. - __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer. - __pydantic_decorators__: Metadata containing the decorators defined on the dataclass. - __pydantic_fields__: Metadata about the fields defined on the dataclass. - __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the dataclass. - __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the dataclass. 
- """ - - __pydantic_config__: ClassVar[ConfigDict] - __pydantic_complete__: ClassVar[bool] - __pydantic_core_schema__: ClassVar[core_schema.CoreSchema] - __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] - __pydantic_fields__: ClassVar[dict[str, FieldInfo]] - __pydantic_serializer__: ClassVar[SchemaSerializer] - __pydantic_validator__: ClassVar[SchemaValidator] - -else: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - - -def set_dataclass_fields(cls: type[StandardDataclass], types_namespace: dict[str, Any] | None = None) -> None: - """Collect and set `cls.__pydantic_fields__`. - - Args: - cls: The class. - types_namespace: The types namespace, defaults to `None`. - """ - typevars_map = get_standard_typevars_map(cls) - fields = collect_dataclass_fields(cls, types_namespace, typevars_map=typevars_map) - - cls.__pydantic_fields__ = fields # type: ignore - - -def complete_dataclass( - cls: type[Any], - config_wrapper: _config.ConfigWrapper, - *, - raise_errors: bool = True, - types_namespace: dict[str, Any] | None, -) -> bool: - """Finish building a pydantic dataclass. - - This logic is called on a class which has already been wrapped in `dataclasses.dataclass()`. - - This is somewhat analogous to `pydantic._internal._model_construction.complete_model_class`. - - Args: - cls: The class. - config_wrapper: The config wrapper instance. - raise_errors: Whether to raise errors, defaults to `True`. - types_namespace: The types namespace. - - Returns: - `True` if building a pydantic dataclass is successfully completed, `False` otherwise. - - Raises: - PydanticUndefinedAnnotation: If `raise_error` is `True` and there is an undefined annotations. 
- """ - if hasattr(cls, '__post_init_post_parse__'): - warnings.warn( - 'Support for `__post_init_post_parse__` has been dropped, the method will not be called', DeprecationWarning - ) - - if types_namespace is None: - types_namespace = _typing_extra.get_cls_types_namespace(cls) - - set_dataclass_fields(cls, types_namespace) - - typevars_map = get_standard_typevars_map(cls) - gen_schema = GenerateSchema( - config_wrapper, - types_namespace, - typevars_map, - ) - - # This needs to be called before we change the __init__ - sig = generate_pydantic_signature( - init=cls.__init__, - fields=cls.__pydantic_fields__, # type: ignore - config_wrapper=config_wrapper, - is_dataclass=True, - ) - - # dataclass.__init__ must be defined here so its `__qualname__` can be changed since functions can't be copied. - def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None: - __tracebackhide__ = True - s = __dataclass_self__ - s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s) - - __init__.__qualname__ = f'{cls.__qualname__}.__init__' - - cls.__init__ = __init__ # type: ignore - cls.__pydantic_config__ = config_wrapper.config_dict # type: ignore - cls.__signature__ = sig # type: ignore - get_core_schema = getattr(cls, '__get_pydantic_core_schema__', None) - try: - if get_core_schema: - schema = get_core_schema( - cls, - CallbackGetCoreSchemaHandler( - partial(gen_schema.generate_schema, from_dunder_get_core_schema=False), - gen_schema, - ref_mode='unpack', - ), - ) - else: - schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False) - except PydanticUndefinedAnnotation as e: - if raise_errors: - raise - set_dataclass_mocks(cls, cls.__name__, f'`{e.name}`') - return False - - core_config = config_wrapper.core_config(cls) - - try: - schema = gen_schema.clean_schema(schema) - except gen_schema.CollectedInvalid: - set_dataclass_mocks(cls, cls.__name__, 'all referenced types') - return False - - # We are about 
to set all the remaining required properties expected for this cast; - # __pydantic_decorators__ and __pydantic_fields__ should already be set - cls = typing.cast('type[PydanticDataclass]', cls) - # debug(schema) - - cls.__pydantic_core_schema__ = schema - cls.__pydantic_validator__ = validator = create_schema_validator( - schema, cls, cls.__module__, cls.__qualname__, 'dataclass', core_config, config_wrapper.plugin_settings - ) - cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config) - - if config_wrapper.validate_assignment: - - @wraps(cls.__setattr__) - def validated_setattr(instance: Any, __field: str, __value: str) -> None: - validator.validate_assignment(instance, __field, __value) - - cls.__setattr__ = validated_setattr.__get__(None, cls) # type: ignore - - return True - - -def is_builtin_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]: - """Returns True if a class is a stdlib dataclass and *not* a pydantic dataclass. - - We check that - - `_cls` is a dataclass - - `_cls` does not inherit from a processed pydantic dataclass (and thus have a `__pydantic_validator__`) - - `_cls` does not have any annotations that are not dataclass fields - e.g. - ```py - import dataclasses - - import pydantic.dataclasses - - @dataclasses.dataclass - class A: - x: int - - @pydantic.dataclasses.dataclass - class B(A): - y: int - ``` - In this case, when we first check `B`, we make an extra check and look at the annotations ('y'), - which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x') - - Args: - cls: The class. - - Returns: - `True` if the class is a stdlib dataclass, `False` otherwise. 
- """ - return ( - dataclasses.is_dataclass(_cls) - and not hasattr(_cls, '__pydantic_validator__') - and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {}))) - ) diff --git a/lib/pydantic/_internal/_decorators.py b/lib/pydantic/_internal/_decorators.py deleted file mode 100644 index 5672464c..00000000 --- a/lib/pydantic/_internal/_decorators.py +++ /dev/null @@ -1,791 +0,0 @@ -"""Logic related to validators applied to models etc. via the `@field_validator` and `@model_validator` decorators.""" -from __future__ import annotations as _annotations - -from collections import deque -from dataclasses import dataclass, field -from functools import cached_property, partial, partialmethod -from inspect import Parameter, Signature, isdatadescriptor, ismethoddescriptor, signature -from itertools import islice -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Iterable, TypeVar, Union - -from pydantic_core import PydanticUndefined, core_schema -from typing_extensions import Literal, TypeAlias, is_typeddict - -from ..errors import PydanticUserError -from ._core_utils import get_type_ref -from ._internal_dataclass import slots_true -from ._typing_extra import get_function_type_hints - -if TYPE_CHECKING: - from ..fields import ComputedFieldInfo - from ..functional_validators import FieldValidatorModes - - -@dataclass(**slots_true) -class ValidatorDecoratorInfo: - """A container for data from `@validator` so that we can access it - while building the pydantic-core schema. - - Attributes: - decorator_repr: A class variable representing the decorator string, '@validator'. - fields: A tuple of field names the validator should be called on. - mode: The proposed validator mode. - each_item: For complex objects (sets, lists etc.) whether to validate individual - elements rather than the whole object. - always: Whether this method and other validators should be called even if the value is missing. 
- check_fields: Whether to check that the fields actually exist on the model. - """ - - decorator_repr: ClassVar[str] = '@validator' - - fields: tuple[str, ...] - mode: Literal['before', 'after'] - each_item: bool - always: bool - check_fields: bool | None - - -@dataclass(**slots_true) -class FieldValidatorDecoratorInfo: - """A container for data from `@field_validator` so that we can access it - while building the pydantic-core schema. - - Attributes: - decorator_repr: A class variable representing the decorator string, '@field_validator'. - fields: A tuple of field names the validator should be called on. - mode: The proposed validator mode. - check_fields: Whether to check that the fields actually exist on the model. - """ - - decorator_repr: ClassVar[str] = '@field_validator' - - fields: tuple[str, ...] - mode: FieldValidatorModes - check_fields: bool | None - - -@dataclass(**slots_true) -class RootValidatorDecoratorInfo: - """A container for data from `@root_validator` so that we can access it - while building the pydantic-core schema. - - Attributes: - decorator_repr: A class variable representing the decorator string, '@root_validator'. - mode: The proposed validator mode. - """ - - decorator_repr: ClassVar[str] = '@root_validator' - mode: Literal['before', 'after'] - - -@dataclass(**slots_true) -class FieldSerializerDecoratorInfo: - """A container for data from `@field_serializer` so that we can access it - while building the pydantic-core schema. - - Attributes: - decorator_repr: A class variable representing the decorator string, '@field_serializer'. - fields: A tuple of field names the serializer should be called on. - mode: The proposed serializer mode. - return_type: The type of the serializer's return value. - when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`, - and `'json-unless-none'`. - check_fields: Whether to check that the fields actually exist on the model. 
- """ - - decorator_repr: ClassVar[str] = '@field_serializer' - fields: tuple[str, ...] - mode: Literal['plain', 'wrap'] - return_type: Any - when_used: core_schema.WhenUsed - check_fields: bool | None - - -@dataclass(**slots_true) -class ModelSerializerDecoratorInfo: - """A container for data from `@model_serializer` so that we can access it - while building the pydantic-core schema. - - Attributes: - decorator_repr: A class variable representing the decorator string, '@model_serializer'. - mode: The proposed serializer mode. - return_type: The type of the serializer's return value. - when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`, - and `'json-unless-none'`. - """ - - decorator_repr: ClassVar[str] = '@model_serializer' - mode: Literal['plain', 'wrap'] - return_type: Any - when_used: core_schema.WhenUsed - - -@dataclass(**slots_true) -class ModelValidatorDecoratorInfo: - """A container for data from `@model_validator` so that we can access it - while building the pydantic-core schema. - - Attributes: - decorator_repr: A class variable representing the decorator string, '@model_serializer'. - mode: The proposed serializer mode. 
- """ - - decorator_repr: ClassVar[str] = '@model_validator' - mode: Literal['wrap', 'before', 'after'] - - -DecoratorInfo: TypeAlias = """Union[ - ValidatorDecoratorInfo, - FieldValidatorDecoratorInfo, - RootValidatorDecoratorInfo, - FieldSerializerDecoratorInfo, - ModelSerializerDecoratorInfo, - ModelValidatorDecoratorInfo, - ComputedFieldInfo, -]""" - -ReturnType = TypeVar('ReturnType') -DecoratedType: TypeAlias = ( - 'Union[classmethod[Any, Any, ReturnType], staticmethod[Any, ReturnType], Callable[..., ReturnType], property]' -) - - -@dataclass # can't use slots here since we set attributes on `__post_init__` -class PydanticDescriptorProxy(Generic[ReturnType]): - """Wrap a classmethod, staticmethod, property or unbound function - and act as a descriptor that allows us to detect decorated items - from the class' attributes. - - This class' __get__ returns the wrapped item's __get__ result, - which makes it transparent for classmethods and staticmethods. - - Attributes: - wrapped: The decorator that has to be wrapped. - decorator_info: The decorator info. - shim: A wrapper function to wrap V1 style function. - """ - - wrapped: DecoratedType[ReturnType] - decorator_info: DecoratorInfo - shim: Callable[[Callable[..., Any]], Callable[..., Any]] | None = None - - def __post_init__(self): - for attr in 'setter', 'deleter': - if hasattr(self.wrapped, attr): - f = partial(self._call_wrapped_attr, name=attr) - setattr(self, attr, f) - - def _call_wrapped_attr(self, func: Callable[[Any], None], *, name: str) -> PydanticDescriptorProxy[ReturnType]: - self.wrapped = getattr(self.wrapped, name)(func) - return self - - def __get__(self, obj: object | None, obj_type: type[object] | None = None) -> PydanticDescriptorProxy[ReturnType]: - try: - return self.wrapped.__get__(obj, obj_type) - except AttributeError: - # not a descriptor, e.g. 
a partial object - return self.wrapped # type: ignore[return-value] - - def __set_name__(self, instance: Any, name: str) -> None: - if hasattr(self.wrapped, '__set_name__'): - self.wrapped.__set_name__(instance, name) # pyright: ignore[reportFunctionMemberAccess] - - def __getattr__(self, __name: str) -> Any: - """Forward checks for __isabstractmethod__ and such.""" - return getattr(self.wrapped, __name) - - -DecoratorInfoType = TypeVar('DecoratorInfoType', bound=DecoratorInfo) - - -@dataclass(**slots_true) -class Decorator(Generic[DecoratorInfoType]): - """A generic container class to join together the decorator metadata - (metadata from decorator itself, which we have when the - decorator is called but not when we are building the core-schema) - and the bound function (which we have after the class itself is created). - - Attributes: - cls_ref: The class ref. - cls_var_name: The decorated function name. - func: The decorated function. - shim: A wrapper function to wrap V1 style function. - info: The decorator info. - """ - - cls_ref: str - cls_var_name: str - func: Callable[..., Any] - shim: Callable[[Any], Any] | None - info: DecoratorInfoType - - @staticmethod - def build( - cls_: Any, - *, - cls_var_name: str, - shim: Callable[[Any], Any] | None, - info: DecoratorInfoType, - ) -> Decorator[DecoratorInfoType]: - """Build a new decorator. - - Args: - cls_: The class. - cls_var_name: The decorated function name. - shim: A wrapper function to wrap V1 style function. - info: The decorator info. - - Returns: - The new decorator instance. 
- """ - func = get_attribute_from_bases(cls_, cls_var_name) - if shim is not None: - func = shim(func) - func = unwrap_wrapped_function(func, unwrap_partial=False) - if not callable(func): - # This branch will get hit for classmethod properties - attribute = get_attribute_from_base_dicts(cls_, cls_var_name) # prevents the binding call to `__get__` - if isinstance(attribute, PydanticDescriptorProxy): - func = unwrap_wrapped_function(attribute.wrapped) - return Decorator( - cls_ref=get_type_ref(cls_), - cls_var_name=cls_var_name, - func=func, - shim=shim, - info=info, - ) - - def bind_to_cls(self, cls: Any) -> Decorator[DecoratorInfoType]: - """Bind the decorator to a class. - - Args: - cls: the class. - - Returns: - The new decorator instance. - """ - return self.build( - cls, - cls_var_name=self.cls_var_name, - shim=self.shim, - info=self.info, - ) - - -def get_bases(tp: type[Any]) -> tuple[type[Any], ...]: - """Get the base classes of a class or typeddict. - - Args: - tp: The type or class to get the bases. - - Returns: - The base classes. - """ - if is_typeddict(tp): - return tp.__orig_bases__ # type: ignore - try: - return tp.__bases__ - except AttributeError: - return () - - -def mro(tp: type[Any]) -> tuple[type[Any], ...]: - """Calculate the Method Resolution Order of bases using the C3 algorithm. - - See https://www.python.org/download/releases/2.3/mro/ - """ - # try to use the existing mro, for performance mainly - # but also because it helps verify the implementation below - if not is_typeddict(tp): - try: - return tp.__mro__ - except AttributeError: - # GenericAlias and some other cases - pass - - bases = get_bases(tp) - return (tp,) + mro_for_bases(bases) - - -def mro_for_bases(bases: tuple[type[Any], ...]) -> tuple[type[Any], ...]: - def merge_seqs(seqs: list[deque[type[Any]]]) -> Iterable[type[Any]]: - while True: - non_empty = [seq for seq in seqs if seq] - if not non_empty: - # Nothing left to process, we're done. 
- return - candidate: type[Any] | None = None - for seq in non_empty: # Find merge candidates among seq heads. - candidate = seq[0] - not_head = [s for s in non_empty if candidate in islice(s, 1, None)] - if not_head: - # Reject the candidate. - candidate = None - else: - break - if not candidate: - raise TypeError('Inconsistent hierarchy, no C3 MRO is possible') - yield candidate - for seq in non_empty: - # Remove candidate. - if seq[0] == candidate: - seq.popleft() - - seqs = [deque(mro(base)) for base in bases] + [deque(bases)] - return tuple(merge_seqs(seqs)) - - -_sentinel = object() - - -def get_attribute_from_bases(tp: type[Any] | tuple[type[Any], ...], name: str) -> Any: - """Get the attribute from the next class in the MRO that has it, - aiming to simulate calling the method on the actual class. - - The reason for iterating over the mro instead of just getting - the attribute (which would do that for us) is to support TypedDict, - which lacks a real __mro__, but can have a virtual one constructed - from its bases (as done here). - - Args: - tp: The type or class to search for the attribute. If a tuple, this is treated as a set of base classes. - name: The name of the attribute to retrieve. - - Returns: - Any: The attribute value, if found. - - Raises: - AttributeError: If the attribute is not found in any class in the MRO. - """ - if isinstance(tp, tuple): - for base in mro_for_bases(tp): - attribute = base.__dict__.get(name, _sentinel) - if attribute is not _sentinel: - attribute_get = getattr(attribute, '__get__', None) - if attribute_get is not None: - return attribute_get(None, tp) - return attribute - raise AttributeError(f'{name} not found in {tp}') - else: - try: - return getattr(tp, name) - except AttributeError: - return get_attribute_from_bases(mro(tp), name) - - -def get_attribute_from_base_dicts(tp: type[Any], name: str) -> Any: - """Get an attribute out of the `__dict__` following the MRO. 
- This prevents the call to `__get__` on the descriptor, and allows - us to get the original function for classmethod properties. - - Args: - tp: The type or class to search for the attribute. - name: The name of the attribute to retrieve. - - Returns: - Any: The attribute value, if found. - - Raises: - KeyError: If the attribute is not found in any class's `__dict__` in the MRO. - """ - for base in reversed(mro(tp)): - if name in base.__dict__: - return base.__dict__[name] - return tp.__dict__[name] # raise the error - - -@dataclass(**slots_true) -class DecoratorInfos: - """Mapping of name in the class namespace to decorator info. - - note that the name in the class namespace is the function or attribute name - not the field name! - """ - - validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(default_factory=dict) - field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]] = field(default_factory=dict) - root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(default_factory=dict) - field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]] = field(default_factory=dict) - model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]] = field(default_factory=dict) - model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]] = field(default_factory=dict) - computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(default_factory=dict) - - @staticmethod - def build(model_dc: type[Any]) -> DecoratorInfos: # noqa: C901 (ignore complexity) - """We want to collect all DecFunc instances that exist as - attributes in the namespace of the class (a BaseModel or dataclass) - that called us - But we want to collect these in the order of the bases - So instead of getting them all from the leaf class (the class that called us), - we traverse the bases from root (the oldest ancestor class) to leaf - and collect all of the instances as we go, taking care to replace - any duplicate ones with the last one we 
see to mimic how function overriding - works with inheritance. - If we do replace any functions we put the replacement into the position - the replaced function was in; that is, we maintain the order. - """ - # reminder: dicts are ordered and replacement does not alter the order - res = DecoratorInfos() - for base in reversed(mro(model_dc)[1:]): - existing: DecoratorInfos | None = base.__dict__.get('__pydantic_decorators__') - if existing is None: - existing = DecoratorInfos.build(base) - res.validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.validators.items()}) - res.field_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_validators.items()}) - res.root_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.root_validators.items()}) - res.field_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_serializers.items()}) - res.model_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_serializers.items()}) - res.model_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_validators.items()}) - res.computed_fields.update({k: v.bind_to_cls(model_dc) for k, v in existing.computed_fields.items()}) - - to_replace: list[tuple[str, Any]] = [] - - for var_name, var_value in vars(model_dc).items(): - if isinstance(var_value, PydanticDescriptorProxy): - info = var_value.decorator_info - if isinstance(info, ValidatorDecoratorInfo): - res.validators[var_name] = Decorator.build( - model_dc, cls_var_name=var_name, shim=var_value.shim, info=info - ) - elif isinstance(info, FieldValidatorDecoratorInfo): - res.field_validators[var_name] = Decorator.build( - model_dc, cls_var_name=var_name, shim=var_value.shim, info=info - ) - elif isinstance(info, RootValidatorDecoratorInfo): - res.root_validators[var_name] = Decorator.build( - model_dc, cls_var_name=var_name, shim=var_value.shim, info=info - ) - elif isinstance(info, FieldSerializerDecoratorInfo): - # check whether a 
serializer function is already registered for fields - for field_serializer_decorator in res.field_serializers.values(): - # check that each field has at most one serializer function. - # serializer functions for the same field in subclasses are allowed, - # and are treated as overrides - if field_serializer_decorator.cls_var_name == var_name: - continue - for f in info.fields: - if f in field_serializer_decorator.info.fields: - raise PydanticUserError( - 'Multiple field serializer functions were defined ' - f'for field {f!r}, this is not allowed.', - code='multiple-field-serializers', - ) - res.field_serializers[var_name] = Decorator.build( - model_dc, cls_var_name=var_name, shim=var_value.shim, info=info - ) - elif isinstance(info, ModelValidatorDecoratorInfo): - res.model_validators[var_name] = Decorator.build( - model_dc, cls_var_name=var_name, shim=var_value.shim, info=info - ) - elif isinstance(info, ModelSerializerDecoratorInfo): - res.model_serializers[var_name] = Decorator.build( - model_dc, cls_var_name=var_name, shim=var_value.shim, info=info - ) - else: - from ..fields import ComputedFieldInfo - - isinstance(var_value, ComputedFieldInfo) - res.computed_fields[var_name] = Decorator.build( - model_dc, cls_var_name=var_name, shim=None, info=info - ) - to_replace.append((var_name, var_value.wrapped)) - if to_replace: - # If we can save `__pydantic_decorators__` on the class we'll be able to check for it above - # so then we don't need to re-process the type, which means we can discard our descriptor wrappers - # and replace them with the thing they are wrapping (see the other setattr call below) - # which allows validator class methods to also function as regular class methods - setattr(model_dc, '__pydantic_decorators__', res) - for name, value in to_replace: - setattr(model_dc, name, value) - return res - - -def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes) -> bool: - """Look at a field or model validator function and 
determine whether it takes an info argument. - - An error is raised if the function has an invalid signature. - - Args: - validator: The validator function to inspect. - mode: The proposed validator mode. - - Returns: - Whether the validator takes an info argument. - """ - try: - sig = signature(validator) - except ValueError: - # builtins and some C extensions don't have signatures - # assume that they don't take an info argument and only take a single argument - # e.g. `str.strip` or `datetime.datetime` - return False - n_positional = count_positional_params(sig) - if mode == 'wrap': - if n_positional == 3: - return True - elif n_positional == 2: - return False - else: - assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain" - if n_positional == 2: - return True - elif n_positional == 1: - return False - - raise PydanticUserError( - f'Unrecognized field_validator function signature for {validator} with `mode={mode}`:{sig}', - code='validator-signature', - ) - - -def inspect_field_serializer( - serializer: Callable[..., Any], mode: Literal['plain', 'wrap'], computed_field: bool = False -) -> tuple[bool, bool]: - """Look at a field serializer function and determine if it is a field serializer, - and whether it takes an info argument. - - An error is raised if the function has an invalid signature. - - Args: - serializer: The serializer function to inspect. - mode: The serializer mode, either 'plain' or 'wrap'. - computed_field: When serializer is applied on computed_field. It doesn't require - info signature. - - Returns: - Tuple of (is_field_serializer, info_arg). 
- """ - sig = signature(serializer) - - first = next(iter(sig.parameters.values()), None) - is_field_serializer = first is not None and first.name == 'self' - - n_positional = count_positional_params(sig) - if is_field_serializer: - # -1 to correct for self parameter - info_arg = _serializer_info_arg(mode, n_positional - 1) - else: - info_arg = _serializer_info_arg(mode, n_positional) - - if info_arg is None: - raise PydanticUserError( - f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}', - code='field-serializer-signature', - ) - if info_arg and computed_field: - raise PydanticUserError( - 'field_serializer on computed_field does not use info signature', code='field-serializer-signature' - ) - - else: - return is_field_serializer, info_arg - - -def inspect_annotated_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool: - """Look at a serializer function used via `Annotated` and determine whether it takes an info argument. - - An error is raised if the function has an invalid signature. - - Args: - serializer: The serializer function to check. - mode: The serializer mode, either 'plain' or 'wrap'. - - Returns: - info_arg - """ - sig = signature(serializer) - info_arg = _serializer_info_arg(mode, count_positional_params(sig)) - if info_arg is None: - raise PydanticUserError( - f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}', - code='field-serializer-signature', - ) - else: - return info_arg - - -def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool: - """Look at a model serializer function and determine whether it takes an info argument. - - An error is raised if the function has an invalid signature. - - Args: - serializer: The serializer function to check. - mode: The serializer mode, either 'plain' or 'wrap'. - - Returns: - `info_arg` - whether the function expects an info argument. 
- """ - if isinstance(serializer, (staticmethod, classmethod)) or not is_instance_method_from_sig(serializer): - raise PydanticUserError( - '`@model_serializer` must be applied to instance methods', code='model-serializer-instance-method' - ) - - sig = signature(serializer) - info_arg = _serializer_info_arg(mode, count_positional_params(sig)) - if info_arg is None: - raise PydanticUserError( - f'Unrecognized model_serializer function signature for {serializer} with `mode={mode}`:{sig}', - code='model-serializer-signature', - ) - else: - return info_arg - - -def _serializer_info_arg(mode: Literal['plain', 'wrap'], n_positional: int) -> bool | None: - if mode == 'plain': - if n_positional == 1: - # (__input_value: Any) -> Any - return False - elif n_positional == 2: - # (__model: Any, __input_value: Any) -> Any - return True - else: - assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'" - if n_positional == 2: - # (__input_value: Any, __serializer: SerializerFunctionWrapHandler) -> Any - return False - elif n_positional == 3: - # (__input_value: Any, __serializer: SerializerFunctionWrapHandler, __info: SerializationInfo) -> Any - return True - - return None - - -AnyDecoratorCallable: TypeAlias = ( - 'Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any], Callable[..., Any]]' -) - - -def is_instance_method_from_sig(function: AnyDecoratorCallable) -> bool: - """Whether the function is an instance method. - - It will consider a function as instance method if the first parameter of - function is `self`. - - Args: - function: The function to check. - - Returns: - `True` if the function is an instance method, `False` otherwise. 
- """ - sig = signature(unwrap_wrapped_function(function)) - first = next(iter(sig.parameters.values()), None) - if first and first.name == 'self': - return True - return False - - -def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable) -> Any: - """Apply the `@classmethod` decorator on the function. - - Args: - function: The function to apply the decorator on. - - Return: - The `@classmethod` decorator applied function. - """ - if not isinstance( - unwrap_wrapped_function(function, unwrap_class_static_method=False), classmethod - ) and _is_classmethod_from_sig(function): - return classmethod(function) # type: ignore[arg-type] - return function - - -def _is_classmethod_from_sig(function: AnyDecoratorCallable) -> bool: - sig = signature(unwrap_wrapped_function(function)) - first = next(iter(sig.parameters.values()), None) - if first and first.name == 'cls': - return True - return False - - -def unwrap_wrapped_function( - func: Any, - *, - unwrap_partial: bool = True, - unwrap_class_static_method: bool = True, -) -> Any: - """Recursively unwraps a wrapped function until the underlying function is reached. - This handles property, functools.partial, functools.partialmethod, staticmethod and classmethod. - - Args: - func: The function to unwrap. - unwrap_partial: If True (default), unwrap partial and partialmethod decorators, otherwise don't. - decorators. - unwrap_class_static_method: If True (default), also unwrap classmethod and staticmethod - decorators. If False, only unwrap partial and partialmethod decorators. - - Returns: - The underlying function of the wrapped function. 
- """ - all: set[Any] = {property, cached_property} - - if unwrap_partial: - all.update({partial, partialmethod}) - - if unwrap_class_static_method: - all.update({staticmethod, classmethod}) - - while isinstance(func, tuple(all)): - if unwrap_class_static_method and isinstance(func, (classmethod, staticmethod)): - func = func.__func__ - elif isinstance(func, (partial, partialmethod)): - func = func.func - elif isinstance(func, property): - func = func.fget # arbitrary choice, convenient for computed fields - else: - # Make coverage happy as it can only get here in the last possible case - assert isinstance(func, cached_property) - func = func.func # type: ignore - - return func - - -def get_function_return_type( - func: Any, explicit_return_type: Any, types_namespace: dict[str, Any] | None = None -) -> Any: - """Get the function return type. - - It gets the return type from the type annotation if `explicit_return_type` is `None`. - Otherwise, it returns `explicit_return_type`. - - Args: - func: The function to get its return type. - explicit_return_type: The explicit return type. - types_namespace: The types namespace, defaults to `None`. - - Returns: - The function return type. - """ - if explicit_return_type is PydanticUndefined: - # try to get it from the type annotation - hints = get_function_type_hints( - unwrap_wrapped_function(func), include_keys={'return'}, types_namespace=types_namespace - ) - return hints.get('return', PydanticUndefined) - else: - return explicit_return_type - - -def count_positional_params(sig: Signature) -> int: - return sum(1 for param in sig.parameters.values() if can_be_positional(param)) - - -def can_be_positional(param: Parameter) -> bool: - return param.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD) - - -def ensure_property(f: Any) -> Any: - """Ensure that a function is a `property` or `cached_property`, or is a valid descriptor. - - Args: - f: The function to check. 
- - Returns: - The function, or a `property` or `cached_property` instance wrapping the function. - """ - if ismethoddescriptor(f) or isdatadescriptor(f): - return f - else: - return property(f) diff --git a/lib/pydantic/_internal/_decorators_v1.py b/lib/pydantic/_internal/_decorators_v1.py deleted file mode 100644 index 4f81e6d4..00000000 --- a/lib/pydantic/_internal/_decorators_v1.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Logic for V1 validators, e.g. `@validator` and `@root_validator`.""" -from __future__ import annotations as _annotations - -from inspect import Parameter, signature -from typing import Any, Dict, Tuple, Union, cast - -from pydantic_core import core_schema -from typing_extensions import Protocol - -from ..errors import PydanticUserError -from ._decorators import can_be_positional - - -class V1OnlyValueValidator(Protocol): - """A simple validator, supported for V1 validators and V2 validators.""" - - def __call__(self, __value: Any) -> Any: - ... - - -class V1ValidatorWithValues(Protocol): - """A validator with `values` argument, supported for V1 validators and V2 validators.""" - - def __call__(self, __value: Any, values: dict[str, Any]) -> Any: - ... - - -class V1ValidatorWithValuesKwOnly(Protocol): - """A validator with keyword only `values` argument, supported for V1 validators and V2 validators.""" - - def __call__(self, __value: Any, *, values: dict[str, Any]) -> Any: - ... - - -class V1ValidatorWithKwargs(Protocol): - """A validator with `kwargs` argument, supported for V1 validators and V2 validators.""" - - def __call__(self, __value: Any, **kwargs: Any) -> Any: - ... - - -class V1ValidatorWithValuesAndKwargs(Protocol): - """A validator with `values` and `kwargs` arguments, supported for V1 validators and V2 validators.""" - - def __call__(self, __value: Any, values: dict[str, Any], **kwargs: Any) -> Any: - ... 
- - -V1Validator = Union[ - V1ValidatorWithValues, V1ValidatorWithValuesKwOnly, V1ValidatorWithKwargs, V1ValidatorWithValuesAndKwargs -] - - -def can_be_keyword(param: Parameter) -> bool: - return param.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) - - -def make_generic_v1_field_validator(validator: V1Validator) -> core_schema.WithInfoValidatorFunction: - """Wrap a V1 style field validator for V2 compatibility. - - Args: - validator: The V1 style field validator. - - Returns: - A wrapped V2 style field validator. - - Raises: - PydanticUserError: If the signature is not supported or the parameters are - not available in Pydantic V2. - """ - sig = signature(validator) - - needs_values_kw = False - - for param_num, (param_name, parameter) in enumerate(sig.parameters.items()): - if can_be_keyword(parameter) and param_name in ('field', 'config'): - raise PydanticUserError( - 'The `field` and `config` parameters are not available in Pydantic V2, ' - 'please use the `info` parameter instead.', - code='validator-field-config-info', - ) - if parameter.kind is Parameter.VAR_KEYWORD: - needs_values_kw = True - elif can_be_keyword(parameter) and param_name == 'values': - needs_values_kw = True - elif can_be_positional(parameter) and param_num == 0: - # value - continue - elif parameter.default is Parameter.empty: # ignore params with defaults e.g. 
bound by functools.partial - raise PydanticUserError( - f'Unsupported signature for V1 style validator {validator}: {sig} is not supported.', - code='validator-v1-signature', - ) - - if needs_values_kw: - # (v, **kwargs), (v, values, **kwargs), (v, *, values, **kwargs) or (v, *, values) - val1 = cast(V1ValidatorWithValues, validator) - - def wrapper1(value: Any, info: core_schema.ValidationInfo) -> Any: - return val1(value, values=info.data) - - return wrapper1 - else: - val2 = cast(V1OnlyValueValidator, validator) - - def wrapper2(value: Any, _: core_schema.ValidationInfo) -> Any: - return val2(value) - - return wrapper2 - - -RootValidatorValues = Dict[str, Any] -# technically tuple[model_dict, model_extra, fields_set] | tuple[dataclass_dict, init_vars] -RootValidatorFieldsTuple = Tuple[Any, ...] - - -class V1RootValidatorFunction(Protocol): - """A simple root validator, supported for V1 validators and V2 validators.""" - - def __call__(self, __values: RootValidatorValues) -> RootValidatorValues: - ... - - -class V2CoreBeforeRootValidator(Protocol): - """V2 validator with mode='before'.""" - - def __call__(self, __values: RootValidatorValues, __info: core_schema.ValidationInfo) -> RootValidatorValues: - ... - - -class V2CoreAfterRootValidator(Protocol): - """V2 validator with mode='after'.""" - - def __call__( - self, __fields_tuple: RootValidatorFieldsTuple, __info: core_schema.ValidationInfo - ) -> RootValidatorFieldsTuple: - ... - - -def make_v1_generic_root_validator( - validator: V1RootValidatorFunction, pre: bool -) -> V2CoreBeforeRootValidator | V2CoreAfterRootValidator: - """Wrap a V1 style root validator for V2 compatibility. - - Args: - validator: The V1 style field validator. - pre: Whether the validator is a pre validator. - - Returns: - A wrapped V2 style validator. 
- """ - if pre is True: - # mode='before' for pydantic-core - def _wrapper1(values: RootValidatorValues, _: core_schema.ValidationInfo) -> RootValidatorValues: - return validator(values) - - return _wrapper1 - - # mode='after' for pydantic-core - def _wrapper2(fields_tuple: RootValidatorFieldsTuple, _: core_schema.ValidationInfo) -> RootValidatorFieldsTuple: - if len(fields_tuple) == 2: - # dataclass, this is easy - values, init_vars = fields_tuple - values = validator(values) - return values, init_vars - else: - # ugly hack: to match v1 behaviour, we merge values and model_extra, then split them up based on fields - # afterwards - model_dict, model_extra, fields_set = fields_tuple - if model_extra: - fields = set(model_dict.keys()) - model_dict.update(model_extra) - model_dict_new = validator(model_dict) - for k in list(model_dict_new.keys()): - if k not in fields: - model_extra[k] = model_dict_new.pop(k) - else: - model_dict_new = validator(model_dict) - return model_dict_new, model_extra, fields_set - - return _wrapper2 diff --git a/lib/pydantic/_internal/_discriminated_union.py b/lib/pydantic/_internal/_discriminated_union.py deleted file mode 100644 index c40117d3..00000000 --- a/lib/pydantic/_internal/_discriminated_union.py +++ /dev/null @@ -1,506 +0,0 @@ -from __future__ import annotations as _annotations - -from typing import TYPE_CHECKING, Any, Hashable, Sequence - -from pydantic_core import CoreSchema, core_schema - -from ..errors import PydanticUserError -from . 
import _core_utils -from ._core_utils import ( - CoreSchemaField, - collect_definitions, - simplify_schema_references, -) - -if TYPE_CHECKING: - from ..types import Discriminator - -CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY = 'pydantic.internal.union_discriminator' - - -class MissingDefinitionForUnionRef(Exception): - """Raised when applying a discriminated union discriminator to a schema - requires a definition that is not yet defined - """ - - def __init__(self, ref: str) -> None: - self.ref = ref - super().__init__(f'Missing definition for ref {self.ref!r}') - - -def set_discriminator_in_metadata(schema: CoreSchema, discriminator: Any) -> None: - schema.setdefault('metadata', {}) - metadata = schema.get('metadata') - assert metadata is not None - metadata[CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY] = discriminator - - -def apply_discriminators(schema: core_schema.CoreSchema) -> core_schema.CoreSchema: - definitions: dict[str, CoreSchema] | None = None - - def inner(s: core_schema.CoreSchema, recurse: _core_utils.Recurse) -> core_schema.CoreSchema: - nonlocal definitions - - s = recurse(s, inner) - if s['type'] == 'tagged-union': - return s - - metadata = s.get('metadata', {}) - discriminator = metadata.pop(CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY, None) - if discriminator is not None: - if definitions is None: - definitions = collect_definitions(schema) - s = apply_discriminator(s, discriminator, definitions) - return s - - return simplify_schema_references(_core_utils.walk_core_schema(schema, inner)) - - -def apply_discriminator( - schema: core_schema.CoreSchema, - discriminator: str | Discriminator, - definitions: dict[str, core_schema.CoreSchema] | None = None, -) -> core_schema.CoreSchema: - """Applies the discriminator and returns a new core schema. - - Args: - schema: The input schema. - discriminator: The name of the field which will serve as the discriminator. - definitions: A mapping of schema ref to schema. 
- - Returns: - The new core schema. - - Raises: - TypeError: - - If `discriminator` is used with invalid union variant. - - If `discriminator` is used with `Union` type with one variant. - - If `discriminator` value mapped to multiple choices. - MissingDefinitionForUnionRef: - If the definition for ref is missing. - PydanticUserError: - - If a model in union doesn't have a discriminator field. - - If discriminator field has a non-string alias. - - If discriminator fields have different aliases. - - If discriminator field not of type `Literal`. - """ - from ..types import Discriminator - - if isinstance(discriminator, Discriminator): - if isinstance(discriminator.discriminator, str): - discriminator = discriminator.discriminator - else: - return discriminator._convert_schema(schema) - - return _ApplyInferredDiscriminator(discriminator, definitions or {}).apply(schema) - - -class _ApplyInferredDiscriminator: - """This class is used to convert an input schema containing a union schema into one where that union is - replaced with a tagged-union, with all the associated debugging and performance benefits. - - This is done by: - * Validating that the input schema is compatible with the provided discriminator - * Introspecting the schema to determine which discriminator values should map to which union choices - * Handling various edge cases such as 'definitions', 'default', 'nullable' schemas, and more - - I have chosen to implement the conversion algorithm in this class, rather than a function, - to make it easier to maintain state while recursively walking the provided CoreSchema. - """ - - def __init__(self, discriminator: str, definitions: dict[str, core_schema.CoreSchema]): - # `discriminator` should be the name of the field which will serve as the discriminator. - # It must be the python name of the field, and *not* the field's alias. Note that as of now, - # all members of a discriminated union _must_ use a field with the same name as the discriminator. 
- # This may change if/when we expose a way to manually specify the TaggedUnionSchema's choices. - self.discriminator = discriminator - - # `definitions` should contain a mapping of schema ref to schema for all schemas which might - # be referenced by some choice - self.definitions = definitions - - # `_discriminator_alias` will hold the value, if present, of the alias for the discriminator - # - # Note: following the v1 implementation, we currently disallow the use of different aliases - # for different choices. This is not a limitation of pydantic_core, but if we try to handle - # this, the inference logic gets complicated very quickly, and could result in confusing - # debugging challenges for users making subtle mistakes. - # - # Rather than trying to do the most powerful inference possible, I think we should eventually - # expose a way to more-manually control the way the TaggedUnionSchema is constructed through - # the use of a new type which would be placed as an Annotation on the Union type. This would - # provide the full flexibility/power of pydantic_core's TaggedUnionSchema where necessary for - # more complex cases, without over-complicating the inference logic for the common cases. - self._discriminator_alias: str | None = None - - # `_should_be_nullable` indicates whether the converted union has `None` as an allowed value. - # If `None` is an acceptable value of the (possibly-wrapped) union, we ignore it while - # constructing the TaggedUnionSchema, but set the `_should_be_nullable` attribute to True. - # Once we have constructed the TaggedUnionSchema, if `_should_be_nullable` is True, we ensure - # that the final schema gets wrapped as a NullableSchema. This has the same semantics on the - # python side, but resolves the issue that `None` cannot correspond to any discriminator values. 
- self._should_be_nullable = False - - # `_is_nullable` is used to track if the final produced schema will definitely be nullable; - # we set it to True if the input schema is wrapped in a nullable schema that we know will be preserved - # as an indication that, even if None is discovered as one of the union choices, we will not need to wrap - # the final value in another nullable schema. - # - # This is more complicated than just checking for the final outermost schema having type 'nullable' thanks - # to the possible presence of other wrapper schemas such as DefinitionsSchema, WithDefaultSchema, etc. - self._is_nullable = False - - # `_choices_to_handle` serves as a stack of choices to add to the tagged union. Initially, choices - # from the union in the wrapped schema will be appended to this list, and the recursive choice-handling - # algorithm may add more choices to this stack as (nested) unions are encountered. - self._choices_to_handle: list[core_schema.CoreSchema] = [] - - # `_tagged_union_choices` is built during the call to `apply`, and will hold the choices to be included - # in the output TaggedUnionSchema that will replace the union from the input schema - self._tagged_union_choices: dict[Hashable, core_schema.CoreSchema] = {} - - # `_used` is changed to True after applying the discriminator to prevent accidental re-use - self._used = False - - def apply(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema: - """Return a new CoreSchema based on `schema` that uses a tagged-union with the discriminator provided - to this class. - - Args: - schema: The input schema. - - Returns: - The new core schema. - - Raises: - TypeError: - - If `discriminator` is used with invalid union variant. - - If `discriminator` is used with `Union` type with one variant. - - If `discriminator` value mapped to multiple choices. - ValueError: - If the definition for ref is missing. - PydanticUserError: - - If a model in union doesn't have a discriminator field. 
- - If discriminator field has a non-string alias. - - If discriminator fields have different aliases. - - If discriminator field not of type `Literal`. - """ - self.definitions.update(collect_definitions(schema)) - assert not self._used - schema = self._apply_to_root(schema) - if self._should_be_nullable and not self._is_nullable: - schema = core_schema.nullable_schema(schema) - self._used = True - new_defs = collect_definitions(schema) - missing_defs = self.definitions.keys() - new_defs.keys() - if missing_defs: - schema = core_schema.definitions_schema(schema, [self.definitions[ref] for ref in missing_defs]) - return schema - - def _apply_to_root(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema: - """This method handles the outer-most stage of recursion over the input schema: - unwrapping nullable or definitions schemas, and calling the `_handle_choice` - method iteratively on the choices extracted (recursively) from the possibly-wrapped union. - """ - if schema['type'] == 'nullable': - self._is_nullable = True - wrapped = self._apply_to_root(schema['schema']) - nullable_wrapper = schema.copy() - nullable_wrapper['schema'] = wrapped - return nullable_wrapper - - if schema['type'] == 'definitions': - wrapped = self._apply_to_root(schema['schema']) - definitions_wrapper = schema.copy() - definitions_wrapper['schema'] = wrapped - return definitions_wrapper - - if schema['type'] != 'union': - # If the schema is not a union, it probably means it just had a single member and - # was flattened by pydantic_core. - # However, it still may make sense to apply the discriminator to this schema, - # as a way to get discriminated-union-style error messages, so we allow this here. 
- schema = core_schema.union_schema([schema]) - - # Reverse the choices list before extending the stack so that they get handled in the order they occur - choices_schemas = [v[0] if isinstance(v, tuple) else v for v in schema['choices'][::-1]] - self._choices_to_handle.extend(choices_schemas) - while self._choices_to_handle: - choice = self._choices_to_handle.pop() - self._handle_choice(choice) - - if self._discriminator_alias is not None and self._discriminator_alias != self.discriminator: - # * We need to annotate `discriminator` as a union here to handle both branches of this conditional - # * We need to annotate `discriminator` as list[list[str | int]] and not list[list[str]] due to the - # invariance of list, and because list[list[str | int]] is the type of the discriminator argument - # to tagged_union_schema below - # * See the docstring of pydantic_core.core_schema.tagged_union_schema for more details about how to - # interpret the value of the discriminator argument to tagged_union_schema. (The list[list[str]] here - # is the appropriate way to provide a list of fallback attributes to check for a discriminator value.) - discriminator: str | list[list[str | int]] = [[self.discriminator], [self._discriminator_alias]] - else: - discriminator = self.discriminator - return core_schema.tagged_union_schema( - choices=self._tagged_union_choices, - discriminator=discriminator, - custom_error_type=schema.get('custom_error_type'), - custom_error_message=schema.get('custom_error_message'), - custom_error_context=schema.get('custom_error_context'), - strict=False, - from_attributes=True, - ref=schema.get('ref'), - metadata=schema.get('metadata'), - serialization=schema.get('serialization'), - ) - - def _handle_choice(self, choice: core_schema.CoreSchema) -> None: - """This method handles the "middle" stage of recursion over the input schema. 
- Specifically, it is responsible for handling each choice of the outermost union - (and any "coalesced" choices obtained from inner unions). - - Here, "handling" entails: - * Coalescing nested unions and compatible tagged-unions - * Tracking the presence of 'none' and 'nullable' schemas occurring as choices - * Validating that each allowed discriminator value maps to a unique choice - * Updating the _tagged_union_choices mapping that will ultimately be used to build the TaggedUnionSchema. - """ - if choice['type'] == 'definition-ref': - if choice['schema_ref'] not in self.definitions: - raise MissingDefinitionForUnionRef(choice['schema_ref']) - - if choice['type'] == 'none': - self._should_be_nullable = True - elif choice['type'] == 'definitions': - self._handle_choice(choice['schema']) - elif choice['type'] == 'nullable': - self._should_be_nullable = True - self._handle_choice(choice['schema']) # unwrap the nullable schema - elif choice['type'] == 'union': - # Reverse the choices list before extending the stack so that they get handled in the order they occur - choices_schemas = [v[0] if isinstance(v, tuple) else v for v in choice['choices'][::-1]] - self._choices_to_handle.extend(choices_schemas) - elif choice['type'] not in { - 'model', - 'typed-dict', - 'tagged-union', - 'lax-or-strict', - 'dataclass', - 'dataclass-args', - 'definition-ref', - } and not _core_utils.is_function_with_inner_schema(choice): - # We should eventually handle 'definition-ref' as well - raise TypeError( - f'{choice["type"]!r} is not a valid discriminated union variant;' - ' should be a `BaseModel` or `dataclass`' - ) - else: - if choice['type'] == 'tagged-union' and self._is_discriminator_shared(choice): - # In this case, this inner tagged-union is compatible with the outer tagged-union, - # and its choices can be coalesced into the outer TaggedUnionSchema. 
- subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))] - # Reverse the choices list before extending the stack so that they get handled in the order they occur - self._choices_to_handle.extend(subchoices[::-1]) - return - - inferred_discriminator_values = self._infer_discriminator_values_for_choice(choice, source_name=None) - self._set_unique_choice_for_values(choice, inferred_discriminator_values) - - def _is_discriminator_shared(self, choice: core_schema.TaggedUnionSchema) -> bool: - """This method returns a boolean indicating whether the discriminator for the `choice` - is the same as that being used for the outermost tagged union. This is used to - determine whether this TaggedUnionSchema choice should be "coalesced" into the top level, - or whether it should be treated as a separate (nested) choice. - """ - inner_discriminator = choice['discriminator'] - return inner_discriminator == self.discriminator or ( - isinstance(inner_discriminator, list) - and (self.discriminator in inner_discriminator or [self.discriminator] in inner_discriminator) - ) - - def _infer_discriminator_values_for_choice( # noqa C901 - self, choice: core_schema.CoreSchema, source_name: str | None - ) -> list[str | int]: - """This function recurses over `choice`, extracting all discriminator values that should map to this choice. - - `model_name` is accepted for the purpose of producing useful error messages. 
- """ - if choice['type'] == 'definitions': - return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name) - elif choice['type'] == 'function-plain': - raise TypeError( - f'{choice["type"]!r} is not a valid discriminated union variant;' - ' should be a `BaseModel` or `dataclass`' - ) - elif _core_utils.is_function_with_inner_schema(choice): - return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name) - elif choice['type'] == 'lax-or-strict': - return sorted( - set( - self._infer_discriminator_values_for_choice(choice['lax_schema'], source_name=None) - + self._infer_discriminator_values_for_choice(choice['strict_schema'], source_name=None) - ) - ) - - elif choice['type'] == 'tagged-union': - values: list[str | int] = [] - # Ignore str/int "choices" since these are just references to other choices - subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))] - for subchoice in subchoices: - subchoice_values = self._infer_discriminator_values_for_choice(subchoice, source_name=None) - values.extend(subchoice_values) - return values - - elif choice['type'] == 'union': - values = [] - for subchoice in choice['choices']: - subchoice_schema = subchoice[0] if isinstance(subchoice, tuple) else subchoice - subchoice_values = self._infer_discriminator_values_for_choice(subchoice_schema, source_name=None) - values.extend(subchoice_values) - return values - - elif choice['type'] == 'nullable': - self._should_be_nullable = True - return self._infer_discriminator_values_for_choice(choice['schema'], source_name=None) - - elif choice['type'] == 'model': - return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__) - - elif choice['type'] == 'dataclass': - return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__) - - elif choice['type'] == 'model-fields': - return 
self._infer_discriminator_values_for_model_choice(choice, source_name=source_name) - - elif choice['type'] == 'dataclass-args': - return self._infer_discriminator_values_for_dataclass_choice(choice, source_name=source_name) - - elif choice['type'] == 'typed-dict': - return self._infer_discriminator_values_for_typed_dict_choice(choice, source_name=source_name) - - elif choice['type'] == 'definition-ref': - schema_ref = choice['schema_ref'] - if schema_ref not in self.definitions: - raise MissingDefinitionForUnionRef(schema_ref) - return self._infer_discriminator_values_for_choice(self.definitions[schema_ref], source_name=source_name) - else: - raise TypeError( - f'{choice["type"]!r} is not a valid discriminated union variant;' - ' should be a `BaseModel` or `dataclass`' - ) - - def _infer_discriminator_values_for_typed_dict_choice( - self, choice: core_schema.TypedDictSchema, source_name: str | None = None - ) -> list[str | int]: - """This method just extracts the _infer_discriminator_values_for_choice logic specific to TypedDictSchema - for the sake of readability. 
- """ - source = 'TypedDict' if source_name is None else f'TypedDict {source_name!r}' - field = choice['fields'].get(self.discriminator) - if field is None: - raise PydanticUserError( - f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field' - ) - return self._infer_discriminator_values_for_field(field, source) - - def _infer_discriminator_values_for_model_choice( - self, choice: core_schema.ModelFieldsSchema, source_name: str | None = None - ) -> list[str | int]: - source = 'ModelFields' if source_name is None else f'Model {source_name!r}' - field = choice['fields'].get(self.discriminator) - if field is None: - raise PydanticUserError( - f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field' - ) - return self._infer_discriminator_values_for_field(field, source) - - def _infer_discriminator_values_for_dataclass_choice( - self, choice: core_schema.DataclassArgsSchema, source_name: str | None = None - ) -> list[str | int]: - source = 'DataclassArgs' if source_name is None else f'Dataclass {source_name!r}' - for field in choice['fields']: - if field['name'] == self.discriminator: - break - else: - raise PydanticUserError( - f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field' - ) - return self._infer_discriminator_values_for_field(field, source) - - def _infer_discriminator_values_for_field(self, field: CoreSchemaField, source: str) -> list[str | int]: - if field['type'] == 'computed-field': - # This should never occur as a discriminator, as it is only relevant to serialization - return [] - alias = field.get('validation_alias', self.discriminator) - if not isinstance(alias, str): - raise PydanticUserError( - f'Alias {alias!r} is not supported in a discriminated union', code='discriminator-alias-type' - ) - if self._discriminator_alias is None: - self._discriminator_alias = alias - elif self._discriminator_alias != alias: - 
raise PydanticUserError( - f'Aliases for discriminator {self.discriminator!r} must be the same ' - f'(got {alias}, {self._discriminator_alias})', - code='discriminator-alias', - ) - return self._infer_discriminator_values_for_inner_schema(field['schema'], source) - - def _infer_discriminator_values_for_inner_schema( - self, schema: core_schema.CoreSchema, source: str - ) -> list[str | int]: - """When inferring discriminator values for a field, we typically extract the expected values from a literal - schema. This function does that, but also handles nested unions and defaults. - """ - if schema['type'] == 'literal': - return schema['expected'] - - elif schema['type'] == 'union': - # Generally when multiple values are allowed they should be placed in a single `Literal`, but - # we add this case to handle the situation where a field is annotated as a `Union` of `Literal`s. - # For example, this lets us handle `Union[Literal['key'], Union[Literal['Key'], Literal['KEY']]]` - values: list[Any] = [] - for choice in schema['choices']: - choice_schema = choice[0] if isinstance(choice, tuple) else choice - choice_values = self._infer_discriminator_values_for_inner_schema(choice_schema, source) - values.extend(choice_values) - return values - - elif schema['type'] == 'default': - # This will happen if the field has a default value; we ignore it while extracting the discriminator values - return self._infer_discriminator_values_for_inner_schema(schema['schema'], source) - - elif schema['type'] == 'function-after': - # After validators don't affect the discriminator values - return self._infer_discriminator_values_for_inner_schema(schema['schema'], source) - - elif schema['type'] in {'function-before', 'function-wrap', 'function-plain'}: - validator_type = repr(schema['type'].split('-')[1]) - raise PydanticUserError( - f'Cannot use a mode={validator_type} validator in the' - f' discriminator field {self.discriminator!r} of {source}', - code='discriminator-validator', - ) - - 
else: - raise PydanticUserError( - f'{source} needs field {self.discriminator!r} to be of type `Literal`', - code='discriminator-needs-literal', - ) - - def _set_unique_choice_for_values(self, choice: core_schema.CoreSchema, values: Sequence[str | int]) -> None: - """This method updates `self.tagged_union_choices` so that all provided (discriminator) `values` map to the - provided `choice`, validating that none of these values already map to another (different) choice. - """ - for discriminator_value in values: - if discriminator_value in self._tagged_union_choices: - # It is okay if `value` is already in tagged_union_choices as long as it maps to the same value. - # Because tagged_union_choices may map values to other values, we need to walk the choices dict - # until we get to a "real" choice, and confirm that is equal to the one assigned. - existing_choice = self._tagged_union_choices[discriminator_value] - if existing_choice != choice: - raise TypeError( - f'Value {discriminator_value!r} for discriminator ' - f'{self.discriminator!r} mapped to multiple choices' - ) - else: - self._tagged_union_choices[discriminator_value] = choice diff --git a/lib/pydantic/_internal/_fields.py b/lib/pydantic/_internal/_fields.py deleted file mode 100644 index 94de3062..00000000 --- a/lib/pydantic/_internal/_fields.py +++ /dev/null @@ -1,319 +0,0 @@ -"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`.""" -from __future__ import annotations as _annotations - -import dataclasses -import sys -import warnings -from copy import copy -from functools import lru_cache -from typing import TYPE_CHECKING, Any - -from pydantic_core import PydanticUndefined - -from pydantic.errors import PydanticUserError - -from . 
import _typing_extra -from ._config import ConfigWrapper -from ._repr import Representation -from ._typing_extra import get_cls_type_hints_lenient, get_type_hints, is_classvar, is_finalvar - -if TYPE_CHECKING: - from annotated_types import BaseMetadata - - from ..fields import FieldInfo - from ..main import BaseModel - from ._dataclasses import StandardDataclass - from ._decorators import DecoratorInfos - - -def get_type_hints_infer_globalns( - obj: Any, - localns: dict[str, Any] | None = None, - include_extras: bool = False, -) -> dict[str, Any]: - """Gets type hints for an object by inferring the global namespace. - - It uses the `typing.get_type_hints`, The only thing that we do here is fetching - global namespace from `obj.__module__` if it is not `None`. - - Args: - obj: The object to get its type hints. - localns: The local namespaces. - include_extras: Whether to recursively include annotation metadata. - - Returns: - The object type hints. - """ - module_name = getattr(obj, '__module__', None) - globalns: dict[str, Any] | None = None - if module_name: - try: - globalns = sys.modules[module_name].__dict__ - except KeyError: - # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 - pass - return get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras) - - -class PydanticMetadata(Representation): - """Base class for annotation markers like `Strict`.""" - - __slots__ = () - - -def pydantic_general_metadata(**metadata: Any) -> BaseMetadata: - """Create a new `_PydanticGeneralMetadata` class with the given metadata. - - Args: - **metadata: The metadata to add. - - Returns: - The new `_PydanticGeneralMetadata` class. 
- """ - return _general_metadata_cls()(metadata) # type: ignore - - -@lru_cache(maxsize=None) -def _general_metadata_cls() -> type[BaseMetadata]: - """Do it this way to avoid importing `annotated_types` at import time.""" - from annotated_types import BaseMetadata - - class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata): - """Pydantic general metadata like `max_digits`.""" - - def __init__(self, metadata: Any): - self.__dict__ = metadata - - return _PydanticGeneralMetadata # type: ignore - - -def collect_model_fields( # noqa: C901 - cls: type[BaseModel], - bases: tuple[type[Any], ...], - config_wrapper: ConfigWrapper, - types_namespace: dict[str, Any] | None, - *, - typevars_map: dict[Any, Any] | None = None, -) -> tuple[dict[str, FieldInfo], set[str]]: - """Collect the fields of a nascent pydantic model. - - Also collect the names of any ClassVars present in the type hints. - - The returned value is a tuple of two items: the fields dict, and the set of ClassVar names. - - Args: - cls: BaseModel or dataclass. - bases: Parents of the class, generally `cls.__bases__`. - config_wrapper: The config wrapper instance. - types_namespace: Optional extra namespace to look for types in. - typevars_map: A dictionary mapping type variables to their concrete types. - - Returns: - A tuple contains fields and class variables. - - Raises: - NameError: - - If there is a conflict between a field name and protected namespaces. - - If there is a field other than `root` in `RootModel`. - - If a field shadows an attribute in the parent model. 
- """ - from ..fields import FieldInfo - - type_hints = get_cls_type_hints_lenient(cls, types_namespace) - - # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older - # annotations is only used for finding fields in parent classes - annotations = cls.__dict__.get('__annotations__', {}) - fields: dict[str, FieldInfo] = {} - - class_vars: set[str] = set() - for ann_name, ann_type in type_hints.items(): - if ann_name == 'model_config': - # We never want to treat `model_config` as a field - # Note: we may need to change this logic if/when we introduce a `BareModel` class with no - # protected namespaces (where `model_config` might be allowed as a field name) - continue - for protected_namespace in config_wrapper.protected_namespaces: - if ann_name.startswith(protected_namespace): - for b in bases: - if hasattr(b, ann_name): - from ..main import BaseModel - - if not (issubclass(b, BaseModel) and ann_name in b.model_fields): - raise NameError( - f'Field "{ann_name}" conflicts with member {getattr(b, ann_name)}' - f' of protected namespace "{protected_namespace}".' - ) - else: - valid_namespaces = tuple( - x for x in config_wrapper.protected_namespaces if not ann_name.startswith(x) - ) - warnings.warn( - f'Field "{ann_name}" has conflict with protected namespace "{protected_namespace}".' 
- '\n\nYou may be able to resolve this warning by setting' - f" `model_config['protected_namespaces'] = {valid_namespaces}`.", - UserWarning, - ) - if is_classvar(ann_type): - class_vars.add(ann_name) - continue - if _is_finalvar_with_default_val(ann_type, getattr(cls, ann_name, PydanticUndefined)): - class_vars.add(ann_name) - continue - if not is_valid_field_name(ann_name): - continue - if cls.__pydantic_root_model__ and ann_name != 'root': - raise NameError( - f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`" - ) - - # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get - # "... shadows an attribute" errors - generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin') - for base in bases: - dataclass_fields = { - field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ()) - } - if hasattr(base, ann_name): - if base is generic_origin: - # Don't error when "shadowing" of attributes in parametrized generics - continue - - if ann_name in dataclass_fields: - # Don't error when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set - # on the class instance. 
- continue - warnings.warn( - f'Field name "{ann_name}" shadows an attribute in parent "{base.__qualname__}"; ', - UserWarning, - ) - - try: - default = getattr(cls, ann_name, PydanticUndefined) - if default is PydanticUndefined: - raise AttributeError - except AttributeError: - if ann_name in annotations: - field_info = FieldInfo.from_annotation(ann_type) - else: - # if field has no default value and is not in __annotations__ this means that it is - # defined in a base class and we can take it from there - model_fields_lookup: dict[str, FieldInfo] = {} - for x in cls.__bases__[::-1]: - model_fields_lookup.update(getattr(x, 'model_fields', {})) - if ann_name in model_fields_lookup: - # The field was present on one of the (possibly multiple) base classes - # copy the field to make sure typevar substitutions don't cause issues with the base classes - field_info = copy(model_fields_lookup[ann_name]) - else: - # The field was not found on any base classes; this seems to be caused by fields not getting - # generated thanks to models not being fully defined while initializing recursive models. - # Nothing stops us from just creating a new FieldInfo for this type hint, so we do this. - field_info = FieldInfo.from_annotation(ann_type) - else: - field_info = FieldInfo.from_annotated_attribute(ann_type, default) - # attributes which are fields are removed from the class namespace: - # 1. To match the behaviour of annotation-only fields - # 2. 
To avoid false positives in the NameError check above - try: - delattr(cls, ann_name) - except AttributeError: - pass # indicates the attribute was on a parent class - - # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__ - # to make sure the decorators have already been built for this exact class - decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__'] - if ann_name in decorators.computed_fields: - raise ValueError("you can't override a field with a computed field") - fields[ann_name] = field_info - - if typevars_map: - for field in fields.values(): - field.apply_typevars_map(typevars_map, types_namespace) - - return fields, class_vars - - -def _is_finalvar_with_default_val(type_: type[Any], val: Any) -> bool: - from ..fields import FieldInfo - - if not is_finalvar(type_): - return False - elif val is PydanticUndefined: - return False - elif isinstance(val, FieldInfo) and (val.default is PydanticUndefined and val.default_factory is None): - return False - else: - return True - - -def collect_dataclass_fields( - cls: type[StandardDataclass], types_namespace: dict[str, Any] | None, *, typevars_map: dict[Any, Any] | None = None -) -> dict[str, FieldInfo]: - """Collect the fields of a dataclass. - - Args: - cls: dataclass. - types_namespace: Optional extra namespace to look for types in. - typevars_map: A dictionary mapping type variables to their concrete types. - - Returns: - The dataclass fields. 
- """ - from ..fields import FieldInfo - - fields: dict[str, FieldInfo] = {} - dataclass_fields: dict[str, dataclasses.Field] = cls.__dataclass_fields__ - cls_localns = dict(vars(cls)) # this matches get_cls_type_hints_lenient, but all tests pass with `= None` instead - - source_module = sys.modules.get(cls.__module__) - if source_module is not None: - types_namespace = {**source_module.__dict__, **(types_namespace or {})} - - for ann_name, dataclass_field in dataclass_fields.items(): - ann_type = _typing_extra.eval_type_lenient(dataclass_field.type, types_namespace, cls_localns) - if is_classvar(ann_type): - continue - - if ( - not dataclass_field.init - and dataclass_field.default == dataclasses.MISSING - and dataclass_field.default_factory == dataclasses.MISSING - ): - # TODO: We should probably do something with this so that validate_assignment behaves properly - # Issue: https://github.com/pydantic/pydantic/issues/5470 - continue - - if isinstance(dataclass_field.default, FieldInfo): - if dataclass_field.default.init_var: - if dataclass_field.default.init is False: - raise PydanticUserError( - f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.', - code='clashing-init-and-init-var', - ) - - # TODO: same note as above re validate_assignment - continue - field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field.default) - else: - field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field) - - fields[ann_name] = field_info - - if field_info.default is not PydanticUndefined and isinstance(getattr(cls, ann_name, field_info), FieldInfo): - # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo - setattr(cls, ann_name, field_info.default) - - if typevars_map: - for field in fields.values(): - field.apply_typevars_map(typevars_map, types_namespace) - - return fields - - -def is_valid_field_name(name: str) -> bool: - return not 
name.startswith('_') - - -def is_valid_privateattr_name(name: str) -> bool: - return name.startswith('_') and not name.startswith('__') diff --git a/lib/pydantic/_internal/_forward_ref.py b/lib/pydantic/_internal/_forward_ref.py deleted file mode 100644 index 231f81d1..00000000 --- a/lib/pydantic/_internal/_forward_ref.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import annotations as _annotations - -from dataclasses import dataclass -from typing import Union - - -@dataclass -class PydanticRecursiveRef: - type_ref: str - - __name__ = 'PydanticRecursiveRef' - __hash__ = object.__hash__ - - def __call__(self) -> None: - """Defining __call__ is necessary for the `typing` module to let you use an instance of - this class as the result of resolving a standard ForwardRef. - """ - - def __or__(self, other): - return Union[self, other] # type: ignore - - def __ror__(self, other): - return Union[other, self] # type: ignore diff --git a/lib/pydantic/_internal/_generate_schema.py b/lib/pydantic/_internal/_generate_schema.py deleted file mode 100644 index 6ab7ec19..00000000 --- a/lib/pydantic/_internal/_generate_schema.py +++ /dev/null @@ -1,2231 +0,0 @@ -"""Convert python types to pydantic-core schema.""" -from __future__ import annotations as _annotations - -import collections.abc -import dataclasses -import inspect -import re -import sys -import typing -import warnings -from contextlib import contextmanager -from copy import copy, deepcopy -from enum import Enum -from functools import partial -from inspect import Parameter, _ParameterKind, signature -from itertools import chain -from operator import attrgetter -from types import FunctionType, LambdaType, MethodType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Final, - ForwardRef, - Iterable, - Iterator, - Mapping, - Type, - TypeVar, - Union, - cast, - overload, -) -from warnings import warn - -from pydantic_core import CoreSchema, PydanticUndefined, core_schema, to_jsonable_python -from 
typing_extensions import Annotated, Literal, TypeAliasType, TypedDict, get_args, get_origin, is_typeddict - -from ..aliases import AliasGenerator -from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler -from ..config import ConfigDict, JsonDict, JsonEncoder -from ..errors import PydanticSchemaGenerationError, PydanticUndefinedAnnotation, PydanticUserError -from ..json_schema import JsonSchemaValue -from ..version import version_short -from ..warnings import PydanticDeprecatedSince20 -from . import _core_utils, _decorators, _discriminated_union, _known_annotated_metadata, _typing_extra -from ._config import ConfigWrapper, ConfigWrapperStack -from ._core_metadata import CoreMetadataHandler, build_metadata_dict -from ._core_utils import ( - CoreSchemaOrField, - collect_invalid_schemas, - define_expected_missing_refs, - get_ref, - get_type_ref, - is_function_with_inner_schema, - is_list_like_schema_with_items_schema, - simplify_schema_references, - validate_core_schema, -) -from ._decorators import ( - Decorator, - DecoratorInfos, - FieldSerializerDecoratorInfo, - FieldValidatorDecoratorInfo, - ModelSerializerDecoratorInfo, - ModelValidatorDecoratorInfo, - RootValidatorDecoratorInfo, - ValidatorDecoratorInfo, - get_attribute_from_bases, - inspect_field_serializer, - inspect_model_serializer, - inspect_validator, -) -from ._fields import collect_dataclass_fields, get_type_hints_infer_globalns -from ._forward_ref import PydanticRecursiveRef -from ._generics import get_standard_typevars_map, has_instance_in_type, recursively_defined_type_refs, replace_types -from ._schema_generation_shared import ( - CallbackGetCoreSchemaHandler, -) -from ._typing_extra import is_finalvar -from ._utils import lenient_issubclass - -if TYPE_CHECKING: - from ..fields import ComputedFieldInfo, FieldInfo - from ..main import BaseModel - from ..types import Discriminator - from ..validators import FieldValidatorModes - from ._dataclasses import StandardDataclass - from 
._schema_generation_shared import GetJsonSchemaFunction - -_SUPPORTS_TYPEDDICT = sys.version_info >= (3, 12) -_AnnotatedType = type(Annotated[int, 123]) - -FieldDecoratorInfo = Union[ValidatorDecoratorInfo, FieldValidatorDecoratorInfo, FieldSerializerDecoratorInfo] -FieldDecoratorInfoType = TypeVar('FieldDecoratorInfoType', bound=FieldDecoratorInfo) -AnyFieldDecorator = Union[ - Decorator[ValidatorDecoratorInfo], - Decorator[FieldValidatorDecoratorInfo], - Decorator[FieldSerializerDecoratorInfo], -] - -ModifyCoreSchemaWrapHandler = GetCoreSchemaHandler -GetCoreSchemaFunction = Callable[[Any, ModifyCoreSchemaWrapHandler], core_schema.CoreSchema] - - -TUPLE_TYPES: list[type] = [tuple, typing.Tuple] -LIST_TYPES: list[type] = [list, typing.List, collections.abc.MutableSequence] -SET_TYPES: list[type] = [set, typing.Set, collections.abc.MutableSet] -FROZEN_SET_TYPES: list[type] = [frozenset, typing.FrozenSet, collections.abc.Set] -DICT_TYPES: list[type] = [dict, typing.Dict, collections.abc.MutableMapping, collections.abc.Mapping] - - -def check_validator_fields_against_field_name( - info: FieldDecoratorInfo, - field: str, -) -> bool: - """Check if field name is in validator fields. - - Args: - info: The field info. - field: The field name to check. - - Returns: - `True` if field name is in validator fields, `False` otherwise. - """ - if isinstance(info, (ValidatorDecoratorInfo, FieldValidatorDecoratorInfo)): - if '*' in info.fields: - return True - for v_field_name in info.fields: - if v_field_name == field: - return True - return False - - -def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator], fields: Iterable[str]) -> None: - """Check if the defined fields in decorators exist in `fields` param. - - It ignores the check for a decorator if the decorator has `*` as field or `check_fields=False`. - - Args: - decorators: An iterable of decorators. - fields: An iterable of fields name. 
- - Raises: - PydanticUserError: If one of the field names does not exist in `fields` param. - """ - fields = set(fields) - for dec in decorators: - if isinstance(dec.info, (ValidatorDecoratorInfo, FieldValidatorDecoratorInfo)) and '*' in dec.info.fields: - continue - if dec.info.check_fields is False: - continue - for field in dec.info.fields: - if field not in fields: - raise PydanticUserError( - f'Decorators defined with incorrect fields: {dec.cls_ref}.{dec.cls_var_name}' - " (use check_fields=False if you're inheriting from the model and intended this)", - code='decorator-missing-field', - ) - - -def filter_field_decorator_info_by_field( - validator_functions: Iterable[Decorator[FieldDecoratorInfoType]], field: str -) -> list[Decorator[FieldDecoratorInfoType]]: - return [dec for dec in validator_functions if check_validator_fields_against_field_name(dec.info, field)] - - -def apply_each_item_validators( - schema: core_schema.CoreSchema, - each_item_validators: list[Decorator[ValidatorDecoratorInfo]], - field_name: str | None, -) -> core_schema.CoreSchema: - # This V1 compatibility shim should eventually be removed - - # push down any `each_item=True` validators - # note that this won't work for any Annotated types that get wrapped by a function validator - # but that's okay because that didn't exist in V1 - if schema['type'] == 'nullable': - schema['schema'] = apply_each_item_validators(schema['schema'], each_item_validators, field_name) - return schema - elif schema['type'] == 'tuple': - if (variadic_item_index := schema.get('variadic_item_index')) is not None: - schema['items_schema'][variadic_item_index] = apply_validators( - schema['items_schema'][variadic_item_index], each_item_validators, field_name - ) - elif is_list_like_schema_with_items_schema(schema): - inner_schema = schema.get('items_schema', None) - if inner_schema is None: - inner_schema = core_schema.any_schema() - schema['items_schema'] = apply_validators(inner_schema, each_item_validators, 
field_name) - elif schema['type'] == 'dict': - # push down any `each_item=True` validators onto dict _values_ - # this is super arbitrary but it's the V1 behavior - inner_schema = schema.get('values_schema', None) - if inner_schema is None: - inner_schema = core_schema.any_schema() - schema['values_schema'] = apply_validators(inner_schema, each_item_validators, field_name) - elif each_item_validators: - raise TypeError( - f"`@validator(..., each_item=True)` cannot be applied to fields with a schema of {schema['type']}" - ) - return schema - - -def modify_model_json_schema( - schema_or_field: CoreSchemaOrField, handler: GetJsonSchemaHandler, *, cls: Any -) -> JsonSchemaValue: - """Add title and description for model-like classes' JSON schema. - - Args: - schema_or_field: The schema data to generate a JSON schema from. - handler: The `GetCoreSchemaHandler` instance. - cls: The model-like class. - - Returns: - JsonSchemaValue: The updated JSON schema. - """ - from ..main import BaseModel - - json_schema = handler(schema_or_field) - original_schema = handler.resolve_ref_schema(json_schema) - # Preserve the fact that definitions schemas should never have sibling keys: - if '$ref' in original_schema: - ref = original_schema['$ref'] - original_schema.clear() - original_schema['allOf'] = [{'$ref': ref}] - if 'title' not in original_schema: - original_schema['title'] = cls.__name__ - # BaseModel; don't use cls.__doc__ as it will contain the verbose class signature by default - docstring = None if cls is BaseModel else cls.__doc__ - if docstring and 'description' not in original_schema: - original_schema['description'] = inspect.cleandoc(docstring) - return json_schema - - -JsonEncoders = Dict[Type[Any], JsonEncoder] - - -def _add_custom_serialization_from_json_encoders( - json_encoders: JsonEncoders | None, tp: Any, schema: CoreSchema -) -> CoreSchema: - """Iterate over the json_encoders and add the first matching encoder to the schema. 
- - Args: - json_encoders: A dictionary of types and their encoder functions. - tp: The type to check for a matching encoder. - schema: The schema to add the encoder to. - """ - if not json_encoders: - return schema - if 'serialization' in schema: - return schema - # Check the class type and its superclasses for a matching encoder - # Decimal.__class__.__mro__ (and probably other cases) doesn't include Decimal itself - # if the type is a GenericAlias (e.g. from list[int]) we need to use __class__ instead of .__mro__ - for base in (tp, *getattr(tp, '__mro__', tp.__class__.__mro__)[:-1]): - encoder = json_encoders.get(base) - if encoder is None: - continue - - warnings.warn( - f'`json_encoders` is deprecated. See https://docs.pydantic.dev/{version_short()}/concepts/serialization/#custom-serializers for alternatives', - PydanticDeprecatedSince20, - ) - - # TODO: in theory we should check that the schema accepts a serialization key - schema['serialization'] = core_schema.plain_serializer_function_ser_schema(encoder, when_used='json') - return schema - - return schema - - -TypesNamespace = Union[Dict[str, Any], None] - - -class TypesNamespaceStack: - """A stack of types namespaces.""" - - def __init__(self, types_namespace: TypesNamespace): - self._types_namespace_stack: list[TypesNamespace] = [types_namespace] - - @property - def tail(self) -> TypesNamespace: - return self._types_namespace_stack[-1] - - @contextmanager - def push(self, for_type: type[Any]): - types_namespace = {**_typing_extra.get_cls_types_namespace(for_type), **(self.tail or {})} - self._types_namespace_stack.append(types_namespace) - try: - yield - finally: - self._types_namespace_stack.pop() - - -class GenerateSchema: - """Generate core schema for a Pydantic model, dataclass and types like `str`, `datetime`, ... 
.""" - - __slots__ = ( - '_config_wrapper_stack', - '_types_namespace_stack', - '_typevars_map', - '_has_invalid_schema', - 'field_name_stack', - 'defs', - ) - - def __init__( - self, - config_wrapper: ConfigWrapper, - types_namespace: dict[str, Any] | None, - typevars_map: dict[Any, Any] | None = None, - ) -> None: - # we need a stack for recursing into child models - self._config_wrapper_stack = ConfigWrapperStack(config_wrapper) - self._types_namespace_stack = TypesNamespaceStack(types_namespace) - self._typevars_map = typevars_map - self._has_invalid_schema = False - self.field_name_stack = _FieldNameStack() - self.defs = _Definitions() - - @classmethod - def __from_parent( - cls, - config_wrapper_stack: ConfigWrapperStack, - types_namespace_stack: TypesNamespaceStack, - typevars_map: dict[Any, Any] | None, - defs: _Definitions, - ) -> GenerateSchema: - obj = cls.__new__(cls) - obj._config_wrapper_stack = config_wrapper_stack - obj._types_namespace_stack = types_namespace_stack - obj._typevars_map = typevars_map - obj._has_invalid_schema = False - obj.field_name_stack = _FieldNameStack() - obj.defs = defs - return obj - - @property - def _config_wrapper(self) -> ConfigWrapper: - return self._config_wrapper_stack.tail - - @property - def _types_namespace(self) -> dict[str, Any] | None: - return self._types_namespace_stack.tail - - @property - def _current_generate_schema(self) -> GenerateSchema: - cls = self._config_wrapper.schema_generator or GenerateSchema - return cls.__from_parent( - self._config_wrapper_stack, - self._types_namespace_stack, - self._typevars_map, - self.defs, - ) - - @property - def _arbitrary_types(self) -> bool: - return self._config_wrapper.arbitrary_types_allowed - - def str_schema(self) -> CoreSchema: - """Generate a CoreSchema for `str`""" - return core_schema.str_schema() - - # the following methods can be overridden but should be considered - # unstable / private APIs - def _list_schema(self, tp: Any, items_type: Any) -> CoreSchema: 
- return core_schema.list_schema(self.generate_schema(items_type)) - - def _dict_schema(self, tp: Any, keys_type: Any, values_type: Any) -> CoreSchema: - return core_schema.dict_schema(self.generate_schema(keys_type), self.generate_schema(values_type)) - - def _set_schema(self, tp: Any, items_type: Any) -> CoreSchema: - return core_schema.set_schema(self.generate_schema(items_type)) - - def _frozenset_schema(self, tp: Any, items_type: Any) -> CoreSchema: - return core_schema.frozenset_schema(self.generate_schema(items_type)) - - def _arbitrary_type_schema(self, tp: Any) -> CoreSchema: - if not isinstance(tp, type): - warn( - f'{tp!r} is not a Python type (it may be an instance of an object),' - ' Pydantic will allow any object with no validation since we cannot even' - ' enforce that the input is an instance of the given type.' - ' To get rid of this error wrap the type with `pydantic.SkipValidation`.', - UserWarning, - ) - return core_schema.any_schema() - return core_schema.is_instance_schema(tp) - - def _unknown_type_schema(self, obj: Any) -> CoreSchema: - raise PydanticSchemaGenerationError( - f'Unable to generate pydantic-core schema for {obj!r}. ' - 'Set `arbitrary_types_allowed=True` in the model_config to ignore this error' - ' or implement `__get_pydantic_core_schema__` on your type to fully support it.' - '\n\nIf you got this error by calling handler() within' - ' `__get_pydantic_core_schema__` then you likely need to call' - ' `handler.generate_schema()` since we do not call' - ' `__get_pydantic_core_schema__` on `` otherwise to avoid infinite recursion.' 
- ) - - def _apply_discriminator_to_union( - self, schema: CoreSchema, discriminator: str | Discriminator | None - ) -> CoreSchema: - if discriminator is None: - return schema - try: - return _discriminated_union.apply_discriminator( - schema, - discriminator, - ) - except _discriminated_union.MissingDefinitionForUnionRef: - # defer until defs are resolved - _discriminated_union.set_discriminator_in_metadata( - schema, - discriminator, - ) - return schema - - class CollectedInvalid(Exception): - pass - - def clean_schema(self, schema: CoreSchema) -> CoreSchema: - schema = self.collect_definitions(schema) - schema = simplify_schema_references(schema) - schema = _discriminated_union.apply_discriminators(schema) - if collect_invalid_schemas(schema): - raise self.CollectedInvalid() - schema = validate_core_schema(schema) - return schema - - def collect_definitions(self, schema: CoreSchema) -> CoreSchema: - ref = cast('str | None', schema.get('ref', None)) - if ref: - self.defs.definitions[ref] = schema - if 'ref' in schema: - schema = core_schema.definition_reference_schema(schema['ref']) - return core_schema.definitions_schema( - schema, - list(self.defs.definitions.values()), - ) - - def _add_js_function(self, metadata_schema: CoreSchema, js_function: Callable[..., Any]) -> None: - metadata = CoreMetadataHandler(metadata_schema).metadata - pydantic_js_functions = metadata.setdefault('pydantic_js_functions', []) - # because of how we generate core schemas for nested generic models - # we can end up adding `BaseModel.__get_pydantic_json_schema__` multiple times - # this check may fail to catch duplicates if the function is a `functools.partial` - # or something like that - # but if it does it'll fail by inserting the duplicate - if js_function not in pydantic_js_functions: - pydantic_js_functions.append(js_function) - - def generate_schema( - self, - obj: Any, - from_dunder_get_core_schema: bool = True, - ) -> core_schema.CoreSchema: - """Generate core schema. 
- - Args: - obj: The object to generate core schema for. - from_dunder_get_core_schema: Whether to generate schema from either the - `__get_pydantic_core_schema__` function or `__pydantic_core_schema__` property. - - Returns: - The generated core schema. - - Raises: - PydanticUndefinedAnnotation: - If it is not possible to evaluate forward reference. - PydanticSchemaGenerationError: - If it is not possible to generate pydantic-core schema. - TypeError: - - If `alias_generator` returns a disallowed type (must be str, AliasPath or AliasChoices). - - If V1 style validator with `each_item=True` applied on a wrong field. - PydanticUserError: - - If `typing.TypedDict` is used instead of `typing_extensions.TypedDict` on Python < 3.12. - - If `__modify_schema__` method is used instead of `__get_pydantic_json_schema__`. - """ - schema: CoreSchema | None = None - - if from_dunder_get_core_schema: - from_property = self._generate_schema_from_property(obj, obj) - if from_property is not None: - schema = from_property - - if schema is None: - schema = self._generate_schema(obj) - - metadata_js_function = _extract_get_pydantic_json_schema(obj, schema) - if metadata_js_function is not None: - metadata_schema = resolve_original_schema(schema, self.defs.definitions) - if metadata_schema: - self._add_js_function(metadata_schema, metadata_js_function) - - schema = _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, obj, schema) - - schema = self._post_process_generated_schema(schema) - - return schema - - def _model_schema(self, cls: type[BaseModel]) -> core_schema.CoreSchema: - """Generate schema for a Pydantic model.""" - with self.defs.get_schema_or_ref(cls) as (model_ref, maybe_schema): - if maybe_schema is not None: - return maybe_schema - - fields = cls.model_fields - decorators = cls.__pydantic_decorators__ - computed_fields = decorators.computed_fields - check_decorator_fields_exist( - chain( - decorators.field_validators.values(), - 
decorators.field_serializers.values(), - decorators.validators.values(), - ), - {*fields.keys(), *computed_fields.keys()}, - ) - config_wrapper = ConfigWrapper(cls.model_config, check=False) - core_config = config_wrapper.core_config(cls) - metadata = build_metadata_dict(js_functions=[partial(modify_model_json_schema, cls=cls)]) - - model_validators = decorators.model_validators.values() - - extras_schema = None - if core_config.get('extra_fields_behavior') == 'allow': - for tp in (cls, *cls.__mro__): - extras_annotation = cls.__annotations__.get('__pydantic_extra__', None) - if extras_annotation is not None: - tp = get_origin(extras_annotation) - if tp not in (Dict, dict): - raise PydanticSchemaGenerationError( - 'The type annotation for `__pydantic_extra__` must be `Dict[str, ...]`' - ) - extra_items_type = self._get_args_resolving_forward_refs( - cls.__annotations__['__pydantic_extra__'], - required=True, - )[1] - if extra_items_type is not Any: - extras_schema = self.generate_schema(extra_items_type) - break - - with self._config_wrapper_stack.push(config_wrapper), self._types_namespace_stack.push(cls): - self = self._current_generate_schema - if cls.__pydantic_root_model__: - root_field = self._common_field_schema('root', fields['root'], decorators) - inner_schema = root_field['schema'] - inner_schema = apply_model_validators(inner_schema, model_validators, 'inner') - model_schema = core_schema.model_schema( - cls, - inner_schema, - custom_init=getattr(cls, '__pydantic_custom_init__', None), - root_model=True, - post_init=getattr(cls, '__pydantic_post_init__', None), - config=core_config, - ref=model_ref, - metadata=metadata, - ) - else: - fields_schema: core_schema.CoreSchema = core_schema.model_fields_schema( - {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()}, - computed_fields=[ - self._computed_field_schema(d, decorators.field_serializers) - for d in computed_fields.values() - ], - extras_schema=extras_schema, - 
model_name=cls.__name__, - ) - inner_schema = apply_validators(fields_schema, decorators.root_validators.values(), None) - new_inner_schema = define_expected_missing_refs(inner_schema, recursively_defined_type_refs()) - if new_inner_schema is not None: - inner_schema = new_inner_schema - inner_schema = apply_model_validators(inner_schema, model_validators, 'inner') - - model_schema = core_schema.model_schema( - cls, - inner_schema, - custom_init=getattr(cls, '__pydantic_custom_init__', None), - root_model=False, - post_init=getattr(cls, '__pydantic_post_init__', None), - config=core_config, - ref=model_ref, - metadata=metadata, - ) - - schema = self._apply_model_serializers(model_schema, decorators.model_serializers.values()) - schema = apply_model_validators(schema, model_validators, 'outer') - self.defs.definitions[model_ref] = self._post_process_generated_schema(schema) - return core_schema.definition_reference_schema(model_ref) - - def _unpack_refs_defs(self, schema: CoreSchema) -> CoreSchema: - """Unpack all 'definitions' schemas into `GenerateSchema.defs.definitions` - and return the inner schema. - """ - - def get_ref(s: CoreSchema) -> str: - return s['ref'] # type: ignore - - if schema['type'] == 'definitions': - self.defs.definitions.update({get_ref(s): s for s in schema['definitions']}) - schema = schema['schema'] - return schema - - def _generate_schema_from_property(self, obj: Any, source: Any) -> core_schema.CoreSchema | None: - """Try to generate schema from either the `__get_pydantic_core_schema__` function or - `__pydantic_core_schema__` property. - - Note: `__get_pydantic_core_schema__` takes priority so it can - decide whether to use a `__pydantic_core_schema__` attribute, or generate a fresh schema. 
- """ - # avoid calling `__get_pydantic_core_schema__` if we've already visited this object - with self.defs.get_schema_or_ref(obj) as (_, maybe_schema): - if maybe_schema is not None: - return maybe_schema - if obj is source: - ref_mode = 'unpack' - else: - ref_mode = 'to-def' - - schema: CoreSchema - get_schema = getattr(obj, '__get_pydantic_core_schema__', None) - if get_schema is None: - validators = getattr(obj, '__get_validators__', None) - if validators is None: - return None - warn( - '`__get_validators__` is deprecated and will be removed, use `__get_pydantic_core_schema__` instead.', - PydanticDeprecatedSince20, - ) - schema = core_schema.chain_schema([core_schema.with_info_plain_validator_function(v) for v in validators()]) - else: - if len(inspect.signature(get_schema).parameters) == 1: - # (source) -> CoreSchema - schema = get_schema(source) - else: - schema = get_schema( - source, CallbackGetCoreSchemaHandler(self._generate_schema, self, ref_mode=ref_mode) - ) - - schema = self._unpack_refs_defs(schema) - - if is_function_with_inner_schema(schema): - ref = schema['schema'].pop('ref', None) # pyright: ignore[reportGeneralTypeIssues] - if ref: - schema['ref'] = ref - else: - ref = get_ref(schema) - - if ref: - self.defs.definitions[ref] = self._post_process_generated_schema(schema) - return core_schema.definition_reference_schema(ref) - - schema = self._post_process_generated_schema(schema) - - return schema - - def _resolve_forward_ref(self, obj: Any) -> Any: - # we assume that types_namespace has the target of forward references in its scope, - # but this could fail, for example, if calling Validator on an imported type which contains - # forward references to other types only defined in the module from which it was imported - # `Validator(SomeImportedTypeAliasWithAForwardReference)` - # or the equivalent for BaseModel - # class Model(BaseModel): - # x: SomeImportedTypeAliasWithAForwardReference - try: - obj = _typing_extra.eval_type_backport(obj, 
globalns=self._types_namespace) - except NameError as e: - raise PydanticUndefinedAnnotation.from_name_error(e) from e - - # if obj is still a ForwardRef, it means we can't evaluate it, raise PydanticUndefinedAnnotation - if isinstance(obj, ForwardRef): - raise PydanticUndefinedAnnotation(obj.__forward_arg__, f'Unable to evaluate forward reference {obj}') - - if self._typevars_map: - obj = replace_types(obj, self._typevars_map) - - return obj - - @overload - def _get_args_resolving_forward_refs(self, obj: Any, required: Literal[True]) -> tuple[Any, ...]: - ... - - @overload - def _get_args_resolving_forward_refs(self, obj: Any) -> tuple[Any, ...] | None: - ... - - def _get_args_resolving_forward_refs(self, obj: Any, required: bool = False) -> tuple[Any, ...] | None: - args = get_args(obj) - if args: - args = tuple([self._resolve_forward_ref(a) if isinstance(a, ForwardRef) else a for a in args]) - elif required: # pragma: no cover - raise TypeError(f'Expected {obj} to have generic parameters but it had none') - return args - - def _get_first_arg_or_any(self, obj: Any) -> Any: - args = self._get_args_resolving_forward_refs(obj) - if not args: - return Any - return args[0] - - def _get_first_two_args_or_any(self, obj: Any) -> tuple[Any, Any]: - args = self._get_args_resolving_forward_refs(obj) - if not args: - return (Any, Any) - if len(args) < 2: - origin = get_origin(obj) - raise TypeError(f'Expected two type arguments for {origin}, got 1') - return args[0], args[1] - - def _post_process_generated_schema(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema: - if 'metadata' not in schema: - schema['metadata'] = {} - return schema - - def _generate_schema(self, obj: Any) -> core_schema.CoreSchema: - """Recursively generate a pydantic-core schema for any supported python type.""" - has_invalid_schema = self._has_invalid_schema - self._has_invalid_schema = False - schema = self._generate_schema_inner(obj) - self._has_invalid_schema = self._has_invalid_schema 
or has_invalid_schema - return schema - - def _generate_schema_inner(self, obj: Any) -> core_schema.CoreSchema: - if isinstance(obj, _AnnotatedType): - return self._annotated_schema(obj) - - if isinstance(obj, dict): - # we assume this is already a valid schema - return obj # type: ignore[return-value] - - if isinstance(obj, str): - obj = ForwardRef(obj) - - if isinstance(obj, ForwardRef): - return self.generate_schema(self._resolve_forward_ref(obj)) - - from ..main import BaseModel - - if lenient_issubclass(obj, BaseModel): - return self._model_schema(obj) - - if isinstance(obj, PydanticRecursiveRef): - return core_schema.definition_reference_schema(schema_ref=obj.type_ref) - - return self.match_type(obj) - - def match_type(self, obj: Any) -> core_schema.CoreSchema: # noqa: C901 - """Main mapping of types to schemas. - - The general structure is a series of if statements starting with the simple cases - (non-generic primitive types) and then handling generics and other more complex cases. - - Each case either generates a schema directly, calls into a public user-overridable method - (like `GenerateSchema.tuple_variable_schema`) or calls into a private method that handles some - boilerplate before calling into the user-facing method (e.g. `GenerateSchema._tuple_schema`). - - The idea is that we'll evolve this into adding more and more user facing methods over time - as they get requested and we figure out what the right API for them is. 
- """ - if obj is str: - return self.str_schema() - elif obj is bytes: - return core_schema.bytes_schema() - elif obj is int: - return core_schema.int_schema() - elif obj is float: - return core_schema.float_schema() - elif obj is bool: - return core_schema.bool_schema() - elif obj is Any or obj is object: - return core_schema.any_schema() - elif obj is None or obj is _typing_extra.NoneType: - return core_schema.none_schema() - elif obj in TUPLE_TYPES: - return self._tuple_schema(obj) - elif obj in LIST_TYPES: - return self._list_schema(obj, self._get_first_arg_or_any(obj)) - elif obj in SET_TYPES: - return self._set_schema(obj, self._get_first_arg_or_any(obj)) - elif obj in FROZEN_SET_TYPES: - return self._frozenset_schema(obj, self._get_first_arg_or_any(obj)) - elif obj in DICT_TYPES: - return self._dict_schema(obj, *self._get_first_two_args_or_any(obj)) - elif isinstance(obj, TypeAliasType): - return self._type_alias_type_schema(obj) - elif obj == type: - return self._type_schema() - elif _typing_extra.is_callable_type(obj): - return core_schema.callable_schema() - elif _typing_extra.is_literal_type(obj): - return self._literal_schema(obj) - elif is_typeddict(obj): - return self._typed_dict_schema(obj, None) - elif _typing_extra.is_namedtuple(obj): - return self._namedtuple_schema(obj, None) - elif _typing_extra.is_new_type(obj): - # NewType, can't use isinstance because it fails <3.10 - return self.generate_schema(obj.__supertype__) - elif obj == re.Pattern: - return self._pattern_schema(obj) - elif obj is collections.abc.Hashable or obj is typing.Hashable: - return self._hashable_schema() - elif isinstance(obj, typing.TypeVar): - return self._unsubstituted_typevar_schema(obj) - elif is_finalvar(obj): - if obj is Final: - return core_schema.any_schema() - return self.generate_schema( - self._get_first_arg_or_any(obj), - ) - elif isinstance(obj, (FunctionType, LambdaType, MethodType, partial)): - return self._callable_schema(obj) - elif inspect.isclass(obj) and 
issubclass(obj, Enum): - from ._std_types_schema import get_enum_core_schema - - return get_enum_core_schema(obj, self._config_wrapper.config_dict) - - if _typing_extra.is_dataclass(obj): - return self._dataclass_schema(obj, None) - - res = self._get_prepare_pydantic_annotations_for_known_type(obj, ()) - if res is not None: - source_type, annotations = res - return self._apply_annotations(source_type, annotations) - - origin = get_origin(obj) - if origin is not None: - return self._match_generic_type(obj, origin) - - if self._arbitrary_types: - return self._arbitrary_type_schema(obj) - return self._unknown_type_schema(obj) - - def _match_generic_type(self, obj: Any, origin: Any) -> CoreSchema: # noqa: C901 - if isinstance(origin, TypeAliasType): - return self._type_alias_type_schema(obj) - - # Need to handle generic dataclasses before looking for the schema properties because attribute accesses - # on _GenericAlias delegate to the origin type, so lose the information about the concrete parametrization - # As a result, currently, there is no way to cache the schema for generic dataclasses. This may be possible - # to resolve by modifying the value returned by `Generic.__class_getitem__`, but that is a dangerous game. 
- if _typing_extra.is_dataclass(origin): - return self._dataclass_schema(obj, origin) - if _typing_extra.is_namedtuple(origin): - return self._namedtuple_schema(obj, origin) - - from_property = self._generate_schema_from_property(origin, obj) - if from_property is not None: - return from_property - - if _typing_extra.origin_is_union(origin): - return self._union_schema(obj) - elif origin in TUPLE_TYPES: - return self._tuple_schema(obj) - elif origin in LIST_TYPES: - return self._list_schema(obj, self._get_first_arg_or_any(obj)) - elif origin in SET_TYPES: - return self._set_schema(obj, self._get_first_arg_or_any(obj)) - elif origin in FROZEN_SET_TYPES: - return self._frozenset_schema(obj, self._get_first_arg_or_any(obj)) - elif origin in DICT_TYPES: - return self._dict_schema(obj, *self._get_first_two_args_or_any(obj)) - elif is_typeddict(origin): - return self._typed_dict_schema(obj, origin) - elif origin in (typing.Type, type): - return self._subclass_schema(obj) - elif origin in {typing.Sequence, collections.abc.Sequence}: - return self._sequence_schema(obj) - elif origin in {typing.Iterable, collections.abc.Iterable, typing.Generator, collections.abc.Generator}: - return self._iterable_schema(obj) - elif origin in (re.Pattern, typing.Pattern): - return self._pattern_schema(obj) - - if self._arbitrary_types: - return self._arbitrary_type_schema(origin) - return self._unknown_type_schema(obj) - - def _generate_td_field_schema( - self, - name: str, - field_info: FieldInfo, - decorators: DecoratorInfos, - *, - required: bool = True, - ) -> core_schema.TypedDictField: - """Prepare a TypedDictField to represent a model or typeddict field.""" - common_field = self._common_field_schema(name, field_info, decorators) - return core_schema.typed_dict_field( - common_field['schema'], - required=False if not field_info.is_required() else required, - serialization_exclude=common_field['serialization_exclude'], - validation_alias=common_field['validation_alias'], - 
serialization_alias=common_field['serialization_alias'], - metadata=common_field['metadata'], - ) - - def _generate_md_field_schema( - self, - name: str, - field_info: FieldInfo, - decorators: DecoratorInfos, - ) -> core_schema.ModelField: - """Prepare a ModelField to represent a model field.""" - common_field = self._common_field_schema(name, field_info, decorators) - return core_schema.model_field( - common_field['schema'], - serialization_exclude=common_field['serialization_exclude'], - validation_alias=common_field['validation_alias'], - serialization_alias=common_field['serialization_alias'], - frozen=common_field['frozen'], - metadata=common_field['metadata'], - ) - - def _generate_dc_field_schema( - self, - name: str, - field_info: FieldInfo, - decorators: DecoratorInfos, - ) -> core_schema.DataclassField: - """Prepare a DataclassField to represent the parameter/field, of a dataclass.""" - common_field = self._common_field_schema(name, field_info, decorators) - return core_schema.dataclass_field( - name, - common_field['schema'], - init=field_info.init, - init_only=field_info.init_var or None, - kw_only=None if field_info.kw_only else False, - serialization_exclude=common_field['serialization_exclude'], - validation_alias=common_field['validation_alias'], - serialization_alias=common_field['serialization_alias'], - frozen=common_field['frozen'], - metadata=common_field['metadata'], - ) - - @staticmethod - def _apply_alias_generator_to_field_info( - alias_generator: Callable[[str], str] | AliasGenerator, field_info: FieldInfo, field_name: str - ) -> None: - """Apply an alias_generator to aliases on a FieldInfo instance if appropriate. - - Args: - alias_generator: A callable that takes a string and returns a string, or an AliasGenerator instance. - field_info: The FieldInfo instance to which the alias_generator is (maybe) applied. - field_name: The name of the field from which to generate the alias. - """ - # Apply an alias_generator if - # 1. 
An alias is not specified - # 2. An alias is specified, but the priority is <= 1 - if ( - field_info.alias_priority is None - or field_info.alias_priority <= 1 - or field_info.alias is None - or field_info.validation_alias is None - or field_info.serialization_alias is None - ): - alias, validation_alias, serialization_alias = None, None, None - - if isinstance(alias_generator, AliasGenerator): - alias, validation_alias, serialization_alias = alias_generator.generate_aliases(field_name) - elif isinstance(alias_generator, Callable): - alias = alias_generator(field_name) - if not isinstance(alias, str): - raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}') - - # if priority is not set, we set to 1 - # which supports the case where the alias_generator from a child class is used - # to generate an alias for a field in a parent class - if field_info.alias_priority is None or field_info.alias_priority <= 1: - field_info.alias_priority = 1 - - # if the priority is 1, then we set the aliases to the generated alias - if field_info.alias_priority == 1: - field_info.serialization_alias = serialization_alias or alias - field_info.validation_alias = validation_alias or alias - field_info.alias = alias - - # if any of the aliases are not set, then we set them to the corresponding generated alias - if field_info.alias is None: - field_info.alias = alias - if field_info.serialization_alias is None: - field_info.serialization_alias = serialization_alias or alias - if field_info.validation_alias is None: - field_info.validation_alias = validation_alias or alias - - @staticmethod - def _apply_alias_generator_to_computed_field_info( - alias_generator: Callable[[str], str] | AliasGenerator, - computed_field_info: ComputedFieldInfo, - computed_field_name: str, - ): - """Apply an alias_generator to alias on a ComputedFieldInfo instance if appropriate. 
- - Args: - alias_generator: A callable that takes a string and returns a string, or an AliasGenerator instance. - computed_field_info: The ComputedFieldInfo instance to which the alias_generator is (maybe) applied. - computed_field_name: The name of the computed field from which to generate the alias. - """ - # Apply an alias_generator if - # 1. An alias is not specified - # 2. An alias is specified, but the priority is <= 1 - - if ( - computed_field_info.alias_priority is None - or computed_field_info.alias_priority <= 1 - or computed_field_info.alias is None - ): - alias, validation_alias, serialization_alias = None, None, None - - if isinstance(alias_generator, AliasGenerator): - alias, validation_alias, serialization_alias = alias_generator.generate_aliases(computed_field_name) - elif isinstance(alias_generator, Callable): - alias = alias_generator(computed_field_name) - if not isinstance(alias, str): - raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}') - - # if priority is not set, we set to 1 - # which supports the case where the alias_generator from a child class is used - # to generate an alias for a field in a parent class - if computed_field_info.alias_priority is None or computed_field_info.alias_priority <= 1: - computed_field_info.alias_priority = 1 - - # if the priority is 1, then we set the aliases to the generated alias - # note that we use the serialization_alias with priority over alias, as computed_field - # aliases are used for serialization only (not validation) - if computed_field_info.alias_priority == 1: - computed_field_info.alias = serialization_alias or alias - - def _common_field_schema( # C901 - self, name: str, field_info: FieldInfo, decorators: DecoratorInfos - ) -> _CommonField: - # Update FieldInfo annotation if appropriate: - from .. 
import AliasChoices, AliasPath - from ..fields import FieldInfo - - if has_instance_in_type(field_info.annotation, (ForwardRef, str)): - types_namespace = self._types_namespace - if self._typevars_map: - types_namespace = (types_namespace or {}).copy() - # Ensure that typevars get mapped to their concrete types: - types_namespace.update({k.__name__: v for k, v in self._typevars_map.items()}) - - evaluated = _typing_extra.eval_type_lenient(field_info.annotation, types_namespace) - if evaluated is not field_info.annotation and not has_instance_in_type(evaluated, PydanticRecursiveRef): - new_field_info = FieldInfo.from_annotation(evaluated) - field_info.annotation = new_field_info.annotation - - # Handle any field info attributes that may have been obtained from now-resolved annotations - for k, v in new_field_info._attributes_set.items(): - # If an attribute is already set, it means it was set by assigning to a call to Field (or just a - # default value), and that should take the highest priority. So don't overwrite existing attributes. - # We skip over "attributes" that are present in the metadata_lookup dict because these won't - # actually end up as attributes of the `FieldInfo` instance. - if k not in field_info._attributes_set and k not in field_info.metadata_lookup: - setattr(field_info, k, v) - - # Finally, ensure the field info also reflects all the `_attributes_set` that are actually metadata. 
- field_info.metadata = [*new_field_info.metadata, *field_info.metadata] - - source_type, annotations = field_info.annotation, field_info.metadata - - def set_discriminator(schema: CoreSchema) -> CoreSchema: - schema = self._apply_discriminator_to_union(schema, field_info.discriminator) - return schema - - with self.field_name_stack.push(name): - if field_info.discriminator is not None: - schema = self._apply_annotations(source_type, annotations, transform_inner_schema=set_discriminator) - else: - schema = self._apply_annotations( - source_type, - annotations, - ) - - # This V1 compatibility shim should eventually be removed - # push down any `each_item=True` validators - # note that this won't work for any Annotated types that get wrapped by a function validator - # but that's okay because that didn't exist in V1 - this_field_validators = filter_field_decorator_info_by_field(decorators.validators.values(), name) - if _validators_require_validate_default(this_field_validators): - field_info.validate_default = True - each_item_validators = [v for v in this_field_validators if v.info.each_item is True] - this_field_validators = [v for v in this_field_validators if v not in each_item_validators] - schema = apply_each_item_validators(schema, each_item_validators, name) - - schema = apply_validators(schema, filter_field_decorator_info_by_field(this_field_validators, name), name) - schema = apply_validators( - schema, filter_field_decorator_info_by_field(decorators.field_validators.values(), name), name - ) - - # the default validator needs to go outside of any other validators - # so that it is the topmost validator for the field validator - # which uses it to check if the field has a default value or not - if not field_info.is_required(): - schema = wrap_default(field_info, schema) - - schema = self._apply_field_serializers( - schema, filter_field_decorator_info_by_field(decorators.field_serializers.values(), name) - ) - json_schema_updates = { - 'title': 
field_info.title, - 'description': field_info.description, - 'examples': to_jsonable_python(field_info.examples), - } - json_schema_updates = {k: v for k, v in json_schema_updates.items() if v is not None} - - json_schema_extra = field_info.json_schema_extra - - metadata = build_metadata_dict( - js_annotation_functions=[get_json_schema_update_func(json_schema_updates, json_schema_extra)] - ) - - alias_generator = self._config_wrapper.alias_generator - if alias_generator is not None: - self._apply_alias_generator_to_field_info(alias_generator, field_info, name) - - if isinstance(field_info.validation_alias, (AliasChoices, AliasPath)): - validation_alias = field_info.validation_alias.convert_to_aliases() - else: - validation_alias = field_info.validation_alias - - return _common_field( - schema, - serialization_exclude=True if field_info.exclude else None, - validation_alias=validation_alias, - serialization_alias=field_info.serialization_alias, - frozen=field_info.frozen, - metadata=metadata, - ) - - def _union_schema(self, union_type: Any) -> core_schema.CoreSchema: - """Generate schema for a Union.""" - args = self._get_args_resolving_forward_refs(union_type, required=True) - choices: list[CoreSchema] = [] - nullable = False - for arg in args: - if arg is None or arg is _typing_extra.NoneType: - nullable = True - else: - choices.append(self.generate_schema(arg)) - - if len(choices) == 1: - s = choices[0] - else: - choices_with_tags: list[CoreSchema | tuple[CoreSchema, str]] = [] - for choice in choices: - metadata = choice.get('metadata') - if isinstance(metadata, dict): - tag = metadata.get(_core_utils.TAGGED_UNION_TAG_KEY) - if tag is not None: - choices_with_tags.append((choice, tag)) - else: - choices_with_tags.append(choice) - s = core_schema.union_schema(choices_with_tags) - - if nullable: - s = core_schema.nullable_schema(s) - return s - - def _type_alias_type_schema( - self, - obj: Any, # TypeAliasType - ) -> CoreSchema: - with 
self.defs.get_schema_or_ref(obj) as (ref, maybe_schema): - if maybe_schema is not None: - return maybe_schema - - origin = get_origin(obj) or obj - - annotation = origin.__value__ - typevars_map = get_standard_typevars_map(obj) - - with self._types_namespace_stack.push(origin): - annotation = _typing_extra.eval_type_lenient(annotation, self._types_namespace) - annotation = replace_types(annotation, typevars_map) - schema = self.generate_schema(annotation) - assert schema['type'] != 'definitions' - schema['ref'] = ref # type: ignore - self.defs.definitions[ref] = schema - return core_schema.definition_reference_schema(ref) - - def _literal_schema(self, literal_type: Any) -> CoreSchema: - """Generate schema for a Literal.""" - expected = _typing_extra.all_literal_values(literal_type) - assert expected, f'literal "expected" cannot be empty, obj={literal_type}' - return core_schema.literal_schema(expected) - - def _typed_dict_schema(self, typed_dict_cls: Any, origin: Any) -> core_schema.CoreSchema: - """Generate schema for a TypedDict. - - It is not possible to track required/optional keys in TypedDict without __required_keys__ - since TypedDict.__new__ erases the base classes (it replaces them with just `dict`) - and thus we can track usage of total=True/False - __required_keys__ was added in Python 3.9 - (https://github.com/miss-islington/cpython/blob/1e9939657dd1f8eb9f596f77c1084d2d351172fc/Doc/library/typing.rst?plain=1#L1546-L1548) - however it is buggy - (https://github.com/python/typing_extensions/blob/ac52ac5f2cb0e00e7988bae1e2a1b8257ac88d6d/src/typing_extensions.py#L657-L666). - - On 3.11 but < 3.12 TypedDict does not preserve inheritance information. 
- - Hence to avoid creating validators that do not do what users expect we only - support typing.TypedDict on Python >= 3.12 or typing_extension.TypedDict on all versions - """ - from ..fields import FieldInfo - - with self.defs.get_schema_or_ref(typed_dict_cls) as (typed_dict_ref, maybe_schema): - if maybe_schema is not None: - return maybe_schema - - typevars_map = get_standard_typevars_map(typed_dict_cls) - if origin is not None: - typed_dict_cls = origin - - if not _SUPPORTS_TYPEDDICT and type(typed_dict_cls).__module__ == 'typing': - raise PydanticUserError( - 'Please use `typing_extensions.TypedDict` instead of `typing.TypedDict` on Python < 3.12.', - code='typed-dict-version', - ) - - try: - config: ConfigDict | None = get_attribute_from_bases(typed_dict_cls, '__pydantic_config__') - except AttributeError: - config = None - - with self._config_wrapper_stack.push(config), self._types_namespace_stack.push(typed_dict_cls): - core_config = self._config_wrapper.core_config(typed_dict_cls) - - self = self._current_generate_schema - - required_keys: frozenset[str] = typed_dict_cls.__required_keys__ - - fields: dict[str, core_schema.TypedDictField] = {} - - decorators = DecoratorInfos.build(typed_dict_cls) - - for field_name, annotation in get_type_hints_infer_globalns( - typed_dict_cls, localns=self._types_namespace, include_extras=True - ).items(): - annotation = replace_types(annotation, typevars_map) - required = field_name in required_keys - - if get_origin(annotation) == _typing_extra.Required: - required = True - annotation = self._get_args_resolving_forward_refs( - annotation, - required=True, - )[0] - elif get_origin(annotation) == _typing_extra.NotRequired: - required = False - annotation = self._get_args_resolving_forward_refs( - annotation, - required=True, - )[0] - - field_info = FieldInfo.from_annotation(annotation) - fields[field_name] = self._generate_td_field_schema( - field_name, field_info, decorators, required=required - ) - - metadata = 
build_metadata_dict( - js_functions=[partial(modify_model_json_schema, cls=typed_dict_cls)], typed_dict_cls=typed_dict_cls - ) - - td_schema = core_schema.typed_dict_schema( - fields, - computed_fields=[ - self._computed_field_schema(d, decorators.field_serializers) - for d in decorators.computed_fields.values() - ], - ref=typed_dict_ref, - metadata=metadata, - config=core_config, - ) - - schema = self._apply_model_serializers(td_schema, decorators.model_serializers.values()) - schema = apply_model_validators(schema, decorators.model_validators.values(), 'all') - self.defs.definitions[typed_dict_ref] = self._post_process_generated_schema(schema) - return core_schema.definition_reference_schema(typed_dict_ref) - - def _namedtuple_schema(self, namedtuple_cls: Any, origin: Any) -> core_schema.CoreSchema: - """Generate schema for a NamedTuple.""" - with self.defs.get_schema_or_ref(namedtuple_cls) as (namedtuple_ref, maybe_schema): - if maybe_schema is not None: - return maybe_schema - typevars_map = get_standard_typevars_map(namedtuple_cls) - if origin is not None: - namedtuple_cls = origin - - annotations: dict[str, Any] = get_type_hints_infer_globalns( - namedtuple_cls, include_extras=True, localns=self._types_namespace - ) - if not annotations: - # annotations is empty, happens if namedtuple_cls defined via collections.namedtuple(...) 
- annotations = {k: Any for k in namedtuple_cls._fields} - - if typevars_map: - annotations = { - field_name: replace_types(annotation, typevars_map) - for field_name, annotation in annotations.items() - } - - arguments_schema = core_schema.arguments_schema( - [ - self._generate_parameter_schema( - field_name, annotation, default=namedtuple_cls._field_defaults.get(field_name, Parameter.empty) - ) - for field_name, annotation in annotations.items() - ], - metadata=build_metadata_dict(js_prefer_positional_arguments=True), - ) - return core_schema.call_schema(arguments_schema, namedtuple_cls, ref=namedtuple_ref) - - def _generate_parameter_schema( - self, - name: str, - annotation: type[Any], - default: Any = Parameter.empty, - mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] | None = None, - ) -> core_schema.ArgumentsParameter: - """Prepare a ArgumentsParameter to represent a field in a namedtuple or function signature.""" - from ..fields import FieldInfo - - if default is Parameter.empty: - field = FieldInfo.from_annotation(annotation) - else: - field = FieldInfo.from_annotated_attribute(annotation, default) - assert field.annotation is not None, 'field.annotation should not be None when generating a schema' - source_type, annotations = field.annotation, field.metadata - with self.field_name_stack.push(name): - schema = self._apply_annotations(source_type, annotations) - - if not field.is_required(): - schema = wrap_default(field, schema) - - parameter_schema = core_schema.arguments_parameter(name, schema) - if mode is not None: - parameter_schema['mode'] = mode - if field.alias is not None: - parameter_schema['alias'] = field.alias - else: - alias_generator = self._config_wrapper.alias_generator - if isinstance(alias_generator, AliasGenerator) and alias_generator.alias is not None: - parameter_schema['alias'] = alias_generator.alias(name) - elif isinstance(alias_generator, Callable): - parameter_schema['alias'] = alias_generator(name) - 
return parameter_schema - - def _tuple_schema(self, tuple_type: Any) -> core_schema.CoreSchema: - """Generate schema for a Tuple, e.g. `tuple[int, str]` or `tuple[int, ...]`.""" - # TODO: do we really need to resolve type vars here? - typevars_map = get_standard_typevars_map(tuple_type) - params = self._get_args_resolving_forward_refs(tuple_type) - - if typevars_map and params: - params = tuple(replace_types(param, typevars_map) for param in params) - - # NOTE: subtle difference: `tuple[()]` gives `params=()`, whereas `typing.Tuple[()]` gives `params=((),)` - # This is only true for <3.11, on Python 3.11+ `typing.Tuple[()]` gives `params=()` - if not params: - if tuple_type in TUPLE_TYPES: - return core_schema.tuple_schema([core_schema.any_schema()], variadic_item_index=0) - else: - # special case for `tuple[()]` which means `tuple[]` - an empty tuple - return core_schema.tuple_schema([]) - elif params[-1] is Ellipsis: - if len(params) == 2: - return core_schema.tuple_schema([self.generate_schema(params[0])], variadic_item_index=0) - else: - # TODO: something like https://github.com/pydantic/pydantic/issues/5952 - raise ValueError('Variable tuples can only have one type') - elif len(params) == 1 and params[0] == (): - # special case for `Tuple[()]` which means `Tuple[]` - an empty tuple - # NOTE: This conditional can be removed when we drop support for Python 3.10. 
- return core_schema.tuple_schema([]) - else: - return core_schema.tuple_schema([self.generate_schema(param) for param in params]) - - def _type_schema(self) -> core_schema.CoreSchema: - return core_schema.custom_error_schema( - core_schema.is_instance_schema(type), - custom_error_type='is_type', - custom_error_message='Input should be a type', - ) - - def _union_is_subclass_schema(self, union_type: Any) -> core_schema.CoreSchema: - """Generate schema for `Type[Union[X, ...]]`.""" - args = self._get_args_resolving_forward_refs(union_type, required=True) - return core_schema.union_schema([self.generate_schema(typing.Type[args]) for args in args]) - - def _subclass_schema(self, type_: Any) -> core_schema.CoreSchema: - """Generate schema for a Type, e.g. `Type[int]`.""" - type_param = self._get_first_arg_or_any(type_) - if type_param == Any: - return self._type_schema() - elif isinstance(type_param, typing.TypeVar): - if type_param.__bound__: - if _typing_extra.origin_is_union(get_origin(type_param.__bound__)): - return self._union_is_subclass_schema(type_param.__bound__) - return core_schema.is_subclass_schema(type_param.__bound__) - elif type_param.__constraints__: - return core_schema.union_schema( - [self.generate_schema(typing.Type[c]) for c in type_param.__constraints__] - ) - else: - return self._type_schema() - elif _typing_extra.origin_is_union(get_origin(type_param)): - return self._union_is_subclass_schema(type_param) - else: - return core_schema.is_subclass_schema(type_param) - - def _sequence_schema(self, sequence_type: Any) -> core_schema.CoreSchema: - """Generate schema for a Sequence, e.g. 
`Sequence[int]`.""" - item_type = self._get_first_arg_or_any(sequence_type) - - list_schema = core_schema.list_schema(self.generate_schema(item_type)) - python_schema = core_schema.is_instance_schema(typing.Sequence, cls_repr='Sequence') - if item_type != Any: - from ._validators import sequence_validator - - python_schema = core_schema.chain_schema( - [python_schema, core_schema.no_info_wrap_validator_function(sequence_validator, list_schema)], - ) - return core_schema.json_or_python_schema(json_schema=list_schema, python_schema=python_schema) - - def _iterable_schema(self, type_: Any) -> core_schema.GeneratorSchema: - """Generate a schema for an `Iterable`.""" - item_type = self._get_first_arg_or_any(type_) - - return core_schema.generator_schema(self.generate_schema(item_type)) - - def _pattern_schema(self, pattern_type: Any) -> core_schema.CoreSchema: - from . import _validators - - metadata = build_metadata_dict(js_functions=[lambda _1, _2: {'type': 'string', 'format': 'regex'}]) - ser = core_schema.plain_serializer_function_ser_schema( - attrgetter('pattern'), when_used='json', return_schema=core_schema.str_schema() - ) - if pattern_type == typing.Pattern or pattern_type == re.Pattern: - # bare type - return core_schema.no_info_plain_validator_function( - _validators.pattern_either_validator, serialization=ser, metadata=metadata - ) - - param = self._get_args_resolving_forward_refs( - pattern_type, - required=True, - )[0] - if param == str: - return core_schema.no_info_plain_validator_function( - _validators.pattern_str_validator, serialization=ser, metadata=metadata - ) - elif param == bytes: - return core_schema.no_info_plain_validator_function( - _validators.pattern_bytes_validator, serialization=ser, metadata=metadata - ) - else: - raise PydanticSchemaGenerationError(f'Unable to generate pydantic-core schema for {pattern_type!r}.') - - def _hashable_schema(self) -> core_schema.CoreSchema: - return core_schema.custom_error_schema( - 
core_schema.is_instance_schema(collections.abc.Hashable), - custom_error_type='is_hashable', - custom_error_message='Input should be hashable', - ) - - def _dataclass_schema( - self, dataclass: type[StandardDataclass], origin: type[StandardDataclass] | None - ) -> core_schema.CoreSchema: - """Generate schema for a dataclass.""" - with self.defs.get_schema_or_ref(dataclass) as (dataclass_ref, maybe_schema): - if maybe_schema is not None: - return maybe_schema - - typevars_map = get_standard_typevars_map(dataclass) - if origin is not None: - dataclass = origin - - config = getattr(dataclass, '__pydantic_config__', None) - with self._config_wrapper_stack.push(config), self._types_namespace_stack.push(dataclass): - core_config = self._config_wrapper.core_config(dataclass) - - self = self._current_generate_schema - - from ..dataclasses import is_pydantic_dataclass - - if is_pydantic_dataclass(dataclass): - fields = deepcopy(dataclass.__pydantic_fields__) - if typevars_map: - for field in fields.values(): - field.apply_typevars_map(typevars_map, self._types_namespace) - else: - fields = collect_dataclass_fields( - dataclass, - self._types_namespace, - typevars_map=typevars_map, - ) - - # disallow combination of init=False on a dataclass field and extra='allow' on a dataclass - if config and config.get('extra') == 'allow': - # disallow combination of init=False on a dataclass field and extra='allow' on a dataclass - for field_name, field in fields.items(): - if field.init is False: - raise PydanticUserError( - f'Field {field_name} has `init=False` and dataclass has config setting `extra="allow"`. ' - f'This combination is not allowed.', - code='dataclass-init-false-extra-allow', - ) - - decorators = dataclass.__dict__.get('__pydantic_decorators__') or DecoratorInfos.build(dataclass) - # Move kw_only=False args to the start of the list, as this is how vanilla dataclasses work. 
- # Note that when kw_only is missing or None, it is treated as equivalent to kw_only=True - args = sorted( - (self._generate_dc_field_schema(k, v, decorators) for k, v in fields.items()), - key=lambda a: a.get('kw_only') is not False, - ) - has_post_init = hasattr(dataclass, '__post_init__') - has_slots = hasattr(dataclass, '__slots__') - - args_schema = core_schema.dataclass_args_schema( - dataclass.__name__, - args, - computed_fields=[ - self._computed_field_schema(d, decorators.field_serializers) - for d in decorators.computed_fields.values() - ], - collect_init_only=has_post_init, - ) - - inner_schema = apply_validators(args_schema, decorators.root_validators.values(), None) - - model_validators = decorators.model_validators.values() - inner_schema = apply_model_validators(inner_schema, model_validators, 'inner') - - dc_schema = core_schema.dataclass_schema( - dataclass, - inner_schema, - post_init=has_post_init, - ref=dataclass_ref, - fields=[field.name for field in dataclasses.fields(dataclass)], - slots=has_slots, - config=core_config, - ) - schema = self._apply_model_serializers(dc_schema, decorators.model_serializers.values()) - schema = apply_model_validators(schema, model_validators, 'outer') - self.defs.definitions[dataclass_ref] = self._post_process_generated_schema(schema) - return core_schema.definition_reference_schema(dataclass_ref) - - def _callable_schema(self, function: Callable[..., Any]) -> core_schema.CallSchema: - """Generate schema for a Callable. 
- - TODO support functional validators once we support them in Config - """ - sig = signature(function) - - type_hints = _typing_extra.get_function_type_hints(function) - - mode_lookup: dict[_ParameterKind, Literal['positional_only', 'positional_or_keyword', 'keyword_only']] = { - Parameter.POSITIONAL_ONLY: 'positional_only', - Parameter.POSITIONAL_OR_KEYWORD: 'positional_or_keyword', - Parameter.KEYWORD_ONLY: 'keyword_only', - } - - arguments_list: list[core_schema.ArgumentsParameter] = [] - var_args_schema: core_schema.CoreSchema | None = None - var_kwargs_schema: core_schema.CoreSchema | None = None - - for name, p in sig.parameters.items(): - if p.annotation is sig.empty: - annotation = Any - else: - annotation = type_hints[name] - - parameter_mode = mode_lookup.get(p.kind) - if parameter_mode is not None: - arg_schema = self._generate_parameter_schema(name, annotation, p.default, parameter_mode) - arguments_list.append(arg_schema) - elif p.kind == Parameter.VAR_POSITIONAL: - var_args_schema = self.generate_schema(annotation) - else: - assert p.kind == Parameter.VAR_KEYWORD, p.kind - var_kwargs_schema = self.generate_schema(annotation) - - return_schema: core_schema.CoreSchema | None = None - config_wrapper = self._config_wrapper - if config_wrapper.validate_return: - return_hint = type_hints.get('return') - if return_hint is not None: - return_schema = self.generate_schema(return_hint) - - return core_schema.call_schema( - core_schema.arguments_schema( - arguments_list, - var_args_schema=var_args_schema, - var_kwargs_schema=var_kwargs_schema, - populate_by_name=config_wrapper.populate_by_name, - ), - function, - return_schema=return_schema, - ) - - def _unsubstituted_typevar_schema(self, typevar: typing.TypeVar) -> core_schema.CoreSchema: - assert isinstance(typevar, typing.TypeVar) - - bound = typevar.__bound__ - constraints = typevar.__constraints__ - default = getattr(typevar, '__default__', None) - - if (bound is not None) + (len(constraints) != 0) + 
(default is not None) > 1: - raise NotImplementedError( - 'Pydantic does not support mixing more than one of TypeVar bounds, constraints and defaults' - ) - - if default is not None: - return self.generate_schema(default) - elif constraints: - return self._union_schema(typing.Union[constraints]) # type: ignore - elif bound: - schema = self.generate_schema(bound) - schema['serialization'] = core_schema.wrap_serializer_function_ser_schema( - lambda x, h: h(x), schema=core_schema.any_schema() - ) - return schema - else: - return core_schema.any_schema() - - def _computed_field_schema( - self, - d: Decorator[ComputedFieldInfo], - field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]], - ) -> core_schema.ComputedField: - try: - return_type = _decorators.get_function_return_type(d.func, d.info.return_type, self._types_namespace) - except NameError as e: - raise PydanticUndefinedAnnotation.from_name_error(e) from e - if return_type is PydanticUndefined: - raise PydanticUserError( - 'Computed field is missing return type annotation or specifying `return_type`' - ' to the `@computed_field` decorator (e.g. `@computed_field(return_type=int|str)`)', - code='model-field-missing-annotation', - ) - - return_type = replace_types(return_type, self._typevars_map) - # Create a new ComputedFieldInfo so that different type parametrizations of the same - # generic model's computed field can have different return types. 
- d.info = dataclasses.replace(d.info, return_type=return_type) - return_type_schema = self.generate_schema(return_type) - # Apply serializers to computed field if there exist - return_type_schema = self._apply_field_serializers( - return_type_schema, - filter_field_decorator_info_by_field(field_serializers.values(), d.cls_var_name), - computed_field=True, - ) - - alias_generator = self._config_wrapper.alias_generator - if alias_generator is not None: - self._apply_alias_generator_to_computed_field_info( - alias_generator=alias_generator, computed_field_info=d.info, computed_field_name=d.cls_var_name - ) - - def set_computed_field_metadata(schema: CoreSchemaOrField, handler: GetJsonSchemaHandler) -> JsonSchemaValue: - json_schema = handler(schema) - - json_schema['readOnly'] = True - - title = d.info.title - if title is not None: - json_schema['title'] = title - - description = d.info.description - if description is not None: - json_schema['description'] = description - - examples = d.info.examples - if examples is not None: - json_schema['examples'] = to_jsonable_python(examples) - - json_schema_extra = d.info.json_schema_extra - if json_schema_extra is not None: - add_json_schema_extra(json_schema, json_schema_extra) - - return json_schema - - metadata = build_metadata_dict(js_annotation_functions=[set_computed_field_metadata]) - return core_schema.computed_field( - d.cls_var_name, return_schema=return_type_schema, alias=d.info.alias, metadata=metadata - ) - - def _annotated_schema(self, annotated_type: Any) -> core_schema.CoreSchema: - """Generate schema for an Annotated type, e.g. 
`Annotated[int, Field(...)]` or `Annotated[int, Gt(0)]`.""" - from ..fields import FieldInfo - - source_type, *annotations = self._get_args_resolving_forward_refs( - annotated_type, - required=True, - ) - schema = self._apply_annotations(source_type, annotations) - # put the default validator last so that TypeAdapter.get_default_value() works - # even if there are function validators involved - for annotation in annotations: - if isinstance(annotation, FieldInfo): - schema = wrap_default(annotation, schema) - return schema - - def _get_prepare_pydantic_annotations_for_known_type( - self, obj: Any, annotations: tuple[Any, ...] - ) -> tuple[Any, list[Any]] | None: - from ._std_types_schema import PREPARE_METHODS - - # Check for hashability - try: - hash(obj) - except TypeError: - # obj is definitely not a known type if this fails - return None - - for gen in PREPARE_METHODS: - res = gen(obj, annotations, self._config_wrapper.config_dict) - if res is not None: - return res - - return None - - def _apply_annotations( - self, - source_type: Any, - annotations: list[Any], - transform_inner_schema: Callable[[CoreSchema], CoreSchema] = lambda x: x, - ) -> CoreSchema: - """Apply arguments from `Annotated` or from `FieldInfo` to a schema. - - This gets called by `GenerateSchema._annotated_schema` but differs from it in that it does - not expect `source_type` to be an `Annotated` object, it expects it to be the first argument of that - (in other words, `GenerateSchema._annotated_schema` just unpacks `Annotated`, this process it). 
- """ - annotations = list(_known_annotated_metadata.expand_grouped_metadata(annotations)) - res = self._get_prepare_pydantic_annotations_for_known_type(source_type, tuple(annotations)) - if res is not None: - source_type, annotations = res - - pydantic_js_annotation_functions: list[GetJsonSchemaFunction] = [] - - def inner_handler(obj: Any) -> CoreSchema: - from_property = self._generate_schema_from_property(obj, obj) - if from_property is None: - schema = self._generate_schema(obj) - else: - schema = from_property - metadata_js_function = _extract_get_pydantic_json_schema(obj, schema) - if metadata_js_function is not None: - metadata_schema = resolve_original_schema(schema, self.defs.definitions) - if metadata_schema is not None: - self._add_js_function(metadata_schema, metadata_js_function) - return transform_inner_schema(schema) - - get_inner_schema = CallbackGetCoreSchemaHandler(inner_handler, self) - - for annotation in annotations: - if annotation is None: - continue - get_inner_schema = self._get_wrapped_inner_schema( - get_inner_schema, annotation, pydantic_js_annotation_functions - ) - - schema = get_inner_schema(source_type) - if pydantic_js_annotation_functions: - metadata = CoreMetadataHandler(schema).metadata - metadata.setdefault('pydantic_js_annotation_functions', []).extend(pydantic_js_annotation_functions) - return _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, source_type, schema) - - def _apply_single_annotation(self, schema: core_schema.CoreSchema, metadata: Any) -> core_schema.CoreSchema: - from ..fields import FieldInfo - - if isinstance(metadata, FieldInfo): - for field_metadata in metadata.metadata: - schema = self._apply_single_annotation(schema, field_metadata) - - if metadata.discriminator is not None: - schema = self._apply_discriminator_to_union(schema, metadata.discriminator) - return schema - - if schema['type'] == 'nullable': - # for nullable schemas, metadata is automatically applied to the inner 
schema - inner = schema.get('schema', core_schema.any_schema()) - inner = self._apply_single_annotation(inner, metadata) - if inner: - schema['schema'] = inner - return schema - - original_schema = schema - ref = schema.get('ref', None) - if ref is not None: - schema = schema.copy() - new_ref = ref + f'_{repr(metadata)}' - if new_ref in self.defs.definitions: - return self.defs.definitions[new_ref] - schema['ref'] = new_ref # type: ignore - elif schema['type'] == 'definition-ref': - ref = schema['schema_ref'] - if ref in self.defs.definitions: - schema = self.defs.definitions[ref].copy() - new_ref = ref + f'_{repr(metadata)}' - if new_ref in self.defs.definitions: - return self.defs.definitions[new_ref] - schema['ref'] = new_ref # type: ignore - - maybe_updated_schema = _known_annotated_metadata.apply_known_metadata(metadata, schema.copy()) - - if maybe_updated_schema is not None: - return maybe_updated_schema - return original_schema - - def _apply_single_annotation_json_schema( - self, schema: core_schema.CoreSchema, metadata: Any - ) -> core_schema.CoreSchema: - from ..fields import FieldInfo - - if isinstance(metadata, FieldInfo): - for field_metadata in metadata.metadata: - schema = self._apply_single_annotation_json_schema(schema, field_metadata) - json_schema_update: JsonSchemaValue = {} - if metadata.title: - json_schema_update['title'] = metadata.title - if metadata.description: - json_schema_update['description'] = metadata.description - if metadata.examples: - json_schema_update['examples'] = to_jsonable_python(metadata.examples) - - json_schema_extra = metadata.json_schema_extra - if json_schema_update or json_schema_extra: - CoreMetadataHandler(schema).metadata.setdefault('pydantic_js_annotation_functions', []).append( - get_json_schema_update_func(json_schema_update, json_schema_extra) - ) - return schema - - def _get_wrapped_inner_schema( - self, - get_inner_schema: GetCoreSchemaHandler, - annotation: Any, - pydantic_js_annotation_functions: 
list[GetJsonSchemaFunction], - ) -> CallbackGetCoreSchemaHandler: - metadata_get_schema: GetCoreSchemaFunction = getattr(annotation, '__get_pydantic_core_schema__', None) or ( - lambda source, handler: handler(source) - ) - - def new_handler(source: Any) -> core_schema.CoreSchema: - schema = metadata_get_schema(source, get_inner_schema) - schema = self._apply_single_annotation(schema, annotation) - schema = self._apply_single_annotation_json_schema(schema, annotation) - - metadata_js_function = _extract_get_pydantic_json_schema(annotation, schema) - if metadata_js_function is not None: - pydantic_js_annotation_functions.append(metadata_js_function) - return schema - - return CallbackGetCoreSchemaHandler(new_handler, self) - - def _apply_field_serializers( - self, - schema: core_schema.CoreSchema, - serializers: list[Decorator[FieldSerializerDecoratorInfo]], - computed_field: bool = False, - ) -> core_schema.CoreSchema: - """Apply field serializers to a schema.""" - if serializers: - schema = copy(schema) - if schema['type'] == 'definitions': - inner_schema = schema['schema'] - schema['schema'] = self._apply_field_serializers(inner_schema, serializers) - return schema - else: - ref = typing.cast('str|None', schema.get('ref', None)) - if ref is not None: - schema = core_schema.definition_reference_schema(ref) - - # use the last serializer to make it easy to override a serializer set on a parent model - serializer = serializers[-1] - is_field_serializer, info_arg = inspect_field_serializer( - serializer.func, serializer.info.mode, computed_field=computed_field - ) - - try: - return_type = _decorators.get_function_return_type( - serializer.func, serializer.info.return_type, self._types_namespace - ) - except NameError as e: - raise PydanticUndefinedAnnotation.from_name_error(e) from e - - if return_type is PydanticUndefined: - return_schema = None - else: - return_schema = self.generate_schema(return_type) - - if serializer.info.mode == 'wrap': - 
schema['serialization'] = core_schema.wrap_serializer_function_ser_schema( - serializer.func, - is_field_serializer=is_field_serializer, - info_arg=info_arg, - return_schema=return_schema, - when_used=serializer.info.when_used, - ) - else: - assert serializer.info.mode == 'plain' - schema['serialization'] = core_schema.plain_serializer_function_ser_schema( - serializer.func, - is_field_serializer=is_field_serializer, - info_arg=info_arg, - return_schema=return_schema, - when_used=serializer.info.when_used, - ) - return schema - - def _apply_model_serializers( - self, schema: core_schema.CoreSchema, serializers: Iterable[Decorator[ModelSerializerDecoratorInfo]] - ) -> core_schema.CoreSchema: - """Apply model serializers to a schema.""" - ref: str | None = schema.pop('ref', None) # type: ignore - if serializers: - serializer = list(serializers)[-1] - info_arg = inspect_model_serializer(serializer.func, serializer.info.mode) - - try: - return_type = _decorators.get_function_return_type( - serializer.func, serializer.info.return_type, self._types_namespace - ) - except NameError as e: - raise PydanticUndefinedAnnotation.from_name_error(e) from e - if return_type is PydanticUndefined: - return_schema = None - else: - return_schema = self.generate_schema(return_type) - - if serializer.info.mode == 'wrap': - ser_schema: core_schema.SerSchema = core_schema.wrap_serializer_function_ser_schema( - serializer.func, - info_arg=info_arg, - return_schema=return_schema, - when_used=serializer.info.when_used, - ) - else: - # plain - ser_schema = core_schema.plain_serializer_function_ser_schema( - serializer.func, - info_arg=info_arg, - return_schema=return_schema, - when_used=serializer.info.when_used, - ) - schema['serialization'] = ser_schema - if ref: - schema['ref'] = ref # type: ignore - return schema - - -_VALIDATOR_F_MATCH: Mapping[ - tuple[FieldValidatorModes, Literal['no-info', 'with-info']], - Callable[[Callable[..., Any], core_schema.CoreSchema, str | None], 
core_schema.CoreSchema], -] = { - ('before', 'no-info'): lambda f, schema, _: core_schema.no_info_before_validator_function(f, schema), - ('after', 'no-info'): lambda f, schema, _: core_schema.no_info_after_validator_function(f, schema), - ('plain', 'no-info'): lambda f, _1, _2: core_schema.no_info_plain_validator_function(f), - ('wrap', 'no-info'): lambda f, schema, _: core_schema.no_info_wrap_validator_function(f, schema), - ('before', 'with-info'): lambda f, schema, field_name: core_schema.with_info_before_validator_function( - f, schema, field_name=field_name - ), - ('after', 'with-info'): lambda f, schema, field_name: core_schema.with_info_after_validator_function( - f, schema, field_name=field_name - ), - ('plain', 'with-info'): lambda f, _, field_name: core_schema.with_info_plain_validator_function( - f, field_name=field_name - ), - ('wrap', 'with-info'): lambda f, schema, field_name: core_schema.with_info_wrap_validator_function( - f, schema, field_name=field_name - ), -} - - -def apply_validators( - schema: core_schema.CoreSchema, - validators: Iterable[Decorator[RootValidatorDecoratorInfo]] - | Iterable[Decorator[ValidatorDecoratorInfo]] - | Iterable[Decorator[FieldValidatorDecoratorInfo]], - field_name: str | None, -) -> core_schema.CoreSchema: - """Apply validators to a schema. - - Args: - schema: The schema to apply validators on. - validators: An iterable of validators. - field_name: The name of the field if validators are being applied to a model field. - - Returns: - The updated schema. 
- """ - for validator in validators: - info_arg = inspect_validator(validator.func, validator.info.mode) - val_type = 'with-info' if info_arg else 'no-info' - - schema = _VALIDATOR_F_MATCH[(validator.info.mode, val_type)](validator.func, schema, field_name) - return schema - - -def _validators_require_validate_default(validators: Iterable[Decorator[ValidatorDecoratorInfo]]) -> bool: - """In v1, if any of the validators for a field had `always=True`, the default value would be validated. - - This serves as an auxiliary function for re-implementing that logic, by looping over a provided - collection of (v1-style) ValidatorDecoratorInfo's and checking if any of them have `always=True`. - - We should be able to drop this function and the associated logic calling it once we drop support - for v1-style validator decorators. (Or we can extend it and keep it if we add something equivalent - to the v1-validator `always` kwarg to `field_validator`.) - """ - for validator in validators: - if validator.info.always: - return True - return False - - -def apply_model_validators( - schema: core_schema.CoreSchema, - validators: Iterable[Decorator[ModelValidatorDecoratorInfo]], - mode: Literal['inner', 'outer', 'all'], -) -> core_schema.CoreSchema: - """Apply model validators to a schema. - - If mode == 'inner', only "before" validators are applied - If mode == 'outer', validators other than "before" are applied - If mode == 'all', all validators are applied - - Args: - schema: The schema to apply validators on. - validators: An iterable of validators. - mode: The validator mode. - - Returns: - The updated schema. 
- """ - ref: str | None = schema.pop('ref', None) # type: ignore - for validator in validators: - if mode == 'inner' and validator.info.mode != 'before': - continue - if mode == 'outer' and validator.info.mode == 'before': - continue - info_arg = inspect_validator(validator.func, validator.info.mode) - if validator.info.mode == 'wrap': - if info_arg: - schema = core_schema.with_info_wrap_validator_function(function=validator.func, schema=schema) - else: - schema = core_schema.no_info_wrap_validator_function(function=validator.func, schema=schema) - elif validator.info.mode == 'before': - if info_arg: - schema = core_schema.with_info_before_validator_function(function=validator.func, schema=schema) - else: - schema = core_schema.no_info_before_validator_function(function=validator.func, schema=schema) - else: - assert validator.info.mode == 'after' - if info_arg: - schema = core_schema.with_info_after_validator_function(function=validator.func, schema=schema) - else: - schema = core_schema.no_info_after_validator_function(function=validator.func, schema=schema) - if ref: - schema['ref'] = ref # type: ignore - return schema - - -def wrap_default(field_info: FieldInfo, schema: core_schema.CoreSchema) -> core_schema.CoreSchema: - """Wrap schema with default schema if default value or `default_factory` are available. - - Args: - field_info: The field info object. - schema: The schema to apply default on. - - Returns: - Updated schema by default value or `default_factory`. 
- """ - if field_info.default_factory: - return core_schema.with_default_schema( - schema, default_factory=field_info.default_factory, validate_default=field_info.validate_default - ) - elif field_info.default is not PydanticUndefined: - return core_schema.with_default_schema( - schema, default=field_info.default, validate_default=field_info.validate_default - ) - else: - return schema - - -def _extract_get_pydantic_json_schema(tp: Any, schema: CoreSchema) -> GetJsonSchemaFunction | None: - """Extract `__get_pydantic_json_schema__` from a type, handling the deprecated `__modify_schema__`.""" - js_modify_function = getattr(tp, '__get_pydantic_json_schema__', None) - - if hasattr(tp, '__modify_schema__'): - from pydantic import BaseModel # circular reference - - has_custom_v2_modify_js_func = ( - js_modify_function is not None - and BaseModel.__get_pydantic_json_schema__.__func__ # type: ignore - not in (js_modify_function, getattr(js_modify_function, '__func__', None)) - ) - - if not has_custom_v2_modify_js_func: - raise PydanticUserError( - 'The `__modify_schema__` method is not supported in Pydantic v2. 
' - 'Use `__get_pydantic_json_schema__` instead.', - code='custom-json-schema', - ) - - # handle GenericAlias' but ignore Annotated which "lies" about its origin (in this case it would be `int`) - if hasattr(tp, '__origin__') and not isinstance(tp, type(Annotated[int, 'placeholder'])): - return _extract_get_pydantic_json_schema(tp.__origin__, schema) - - if js_modify_function is None: - return None - - return js_modify_function - - -def get_json_schema_update_func( - json_schema_update: JsonSchemaValue, json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None -) -> GetJsonSchemaFunction: - def json_schema_update_func( - core_schema_or_field: CoreSchemaOrField, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - json_schema = {**handler(core_schema_or_field), **json_schema_update} - add_json_schema_extra(json_schema, json_schema_extra) - return json_schema - - return json_schema_update_func - - -def add_json_schema_extra( - json_schema: JsonSchemaValue, json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None -): - if isinstance(json_schema_extra, dict): - json_schema.update(to_jsonable_python(json_schema_extra)) - elif callable(json_schema_extra): - json_schema_extra(json_schema) - - -class _CommonField(TypedDict): - schema: core_schema.CoreSchema - validation_alias: str | list[str | int] | list[list[str | int]] | None - serialization_alias: str | None - serialization_exclude: bool | None - frozen: bool | None - metadata: dict[str, Any] - - -def _common_field( - schema: core_schema.CoreSchema, - *, - validation_alias: str | list[str | int] | list[list[str | int]] | None = None, - serialization_alias: str | None = None, - serialization_exclude: bool | None = None, - frozen: bool | None = None, - metadata: Any = None, -) -> _CommonField: - return { - 'schema': schema, - 'validation_alias': validation_alias, - 'serialization_alias': serialization_alias, - 'serialization_exclude': serialization_exclude, - 'frozen': frozen, - 'metadata': 
metadata, - } - - -class _Definitions: - """Keeps track of references and definitions.""" - - def __init__(self) -> None: - self.seen: set[str] = set() - self.definitions: dict[str, core_schema.CoreSchema] = {} - - @contextmanager - def get_schema_or_ref(self, tp: Any) -> Iterator[tuple[str, None] | tuple[str, CoreSchema]]: - """Get a definition for `tp` if one exists. - - If a definition exists, a tuple of `(ref_string, CoreSchema)` is returned. - If no definition exists yet, a tuple of `(ref_string, None)` is returned. - - Note that the returned `CoreSchema` will always be a `DefinitionReferenceSchema`, - not the actual definition itself. - - This should be called for any type that can be identified by reference. - This includes any recursive types. - - At present the following types can be named/recursive: - - - BaseModel - - Dataclasses - - TypedDict - - TypeAliasType - """ - ref = get_type_ref(tp) - # return the reference if we're either (1) in a cycle or (2) it was already defined - if ref in self.seen or ref in self.definitions: - yield (ref, core_schema.definition_reference_schema(ref)) - else: - self.seen.add(ref) - try: - yield (ref, None) - finally: - self.seen.discard(ref) - - -def resolve_original_schema(schema: CoreSchema, definitions: dict[str, CoreSchema]) -> CoreSchema | None: - if schema['type'] == 'definition-ref': - return definitions.get(schema['schema_ref'], None) - elif schema['type'] == 'definitions': - return schema['schema'] - else: - return schema - - -class _FieldNameStack: - __slots__ = ('_stack',) - - def __init__(self) -> None: - self._stack: list[str] = [] - - @contextmanager - def push(self, field_name: str) -> Iterator[None]: - self._stack.append(field_name) - yield - self._stack.pop() - - def get(self) -> str | None: - if self._stack: - return self._stack[-1] - else: - return None diff --git a/lib/pydantic/_internal/_generics.py b/lib/pydantic/_internal/_generics.py deleted file mode 100644 index 5a66eaa9..00000000 --- 
a/lib/pydantic/_internal/_generics.py +++ /dev/null @@ -1,517 +0,0 @@ -from __future__ import annotations - -import sys -import types -import typing -from collections import ChainMap -from contextlib import contextmanager -from contextvars import ContextVar -from types import prepare_class -from typing import TYPE_CHECKING, Any, Iterator, List, Mapping, MutableMapping, Tuple, TypeVar -from weakref import WeakValueDictionary - -import typing_extensions - -from ._core_utils import get_type_ref -from ._forward_ref import PydanticRecursiveRef -from ._typing_extra import TypeVarType, typing_base -from ._utils import all_identical, is_model_class - -if sys.version_info >= (3, 10): - from typing import _UnionGenericAlias # type: ignore[attr-defined] - -if TYPE_CHECKING: - from ..main import BaseModel - -GenericTypesCacheKey = Tuple[Any, Any, Tuple[Any, ...]] - -# Note: We want to remove LimitedDict, but to do this, we'd need to improve the handling of generics caching. -# Right now, to handle recursive generics, we some types must remain cached for brief periods without references. -# By chaining the WeakValuesDict with a LimitedDict, we have a way to retain caching for all types with references, -# while also retaining a limited number of types even without references. This is generally enough to build -# specific recursive generic models without losing required items out of the cache. - -KT = TypeVar('KT') -VT = TypeVar('VT') -_LIMITED_DICT_SIZE = 100 -if TYPE_CHECKING: - - class LimitedDict(dict, MutableMapping[KT, VT]): - def __init__(self, size_limit: int = _LIMITED_DICT_SIZE): - ... - -else: - - class LimitedDict(dict): - """Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage. - - Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache. 
- """ - - def __init__(self, size_limit: int = _LIMITED_DICT_SIZE): - self.size_limit = size_limit - super().__init__() - - def __setitem__(self, __key: Any, __value: Any) -> None: - super().__setitem__(__key, __value) - if len(self) > self.size_limit: - excess = len(self) - self.size_limit + self.size_limit // 10 - to_remove = list(self.keys())[:excess] - for key in to_remove: - del self[key] - - -# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected -# once they are no longer referenced by the caller. -if sys.version_info >= (3, 9): # Typing for weak dictionaries available at 3.9 - GenericTypesCache = WeakValueDictionary[GenericTypesCacheKey, 'type[BaseModel]'] -else: - GenericTypesCache = WeakValueDictionary - -if TYPE_CHECKING: - - class DeepChainMap(ChainMap[KT, VT]): # type: ignore - ... - -else: - - class DeepChainMap(ChainMap): - """Variant of ChainMap that allows direct updates to inner scopes. - - Taken from https://docs.python.org/3/library/collections.html#collections.ChainMap, - with some light modifications for this use case. - """ - - def clear(self) -> None: - for mapping in self.maps: - mapping.clear() - - def __setitem__(self, key: KT, value: VT) -> None: - for mapping in self.maps: - mapping[key] = value - - def __delitem__(self, key: KT) -> None: - hit = False - for mapping in self.maps: - if key in mapping: - del mapping[key] - hit = True - if not hit: - raise KeyError(key) - - -# Despite the fact that LimitedDict _seems_ no longer necessary, I'm very nervous to actually remove it -# and discover later on that we need to re-add all this infrastructure... -# _GENERIC_TYPES_CACHE = DeepChainMap(GenericTypesCache(), LimitedDict()) - -_GENERIC_TYPES_CACHE = GenericTypesCache() - - -class PydanticGenericMetadata(typing_extensions.TypedDict): - origin: type[BaseModel] | None # analogous to typing._GenericAlias.__origin__ - args: tuple[Any, ...] 
# analogous to typing._GenericAlias.__args__ - parameters: tuple[type[Any], ...] # analogous to typing.Generic.__parameters__ - - -def create_generic_submodel( - model_name: str, origin: type[BaseModel], args: tuple[Any, ...], params: tuple[Any, ...] -) -> type[BaseModel]: - """Dynamically create a submodel of a provided (generic) BaseModel. - - This is used when producing concrete parametrizations of generic models. This function - only *creates* the new subclass; the schema/validators/serialization must be updated to - reflect a concrete parametrization elsewhere. - - Args: - model_name: The name of the newly created model. - origin: The base class for the new model to inherit from. - args: A tuple of generic metadata arguments. - params: A tuple of generic metadata parameters. - - Returns: - The created submodel. - """ - namespace: dict[str, Any] = {'__module__': origin.__module__} - bases = (origin,) - meta, ns, kwds = prepare_class(model_name, bases) - namespace.update(ns) - created_model = meta( - model_name, - bases, - namespace, - __pydantic_generic_metadata__={ - 'origin': origin, - 'args': args, - 'parameters': params, - }, - __pydantic_reset_parent_namespace__=False, - **kwds, - ) - - model_module, called_globally = _get_caller_frame_info(depth=3) - if called_globally: # create global reference and therefore allow pickling - object_by_reference = None - reference_name = model_name - reference_module_globals = sys.modules[created_model.__module__].__dict__ - while object_by_reference is not created_model: - object_by_reference = reference_module_globals.setdefault(reference_name, created_model) - reference_name += '_' - - return created_model - - -def _get_caller_frame_info(depth: int = 2) -> tuple[str | None, bool]: - """Used inside a function to check whether it was called globally. - - Args: - depth: The depth to get the frame. - - Returns: - A tuple contains `module_name` and `called_globally`. 
- - Raises: - RuntimeError: If the function is not called inside a function. - """ - try: - previous_caller_frame = sys._getframe(depth) - except ValueError as e: - raise RuntimeError('This function must be used inside another function') from e - except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it - return None, False - frame_globals = previous_caller_frame.f_globals - return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals - - -DictValues: type[Any] = {}.values().__class__ - - -def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]: - """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found. - - This is inspired as an alternative to directly accessing the `__parameters__` attribute of a GenericAlias, - since __parameters__ of (nested) generic BaseModel subclasses won't show up in that list. - """ - if isinstance(v, TypeVar): - yield v - elif is_model_class(v): - yield from v.__pydantic_generic_metadata__['parameters'] - elif isinstance(v, (DictValues, list)): - for var in v: - yield from iter_contained_typevars(var) - else: - args = get_args(v) - for arg in args: - yield from iter_contained_typevars(arg) - - -def get_args(v: Any) -> Any: - pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None) - if pydantic_generic_metadata: - return pydantic_generic_metadata.get('args') - return typing_extensions.get_args(v) - - -def get_origin(v: Any) -> Any: - pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None) - if pydantic_generic_metadata: - return pydantic_generic_metadata.get('origin') - return typing_extensions.get_origin(v) - - -def get_standard_typevars_map(cls: type[Any]) -> dict[TypeVarType, Any] | None: - """Package a generic type's typevars and parametrization (if present) into a dictionary compatible with the - 
`replace_types` function. Specifically, this works with standard typing generics and typing._GenericAlias. - """ - origin = get_origin(cls) - if origin is None: - return None - if not hasattr(origin, '__parameters__'): - return None - - # In this case, we know that cls is a _GenericAlias, and origin is the generic type - # So it is safe to access cls.__args__ and origin.__parameters__ - args: tuple[Any, ...] = cls.__args__ # type: ignore - parameters: tuple[TypeVarType, ...] = origin.__parameters__ - return dict(zip(parameters, args)) - - -def get_model_typevars_map(cls: type[BaseModel]) -> dict[TypeVarType, Any] | None: - """Package a generic BaseModel's typevars and concrete parametrization (if present) into a dictionary compatible - with the `replace_types` function. - - Since BaseModel.__class_getitem__ does not produce a typing._GenericAlias, and the BaseModel generic info is - stored in the __pydantic_generic_metadata__ attribute, we need special handling here. - """ - # TODO: This could be unified with `get_standard_typevars_map` if we stored the generic metadata - # in the __origin__, __args__, and __parameters__ attributes of the model. - generic_metadata = cls.__pydantic_generic_metadata__ - origin = generic_metadata['origin'] - args = generic_metadata['args'] - return dict(zip(iter_contained_typevars(origin), args)) - - -def replace_types(type_: Any, type_map: Mapping[Any, Any] | None) -> Any: - """Return type with all occurrences of `type_map` keys recursively replaced with their values. - - Args: - type_: The class or generic alias. - type_map: Mapping from `TypeVar` instance to concrete types. - - Returns: - A new type representing the basic structure of `type_` with all - `typevar_map` keys recursively replaced. 
- - Example: - ```py - from typing import List, Tuple, Union - - from pydantic._internal._generics import replace_types - - replace_types(Tuple[str, Union[List[str], float]], {str: int}) - #> Tuple[int, Union[List[int], float]] - ``` - """ - if not type_map: - return type_ - - type_args = get_args(type_) - origin_type = get_origin(type_) - - if origin_type is typing_extensions.Annotated: - annotated_type, *annotations = type_args - annotated = replace_types(annotated_type, type_map) - for annotation in annotations: - annotated = typing_extensions.Annotated[annotated, annotation] - return annotated - - # Having type args is a good indicator that this is a typing module - # class instantiation or a generic alias of some sort. - if type_args: - resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args) - if all_identical(type_args, resolved_type_args): - # If all arguments are the same, there is no need to modify the - # type or create a new object at all - return type_ - if ( - origin_type is not None - and isinstance(type_, typing_base) - and not isinstance(origin_type, typing_base) - and getattr(type_, '_name', None) is not None - ): - # In python < 3.9 generic aliases don't exist so any of these like `list`, - # `type` or `collections.abc.Callable` need to be translated. - # See: https://www.python.org/dev/peps/pep-0585 - origin_type = getattr(typing, type_._name) - assert origin_type is not None - # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__. - # We also cannot use isinstance() since we have to compare types. 
- if sys.version_info >= (3, 10) and origin_type is types.UnionType: - return _UnionGenericAlias(origin_type, resolved_type_args) - # NotRequired[T] and Required[T] don't support tuple type resolved_type_args, hence the condition below - return origin_type[resolved_type_args[0] if len(resolved_type_args) == 1 else resolved_type_args] - - # We handle pydantic generic models separately as they don't have the same - # semantics as "typing" classes or generic aliases - - if not origin_type and is_model_class(type_): - parameters = type_.__pydantic_generic_metadata__['parameters'] - if not parameters: - return type_ - resolved_type_args = tuple(replace_types(t, type_map) for t in parameters) - if all_identical(parameters, resolved_type_args): - return type_ - return type_[resolved_type_args] - - # Handle special case for typehints that can have lists as arguments. - # `typing.Callable[[int, str], int]` is an example for this. - if isinstance(type_, (List, list)): - resolved_list = list(replace_types(element, type_map) for element in type_) - if all_identical(type_, resolved_list): - return type_ - return resolved_list - - # If all else fails, we try to resolve the type directly and otherwise just - # return the input with no modifications. - return type_map.get(type_, type_) - - -def has_instance_in_type(type_: Any, isinstance_target: Any) -> bool: - """Checks if the type, or any of its arbitrary nested args, satisfy - `isinstance(, isinstance_target)`. - """ - if isinstance(type_, isinstance_target): - return True - - type_args = get_args(type_) - origin_type = get_origin(type_) - - if origin_type is typing_extensions.Annotated: - annotated_type, *annotations = type_args - return has_instance_in_type(annotated_type, isinstance_target) - - # Having type args is a good indicator that this is a typing module - # class instantiation or a generic alias of some sort. 
- if any(has_instance_in_type(a, isinstance_target) for a in type_args): - return True - - # Handle special case for typehints that can have lists as arguments. - # `typing.Callable[[int, str], int]` is an example for this. - if isinstance(type_, (List, list)) and not isinstance(type_, typing_extensions.ParamSpec): - if any(has_instance_in_type(element, isinstance_target) for element in type_): - return True - - return False - - -def check_parameters_count(cls: type[BaseModel], parameters: tuple[Any, ...]) -> None: - """Check the generic model parameters count is equal. - - Args: - cls: The generic model. - parameters: A tuple of passed parameters to the generic model. - - Raises: - TypeError: If the passed parameters count is not equal to generic model parameters count. - """ - actual = len(parameters) - expected = len(cls.__pydantic_generic_metadata__['parameters']) - if actual != expected: - description = 'many' if actual > expected else 'few' - raise TypeError(f'Too {description} parameters for {cls}; actual {actual}, expected {expected}') - - -_generic_recursion_cache: ContextVar[set[str] | None] = ContextVar('_generic_recursion_cache', default=None) - - -@contextmanager -def generic_recursion_self_type( - origin: type[BaseModel], args: tuple[Any, ...] -) -> Iterator[PydanticRecursiveRef | None]: - """This contextmanager should be placed around the recursive calls used to build a generic type, - and accept as arguments the generic origin type and the type arguments being passed to it. - - If the same origin and arguments are observed twice, it implies that a self-reference placeholder - can be used while building the core schema, and will produce a schema_ref that will be valid in the - final parent schema. 
- """ - previously_seen_type_refs = _generic_recursion_cache.get() - if previously_seen_type_refs is None: - previously_seen_type_refs = set() - token = _generic_recursion_cache.set(previously_seen_type_refs) - else: - token = None - - try: - type_ref = get_type_ref(origin, args_override=args) - if type_ref in previously_seen_type_refs: - self_type = PydanticRecursiveRef(type_ref=type_ref) - yield self_type - else: - previously_seen_type_refs.add(type_ref) - yield None - finally: - if token: - _generic_recursion_cache.reset(token) - - -def recursively_defined_type_refs() -> set[str]: - visited = _generic_recursion_cache.get() - if not visited: - return set() # not in a generic recursion, so there are no types - - return visited.copy() # don't allow modifications - - -def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any) -> type[BaseModel] | None: - """The use of a two-stage cache lookup approach was necessary to have the highest performance possible for - repeated calls to `__class_getitem__` on generic types (which may happen in tighter loops during runtime), - while still ensuring that certain alternative parametrizations ultimately resolve to the same type. - - As a concrete example, this approach was necessary to make Model[List[T]][int] equal to Model[List[int]]. - The approach could be modified to not use two different cache keys at different points, but the - _early_cache_key is optimized to be as quick to compute as possible (for repeated-access speed), and the - _late_cache_key is optimized to be as "correct" as possible, so that two types that will ultimately be the - same after resolving the type arguments will always produce cache hits. - - If we wanted to move to only using a single cache key per type, we would either need to always use the - slower/more computationally intensive logic associated with _late_cache_key, or would need to accept - that Model[List[T]][int] is a different type than Model[List[T]][int]. 
Because we rely on subclass relationships - during validation, I think it is worthwhile to ensure that types that are functionally equivalent are actually - equal. - """ - return _GENERIC_TYPES_CACHE.get(_early_cache_key(parent, typevar_values)) - - -def get_cached_generic_type_late( - parent: type[BaseModel], typevar_values: Any, origin: type[BaseModel], args: tuple[Any, ...] -) -> type[BaseModel] | None: - """See the docstring of `get_cached_generic_type_early` for more information about the two-stage cache lookup.""" - cached = _GENERIC_TYPES_CACHE.get(_late_cache_key(origin, args, typevar_values)) - if cached is not None: - set_cached_generic_type(parent, typevar_values, cached, origin, args) - return cached - - -def set_cached_generic_type( - parent: type[BaseModel], - typevar_values: tuple[Any, ...], - type_: type[BaseModel], - origin: type[BaseModel] | None = None, - args: tuple[Any, ...] | None = None, -) -> None: - """See the docstring of `get_cached_generic_type_early` for more information about why items are cached with - two different keys. - """ - _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values)] = type_ - if len(typevar_values) == 1: - _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values[0])] = type_ - if origin and args: - _GENERIC_TYPES_CACHE[_late_cache_key(origin, args, typevar_values)] = type_ - - -def _union_orderings_key(typevar_values: Any) -> Any: - """This is intended to help differentiate between Union types with the same arguments in different order. - - Thanks to caching internal to the `typing` module, it is not possible to distinguish between - List[Union[int, float]] and List[Union[float, int]] (and similarly for other "parent" origins besides List) - because `typing` considers Union[int, float] to be equal to Union[float, int]. - - However, you _can_ distinguish between (top-level) Union[int, float] vs. Union[float, int]. 
- Because we parse items as the first Union type that is successful, we get slightly more consistent behavior - if we make an effort to distinguish the ordering of items in a union. It would be best if we could _always_ - get the exact-correct order of items in the union, but that would require a change to the `typing` module itself. - (See https://github.com/python/cpython/issues/86483 for reference.) - """ - if isinstance(typevar_values, tuple): - args_data = [] - for value in typevar_values: - args_data.append(_union_orderings_key(value)) - return tuple(args_data) - elif typing_extensions.get_origin(typevar_values) is typing.Union: - return get_args(typevar_values) - else: - return () - - -def _early_cache_key(cls: type[BaseModel], typevar_values: Any) -> GenericTypesCacheKey: - """This is intended for minimal computational overhead during lookups of cached types. - - Note that this is overly simplistic, and it's possible that two different cls/typevar_values - inputs would ultimately result in the same type being created in BaseModel.__class_getitem__. - To handle this, we have a fallback _late_cache_key that is checked later if the _early_cache_key - lookup fails, and should result in a cache hit _precisely_ when the inputs to __class_getitem__ - would result in the same type. - """ - return cls, typevar_values, _union_orderings_key(typevar_values) - - -def _late_cache_key(origin: type[BaseModel], args: tuple[Any, ...], typevar_values: Any) -> GenericTypesCacheKey: - """This is intended for use later in the process of creating a new type, when we have more information - about the exact args that will be passed. If it turns out that a different set of inputs to - __class_getitem__ resulted in the same inputs to the generic type creation process, we can still - return the cached type, and update the cache with the _early_cache_key as well. 
- """ - # The _union_orderings_key is placed at the start here to ensure there cannot be a collision with an - # _early_cache_key, as that function will always produce a BaseModel subclass as the first item in the key, - # whereas this function will always produce a tuple as the first item in the key. - return _union_orderings_key(typevar_values), origin, args diff --git a/lib/pydantic/_internal/_git.py b/lib/pydantic/_internal/_git.py deleted file mode 100644 index 9de7aaf9..00000000 --- a/lib/pydantic/_internal/_git.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Git utilities, adopted from mypy's git utilities (https://github.com/python/mypy/blob/master/mypy/git.py).""" -from __future__ import annotations - -import os -import subprocess - - -def is_git_repo(dir: str) -> bool: - """Is the given directory version-controlled with git?""" - return os.path.exists(os.path.join(dir, '.git')) - - -def have_git() -> bool: - """Can we run the git executable?""" - try: - subprocess.check_output(['git', '--help']) - return True - except subprocess.CalledProcessError: - return False - except OSError: - return False - - -def git_revision(dir: str) -> str: - """Get the SHA-1 of the HEAD of a git repository.""" - return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=dir).decode('utf-8').strip() diff --git a/lib/pydantic/_internal/_internal_dataclass.py b/lib/pydantic/_internal/_internal_dataclass.py deleted file mode 100644 index 317a3d9c..00000000 --- a/lib/pydantic/_internal/_internal_dataclass.py +++ /dev/null @@ -1,10 +0,0 @@ -import sys -from typing import Any, Dict - -dataclass_kwargs: Dict[str, Any] - -# `slots` is available on Python >= 3.10 -if sys.version_info >= (3, 10): - slots_true = {'slots': True} -else: - slots_true = {} diff --git a/lib/pydantic/_internal/_known_annotated_metadata.py b/lib/pydantic/_internal/_known_annotated_metadata.py deleted file mode 100644 index 77caf705..00000000 --- a/lib/pydantic/_internal/_known_annotated_metadata.py +++ 
/dev/null @@ -1,410 +0,0 @@ -from __future__ import annotations - -from collections import defaultdict -from copy import copy -from functools import partial -from typing import TYPE_CHECKING, Any, Callable, Iterable - -from pydantic_core import CoreSchema, PydanticCustomError, to_jsonable_python -from pydantic_core import core_schema as cs - -from ._fields import PydanticMetadata - -if TYPE_CHECKING: - from ..annotated_handlers import GetJsonSchemaHandler - - -STRICT = {'strict'} -SEQUENCE_CONSTRAINTS = {'min_length', 'max_length'} -INEQUALITY = {'le', 'ge', 'lt', 'gt'} -NUMERIC_CONSTRAINTS = {'multiple_of', 'allow_inf_nan', *INEQUALITY} - -STR_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT, 'strip_whitespace', 'to_lower', 'to_upper', 'pattern'} -BYTES_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT} - -LIST_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT} -TUPLE_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT} -SET_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT} -DICT_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT} -GENERATOR_CONSTRAINTS = {*SEQUENCE_CONSTRAINTS, *STRICT} - -FLOAT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} -INT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} -BOOL_CONSTRAINTS = STRICT -UUID_CONSTRAINTS = STRICT - -DATE_TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} -TIMEDELTA_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} -TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} -LAX_OR_STRICT_CONSTRAINTS = STRICT - -UNION_CONSTRAINTS = {'union_mode'} -URL_CONSTRAINTS = { - 'max_length', - 'allowed_schemes', - 'host_required', - 'default_host', - 'default_port', - 'default_path', -} - -TEXT_SCHEMA_TYPES = ('str', 'bytes', 'url', 'multi-host-url') -SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator', *TEXT_SCHEMA_TYPES) -NUMERIC_SCHEMA_TYPES = ('float', 'int', 'date', 'time', 'timedelta', 'datetime') - -CONSTRAINTS_TO_ALLOWED_SCHEMAS: dict[str, set[str]] = defaultdict(set) -for constraint in STR_CONSTRAINTS: - 
CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(TEXT_SCHEMA_TYPES) -for constraint in BYTES_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('bytes',)) -for constraint in LIST_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('list',)) -for constraint in TUPLE_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('tuple',)) -for constraint in SET_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('set', 'frozenset')) -for constraint in DICT_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('dict',)) -for constraint in GENERATOR_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('generator',)) -for constraint in FLOAT_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('float',)) -for constraint in INT_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('int',)) -for constraint in DATE_TIME_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('date', 'time', 'datetime')) -for constraint in TIMEDELTA_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('timedelta',)) -for constraint in TIME_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('time',)) -for schema_type in (*TEXT_SCHEMA_TYPES, *SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model'): - CONSTRAINTS_TO_ALLOWED_SCHEMAS['strict'].add(schema_type) -for constraint in UNION_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('union',)) -for constraint in URL_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('url', 'multi-host-url')) -for constraint in BOOL_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('bool',)) -for constraint in UUID_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('uuid',)) -for constraint in LAX_OR_STRICT_CONSTRAINTS: - CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint].update(('lax-or-strict',)) - - -def add_js_update_schema(s: cs.CoreSchema, f: Callable[[], 
dict[str, Any]]) -> None: - def update_js_schema(s: cs.CoreSchema, handler: GetJsonSchemaHandler) -> dict[str, Any]: - js_schema = handler(s) - js_schema.update(f()) - return js_schema - - if 'metadata' in s: - metadata = s['metadata'] - if 'pydantic_js_functions' in s: - metadata['pydantic_js_functions'].append(update_js_schema) - else: - metadata['pydantic_js_functions'] = [update_js_schema] - else: - s['metadata'] = {'pydantic_js_functions': [update_js_schema]} - - -def as_jsonable_value(v: Any) -> Any: - if type(v) not in (int, str, float, bytes, bool, type(None)): - return to_jsonable_python(v) - return v - - -def expand_grouped_metadata(annotations: Iterable[Any]) -> Iterable[Any]: - """Expand the annotations. - - Args: - annotations: An iterable of annotations. - - Returns: - An iterable of expanded annotations. - - Example: - ```py - from annotated_types import Ge, Len - - from pydantic._internal._known_annotated_metadata import expand_grouped_metadata - - print(list(expand_grouped_metadata([Ge(4), Len(5)]))) - #> [Ge(ge=4), MinLen(min_length=5)] - ``` - """ - import annotated_types as at - - from pydantic.fields import FieldInfo # circular import - - for annotation in annotations: - if isinstance(annotation, at.GroupedMetadata): - yield from annotation - elif isinstance(annotation, FieldInfo): - yield from annotation.metadata - # this is a bit problematic in that it results in duplicate metadata - # all of our "consumers" can handle it, but it is not ideal - # we probably should split up FieldInfo into: - # - annotated types metadata - # - individual metadata known only to Pydantic - annotation = copy(annotation) - annotation.metadata = [] - yield annotation - else: - yield annotation - - -def apply_known_metadata(annotation: Any, schema: CoreSchema) -> CoreSchema | None: # noqa: C901 - """Apply `annotation` to `schema` if it is an annotation we know about (Gt, Le, etc.). - Otherwise return `None`. - - This does not handle all known annotations. 
If / when it does, it can always - return a CoreSchema and return the unmodified schema if the annotation should be ignored. - - Assumes that GroupedMetadata has already been expanded via `expand_grouped_metadata`. - - Args: - annotation: The annotation. - schema: The schema. - - Returns: - An updated schema with annotation if it is an annotation we know about, `None` otherwise. - - Raises: - PydanticCustomError: If `Predicate` fails. - """ - import annotated_types as at - - from . import _validators - - schema = schema.copy() - schema_update, other_metadata = collect_known_metadata([annotation]) - schema_type = schema['type'] - for constraint, value in schema_update.items(): - if constraint not in CONSTRAINTS_TO_ALLOWED_SCHEMAS: - raise ValueError(f'Unknown constraint {constraint}') - allowed_schemas = CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint] - - if schema_type in allowed_schemas: - if constraint == 'union_mode' and schema_type == 'union': - schema['mode'] = value # type: ignore # schema is UnionSchema - else: - schema[constraint] = value - continue - - if constraint == 'allow_inf_nan' and value is False: - return cs.no_info_after_validator_function( - _validators.forbid_inf_nan_check, - schema, - ) - elif constraint == 'pattern': - # insert a str schema to make sure the regex engine matches - return cs.chain_schema( - [ - schema, - cs.str_schema(pattern=value), - ] - ) - elif constraint == 'gt': - s = cs.no_info_after_validator_function( - partial(_validators.greater_than_validator, gt=value), - schema, - ) - add_js_update_schema(s, lambda: {'gt': as_jsonable_value(value)}) - return s - elif constraint == 'ge': - return cs.no_info_after_validator_function( - partial(_validators.greater_than_or_equal_validator, ge=value), - schema, - ) - elif constraint == 'lt': - return cs.no_info_after_validator_function( - partial(_validators.less_than_validator, lt=value), - schema, - ) - elif constraint == 'le': - return cs.no_info_after_validator_function( - 
partial(_validators.less_than_or_equal_validator, le=value), - schema, - ) - elif constraint == 'multiple_of': - return cs.no_info_after_validator_function( - partial(_validators.multiple_of_validator, multiple_of=value), - schema, - ) - elif constraint == 'min_length': - s = cs.no_info_after_validator_function( - partial(_validators.min_length_validator, min_length=value), - schema, - ) - add_js_update_schema(s, lambda: {'minLength': (as_jsonable_value(value))}) - return s - elif constraint == 'max_length': - s = cs.no_info_after_validator_function( - partial(_validators.max_length_validator, max_length=value), - schema, - ) - add_js_update_schema(s, lambda: {'maxLength': (as_jsonable_value(value))}) - return s - elif constraint == 'strip_whitespace': - return cs.chain_schema( - [ - schema, - cs.str_schema(strip_whitespace=True), - ] - ) - elif constraint == 'to_lower': - return cs.chain_schema( - [ - schema, - cs.str_schema(to_lower=True), - ] - ) - elif constraint == 'to_upper': - return cs.chain_schema( - [ - schema, - cs.str_schema(to_upper=True), - ] - ) - elif constraint == 'min_length': - return cs.no_info_after_validator_function( - partial(_validators.min_length_validator, min_length=annotation.min_length), - schema, - ) - elif constraint == 'max_length': - return cs.no_info_after_validator_function( - partial(_validators.max_length_validator, max_length=annotation.max_length), - schema, - ) - else: - raise RuntimeError(f'Unable to apply constraint {constraint} to schema {schema_type}') - - for annotation in other_metadata: - if isinstance(annotation, at.Gt): - return cs.no_info_after_validator_function( - partial(_validators.greater_than_validator, gt=annotation.gt), - schema, - ) - elif isinstance(annotation, at.Ge): - return cs.no_info_after_validator_function( - partial(_validators.greater_than_or_equal_validator, ge=annotation.ge), - schema, - ) - elif isinstance(annotation, at.Lt): - return cs.no_info_after_validator_function( - 
partial(_validators.less_than_validator, lt=annotation.lt), - schema, - ) - elif isinstance(annotation, at.Le): - return cs.no_info_after_validator_function( - partial(_validators.less_than_or_equal_validator, le=annotation.le), - schema, - ) - elif isinstance(annotation, at.MultipleOf): - return cs.no_info_after_validator_function( - partial(_validators.multiple_of_validator, multiple_of=annotation.multiple_of), - schema, - ) - elif isinstance(annotation, at.MinLen): - return cs.no_info_after_validator_function( - partial(_validators.min_length_validator, min_length=annotation.min_length), - schema, - ) - elif isinstance(annotation, at.MaxLen): - return cs.no_info_after_validator_function( - partial(_validators.max_length_validator, max_length=annotation.max_length), - schema, - ) - elif isinstance(annotation, at.Predicate): - predicate_name = f'{annotation.func.__qualname__} ' if hasattr(annotation.func, '__qualname__') else '' - - def val_func(v: Any) -> Any: - # annotation.func may also raise an exception, let it pass through - if not annotation.func(v): - raise PydanticCustomError( - 'predicate_failed', - f'Predicate {predicate_name}failed', # type: ignore - ) - return v - - return cs.no_info_after_validator_function(val_func, schema) - # ignore any other unknown metadata - return None - - return schema - - -def collect_known_metadata(annotations: Iterable[Any]) -> tuple[dict[str, Any], list[Any]]: - """Split `annotations` into known metadata and unknown annotations. - - Args: - annotations: An iterable of annotations. - - Returns: - A tuple contains a dict of known metadata and a list of unknown annotations. 
- - Example: - ```py - from annotated_types import Gt, Len - - from pydantic._internal._known_annotated_metadata import collect_known_metadata - - print(collect_known_metadata([Gt(1), Len(42), ...])) - #> ({'gt': 1, 'min_length': 42}, [Ellipsis]) - ``` - """ - import annotated_types as at - - annotations = expand_grouped_metadata(annotations) - - res: dict[str, Any] = {} - remaining: list[Any] = [] - for annotation in annotations: - # isinstance(annotation, PydanticMetadata) also covers ._fields:_PydanticGeneralMetadata - if isinstance(annotation, PydanticMetadata): - res.update(annotation.__dict__) - # we don't use dataclasses.asdict because that recursively calls asdict on the field values - elif isinstance(annotation, at.MinLen): - res.update({'min_length': annotation.min_length}) - elif isinstance(annotation, at.MaxLen): - res.update({'max_length': annotation.max_length}) - elif isinstance(annotation, at.Gt): - res.update({'gt': annotation.gt}) - elif isinstance(annotation, at.Ge): - res.update({'ge': annotation.ge}) - elif isinstance(annotation, at.Lt): - res.update({'lt': annotation.lt}) - elif isinstance(annotation, at.Le): - res.update({'le': annotation.le}) - elif isinstance(annotation, at.MultipleOf): - res.update({'multiple_of': annotation.multiple_of}) - elif isinstance(annotation, type) and issubclass(annotation, PydanticMetadata): - # also support PydanticMetadata classes being used without initialisation, - # e.g. 
`Annotated[int, Strict]` as well as `Annotated[int, Strict()]` - res.update({k: v for k, v in vars(annotation).items() if not k.startswith('_')}) - else: - remaining.append(annotation) - # Nones can sneak in but pydantic-core will reject them - # it'd be nice to clean things up so we don't put in None (we probably don't _need_ to, it was just easier) - # but this is simple enough to kick that can down the road - res = {k: v for k, v in res.items() if v is not None} - return res, remaining - - -def check_metadata(metadata: dict[str, Any], allowed: Iterable[str], source_type: Any) -> None: - """A small utility function to validate that the given metadata can be applied to the target. - More than saving lines of code, this gives us a consistent error message for all of our internal implementations. - - Args: - metadata: A dict of metadata. - allowed: An iterable of allowed metadata. - source_type: The source type. - - Raises: - TypeError: If there is metadatas that can't be applied on source type. 
- """ - unknown = metadata.keys() - set(allowed) - if unknown: - raise TypeError( - f'The following constraints cannot be applied to {source_type!r}: {", ".join([f"{k!r}" for k in unknown])}' - ) diff --git a/lib/pydantic/_internal/_mock_val_ser.py b/lib/pydantic/_internal/_mock_val_ser.py deleted file mode 100644 index b303fed2..00000000 --- a/lib/pydantic/_internal/_mock_val_ser.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Callable, Generic, TypeVar - -from pydantic_core import SchemaSerializer, SchemaValidator -from typing_extensions import Literal - -from ..errors import PydanticErrorCodes, PydanticUserError - -if TYPE_CHECKING: - from ..dataclasses import PydanticDataclass - from ..main import BaseModel - - -ValSer = TypeVar('ValSer', SchemaValidator, SchemaSerializer) - - -class MockValSer(Generic[ValSer]): - """Mocker for `pydantic_core.SchemaValidator` or `pydantic_core.SchemaSerializer` which optionally attempts to - rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails. 
- """ - - __slots__ = '_error_message', '_code', '_val_or_ser', '_attempt_rebuild' - - def __init__( - self, - error_message: str, - *, - code: PydanticErrorCodes, - val_or_ser: Literal['validator', 'serializer'], - attempt_rebuild: Callable[[], ValSer | None] | None = None, - ) -> None: - self._error_message = error_message - self._val_or_ser = SchemaValidator if val_or_ser == 'validator' else SchemaSerializer - self._code: PydanticErrorCodes = code - self._attempt_rebuild = attempt_rebuild - - def __getattr__(self, item: str) -> None: - __tracebackhide__ = True - if self._attempt_rebuild: - val_ser = self._attempt_rebuild() - if val_ser is not None: - return getattr(val_ser, item) - - # raise an AttributeError if `item` doesn't exist - getattr(self._val_or_ser, item) - raise PydanticUserError(self._error_message, code=self._code) - - def rebuild(self) -> ValSer | None: - if self._attempt_rebuild: - val_ser = self._attempt_rebuild() - if val_ser is not None: - return val_ser - else: - raise PydanticUserError(self._error_message, code=self._code) - return None - - -def set_model_mocks(cls: type[BaseModel], cls_name: str, undefined_name: str = 'all referenced types') -> None: - """Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a model. - - Args: - cls: The model class to set the mocks on - cls_name: Name of the model class, used in error messages - undefined_name: Name of the undefined thing, used in error messages - """ - undefined_type_error_message = ( - f'`{cls_name}` is not fully defined; you should define {undefined_name},' - f' then call `{cls_name}.model_rebuild()`.' 
- ) - - def attempt_rebuild_validator() -> SchemaValidator | None: - if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False: - return cls.__pydantic_validator__ - else: - return None - - cls.__pydantic_validator__ = MockValSer( # type: ignore[assignment] - undefined_type_error_message, - code='class-not-fully-defined', - val_or_ser='validator', - attempt_rebuild=attempt_rebuild_validator, - ) - - def attempt_rebuild_serializer() -> SchemaSerializer | None: - if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False: - return cls.__pydantic_serializer__ - else: - return None - - cls.__pydantic_serializer__ = MockValSer( # type: ignore[assignment] - undefined_type_error_message, - code='class-not-fully-defined', - val_or_ser='serializer', - attempt_rebuild=attempt_rebuild_serializer, - ) - - -def set_dataclass_mocks( - cls: type[PydanticDataclass], cls_name: str, undefined_name: str = 'all referenced types' -) -> None: - """Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a dataclass. - - Args: - cls: The model class to set the mocks on - cls_name: Name of the model class, used in error messages - undefined_name: Name of the undefined thing, used in error messages - """ - from ..dataclasses import rebuild_dataclass - - undefined_type_error_message = ( - f'`{cls_name}` is not fully defined; you should define {undefined_name},' - f' then call `pydantic.dataclasses.rebuild_dataclass({cls_name})`.' 
- ) - - def attempt_rebuild_validator() -> SchemaValidator | None: - if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False: - return cls.__pydantic_validator__ - else: - return None - - cls.__pydantic_validator__ = MockValSer( # type: ignore[assignment] - undefined_type_error_message, - code='class-not-fully-defined', - val_or_ser='validator', - attempt_rebuild=attempt_rebuild_validator, - ) - - def attempt_rebuild_serializer() -> SchemaSerializer | None: - if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False: - return cls.__pydantic_serializer__ - else: - return None - - cls.__pydantic_serializer__ = MockValSer( # type: ignore[assignment] - undefined_type_error_message, - code='class-not-fully-defined', - val_or_ser='validator', - attempt_rebuild=attempt_rebuild_serializer, - ) diff --git a/lib/pydantic/_internal/_model_construction.py b/lib/pydantic/_internal/_model_construction.py deleted file mode 100644 index 543f73e9..00000000 --- a/lib/pydantic/_internal/_model_construction.py +++ /dev/null @@ -1,637 +0,0 @@ -"""Private logic for creating models.""" -from __future__ import annotations as _annotations - -import operator -import typing -import warnings -import weakref -from abc import ABCMeta -from functools import partial -from types import FunctionType -from typing import Any, Callable, Generic - -import typing_extensions -from pydantic_core import PydanticUndefined, SchemaSerializer -from typing_extensions import dataclass_transform, deprecated - -from ..errors import PydanticUndefinedAnnotation, PydanticUserError -from ..plugin._schema_validator import create_schema_validator -from ..warnings import GenericBeforeBaseModelWarning, PydanticDeprecatedSince20 -from ._config import ConfigWrapper -from ._decorators import DecoratorInfos, PydanticDescriptorProxy, get_attribute_from_bases -from ._fields import collect_model_fields, is_valid_field_name, is_valid_privateattr_name -from 
._generate_schema import GenerateSchema -from ._generics import PydanticGenericMetadata, get_model_typevars_map -from ._mock_val_ser import MockValSer, set_model_mocks -from ._schema_generation_shared import CallbackGetCoreSchemaHandler -from ._signature import generate_pydantic_signature -from ._typing_extra import get_cls_types_namespace, is_annotated, is_classvar, parent_frame_namespace -from ._utils import ClassAttribute, SafeGetItemProxy -from ._validate_call import ValidateCallWrapper - -if typing.TYPE_CHECKING: - from ..fields import Field as PydanticModelField - from ..fields import FieldInfo, ModelPrivateAttr - from ..main import BaseModel -else: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - PydanticModelField = object() - -object_setattr = object.__setattr__ - - -class _ModelNamespaceDict(dict): - """A dictionary subclass that intercepts attribute setting on model classes and - warns about overriding of decorators. - """ - - def __setitem__(self, k: str, v: object) -> None: - existing: Any = self.get(k, None) - if existing and v is not existing and isinstance(existing, PydanticDescriptorProxy): - warnings.warn(f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator') - - return super().__setitem__(k, v) - - -@dataclass_transform(kw_only_default=True, field_specifiers=(PydanticModelField,)) -class ModelMetaclass(ABCMeta): - def __new__( - mcs, - cls_name: str, - bases: tuple[type[Any], ...], - namespace: dict[str, Any], - __pydantic_generic_metadata__: PydanticGenericMetadata | None = None, - __pydantic_reset_parent_namespace__: bool = True, - _create_model_module: str | None = None, - **kwargs: Any, - ) -> type: - """Metaclass for creating Pydantic models. - - Args: - cls_name: The name of the class to be created. - bases: The base classes of the class to be created. 
- namespace: The attribute dictionary of the class to be created. - __pydantic_generic_metadata__: Metadata for generic models. - __pydantic_reset_parent_namespace__: Reset parent namespace. - _create_model_module: The module of the class to be created, if created by `create_model`. - **kwargs: Catch-all for any other keyword arguments. - - Returns: - The new class created by the metaclass. - """ - # Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we rely on the fact - # that `BaseModel` itself won't have any bases, but any subclass of it will, to determine whether the `__new__` - # call we're in the middle of is for the `BaseModel` class. - if bases: - base_field_names, class_vars, base_private_attributes = mcs._collect_bases_data(bases) - - config_wrapper = ConfigWrapper.for_model(bases, namespace, kwargs) - namespace['model_config'] = config_wrapper.config_dict - private_attributes = inspect_namespace( - namespace, config_wrapper.ignored_types, class_vars, base_field_names - ) - if private_attributes: - original_model_post_init = get_model_post_init(namespace, bases) - if original_model_post_init is not None: - # if there are private_attributes and a model_post_init function, we handle both - - def wrapped_model_post_init(self: BaseModel, __context: Any) -> None: - """We need to both initialize private attributes and call the user-defined model_post_init - method. 
- """ - init_private_attributes(self, __context) - original_model_post_init(self, __context) - - namespace['model_post_init'] = wrapped_model_post_init - else: - namespace['model_post_init'] = init_private_attributes - - namespace['__class_vars__'] = class_vars - namespace['__private_attributes__'] = {**base_private_attributes, **private_attributes} - - cls: type[BaseModel] = super().__new__(mcs, cls_name, bases, namespace, **kwargs) # type: ignore - - from ..main import BaseModel - - mro = cls.__mro__ - if Generic in mro and mro.index(Generic) < mro.index(BaseModel): - warnings.warn( - GenericBeforeBaseModelWarning( - 'Classes should inherit from `BaseModel` before generic classes (e.g. `typing.Generic[T]`) ' - 'for pydantic generics to work properly.' - ), - stacklevel=2, - ) - - cls.__pydantic_custom_init__ = not getattr(cls.__init__, '__pydantic_base_init__', False) - cls.__pydantic_post_init__ = None if cls.model_post_init is BaseModel.model_post_init else 'model_post_init' - - cls.__pydantic_decorators__ = DecoratorInfos.build(cls) - - # Use the getattr below to grab the __parameters__ from the `typing.Generic` parent class - if __pydantic_generic_metadata__: - cls.__pydantic_generic_metadata__ = __pydantic_generic_metadata__ - else: - parent_parameters = getattr(cls, '__pydantic_generic_metadata__', {}).get('parameters', ()) - parameters = getattr(cls, '__parameters__', None) or parent_parameters - if parameters and parent_parameters and not all(x in parameters for x in parent_parameters): - combined_parameters = parent_parameters + tuple(x for x in parameters if x not in parent_parameters) - parameters_str = ', '.join([str(x) for x in combined_parameters]) - generic_type_label = f'typing.Generic[{parameters_str}]' - error_message = ( - f'All parameters must be present on typing.Generic;' - f' you should inherit from {generic_type_label}.' 
- ) - if Generic not in bases: # pragma: no cover - # We raise an error here not because it is desirable, but because some cases are mishandled. - # It would be nice to remove this error and still have things behave as expected, it's just - # challenging because we are using a custom `__class_getitem__` to parametrize generic models, - # and not returning a typing._GenericAlias from it. - bases_str = ', '.join([x.__name__ for x in bases] + [generic_type_label]) - error_message += ( - f' Note: `typing.Generic` must go last: `class {cls.__name__}({bases_str}): ...`)' - ) - raise TypeError(error_message) - - cls.__pydantic_generic_metadata__ = { - 'origin': None, - 'args': (), - 'parameters': parameters, - } - - cls.__pydantic_complete__ = False # Ensure this specific class gets completed - - # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 - # for attributes not in `new_namespace` (e.g. private attributes) - for name, obj in private_attributes.items(): - obj.__set_name__(cls, name) - - if __pydantic_reset_parent_namespace__: - cls.__pydantic_parent_namespace__ = build_lenient_weakvaluedict(parent_frame_namespace()) - parent_namespace = getattr(cls, '__pydantic_parent_namespace__', None) - if isinstance(parent_namespace, dict): - parent_namespace = unpack_lenient_weakvaluedict(parent_namespace) - - types_namespace = get_cls_types_namespace(cls, parent_namespace) - set_model_fields(cls, bases, config_wrapper, types_namespace) - - if config_wrapper.frozen and '__hash__' not in namespace: - set_default_hash_func(cls, bases) - - complete_model_class( - cls, - cls_name, - config_wrapper, - raise_errors=False, - types_namespace=types_namespace, - create_model_module=_create_model_module, - ) - - # If this is placed before the complete_model_class call above, - # the generic computed fields return type is set to PydanticUndefined - cls.model_computed_fields = {k: v.info for k, v in cls.__pydantic_decorators__.computed_fields.items()} - - # using 
super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__ - # I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is - # only hit for _proper_ subclasses of BaseModel - super(cls, cls).__pydantic_init_subclass__(**kwargs) # type: ignore[misc] - return cls - else: - # this is the BaseModel class itself being created, no logic required - return super().__new__(mcs, cls_name, bases, namespace, **kwargs) - - if not typing.TYPE_CHECKING: # pragma: no branch - # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access - - def __getattr__(self, item: str) -> Any: - """This is necessary to keep attribute access working for class attribute access.""" - private_attributes = self.__dict__.get('__private_attributes__') - if private_attributes and item in private_attributes: - return private_attributes[item] - if item == '__pydantic_core_schema__': - # This means the class didn't get a schema generated for it, likely because there was an undefined reference - maybe_mock_validator = getattr(self, '__pydantic_validator__', None) - if isinstance(maybe_mock_validator, MockValSer): - rebuilt_validator = maybe_mock_validator.rebuild() - if rebuilt_validator is not None: - # In this case, a validator was built, and so `__pydantic_core_schema__` should now be set - return getattr(self, '__pydantic_core_schema__') - raise AttributeError(item) - - @classmethod - def __prepare__(cls, *args: Any, **kwargs: Any) -> dict[str, object]: - return _ModelNamespaceDict() - - def __instancecheck__(self, instance: Any) -> bool: - """Avoid calling ABC _abc_subclasscheck unless we're pretty sure. 
- - See #3829 and python/cpython#92810 - """ - return hasattr(instance, '__pydantic_validator__') and super().__instancecheck__(instance) - - @staticmethod - def _collect_bases_data(bases: tuple[type[Any], ...]) -> tuple[set[str], set[str], dict[str, ModelPrivateAttr]]: - from ..main import BaseModel - - field_names: set[str] = set() - class_vars: set[str] = set() - private_attributes: dict[str, ModelPrivateAttr] = {} - for base in bases: - if issubclass(base, BaseModel) and base is not BaseModel: - # model_fields might not be defined yet in the case of generics, so we use getattr here: - field_names.update(getattr(base, 'model_fields', {}).keys()) - class_vars.update(base.__class_vars__) - private_attributes.update(base.__private_attributes__) - return field_names, class_vars, private_attributes - - @property - @deprecated('The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None) - def __fields__(self) -> dict[str, FieldInfo]: - warnings.warn( - 'The `__fields__` attribute is deprecated, use `model_fields` instead.', PydanticDeprecatedSince20 - ) - return self.model_fields # type: ignore - - def __dir__(self) -> list[str]: - attributes = list(super().__dir__()) - if '__fields__' in attributes: - attributes.remove('__fields__') - return attributes - - -def init_private_attributes(self: BaseModel, __context: Any) -> None: - """This function is meant to behave like a BaseModel method to initialise private attributes. - - It takes context as an argument since that's what pydantic-core passes when calling it. - - Args: - self: The BaseModel instance. - __context: The context. 
- """ - if getattr(self, '__pydantic_private__', None) is None: - pydantic_private = {} - for name, private_attr in self.__private_attributes__.items(): - default = private_attr.get_default() - if default is not PydanticUndefined: - pydantic_private[name] = default - object_setattr(self, '__pydantic_private__', pydantic_private) - - -def get_model_post_init(namespace: dict[str, Any], bases: tuple[type[Any], ...]) -> Callable[..., Any] | None: - """Get the `model_post_init` method from the namespace or the class bases, or `None` if not defined.""" - if 'model_post_init' in namespace: - return namespace['model_post_init'] - - from ..main import BaseModel - - model_post_init = get_attribute_from_bases(bases, 'model_post_init') - if model_post_init is not BaseModel.model_post_init: - return model_post_init - - -def inspect_namespace( # noqa C901 - namespace: dict[str, Any], - ignored_types: tuple[type[Any], ...], - base_class_vars: set[str], - base_class_fields: set[str], -) -> dict[str, ModelPrivateAttr]: - """Iterate over the namespace and: - * gather private attributes - * check for items which look like fields but are not (e.g. have no annotation) and warn. - - Args: - namespace: The attribute dictionary of the class to be created. - ignored_types: A tuple of ignore types. - base_class_vars: A set of base class class variables. - base_class_fields: A set of base class fields. - - Returns: - A dict contains private attributes info. - - Raises: - TypeError: If there is a `__root__` field in model. - NameError: If private attribute name is invalid. - PydanticUserError: - - If a field does not have a type annotation. - - If a field on base class was overridden by a non-annotated attribute. 
- """ - from ..fields import FieldInfo, ModelPrivateAttr, PrivateAttr - - all_ignored_types = ignored_types + default_ignored_types() - - private_attributes: dict[str, ModelPrivateAttr] = {} - raw_annotations = namespace.get('__annotations__', {}) - - if '__root__' in raw_annotations or '__root__' in namespace: - raise TypeError("To define root models, use `pydantic.RootModel` rather than a field called '__root__'") - - ignored_names: set[str] = set() - for var_name, value in list(namespace.items()): - if var_name == 'model_config': - continue - elif ( - isinstance(value, type) - and value.__module__ == namespace['__module__'] - and value.__qualname__.startswith(namespace['__qualname__']) - ): - # `value` is a nested type defined in this namespace; don't error - continue - elif isinstance(value, all_ignored_types) or value.__class__.__module__ == 'functools': - ignored_names.add(var_name) - continue - elif isinstance(value, ModelPrivateAttr): - if var_name.startswith('__'): - raise NameError( - 'Private attributes must not use dunder names;' - f' use a single underscore prefix instead of {var_name!r}.' - ) - elif is_valid_field_name(var_name): - raise NameError( - 'Private attributes must not use valid field names;' - f' use sunder names, e.g. {"_" + var_name!r} instead of {var_name!r}.' - ) - private_attributes[var_name] = value - del namespace[var_name] - elif isinstance(value, FieldInfo) and not is_valid_field_name(var_name): - suggested_name = var_name.lstrip('_') or 'my_field' # don't suggest '' for all-underscore name - raise NameError( - f'Fields must not use names with leading underscores;' - f' e.g., use {suggested_name!r} instead of {var_name!r}.' 
- ) - - elif var_name.startswith('__'): - continue - elif is_valid_privateattr_name(var_name): - if var_name not in raw_annotations or not is_classvar(raw_annotations[var_name]): - private_attributes[var_name] = PrivateAttr(default=value) - del namespace[var_name] - elif var_name in base_class_vars: - continue - elif var_name not in raw_annotations: - if var_name in base_class_fields: - raise PydanticUserError( - f'Field {var_name!r} defined on a base class was overridden by a non-annotated attribute. ' - f'All field definitions, including overrides, require a type annotation.', - code='model-field-overridden', - ) - elif isinstance(value, FieldInfo): - raise PydanticUserError( - f'Field {var_name!r} requires a type annotation', code='model-field-missing-annotation' - ) - else: - raise PydanticUserError( - f'A non-annotated attribute was detected: `{var_name} = {value!r}`. All model fields require a ' - f'type annotation; if `{var_name}` is not meant to be a field, you may be able to resolve this ' - f"error by annotating it as a `ClassVar` or updating `model_config['ignored_types']`.", - code='model-field-missing-annotation', - ) - - for ann_name, ann_type in raw_annotations.items(): - if ( - is_valid_privateattr_name(ann_name) - and ann_name not in private_attributes - and ann_name not in ignored_names - and not is_classvar(ann_type) - and ann_type not in all_ignored_types - and getattr(ann_type, '__module__', None) != 'functools' - ): - if is_annotated(ann_type): - _, *metadata = typing_extensions.get_args(ann_type) - private_attr = next((v for v in metadata if isinstance(v, ModelPrivateAttr)), None) - if private_attr is not None: - private_attributes[ann_name] = private_attr - continue - private_attributes[ann_name] = PrivateAttr() - - return private_attributes - - -def set_default_hash_func(cls: type[BaseModel], bases: tuple[type[Any], ...]) -> None: - base_hash_func = get_attribute_from_bases(bases, '__hash__') - new_hash_func = make_hash_func(cls) - if 
base_hash_func in {None, object.__hash__} or getattr(base_hash_func, '__code__', None) == new_hash_func.__code__: - # If `__hash__` is some default, we generate a hash function. - # It will be `None` if not overridden from BaseModel. - # It may be `object.__hash__` if there is another - # parent class earlier in the bases which doesn't override `__hash__` (e.g. `typing.Generic`). - # It may be a value set by `set_default_hash_func` if `cls` is a subclass of another frozen model. - # In the last case we still need a new hash function to account for new `model_fields`. - cls.__hash__ = new_hash_func - - -def make_hash_func(cls: type[BaseModel]) -> Any: - getter = operator.itemgetter(*cls.model_fields.keys()) if cls.model_fields else lambda _: 0 - - def hash_func(self: Any) -> int: - try: - return hash(getter(self.__dict__)) - except KeyError: - # In rare cases (such as when using the deprecated copy method), the __dict__ may not contain - # all model fields, which is how we can get here. - # getter(self.__dict__) is much faster than any 'safe' method that accounts for missing keys, - # and wrapping it in a `try` doesn't slow things down much in the common case. - return hash(getter(SafeGetItemProxy(self.__dict__))) - - return hash_func - - -def set_model_fields( - cls: type[BaseModel], bases: tuple[type[Any], ...], config_wrapper: ConfigWrapper, types_namespace: dict[str, Any] -) -> None: - """Collect and set `cls.model_fields` and `cls.__class_vars__`. - - Args: - cls: BaseModel or dataclass. - bases: Parents of the class, generally `cls.__bases__`. - config_wrapper: The config wrapper instance. - types_namespace: Optional extra namespace to look for types in. 
- """ - typevars_map = get_model_typevars_map(cls) - fields, class_vars = collect_model_fields(cls, bases, config_wrapper, types_namespace, typevars_map=typevars_map) - - cls.model_fields = fields - cls.__class_vars__.update(class_vars) - - for k in class_vars: - # Class vars should not be private attributes - # We remove them _here_ and not earlier because we rely on inspecting the class to determine its classvars, - # but private attributes are determined by inspecting the namespace _prior_ to class creation. - # In the case that a classvar with a leading-'_' is defined via a ForwardRef (e.g., when using - # `__future__.annotations`), we want to remove the private attribute which was detected _before_ we knew it - # evaluated to a classvar - - value = cls.__private_attributes__.pop(k, None) - if value is not None and value.default is not PydanticUndefined: - setattr(cls, k, value.default) - - -def complete_model_class( - cls: type[BaseModel], - cls_name: str, - config_wrapper: ConfigWrapper, - *, - raise_errors: bool = True, - types_namespace: dict[str, Any] | None, - create_model_module: str | None = None, -) -> bool: - """Finish building a model class. - - This logic must be called after class has been created since validation functions must be bound - and `get_type_hints` requires a class object. - - Args: - cls: BaseModel or dataclass. - cls_name: The model or dataclass name. - config_wrapper: The config wrapper instance. - raise_errors: Whether to raise errors. - types_namespace: Optional extra namespace to look for types in. - create_model_module: The module of the class to be created, if created by `create_model`. - - Returns: - `True` if the model is successfully completed, else `False`. - - Raises: - PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation` occurs in`__get_pydantic_core_schema__` - and `raise_errors=True`. 
- """ - typevars_map = get_model_typevars_map(cls) - gen_schema = GenerateSchema( - config_wrapper, - types_namespace, - typevars_map, - ) - - handler = CallbackGetCoreSchemaHandler( - partial(gen_schema.generate_schema, from_dunder_get_core_schema=False), - gen_schema, - ref_mode='unpack', - ) - - if config_wrapper.defer_build: - set_model_mocks(cls, cls_name) - return False - - try: - schema = cls.__get_pydantic_core_schema__(cls, handler) - except PydanticUndefinedAnnotation as e: - if raise_errors: - raise - set_model_mocks(cls, cls_name, f'`{e.name}`') - return False - - core_config = config_wrapper.core_config(cls) - - try: - schema = gen_schema.clean_schema(schema) - except gen_schema.CollectedInvalid: - set_model_mocks(cls, cls_name) - return False - - # debug(schema) - cls.__pydantic_core_schema__ = schema - - cls.__pydantic_validator__ = create_schema_validator( - schema, - cls, - create_model_module or cls.__module__, - cls.__qualname__, - 'create_model' if create_model_module else 'BaseModel', - core_config, - config_wrapper.plugin_settings, - ) - cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config) - cls.__pydantic_complete__ = True - - # set __signature__ attr only for model class, but not for its instances - cls.__signature__ = ClassAttribute( - '__signature__', - generate_pydantic_signature(init=cls.__init__, fields=cls.model_fields, config_wrapper=config_wrapper), - ) - return True - - -class _PydanticWeakRef: - """Wrapper for `weakref.ref` that enables `pickle` serialization. - - Cloudpickle fails to serialize `weakref.ref` objects due to an arcane error related - to abstract base classes (`abc.ABC`). This class works around the issue by wrapping - `weakref.ref` instead of subclassing it. - - See https://github.com/pydantic/pydantic/issues/6763 for context. - - Semantics: - - If not pickled, behaves the same as a `weakref.ref`. 
- - If pickled along with the referenced object, the same `weakref.ref` behavior - will be maintained between them after unpickling. - - If pickled without the referenced object, after unpickling the underlying - reference will be cleared (`__call__` will always return `None`). - """ - - def __init__(self, obj: Any): - if obj is None: - # The object will be `None` upon deserialization if the serialized weakref - # had lost its underlying object. - self._wr = None - else: - self._wr = weakref.ref(obj) - - def __call__(self) -> Any: - if self._wr is None: - return None - else: - return self._wr() - - def __reduce__(self) -> tuple[Callable, tuple[weakref.ReferenceType | None]]: - return _PydanticWeakRef, (self(),) - - -def build_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None: - """Takes an input dictionary, and produces a new value that (invertibly) replaces the values with weakrefs. - - We can't just use a WeakValueDictionary because many types (including int, str, etc.) can't be stored as values - in a WeakValueDictionary. - - The `unpack_lenient_weakvaluedict` function can be used to reverse this operation. 
- """ - if d is None: - return None - result = {} - for k, v in d.items(): - try: - proxy = _PydanticWeakRef(v) - except TypeError: - proxy = v - result[k] = proxy - return result - - -def unpack_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None: - """Inverts the transform performed by `build_lenient_weakvaluedict`.""" - if d is None: - return None - - result = {} - for k, v in d.items(): - if isinstance(v, _PydanticWeakRef): - v = v() - if v is not None: - result[k] = v - else: - result[k] = v - return result - - -def default_ignored_types() -> tuple[type[Any], ...]: - from ..fields import ComputedFieldInfo - - return ( - FunctionType, - property, - classmethod, - staticmethod, - PydanticDescriptorProxy, - ComputedFieldInfo, - ValidateCallWrapper, - ) diff --git a/lib/pydantic/_internal/_repr.py b/lib/pydantic/_internal/_repr.py deleted file mode 100644 index 479b4479..00000000 --- a/lib/pydantic/_internal/_repr.py +++ /dev/null @@ -1,117 +0,0 @@ -"""Tools to provide pretty/human-readable display of objects.""" -from __future__ import annotations as _annotations - -import types -import typing -from typing import Any - -import typing_extensions - -from . import _typing_extra - -if typing.TYPE_CHECKING: - ReprArgs: typing_extensions.TypeAlias = 'typing.Iterable[tuple[str | None, Any]]' - RichReprResult: typing_extensions.TypeAlias = ( - 'typing.Iterable[Any | tuple[Any] | tuple[str, Any] | tuple[str, Any, Any]]' - ) - - -class PlainRepr(str): - """String class where repr doesn't include quotes. Useful with Representation when you want to return a string - representation of something that is valid (or pseudo-valid) python. - """ - - def __repr__(self) -> str: - return str(self) - - -class Representation: - # Mixin to provide `__str__`, `__repr__`, and `__pretty__` and `__rich_repr__` methods. - # `__pretty__` is used by [devtools](https://python-devtools.helpmanual.io/). 
- # `__rich_repr__` is used by [rich](https://rich.readthedocs.io/en/stable/pretty.html). - # (this is not a docstring to avoid adding a docstring to classes which inherit from Representation) - - # we don't want to use a type annotation here as it can break get_type_hints - __slots__ = tuple() # type: typing.Collection[str] - - def __repr_args__(self) -> ReprArgs: - """Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. - - Can either return: - * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` - * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` - """ - attrs_names = self.__slots__ - if not attrs_names and hasattr(self, '__dict__'): - attrs_names = self.__dict__.keys() - attrs = ((s, getattr(self, s)) for s in attrs_names) - return [(a, v) for a, v in attrs if v is not None] - - def __repr_name__(self) -> str: - """Name of the instance's class, used in __repr__.""" - return self.__class__.__name__ - - def __repr_str__(self, join_str: str) -> str: - return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) - - def __pretty__(self, fmt: typing.Callable[[Any], Any], **kwargs: Any) -> typing.Generator[Any, None, None]: - """Used by devtools (https://python-devtools.helpmanual.io/) to pretty print objects.""" - yield self.__repr_name__() + '(' - yield 1 - for name, value in self.__repr_args__(): - if name is not None: - yield name + '=' - yield fmt(value) - yield ',' - yield 0 - yield -1 - yield ')' - - def __rich_repr__(self) -> RichReprResult: - """Used by Rich (https://rich.readthedocs.io/en/stable/pretty.html) to pretty print objects.""" - for name, field_repr in self.__repr_args__(): - if name is None: - yield field_repr - else: - yield name, field_repr - - def __str__(self) -> str: - return self.__repr_str__(' ') - - def __repr__(self) -> str: - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' - - -def display_as_type(obj: 
Any) -> str: - """Pretty representation of a type, should be as close as possible to the original type definition string. - - Takes some logic from `typing._type_repr`. - """ - if isinstance(obj, types.FunctionType): - return obj.__name__ - elif obj is ...: - return '...' - elif isinstance(obj, Representation): - return repr(obj) - elif isinstance(obj, typing_extensions.TypeAliasType): - return str(obj) - - if not isinstance(obj, (_typing_extra.typing_base, _typing_extra.WithArgsTypes, type)): - obj = obj.__class__ - - if _typing_extra.origin_is_union(typing_extensions.get_origin(obj)): - args = ', '.join(map(display_as_type, typing_extensions.get_args(obj))) - return f'Union[{args}]' - elif isinstance(obj, _typing_extra.WithArgsTypes): - if typing_extensions.get_origin(obj) == typing_extensions.Literal: - args = ', '.join(map(repr, typing_extensions.get_args(obj))) - else: - args = ', '.join(map(display_as_type, typing_extensions.get_args(obj))) - try: - return f'{obj.__qualname__}[{args}]' - except AttributeError: - return str(obj) # handles TypeAliasType in 3.12 - elif isinstance(obj, type): - return obj.__qualname__ - else: - return repr(obj).replace('typing.', '').replace('typing_extensions.', '') diff --git a/lib/pydantic/_internal/_schema_generation_shared.py b/lib/pydantic/_internal/_schema_generation_shared.py deleted file mode 100644 index 1a9aa852..00000000 --- a/lib/pydantic/_internal/_schema_generation_shared.py +++ /dev/null @@ -1,124 +0,0 @@ -"""Types and utility functions used by various other internal tools.""" -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Callable - -from pydantic_core import core_schema -from typing_extensions import Literal - -from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler - -if TYPE_CHECKING: - from ..json_schema import GenerateJsonSchema, JsonSchemaValue - from ._core_utils import CoreSchemaOrField - from ._generate_schema import GenerateSchema - - 
GetJsonSchemaFunction = Callable[[CoreSchemaOrField, GetJsonSchemaHandler], JsonSchemaValue] - HandlerOverride = Callable[[CoreSchemaOrField], JsonSchemaValue] - - -class GenerateJsonSchemaHandler(GetJsonSchemaHandler): - """JsonSchemaHandler implementation that doesn't do ref unwrapping by default. - - This is used for any Annotated metadata so that we don't end up with conflicting - modifications to the definition schema. - - Used internally by Pydantic, please do not rely on this implementation. - See `GetJsonSchemaHandler` for the handler API. - """ - - def __init__(self, generate_json_schema: GenerateJsonSchema, handler_override: HandlerOverride | None) -> None: - self.generate_json_schema = generate_json_schema - self.handler = handler_override or generate_json_schema.generate_inner - self.mode = generate_json_schema.mode - - def __call__(self, __core_schema: CoreSchemaOrField) -> JsonSchemaValue: - return self.handler(__core_schema) - - def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue: - """Resolves `$ref` in the json schema. - - This returns the input json schema if there is no `$ref` in json schema. - - Args: - maybe_ref_json_schema: The input json schema that may contains `$ref`. - - Returns: - Resolved json schema. - - Raises: - LookupError: If it can't find the definition for `$ref`. - """ - if '$ref' not in maybe_ref_json_schema: - return maybe_ref_json_schema - ref = maybe_ref_json_schema['$ref'] - json_schema = self.generate_json_schema.get_schema_from_definitions(ref) - if json_schema is None: - raise LookupError( - f'Could not find a ref for {ref}.' - ' Maybe you tried to call resolve_ref_schema from within a recursive model?' - ) - return json_schema - - -class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler): - """Wrapper to use an arbitrary function as a `GetCoreSchemaHandler`. - - Used internally by Pydantic, please do not rely on this implementation. - See `GetCoreSchemaHandler` for the handler API. 
- """ - - def __init__( - self, - handler: Callable[[Any], core_schema.CoreSchema], - generate_schema: GenerateSchema, - ref_mode: Literal['to-def', 'unpack'] = 'to-def', - ) -> None: - self._handler = handler - self._generate_schema = generate_schema - self._ref_mode = ref_mode - - def __call__(self, __source_type: Any) -> core_schema.CoreSchema: - schema = self._handler(__source_type) - ref = schema.get('ref') - if self._ref_mode == 'to-def': - if ref is not None: - self._generate_schema.defs.definitions[ref] = schema - return core_schema.definition_reference_schema(ref) - return schema - else: # ref_mode = 'unpack - return self.resolve_ref_schema(schema) - - def _get_types_namespace(self) -> dict[str, Any] | None: - return self._generate_schema._types_namespace - - def generate_schema(self, __source_type: Any) -> core_schema.CoreSchema: - return self._generate_schema.generate_schema(__source_type) - - @property - def field_name(self) -> str | None: - return self._generate_schema.field_name_stack.get() - - def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema: - """Resolves reference in the core schema. - - Args: - maybe_ref_schema: The input core schema that may contains reference. - - Returns: - Resolved core schema. - - Raises: - LookupError: If it can't find the definition for reference. - """ - if maybe_ref_schema['type'] == 'definition-ref': - ref = maybe_ref_schema['schema_ref'] - if ref not in self._generate_schema.defs.definitions: - raise LookupError( - f'Could not find a ref for {ref}.' - ' Maybe you tried to call resolve_ref_schema from within a recursive model?' 
- ) - return self._generate_schema.defs.definitions[ref] - elif maybe_ref_schema['type'] == 'definitions': - return self.resolve_ref_schema(maybe_ref_schema['schema']) - return maybe_ref_schema diff --git a/lib/pydantic/_internal/_signature.py b/lib/pydantic/_internal/_signature.py deleted file mode 100644 index 816a1651..00000000 --- a/lib/pydantic/_internal/_signature.py +++ /dev/null @@ -1,164 +0,0 @@ -from __future__ import annotations - -import dataclasses -from inspect import Parameter, Signature, signature -from typing import TYPE_CHECKING, Any, Callable - -from pydantic_core import PydanticUndefined - -from ._config import ConfigWrapper -from ._utils import is_valid_identifier - -if TYPE_CHECKING: - from ..fields import FieldInfo - - -def _field_name_for_signature(field_name: str, field_info: FieldInfo) -> str: - """Extract the correct name to use for the field when generating a signature. - - Assuming the field has a valid alias, this will return the alias. Otherwise, it will return the field name. - First priority is given to the validation_alias, then the alias, then the field name. - - Args: - field_name: The name of the field - field_info: The corresponding FieldInfo object. - - Returns: - The correct name to use when generating a signature. - """ - - def _alias_if_valid(x: Any) -> str | None: - """Return the alias if it is a valid alias and identifier, else None.""" - return x if isinstance(x, str) and is_valid_identifier(x) else None - - return _alias_if_valid(field_info.alias) or _alias_if_valid(field_info.validation_alias) or field_name - - -def _process_param_defaults(param: Parameter) -> Parameter: - """Modify the signature for a parameter in a dataclass where the default value is a FieldInfo instance. 
- - Args: - param (Parameter): The parameter - - Returns: - Parameter: The custom processed parameter - """ - from ..fields import FieldInfo - - param_default = param.default - if isinstance(param_default, FieldInfo): - annotation = param.annotation - # Replace the annotation if appropriate - # inspect does "clever" things to show annotations as strings because we have - # `from __future__ import annotations` in main, we don't want that - if annotation == 'Any': - annotation = Any - - # Replace the field default - default = param_default.default - if default is PydanticUndefined: - if param_default.default_factory is PydanticUndefined: - default = Signature.empty - else: - # this is used by dataclasses to indicate a factory exists: - default = dataclasses._HAS_DEFAULT_FACTORY # type: ignore - return param.replace( - annotation=annotation, name=_field_name_for_signature(param.name, param_default), default=default - ) - return param - - -def _generate_signature_parameters( # noqa: C901 (ignore complexity, could use a refactor) - init: Callable[..., None], - fields: dict[str, FieldInfo], - config_wrapper: ConfigWrapper, -) -> dict[str, Parameter]: - """Generate a mapping of parameter names to Parameter objects for a pydantic BaseModel or dataclass.""" - from itertools import islice - - present_params = signature(init).parameters.values() - merged_params: dict[str, Parameter] = {} - var_kw = None - use_var_kw = False - - for param in islice(present_params, 1, None): # skip self arg - # inspect does "clever" things to show annotations as strings because we have - # `from __future__ import annotations` in main, we don't want that - if fields.get(param.name): - # exclude params with init=False - if getattr(fields[param.name], 'init', True) is False: - continue - param = param.replace(name=_field_name_for_signature(param.name, fields[param.name])) - if param.annotation == 'Any': - param = param.replace(annotation=Any) - if param.kind is param.VAR_KEYWORD: - var_kw = param 
- continue - merged_params[param.name] = param - - if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through - allow_names = config_wrapper.populate_by_name - for field_name, field in fields.items(): - # when alias is a str it should be used for signature generation - param_name = _field_name_for_signature(field_name, field) - - if field_name in merged_params or param_name in merged_params: - continue - - if not is_valid_identifier(param_name): - if allow_names: - param_name = field_name - else: - use_var_kw = True - continue - - kwargs = {} if field.is_required() else {'default': field.get_default(call_default_factory=False)} - merged_params[param_name] = Parameter( - param_name, Parameter.KEYWORD_ONLY, annotation=field.rebuild_annotation(), **kwargs - ) - - if config_wrapper.extra == 'allow': - use_var_kw = True - - if var_kw and use_var_kw: - # Make sure the parameter for extra kwargs - # does not have the same name as a field - default_model_signature = [ - ('self', Parameter.POSITIONAL_ONLY), - ('data', Parameter.VAR_KEYWORD), - ] - if [(p.name, p.kind) for p in present_params] == default_model_signature: - # if this is the standard model signature, use extra_data as the extra args name - var_kw_name = 'extra_data' - else: - # else start from var_kw - var_kw_name = var_kw.name - - # generate a name that's definitely unique - while var_kw_name in fields: - var_kw_name += '_' - merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) - - return merged_params - - -def generate_pydantic_signature( - init: Callable[..., None], fields: dict[str, FieldInfo], config_wrapper: ConfigWrapper, is_dataclass: bool = False -) -> Signature: - """Generate signature for a pydantic BaseModel or dataclass. - - Args: - init: The class init. - fields: The model fields. - config_wrapper: The config wrapper instance. - is_dataclass: Whether the model is a dataclass. - - Returns: - The dataclass/BaseModel subclass signature. 
- """ - merged_params = _generate_signature_parameters(init, fields, config_wrapper) - - if is_dataclass: - merged_params = {k: _process_param_defaults(v) for k, v in merged_params.items()} - - return Signature(parameters=list(merged_params.values()), return_annotation=None) diff --git a/lib/pydantic/_internal/_std_types_schema.py b/lib/pydantic/_internal/_std_types_schema.py deleted file mode 100644 index c8523bf4..00000000 --- a/lib/pydantic/_internal/_std_types_schema.py +++ /dev/null @@ -1,714 +0,0 @@ -"""Logic for generating pydantic-core schemas for standard library types. - -Import of this module is deferred since it contains imports of many standard library modules. -""" -from __future__ import annotations as _annotations - -import collections -import collections.abc -import dataclasses -import decimal -import inspect -import os -import typing -from enum import Enum -from functools import partial -from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network -from typing import Any, Callable, Iterable, TypeVar - -import typing_extensions -from pydantic_core import ( - CoreSchema, - MultiHostUrl, - PydanticCustomError, - PydanticOmit, - Url, - core_schema, -) -from typing_extensions import get_args, get_origin - -from pydantic.errors import PydanticSchemaGenerationError -from pydantic.fields import FieldInfo -from pydantic.types import Strict - -from ..config import ConfigDict -from ..json_schema import JsonSchemaValue, update_json_schema -from . 
import _known_annotated_metadata, _typing_extra, _validators -from ._core_utils import get_type_ref -from ._internal_dataclass import slots_true -from ._schema_generation_shared import GetCoreSchemaHandler, GetJsonSchemaHandler - -if typing.TYPE_CHECKING: - from ._generate_schema import GenerateSchema - - StdSchemaFunction = Callable[[GenerateSchema, type[Any]], core_schema.CoreSchema] - - -@dataclasses.dataclass(**slots_true) -class SchemaTransformer: - get_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema] - get_json_schema: Callable[[CoreSchema, GetJsonSchemaHandler], JsonSchemaValue] - - def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: - return self.get_core_schema(source_type, handler) - - def __get_pydantic_json_schema__(self, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue: - return self.get_json_schema(schema, handler) - - -def get_enum_core_schema(enum_type: type[Enum], config: ConfigDict) -> CoreSchema: - cases: list[Any] = list(enum_type.__members__.values()) - - enum_ref = get_type_ref(enum_type) - description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__) - if description == 'An enumeration.': # This is the default value provided by enum.EnumMeta.__new__; don't use it - description = None - updates = {'title': enum_type.__name__, 'description': description} - updates = {k: v for k, v in updates.items() if v is not None} - - def get_json_schema(_, handler: GetJsonSchemaHandler) -> JsonSchemaValue: - json_schema = handler(core_schema.literal_schema([x.value for x in cases], ref=enum_ref)) - original_schema = handler.resolve_ref_schema(json_schema) - update_json_schema(original_schema, updates) - return json_schema - - if not cases: - # Use an isinstance check for enums with no cases. - # The most important use case for this is creating TypeVar bounds for generics that should - # be restricted to enums. 
This is more consistent than it might seem at first, since you can only - # subclass enum.Enum (or subclasses of enum.Enum) if all parent classes have no cases. - # We use the get_json_schema function when an Enum subclass has been declared with no cases - # so that we can still generate a valid json schema. - return core_schema.is_instance_schema(enum_type, metadata={'pydantic_js_functions': [get_json_schema]}) - - use_enum_values = config.get('use_enum_values', False) - - if len(cases) == 1: - expected = repr(cases[0].value) - else: - expected = ', '.join([repr(case.value) for case in cases[:-1]]) + f' or {cases[-1].value!r}' - - def to_enum(__input_value: Any) -> Enum: - try: - enum_field = enum_type(__input_value) - if use_enum_values: - return enum_field.value - return enum_field - except ValueError: - # The type: ignore on the next line is to ignore the requirement of LiteralString - raise PydanticCustomError('enum', f'Input should be {expected}', {'expected': expected}) # type: ignore - - strict_python_schema = core_schema.is_instance_schema(enum_type) - if use_enum_values: - strict_python_schema = core_schema.chain_schema( - [strict_python_schema, core_schema.no_info_plain_validator_function(lambda x: x.value)] - ) - - to_enum_validator = core_schema.no_info_plain_validator_function(to_enum) - if issubclass(enum_type, int): - # this handles `IntEnum`, and also `Foobar(int, Enum)` - updates['type'] = 'integer' - lax = core_schema.chain_schema([core_schema.int_schema(), to_enum_validator]) - # Disallow float from JSON due to strict mode - strict = core_schema.json_or_python_schema( - json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.int_schema()), - python_schema=strict_python_schema, - ) - elif issubclass(enum_type, str): - # this handles `StrEnum` (3.11 only), and also `Foobar(str, Enum)` - updates['type'] = 'string' - lax = core_schema.chain_schema([core_schema.str_schema(), to_enum_validator]) - strict = 
core_schema.json_or_python_schema( - json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.str_schema()), - python_schema=strict_python_schema, - ) - elif issubclass(enum_type, float): - updates['type'] = 'numeric' - lax = core_schema.chain_schema([core_schema.float_schema(), to_enum_validator]) - strict = core_schema.json_or_python_schema( - json_schema=core_schema.no_info_after_validator_function(to_enum, core_schema.float_schema()), - python_schema=strict_python_schema, - ) - else: - lax = to_enum_validator - strict = core_schema.json_or_python_schema(json_schema=to_enum_validator, python_schema=strict_python_schema) - return core_schema.lax_or_strict_schema( - lax_schema=lax, strict_schema=strict, ref=enum_ref, metadata={'pydantic_js_functions': [get_json_schema]} - ) - - -@dataclasses.dataclass(**slots_true) -class InnerSchemaValidator: - """Use a fixed CoreSchema, avoiding interference from outward annotations.""" - - core_schema: CoreSchema - js_schema: JsonSchemaValue | None = None - js_core_schema: CoreSchema | None = None - js_schema_update: JsonSchemaValue | None = None - - def __get_pydantic_json_schema__(self, _schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue: - if self.js_schema is not None: - return self.js_schema - js_schema = handler(self.js_core_schema or self.core_schema) - if self.js_schema_update is not None: - js_schema.update(self.js_schema_update) - return js_schema - - def __get_pydantic_core_schema__(self, _source_type: Any, _handler: GetCoreSchemaHandler) -> CoreSchema: - return self.core_schema - - -def decimal_prepare_pydantic_annotations( - source: Any, annotations: Iterable[Any], config: ConfigDict -) -> tuple[Any, list[Any]] | None: - if source is not decimal.Decimal: - return None - - metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations) - - config_allow_inf_nan = config.get('allow_inf_nan') - if config_allow_inf_nan is not None: - 
metadata.setdefault('allow_inf_nan', config_allow_inf_nan) - - _known_annotated_metadata.check_metadata( - metadata, {*_known_annotated_metadata.FLOAT_CONSTRAINTS, 'max_digits', 'decimal_places'}, decimal.Decimal - ) - return source, [InnerSchemaValidator(core_schema.decimal_schema(**metadata)), *remaining_annotations] - - -def datetime_prepare_pydantic_annotations( - source_type: Any, annotations: Iterable[Any], _config: ConfigDict -) -> tuple[Any, list[Any]] | None: - import datetime - - metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations) - if source_type is datetime.date: - sv = InnerSchemaValidator(core_schema.date_schema(**metadata)) - elif source_type is datetime.datetime: - sv = InnerSchemaValidator(core_schema.datetime_schema(**metadata)) - elif source_type is datetime.time: - sv = InnerSchemaValidator(core_schema.time_schema(**metadata)) - elif source_type is datetime.timedelta: - sv = InnerSchemaValidator(core_schema.timedelta_schema(**metadata)) - else: - return None - # check now that we know the source type is correct - _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.DATE_TIME_CONSTRAINTS, source_type) - return (source_type, [sv, *remaining_annotations]) - - -def uuid_prepare_pydantic_annotations( - source_type: Any, annotations: Iterable[Any], _config: ConfigDict -) -> tuple[Any, list[Any]] | None: - # UUIDs have no constraints - they are fixed length, constructing a UUID instance checks the length - - from uuid import UUID - - if source_type is not UUID: - return None - - return (source_type, [InnerSchemaValidator(core_schema.uuid_schema()), *annotations]) - - -def path_schema_prepare_pydantic_annotations( - source_type: Any, annotations: Iterable[Any], _config: ConfigDict -) -> tuple[Any, list[Any]] | None: - import pathlib - - if source_type not in { - os.PathLike, - pathlib.Path, - pathlib.PurePath, - pathlib.PosixPath, - pathlib.PurePosixPath, - pathlib.PureWindowsPath, - }: 
- return None - - metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations) - _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.STR_CONSTRAINTS, source_type) - - construct_path = pathlib.PurePath if source_type is os.PathLike else source_type - - def path_validator(input_value: str) -> os.PathLike[Any]: - try: - return construct_path(input_value) - except TypeError as e: - raise PydanticCustomError('path_type', 'Input is not a valid path') from e - - constrained_str_schema = core_schema.str_schema(**metadata) - - instance_schema = core_schema.json_or_python_schema( - json_schema=core_schema.no_info_after_validator_function(path_validator, constrained_str_schema), - python_schema=core_schema.is_instance_schema(source_type), - ) - - strict: bool | None = None - for annotation in annotations: - if isinstance(annotation, Strict): - strict = annotation.strict - - schema = core_schema.lax_or_strict_schema( - lax_schema=core_schema.union_schema( - [ - instance_schema, - core_schema.no_info_after_validator_function(path_validator, constrained_str_schema), - ], - custom_error_type='path_type', - custom_error_message='Input is not a valid path', - strict=True, - ), - strict_schema=instance_schema, - serialization=core_schema.to_string_ser_schema(), - strict=strict, - ) - - return ( - source_type, - [ - InnerSchemaValidator(schema, js_core_schema=constrained_str_schema, js_schema_update={'format': 'path'}), - *remaining_annotations, - ], - ) - - -def dequeue_validator( - input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, maxlen: None | int -) -> collections.deque[Any]: - if isinstance(input_value, collections.deque): - maxlens = [v for v in (input_value.maxlen, maxlen) if v is not None] - if maxlens: - maxlen = min(maxlens) - return collections.deque(handler(input_value), maxlen=maxlen) - else: - return collections.deque(handler(input_value), maxlen=maxlen) - - 
-@dataclasses.dataclass(**slots_true) -class SequenceValidator: - mapped_origin: type[Any] - item_source_type: type[Any] - min_length: int | None = None - max_length: int | None = None - strict: bool = False - - def serialize_sequence_via_list( - self, v: Any, handler: core_schema.SerializerFunctionWrapHandler, info: core_schema.SerializationInfo - ) -> Any: - items: list[Any] = [] - for index, item in enumerate(v): - try: - v = handler(item, index) - except PydanticOmit: - pass - else: - items.append(v) - - if info.mode_is_json(): - return items - else: - return self.mapped_origin(items) - - def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: - if self.item_source_type is Any: - items_schema = None - else: - items_schema = handler.generate_schema(self.item_source_type) - - metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict} - - if self.mapped_origin in (list, set, frozenset): - if self.mapped_origin is list: - constrained_schema = core_schema.list_schema(items_schema, **metadata) - elif self.mapped_origin is set: - constrained_schema = core_schema.set_schema(items_schema, **metadata) - else: - assert self.mapped_origin is frozenset # safety check in case we forget to add a case - constrained_schema = core_schema.frozenset_schema(items_schema, **metadata) - - schema = constrained_schema - else: - # safety check in case we forget to add a case - assert self.mapped_origin in (collections.deque, collections.Counter) - - if self.mapped_origin is collections.deque: - # if we have a MaxLen annotation might as well set that as the default maxlen on the deque - # this lets us re-use existing metadata annotations to let users set the maxlen on a dequeue - # that e.g. 
comes from JSON - coerce_instance_wrap = partial( - core_schema.no_info_wrap_validator_function, - partial(dequeue_validator, maxlen=metadata.get('max_length', None)), - ) - else: - coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin) - - constrained_schema = core_schema.list_schema(items_schema, **metadata) - - check_instance = core_schema.json_or_python_schema( - json_schema=core_schema.list_schema(), - python_schema=core_schema.is_instance_schema(self.mapped_origin), - ) - - serialization = core_schema.wrap_serializer_function_ser_schema( - self.serialize_sequence_via_list, schema=items_schema or core_schema.any_schema(), info_arg=True - ) - - strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)]) - - if metadata.get('strict', False): - schema = strict - else: - lax = coerce_instance_wrap(constrained_schema) - schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict) - schema['serialization'] = serialization - - return schema - - -SEQUENCE_ORIGIN_MAP: dict[Any, Any] = { - typing.Deque: collections.deque, - collections.deque: collections.deque, - list: list, - typing.List: list, - set: set, - typing.AbstractSet: set, - typing.Set: set, - frozenset: frozenset, - typing.FrozenSet: frozenset, - typing.Sequence: list, - typing.MutableSequence: list, - typing.MutableSet: set, - # this doesn't handle subclasses of these - # parametrized typing.Set creates one of these - collections.abc.MutableSet: set, - collections.abc.Set: frozenset, -} - - -def identity(s: CoreSchema) -> CoreSchema: - return s - - -def sequence_like_prepare_pydantic_annotations( - source_type: Any, annotations: Iterable[Any], _config: ConfigDict -) -> tuple[Any, list[Any]] | None: - origin: Any = get_origin(source_type) - - mapped_origin = SEQUENCE_ORIGIN_MAP.get(origin, None) if origin else SEQUENCE_ORIGIN_MAP.get(source_type, None) - if mapped_origin is None: - return None - - args = 
get_args(source_type) - - if not args: - args = (Any,) - elif len(args) != 1: - raise ValueError('Expected sequence to have exactly 1 generic parameter') - - item_source_type = args[0] - - metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations) - _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type) - - return (source_type, [SequenceValidator(mapped_origin, item_source_type, **metadata), *remaining_annotations]) - - -MAPPING_ORIGIN_MAP: dict[Any, Any] = { - typing.DefaultDict: collections.defaultdict, - collections.defaultdict: collections.defaultdict, - collections.OrderedDict: collections.OrderedDict, - typing_extensions.OrderedDict: collections.OrderedDict, - dict: dict, - typing.Dict: dict, - collections.Counter: collections.Counter, - typing.Counter: collections.Counter, - # this doesn't handle subclasses of these - typing.Mapping: dict, - typing.MutableMapping: dict, - # parametrized typing.{Mutable}Mapping creates one of these - collections.abc.MutableMapping: dict, - collections.abc.Mapping: dict, -} - - -def defaultdict_validator( - input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, default_default_factory: Callable[[], Any] -) -> collections.defaultdict[Any, Any]: - if isinstance(input_value, collections.defaultdict): - default_factory = input_value.default_factory - return collections.defaultdict(default_factory, handler(input_value)) - else: - return collections.defaultdict(default_default_factory, handler(input_value)) - - -def get_defaultdict_default_default_factory(values_source_type: Any) -> Callable[[], Any]: - def infer_default() -> Callable[[], Any]: - allowed_default_types: dict[Any, Any] = { - typing.Tuple: tuple, - tuple: tuple, - collections.abc.Sequence: tuple, - collections.abc.MutableSequence: list, - typing.List: list, - list: list, - typing.Sequence: list, - typing.Set: set, - set: set, - typing.MutableSet: set, - 
collections.abc.MutableSet: set, - collections.abc.Set: frozenset, - typing.MutableMapping: dict, - typing.Mapping: dict, - collections.abc.Mapping: dict, - collections.abc.MutableMapping: dict, - float: float, - int: int, - str: str, - bool: bool, - } - values_type_origin = get_origin(values_source_type) or values_source_type - instructions = 'set using `DefaultDict[..., Annotated[..., Field(default_factory=...)]]`' - if isinstance(values_type_origin, TypeVar): - - def type_var_default_factory() -> None: - raise RuntimeError( - 'Generic defaultdict cannot be used without a concrete value type or an' - ' explicit default factory, ' + instructions - ) - - return type_var_default_factory - elif values_type_origin not in allowed_default_types: - # a somewhat subjective set of types that have reasonable default values - allowed_msg = ', '.join([t.__name__ for t in set(allowed_default_types.values())]) - raise PydanticSchemaGenerationError( - f'Unable to infer a default factory for keys of type {values_source_type}.' 
- f' Only {allowed_msg} are supported, other types require an explicit default factory' - ' ' + instructions - ) - return allowed_default_types[values_type_origin] - - # Assume Annotated[..., Field(...)] - if _typing_extra.is_annotated(values_source_type): - field_info = next((v for v in get_args(values_source_type) if isinstance(v, FieldInfo)), None) - else: - field_info = None - if field_info and field_info.default_factory: - default_default_factory = field_info.default_factory - else: - default_default_factory = infer_default() - return default_default_factory - - -@dataclasses.dataclass(**slots_true) -class MappingValidator: - mapped_origin: type[Any] - keys_source_type: type[Any] - values_source_type: type[Any] - min_length: int | None = None - max_length: int | None = None - strict: bool = False - - def serialize_mapping_via_dict(self, v: Any, handler: core_schema.SerializerFunctionWrapHandler) -> Any: - return handler(v) - - def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: - if self.keys_source_type is Any: - keys_schema = None - else: - keys_schema = handler.generate_schema(self.keys_source_type) - if self.values_source_type is Any: - values_schema = None - else: - values_schema = handler.generate_schema(self.values_source_type) - - metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict} - - if self.mapped_origin is dict: - schema = core_schema.dict_schema(keys_schema, values_schema, **metadata) - else: - constrained_schema = core_schema.dict_schema(keys_schema, values_schema, **metadata) - check_instance = core_schema.json_or_python_schema( - json_schema=core_schema.dict_schema(), - python_schema=core_schema.is_instance_schema(self.mapped_origin), - ) - - if self.mapped_origin is collections.defaultdict: - default_default_factory = get_defaultdict_default_default_factory(self.values_source_type) - coerce_instance_wrap = partial( - 
core_schema.no_info_wrap_validator_function, - partial(defaultdict_validator, default_default_factory=default_default_factory), - ) - else: - coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin) - - serialization = core_schema.wrap_serializer_function_ser_schema( - self.serialize_mapping_via_dict, - schema=core_schema.dict_schema( - keys_schema or core_schema.any_schema(), values_schema or core_schema.any_schema() - ), - info_arg=False, - ) - - strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)]) - - if metadata.get('strict', False): - schema = strict - else: - lax = coerce_instance_wrap(constrained_schema) - schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict) - schema['serialization'] = serialization - - return schema - - -def mapping_like_prepare_pydantic_annotations( - source_type: Any, annotations: Iterable[Any], _config: ConfigDict -) -> tuple[Any, list[Any]] | None: - origin: Any = get_origin(source_type) - - mapped_origin = MAPPING_ORIGIN_MAP.get(origin, None) if origin else MAPPING_ORIGIN_MAP.get(source_type, None) - if mapped_origin is None: - return None - - args = get_args(source_type) - - if not args: - args = (Any, Any) - elif mapped_origin is collections.Counter: - # a single generic - if len(args) != 1: - raise ValueError('Expected Counter to have exactly 1 generic parameter') - args = (args[0], int) # keys are always an int - elif len(args) != 2: - raise ValueError('Expected mapping to have exactly 2 generic parameters') - - keys_source_type, values_source_type = args - - metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations) - _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type) - - return ( - source_type, - [ - MappingValidator(mapped_origin, keys_source_type, values_source_type, **metadata), - *remaining_annotations, - ], - ) - - -def 
ip_prepare_pydantic_annotations( - source_type: Any, annotations: Iterable[Any], _config: ConfigDict -) -> tuple[Any, list[Any]] | None: - def make_strict_ip_schema(tp: type[Any]) -> CoreSchema: - return core_schema.json_or_python_schema( - json_schema=core_schema.no_info_after_validator_function(tp, core_schema.str_schema()), - python_schema=core_schema.is_instance_schema(tp), - ) - - if source_type is IPv4Address: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.lax_or_strict_schema( - lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_address_validator), - strict_schema=make_strict_ip_schema(IPv4Address), - serialization=core_schema.to_string_ser_schema(), - ), - lambda _1, _2: {'type': 'string', 'format': 'ipv4'}, - ), - *annotations, - ] - if source_type is IPv4Network: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.lax_or_strict_schema( - lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_network_validator), - strict_schema=make_strict_ip_schema(IPv4Network), - serialization=core_schema.to_string_ser_schema(), - ), - lambda _1, _2: {'type': 'string', 'format': 'ipv4network'}, - ), - *annotations, - ] - if source_type is IPv4Interface: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.lax_or_strict_schema( - lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_interface_validator), - strict_schema=make_strict_ip_schema(IPv4Interface), - serialization=core_schema.to_string_ser_schema(), - ), - lambda _1, _2: {'type': 'string', 'format': 'ipv4interface'}, - ), - *annotations, - ] - - if source_type is IPv6Address: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.lax_or_strict_schema( - lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_address_validator), - strict_schema=make_strict_ip_schema(IPv6Address), - serialization=core_schema.to_string_ser_schema(), - ), - lambda _1, 
_2: {'type': 'string', 'format': 'ipv6'}, - ), - *annotations, - ] - if source_type is IPv6Network: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.lax_or_strict_schema( - lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_network_validator), - strict_schema=make_strict_ip_schema(IPv6Network), - serialization=core_schema.to_string_ser_schema(), - ), - lambda _1, _2: {'type': 'string', 'format': 'ipv6network'}, - ), - *annotations, - ] - if source_type is IPv6Interface: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.lax_or_strict_schema( - lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_interface_validator), - strict_schema=make_strict_ip_schema(IPv6Interface), - serialization=core_schema.to_string_ser_schema(), - ), - lambda _1, _2: {'type': 'string', 'format': 'ipv6interface'}, - ), - *annotations, - ] - - return None - - -def url_prepare_pydantic_annotations( - source_type: Any, annotations: Iterable[Any], _config: ConfigDict -) -> tuple[Any, list[Any]] | None: - if source_type is Url: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.url_schema(), - lambda cs, handler: handler(cs), - ), - *annotations, - ] - if source_type is MultiHostUrl: - return source_type, [ - SchemaTransformer( - lambda _1, _2: core_schema.multi_host_url_schema(), - lambda cs, handler: handler(cs), - ), - *annotations, - ] - - -PREPARE_METHODS: tuple[Callable[[Any, Iterable[Any], ConfigDict], tuple[Any, list[Any]] | None], ...] 
= ( - decimal_prepare_pydantic_annotations, - sequence_like_prepare_pydantic_annotations, - datetime_prepare_pydantic_annotations, - uuid_prepare_pydantic_annotations, - path_schema_prepare_pydantic_annotations, - mapping_like_prepare_pydantic_annotations, - ip_prepare_pydantic_annotations, - url_prepare_pydantic_annotations, -) diff --git a/lib/pydantic/_internal/_typing_extra.py b/lib/pydantic/_internal/_typing_extra.py deleted file mode 100644 index 1d5d3b3f..00000000 --- a/lib/pydantic/_internal/_typing_extra.py +++ /dev/null @@ -1,469 +0,0 @@ -"""Logic for interacting with type annotations, mostly extensions, shims and hacks to wrap python's typing module.""" -from __future__ import annotations as _annotations - -import dataclasses -import sys -import types -import typing -from collections.abc import Callable -from functools import partial -from types import GetSetDescriptorType -from typing import TYPE_CHECKING, Any, Final - -from typing_extensions import Annotated, Literal, TypeAliasType, TypeGuard, get_args, get_origin - -if TYPE_CHECKING: - from ._dataclasses import StandardDataclass - -try: - from typing import _TypingBase # type: ignore[attr-defined] -except ImportError: - from typing import _Final as _TypingBase # type: ignore[attr-defined] - -typing_base = _TypingBase - - -if sys.version_info < (3, 9): - # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] 
and so on) - TypingGenericAlias = () -else: - from typing import GenericAlias as TypingGenericAlias # type: ignore - - -if sys.version_info < (3, 11): - from typing_extensions import NotRequired, Required -else: - from typing import NotRequired, Required # noqa: F401 - - -if sys.version_info < (3, 10): - - def origin_is_union(tp: type[Any] | None) -> bool: - return tp is typing.Union - - WithArgsTypes = (TypingGenericAlias,) - -else: - - def origin_is_union(tp: type[Any] | None) -> bool: - return tp is typing.Union or tp is types.UnionType - - WithArgsTypes = typing._GenericAlias, types.GenericAlias, types.UnionType # type: ignore[attr-defined] - - -if sys.version_info < (3, 10): - NoneType = type(None) - EllipsisType = type(Ellipsis) -else: - from types import NoneType as NoneType - - -LITERAL_TYPES: set[Any] = {Literal} -if hasattr(typing, 'Literal'): - LITERAL_TYPES.add(typing.Literal) # type: ignore - -NONE_TYPES: tuple[Any, ...] = (None, NoneType, *(tp[None] for tp in LITERAL_TYPES)) - - -TypeVarType = Any # since mypy doesn't allow the use of TypeVar as a type - - -def is_none_type(type_: Any) -> bool: - return type_ in NONE_TYPES - - -def is_callable_type(type_: type[Any]) -> bool: - return type_ is Callable or get_origin(type_) is Callable - - -def is_literal_type(type_: type[Any]) -> bool: - return Literal is not None and get_origin(type_) in LITERAL_TYPES - - -def literal_values(type_: type[Any]) -> tuple[Any, ...]: - return get_args(type_) - - -def all_literal_values(type_: type[Any]) -> list[Any]: - """This method is used to retrieve all Literal values as - Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586) - e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`. 
- """ - if not is_literal_type(type_): - return [type_] - - values = literal_values(type_) - return list(x for value in values for x in all_literal_values(value)) - - -def is_annotated(ann_type: Any) -> bool: - from ._utils import lenient_issubclass - - origin = get_origin(ann_type) - return origin is not None and lenient_issubclass(origin, Annotated) - - -def is_namedtuple(type_: type[Any]) -> bool: - """Check if a given class is a named tuple. - It can be either a `typing.NamedTuple` or `collections.namedtuple`. - """ - from ._utils import lenient_issubclass - - return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields') - - -test_new_type = typing.NewType('test_new_type', str) - - -def is_new_type(type_: type[Any]) -> bool: - """Check whether type_ was created using typing.NewType. - - Can't use isinstance because it fails <3.10. - """ - return isinstance(type_, test_new_type.__class__) and hasattr(type_, '__supertype__') # type: ignore[arg-type] - - -def _check_classvar(v: type[Any] | None) -> bool: - if v is None: - return False - - return v.__class__ == typing.ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar' - - -def is_classvar(ann_type: type[Any]) -> bool: - if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)): - return True - - # this is an ugly workaround for class vars that contain forward references and are therefore themselves - # forward references, see #3679 - if ann_type.__class__ == typing.ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): # type: ignore - return True - - return False - - -def _check_finalvar(v: type[Any] | None) -> bool: - """Check if a given type is a `typing.Final` type.""" - if v is None: - return False - - return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final') - - -def is_finalvar(ann_type: Any) -> bool: - return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type)) - - -def parent_frame_namespace(*, 
parent_depth: int = 2) -> dict[str, Any] | None: - """We allow use of items in parent namespace to get around the issue with `get_type_hints` only looking in the - global module namespace. See https://github.com/pydantic/pydantic/issues/2678#issuecomment-1008139014 -> Scope - and suggestion at the end of the next comment by @gvanrossum. - - WARNING 1: it matters exactly where this is called. By default, this function will build a namespace from the - parent of where it is called. - - WARNING 2: this only looks in the parent namespace, not other parents since (AFAIK) there's no way to collect a - dict of exactly what's in scope. Using `f_back` would work sometimes but would be very wrong and confusing in many - other cases. See https://discuss.python.org/t/is-there-a-way-to-access-parent-nested-namespaces/20659. - """ - frame = sys._getframe(parent_depth) - # if f_back is None, it's the global module namespace and we don't need to include it here - if frame.f_back is None: - return None - else: - return frame.f_locals - - -def add_module_globals(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]: - module_name = getattr(obj, '__module__', None) - if module_name: - try: - module_globalns = sys.modules[module_name].__dict__ - except KeyError: - # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 - pass - else: - if globalns: - return {**module_globalns, **globalns} - else: - # copy module globals to make sure it can't be updated later - return module_globalns.copy() - - return globalns or {} - - -def get_cls_types_namespace(cls: type[Any], parent_namespace: dict[str, Any] | None = None) -> dict[str, Any]: - ns = add_module_globals(cls, parent_namespace) - ns[cls.__name__] = cls - return ns - - -def get_cls_type_hints_lenient(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]: - """Collect annotations from a class, including those from parent classes. 
- - Unlike `typing.get_type_hints`, this function will not error if a forward reference is not resolvable. - """ - hints = {} - for base in reversed(obj.__mro__): - ann = base.__dict__.get('__annotations__') - localns = dict(vars(base)) - if ann is not None and ann is not GetSetDescriptorType: - for name, value in ann.items(): - hints[name] = eval_type_lenient(value, globalns, localns) - return hints - - -def eval_type_lenient(value: Any, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None) -> Any: - """Behaves like typing._eval_type, except it won't raise an error if a forward reference can't be resolved.""" - if value is None: - value = NoneType - elif isinstance(value, str): - value = _make_forward_ref(value, is_argument=False, is_class=True) - - try: - return eval_type_backport(value, globalns, localns) - except NameError: - # the point of this function is to be tolerant to this case - return value - - -def eval_type_backport( - value: Any, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None -) -> Any: - """Like `typing._eval_type`, but falls back to the `eval_type_backport` package if it's - installed to let older Python versions use newer typing features. - Specifically, this transforms `X | Y` into `typing.Union[X, Y]` - and `list[X]` into `typing.List[X]` etc. (for all the types made generic in PEP 585) - if the original syntax is not supported in the current Python version. - """ - try: - return typing._eval_type( # type: ignore - value, globalns, localns - ) - except TypeError as e: - if not (isinstance(value, typing.ForwardRef) and is_backport_fixable_error(e)): - raise - try: - from eval_type_backport import eval_type_backport - except ImportError: - raise TypeError( - f'You have a type annotation {value.__forward_arg__!r} ' - f'which makes use of newer typing features than are supported in your version of Python. 
' - f'To handle this error, you should either remove the use of new syntax ' - f'or install the `eval_type_backport` package.' - ) from e - - return eval_type_backport(value, globalns, localns, try_default=False) - - -def is_backport_fixable_error(e: TypeError) -> bool: - msg = str(e) - return msg.startswith('unsupported operand type(s) for |: ') or "' object is not subscriptable" in msg - - -def get_function_type_hints( - function: Callable[..., Any], *, include_keys: set[str] | None = None, types_namespace: dict[str, Any] | None = None -) -> dict[str, Any]: - """Like `typing.get_type_hints`, but doesn't convert `X` to `Optional[X]` if the default value is `None`, also - copes with `partial`. - """ - if isinstance(function, partial): - annotations = function.func.__annotations__ - else: - annotations = function.__annotations__ - - globalns = add_module_globals(function) - type_hints = {} - for name, value in annotations.items(): - if include_keys is not None and name not in include_keys: - continue - if value is None: - value = NoneType - elif isinstance(value, str): - value = _make_forward_ref(value) - - type_hints[name] = eval_type_backport(value, globalns, types_namespace) - - return type_hints - - -if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1): - - def _make_forward_ref( - arg: Any, - is_argument: bool = True, - *, - is_class: bool = False, - ) -> typing.ForwardRef: - """Wrapper for ForwardRef that accounts for the `is_class` argument missing in older versions. - The `module` argument is omitted as it breaks <3.9.8, =3.10.0 and isn't used in the calls below. - - See https://github.com/python/cpython/pull/28560 for some background. - The backport happened on 3.9.8, see: - https://github.com/pydantic/pydantic/discussions/6244#discussioncomment-6275458, - and on 3.10.1 for the 3.10 branch, see: - https://github.com/pydantic/pydantic/issues/6912 - - Implemented as EAFP with memory. 
- """ - return typing.ForwardRef(arg, is_argument) - -else: - _make_forward_ref = typing.ForwardRef - - -if sys.version_info >= (3, 10): - get_type_hints = typing.get_type_hints - -else: - """ - For older versions of python, we have a custom implementation of `get_type_hints` which is a close as possible to - the implementation in CPython 3.10.8. - """ - - @typing.no_type_check - def get_type_hints( # noqa: C901 - obj: Any, - globalns: dict[str, Any] | None = None, - localns: dict[str, Any] | None = None, - include_extras: bool = False, - ) -> dict[str, Any]: # pragma: no cover - """Taken verbatim from python 3.10.8 unchanged, except: - * type annotations of the function definition above. - * prefixing `typing.` where appropriate - * Use `_make_forward_ref` instead of `typing.ForwardRef` to handle the `is_class` argument. - - https://github.com/python/cpython/blob/aaaf5174241496afca7ce4d4584570190ff972fe/Lib/typing.py#L1773-L1875 - - DO NOT CHANGE THIS METHOD UNLESS ABSOLUTELY NECESSARY. - ====================================================== - - Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. 
- - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. For classes, the search - order is globals first then locals. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - if getattr(obj, '__no_type_check__', None): - return {} - # Classes require a special treatment. - if isinstance(obj, type): - hints = {} - for base in reversed(obj.__mro__): - if globalns is None: - base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {}) - else: - base_globals = globalns - ann = base.__dict__.get('__annotations__', {}) - if isinstance(ann, types.GetSetDescriptorType): - ann = {} - base_locals = dict(vars(base)) if localns is None else localns - if localns is None and globalns is None: - # This is surprising, but required. Before Python 3.10, - # get_type_hints only evaluated the globalns of - # a class. To maintain backwards compatibility, we reverse - # the globalns and localns order so that eval() looks into - # *base_globals* first rather than *base_locals*. - # This only affects ForwardRefs. - base_globals, base_locals = base_locals, base_globals - for name, value in ann.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _make_forward_ref(value, is_argument=False, is_class=True) - - value = eval_type_backport(value, base_globals, base_locals) - hints[name] = value - if not include_extras and hasattr(typing, '_strip_annotations'): - return { - k: typing._strip_annotations(t) # type: ignore - for k, t in hints.items() - } - else: - return hints - - if globalns is None: - if isinstance(obj, types.ModuleType): - globalns = obj.__dict__ - else: - nsobj = obj - # Find globalns for the unwrapped object. 
- while hasattr(nsobj, '__wrapped__'): - nsobj = nsobj.__wrapped__ - globalns = getattr(nsobj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - hints = getattr(obj, '__annotations__', None) - if hints is None: - # Return empty annotations for something that _could_ have them. - if isinstance(obj, typing._allowed_types): # type: ignore - return {} - else: - raise TypeError(f'{obj!r} is not a module, class, method, ' 'or function.') - defaults = typing._get_defaults(obj) # type: ignore - hints = dict(hints) - for name, value in hints.items(): - if value is None: - value = type(None) - if isinstance(value, str): - # class-level forward refs were handled above, this must be either - # a module-level annotation or a function argument annotation - - value = _make_forward_ref( - value, - is_argument=not isinstance(obj, types.ModuleType), - is_class=False, - ) - value = eval_type_backport(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = typing.Optional[value] - hints[name] = value - return hints if include_extras else {k: typing._strip_annotations(t) for k, t in hints.items()} # type: ignore - - -def is_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]: - # The dataclasses.is_dataclass function doesn't seem to provide TypeGuard functionality, - # so I created this convenience function - return dataclasses.is_dataclass(_cls) - - -def origin_is_type_alias_type(origin: Any) -> TypeGuard[TypeAliasType]: - return isinstance(origin, TypeAliasType) - - -if sys.version_info >= (3, 10): - - def is_generic_alias(type_: type[Any]) -> bool: - return isinstance(type_, (types.GenericAlias, typing._GenericAlias)) # type: ignore[attr-defined] - -else: - - def is_generic_alias(type_: type[Any]) -> bool: - return isinstance(type_, typing._GenericAlias) # type: ignore diff --git a/lib/pydantic/_internal/_utils.py b/lib/pydantic/_internal/_utils.py deleted file mode 100644 index 
31f5b2c5..00000000 --- a/lib/pydantic/_internal/_utils.py +++ /dev/null @@ -1,362 +0,0 @@ -"""Bucket of reusable internal utilities. - -This should be reduced as much as possible with functions only used in one place, moved to that place. -""" -from __future__ import annotations as _annotations - -import dataclasses -import keyword -import typing -import weakref -from collections import OrderedDict, defaultdict, deque -from copy import deepcopy -from itertools import zip_longest -from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType -from typing import Any, Mapping, TypeVar - -from typing_extensions import TypeAlias, TypeGuard - -from . import _repr, _typing_extra - -if typing.TYPE_CHECKING: - MappingIntStrAny: TypeAlias = 'typing.Mapping[int, Any] | typing.Mapping[str, Any]' - AbstractSetIntStr: TypeAlias = 'typing.AbstractSet[int] | typing.AbstractSet[str]' - from ..main import BaseModel - - -# these are types that are returned unchanged by deepcopy -IMMUTABLE_NON_COLLECTIONS_TYPES: set[type[Any]] = { - int, - float, - complex, - str, - bool, - bytes, - type, - _typing_extra.NoneType, - FunctionType, - BuiltinFunctionType, - LambdaType, - weakref.ref, - CodeType, - # note: including ModuleType will differ from behaviour of deepcopy by not producing error. 
- # It might be not a good idea in general, but considering that this function used only internally - # against default values of fields, this will allow to actually have a field with module as default value - ModuleType, - NotImplemented.__class__, - Ellipsis.__class__, -} - -# these are types that if empty, might be copied with simple copy() instead of deepcopy() -BUILTIN_COLLECTIONS: set[type[Any]] = { - list, - set, - tuple, - frozenset, - dict, - OrderedDict, - defaultdict, - deque, -} - - -def sequence_like(v: Any) -> bool: - return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) - - -def lenient_isinstance(o: Any, class_or_tuple: type[Any] | tuple[type[Any], ...] | None) -> bool: # pragma: no cover - try: - return isinstance(o, class_or_tuple) # type: ignore[arg-type] - except TypeError: - return False - - -def lenient_issubclass(cls: Any, class_or_tuple: Any) -> bool: # pragma: no cover - try: - return isinstance(cls, type) and issubclass(cls, class_or_tuple) - except TypeError: - if isinstance(cls, _typing_extra.WithArgsTypes): - return False - raise # pragma: no cover - - -def is_model_class(cls: Any) -> TypeGuard[type[BaseModel]]: - """Returns true if cls is a _proper_ subclass of BaseModel, and provides proper type-checking, - unlike raw calls to lenient_issubclass. - """ - from ..main import BaseModel - - return lenient_issubclass(cls, BaseModel) and cls is not BaseModel - - -def is_valid_identifier(identifier: str) -> bool: - """Checks that a string is a valid identifier and not a Python keyword. - :param identifier: The identifier to test. - :return: True if the identifier is valid. 
- """ - return identifier.isidentifier() and not keyword.iskeyword(identifier) - - -KeyType = TypeVar('KeyType') - - -def deep_update(mapping: dict[KeyType, Any], *updating_mappings: dict[KeyType, Any]) -> dict[KeyType, Any]: - updated_mapping = mapping.copy() - for updating_mapping in updating_mappings: - for k, v in updating_mapping.items(): - if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): - updated_mapping[k] = deep_update(updated_mapping[k], v) - else: - updated_mapping[k] = v - return updated_mapping - - -def update_not_none(mapping: dict[Any, Any], **update: Any) -> None: - mapping.update({k: v for k, v in update.items() if v is not None}) - - -T = TypeVar('T') - - -def unique_list( - input_list: list[T] | tuple[T, ...], - *, - name_factory: typing.Callable[[T], str] = str, -) -> list[T]: - """Make a list unique while maintaining order. - We update the list if another one with the same name is set - (e.g. model validator overridden in subclass). - """ - result: list[T] = [] - result_names: list[str] = [] - for v in input_list: - v_name = name_factory(v) - if v_name not in result_names: - result_names.append(v_name) - result.append(v) - else: - result[result_names.index(v_name)] = v - - return result - - -class ValueItems(_repr.Representation): - """Class for more convenient calculation of excluded or included fields on values.""" - - __slots__ = ('_items', '_type') - - def __init__(self, value: Any, items: AbstractSetIntStr | MappingIntStrAny) -> None: - items = self._coerce_items(items) - - if isinstance(value, (list, tuple)): - items = self._normalize_indexes(items, len(value)) # type: ignore - - self._items: MappingIntStrAny = items # type: ignore - - def is_excluded(self, item: Any) -> bool: - """Check if item is fully excluded. - - :param item: key or index of a value - """ - return self.is_true(self._items.get(item)) - - def is_included(self, item: Any) -> bool: - """Check if value is contained in self._items. 
- - :param item: key or index of value - """ - return item in self._items - - def for_element(self, e: int | str) -> AbstractSetIntStr | MappingIntStrAny | None: - """:param e: key or index of element on value - :return: raw values for element if self._items is dict and contain needed element - """ - item = self._items.get(e) # type: ignore - return item if not self.is_true(item) else None - - def _normalize_indexes(self, items: MappingIntStrAny, v_length: int) -> dict[int | str, Any]: - """:param items: dict or set of indexes which will be normalized - :param v_length: length of sequence indexes of which will be - - >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) - {0: True, 2: True, 3: True} - >>> self._normalize_indexes({'__all__': True}, 4) - {0: True, 1: True, 2: True, 3: True} - """ - normalized_items: dict[int | str, Any] = {} - all_items = None - for i, v in items.items(): - if not (isinstance(v, typing.Mapping) or isinstance(v, typing.AbstractSet) or self.is_true(v)): - raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') - if i == '__all__': - all_items = self._coerce_value(v) - continue - if not isinstance(i, int): - raise TypeError( - 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: ' - 'expected integer keys or keyword "__all__"' - ) - normalized_i = v_length + i if i < 0 else i - normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) - - if not all_items: - return normalized_items - if self.is_true(all_items): - for i in range(v_length): - normalized_items.setdefault(i, ...) 
- return normalized_items - for i in range(v_length): - normalized_item = normalized_items.setdefault(i, {}) - if not self.is_true(normalized_item): - normalized_items[i] = self.merge(all_items, normalized_item) - return normalized_items - - @classmethod - def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: - """Merge a `base` item with an `override` item. - - Both `base` and `override` are converted to dictionaries if possible. - Sets are converted to dictionaries with the sets entries as keys and - Ellipsis as values. - - Each key-value pair existing in `base` is merged with `override`, - while the rest of the key-value pairs are updated recursively with this function. - - Merging takes place based on the "union" of keys if `intersect` is - set to `False` (default) and on the intersection of keys if - `intersect` is set to `True`. - """ - override = cls._coerce_value(override) - base = cls._coerce_value(base) - if override is None: - return base - if cls.is_true(base) or base is None: - return override - if cls.is_true(override): - return base if intersect else override - - # intersection or union of keys while preserving ordering: - if intersect: - merge_keys = [k for k in base if k in override] + [k for k in override if k in base] - else: - merge_keys = list(base) + [k for k in override if k not in base] - - merged: dict[int | str, Any] = {} - for k in merge_keys: - merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) - if merged_item is not None: - merged[k] = merged_item - - return merged - - @staticmethod - def _coerce_items(items: AbstractSetIntStr | MappingIntStrAny) -> MappingIntStrAny: - if isinstance(items, typing.Mapping): - pass - elif isinstance(items, typing.AbstractSet): - items = dict.fromkeys(items, ...) 
# type: ignore - else: - class_name = getattr(items, '__class__', '???') - raise TypeError(f'Unexpected type of exclude value {class_name}') - return items # type: ignore - - @classmethod - def _coerce_value(cls, value: Any) -> Any: - if value is None or cls.is_true(value): - return value - return cls._coerce_items(value) - - @staticmethod - def is_true(v: Any) -> bool: - return v is True or v is ... - - def __repr_args__(self) -> _repr.ReprArgs: - return [(None, self._items)] - - -if typing.TYPE_CHECKING: - - def ClassAttribute(name: str, value: T) -> T: - ... - -else: - - class ClassAttribute: - """Hide class attribute from its instances.""" - - __slots__ = 'name', 'value' - - def __init__(self, name: str, value: Any) -> None: - self.name = name - self.value = value - - def __get__(self, instance: Any, owner: type[Any]) -> None: - if instance is None: - return self.value - raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') - - -Obj = TypeVar('Obj') - - -def smart_deepcopy(obj: Obj) -> Obj: - """Return type as is for immutable built-in types - Use obj.copy() for built-in empty collections - Use copy.deepcopy() for non-empty collections and unknown objects. - """ - obj_type = obj.__class__ - if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: - return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway - try: - if not obj and obj_type in BUILTIN_COLLECTIONS: - # faster way for empty collections, no need to copy its members - return obj if obj_type is tuple else obj.copy() # tuple doesn't have copy method # type: ignore - except (TypeError, ValueError, RuntimeError): - # do we really dare to catch ALL errors? 
Seems a bit risky - pass - - return deepcopy(obj) # slowest way when we actually might need a deepcopy - - -_SENTINEL = object() - - -def all_identical(left: typing.Iterable[Any], right: typing.Iterable[Any]) -> bool: - """Check that the items of `left` are the same objects as those in `right`. - - >>> a, b = object(), object() - >>> all_identical([a, b, a], [a, b, a]) - True - >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" - False - """ - for left_item, right_item in zip_longest(left, right, fillvalue=_SENTINEL): - if left_item is not right_item: - return False - return True - - -@dataclasses.dataclass(frozen=True) -class SafeGetItemProxy: - """Wrapper redirecting `__getitem__` to `get` with a sentinel value as default - - This makes is safe to use in `operator.itemgetter` when some keys may be missing - """ - - # Define __slots__manually for performances - # @dataclasses.dataclass() only support slots=True in python>=3.10 - __slots__ = ('wrapped',) - - wrapped: Mapping[str, Any] - - def __getitem__(self, __key: str) -> Any: - return self.wrapped.get(__key, _SENTINEL) - - # required to pass the object to operator.itemgetter() instances due to a quirk of typeshed - # https://github.com/python/mypy/issues/13713 - # https://github.com/python/typeshed/pull/8785 - # Since this is typing-only, hide it in a typing.TYPE_CHECKING block - if typing.TYPE_CHECKING: - - def __contains__(self, __key: str) -> bool: - return self.wrapped.__contains__(__key) diff --git a/lib/pydantic/_internal/_validate_call.py b/lib/pydantic/_internal/_validate_call.py deleted file mode 100644 index 664c0630..00000000 --- a/lib/pydantic/_internal/_validate_call.py +++ /dev/null @@ -1,84 +0,0 @@ -from __future__ import annotations as _annotations - -import inspect -from functools import partial -from typing import Any, Awaitable, Callable - -import pydantic_core - -from ..config import ConfigDict -from ..plugin._schema_validator import 
create_schema_validator -from . import _generate_schema, _typing_extra -from ._config import ConfigWrapper - - -class ValidateCallWrapper: - """This is a wrapper around a function that validates the arguments passed to it, and optionally the return value.""" - - __slots__ = ( - '__pydantic_validator__', - '__name__', - '__qualname__', - '__annotations__', - '__dict__', # required for __module__ - ) - - def __init__(self, function: Callable[..., Any], config: ConfigDict | None, validate_return: bool): - if isinstance(function, partial): - func = function.func - schema_type = func - self.__name__ = f'partial({func.__name__})' - self.__qualname__ = f'partial({func.__qualname__})' - self.__module__ = func.__module__ - else: - schema_type = function - self.__name__ = function.__name__ - self.__qualname__ = function.__qualname__ - self.__module__ = function.__module__ - - namespace = _typing_extra.add_module_globals(function, None) - config_wrapper = ConfigWrapper(config) - gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace) - schema = gen_schema.clean_schema(gen_schema.generate_schema(function)) - core_config = config_wrapper.core_config(self) - - self.__pydantic_validator__ = create_schema_validator( - schema, - schema_type, - self.__module__, - self.__qualname__, - 'validate_call', - core_config, - config_wrapper.plugin_settings, - ) - - if validate_return: - signature = inspect.signature(function) - return_type = signature.return_annotation if signature.return_annotation is not signature.empty else Any - gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace) - schema = gen_schema.clean_schema(gen_schema.generate_schema(return_type)) - validator = create_schema_validator( - schema, - schema_type, - self.__module__, - self.__qualname__, - 'validate_call', - core_config, - config_wrapper.plugin_settings, - ) - if inspect.iscoroutinefunction(function): - - async def return_val_wrapper(aw: Awaitable[Any]) -> None: - return 
validator.validate_python(await aw) - - self.__return_pydantic_validator__ = return_val_wrapper - else: - self.__return_pydantic_validator__ = validator.validate_python - else: - self.__return_pydantic_validator__ = None - - def __call__(self, *args: Any, **kwargs: Any) -> Any: - res = self.__pydantic_validator__.validate_python(pydantic_core.ArgsKwargs(args, kwargs)) - if self.__return_pydantic_validator__: - return self.__return_pydantic_validator__(res) - return res diff --git a/lib/pydantic/_internal/_validators.py b/lib/pydantic/_internal/_validators.py deleted file mode 100644 index 7193fe5c..00000000 --- a/lib/pydantic/_internal/_validators.py +++ /dev/null @@ -1,278 +0,0 @@ -"""Validator functions for standard library types. - -Import of this module is deferred since it contains imports of many standard library modules. -""" - -from __future__ import annotations as _annotations - -import math -import re -import typing -from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network -from typing import Any - -from pydantic_core import PydanticCustomError, core_schema -from pydantic_core._pydantic_core import PydanticKnownError - - -def sequence_validator( - __input_value: typing.Sequence[Any], - validator: core_schema.ValidatorFunctionWrapHandler, -) -> typing.Sequence[Any]: - """Validator for `Sequence` types, isinstance(v, Sequence) has already been called.""" - value_type = type(__input_value) - - # We don't accept any plain string as a sequence - # Relevant issue: https://github.com/pydantic/pydantic/issues/5595 - if issubclass(value_type, (str, bytes)): - raise PydanticCustomError( - 'sequence_str', - "'{type_name}' instances are not allowed as a Sequence value", - {'type_name': value_type.__name__}, - ) - - v_list = validator(__input_value) - - # the rest of the logic is just re-creating the original type from `v_list` - if value_type == list: - return v_list - elif issubclass(value_type, range): - # return the 
list as we probably can't re-create the range - return v_list - else: - # best guess at how to re-create the original type, more custom construction logic might be required - return value_type(v_list) # type: ignore[call-arg] - - -def import_string(value: Any) -> Any: - if isinstance(value, str): - try: - return _import_string_logic(value) - except ImportError as e: - raise PydanticCustomError('import_error', 'Invalid python path: {error}', {'error': str(e)}) from e - else: - # otherwise we just return the value and let the next validator do the rest of the work - return value - - -def _import_string_logic(dotted_path: str) -> Any: - """Inspired by uvicorn — dotted paths should include a colon before the final item if that item is not a module. - (This is necessary to distinguish between a submodule and an attribute when there is a conflict.). - - If the dotted path does not include a colon and the final item is not a valid module, importing as an attribute - rather than a submodule will be attempted automatically. 
- - So, for example, the following values of `dotted_path` result in the following returned values: - * 'collections': - * 'collections.abc': - * 'collections.abc:Mapping': - * `collections.abc.Mapping`: (though this is a bit slower than the previous line) - - An error will be raised under any of the following scenarios: - * `dotted_path` contains more than one colon (e.g., 'collections:abc:Mapping') - * the substring of `dotted_path` before the colon is not a valid module in the environment (e.g., '123:Mapping') - * the substring of `dotted_path` after the colon is not an attribute of the module (e.g., 'collections:abc123') - """ - from importlib import import_module - - components = dotted_path.strip().split(':') - if len(components) > 2: - raise ImportError(f"Import strings should have at most one ':'; received {dotted_path!r}") - - module_path = components[0] - if not module_path: - raise ImportError(f'Import strings should have a nonempty module name; received {dotted_path!r}') - - try: - module = import_module(module_path) - except ModuleNotFoundError as e: - if '.' 
in module_path: - # Check if it would be valid if the final item was separated from its module with a `:` - maybe_module_path, maybe_attribute = dotted_path.strip().rsplit('.', 1) - try: - return _import_string_logic(f'{maybe_module_path}:{maybe_attribute}') - except ImportError: - pass - raise ImportError(f'No module named {module_path!r}') from e - raise e - - if len(components) > 1: - attribute = components[1] - try: - return getattr(module, attribute) - except AttributeError as e: - raise ImportError(f'cannot import name {attribute!r} from {module_path!r}') from e - else: - return module - - -def pattern_either_validator(__input_value: Any) -> typing.Pattern[Any]: - if isinstance(__input_value, typing.Pattern): - return __input_value - elif isinstance(__input_value, (str, bytes)): - # todo strict mode - return compile_pattern(__input_value) # type: ignore - else: - raise PydanticCustomError('pattern_type', 'Input should be a valid pattern') - - -def pattern_str_validator(__input_value: Any) -> typing.Pattern[str]: - if isinstance(__input_value, typing.Pattern): - if isinstance(__input_value.pattern, str): - return __input_value - else: - raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern') - elif isinstance(__input_value, str): - return compile_pattern(__input_value) - elif isinstance(__input_value, bytes): - raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern') - else: - raise PydanticCustomError('pattern_type', 'Input should be a valid pattern') - - -def pattern_bytes_validator(__input_value: Any) -> typing.Pattern[bytes]: - if isinstance(__input_value, typing.Pattern): - if isinstance(__input_value.pattern, bytes): - return __input_value - else: - raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern') - elif isinstance(__input_value, bytes): - return compile_pattern(__input_value) - elif isinstance(__input_value, str): - raise PydanticCustomError('pattern_bytes_type', 'Input 
should be a bytes pattern') - else: - raise PydanticCustomError('pattern_type', 'Input should be a valid pattern') - - -PatternType = typing.TypeVar('PatternType', str, bytes) - - -def compile_pattern(pattern: PatternType) -> typing.Pattern[PatternType]: - try: - return re.compile(pattern) - except re.error: - raise PydanticCustomError('pattern_regex', 'Input should be a valid regular expression') - - -def ip_v4_address_validator(__input_value: Any) -> IPv4Address: - if isinstance(__input_value, IPv4Address): - return __input_value - - try: - return IPv4Address(__input_value) - except ValueError: - raise PydanticCustomError('ip_v4_address', 'Input is not a valid IPv4 address') - - -def ip_v6_address_validator(__input_value: Any) -> IPv6Address: - if isinstance(__input_value, IPv6Address): - return __input_value - - try: - return IPv6Address(__input_value) - except ValueError: - raise PydanticCustomError('ip_v6_address', 'Input is not a valid IPv6 address') - - -def ip_v4_network_validator(__input_value: Any) -> IPv4Network: - """Assume IPv4Network initialised with a default `strict` argument. - - See more: - https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network - """ - if isinstance(__input_value, IPv4Network): - return __input_value - - try: - return IPv4Network(__input_value) - except ValueError: - raise PydanticCustomError('ip_v4_network', 'Input is not a valid IPv4 network') - - -def ip_v6_network_validator(__input_value: Any) -> IPv6Network: - """Assume IPv6Network initialised with a default `strict` argument. 
- - See more: - https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network - """ - if isinstance(__input_value, IPv6Network): - return __input_value - - try: - return IPv6Network(__input_value) - except ValueError: - raise PydanticCustomError('ip_v6_network', 'Input is not a valid IPv6 network') - - -def ip_v4_interface_validator(__input_value: Any) -> IPv4Interface: - if isinstance(__input_value, IPv4Interface): - return __input_value - - try: - return IPv4Interface(__input_value) - except ValueError: - raise PydanticCustomError('ip_v4_interface', 'Input is not a valid IPv4 interface') - - -def ip_v6_interface_validator(__input_value: Any) -> IPv6Interface: - if isinstance(__input_value, IPv6Interface): - return __input_value - - try: - return IPv6Interface(__input_value) - except ValueError: - raise PydanticCustomError('ip_v6_interface', 'Input is not a valid IPv6 interface') - - -def greater_than_validator(x: Any, gt: Any) -> Any: - if not (x > gt): - raise PydanticKnownError('greater_than', {'gt': gt}) - return x - - -def greater_than_or_equal_validator(x: Any, ge: Any) -> Any: - if not (x >= ge): - raise PydanticKnownError('greater_than_equal', {'ge': ge}) - return x - - -def less_than_validator(x: Any, lt: Any) -> Any: - if not (x < lt): - raise PydanticKnownError('less_than', {'lt': lt}) - return x - - -def less_than_or_equal_validator(x: Any, le: Any) -> Any: - if not (x <= le): - raise PydanticKnownError('less_than_equal', {'le': le}) - return x - - -def multiple_of_validator(x: Any, multiple_of: Any) -> Any: - if not (x % multiple_of == 0): - raise PydanticKnownError('multiple_of', {'multiple_of': multiple_of}) - return x - - -def min_length_validator(x: Any, min_length: Any) -> Any: - if not (len(x) >= min_length): - raise PydanticKnownError( - 'too_short', - {'field_type': 'Value', 'min_length': min_length, 'actual_length': len(x)}, - ) - return x - - -def max_length_validator(x: Any, max_length: Any) -> Any: - if len(x) > max_length: - raise 
PydanticKnownError( - 'too_long', - {'field_type': 'Value', 'max_length': max_length, 'actual_length': len(x)}, - ) - return x - - -def forbid_inf_nan_check(x: Any) -> Any: - if not math.isfinite(x): - raise PydanticKnownError('finite_number') - return x diff --git a/lib/pydantic/_migration.py b/lib/pydantic/_migration.py deleted file mode 100644 index c8478a62..00000000 --- a/lib/pydantic/_migration.py +++ /dev/null @@ -1,308 +0,0 @@ -import sys -from typing import Any, Callable, Dict - -from .version import version_short - -MOVED_IN_V2 = { - 'pydantic.utils:version_info': 'pydantic.version:version_info', - 'pydantic.error_wrappers:ValidationError': 'pydantic:ValidationError', - 'pydantic.utils:to_camel': 'pydantic.alias_generators:to_pascal', - 'pydantic.utils:to_lower_camel': 'pydantic.alias_generators:to_camel', - 'pydantic:PyObject': 'pydantic.types:ImportString', - 'pydantic.types:PyObject': 'pydantic.types:ImportString', - 'pydantic.generics:GenericModel': 'pydantic.BaseModel', -} - -DEPRECATED_MOVED_IN_V2 = { - 'pydantic.tools:schema_of': 'pydantic.deprecated.tools:schema_of', - 'pydantic.tools:parse_obj_as': 'pydantic.deprecated.tools:parse_obj_as', - 'pydantic.tools:schema_json_of': 'pydantic.deprecated.tools:schema_json_of', - 'pydantic.json:pydantic_encoder': 'pydantic.deprecated.json:pydantic_encoder', - 'pydantic:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments', - 'pydantic.json:custom_pydantic_encoder': 'pydantic.deprecated.json:custom_pydantic_encoder', - 'pydantic.json:timedelta_isoformat': 'pydantic.deprecated.json:timedelta_isoformat', - 'pydantic.decorator:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments', - 'pydantic.class_validators:validator': 'pydantic.deprecated.class_validators:validator', - 'pydantic.class_validators:root_validator': 'pydantic.deprecated.class_validators:root_validator', - 'pydantic.config:BaseConfig': 'pydantic.deprecated.config:BaseConfig', - 'pydantic.config:Extra': 
'pydantic.deprecated.config:Extra', -} - -REDIRECT_TO_V1 = { - f'pydantic.utils:{obj}': f'pydantic.v1.utils:{obj}' - for obj in ( - 'deep_update', - 'GetterDict', - 'lenient_issubclass', - 'lenient_isinstance', - 'is_valid_field', - 'update_not_none', - 'import_string', - 'Representation', - 'ROOT_KEY', - 'smart_deepcopy', - 'sequence_like', - ) -} - - -REMOVED_IN_V2 = { - 'pydantic:ConstrainedBytes', - 'pydantic:ConstrainedDate', - 'pydantic:ConstrainedDecimal', - 'pydantic:ConstrainedFloat', - 'pydantic:ConstrainedFrozenSet', - 'pydantic:ConstrainedInt', - 'pydantic:ConstrainedList', - 'pydantic:ConstrainedSet', - 'pydantic:ConstrainedStr', - 'pydantic:JsonWrapper', - 'pydantic:NoneBytes', - 'pydantic:NoneStr', - 'pydantic:NoneStrBytes', - 'pydantic:Protocol', - 'pydantic:Required', - 'pydantic:StrBytes', - 'pydantic:compiled', - 'pydantic.config:get_config', - 'pydantic.config:inherit_config', - 'pydantic.config:prepare_config', - 'pydantic:create_model_from_namedtuple', - 'pydantic:create_model_from_typeddict', - 'pydantic.dataclasses:create_pydantic_model_from_dataclass', - 'pydantic.dataclasses:make_dataclass_validator', - 'pydantic.dataclasses:set_validation', - 'pydantic.datetime_parse:parse_date', - 'pydantic.datetime_parse:parse_time', - 'pydantic.datetime_parse:parse_datetime', - 'pydantic.datetime_parse:parse_duration', - 'pydantic.error_wrappers:ErrorWrapper', - 'pydantic.errors:AnyStrMaxLengthError', - 'pydantic.errors:AnyStrMinLengthError', - 'pydantic.errors:ArbitraryTypeError', - 'pydantic.errors:BoolError', - 'pydantic.errors:BytesError', - 'pydantic.errors:CallableError', - 'pydantic.errors:ClassError', - 'pydantic.errors:ColorError', - 'pydantic.errors:ConfigError', - 'pydantic.errors:DataclassTypeError', - 'pydantic.errors:DateError', - 'pydantic.errors:DateNotInTheFutureError', - 'pydantic.errors:DateNotInThePastError', - 'pydantic.errors:DateTimeError', - 'pydantic.errors:DecimalError', - 'pydantic.errors:DecimalIsNotFiniteError', - 
'pydantic.errors:DecimalMaxDigitsError', - 'pydantic.errors:DecimalMaxPlacesError', - 'pydantic.errors:DecimalWholeDigitsError', - 'pydantic.errors:DictError', - 'pydantic.errors:DurationError', - 'pydantic.errors:EmailError', - 'pydantic.errors:EnumError', - 'pydantic.errors:EnumMemberError', - 'pydantic.errors:ExtraError', - 'pydantic.errors:FloatError', - 'pydantic.errors:FrozenSetError', - 'pydantic.errors:FrozenSetMaxLengthError', - 'pydantic.errors:FrozenSetMinLengthError', - 'pydantic.errors:HashableError', - 'pydantic.errors:IPv4AddressError', - 'pydantic.errors:IPv4InterfaceError', - 'pydantic.errors:IPv4NetworkError', - 'pydantic.errors:IPv6AddressError', - 'pydantic.errors:IPv6InterfaceError', - 'pydantic.errors:IPv6NetworkError', - 'pydantic.errors:IPvAnyAddressError', - 'pydantic.errors:IPvAnyInterfaceError', - 'pydantic.errors:IPvAnyNetworkError', - 'pydantic.errors:IntEnumError', - 'pydantic.errors:IntegerError', - 'pydantic.errors:InvalidByteSize', - 'pydantic.errors:InvalidByteSizeUnit', - 'pydantic.errors:InvalidDiscriminator', - 'pydantic.errors:InvalidLengthForBrand', - 'pydantic.errors:JsonError', - 'pydantic.errors:JsonTypeError', - 'pydantic.errors:ListError', - 'pydantic.errors:ListMaxLengthError', - 'pydantic.errors:ListMinLengthError', - 'pydantic.errors:ListUniqueItemsError', - 'pydantic.errors:LuhnValidationError', - 'pydantic.errors:MissingDiscriminator', - 'pydantic.errors:MissingError', - 'pydantic.errors:NoneIsAllowedError', - 'pydantic.errors:NoneIsNotAllowedError', - 'pydantic.errors:NotDigitError', - 'pydantic.errors:NotNoneError', - 'pydantic.errors:NumberNotGeError', - 'pydantic.errors:NumberNotGtError', - 'pydantic.errors:NumberNotLeError', - 'pydantic.errors:NumberNotLtError', - 'pydantic.errors:NumberNotMultipleError', - 'pydantic.errors:PathError', - 'pydantic.errors:PathNotADirectoryError', - 'pydantic.errors:PathNotAFileError', - 'pydantic.errors:PathNotExistsError', - 'pydantic.errors:PatternError', - 
'pydantic.errors:PyObjectError', - 'pydantic.errors:PydanticTypeError', - 'pydantic.errors:PydanticValueError', - 'pydantic.errors:SequenceError', - 'pydantic.errors:SetError', - 'pydantic.errors:SetMaxLengthError', - 'pydantic.errors:SetMinLengthError', - 'pydantic.errors:StrError', - 'pydantic.errors:StrRegexError', - 'pydantic.errors:StrictBoolError', - 'pydantic.errors:SubclassError', - 'pydantic.errors:TimeError', - 'pydantic.errors:TupleError', - 'pydantic.errors:TupleLengthError', - 'pydantic.errors:UUIDError', - 'pydantic.errors:UUIDVersionError', - 'pydantic.errors:UrlError', - 'pydantic.errors:UrlExtraError', - 'pydantic.errors:UrlHostError', - 'pydantic.errors:UrlHostTldError', - 'pydantic.errors:UrlPortError', - 'pydantic.errors:UrlSchemeError', - 'pydantic.errors:UrlSchemePermittedError', - 'pydantic.errors:UrlUserInfoError', - 'pydantic.errors:WrongConstantError', - 'pydantic.main:validate_model', - 'pydantic.networks:stricturl', - 'pydantic:parse_file_as', - 'pydantic:parse_raw_as', - 'pydantic:stricturl', - 'pydantic.tools:parse_file_as', - 'pydantic.tools:parse_raw_as', - 'pydantic.types:ConstrainedBytes', - 'pydantic.types:ConstrainedDate', - 'pydantic.types:ConstrainedDecimal', - 'pydantic.types:ConstrainedFloat', - 'pydantic.types:ConstrainedFrozenSet', - 'pydantic.types:ConstrainedInt', - 'pydantic.types:ConstrainedList', - 'pydantic.types:ConstrainedSet', - 'pydantic.types:ConstrainedStr', - 'pydantic.types:JsonWrapper', - 'pydantic.types:NoneBytes', - 'pydantic.types:NoneStr', - 'pydantic.types:NoneStrBytes', - 'pydantic.types:StrBytes', - 'pydantic.typing:evaluate_forwardref', - 'pydantic.typing:AbstractSetIntStr', - 'pydantic.typing:AnyCallable', - 'pydantic.typing:AnyClassMethod', - 'pydantic.typing:CallableGenerator', - 'pydantic.typing:DictAny', - 'pydantic.typing:DictIntStrAny', - 'pydantic.typing:DictStrAny', - 'pydantic.typing:IntStr', - 'pydantic.typing:ListStr', - 'pydantic.typing:MappingIntStrAny', - 
'pydantic.typing:NoArgAnyCallable', - 'pydantic.typing:NoneType', - 'pydantic.typing:ReprArgs', - 'pydantic.typing:SetStr', - 'pydantic.typing:StrPath', - 'pydantic.typing:TupleGenerator', - 'pydantic.typing:WithArgsTypes', - 'pydantic.typing:all_literal_values', - 'pydantic.typing:display_as_type', - 'pydantic.typing:get_all_type_hints', - 'pydantic.typing:get_args', - 'pydantic.typing:get_origin', - 'pydantic.typing:get_sub_types', - 'pydantic.typing:is_callable_type', - 'pydantic.typing:is_classvar', - 'pydantic.typing:is_finalvar', - 'pydantic.typing:is_literal_type', - 'pydantic.typing:is_namedtuple', - 'pydantic.typing:is_new_type', - 'pydantic.typing:is_none_type', - 'pydantic.typing:is_typeddict', - 'pydantic.typing:is_typeddict_special', - 'pydantic.typing:is_union', - 'pydantic.typing:new_type_supertype', - 'pydantic.typing:resolve_annotations', - 'pydantic.typing:typing_base', - 'pydantic.typing:update_field_forward_refs', - 'pydantic.typing:update_model_forward_refs', - 'pydantic.utils:ClassAttribute', - 'pydantic.utils:DUNDER_ATTRIBUTES', - 'pydantic.utils:PyObjectStr', - 'pydantic.utils:ValueItems', - 'pydantic.utils:almost_equal_floats', - 'pydantic.utils:get_discriminator_alias_and_values', - 'pydantic.utils:get_model', - 'pydantic.utils:get_unique_discriminator_alias', - 'pydantic.utils:in_ipython', - 'pydantic.utils:is_valid_identifier', - 'pydantic.utils:path_type', - 'pydantic.utils:validate_field_name', - 'pydantic:validate_model', -} - - -def getattr_migration(module: str) -> Callable[[str], Any]: - """Implement PEP 562 for objects that were either moved or removed on the migration - to V2. - - Args: - module: The module name. - - Returns: - A callable that will raise an error if the object is not found. - """ - # This avoids circular import with errors.py. - from .errors import PydanticImportError - - def wrapper(name: str) -> object: - """Raise an error if the object is not found, or warn if it was moved. 
- - In case it was moved, it still returns the object. - - Args: - name: The object name. - - Returns: - The object. - """ - if name == '__path__': - raise AttributeError(f'module {module!r} has no attribute {name!r}') - - import warnings - - from ._internal._validators import import_string - - import_path = f'{module}:{name}' - if import_path in MOVED_IN_V2.keys(): - new_location = MOVED_IN_V2[import_path] - warnings.warn(f'`{import_path}` has been moved to `{new_location}`.') - return import_string(MOVED_IN_V2[import_path]) - if import_path in DEPRECATED_MOVED_IN_V2: - # skip the warning here because a deprecation warning will be raised elsewhere - return import_string(DEPRECATED_MOVED_IN_V2[import_path]) - if import_path in REDIRECT_TO_V1: - new_location = REDIRECT_TO_V1[import_path] - warnings.warn( - f'`{import_path}` has been removed. We are importing from `{new_location}` instead.' - 'See the migration guide for more details: https://docs.pydantic.dev/latest/migration/' - ) - return import_string(REDIRECT_TO_V1[import_path]) - if import_path == 'pydantic:BaseSettings': - raise PydanticImportError( - '`BaseSettings` has been moved to the `pydantic-settings` package. ' - f'See https://docs.pydantic.dev/{version_short()}/migration/#basesettings-has-moved-to-pydantic-settings ' - 'for more details.' 
- ) - if import_path in REMOVED_IN_V2: - raise PydanticImportError(f'`{import_path}` has been removed in V2.') - globals: Dict[str, Any] = sys.modules[module].__dict__ - if name in globals: - return globals[name] - raise AttributeError(f'module {module!r} has no attribute {name!r}') - - return wrapper diff --git a/lib/pydantic/alias_generators.py b/lib/pydantic/alias_generators.py deleted file mode 100644 index 155e66e0..00000000 --- a/lib/pydantic/alias_generators.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Alias generators for converting between different capitalization conventions.""" -import re - -__all__ = ('to_pascal', 'to_camel', 'to_snake') - - -def to_pascal(snake: str) -> str: - """Convert a snake_case string to PascalCase. - - Args: - snake: The string to convert. - - Returns: - The PascalCase string. - """ - camel = snake.title() - return re.sub('([0-9A-Za-z])_(?=[0-9A-Z])', lambda m: m.group(1), camel) - - -def to_camel(snake: str) -> str: - """Convert a snake_case string to camelCase. - - Args: - snake: The string to convert. - - Returns: - The converted camelCase string. - """ - camel = to_pascal(snake) - return re.sub('(^_*[A-Z])', lambda m: m.group(1).lower(), camel) - - -def to_snake(camel: str) -> str: - """Convert a PascalCase or camelCase string to snake_case. - - Args: - camel: The string to convert. - - Returns: - The converted string in snake_case. 
- """ - # Handle the sequence of uppercase letters followed by a lowercase letter - snake = re.sub(r'([A-Z]+)([A-Z][a-z])', lambda m: f'{m.group(1)}_{m.group(2)}', camel) - # Insert an underscore between a lowercase letter and an uppercase letter - snake = re.sub(r'([a-z])([A-Z])', lambda m: f'{m.group(1)}_{m.group(2)}', snake) - # Insert an underscore between a digit and an uppercase letter - snake = re.sub(r'([0-9])([A-Z])', lambda m: f'{m.group(1)}_{m.group(2)}', snake) - # Insert an underscore between a lowercase letter and a digit - snake = re.sub(r'([a-z])([0-9])', lambda m: f'{m.group(1)}_{m.group(2)}', snake) - return snake.lower() diff --git a/lib/pydantic/aliases.py b/lib/pydantic/aliases.py deleted file mode 100644 index b53557b1..00000000 --- a/lib/pydantic/aliases.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Support for alias configurations.""" -from __future__ import annotations - -import dataclasses -from typing import Callable, Literal - -from ._internal import _internal_dataclass - -__all__ = ('AliasGenerator', 'AliasPath', 'AliasChoices') - - -@dataclasses.dataclass(**_internal_dataclass.slots_true) -class AliasPath: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/alias#aliaspath-and-aliaschoices - - A data class used by `validation_alias` as a convenience to create aliases. - - Attributes: - path: A list of string or integer aliases. - """ - - path: list[int | str] - - def __init__(self, first_arg: str, *args: str | int) -> None: - self.path = [first_arg] + list(args) - - def convert_to_aliases(self) -> list[str | int]: - """Converts arguments to a list of string or integer aliases. - - Returns: - The list of aliases. - """ - return self.path - - -@dataclasses.dataclass(**_internal_dataclass.slots_true) -class AliasChoices: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/alias#aliaspath-and-aliaschoices - - A data class used by `validation_alias` as a convenience to create aliases. 
- - Attributes: - choices: A list containing a string or `AliasPath`. - """ - - choices: list[str | AliasPath] - - def __init__(self, first_choice: str | AliasPath, *choices: str | AliasPath) -> None: - self.choices = [first_choice] + list(choices) - - def convert_to_aliases(self) -> list[list[str | int]]: - """Converts arguments to a list of lists containing string or integer aliases. - - Returns: - The list of aliases. - """ - aliases: list[list[str | int]] = [] - for c in self.choices: - if isinstance(c, AliasPath): - aliases.append(c.convert_to_aliases()) - else: - aliases.append([c]) - return aliases - - -@dataclasses.dataclass(**_internal_dataclass.slots_true) -class AliasGenerator: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/alias#using-an-aliasgenerator - - A data class used by `alias_generator` as a convenience to create various aliases. - - Attributes: - alias: A callable that takes a field name and returns an alias for it. - validation_alias: A callable that takes a field name and returns a validation alias for it. - serialization_alias: A callable that takes a field name and returns a serialization alias for it. - """ - - alias: Callable[[str], str] | None = None - validation_alias: Callable[[str], str | AliasPath | AliasChoices] | None = None - serialization_alias: Callable[[str], str] | None = None - - def _generate_alias( - self, - alias_kind: Literal['alias', 'validation_alias', 'serialization_alias'], - allowed_types: tuple[type[str] | type[AliasPath] | type[AliasChoices], ...], - field_name: str, - ) -> str | AliasPath | AliasChoices | None: - """Generate an alias of the specified kind. Returns None if the alias generator is None. - - Raises: - TypeError: If the alias generator produces an invalid type. - """ - alias = None - if alias_generator := getattr(self, alias_kind): - alias = alias_generator(field_name) - if alias and not isinstance(alias, allowed_types): - raise TypeError( - f'Invalid `{alias_kind}` type. 
`{alias_kind}` generator must produce one of `{allowed_types}`' - ) - return alias - - def generate_aliases(self, field_name: str) -> tuple[str | None, str | AliasPath | AliasChoices | None, str | None]: - """Generate `alias`, `validation_alias`, and `serialization_alias` for a field. - - Returns: - A tuple of three aliases - validation, alias, and serialization. - """ - alias = self._generate_alias('alias', (str,), field_name) - validation_alias = self._generate_alias('validation_alias', (str, AliasChoices, AliasPath), field_name) - serialization_alias = self._generate_alias('serialization_alias', (str,), field_name) - - return alias, validation_alias, serialization_alias # type: ignore diff --git a/lib/pydantic/annotated_handlers.py b/lib/pydantic/annotated_handlers.py deleted file mode 100644 index 081949a8..00000000 --- a/lib/pydantic/annotated_handlers.py +++ /dev/null @@ -1,120 +0,0 @@ -"""Type annotations to use with `__get_pydantic_core_schema__` and `__get_pydantic_json_schema__`.""" -from __future__ import annotations as _annotations - -from typing import TYPE_CHECKING, Any, Union - -from pydantic_core import core_schema - -if TYPE_CHECKING: - from .json_schema import JsonSchemaMode, JsonSchemaValue - - CoreSchemaOrField = Union[ - core_schema.CoreSchema, - core_schema.ModelField, - core_schema.DataclassField, - core_schema.TypedDictField, - core_schema.ComputedField, - ] - -__all__ = 'GetJsonSchemaHandler', 'GetCoreSchemaHandler' - - -class GetJsonSchemaHandler: - """Handler to call into the next JSON schema generation function. - - Attributes: - mode: Json schema mode, can be `validation` or `serialization`. - """ - - mode: JsonSchemaMode - - def __call__(self, __core_schema: CoreSchemaOrField) -> JsonSchemaValue: - """Call the inner handler and get the JsonSchemaValue it returns. 
- This will call the next JSON schema modifying function up until it calls - into `pydantic.json_schema.GenerateJsonSchema`, which will raise a - `pydantic.errors.PydanticInvalidForJsonSchema` error if it cannot generate - a JSON schema. - - Args: - __core_schema: A `pydantic_core.core_schema.CoreSchema`. - - Returns: - JsonSchemaValue: The JSON schema generated by the inner JSON schema modify - functions. - """ - raise NotImplementedError - - def resolve_ref_schema(self, __maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue: - """Get the real schema for a `{"$ref": ...}` schema. - If the schema given is not a `$ref` schema, it will be returned as is. - This means you don't have to check before calling this function. - - Args: - __maybe_ref_json_schema: A JsonSchemaValue which may be a `$ref` schema. - - Raises: - LookupError: If the ref is not found. - - Returns: - JsonSchemaValue: A JsonSchemaValue that has no `$ref`. - """ - raise NotImplementedError - - -class GetCoreSchemaHandler: - """Handler to call into the next CoreSchema schema generation function.""" - - def __call__(self, __source_type: Any) -> core_schema.CoreSchema: - """Call the inner handler and get the CoreSchema it returns. - This will call the next CoreSchema modifying function up until it calls - into Pydantic's internal schema generation machinery, which will raise a - `pydantic.errors.PydanticSchemaGenerationError` error if it cannot generate - a CoreSchema for the given source type. - - Args: - __source_type: The input type. - - Returns: - CoreSchema: The `pydantic-core` CoreSchema generated. - """ - raise NotImplementedError - - def generate_schema(self, __source_type: Any) -> core_schema.CoreSchema: - """Generate a schema unrelated to the current context. - Use this function if e.g. you are handling schema generation for a sequence - and want to generate a schema for its items. 
- Otherwise, you may end up doing something like applying a `min_length` constraint - that was intended for the sequence itself to its items! - - Args: - __source_type: The input type. - - Returns: - CoreSchema: The `pydantic-core` CoreSchema generated. - """ - raise NotImplementedError - - def resolve_ref_schema(self, __maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema: - """Get the real schema for a `definition-ref` schema. - If the schema given is not a `definition-ref` schema, it will be returned as is. - This means you don't have to check before calling this function. - - Args: - __maybe_ref_schema: A `CoreSchema`, `ref`-based or not. - - Raises: - LookupError: If the `ref` is not found. - - Returns: - A concrete `CoreSchema`. - """ - raise NotImplementedError - - @property - def field_name(self) -> str | None: - """Get the name of the closest field to this validator.""" - raise NotImplementedError - - def _get_types_namespace(self) -> dict[str, Any] | None: - """Internal method used during type resolution for serializer annotations.""" - raise NotImplementedError diff --git a/lib/pydantic/v1/annotated_types.py b/lib/pydantic/annotated_types.py similarity index 100% rename from lib/pydantic/v1/annotated_types.py rename to lib/pydantic/annotated_types.py diff --git a/lib/pydantic/class_validators.py b/lib/pydantic/class_validators.py index 2ff72ae5..87190610 100644 --- a/lib/pydantic/class_validators.py +++ b/lib/pydantic/class_validators.py @@ -1,4 +1,342 @@ -"""`class_validators` module is a backport module from V1.""" -from ._migration import getattr_migration +import warnings +from collections import ChainMap +from functools import wraps +from itertools import chain +from types import FunctionType +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload -__getattr__ = getattr_migration(__name__) +from .errors import ConfigError +from .typing import AnyCallable +from .utils import 
ROOT_KEY, in_ipython + +if TYPE_CHECKING: + from .typing import AnyClassMethod + + +class Validator: + __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure' + + def __init__( + self, + func: AnyCallable, + pre: bool = False, + each_item: bool = False, + always: bool = False, + check_fields: bool = False, + skip_on_failure: bool = False, + ): + self.func = func + self.pre = pre + self.each_item = each_item + self.always = always + self.check_fields = check_fields + self.skip_on_failure = skip_on_failure + + +if TYPE_CHECKING: + from inspect import Signature + + from .config import BaseConfig + from .fields import ModelField + from .types import ModelOrDc + + ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any] + ValidatorsList = List[ValidatorCallable] + ValidatorListDict = Dict[str, List[Validator]] + +_FUNCS: Set[str] = set() +VALIDATOR_CONFIG_KEY = '__validator_config__' +ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__' + + +def validator( + *fields: str, + pre: bool = False, + each_item: bool = False, + always: bool = False, + check_fields: bool = True, + whole: bool = None, + allow_reuse: bool = False, +) -> Callable[[AnyCallable], 'AnyClassMethod']: + """ + Decorate methods on the class indicating that they should be used to validate fields + :param fields: which field(s) the method should be called on + :param pre: whether or not this validator should be called before the standard validators (else after) + :param each_item: for complex objects (sets, lists etc.) 
whether to validate individual elements rather than the + whole object + :param always: whether this method and other validators should be called even if the value is missing + :param check_fields: whether to check that the fields actually exist on the model + :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function + """ + if not fields: + raise ConfigError('validator with no fields specified') + elif isinstance(fields[0], FunctionType): + raise ConfigError( + "validators should be used with fields and keyword arguments, not bare. " # noqa: Q000 + "E.g. usage should be `@validator('', ...)`" + ) + elif not all(isinstance(field, str) for field in fields): + raise ConfigError( + "validator fields should be passed as separate string args. " # noqa: Q000 + "E.g. usage should be `@validator('', '', ...)`" + ) + + if whole is not None: + warnings.warn( + 'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead', + DeprecationWarning, + ) + assert each_item is False, '"each_item" and "whole" conflict, remove "whole"' + each_item = not whole + + def dec(f: AnyCallable) -> 'AnyClassMethod': + f_cls = _prepare_validator(f, allow_reuse) + setattr( + f_cls, + VALIDATOR_CONFIG_KEY, + ( + fields, + Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields), + ), + ) + return f_cls + + return dec + + +@overload +def root_validator(_func: AnyCallable) -> 'AnyClassMethod': + ... + + +@overload +def root_validator( + *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False +) -> Callable[[AnyCallable], 'AnyClassMethod']: + ... 
+ + +def root_validator( + _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False +) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]: + """ + Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either + before or after standard model parsing/validation is performed. + """ + if _func: + f_cls = _prepare_validator(_func, allow_reuse) + setattr( + f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure) + ) + return f_cls + + def dec(f: AnyCallable) -> 'AnyClassMethod': + f_cls = _prepare_validator(f, allow_reuse) + setattr( + f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure) + ) + return f_cls + + return dec + + +def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod': + """ + Avoid validators with duplicated names since without this, validators can be overwritten silently + which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False. + """ + f_cls = function if isinstance(function, classmethod) else classmethod(function) + if not in_ipython() and not allow_reuse: + ref = f_cls.__func__.__module__ + '.' 
+ f_cls.__func__.__qualname__ + if ref in _FUNCS: + raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`') + _FUNCS.add(ref) + return f_cls + + +class ValidatorGroup: + def __init__(self, validators: 'ValidatorListDict') -> None: + self.validators = validators + self.used_validators = {'*'} + + def get_validators(self, name: str) -> Optional[Dict[str, Validator]]: + self.used_validators.add(name) + validators = self.validators.get(name, []) + if name != ROOT_KEY: + validators += self.validators.get('*', []) + if validators: + return {v.func.__name__: v for v in validators} + else: + return None + + def check_for_unused(self) -> None: + unused_validators = set( + chain.from_iterable( + (v.func.__name__ for v in self.validators[f] if v.check_fields) + for f in (self.validators.keys() - self.used_validators) + ) + ) + if unused_validators: + fn = ', '.join(unused_validators) + raise ConfigError( + f"Validators defined with incorrect fields: {fn} " # noqa: Q000 + f"(use check_fields=False if you're inheriting from the model and intended this)" + ) + + +def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]: + validators: Dict[str, List[Validator]] = {} + for var_name, value in namespace.items(): + validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None) + if validator_config: + fields, v = validator_config + for field in fields: + if field in validators: + validators[field].append(v) + else: + validators[field] = [v] + return validators + + +def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]: + from inspect import signature + + pre_validators: List[AnyCallable] = [] + post_validators: List[Tuple[bool, AnyCallable]] = [] + for name, value in namespace.items(): + validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None) + if validator_config: + sig = signature(validator_config.func) + args = 
list(sig.parameters.keys()) + if args[0] == 'self': + raise ConfigError( + f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, ' + f'should be: (cls, values).' + ) + if len(args) != 2: + raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).') + # check function signature + if validator_config.pre: + pre_validators.append(validator_config.func) + else: + post_validators.append((validator_config.skip_on_failure, validator_config.func)) + return pre_validators, post_validators + + +def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict': + for field, field_validators in base_validators.items(): + if field not in validators: + validators[field] = [] + validators[field] += field_validators + return validators + + +def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable': + """ + Make a generic function which calls a validator with the right arguments. + + Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow, + hence this laborious way of doing things. + + It's done like this so validators don't all need **kwargs in their signature, eg. any combination of + the arguments "values", "fields" and/or "config" are permitted. + """ + from inspect import signature + + sig = signature(validator) + args = list(sig.parameters.keys()) + first_arg = args.pop(0) + if first_arg == 'self': + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, ' + f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.' 
+ ) + elif first_arg == 'cls': + # assume the second argument is value + return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) + else: + # assume the first argument was value which has already been removed + return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) + + +def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList': + return [make_generic_validator(f) for f in v_funcs if f] + + +all_kwargs = {'values', 'field', 'config'} + + +def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': + # assume the first argument is value + has_kwargs = False + if 'kwargs' in args: + has_kwargs = True + args -= {'kwargs'} + + if not args.issubset(all_kwargs): + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, should be: ' + f'(cls, value, values, config, field), "values", "config" and "field" are all optional.' + ) + + if has_kwargs: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) + elif args == set(): + return lambda cls, v, values, field, config: validator(cls, v) + elif args == {'values'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values) + elif args == {'field'}: + return lambda cls, v, values, field, config: validator(cls, v, field=field) + elif args == {'config'}: + return lambda cls, v, values, field, config: validator(cls, v, config=config) + elif args == {'values', 'field'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field) + elif args == {'values', 'config'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config) + elif args == {'field', 'config'}: + return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config) + else: + # args == {'values', 'field', 'config'} + return lambda cls, v, values, field, config: validator(cls, v, 
values=values, field=field, config=config) + + +def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': + has_kwargs = False + if 'kwargs' in args: + has_kwargs = True + args -= {'kwargs'} + + if not args.issubset(all_kwargs): + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, should be: ' + f'(value, values, config, field), "values", "config" and "field" are all optional.' + ) + + if has_kwargs: + return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) + elif args == set(): + return lambda cls, v, values, field, config: validator(v) + elif args == {'values'}: + return lambda cls, v, values, field, config: validator(v, values=values) + elif args == {'field'}: + return lambda cls, v, values, field, config: validator(v, field=field) + elif args == {'config'}: + return lambda cls, v, values, field, config: validator(v, config=config) + elif args == {'values', 'field'}: + return lambda cls, v, values, field, config: validator(v, values=values, field=field) + elif args == {'values', 'config'}: + return lambda cls, v, values, field, config: validator(v, values=values, config=config) + elif args == {'field', 'config'}: + return lambda cls, v, values, field, config: validator(v, field=field, config=config) + else: + # args == {'values', 'field', 'config'} + return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) + + +def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']: + all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__]) # type: ignore[arg-type,var-annotated] + return { + k: v + for k, v in all_attributes.items() + if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY) + } diff --git a/lib/pydantic/color.py b/lib/pydantic/color.py index 108bb8fa..6fdc9fb1 100644 --- a/lib/pydantic/color.py +++ b/lib/pydantic/color.py @@ -1,28 +1,22 @@ 
-"""Color definitions are used as per the CSS3 -[CSS Color Module Level 3](http://www.w3.org/TR/css3-color/#svg-color) specification. +""" +Color definitions are used as per CSS3 specification: +http://www.w3.org/TR/css3-color/#svg-color A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`. -In these cases the _last_ color when sorted alphabetically takes preferences, -eg. `Color((0, 255, 255)).as_named() == 'cyan'` because "cyan" comes after "aqua". - -Warning: Deprecated - The `Color` class is deprecated, use `pydantic_extra_types` instead. - See [`pydantic-extra-types.Color`](../usage/types/extra_types/color_types.md) - for more information. +In these cases the LAST color when sorted alphabetically takes preferences, +eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua". """ import math import re from colorsys import hls_to_rgb, rgb_to_hls -from typing import Any, Callable, Optional, Tuple, Type, Union, cast +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast -from pydantic_core import CoreSchema, PydanticCustomError, core_schema -from typing_extensions import deprecated +from .errors import ColorError +from .utils import Representation, almost_equal_floats -from ._internal import _repr -from ._internal._schema_generation_shared import GetJsonSchemaHandler as _GetJsonSchemaHandler -from .json_schema import JsonSchemaValue -from .warnings import PydanticDeprecatedSince20 +if TYPE_CHECKING: + from .typing import CallableGenerator, ReprArgs ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]] ColorType = Union[ColorTuple, str] @@ -30,7 +24,9 @@ HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, flo class RGBA: - """Internal use only as a representation of a color.""" + """ + Internal use only as a representation of a color. 
+ """ __slots__ = 'r', 'g', 'b', 'alpha', '_tuple' @@ -47,35 +43,24 @@ class RGBA: # these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached -_r_255 = r'(\d{1,3}(?:\.\d+)?)' -_r_comma = r'\s*,\s*' -_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)' -_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?' -_r_sl = r'(\d{1,3}(?:\.\d+)?)%' r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*' r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*' -# CSS3 RGB examples: rgb(0, 0, 0), rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 50%) -r_rgb = rf'\s*rgba?\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}(?:{_r_comma}{_r_alpha})?\s*\)\s*' -# CSS3 HSL examples: hsl(270, 60%, 50%), hsla(270, 60%, 50%, 0.5), hsla(270, 60%, 50%, 50%) -r_hsl = rf'\s*hsla?\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}(?:{_r_comma}{_r_alpha})?\s*\)\s*' -# CSS4 RGB examples: rgb(0 0 0), rgb(0 0 0 / 0.5), rgb(0 0 0 / 50%), rgba(0 0 0 / 50%) -r_rgb_v4_style = rf'\s*rgba?\(\s*{_r_255}\s+{_r_255}\s+{_r_255}(?:\s*/\s*{_r_alpha})?\s*\)\s*' -# CSS4 HSL examples: hsl(270 60% 50%), hsl(270 60% 50% / 0.5), hsl(270 60% 50% / 50%), hsla(270 60% 50% / 50%) -r_hsl_v4_style = rf'\s*hsla?\(\s*{_r_h}\s+{_r_sl}\s+{_r_sl}(?:\s*/\s*{_r_alpha})?\s*\)\s*' +_r_255 = r'(\d{1,3}(?:\.\d+)?)' +_r_comma = r'\s*,\s*' +r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*' +_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)' +r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*' +_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?' 
+_r_sl = r'(\d{1,3}(?:\.\d+)?)%' +r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*' +r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*' # colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'} rads = 2 * math.pi -@deprecated( - 'The `Color` class is deprecated, use `pydantic_extra_types` instead. ' - 'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_color/.', - category=PydanticDeprecatedSince20, -) -class Color(_repr.Representation): - """Represents a color.""" - +class Color(Representation): __slots__ = '_original', '_rgba' def __init__(self, value: ColorType) -> None: @@ -89,39 +74,22 @@ class Color(_repr.Representation): self._rgba = value._rgba value = value._original else: - raise PydanticCustomError( - 'color_error', 'value is not a valid color: value must be a tuple, list or string' - ) + raise ColorError(reason='value must be a tuple, list or string') # if we've got here value must be a valid color self._original = value @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: core_schema.CoreSchema, handler: _GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = {} + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='color') - return field_schema def original(self) -> ColorType: - """Original value passed to `Color`.""" + """ + Original value passed to Color + """ return self._original def as_named(self, *, fallback: bool = False) -> str: - """Returns the name of the color if it can be found in `COLORS_BY_VALUE` dictionary, - otherwise returns the hexadecimal representation of the color or raises `ValueError`. - - Args: - fallback: If True, falls back to returning the hexadecimal representation of - the color instead of raising a ValueError when no named color is found. 
- - Returns: - The name of the color, or the hexadecimal representation of the color. - - Raises: - ValueError: When no named color is found and fallback is `False`. - """ if self._rgba.alpha is None: rgb = cast(Tuple[int, int, int], self.as_rgb_tuple()) try: @@ -135,13 +103,9 @@ class Color(_repr.Representation): return self.as_hex() def as_hex(self) -> str: - """Returns the hexadecimal representation of the color. - - Hex string representing the color can be 3, 4, 6, or 8 characters depending on whether the string + """ + Hex string representing the color can be 3, 4, 6 or 8 characters depending on whether the string a "short" representation of the color is possible and whether there's an alpha channel. - - Returns: - The hexadecimal representation of the color. """ values = [float_to_255(c) for c in self._rgba[:3]] if self._rgba.alpha is not None: @@ -153,7 +117,9 @@ class Color(_repr.Representation): return '#' + as_hex def as_rgb(self) -> str: - """Color as an `rgb(, , )` or `rgba(, , , )` string.""" + """ + Color as an rgb(, , ) or rgba(, , , ) string. + """ if self._rgba.alpha is None: return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})' else: @@ -163,18 +129,14 @@ class Color(_repr.Representation): ) def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple: - """Returns the color as an RGB or RGBA tuple. + """ + Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is + in the range 0 to 1. - Args: - alpha: Whether to include the alpha channel. There are three options for this input: - - - `None` (default): Include alpha only if it's set. (e.g. not `None`) - - `True`: Always include alpha. - - `False`: Always omit alpha. - - Returns: - A tuple that contains the values of the red, green, and blue channels in the range 0 to 255. - If alpha is included, it is in the range 0 to 1. 
+ :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. not None) + True - always include alpha, + False - always omit alpha, """ r, g, b = (float_to_255(c) for c in self._rgba[:3]) if alpha is None: @@ -189,7 +151,9 @@ class Color(_repr.Representation): return r, g, b def as_hsl(self) -> str: - """Color as an `hsl(, , )` or `hsl(, , , )` string.""" + """ + Color as an hsl(, , ) or hsl(, , , ) string. + """ if self._rgba.alpha is None: h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})' @@ -198,23 +162,18 @@ class Color(_repr.Representation): return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})' def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple: - """Returns the color as an HSL or HSLA tuple. - - Args: - alpha: Whether to include the alpha channel. - - - `None` (default): Include the alpha channel only if it's set (e.g. not `None`). - - `True`: Always include alpha. - - `False`: Always omit alpha. - - Returns: - The color as a tuple of hue, saturation, lightness, and alpha (if included). - All elements are in the range 0 to 1. - - Note: - This is HSL as used in HTML and most other places, not HLS as used in Python's `colorsys`. """ - h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) # noqa: E741 + Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in + the range 0 to 1. + + NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys. + + :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. 
not None) + True - always include alpha, + False - always omit alpha, + """ + h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) if alpha is None: if self._rgba.alpha is None: return h, s, l @@ -230,22 +189,14 @@ class Color(_repr.Representation): return 1 if self._rgba.alpha is None else self._rgba.alpha @classmethod - def __get_pydantic_core_schema__( - cls, source: Type[Any], handler: Callable[[Any], CoreSchema] - ) -> core_schema.CoreSchema: - return core_schema.with_info_plain_validator_function( - cls._validate, serialization=core_schema.to_string_ser_schema() - ) - - @classmethod - def _validate(cls, __input_value: Any, _: Any) -> 'Color': - return cls(__input_value) + def __get_validators__(cls) -> 'CallableGenerator': + yield cls def __str__(self) -> str: return self.as_named(fallback=True) - def __repr_args__(self) -> '_repr.ReprArgs': - return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] + def __repr_args__(self) -> 'ReprArgs': + return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] # type: ignore def __eq__(self, other: Any) -> bool: return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple() @@ -255,16 +206,8 @@ class Color(_repr.Representation): def parse_tuple(value: Tuple[Any, ...]) -> RGBA: - """Parse a tuple or list to get RGBA values. - - Args: - value: A tuple or list. - - Returns: - An `RGBA` tuple parsed from the input tuple. - - Raises: - PydanticCustomError: If tuple is not valid. + """ + Parse a tuple or list as a color. 
""" if len(value) == 3: r, g, b = (parse_color_value(v) for v in value) @@ -273,28 +216,17 @@ def parse_tuple(value: Tuple[Any, ...]) -> RGBA: r, g, b = (parse_color_value(v) for v in value[:3]) return RGBA(r, g, b, parse_float_alpha(value[3])) else: - raise PydanticCustomError('color_error', 'value is not a valid color: tuples must have length 3 or 4') + raise ColorError(reason='tuples must have length 3 or 4') def parse_str(value: str) -> RGBA: - """Parse a string representing a color to an RGBA tuple. - - Possible formats for the input string include: - - * named color, see `COLORS_BY_NAME` + """ + Parse a string to an RGBA tuple, trying the following formats (in this order): + * named color, see COLORS_BY_NAME below * hex short eg. `fff` (prefix can be `#`, `0x` or nothing) * hex long eg. `ffffff` (prefix can be `#`, `0x` or nothing) - * `rgb(, , )` + * `rgb(, , ) ` * `rgba(, , , )` - - Args: - value: A string representing a color. - - Returns: - An `RGBA` tuple parsed from the input string. - - Raises: - ValueError: If the input string cannot be parsed to an RGBA tuple. 
""" value_lower = value.lower() try: @@ -324,70 +256,49 @@ def parse_str(value: str) -> RGBA: alpha = None return ints_to_rgba(r, g, b, alpha) - m = re.fullmatch(r_rgb, value_lower) or re.fullmatch(r_rgb_v4_style, value_lower) + m = re.fullmatch(r_rgb, value_lower) + if m: + return ints_to_rgba(*m.groups(), None) # type: ignore + + m = re.fullmatch(r_rgba, value_lower) if m: return ints_to_rgba(*m.groups()) # type: ignore - m = re.fullmatch(r_hsl, value_lower) or re.fullmatch(r_hsl_v4_style, value_lower) + m = re.fullmatch(r_hsl, value_lower) if m: - return parse_hsl(*m.groups()) # type: ignore + h, h_units, s, l_ = m.groups() + return parse_hsl(h, h_units, s, l_) - raise PydanticCustomError('color_error', 'value is not a valid color: string not recognised as a valid color') + m = re.fullmatch(r_hsla, value_lower) + if m: + h, h_units, s, l_, a = m.groups() + return parse_hsl(h, h_units, s, l_, parse_float_alpha(a)) + + raise ColorError(reason='string not recognised as a valid color') -def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float] = None) -> RGBA: - """Converts integer or string values for RGB color and an optional alpha value to an `RGBA` object. - - Args: - r: An integer or string representing the red color value. - g: An integer or string representing the green color value. - b: An integer or string representing the blue color value. - alpha: A float representing the alpha value. Defaults to None. - - Returns: - An instance of the `RGBA` class with the corresponding color and alpha values. - """ +def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA: return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha)) def parse_color_value(value: Union[int, str], max_val: int = 255) -> float: - """Parse the color value provided and return a number between 0 and 1. - - Args: - value: An integer or string color value. 
- max_val: Maximum range value. Defaults to 255. - - Raises: - PydanticCustomError: If the value is not a valid color. - - Returns: - A number between 0 and 1. + """ + Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number + in the range 0 to 1 """ try: color = float(value) except ValueError: - raise PydanticCustomError('color_error', 'value is not a valid color: color values must be a valid number') + raise ColorError(reason='color values must be a valid number') if 0 <= color <= max_val: return color / max_val else: - raise PydanticCustomError( - 'color_error', - 'value is not a valid color: color values must be in the range 0 to {max_val}', - {'max_val': max_val}, - ) + raise ColorError(reason=f'color values must be in the range 0 to {max_val}') def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]: - """Parse an alpha value checking it's a valid float in the range 0 to 1. - - Args: - value: The input value to parse. - - Returns: - The parsed value as a float, or `None` if the value was None or equal 1. - - Raises: - PydanticCustomError: If the input value cannot be successfully parsed as a float in the expected range. 
+ """ + Parse a value checking it's a valid float in the range 0 to 1 """ if value is None: return None @@ -397,28 +308,19 @@ def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]: else: alpha = float(value) except ValueError: - raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be a valid float') + raise ColorError(reason='alpha values must be a valid float') - if math.isclose(alpha, 1): + if almost_equal_floats(alpha, 1): return None elif 0 <= alpha <= 1: return alpha else: - raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be in the range 0 to 1') + raise ColorError(reason='alpha values must be in the range 0 to 1') def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA: - """Parse raw hue, saturation, lightness, and alpha values and convert to RGBA. - - Args: - h: The hue value. - h_units: The unit for hue value. - sat: The saturation value. - light: The lightness value. - alpha: Alpha value. - - Returns: - An instance of `RGBA`. + """ + Parse raw hue, saturation, lightness and alpha values and convert to RGBA. """ s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100) @@ -432,21 +334,10 @@ def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] h_value = h_value % 1 r, g, b = hls_to_rgb(h_value, l_value, s_value) - return RGBA(r, g, b, parse_float_alpha(alpha)) + return RGBA(r, g, b, alpha) def float_to_255(c: float) -> int: - """Converts a float value between 0 and 1 (inclusive) to an integer between 0 and 255 (inclusive). - - Args: - c: The float value to be converted. Must be between 0 and 1 (inclusive). - - Returns: - The integer equivalent of the given float value rounded to the nearest whole number. - - Raises: - ValueError: If the given float value is outside the acceptable range of 0 to 1 (inclusive). 
- """ return int(round(c * 255)) diff --git a/lib/pydantic/config.py b/lib/pydantic/config.py index 6b22586b..74687ca0 100644 --- a/lib/pydantic/config.py +++ b/lib/pydantic/config.py @@ -1,912 +1,192 @@ -"""Configuration for Pydantic models.""" -from __future__ import annotations as _annotations +import json +from enum import Enum +from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Type, Union +from typing_extensions import Literal, Protocol -from typing_extensions import Literal, TypeAlias, TypedDict - -from ._migration import getattr_migration -from .aliases import AliasGenerator +from .typing import AnyArgTCallable, AnyCallable +from .utils import GetterDict +from .version import compiled if TYPE_CHECKING: - from ._internal._generate_schema import GenerateSchema as _GenerateSchema - -__all__ = ('ConfigDict',) - - -JsonValue: TypeAlias = Union[int, float, str, bool, None, List['JsonValue'], 'JsonDict'] -JsonDict: TypeAlias = Dict[str, JsonValue] - -JsonEncoder = Callable[[Any], Any] - -JsonSchemaExtraCallable: TypeAlias = Union[ - Callable[[JsonDict], None], - Callable[[JsonDict, Type[Any]], None], -] - -ExtraValues = Literal['allow', 'ignore', 'forbid'] - - -class ConfigDict(TypedDict, total=False): - """A TypedDict for configuring Pydantic behaviour.""" - - title: str | None - """The title for the generated JSON schema, defaults to the model's name""" - - str_to_lower: bool - """Whether to convert all characters to lowercase for str types. Defaults to `False`.""" - - str_to_upper: bool - """Whether to convert all characters to uppercase for str types. Defaults to `False`.""" - str_strip_whitespace: bool - """Whether to strip leading and trailing whitespace for str types.""" - - str_min_length: int - """The minimum length for str types. Defaults to `None`.""" - - str_max_length: int | None - """The maximum length for str types. 
Defaults to `None`.""" - - extra: ExtraValues | None - """ - Whether to ignore, allow, or forbid extra attributes during model initialization. Defaults to `'ignore'`. - - You can configure how pydantic handles the attributes that are not defined in the model: - - * `allow` - Allow any extra attributes. - * `forbid` - Forbid any extra attributes. - * `ignore` - Ignore any extra attributes. - - ```py - from pydantic import BaseModel, ConfigDict - - - class User(BaseModel): - model_config = ConfigDict(extra='ignore') # (1)! - - name: str - - - user = User(name='John Doe', age=20) # (2)! - print(user) - #> name='John Doe' - ``` - - 1. This is the default behaviour. - 2. The `age` argument is ignored. - - Instead, with `extra='allow'`, the `age` argument is included: - - ```py - from pydantic import BaseModel, ConfigDict - - - class User(BaseModel): - model_config = ConfigDict(extra='allow') - - name: str - - - user = User(name='John Doe', age=20) # (1)! - print(user) - #> name='John Doe' age=20 - ``` - - 1. The `age` argument is included. - - With `extra='forbid'`, an error is raised: - - ```py - from pydantic import BaseModel, ConfigDict, ValidationError - - - class User(BaseModel): - model_config = ConfigDict(extra='forbid') - - name: str - - - try: - User(name='John Doe', age=20) - except ValidationError as e: - print(e) - ''' - 1 validation error for User - age - Extra inputs are not permitted [type=extra_forbidden, input_value=20, input_type=int] - ''' - ``` - """ - - frozen: bool - """ - Whether models are faux-immutable, i.e. whether `__setattr__` is allowed, and also generates - a `__hash__()` method for the model. This makes instances of the model potentially hashable if all the - attributes are hashable. Defaults to `False`. - - Note: - On V1, the inverse of this setting was called `allow_mutation`, and was `True` by default. 
- """ - - populate_by_name: bool - """ - Whether an aliased field may be populated by its name as given by the model - attribute, as well as the alias. Defaults to `False`. - - Note: - The name of this configuration setting was changed in **v2.0** from - `allow_population_by_field_name` to `populate_by_name`. - - ```py - from pydantic import BaseModel, ConfigDict, Field - - - class User(BaseModel): - model_config = ConfigDict(populate_by_name=True) - - name: str = Field(alias='full_name') # (1)! - age: int - - - user = User(full_name='John Doe', age=20) # (2)! - print(user) - #> name='John Doe' age=20 - user = User(name='John Doe', age=20) # (3)! - print(user) - #> name='John Doe' age=20 - ``` - - 1. The field `'name'` has an alias `'full_name'`. - 2. The model is populated by the alias `'full_name'`. - 3. The model is populated by the field name `'name'`. - """ - - use_enum_values: bool - """ - Whether to populate models with the `value` property of enums, rather than the raw enum. - This may be useful if you want to serialize `model.model_dump()` later. Defaults to `False`. - - !!! note - If you have an `Optional[Enum]` value that you set a default for, you need to use `validate_default=True` - for said Field to ensure that the `use_enum_values` flag takes effect on the default, as extracting an - enum's value occurs during validation, not serialization. 
- - ```py - from enum import Enum - from typing import Optional - - from pydantic import BaseModel, ConfigDict, Field - - - class SomeEnum(Enum): - FOO = 'foo' - BAR = 'bar' - BAZ = 'baz' - - - class SomeModel(BaseModel): - model_config = ConfigDict(use_enum_values=True) - - some_enum: SomeEnum - another_enum: Optional[SomeEnum] = Field(default=SomeEnum.FOO, validate_default=True) - - - model1 = SomeModel(some_enum=SomeEnum.BAR) - print(model1.model_dump()) - # {'some_enum': 'bar', 'another_enum': 'foo'} - - model2 = SomeModel(some_enum=SomeEnum.BAR, another_enum=SomeEnum.BAZ) - print(model2.model_dump()) - #> {'some_enum': 'bar', 'another_enum': 'baz'} - ``` - """ - - validate_assignment: bool - """ - Whether to validate the data when the model is changed. Defaults to `False`. - - The default behavior of Pydantic is to validate the data when the model is created. - - In case the user changes the data after the model is created, the model is _not_ revalidated. - - ```py - from pydantic import BaseModel - - class User(BaseModel): - name: str - - user = User(name='John Doe') # (1)! - print(user) - #> name='John Doe' - user.name = 123 # (1)! - print(user) - #> name=123 - ``` - - 1. The validation happens only when the model is created. - 2. The validation does not happen when the data is changed. - - In case you want to revalidate the model when the data is changed, you can use `validate_assignment=True`: - - ```py - from pydantic import BaseModel, ValidationError - - class User(BaseModel, validate_assignment=True): # (1)! - name: str - - user = User(name='John Doe') # (2)! - print(user) - #> name='John Doe' - try: - user.name = 123 # (3)! - except ValidationError as e: - print(e) - ''' - 1 validation error for User - name - Input should be a valid string [type=string_type, input_value=123, input_type=int] - ''' - ``` - - 1. You can either use class keyword arguments, or `model_config` to set `validate_assignment=True`. - 2. 
The validation happens when the model is created. - 3. The validation _also_ happens when the data is changed. - """ - - arbitrary_types_allowed: bool - """ - Whether arbitrary types are allowed for field types. Defaults to `False`. - - ```py - from pydantic import BaseModel, ConfigDict, ValidationError - - # This is not a pydantic model, it's an arbitrary class - class Pet: - def __init__(self, name: str): - self.name = name - - class Model(BaseModel): - model_config = ConfigDict(arbitrary_types_allowed=True) - - pet: Pet - owner: str - - pet = Pet(name='Hedwig') - # A simple check of instance type is used to validate the data - model = Model(owner='Harry', pet=pet) - print(model) - #> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry' - print(model.pet) - #> <__main__.Pet object at 0x0123456789ab> - print(model.pet.name) - #> Hedwig - print(type(model.pet)) - #> - try: - # If the value is not an instance of the type, it's invalid - Model(owner='Harry', pet='Hedwig') - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - pet - Input should be an instance of Pet [type=is_instance_of, input_value='Hedwig', input_type=str] - ''' - - # Nothing in the instance of the arbitrary type is checked - # Here name probably should have been a str, but it's not validated - pet2 = Pet(name=42) - model2 = Model(owner='Harry', pet=pet2) - print(model2) - #> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry' - print(model2.pet) - #> <__main__.Pet object at 0x0123456789ab> - print(model2.pet.name) - #> 42 - print(type(model2.pet)) - #> - ``` - """ - - from_attributes: bool - """ - Whether to build models and look up discriminators of tagged unions using python object attributes. - """ - - loc_by_alias: bool - """Whether to use the actual key provided in the data (e.g. alias) for error `loc`s rather than the field's name. 
Defaults to `True`.""" - - alias_generator: Callable[[str], str] | AliasGenerator | None - """ - A callable that takes a field name and returns an alias for it - or an instance of [`AliasGenerator`][pydantic.aliases.AliasGenerator]. Defaults to `None`. - - When using a callable, the alias generator is used for both validation and serialization. - If you want to use different alias generators for validation and serialization, you can use - [`AliasGenerator`][pydantic.aliases.AliasGenerator] instead. - - If data source field names do not match your code style (e. g. CamelCase fields), - you can automatically generate aliases using `alias_generator`. Here's an example with - a basic callable: - - ```py - from pydantic import BaseModel, ConfigDict - from pydantic.alias_generators import to_pascal - - class Voice(BaseModel): - model_config = ConfigDict(alias_generator=to_pascal) - - name: str - language_code: str - - voice = Voice(Name='Filiz', LanguageCode='tr-TR') - print(voice.language_code) - #> tr-TR - print(voice.model_dump(by_alias=True)) - #> {'Name': 'Filiz', 'LanguageCode': 'tr-TR'} - ``` - - If you want to use different alias generators for validation and serialization, you can use - [`AliasGenerator`][pydantic.aliases.AliasGenerator]. 
- - ```py - from pydantic import AliasGenerator, BaseModel, ConfigDict - from pydantic.alias_generators import to_camel, to_pascal - - class Athlete(BaseModel): - first_name: str - last_name: str - sport: str - - model_config = ConfigDict( - alias_generator=AliasGenerator( - validation_alias=to_camel, - serialization_alias=to_pascal, - ) + from typing import overload + + from .fields import ModelField + from .main import BaseModel + + ConfigType = Type['BaseConfig'] + + class SchemaExtraCallable(Protocol): + @overload + def __call__(self, schema: Dict[str, Any]) -> None: + pass + + @overload + def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None: + pass + +else: + SchemaExtraCallable = Callable[..., None] + +__all__ = 'BaseConfig', 'ConfigDict', 'get_config', 'Extra', 'inherit_config', 'prepare_config' + + +class Extra(str, Enum): + allow = 'allow' + ignore = 'ignore' + forbid = 'forbid' + + +# https://github.com/cython/cython/issues/4003 +# Will be fixed with Cython 3 but still in alpha right now +if not compiled: + from typing_extensions import TypedDict + + class ConfigDict(TypedDict, total=False): + title: Optional[str] + anystr_lower: bool + anystr_strip_whitespace: bool + min_anystr_length: int + max_anystr_length: Optional[int] + validate_all: bool + extra: Extra + allow_mutation: bool + frozen: bool + allow_population_by_field_name: bool + use_enum_values: bool + fields: Dict[str, Union[str, Dict[str, str]]] + validate_assignment: bool + error_msg_templates: Dict[str, str] + arbitrary_types_allowed: bool + orm_mode: bool + getter_dict: Type[GetterDict] + alias_generator: Optional[Callable[[str], str]] + keep_untouched: Tuple[type, ...] 
+ schema_extra: Union[Dict[str, object], 'SchemaExtraCallable'] + json_loads: Callable[[str], object] + json_dumps: AnyArgTCallable[str] + json_encoders: Dict[Type[object], AnyCallable] + underscore_attrs_are_private: bool + allow_inf_nan: bool + + # whether or not inherited models as fields should be reconstructed as base model + copy_on_model_validation: bool + # whether dataclass `__post_init__` should be run after validation + post_init_call: Literal['before_validation', 'after_validation'] + +else: + ConfigDict = dict # type: ignore + + +class BaseConfig: + title: Optional[str] = None + anystr_lower: bool = False + anystr_upper: bool = False + anystr_strip_whitespace: bool = False + min_anystr_length: int = 0 + max_anystr_length: Optional[int] = None + validate_all: bool = False + extra: Extra = Extra.ignore + allow_mutation: bool = True + frozen: bool = False + allow_population_by_field_name: bool = False + use_enum_values: bool = False + fields: Dict[str, Union[str, Dict[str, str]]] = {} + validate_assignment: bool = False + error_msg_templates: Dict[str, str] = {} + arbitrary_types_allowed: bool = False + orm_mode: bool = False + getter_dict: Type[GetterDict] = GetterDict + alias_generator: Optional[Callable[[str], str]] = None + keep_untouched: Tuple[type, ...] 
= () + schema_extra: Union[Dict[str, Any], 'SchemaExtraCallable'] = {} + json_loads: Callable[[str], Any] = json.loads + json_dumps: Callable[..., str] = json.dumps + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {} + underscore_attrs_are_private: bool = False + allow_inf_nan: bool = True + + # whether inherited models as fields should be reconstructed as base model, + # and whether such a copy should be shallow or deep + copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow' + + # whether `Union` should check all allowed types before even trying to coerce + smart_union: bool = False + # whether dataclass `__post_init__` should be run before or after validation + post_init_call: Literal['before_validation', 'after_validation'] = 'before_validation' + + @classmethod + def get_field_info(cls, name: str) -> Dict[str, Any]: + """ + Get properties of FieldInfo from the `fields` property of the config class. + """ + + fields_value = cls.fields.get(name) + + if isinstance(fields_value, str): + field_info: Dict[str, Any] = {'alias': fields_value} + elif isinstance(fields_value, dict): + field_info = fields_value + else: + field_info = {} + + if 'alias' in field_info: + field_info.setdefault('alias_priority', 2) + + if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator: + alias = cls.alias_generator(name) + if not isinstance(alias, str): + raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}') + field_info.update(alias=alias, alias_priority=1) + return field_info + + @classmethod + def prepare_field(cls, field: 'ModelField') -> None: + """ + Optional hook to check or modify fields during model creation. 
+ """ + pass + + +def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]: + if config is None: + return BaseConfig + + else: + config_dict = ( + config + if isinstance(config, dict) + else {k: getattr(config, k) for k in dir(config) if not k.startswith('__')} ) - athlete = Athlete(firstName='John', lastName='Doe', sport='track') - print(athlete.model_dump(by_alias=True)) - #> {'FirstName': 'John', 'LastName': 'Doe', 'Sport': 'track'} - ``` - - Note: - Pydantic offers three built-in alias generators: [`to_pascal`][pydantic.alias_generators.to_pascal], - [`to_camel`][pydantic.alias_generators.to_camel], and [`to_snake`][pydantic.alias_generators.to_snake]. - """ - - ignored_types: tuple[type, ...] - """A tuple of types that may occur as values of class attributes without annotations. This is - typically used for custom descriptors (classes that behave like `property`). If an attribute is set on a - class without an annotation and has a type that is not in this tuple (or otherwise recognized by - _pydantic_), an error will be raised. Defaults to `()`. - """ - - allow_inf_nan: bool - """Whether to allow infinity (`+inf` an `-inf`) and NaN values to float fields. Defaults to `True`.""" - - json_schema_extra: JsonDict | JsonSchemaExtraCallable | None - """A dict or callable to provide extra JSON schema properties. Defaults to `None`.""" - - json_encoders: dict[type[object], JsonEncoder] | None - """ - A `dict` of custom JSON encoders for specific types. Defaults to `None`. - - !!! warning "Deprecated" - This config option is a carryover from v1. - We originally planned to remove it in v2 but didn't have a 1:1 replacement so we are keeping it for now. - It is still deprecated and will likely be removed in the future. - """ - - # new in V2 - strict: bool - """ - _(new in V2)_ If `True`, strict validation is applied to all fields on the model. - - By default, Pydantic attempts to coerce values to the correct type, when possible. 
- - There are situations in which you may want to disable this behavior, and instead raise an error if a value's type - does not match the field's type annotation. - - To configure strict mode for all fields on a model, you can set `strict=True` on the model. - - ```py - from pydantic import BaseModel, ConfigDict - - class Model(BaseModel): - model_config = ConfigDict(strict=True) - - name: str - age: int - ``` - - See [Strict Mode](../concepts/strict_mode.md) for more details. - - See the [Conversion Table](../concepts/conversion_table.md) for more details on how Pydantic converts data in both - strict and lax modes. - """ - # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never' - revalidate_instances: Literal['always', 'never', 'subclass-instances'] - """ - When and how to revalidate models and dataclasses during validation. Accepts the string - values of `'never'`, `'always'` and `'subclass-instances'`. Defaults to `'never'`. - - - `'never'` will not revalidate models and dataclasses during validation - - `'always'` will revalidate models and dataclasses during validation - - `'subclass-instances'` will revalidate models and dataclasses during validation if the instance is a - subclass of the model or dataclass - - By default, model and dataclass instances are not revalidated during validation. - - ```py - from typing import List - - from pydantic import BaseModel - - class User(BaseModel, revalidate_instances='never'): # (1)! - hobbies: List[str] - - class SubUser(User): - sins: List[str] - - class Transaction(BaseModel): - user: User - - my_user = User(hobbies=['reading']) - t = Transaction(user=my_user) - print(t) - #> user=User(hobbies=['reading']) - - my_user.hobbies = [1] # (2)! - t = Transaction(user=my_user) # (3)! 
- print(t) - #> user=User(hobbies=[1]) - - my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying']) - t = Transaction(user=my_sub_user) - print(t) - #> user=SubUser(hobbies=['scuba diving'], sins=['lying']) - ``` - - 1. `revalidate_instances` is set to `'never'` by **default. - 2. The assignment is not validated, unless you set `validate_assignment` to `True` in the model's config. - 3. Since `revalidate_instances` is set to `never`, this is not revalidated. - - If you want to revalidate instances during validation, you can set `revalidate_instances` to `'always'` - in the model's config. - - ```py - from typing import List - - from pydantic import BaseModel, ValidationError - - class User(BaseModel, revalidate_instances='always'): # (1)! - hobbies: List[str] - - class SubUser(User): - sins: List[str] - - class Transaction(BaseModel): - user: User - - my_user = User(hobbies=['reading']) - t = Transaction(user=my_user) - print(t) - #> user=User(hobbies=['reading']) - - my_user.hobbies = [1] - try: - t = Transaction(user=my_user) # (2)! - except ValidationError as e: - print(e) - ''' - 1 validation error for Transaction - user.hobbies.0 - Input should be a valid string [type=string_type, input_value=1, input_type=int] - ''' - - my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying']) - t = Transaction(user=my_sub_user) - print(t) # (3)! - #> user=User(hobbies=['scuba diving']) - ``` - - 1. `revalidate_instances` is set to `'always'`. - 2. The model is revalidated, since `revalidate_instances` is set to `'always'`. - 3. Using `'never'` we would have gotten `user=SubUser(hobbies=['scuba diving'], sins=['lying'])`. - - It's also possible to set `revalidate_instances` to `'subclass-instances'` to only revalidate instances - of subclasses of the model. - - ```py - from typing import List - - from pydantic import BaseModel - - class User(BaseModel, revalidate_instances='subclass-instances'): # (1)! 
- hobbies: List[str] - - class SubUser(User): - sins: List[str] - - class Transaction(BaseModel): - user: User - - my_user = User(hobbies=['reading']) - t = Transaction(user=my_user) - print(t) - #> user=User(hobbies=['reading']) - - my_user.hobbies = [1] - t = Transaction(user=my_user) # (2)! - print(t) - #> user=User(hobbies=[1]) - - my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying']) - t = Transaction(user=my_sub_user) - print(t) # (3)! - #> user=User(hobbies=['scuba diving']) - ``` - - 1. `revalidate_instances` is set to `'subclass-instances'`. - 2. This is not revalidated, since `my_user` is not a subclass of `User`. - 3. Using `'never'` we would have gotten `user=SubUser(hobbies=['scuba diving'], sins=['lying'])`. - """ - - ser_json_timedelta: Literal['iso8601', 'float'] - """ - The format of JSON serialized timedeltas. Accepts the string values of `'iso8601'` and - `'float'`. Defaults to `'iso8601'`. - - - `'iso8601'` will serialize timedeltas to ISO 8601 durations. - - `'float'` will serialize timedeltas to the total number of seconds. - """ - - ser_json_bytes: Literal['utf8', 'base64'] - """ - The encoding of JSON serialized bytes. Accepts the string values of `'utf8'` and `'base64'`. - Defaults to `'utf8'`. - - - `'utf8'` will serialize bytes to UTF-8 strings. - - `'base64'` will serialize bytes to URL safe base64 strings. - """ - - ser_json_inf_nan: Literal['null', 'constants'] - """ - The encoding of JSON serialized infinity and NaN float values. Accepts the string values of `'null'` and `'constants'`. - Defaults to `'null'`. - - - `'null'` will serialize infinity and NaN values as `null`. - - `'constants'` will serialize infinity and NaN values as `Infinity` and `NaN`. - """ - - # whether to validate default values during validation, default False - validate_default: bool - """Whether to validate default values during validation. 
Defaults to `False`.""" - - validate_return: bool - """whether to validate the return value from call validators. Defaults to `False`.""" - - protected_namespaces: tuple[str, ...] - """ - A `tuple` of strings that prevent model to have field which conflict with them. - Defaults to `('model_', )`). - - Pydantic prevents collisions between model attributes and `BaseModel`'s own methods by - namespacing them with the prefix `model_`. - - ```py - import warnings - - from pydantic import BaseModel - - warnings.filterwarnings('error') # Raise warnings as errors - - try: - - class Model(BaseModel): - model_prefixed_field: str - - except UserWarning as e: - print(e) - ''' - Field "model_prefixed_field" has conflict with protected namespace "model_". - - You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ()`. - ''' - ``` - - You can customize this behavior using the `protected_namespaces` setting: - - ```py - import warnings - - from pydantic import BaseModel, ConfigDict - - warnings.filterwarnings('error') # Raise warnings as errors - - try: - - class Model(BaseModel): - model_prefixed_field: str - also_protect_field: str - - model_config = ConfigDict( - protected_namespaces=('protect_me_', 'also_protect_') - ) - - except UserWarning as e: - print(e) - ''' - Field "also_protect_field" has conflict with protected namespace "also_protect_". - - You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ('protect_me_',)`. - ''' - ``` - - While Pydantic will only emit a warning when an item is in a protected namespace but does not actually have a collision, - an error _is_ raised if there is an actual collision with an existing attribute: - - ```py - from pydantic import BaseModel - - try: - - class Model(BaseModel): - model_validate: str - - except NameError as e: - print(e) - ''' - Field "model_validate" conflicts with member > of protected namespace "model_". 
- ''' - ``` - """ - - hide_input_in_errors: bool - """ - Whether to hide inputs when printing errors. Defaults to `False`. - - Pydantic shows the input value and type when it raises `ValidationError` during the validation. - - ```py - from pydantic import BaseModel, ValidationError - - class Model(BaseModel): - a: str - - try: - Model(a=123) - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - a - Input should be a valid string [type=string_type, input_value=123, input_type=int] - ''' - ``` - - You can hide the input value and type by setting the `hide_input_in_errors` config to `True`. - - ```py - from pydantic import BaseModel, ConfigDict, ValidationError - - class Model(BaseModel): - a: str - model_config = ConfigDict(hide_input_in_errors=True) - - try: - Model(a=123) - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - a - Input should be a valid string [type=string_type] - ''' - ``` - """ - - defer_build: bool - """ - Whether to defer model validator and serializer construction until the first model validation. - - This can be useful to avoid the overhead of building models which are only - used nested within other models, or when you want to manually define type namespace via - [`Model.model_rebuild(_types_namespace=...)`][pydantic.BaseModel.model_rebuild]. Defaults to False. - """ - - plugin_settings: dict[str, object] | None - """A `dict` of settings for plugins. Defaults to `None`. - - See [Pydantic Plugins](../concepts/plugins.md) for details. - """ - - schema_generator: type[_GenerateSchema] | None - """ - A custom core schema generator class to use when generating JSON schemas. - Useful if you want to change the way types are validated across an entire model/schema. Defaults to `None`. - - The `GenerateSchema` interface is subject to change, currently only the `string_schema` method is public. - - See [#6737](https://github.com/pydantic/pydantic/pull/6737) for details. 
- """ - - json_schema_serialization_defaults_required: bool - """ - Whether fields with default values should be marked as required in the serialization schema. Defaults to `False`. - - This ensures that the serialization schema will reflect the fact a field with a default will always be present - when serializing the model, even though it is not required for validation. - - However, there are scenarios where this may be undesirable — in particular, if you want to share the schema - between validation and serialization, and don't mind fields with defaults being marked as not required during - serialization. See [#7209](https://github.com/pydantic/pydantic/issues/7209) for more details. - - ```py - from pydantic import BaseModel, ConfigDict - - class Model(BaseModel): - a: str = 'a' - - model_config = ConfigDict(json_schema_serialization_defaults_required=True) - - print(Model.model_json_schema(mode='validation')) - ''' - { - 'properties': {'a': {'default': 'a', 'title': 'A', 'type': 'string'}}, - 'title': 'Model', - 'type': 'object', + class Config(BaseConfig): + ... + + for k, v in config_dict.items(): + setattr(Config, k, v) + return Config + + +def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType': + if not self_config: + base_classes: Tuple['ConfigType', ...] 
= (parent_config,) + elif self_config == parent_config: + base_classes = (self_config,) + else: + base_classes = self_config, parent_config + + namespace['json_encoders'] = { + **getattr(parent_config, 'json_encoders', {}), + **getattr(self_config, 'json_encoders', {}), + **namespace.get('json_encoders', {}), } - ''' - print(Model.model_json_schema(mode='serialization')) - ''' - { - 'properties': {'a': {'default': 'a', 'title': 'A', 'type': 'string'}}, - 'required': ['a'], - 'title': 'Model', - 'type': 'object', - } - ''' - ``` - """ - json_schema_mode_override: Literal['validation', 'serialization', None] - """ - If not `None`, the specified mode will be used to generate the JSON schema regardless of what `mode` was passed to - the function call. Defaults to `None`. - - This provides a way to force the JSON schema generation to reflect a specific mode, e.g., to always use the - validation schema. - - It can be useful when using frameworks (such as FastAPI) that may generate different schemas for validation - and serialization that must both be referenced from the same schema; when this happens, we automatically append - `-Input` to the definition reference for the validation schema and `-Output` to the definition reference for the - serialization schema. By specifying a `json_schema_mode_override` though, this prevents the conflict between - the validation and serialization schemas (since both will use the specified schema), and so prevents the suffixes - from being added to the definition references. 
- - ```py - from pydantic import BaseModel, ConfigDict, Json - - class Model(BaseModel): - a: Json[int] # requires a string to validate, but will dump an int - - print(Model.model_json_schema(mode='serialization')) - ''' - { - 'properties': {'a': {'title': 'A', 'type': 'integer'}}, - 'required': ['a'], - 'title': 'Model', - 'type': 'object', - } - ''' - - class ForceInputModel(Model): - # the following ensures that even with mode='serialization', we - # will get the schema that would be generated for validation. - model_config = ConfigDict(json_schema_mode_override='validation') - - print(ForceInputModel.model_json_schema(mode='serialization')) - ''' - { - 'properties': { - 'a': { - 'contentMediaType': 'application/json', - 'contentSchema': {'type': 'integer'}, - 'title': 'A', - 'type': 'string', - } - }, - 'required': ['a'], - 'title': 'ForceInputModel', - 'type': 'object', - } - ''' - ``` - """ - - coerce_numbers_to_str: bool - """ - If `True`, enables automatic coercion of any `Number` type to `str` in "lax" (non-strict) mode. Defaults to `False`. - - Pydantic doesn't allow number types (`int`, `float`, `Decimal`) to be coerced as type `str` by default. - - ```py - from decimal import Decimal - - from pydantic import BaseModel, ConfigDict, ValidationError - - class Model(BaseModel): - value: str - - try: - print(Model(value=42)) - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - value - Input should be a valid string [type=string_type, input_value=42, input_type=int] - ''' - - class Model(BaseModel): - model_config = ConfigDict(coerce_numbers_to_str=True) - - value: str - - repr(Model(value=42).value) - #> "42" - repr(Model(value=42.13).value) - #> "42.13" - repr(Model(value=Decimal('42.13')).value) - #> "42.13" - ``` - """ - - regex_engine: Literal['rust-regex', 'python-re'] - """ - The regex engine to used for pattern validation - Defaults to `'rust-regex'`. 
- - - `rust-regex` uses the [`regex`](https://docs.rs/regex) Rust crate, - which is non-backtracking and therefore more DDoS resistant, but does not support all regex features. - - `python-re` use the [`re`](https://docs.python.org/3/library/re.html) module, - which supports all regex features, but may be slower. - - ```py - from pydantic import BaseModel, ConfigDict, Field, ValidationError - - class Model(BaseModel): - model_config = ConfigDict(regex_engine='python-re') - - value: str = Field(pattern=r'^abc(?=def)') - - print(Model(value='abcdef').value) - #> abcdef - - try: - print(Model(value='abxyzcdef')) - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - value - String should match pattern '^abc(?=def)' [type=string_pattern_mismatch, input_value='abxyzcdef', input_type=str] - ''' - ``` - """ - - validation_error_cause: bool - """ - If `True`, python exceptions that were part of a validation failure will be shown as an exception group as a cause. Can be useful for debugging. Defaults to `False`. - - Note: - Python 3.10 and older don't support exception groups natively. <=3.10, backport must be installed: `pip install exceptiongroup`. - - Note: - The structure of validation errors are likely to change in future pydantic versions. Pydantic offers no guarantees about the structure of validation errors. Should be used for visual traceback debugging only. 
- """ + return type('Config', base_classes, namespace) -__getattr__ = getattr_migration(__name__) +def prepare_config(config: Type[BaseConfig], cls_name: str) -> None: + if not isinstance(config.extra, Extra): + try: + config.extra = Extra(config.extra) + except ValueError: + raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"') diff --git a/lib/pydantic/dataclasses.py b/lib/pydantic/dataclasses.py index d9c9c903..68331127 100644 --- a/lib/pydantic/dataclasses.py +++ b/lib/pydantic/dataclasses.py @@ -1,327 +1,479 @@ -"""Provide an enhanced dataclass that performs validation.""" -from __future__ import annotations as _annotations +""" +The main purpose is to enhance stdlib dataclasses by adding validation +A pydantic dataclass can be generated from scratch or from a stdlib one. -import dataclasses +Behind the scene, a pydantic dataclass is just like a regular one on which we attach +a `BaseModel` and magic methods to trigger the validation of the data. +`__init__` and `__post_init__` are hence overridden and have extra logic to be +able to validate input data. + +When a pydantic dataclass is generated from scratch, it's just a plain dataclass +with validation triggered at initialization + +The tricky part if for stdlib dataclasses that are converted after into pydantic ones e.g. + +```py +@dataclasses.dataclass +class M: + x: int + +ValidatedM = pydantic.dataclasses.dataclass(M) +``` + +We indeed still want to support equality, hashing, repr, ... as if it was the stdlib one! + +```py +assert isinstance(ValidatedM(x=1), M) +assert ValidatedM(x=1) == M(x=1) +``` + +This means we **don't want to create a new dataclass that inherits from it** +The trick is to create a wrapper around `M` that will act as a proxy to trigger +validation without altering default `M` behaviour. 
+""" import sys -import types -from typing import TYPE_CHECKING, Any, Callable, Generic, NoReturn, TypeVar, overload +from contextlib import contextmanager +from functools import wraps +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Dict, + Generator, + Optional, + Set, + Type, + TypeVar, + Union, + overload, +) -from typing_extensions import Literal, TypeGuard, dataclass_transform +from typing_extensions import dataclass_transform -from ._internal import _config, _decorators, _typing_extra -from ._internal import _dataclasses as _pydantic_dataclasses -from ._migration import getattr_migration -from .config import ConfigDict -from .fields import Field, FieldInfo +from .class_validators import gather_all_validators +from .config import BaseConfig, ConfigDict, Extra, get_config +from .error_wrappers import ValidationError +from .errors import DataclassTypeError +from .fields import Field, FieldInfo, Required, Undefined +from .main import create_model, validate_model +from .utils import ClassAttribute if TYPE_CHECKING: - from ._internal._dataclasses import PydanticDataclass + from .main import BaseModel + from .typing import CallableGenerator, NoArgAnyCallable -__all__ = 'dataclass', 'rebuild_dataclass' + DataclassT = TypeVar('DataclassT', bound='Dataclass') + + DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy'] + + class Dataclass: + # stdlib attributes + __dataclass_fields__: ClassVar[Dict[str, Any]] + __dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams` + __post_init__: ClassVar[Callable[..., None]] + + # Added by pydantic + __pydantic_run_validation__: ClassVar[bool] + __post_init_post_parse__: ClassVar[Callable[..., None]] + __pydantic_initialised__: ClassVar[bool] + __pydantic_model__: ClassVar[Type[BaseModel]] + __pydantic_validate_values__: ClassVar[Callable[['Dataclass'], None]] + __pydantic_has_field_info_default__: ClassVar[bool] # whether a `pydantic.Field` is used as default value + + 
def __init__(self, *args: object, **kwargs: object) -> None: + pass + + @classmethod + def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator': + pass + + @classmethod + def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT': + pass + + +__all__ = [ + 'dataclass', + 'set_validation', + 'create_pydantic_model_from_dataclass', + 'is_builtin_dataclass', + 'make_dataclass_validator', +] _T = TypeVar('_T') if sys.version_info >= (3, 10): - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) @overload def dataclass( *, - init: Literal[False] = False, + init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - config: ConfigDict | type[object] | None = None, - validate_on_init: bool | None = None, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, kw_only: bool = ..., - slots: bool = ..., - ) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore + ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: ... - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) @overload def dataclass( - _cls: type[_T], # type: ignore + _cls: Type[_T], *, - init: Literal[False] = False, + init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - config: ConfigDict | type[object] | None = None, - validate_on_init: bool | None = None, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, kw_only: bool = ..., - slots: bool = ..., - ) -> type[PydanticDataclass]: + ) -> 'DataclassClassOrWrapper': ... 
else: - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) @overload def dataclass( *, - init: Literal[False] = False, + init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - config: ConfigDict | type[object] | None = None, - validate_on_init: bool | None = None, - ) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: ... - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) @overload def dataclass( - _cls: type[_T], # type: ignore + _cls: Type[_T], *, - init: Literal[False] = False, + init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - config: ConfigDict | type[object] | None = None, - validate_on_init: bool | None = None, - ) -> type[PydanticDataclass]: + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + ) -> 'DataclassClassOrWrapper': ... 
-@dataclass_transform(field_specifiers=(dataclasses.field, Field)) -def dataclass( # noqa: C901 - _cls: type[_T] | None = None, +@dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) +def dataclass( + _cls: Optional[Type[_T]] = None, *, - init: Literal[False] = False, + init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - config: ConfigDict | type[object] | None = None, - validate_on_init: bool | None = None, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, kw_only: bool = False, - slots: bool = False, -) -> Callable[[type[_T]], type[PydanticDataclass]] | type[PydanticDataclass]: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/dataclasses/ - - A decorator used to create a Pydantic-enhanced dataclass, similar to the standard Python `dataclass`, - but with added validation. - - This function should be used similarly to `dataclasses.dataclass`. - - Args: - _cls: The target `dataclass`. - init: Included for signature compatibility with `dataclasses.dataclass`, and is passed through to - `dataclasses.dataclass` when appropriate. If specified, must be set to `False`, as pydantic inserts its - own `__init__` function. - repr: A boolean indicating whether to include the field in the `__repr__` output. - eq: Determines if a `__eq__` method should be generated for the class. - order: Determines if comparison magic methods should be generated, such as `__lt__`, but not `__eq__`. - unsafe_hash: Determines if a `__hash__` method should be included in the class, as in `dataclasses.dataclass`. - frozen: Determines if the generated class should be a 'frozen' `dataclass`, which does not allow its - attributes to be modified after it has been initialized. - config: The Pydantic config to use for the `dataclass`. 
- validate_on_init: A deprecated parameter included for backwards compatibility; in V2, all Pydantic dataclasses - are validated on init. - kw_only: Determines if `__init__` method parameters must be specified by keyword only. Defaults to `False`. - slots: Determines if the generated class should be a 'slots' `dataclass`, which does not allow the addition of - new attributes after instantiation. - - Returns: - A decorator that accepts a class as its argument and returns a Pydantic `dataclass`. - - Raises: - AssertionError: Raised if `init` is not `False` or `validate_on_init` is `False`. +) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']: """ - assert init is False, 'pydantic.dataclasses.dataclass only supports init=False' - assert validate_on_init is not False, 'validate_on_init=False is no longer supported' + Like the python standard lib dataclasses but with type validation. + The result is either a pydantic dataclass that will validate input data + or a wrapper that will trigger validation around a stdlib dataclass + to avoid modifying it directly + """ + the_config = get_config(config) - if sys.version_info >= (3, 10): - kwargs = dict(kw_only=kw_only, slots=slots) - else: - kwargs = {} + def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper': + import dataclasses - def make_pydantic_fields_compatible(cls: type[Any]) -> None: - """Make sure that stdlib `dataclasses` understands `Field` kwargs like `kw_only` - To do that, we simply change - `x: int = pydantic.Field(..., kw_only=True)` - into - `x: int = dataclasses.field(default=pydantic.Field(..., kw_only=True), kw_only=True)` - """ - for annotation_cls in cls.__mro__: - # In Python < 3.9, `__annotations__` might not be present if there are no fields. - # we therefore need to use `getattr` to avoid an `AttributeError`. 
- annotations = getattr(annotation_cls, '__annotations__', []) - for field_name in annotations: - field_value = getattr(cls, field_name, None) - # Process only if this is an instance of `FieldInfo`. - if not isinstance(field_value, FieldInfo): - continue + if is_builtin_dataclass(cls) and _extra_dc_args(_cls) == _extra_dc_args(_cls.__bases__[0]): # type: ignore + dc_cls_doc = '' + dc_cls = DataclassProxy(cls) + default_validate_on_init = False + else: + dc_cls_doc = cls.__doc__ or '' # needs to be done before generating dataclass + if sys.version_info >= (3, 10): + dc_cls = dataclasses.dataclass( + cls, + init=init, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + kw_only=kw_only, + ) + else: + dc_cls = dataclasses.dataclass( # type: ignore + cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen + ) + default_validate_on_init = True - # Initialize arguments for the standard `dataclasses.field`. - field_args: dict = {'default': field_value} - - # Handle `kw_only` for Python 3.10+ - if sys.version_info >= (3, 10) and field_value.kw_only: - field_args['kw_only'] = True - - # Set `repr` attribute if it's explicitly specified to be not `True`. - if field_value.repr is not True: - field_args['repr'] = field_value.repr - - setattr(cls, field_name, dataclasses.field(**field_args)) - # In Python 3.8, dataclasses checks cls.__dict__['__annotations__'] for annotations, - # so we must make sure it's initialized before we add to it. - if cls.__dict__.get('__annotations__') is None: - cls.__annotations__ = {} - cls.__annotations__[field_name] = annotations[field_name] - - def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]: - """Create a Pydantic dataclass from a regular dataclass. - - Args: - cls: The class to create the Pydantic dataclass from. - - Returns: - A Pydantic dataclass. 
- """ - original_cls = cls - - config_dict = config - if config_dict is None: - # if not explicitly provided, read from the type - cls_config = getattr(cls, '__pydantic_config__', None) - if cls_config is not None: - config_dict = cls_config - config_wrapper = _config.ConfigWrapper(config_dict) - decorators = _decorators.DecoratorInfos.build(cls) - - # Keep track of the original __doc__ so that we can restore it after applying the dataclasses decorator - # Otherwise, classes with no __doc__ will have their signature added into the JSON schema description, - # since dataclasses.dataclass will set this as the __doc__ - original_doc = cls.__doc__ - - if _pydantic_dataclasses.is_builtin_dataclass(cls): - # Don't preserve the docstring for vanilla dataclasses, as it may include the signature - # This matches v1 behavior, and there was an explicit test for it - original_doc = None - - # We don't want to add validation to the existing std lib dataclass, so we will subclass it - # If the class is generic, we need to make sure the subclass also inherits from Generic - # with all the same parameters. 
- bases = (cls,) - if issubclass(cls, Generic): - generic_base = Generic[cls.__parameters__] # type: ignore - bases = bases + (generic_base,) - cls = types.new_class(cls.__name__, bases) - - make_pydantic_fields_compatible(cls) - - cls = dataclasses.dataclass( # type: ignore[call-overload] - cls, - # the value of init here doesn't affect anything except that it makes it easier to generate a signature - init=True, - repr=repr, - eq=eq, - order=order, - unsafe_hash=unsafe_hash, - frozen=frozen, - **kwargs, - ) - - cls.__pydantic_decorators__ = decorators # type: ignore - cls.__doc__ = original_doc - cls.__module__ = original_cls.__module__ - cls.__qualname__ = original_cls.__qualname__ - pydantic_complete = _pydantic_dataclasses.complete_dataclass( - cls, config_wrapper, raise_errors=False, types_namespace=None - ) - cls.__pydantic_complete__ = pydantic_complete # type: ignore - return cls + should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init + _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc) + dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls}) + return dc_cls if _cls is None: - return create_dataclass + return wrap - return create_dataclass(_cls) + return wrap(_cls) -__getattr__ = getattr_migration(__name__) - -if (3, 8) <= sys.version_info < (3, 11): - # Monkeypatch dataclasses.InitVar so that typing doesn't error if it occurs as a type when evaluating type hints - # Starting in 3.11, typing.get_type_hints will not raise an error if the retrieved type hints are not callable. - - def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn: - """This function does nothing but raise an error that is as similar as possible to what you'd get - if you were to try calling `InitVar[int]()` without this monkeypatch. The whole purpose is just - to ensure typing._type_check does not error if the type hint evaluates to `InitVar[]`. 
- """ - raise TypeError("'InitVar' object is not callable") - - dataclasses.InitVar.__call__ = _call_initvar +@contextmanager +def set_validation(cls: Type['DataclassT'], value: bool) -> Generator[Type['DataclassT'], None, None]: + original_run_validation = cls.__pydantic_run_validation__ + try: + cls.__pydantic_run_validation__ = value + yield cls + finally: + cls.__pydantic_run_validation__ = original_run_validation -def rebuild_dataclass( - cls: type[PydanticDataclass], - *, - force: bool = False, - raise_errors: bool = True, - _parent_namespace_depth: int = 2, - _types_namespace: dict[str, Any] | None = None, -) -> bool | None: - """Try to rebuild the pydantic-core schema for the dataclass. +class DataclassProxy: + __slots__ = '__dataclass__' - This may be necessary when one of the annotations is a ForwardRef which could not be resolved during - the initial attempt to build the schema, and automatic rebuilding fails. + def __init__(self, dc_cls: Type['Dataclass']) -> None: + object.__setattr__(self, '__dataclass__', dc_cls) - This is analogous to `BaseModel.model_rebuild`. + def __call__(self, *args: Any, **kwargs: Any) -> Any: + with set_validation(self.__dataclass__, True): + return self.__dataclass__(*args, **kwargs) - Args: - cls: The class to rebuild the pydantic-core schema for. - force: Whether to force the rebuilding of the schema, defaults to `False`. - raise_errors: Whether to raise errors, defaults to `True`. - _parent_namespace_depth: The depth level of the parent namespace, defaults to 2. - _types_namespace: The types namespace, defaults to `None`. + def __getattr__(self, name: str) -> Any: + return getattr(self.__dataclass__, name) - Returns: - Returns `None` if the schema is already "complete" and rebuilding was not required. - If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. 
+ def __instancecheck__(self, instance: Any) -> bool: + return isinstance(instance, self.__dataclass__) + + +def _add_pydantic_validation_attributes( # noqa: C901 (ignore complexity) + dc_cls: Type['Dataclass'], + config: Type[BaseConfig], + validate_on_init: bool, + dc_cls_doc: str, +) -> None: """ - if not force and cls.__pydantic_complete__: - return None - else: - if _types_namespace is not None: - types_namespace: dict[str, Any] | None = _types_namespace.copy() + We need to replace the right method. If no `__post_init__` has been set in the stdlib dataclass + it won't even exist (code is generated on the fly by `dataclasses`) + By default, we run validation after `__init__` or `__post_init__` if defined + """ + init = dc_cls.__init__ + + @wraps(init) + def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + if config.extra == Extra.ignore: + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + + elif config.extra == Extra.allow: + for k, v in kwargs.items(): + self.__dict__.setdefault(k, v) + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + else: - if _parent_namespace_depth > 0: - frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {} - # Note: we may need to add something similar to cls.__pydantic_parent_namespace__ from BaseModel - # here when implementing handling of recursive generics. See BaseModel.model_rebuild for reference. 
- types_namespace = frame_parent_ns - else: - types_namespace = {} + init(self, *args, **kwargs) - types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace) - return _pydantic_dataclasses.complete_dataclass( - cls, - _config.ConfigWrapper(cls.__pydantic_config__, check=False), - raise_errors=raise_errors, - types_namespace=types_namespace, - ) + if hasattr(dc_cls, '__post_init__'): + post_init = dc_cls.__post_init__ + + @wraps(post_init) + def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + if config.post_init_call == 'before_validation': + post_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + if hasattr(self, '__post_init_post_parse__'): + self.__post_init_post_parse__(*args, **kwargs) + + if config.post_init_call == 'after_validation': + post_init(self, *args, **kwargs) + + setattr(dc_cls, '__init__', handle_extra_init) + setattr(dc_cls, '__post_init__', new_post_init) + + else: + + @wraps(init) + def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + handle_extra_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + + if hasattr(self, '__post_init_post_parse__'): + # We need to find again the initvars. 
To do that we use `__dataclass_fields__` instead of + # public method `dataclasses.fields` + import dataclasses + + # get all initvars and their default values + initvars_and_values: Dict[str, Any] = {} + for i, f in enumerate(self.__class__.__dataclass_fields__.values()): + if f._field_type is dataclasses._FIELD_INITVAR: # type: ignore[attr-defined] + try: + # set arg value by default + initvars_and_values[f.name] = args[i] + except IndexError: + initvars_and_values[f.name] = kwargs.get(f.name, f.default) + + self.__post_init_post_parse__(**initvars_and_values) + + setattr(dc_cls, '__init__', new_init) + + setattr(dc_cls, '__pydantic_run_validation__', ClassAttribute('__pydantic_run_validation__', validate_on_init)) + setattr(dc_cls, '__pydantic_initialised__', False) + setattr(dc_cls, '__pydantic_model__', create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc)) + setattr(dc_cls, '__pydantic_validate_values__', _dataclass_validate_values) + setattr(dc_cls, '__validate__', classmethod(_validate_dataclass)) + setattr(dc_cls, '__get_validators__', classmethod(_get_validators)) + + if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen: + setattr(dc_cls, '__setattr__', _dataclass_validate_assignment_setattr) -def is_pydantic_dataclass(__cls: type[Any]) -> TypeGuard[type[PydanticDataclass]]: - """Whether a class is a pydantic dataclass. +def _get_validators(cls: 'DataclassClassOrWrapper') -> 'CallableGenerator': + yield cls.__validate__ - Args: - __cls: The class. - Returns: - `True` if the class is a pydantic dataclass, `False` otherwise. 
+def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT': + with set_validation(cls, True): + if isinstance(v, cls): + v.__pydantic_validate_values__() + return v + elif isinstance(v, (list, tuple)): + return cls(*v) + elif isinstance(v, dict): + return cls(**v) + else: + raise DataclassTypeError(class_name=cls.__name__) + + +def create_pydantic_model_from_dataclass( + dc_cls: Type['Dataclass'], + config: Type[Any] = BaseConfig, + dc_cls_doc: Optional[str] = None, +) -> Type['BaseModel']: + import dataclasses + + field_definitions: Dict[str, Any] = {} + for field in dataclasses.fields(dc_cls): + default: Any = Undefined + default_factory: Optional['NoArgAnyCallable'] = None + field_info: FieldInfo + + if field.default is not dataclasses.MISSING: + default = field.default + elif field.default_factory is not dataclasses.MISSING: + default_factory = field.default_factory + else: + default = Required + + if isinstance(default, FieldInfo): + field_info = default + dc_cls.__pydantic_has_field_info_default__ = True + else: + field_info = Field(default=default, default_factory=default_factory, **field.metadata) + + field_definitions[field.name] = (field.type, field_info) + + validators = gather_all_validators(dc_cls) + model: Type['BaseModel'] = create_model( + dc_cls.__name__, + __config__=config, + __module__=dc_cls.__module__, + __validators__=validators, + __cls_kwargs__={'__resolve_forward_refs__': False}, + **field_definitions, + ) + model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or '' + return model + + +def _dataclass_validate_values(self: 'Dataclass') -> None: + # validation errors can occur if this function is called twice on an already initialised dataclass. 
+ # for example if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property + if getattr(self, '__pydantic_initialised__'): + return + if getattr(self, '__pydantic_has_field_info_default__', False): + # We need to remove `FieldInfo` values since they are not valid as input + # It's ok to do that because they are obviously the default values! + input_data = {k: v for k, v in self.__dict__.items() if not isinstance(v, FieldInfo)} + else: + input_data = self.__dict__ + d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__) + if validation_error: + raise validation_error + self.__dict__.update(d) + object.__setattr__(self, '__pydantic_initialised__', True) + + +def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value: Any) -> None: + if self.__pydantic_initialised__: + d = dict(self.__dict__) + d.pop(name, None) + known_field = self.__pydantic_model__.__fields__.get(name, None) + if known_field: + value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__) + if error_: + raise ValidationError([error_], self.__class__) + + object.__setattr__(self, name, value) + + +def _extra_dc_args(cls: Type[Any]) -> Set[str]: + return { + x + for x in dir(cls) + if x not in getattr(cls, '__dataclass_fields__', {}) and not (x.startswith('__') and x.endswith('__')) + } + + +def is_builtin_dataclass(_cls: Type[Any]) -> bool: """ - return dataclasses.is_dataclass(__cls) and '__pydantic_validator__' in __cls.__dict__ + Whether a class is a stdlib dataclass + (useful to discriminated a pydantic dataclass that is actually a wrapper around a stdlib dataclass) + + we check that + - `_cls` is a dataclass + - `_cls` is not a processed pydantic dataclass (with a basemodel attached) + - `_cls` is not a pydantic dataclass inheriting directly from a stdlib dataclass + e.g. 
+ ``` + @dataclasses.dataclass + class A: + x: int + + @pydantic.dataclasses.dataclass + class B(A): + y: int + ``` + In this case, when we first check `B`, we make an extra check and look at the annotations ('y'), + which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x') + """ + import dataclasses + + return ( + dataclasses.is_dataclass(_cls) + and not hasattr(_cls, '__pydantic_model__') + and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {}))) + ) + + +def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]) -> 'CallableGenerator': + """ + Create a pydantic.dataclass from a builtin dataclass to add type validation + and yield the validators + It retrieves the parameters of the dataclass and forwards them to the newly created dataclass + """ + yield from _get_validators(dataclass(dc_cls, config=config, validate_on_init=False)) diff --git a/lib/pydantic/datetime_parse.py b/lib/pydantic/datetime_parse.py index 902219df..cfd54593 100644 --- a/lib/pydantic/datetime_parse.py +++ b/lib/pydantic/datetime_parse.py @@ -1,4 +1,248 @@ -"""The `datetime_parse` module is a backport module from V1.""" -from ._migration import getattr_migration +""" +Functions to parse datetime objects. -__getattr__ = getattr_migration(__name__) +We're using regular expressions rather than time.strptime because: +- They provide both validation and parsing. +- They're more flexible for datetimes. +- The date/datetime/time constructors produce friendlier error messages. 
+ +Stolen from https://raw.githubusercontent.com/django/django/main/django/utils/dateparse.py at +9718fa2e8abe430c3526a9278dd976443d4ae3c6 + +Changed to: +* use standard python datetime types not django.utils.timezone +* raise ValueError when regex doesn't match rather than returning None +* support parsing unix timestamps for dates and datetimes +""" +import re +from datetime import date, datetime, time, timedelta, timezone +from typing import Dict, Optional, Type, Union + +from . import errors + +date_expr = r'(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})' +time_expr = ( + r'(?P\d{1,2}):(?P\d{1,2})' + r'(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?' + r'(?PZ|[+-]\d{2}(?::?\d{2})?)?$' +) + +date_re = re.compile(f'{date_expr}$') +time_re = re.compile(time_expr) +datetime_re = re.compile(f'{date_expr}[T ]{time_expr}') + +standard_duration_re = re.compile( + r'^' + r'(?:(?P-?\d+) (days?, )?)?' + r'((?:(?P-?\d+):)(?=\d+:\d+))?' + r'(?:(?P-?\d+):)?' + r'(?P-?\d+)' + r'(?:\.(?P\d{1,6})\d{0,6})?' + r'$' +) + +# Support the sections of ISO 8601 date representation that are accepted by timedelta +iso8601_duration_re = re.compile( + r'^(?P[-+]?)' + r'P' + r'(?:(?P\d+(.\d+)?)D)?' + r'(?:T' + r'(?:(?P\d+(.\d+)?)H)?' + r'(?:(?P\d+(.\d+)?)M)?' + r'(?:(?P\d+(.\d+)?)S)?' + r')?' 
+ r'$' +) + +EPOCH = datetime(1970, 1, 1) +# if greater than this, the number is in ms, if less than or equal it's in seconds +# (in seconds this is 11th October 2603, in ms it's 20th August 1970) +MS_WATERSHED = int(2e10) +# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 +MAX_NUMBER = int(3e20) +StrBytesIntFloat = Union[str, bytes, int, float] + + +def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: + if isinstance(value, (int, float)): + return value + try: + return float(value) + except ValueError: + return None + except TypeError: + raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float') + + +def from_unix_seconds(seconds: Union[int, float]) -> datetime: + if seconds > MAX_NUMBER: + return datetime.max + elif seconds < -MAX_NUMBER: + return datetime.min + + while abs(seconds) > MS_WATERSHED: + seconds /= 1000 + dt = EPOCH + timedelta(seconds=seconds) + return dt.replace(tzinfo=timezone.utc) + + +def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]: + if value == 'Z': + return timezone.utc + elif value is not None: + offset_mins = int(value[-2:]) if len(value) > 3 else 0 + offset = 60 * int(value[1:3]) + offset_mins + if value[0] == '-': + offset = -offset + try: + return timezone(timedelta(minutes=offset)) + except ValueError: + raise error() + else: + return None + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + """ + Parse a date/int/float/string and return a datetime.date. + + Raise ValueError if the input is well formatted but not a valid date. + Raise ValueError if the input isn't well formatted. 
+ """ + if isinstance(value, date): + if isinstance(value, datetime): + return value.date() + else: + return value + + number = get_numeric(value, 'date') + if number is not None: + return from_unix_seconds(number).date() + + if isinstance(value, bytes): + value = value.decode() + + match = date_re.match(value) # type: ignore + if match is None: + raise errors.DateError() + + kw = {k: int(v) for k, v in match.groupdict().items()} + + try: + return date(**kw) + except ValueError: + raise errors.DateError() + + +def parse_time(value: Union[time, StrBytesIntFloat]) -> time: + """ + Parse a time/string and return a datetime.time. + + Raise ValueError if the input is well formatted but not a valid time. + Raise ValueError if the input isn't well formatted, in particular if it contains an offset. + """ + if isinstance(value, time): + return value + + number = get_numeric(value, 'time') + if number is not None: + if number >= 86400: + # doesn't make sense since the time time loop back around to 0 + raise errors.TimeError() + return (datetime.min + timedelta(seconds=number)).time() + + if isinstance(value, bytes): + value = value.decode() + + match = time_re.match(value) # type: ignore + if match is None: + raise errors.TimeError() + + kw = match.groupdict() + if kw['microsecond']: + kw['microsecond'] = kw['microsecond'].ljust(6, '0') + + tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_['tzinfo'] = tzinfo + + try: + return time(**kw_) # type: ignore + except ValueError: + raise errors.TimeError() + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + """ + Parse a datetime/int/float/string and return a datetime.datetime. + + This function supports time zone offsets. When the input contains one, + the output uses a timezone with a fixed offset from UTC. + + Raise ValueError if the input is well formatted but not a valid datetime. 
+ Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, datetime): + return value + + number = get_numeric(value, 'datetime') + if number is not None: + return from_unix_seconds(number) + + if isinstance(value, bytes): + value = value.decode() + + match = datetime_re.match(value) # type: ignore + if match is None: + raise errors.DateTimeError() + + kw = match.groupdict() + if kw['microsecond']: + kw['microsecond'] = kw['microsecond'].ljust(6, '0') + + tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_['tzinfo'] = tzinfo + + try: + return datetime(**kw_) # type: ignore + except ValueError: + raise errors.DateTimeError() + + +def parse_duration(value: StrBytesIntFloat) -> timedelta: + """ + Parse a duration int/float/string and return a datetime.timedelta. + + The preferred format for durations in Django is '%d %H:%M:%S.%f'. + + Also supports ISO 8601 representation. 
+ """ + if isinstance(value, timedelta): + return value + + if isinstance(value, (int, float)): + # below code requires a string + value = f'{value:f}' + elif isinstance(value, bytes): + value = value.decode() + + try: + match = standard_duration_re.match(value) or iso8601_duration_re.match(value) + except TypeError: + raise TypeError('invalid type; expected timedelta, string, bytes, int or float') + + if not match: + raise errors.DurationError() + + kw = match.groupdict() + sign = -1 if kw.pop('sign', '+') == '-' else 1 + if kw.get('microseconds'): + kw['microseconds'] = kw['microseconds'].ljust(6, '0') + + if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'): + kw['microseconds'] = '-' + kw['microseconds'] + + kw_ = {k: float(v) for k, v in kw.items() if v is not None} + + return sign * timedelta(**kw_) diff --git a/lib/pydantic/decorator.py b/lib/pydantic/decorator.py index c3643468..089aab65 100644 --- a/lib/pydantic/decorator.py +++ b/lib/pydantic/decorator.py @@ -1,4 +1,264 @@ -"""The `decorator` module is a backport module from V1.""" -from ._migration import getattr_migration +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload -__getattr__ = getattr_migration(__name__) +from . import validator +from .config import Extra +from .errors import ConfigError +from .main import BaseModel, create_model +from .typing import get_all_type_hints +from .utils import to_camel + +__all__ = ('validate_arguments',) + +if TYPE_CHECKING: + from .typing import AnyCallable + + AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable) + ConfigType = Union[None, Type[Any], Dict[str, Any]] + + +@overload +def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']: + ... + + +@overload +def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': + ... 
+ + +def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any: + """ + Decorator to validate the arguments passed to a function. + """ + + def validate(_func: 'AnyCallable') -> 'AnyCallable': + vd = ValidatedFunction(_func, config) + + @wraps(_func) + def wrapper_function(*args: Any, **kwargs: Any) -> Any: + return vd.call(*args, **kwargs) + + wrapper_function.vd = vd # type: ignore + wrapper_function.validate = vd.init_model_instance # type: ignore + wrapper_function.raw_function = vd.raw_function # type: ignore + wrapper_function.model = vd.model # type: ignore + return wrapper_function + + if func: + return validate(func) + else: + return validate + + +ALT_V_ARGS = 'v__args' +ALT_V_KWARGS = 'v__kwargs' +V_POSITIONAL_ONLY_NAME = 'v__positional_only' +V_DUPLICATE_KWARGS = 'v__duplicate_kwargs' + + +class ValidatedFunction: + def __init__(self, function: 'AnyCallableT', config: 'ConfigType'): # noqa C901 + from inspect import Parameter, signature + + parameters: Mapping[str, Parameter] = signature(function).parameters + + if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}: + raise ConfigError( + f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" ' + f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator' + ) + + self.raw_function = function + self.arg_mapping: Dict[int, str] = {} + self.positional_only_args = set() + self.v_args_name = 'args' + self.v_kwargs_name = 'kwargs' + + type_hints = get_all_type_hints(function) + takes_args = False + takes_kwargs = False + fields: Dict[str, Tuple[Any, Any]] = {} + for i, (name, p) in enumerate(parameters.items()): + if p.annotation is p.empty: + annotation = Any + else: + annotation = type_hints[name] + + default = ... 
if p.default is p.empty else p.default + if p.kind == Parameter.POSITIONAL_ONLY: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_POSITIONAL_ONLY_NAME] = List[str], None + self.positional_only_args.add(name) + elif p.kind == Parameter.POSITIONAL_OR_KEYWORD: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_DUPLICATE_KWARGS] = List[str], None + elif p.kind == Parameter.KEYWORD_ONLY: + fields[name] = annotation, default + elif p.kind == Parameter.VAR_POSITIONAL: + self.v_args_name = name + fields[name] = Tuple[annotation, ...], None + takes_args = True + else: + assert p.kind == Parameter.VAR_KEYWORD, p.kind + self.v_kwargs_name = name + fields[name] = Dict[str, annotation], None # type: ignore + takes_kwargs = True + + # these checks avoid a clash between "args" and a field with that name + if not takes_args and self.v_args_name in fields: + self.v_args_name = ALT_V_ARGS + + # same with "kwargs" + if not takes_kwargs and self.v_kwargs_name in fields: + self.v_kwargs_name = ALT_V_KWARGS + + if not takes_args: + # we add the field so validation below can raise the correct exception + fields[self.v_args_name] = List[Any], None + + if not takes_kwargs: + # same with kwargs + fields[self.v_kwargs_name] = Dict[Any, Any], None + + self.create_model(fields, takes_args, takes_kwargs, config) + + def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel: + values = self.build_values(args, kwargs) + return self.model(**values) + + def call(self, *args: Any, **kwargs: Any) -> Any: + m = self.init_model_instance(*args, **kwargs) + return self.execute(m) + + def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]: + values: Dict[str, Any] = {} + if args: + arg_iter = enumerate(args) + while True: + try: + i, a = next(arg_iter) + except StopIteration: + break + arg_name = self.arg_mapping.get(i) + if arg_name is not None: + values[arg_name] = a + else: + values[self.v_args_name] = 
[a] + [a for _, a in arg_iter] + break + + var_kwargs: Dict[str, Any] = {} + wrong_positional_args = [] + duplicate_kwargs = [] + fields_alias = [ + field.alias + for name, field in self.model.__fields__.items() + if name not in (self.v_args_name, self.v_kwargs_name) + ] + non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name} + for k, v in kwargs.items(): + if k in non_var_fields or k in fields_alias: + if k in self.positional_only_args: + wrong_positional_args.append(k) + if k in values: + duplicate_kwargs.append(k) + values[k] = v + else: + var_kwargs[k] = v + + if var_kwargs: + values[self.v_kwargs_name] = var_kwargs + if wrong_positional_args: + values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args + if duplicate_kwargs: + values[V_DUPLICATE_KWARGS] = duplicate_kwargs + return values + + def execute(self, m: BaseModel) -> Any: + d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory} + var_kwargs = d.pop(self.v_kwargs_name, {}) + + if self.v_args_name in d: + args_: List[Any] = [] + in_kwargs = False + kwargs = {} + for name, value in d.items(): + if in_kwargs: + kwargs[name] = value + elif name == self.v_args_name: + args_ += value + in_kwargs = True + else: + args_.append(value) + return self.raw_function(*args_, **kwargs, **var_kwargs) + elif self.positional_only_args: + args_ = [] + kwargs = {} + for name, value in d.items(): + if name in self.positional_only_args: + args_.append(value) + else: + kwargs[name] = value + return self.raw_function(*args_, **kwargs, **var_kwargs) + else: + return self.raw_function(**d, **var_kwargs) + + def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None: + pos_args = len(self.arg_mapping) + + class CustomConfig: + pass + + if not TYPE_CHECKING: # pragma: no branch + if isinstance(config, dict): + CustomConfig = type('Config', (), config) # noqa: F811 + elif config is not None: + CustomConfig = 
config # noqa: F811 + + if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'): + raise ConfigError( + 'Setting the "fields" and "alias_generator" property on custom Config for ' + '@validate_arguments is not yet supported, please remove.' + ) + + class DecoratorBaseModel(BaseModel): + @validator(self.v_args_name, check_fields=False, allow_reuse=True) + def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]: + if takes_args or v is None: + return v + + raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given') + + @validator(self.v_kwargs_name, check_fields=False, allow_reuse=True) + def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: + if takes_kwargs or v is None: + return v + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v.keys())) + raise TypeError(f'unexpected keyword argument{plural}: {keys}') + + @validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True) + def check_positional_only(cls, v: Optional[List[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}') + + @validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True) + def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'multiple values for argument{plural}: {keys}') + + class Config(CustomConfig): + extra = getattr(CustomConfig, 'extra', Extra.forbid) + + self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields) diff --git a/lib/pydantic/deprecated/__init__.py b/lib/pydantic/deprecated/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pydantic/deprecated/class_validators.py 
b/lib/pydantic/deprecated/class_validators.py deleted file mode 100644 index 7b48afd2..00000000 --- a/lib/pydantic/deprecated/class_validators.py +++ /dev/null @@ -1,253 +0,0 @@ -"""Old `@validator` and `@root_validator` function validators from V1.""" - -from __future__ import annotations as _annotations - -from functools import partial, partialmethod -from types import FunctionType -from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload -from warnings import warn - -from typing_extensions import Literal, Protocol, TypeAlias - -from .._internal import _decorators, _decorators_v1 -from ..errors import PydanticUserError -from ..warnings import PydanticDeprecatedSince20 - -_ALLOW_REUSE_WARNING_MESSAGE = '`allow_reuse` is deprecated and will be ignored; it should no longer be necessary' - - -if TYPE_CHECKING: - - class _OnlyValueValidatorClsMethod(Protocol): - def __call__(self, __cls: Any, __value: Any) -> Any: - ... - - class _V1ValidatorWithValuesClsMethod(Protocol): - def __call__(self, __cls: Any, __value: Any, values: dict[str, Any]) -> Any: - ... - - class _V1ValidatorWithValuesKwOnlyClsMethod(Protocol): - def __call__(self, __cls: Any, __value: Any, *, values: dict[str, Any]) -> Any: - ... - - class _V1ValidatorWithKwargsClsMethod(Protocol): - def __call__(self, __cls: Any, **kwargs: Any) -> Any: - ... - - class _V1ValidatorWithValuesAndKwargsClsMethod(Protocol): - def __call__(self, __cls: Any, values: dict[str, Any], **kwargs: Any) -> Any: - ... - - class _V1RootValidatorClsMethod(Protocol): - def __call__( - self, __cls: Any, __values: _decorators_v1.RootValidatorValues - ) -> _decorators_v1.RootValidatorValues: - ... 
- - V1Validator = Union[ - _OnlyValueValidatorClsMethod, - _V1ValidatorWithValuesClsMethod, - _V1ValidatorWithValuesKwOnlyClsMethod, - _V1ValidatorWithKwargsClsMethod, - _V1ValidatorWithValuesAndKwargsClsMethod, - _decorators_v1.V1ValidatorWithValues, - _decorators_v1.V1ValidatorWithValuesKwOnly, - _decorators_v1.V1ValidatorWithKwargs, - _decorators_v1.V1ValidatorWithValuesAndKwargs, - ] - - V1RootValidator = Union[ - _V1RootValidatorClsMethod, - _decorators_v1.V1RootValidatorFunction, - ] - - _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]] - - # Allow both a V1 (assumed pre=False) or V2 (assumed mode='after') validator - # We lie to type checkers and say we return the same thing we get - # but in reality we return a proxy object that _mostly_ behaves like the wrapped thing - _V1ValidatorType = TypeVar('_V1ValidatorType', V1Validator, _PartialClsOrStaticMethod) - _V1RootValidatorFunctionType = TypeVar( - '_V1RootValidatorFunctionType', - _decorators_v1.V1RootValidatorFunction, - _V1RootValidatorClsMethod, - _PartialClsOrStaticMethod, - ) -else: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - - -def validator( - __field: str, - *fields: str, - pre: bool = False, - each_item: bool = False, - always: bool = False, - check_fields: bool | None = None, - allow_reuse: bool = False, -) -> Callable[[_V1ValidatorType], _V1ValidatorType]: - """Decorate methods on the class indicating that they should be used to validate fields. - - Args: - __field (str): The first field the validator should be called on; this is separate - from `fields` to ensure an error is raised if you don't pass at least one. - *fields (str): Additional field(s) the validator should be called on. - pre (bool, optional): Whether this validator should be called before the standard - validators (else after). 
Defaults to False. - each_item (bool, optional): For complex objects (sets, lists etc.) whether to validate - individual elements rather than the whole object. Defaults to False. - always (bool, optional): Whether this method and other validators should be called even if - the value is missing. Defaults to False. - check_fields (bool | None, optional): Whether to check that the fields actually exist on the model. - Defaults to None. - allow_reuse (bool, optional): Whether to track and raise an error if another validator refers to - the decorated function. Defaults to False. - - Returns: - Callable: A decorator that can be used to decorate a - function to be used as a validator. - """ - if allow_reuse is True: # pragma: no cover - warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning) - fields = tuple((__field, *fields)) - if isinstance(fields[0], FunctionType): - raise PydanticUserError( - '`@validator` should be used with fields and keyword arguments, not bare. ' - "E.g. usage should be `@validator('', ...)`", - code='validator-no-fields', - ) - elif not all(isinstance(field, str) for field in fields): - raise PydanticUserError( - '`@validator` fields should be passed as separate string args. ' - "E.g. usage should be `@validator('', '', ...)`", - code='validator-invalid-fields', - ) - - warn( - 'Pydantic V1 style `@validator` validators are deprecated.' 
- ' You should migrate to Pydantic V2 style `@field_validator` validators,' - ' see the migration guide for more details', - DeprecationWarning, - stacklevel=2, - ) - - mode: Literal['before', 'after'] = 'before' if pre is True else 'after' - - def dec(f: Any) -> _decorators.PydanticDescriptorProxy[Any]: - if _decorators.is_instance_method_from_sig(f): - raise PydanticUserError( - '`@validator` cannot be applied to instance methods', code='validator-instance-method' - ) - # auto apply the @classmethod decorator - f = _decorators.ensure_classmethod_based_on_signature(f) - wrap = _decorators_v1.make_generic_v1_field_validator - validator_wrapper_info = _decorators.ValidatorDecoratorInfo( - fields=fields, - mode=mode, - each_item=each_item, - always=always, - check_fields=check_fields, - ) - return _decorators.PydanticDescriptorProxy(f, validator_wrapper_info, shim=wrap) - - return dec # type: ignore[return-value] - - -@overload -def root_validator( - *, - # if you don't specify `pre` the default is `pre=False` - # which means you need to specify `skip_on_failure=True` - skip_on_failure: Literal[True], - allow_reuse: bool = ..., -) -> Callable[ - [_V1RootValidatorFunctionType], - _V1RootValidatorFunctionType, -]: - ... - - -@overload -def root_validator( - *, - # if you specify `pre=True` then you don't need to specify - # `skip_on_failure`, in fact it is not allowed as an argument! - pre: Literal[True], - allow_reuse: bool = ..., -) -> Callable[ - [_V1RootValidatorFunctionType], - _V1RootValidatorFunctionType, -]: - ... - - -@overload -def root_validator( - *, - # if you explicitly specify `pre=False` then you - # MUST specify `skip_on_failure=True` - pre: Literal[False], - skip_on_failure: Literal[True], - allow_reuse: bool = ..., -) -> Callable[ - [_V1RootValidatorFunctionType], - _V1RootValidatorFunctionType, -]: - ... 
- - -def root_validator( - *__args, - pre: bool = False, - skip_on_failure: bool = False, - allow_reuse: bool = False, -) -> Any: - """Decorate methods on a model indicating that they should be used to validate (and perhaps - modify) data either before or after standard model parsing/validation is performed. - - Args: - pre (bool, optional): Whether this validator should be called before the standard - validators (else after). Defaults to False. - skip_on_failure (bool, optional): Whether to stop validation and return as soon as a - failure is encountered. Defaults to False. - allow_reuse (bool, optional): Whether to track and raise an error if another validator - refers to the decorated function. Defaults to False. - - Returns: - Any: A decorator that can be used to decorate a function to be used as a root_validator. - """ - warn( - 'Pydantic V1 style `@root_validator` validators are deprecated.' - ' You should migrate to Pydantic V2 style `@model_validator` validators,' - ' see the migration guide for more details', - DeprecationWarning, - stacklevel=2, - ) - - if __args: - # Ensure a nice error is raised if someone attempts to use the bare decorator - return root_validator()(*__args) # type: ignore - - if allow_reuse is True: # pragma: no cover - warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning) - mode: Literal['before', 'after'] = 'before' if pre is True else 'after' - if pre is False and skip_on_failure is not True: - raise PydanticUserError( - 'If you use `@root_validator` with pre=False (the default) you MUST specify `skip_on_failure=True`.' 
- ' Note that `@root_validator` is deprecated and should be replaced with `@model_validator`.', - code='root-validator-pre-skip', - ) - - wrap = partial(_decorators_v1.make_v1_generic_root_validator, pre=pre) - - def dec(f: Callable[..., Any] | classmethod[Any, Any, Any] | staticmethod[Any, Any]) -> Any: - if _decorators.is_instance_method_from_sig(f): - raise TypeError('`@root_validator` cannot be applied to instance methods') - # auto apply the @classmethod decorator - res = _decorators.ensure_classmethod_based_on_signature(f) - dec_info = _decorators.RootValidatorDecoratorInfo(mode=mode) - return _decorators.PydanticDescriptorProxy(res, dec_info, shim=wrap) - - return dec diff --git a/lib/pydantic/deprecated/config.py b/lib/pydantic/deprecated/config.py deleted file mode 100644 index 45400c65..00000000 --- a/lib/pydantic/deprecated/config.py +++ /dev/null @@ -1,72 +0,0 @@ -from __future__ import annotations as _annotations - -import warnings -from typing import TYPE_CHECKING, Any - -from typing_extensions import Literal, deprecated - -from .._internal import _config -from ..warnings import PydanticDeprecatedSince20 - -if not TYPE_CHECKING: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - -__all__ = 'BaseConfig', 'Extra' - - -class _ConfigMetaclass(type): - def __getattr__(self, item: str) -> Any: - try: - obj = _config.config_defaults[item] - warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning) - return obj - except KeyError as exc: - raise AttributeError(f"type object '{self.__name__}' has no attribute {exc}") from exc - - -@deprecated('BaseConfig is deprecated. Use the `pydantic.ConfigDict` instead.', category=PydanticDeprecatedSince20) -class BaseConfig(metaclass=_ConfigMetaclass): - """This class is only retained for backwards compatibility. - - !!! Warning "Deprecated" - BaseConfig is deprecated. 
Use the [`pydantic.ConfigDict`][pydantic.ConfigDict] instead. - """ - - def __getattr__(self, item: str) -> Any: - try: - obj = super().__getattribute__(item) - warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning) - return obj - except AttributeError as exc: - try: - return getattr(type(self), item) - except AttributeError: - # re-raising changes the displayed text to reflect that `self` is not a type - raise AttributeError(str(exc)) from exc - - def __init_subclass__(cls, **kwargs: Any) -> None: - warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning) - return super().__init_subclass__(**kwargs) - - -class _ExtraMeta(type): - def __getattribute__(self, __name: str) -> Any: - # The @deprecated decorator accesses other attributes, so we only emit a warning for the expected ones - if __name in {'allow', 'ignore', 'forbid'}: - warnings.warn( - "`pydantic.config.Extra` is deprecated, use literal values instead (e.g. `extra='allow'`)", - DeprecationWarning, - stacklevel=2, - ) - return super().__getattribute__(__name) - - -@deprecated( - "Extra is deprecated. Use literal values instead (e.g. `extra='allow'`)", category=PydanticDeprecatedSince20 -) -class Extra(metaclass=_ExtraMeta): - allow: Literal['allow'] = 'allow' - ignore: Literal['ignore'] = 'ignore' - forbid: Literal['forbid'] = 'forbid' diff --git a/lib/pydantic/deprecated/copy_internals.py b/lib/pydantic/deprecated/copy_internals.py deleted file mode 100644 index efe5de28..00000000 --- a/lib/pydantic/deprecated/copy_internals.py +++ /dev/null @@ -1,224 +0,0 @@ -from __future__ import annotations as _annotations - -import typing -from copy import deepcopy -from enum import Enum -from typing import Any, Tuple - -import typing_extensions - -from .._internal import ( - _model_construction, - _typing_extra, - _utils, -) - -if typing.TYPE_CHECKING: - from .. 
import BaseModel - from .._internal._utils import AbstractSetIntStr, MappingIntStrAny - - AnyClassMethod = classmethod[Any, Any, Any] - TupleGenerator = typing.Generator[Tuple[str, Any], None, None] - Model = typing.TypeVar('Model', bound='BaseModel') - # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope - IncEx: typing_extensions.TypeAlias = 'set[int] | set[str] | dict[int, Any] | dict[str, Any] | None' - -_object_setattr = _model_construction.object_setattr - - -def _iter( - self: BaseModel, - to_dict: bool = False, - by_alias: bool = False, - include: AbstractSetIntStr | MappingIntStrAny | None = None, - exclude: AbstractSetIntStr | MappingIntStrAny | None = None, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, -) -> TupleGenerator: - # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. - # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. 
- if exclude is not None: - exclude = _utils.ValueItems.merge( - {k: v.exclude for k, v in self.model_fields.items() if v.exclude is not None}, exclude - ) - - if include is not None: - include = _utils.ValueItems.merge({k: True for k in self.model_fields}, include, intersect=True) - - allowed_keys = _calculate_keys(self, include=include, exclude=exclude, exclude_unset=exclude_unset) # type: ignore - if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): - # huge boost for plain _iter() - yield from self.__dict__.items() - if self.__pydantic_extra__: - yield from self.__pydantic_extra__.items() - return - - value_exclude = _utils.ValueItems(self, exclude) if exclude is not None else None - value_include = _utils.ValueItems(self, include) if include is not None else None - - if self.__pydantic_extra__ is None: - items = self.__dict__.items() - else: - items = list(self.__dict__.items()) + list(self.__pydantic_extra__.items()) - - for field_key, v in items: - if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): - continue - - if exclude_defaults: - try: - field = self.model_fields[field_key] - except KeyError: - pass - else: - if not field.is_required() and field.default == v: - continue - - if by_alias and field_key in self.model_fields: - dict_key = self.model_fields[field_key].alias or field_key - else: - dict_key = field_key - - if to_dict or value_include or value_exclude: - v = _get_value( - type(self), - v, - to_dict=to_dict, - by_alias=by_alias, - include=value_include and value_include.for_element(field_key), - exclude=value_exclude and value_exclude.for_element(field_key), - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - yield dict_key, v - - -def _copy_and_set_values( - self: Model, - values: dict[str, Any], - fields_set: set[str], - extra: dict[str, Any] | None = None, - private: dict[str, Any] | None = 
None, - *, - deep: bool, # UP006 -) -> Model: - if deep: - # chances of having empty dict here are quite low for using smart_deepcopy - values = deepcopy(values) - extra = deepcopy(extra) - private = deepcopy(private) - - cls = self.__class__ - m = cls.__new__(cls) - _object_setattr(m, '__dict__', values) - _object_setattr(m, '__pydantic_extra__', extra) - _object_setattr(m, '__pydantic_fields_set__', fields_set) - _object_setattr(m, '__pydantic_private__', private) - - return m - - -@typing.no_type_check -def _get_value( - cls: type[BaseModel], - v: Any, - to_dict: bool, - by_alias: bool, - include: AbstractSetIntStr | MappingIntStrAny | None, - exclude: AbstractSetIntStr | MappingIntStrAny | None, - exclude_unset: bool, - exclude_defaults: bool, - exclude_none: bool, -) -> Any: - from .. import BaseModel - - if isinstance(v, BaseModel): - if to_dict: - return v.model_dump( - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - include=include, # type: ignore - exclude=exclude, # type: ignore - exclude_none=exclude_none, - ) - else: - return v.copy(include=include, exclude=exclude) - - value_exclude = _utils.ValueItems(v, exclude) if exclude else None - value_include = _utils.ValueItems(v, include) if include else None - - if isinstance(v, dict): - return { - k_: _get_value( - cls, - v_, - to_dict=to_dict, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - include=value_include and value_include.for_element(k_), - exclude=value_exclude and value_exclude.for_element(k_), - exclude_none=exclude_none, - ) - for k_, v_ in v.items() - if (not value_exclude or not value_exclude.is_excluded(k_)) - and (not value_include or value_include.is_included(k_)) - } - - elif _utils.sequence_like(v): - seq_args = ( - _get_value( - cls, - v_, - to_dict=to_dict, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - include=value_include and value_include.for_element(i), - 
exclude=value_exclude and value_exclude.for_element(i), - exclude_none=exclude_none, - ) - for i, v_ in enumerate(v) - if (not value_exclude or not value_exclude.is_excluded(i)) - and (not value_include or value_include.is_included(i)) - ) - - return v.__class__(*seq_args) if _typing_extra.is_namedtuple(v.__class__) else v.__class__(seq_args) - - elif isinstance(v, Enum) and getattr(cls.model_config, 'use_enum_values', False): - return v.value - - else: - return v - - -def _calculate_keys( - self: BaseModel, - include: MappingIntStrAny | None, - exclude: MappingIntStrAny | None, - exclude_unset: bool, - update: typing.Dict[str, Any] | None = None, # noqa UP006 -) -> typing.AbstractSet[str] | None: - if include is None and exclude is None and exclude_unset is False: - return None - - keys: typing.AbstractSet[str] - if exclude_unset: - keys = self.__pydantic_fields_set__.copy() - else: - keys = set(self.__dict__.keys()) - keys = keys | (self.__pydantic_extra__ or {}).keys() - - if include is not None: - keys &= include.keys() - - if update: - keys -= update.keys() - - if exclude: - keys -= {k for k, v in exclude.items() if _utils.ValueItems.is_true(v)} - - return keys diff --git a/lib/pydantic/deprecated/decorator.py b/lib/pydantic/deprecated/decorator.py deleted file mode 100644 index 36bd0690..00000000 --- a/lib/pydantic/deprecated/decorator.py +++ /dev/null @@ -1,279 +0,0 @@ -import warnings -from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload - -from typing_extensions import deprecated - -from .._internal import _config, _typing_extra -from ..alias_generators import to_pascal -from ..errors import PydanticUserError -from ..functional_validators import field_validator -from ..main import BaseModel, create_model -from ..warnings import PydanticDeprecatedSince20 - -if not TYPE_CHECKING: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and 
https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - -__all__ = ('validate_arguments',) - -if TYPE_CHECKING: - AnyCallable = Callable[..., Any] - - AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable) - ConfigType = Union[None, Type[Any], Dict[str, Any]] - - -@overload -def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']: - ... - - -@overload -def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': - ... - - -@deprecated( - 'The `validate_arguments` method is deprecated; use `validate_call` instead.', - category=None, -) -def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any: - """Decorator to validate the arguments passed to a function.""" - warnings.warn( - 'The `validate_arguments` method is deprecated; use `validate_call` instead.', - PydanticDeprecatedSince20, - stacklevel=2, - ) - - def validate(_func: 'AnyCallable') -> 'AnyCallable': - vd = ValidatedFunction(_func, config) - - @wraps(_func) - def wrapper_function(*args: Any, **kwargs: Any) -> Any: - return vd.call(*args, **kwargs) - - wrapper_function.vd = vd # type: ignore - wrapper_function.validate = vd.init_model_instance # type: ignore - wrapper_function.raw_function = vd.raw_function # type: ignore - wrapper_function.model = vd.model # type: ignore - return wrapper_function - - if func: - return validate(func) - else: - return validate - - -ALT_V_ARGS = 'v__args' -ALT_V_KWARGS = 'v__kwargs' -V_POSITIONAL_ONLY_NAME = 'v__positional_only' -V_DUPLICATE_KWARGS = 'v__duplicate_kwargs' - - -class ValidatedFunction: - def __init__(self, function: 'AnyCallable', config: 'ConfigType'): - from inspect import Parameter, signature - - parameters: Mapping[str, Parameter] = signature(function).parameters - - if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}: - raise PydanticUserError( - f'"{ALT_V_ARGS}", 
"{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" ' - f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator', - code=None, - ) - - self.raw_function = function - self.arg_mapping: Dict[int, str] = {} - self.positional_only_args: set[str] = set() - self.v_args_name = 'args' - self.v_kwargs_name = 'kwargs' - - type_hints = _typing_extra.get_type_hints(function, include_extras=True) - takes_args = False - takes_kwargs = False - fields: Dict[str, Tuple[Any, Any]] = {} - for i, (name, p) in enumerate(parameters.items()): - if p.annotation is p.empty: - annotation = Any - else: - annotation = type_hints[name] - - default = ... if p.default is p.empty else p.default - if p.kind == Parameter.POSITIONAL_ONLY: - self.arg_mapping[i] = name - fields[name] = annotation, default - fields[V_POSITIONAL_ONLY_NAME] = List[str], None - self.positional_only_args.add(name) - elif p.kind == Parameter.POSITIONAL_OR_KEYWORD: - self.arg_mapping[i] = name - fields[name] = annotation, default - fields[V_DUPLICATE_KWARGS] = List[str], None - elif p.kind == Parameter.KEYWORD_ONLY: - fields[name] = annotation, default - elif p.kind == Parameter.VAR_POSITIONAL: - self.v_args_name = name - fields[name] = Tuple[annotation, ...], None - takes_args = True - else: - assert p.kind == Parameter.VAR_KEYWORD, p.kind - self.v_kwargs_name = name - fields[name] = Dict[str, annotation], None - takes_kwargs = True - - # these checks avoid a clash between "args" and a field with that name - if not takes_args and self.v_args_name in fields: - self.v_args_name = ALT_V_ARGS - - # same with "kwargs" - if not takes_kwargs and self.v_kwargs_name in fields: - self.v_kwargs_name = ALT_V_KWARGS - - if not takes_args: - # we add the field so validation below can raise the correct exception - fields[self.v_args_name] = List[Any], None - - if not takes_kwargs: - # same with kwargs - fields[self.v_kwargs_name] = Dict[Any, Any], None - - 
self.create_model(fields, takes_args, takes_kwargs, config) - - def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel: - values = self.build_values(args, kwargs) - return self.model(**values) - - def call(self, *args: Any, **kwargs: Any) -> Any: - m = self.init_model_instance(*args, **kwargs) - return self.execute(m) - - def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]: - values: Dict[str, Any] = {} - if args: - arg_iter = enumerate(args) - while True: - try: - i, a = next(arg_iter) - except StopIteration: - break - arg_name = self.arg_mapping.get(i) - if arg_name is not None: - values[arg_name] = a - else: - values[self.v_args_name] = [a] + [a for _, a in arg_iter] - break - - var_kwargs: Dict[str, Any] = {} - wrong_positional_args = [] - duplicate_kwargs = [] - fields_alias = [ - field.alias - for name, field in self.model.model_fields.items() - if name not in (self.v_args_name, self.v_kwargs_name) - ] - non_var_fields = set(self.model.model_fields) - {self.v_args_name, self.v_kwargs_name} - for k, v in kwargs.items(): - if k in non_var_fields or k in fields_alias: - if k in self.positional_only_args: - wrong_positional_args.append(k) - if k in values: - duplicate_kwargs.append(k) - values[k] = v - else: - var_kwargs[k] = v - - if var_kwargs: - values[self.v_kwargs_name] = var_kwargs - if wrong_positional_args: - values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args - if duplicate_kwargs: - values[V_DUPLICATE_KWARGS] = duplicate_kwargs - return values - - def execute(self, m: BaseModel) -> Any: - d = {k: v for k, v in m.__dict__.items() if k in m.__pydantic_fields_set__ or m.model_fields[k].default_factory} - var_kwargs = d.pop(self.v_kwargs_name, {}) - - if self.v_args_name in d: - args_: List[Any] = [] - in_kwargs = False - kwargs = {} - for name, value in d.items(): - if in_kwargs: - kwargs[name] = value - elif name == self.v_args_name: - args_ += value - in_kwargs = True - else: - args_.append(value) - 
return self.raw_function(*args_, **kwargs, **var_kwargs) - elif self.positional_only_args: - args_ = [] - kwargs = {} - for name, value in d.items(): - if name in self.positional_only_args: - args_.append(value) - else: - kwargs[name] = value - return self.raw_function(*args_, **kwargs, **var_kwargs) - else: - return self.raw_function(**d, **var_kwargs) - - def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None: - pos_args = len(self.arg_mapping) - - config_wrapper = _config.ConfigWrapper(config) - - if config_wrapper.alias_generator: - raise PydanticUserError( - 'Setting the "alias_generator" property on custom Config for ' - '@validate_arguments is not yet supported, please remove.', - code=None, - ) - if config_wrapper.extra is None: - config_wrapper.config_dict['extra'] = 'forbid' - - class DecoratorBaseModel(BaseModel): - @field_validator(self.v_args_name, check_fields=False) - @classmethod - def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]: - if takes_args or v is None: - return v - - raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given') - - @field_validator(self.v_kwargs_name, check_fields=False) - @classmethod - def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: - if takes_kwargs or v is None: - return v - - plural = '' if len(v) == 1 else 's' - keys = ', '.join(map(repr, v.keys())) - raise TypeError(f'unexpected keyword argument{plural}: {keys}') - - @field_validator(V_POSITIONAL_ONLY_NAME, check_fields=False) - @classmethod - def check_positional_only(cls, v: Optional[List[str]]) -> None: - if v is None: - return - - plural = '' if len(v) == 1 else 's' - keys = ', '.join(map(repr, v)) - raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}') - - @field_validator(V_DUPLICATE_KWARGS, check_fields=False) - @classmethod - def check_duplicate_kwargs(cls, v: Optional[List[str]]) 
-> None: - if v is None: - return - - plural = '' if len(v) == 1 else 's' - keys = ', '.join(map(repr, v)) - raise TypeError(f'multiple values for argument{plural}: {keys}') - - model_config = config_wrapper.config_dict - - self.model = create_model(to_pascal(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields) diff --git a/lib/pydantic/deprecated/json.py b/lib/pydantic/deprecated/json.py deleted file mode 100644 index 79e2f44a..00000000 --- a/lib/pydantic/deprecated/json.py +++ /dev/null @@ -1,140 +0,0 @@ -import datetime -import warnings -from collections import deque -from decimal import Decimal -from enum import Enum -from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network -from pathlib import Path -from re import Pattern -from types import GeneratorType -from typing import TYPE_CHECKING, Any, Callable, Dict, Type, Union -from uuid import UUID - -from typing_extensions import deprecated - -from ..color import Color -from ..networks import NameEmail -from ..types import SecretBytes, SecretStr -from ..warnings import PydanticDeprecatedSince20 - -if not TYPE_CHECKING: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - -__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat' - - -def isoformat(o: Union[datetime.date, datetime.time]) -> str: - return o.isoformat() - - -def decimal_encoder(dec_value: Decimal) -> Union[int, float]: - """Encodes a Decimal as int of there's no exponent, otherwise float. - - This is useful when we use ConstrainedDecimal to represent Numeric(x,0) - where a integer (but not int typed) is used. Encoding this as a float - results in failed round-tripping between encode and parse. - Our Id type is a prime example of this. 
- - >>> decimal_encoder(Decimal("1.0")) - 1.0 - - >>> decimal_encoder(Decimal("1")) - 1 - """ - exponent = dec_value.as_tuple().exponent - if isinstance(exponent, int) and exponent >= 0: - return int(dec_value) - else: - return float(dec_value) - - -ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { - bytes: lambda o: o.decode(), - Color: str, - datetime.date: isoformat, - datetime.datetime: isoformat, - datetime.time: isoformat, - datetime.timedelta: lambda td: td.total_seconds(), - Decimal: decimal_encoder, - Enum: lambda o: o.value, - frozenset: list, - deque: list, - GeneratorType: list, - IPv4Address: str, - IPv4Interface: str, - IPv4Network: str, - IPv6Address: str, - IPv6Interface: str, - IPv6Network: str, - NameEmail: str, - Path: str, - Pattern: lambda o: o.pattern, - SecretBytes: str, - SecretStr: str, - set: list, - UUID: str, -} - - -@deprecated( - '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.', - category=None, -) -def pydantic_encoder(obj: Any) -> Any: - warnings.warn( - '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.', - category=PydanticDeprecatedSince20, - stacklevel=2, - ) - from dataclasses import asdict, is_dataclass - - from ..main import BaseModel - - if isinstance(obj, BaseModel): - return obj.model_dump() - elif is_dataclass(obj): - return asdict(obj) - - # Check the class type and its superclasses for a matching encoder - for base in obj.__class__.__mro__[:-1]: - try: - encoder = ENCODERS_BY_TYPE[base] - except KeyError: - continue - return encoder(obj) - else: # We have exited the for loop without finding a suitable encoder - raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") - - -# TODO: Add a suggested migration path once there is a way to use custom encoders -@deprecated( - '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.', - category=None, -) -def custom_pydantic_encoder(type_encoders: 
Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: - warnings.warn( - '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.', - category=PydanticDeprecatedSince20, - stacklevel=2, - ) - # Check the class type and its superclasses for a matching encoder - for base in obj.__class__.__mro__[:-1]: - try: - encoder = type_encoders[base] - except KeyError: - continue - - return encoder(obj) - else: # We have exited the for loop without finding a suitable encoder - return pydantic_encoder(obj) - - -@deprecated('`timedelta_isoformat` is deprecated.', category=None) -def timedelta_isoformat(td: datetime.timedelta) -> str: - """ISO 8601 encoding for Python timedelta object.""" - warnings.warn('`timedelta_isoformat` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2) - minutes, seconds = divmod(td.seconds, 60) - hours, minutes = divmod(minutes, 60) - return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' diff --git a/lib/pydantic/deprecated/parse.py b/lib/pydantic/deprecated/parse.py deleted file mode 100644 index 2a92e62b..00000000 --- a/lib/pydantic/deprecated/parse.py +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import annotations - -import json -import pickle -import warnings -from enum import Enum -from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable - -from typing_extensions import deprecated - -from ..warnings import PydanticDeprecatedSince20 - -if not TYPE_CHECKING: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - - -class Protocol(str, Enum): - json = 'json' - pickle = 'pickle' - - -@deprecated('`load_str_bytes` is deprecated.', category=None) -def load_str_bytes( - b: str | bytes, - *, - content_type: str | None = None, - encoding: str = 'utf8', - proto: Protocol | None = None, - allow_pickle: bool = False, - 
json_loads: Callable[[str], Any] = json.loads, -) -> Any: - warnings.warn('`load_str_bytes` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2) - if proto is None and content_type: - if content_type.endswith(('json', 'javascript')): - pass - elif allow_pickle and content_type.endswith('pickle'): - proto = Protocol.pickle - else: - raise TypeError(f'Unknown content-type: {content_type}') - - proto = proto or Protocol.json - - if proto == Protocol.json: - if isinstance(b, bytes): - b = b.decode(encoding) - return json_loads(b) # type: ignore - elif proto == Protocol.pickle: - if not allow_pickle: - raise RuntimeError('Trying to decode with pickle with allow_pickle=False') - bb = b if isinstance(b, bytes) else b.encode() # type: ignore - return pickle.loads(bb) - else: - raise TypeError(f'Unknown protocol: {proto}') - - -@deprecated('`load_file` is deprecated.', category=None) -def load_file( - path: str | Path, - *, - content_type: str | None = None, - encoding: str = 'utf8', - proto: Protocol | None = None, - allow_pickle: bool = False, - json_loads: Callable[[str], Any] = json.loads, -) -> Any: - warnings.warn('`load_file` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2) - path = Path(path) - b = path.read_bytes() - if content_type is None: - if path.suffix in ('.js', '.json'): - proto = Protocol.json - elif path.suffix == '.pkl': - proto = Protocol.pickle - - return load_str_bytes( - b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads - ) diff --git a/lib/pydantic/deprecated/tools.py b/lib/pydantic/deprecated/tools.py deleted file mode 100644 index b04eae40..00000000 --- a/lib/pydantic/deprecated/tools.py +++ /dev/null @@ -1,103 +0,0 @@ -from __future__ import annotations - -import json -import warnings -from typing import TYPE_CHECKING, Any, Callable, Type, TypeVar, Union - -from typing_extensions import deprecated - -from ..json_schema import DEFAULT_REF_TEMPLATE, 
GenerateJsonSchema -from ..type_adapter import TypeAdapter -from ..warnings import PydanticDeprecatedSince20 - -if not TYPE_CHECKING: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - -__all__ = 'parse_obj_as', 'schema_of', 'schema_json_of' - -NameFactory = Union[str, Callable[[Type[Any]], str]] - - -T = TypeVar('T') - - -@deprecated( - '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.', - category=None, -) -def parse_obj_as(type_: type[T], obj: Any, type_name: NameFactory | None = None) -> T: - warnings.warn( - '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.', - category=PydanticDeprecatedSince20, - stacklevel=2, - ) - if type_name is not None: # pragma: no cover - warnings.warn( - 'The type_name parameter is deprecated. parse_obj_as no longer creates temporary models', - DeprecationWarning, - stacklevel=2, - ) - return TypeAdapter(type_).validate_python(obj) - - -@deprecated( - '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.', - category=None, -) -def schema_of( - type_: Any, - *, - title: NameFactory | None = None, - by_alias: bool = True, - ref_template: str = DEFAULT_REF_TEMPLATE, - schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, -) -> dict[str, Any]: - """Generate a JSON schema (as dict) for the passed model or dynamically generated one.""" - warnings.warn( - '`schema_of` is deprecated. 
Use `pydantic.TypeAdapter.json_schema` instead.', - category=PydanticDeprecatedSince20, - stacklevel=2, - ) - res = TypeAdapter(type_).json_schema( - by_alias=by_alias, - schema_generator=schema_generator, - ref_template=ref_template, - ) - if title is not None: - if isinstance(title, str): - res['title'] = title - else: - warnings.warn( - 'Passing a callable for the `title` parameter is deprecated and no longer supported', - DeprecationWarning, - stacklevel=2, - ) - res['title'] = title(type_) - return res - - -@deprecated( - '`schema_json_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.', - category=None, -) -def schema_json_of( - type_: Any, - *, - title: NameFactory | None = None, - by_alias: bool = True, - ref_template: str = DEFAULT_REF_TEMPLATE, - schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, - **dumps_kwargs: Any, -) -> str: - """Generate a JSON schema (as JSON) for the passed model or dynamically generated one.""" - warnings.warn( - '`schema_json_of` is deprecated. 
Use `pydantic.TypeAdapter.json_schema` instead.', - category=PydanticDeprecatedSince20, - stacklevel=2, - ) - return json.dumps( - schema_of(type_, title=title, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator), - **dumps_kwargs, - ) diff --git a/lib/pydantic/env_settings.py b/lib/pydantic/env_settings.py index 662f5900..e9988c01 100644 --- a/lib/pydantic/env_settings.py +++ b/lib/pydantic/env_settings.py @@ -1,4 +1,346 @@ -"""The `env_settings` module is a backport module from V1.""" -from ._migration import getattr_migration +import os +import warnings +from pathlib import Path +from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union -__getattr__ = getattr_migration(__name__) +from .config import BaseConfig, Extra +from .fields import ModelField +from .main import BaseModel +from .typing import StrPath, display_as_type, get_origin, is_union +from .utils import deep_update, path_type, sequence_like + +env_file_sentinel = str(object()) + +SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]] +DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]] + + +class SettingsError(ValueError): + pass + + +class BaseSettings(BaseModel): + """ + Base class for settings, allowing values to be overridden by environment variables. + + This is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose), + Heroku and any 12 factor app design. 
+ """ + + def __init__( + __pydantic_self__, + _env_file: Optional[DotenvType] = env_file_sentinel, + _env_file_encoding: Optional[str] = None, + _env_nested_delimiter: Optional[str] = None, + _secrets_dir: Optional[StrPath] = None, + **values: Any, + ) -> None: + # Uses something other than `self` the first arg to allow "self" as a settable attribute + super().__init__( + **__pydantic_self__._build_values( + values, + _env_file=_env_file, + _env_file_encoding=_env_file_encoding, + _env_nested_delimiter=_env_nested_delimiter, + _secrets_dir=_secrets_dir, + ) + ) + + def _build_values( + self, + init_kwargs: Dict[str, Any], + _env_file: Optional[DotenvType] = None, + _env_file_encoding: Optional[str] = None, + _env_nested_delimiter: Optional[str] = None, + _secrets_dir: Optional[StrPath] = None, + ) -> Dict[str, Any]: + # Configure built-in sources + init_settings = InitSettingsSource(init_kwargs=init_kwargs) + env_settings = EnvSettingsSource( + env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file), + env_file_encoding=( + _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding + ), + env_nested_delimiter=( + _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter + ), + env_prefix_len=len(self.__config__.env_prefix), + ) + file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir) + # Provide a hook to set built-in sources priority and add / remove sources + sources = self.__config__.customise_sources( + init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings + ) + if sources: + return deep_update(*reversed([source(self) for source in sources])) + else: + # no one should mean to do this, but I think returning an empty dict is marginally preferable + # to an informative error and much better than a confusing error + return {} + + class Config(BaseConfig): + env_prefix: 
str = '' + env_file: Optional[DotenvType] = None + env_file_encoding: Optional[str] = None + env_nested_delimiter: Optional[str] = None + secrets_dir: Optional[StrPath] = None + validate_all: bool = True + extra: Extra = Extra.forbid + arbitrary_types_allowed: bool = True + case_sensitive: bool = False + + @classmethod + def prepare_field(cls, field: ModelField) -> None: + env_names: Union[List[str], AbstractSet[str]] + field_info_from_config = cls.get_field_info(field.name) + + env = field_info_from_config.get('env') or field.field_info.extra.get('env') + if env is None: + if field.has_alias: + warnings.warn( + 'aliases are no longer used by BaseSettings to define which environment variables to read. ' + 'Instead use the "env" field setting. ' + 'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names', + FutureWarning, + ) + env_names = {cls.env_prefix + field.name} + elif isinstance(env, str): + env_names = {env} + elif isinstance(env, (set, frozenset)): + env_names = env + elif sequence_like(env): + env_names = list(env) + else: + raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set') + + if not cls.case_sensitive: + env_names = env_names.__class__(n.lower() for n in env_names) + field.field_info.extra['env_names'] = env_names + + @classmethod + def customise_sources( + cls, + init_settings: SettingsSourceCallable, + env_settings: SettingsSourceCallable, + file_secret_settings: SettingsSourceCallable, + ) -> Tuple[SettingsSourceCallable, ...]: + return init_settings, env_settings, file_secret_settings + + @classmethod + def parse_env_var(cls, field_name: str, raw_val: str) -> Any: + return cls.json_loads(raw_val) + + # populated by the metaclass using the Config class defined above, annotated here to help IDEs only + __config__: ClassVar[Type[Config]] + + +class InitSettingsSource: + __slots__ = ('init_kwargs',) + + def __init__(self, init_kwargs: Dict[str, Any]): + self.init_kwargs = 
init_kwargs + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: + return self.init_kwargs + + def __repr__(self) -> str: + return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})' + + +class EnvSettingsSource: + __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter', 'env_prefix_len') + + def __init__( + self, + env_file: Optional[DotenvType], + env_file_encoding: Optional[str], + env_nested_delimiter: Optional[str] = None, + env_prefix_len: int = 0, + ): + self.env_file: Optional[DotenvType] = env_file + self.env_file_encoding: Optional[str] = env_file_encoding + self.env_nested_delimiter: Optional[str] = env_nested_delimiter + self.env_prefix_len: int = env_prefix_len + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901 + """ + Build environment variables suitable for passing to the Model. + """ + d: Dict[str, Any] = {} + + if settings.__config__.case_sensitive: + env_vars: Mapping[str, Optional[str]] = os.environ + else: + env_vars = {k.lower(): v for k, v in os.environ.items()} + + dotenv_vars = self._read_env_files(settings.__config__.case_sensitive) + if dotenv_vars: + env_vars = {**dotenv_vars, **env_vars} + + for field in settings.__fields__.values(): + env_val: Optional[str] = None + for env_name in field.field_info.extra['env_names']: + env_val = env_vars.get(env_name) + if env_val is not None: + break + + is_complex, allow_parse_failure = self.field_is_complex(field) + if is_complex: + if env_val is None: + # field is complex but no value found so far, try explode_env_vars + env_val_built = self.explode_env_vars(field, env_vars) + if env_val_built: + d[field.alias] = env_val_built + else: + # field is complex and there's a value, decode that as JSON, then add explode_env_vars + try: + env_val = settings.__config__.parse_env_var(field.name, env_val) + except ValueError as e: + if not allow_parse_failure: + raise SettingsError(f'error parsing env var "{env_name}"') from e + + if isinstance(env_val, 
dict): + d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars)) + else: + d[field.alias] = env_val + elif env_val is not None: + # simplest case, field is not complex, we only need to add the value if it was found + d[field.alias] = env_val + + return d + + def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]: + env_files = self.env_file + if env_files is None: + return {} + + if isinstance(env_files, (str, os.PathLike)): + env_files = [env_files] + + dotenv_vars = {} + for env_file in env_files: + env_path = Path(env_file).expanduser() + if env_path.is_file(): + dotenv_vars.update( + read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive) + ) + + return dotenv_vars + + def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]: + """ + Find out if a field is complex, and if so whether JSON errors should be ignored + """ + if field.is_complex(): + allow_parse_failure = False + elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields): + allow_parse_failure = True + else: + return False, False + + return True, allow_parse_failure + + def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]: + """ + Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries. + + This is applied to a single field, hence filtering by env_var prefix. 
+ """ + prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']] + result: Dict[str, Any] = {} + for env_name, env_val in env_vars.items(): + if not any(env_name.startswith(prefix) for prefix in prefixes): + continue + # we remove the prefix before splitting in case the prefix has characters in common with the delimiter + env_name_without_prefix = env_name[self.env_prefix_len :] + _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter) + env_var = result + for key in keys: + env_var = env_var.setdefault(key, {}) + env_var[last_key] = env_val + + return result + + def __repr__(self) -> str: + return ( + f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, ' + f'env_nested_delimiter={self.env_nested_delimiter!r})' + ) + + +class SecretsSettingsSource: + __slots__ = ('secrets_dir',) + + def __init__(self, secrets_dir: Optional[StrPath]): + self.secrets_dir: Optional[StrPath] = secrets_dir + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: + """ + Build fields from "secrets" files. 
+ """ + secrets: Dict[str, Optional[str]] = {} + + if self.secrets_dir is None: + return secrets + + secrets_path = Path(self.secrets_dir).expanduser() + + if not secrets_path.exists(): + warnings.warn(f'directory "{secrets_path}" does not exist') + return secrets + + if not secrets_path.is_dir(): + raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}') + + for field in settings.__fields__.values(): + for env_name in field.field_info.extra['env_names']: + path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive) + if not path: + # path does not exist, we curently don't return a warning for this + continue + + if path.is_file(): + secret_value = path.read_text().strip() + if field.is_complex(): + try: + secret_value = settings.__config__.parse_env_var(field.name, secret_value) + except ValueError as e: + raise SettingsError(f'error parsing env var "{env_name}"') from e + + secrets[field.alias] = secret_value + else: + warnings.warn( + f'attempted to load secret file "{path}" but found a {path_type(path)} instead.', + stacklevel=4, + ) + return secrets + + def __repr__(self) -> str: + return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})' + + +def read_env_file( + file_path: StrPath, *, encoding: str = None, case_sensitive: bool = False +) -> Dict[str, Optional[str]]: + try: + from dotenv import dotenv_values + except ImportError as e: + raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e + + file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8') + if not case_sensitive: + return {k.lower(): v for k, v in file_vars.items()} + else: + return file_vars + + +def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]: + """ + Find a file within path's directory matching filename, optionally ignoring case. 
+ """ + for f in dir_path.iterdir(): + if f.name == file_name: + return f + elif not case_sensitive and f.name.lower() == file_name.lower(): + return f + return None diff --git a/lib/pydantic/error_wrappers.py b/lib/pydantic/error_wrappers.py index 5144eeee..5d3204f4 100644 --- a/lib/pydantic/error_wrappers.py +++ b/lib/pydantic/error_wrappers.py @@ -1,4 +1,162 @@ -"""The `error_wrappers` module is a backport module from V1.""" -from ._migration import getattr_migration +import json +from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union -__getattr__ = getattr_migration(__name__) +from .json import pydantic_encoder +from .utils import Representation + +if TYPE_CHECKING: + from typing_extensions import TypedDict + + from .config import BaseConfig + from .types import ModelOrDc + from .typing import ReprArgs + + Loc = Tuple[Union[int, str], ...] + + class _ErrorDictRequired(TypedDict): + loc: Loc + msg: str + type: str + + class ErrorDict(_ErrorDictRequired, total=False): + ctx: Dict[str, Any] + + +__all__ = 'ErrorWrapper', 'ValidationError' + + +class ErrorWrapper(Representation): + __slots__ = 'exc', '_loc' + + def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: + self.exc = exc + self._loc = loc + + def loc_tuple(self) -> 'Loc': + if isinstance(self._loc, tuple): + return self._loc + else: + return (self._loc,) + + def __repr_args__(self) -> 'ReprArgs': + return [('exc', self.exc), ('loc', self.loc_tuple())] + + +# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] +# but recursive, therefore just use: +ErrorList = Union[Sequence[Any], ErrorWrapper] + + +class ValidationError(Representation, ValueError): + __slots__ = 'raw_errors', 'model', '_error_cache' + + def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None: + self.raw_errors = errors + self.model = model + self._error_cache: Optional[List['ErrorDict']] = None + + def errors(self) -> 
List['ErrorDict']: + if self._error_cache is None: + try: + config = self.model.__config__ # type: ignore + except AttributeError: + config = self.model.__pydantic_model__.__config__ # type: ignore + self._error_cache = list(flatten_errors(self.raw_errors, config)) + return self._error_cache + + def json(self, *, indent: Union[None, int, str] = 2) -> str: + return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) + + def __str__(self) -> str: + errors = self.errors() + no_errors = len(errors) + return ( + f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n' + f'{display_errors(errors)}' + ) + + def __repr_args__(self) -> 'ReprArgs': + return [('model', self.model.__name__), ('errors', self.errors())] + + +def display_errors(errors: List['ErrorDict']) -> str: + return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) + + +def _display_error_loc(error: 'ErrorDict') -> str: + return ' -> '.join(str(e) for e in error['loc']) + + +def _display_error_type_and_ctx(error: 'ErrorDict') -> str: + t = 'type=' + error['type'] + ctx = error.get('ctx') + if ctx: + return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) + else: + return t + + +def flatten_errors( + errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None +) -> Generator['ErrorDict', None, None]: + for error in errors: + if isinstance(error, ErrorWrapper): + + if loc: + error_loc = loc + error.loc_tuple() + else: + error_loc = error.loc_tuple() + + if isinstance(error.exc, ValidationError): + yield from flatten_errors(error.exc.raw_errors, config, error_loc) + else: + yield error_dict(error.exc, config, error_loc) + elif isinstance(error, list): + yield from flatten_errors(error, config, loc=loc) + else: + raise RuntimeError(f'Unknown error object: {error}') + + +def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict': + type_ = get_exc_type(exc.__class__) 
+ msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None) + ctx = exc.__dict__ + if msg_template: + msg = msg_template.format(**ctx) + else: + msg = str(exc) + + d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_} + + if ctx: + d['ctx'] = ctx + + return d + + +_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} + + +def get_exc_type(cls: Type[Exception]) -> str: + # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up + try: + return _EXC_TYPE_CACHE[cls] + except KeyError: + r = _get_exc_type(cls) + _EXC_TYPE_CACHE[cls] = r + return r + + +def _get_exc_type(cls: Type[Exception]) -> str: + if issubclass(cls, AssertionError): + return 'assertion_error' + + base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' + if cls in (TypeError, ValueError): + # just TypeError or ValueError, no extra code + return base_name + + # if it's not a TypeError or ValueError, we just take the lowercase of the exception name + # no chaining or snake case logic, use "code" for more complex error types. + code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() + return base_name + '.' + code diff --git a/lib/pydantic/errors.py b/lib/pydantic/errors.py index c5fa9612..7bdafdd1 100644 --- a/lib/pydantic/errors.py +++ b/lib/pydantic/errors.py @@ -1,152 +1,646 @@ -"""Pydantic-specific errors.""" -from __future__ import annotations as _annotations +from decimal import Decimal +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union -import re +from .typing import display_as_type -from typing_extensions import Literal, Self - -from ._migration import getattr_migration -from .version import version_short +if TYPE_CHECKING: + from .typing import DictStrAny +# explicitly state exports to avoid "from .errors import *" also importing Decimal, Path etc. 
__all__ = ( - 'PydanticUserError', - 'PydanticUndefinedAnnotation', - 'PydanticImportError', - 'PydanticSchemaGenerationError', - 'PydanticInvalidForJsonSchema', - 'PydanticErrorCodes', + 'PydanticTypeError', + 'PydanticValueError', + 'ConfigError', + 'MissingError', + 'ExtraError', + 'NoneIsNotAllowedError', + 'NoneIsAllowedError', + 'WrongConstantError', + 'NotNoneError', + 'BoolError', + 'BytesError', + 'DictError', + 'EmailError', + 'UrlError', + 'UrlSchemeError', + 'UrlSchemePermittedError', + 'UrlUserInfoError', + 'UrlHostError', + 'UrlHostTldError', + 'UrlPortError', + 'UrlExtraError', + 'EnumError', + 'IntEnumError', + 'EnumMemberError', + 'IntegerError', + 'FloatError', + 'PathError', + 'PathNotExistsError', + 'PathNotAFileError', + 'PathNotADirectoryError', + 'PyObjectError', + 'SequenceError', + 'ListError', + 'SetError', + 'FrozenSetError', + 'TupleError', + 'TupleLengthError', + 'ListMinLengthError', + 'ListMaxLengthError', + 'ListUniqueItemsError', + 'SetMinLengthError', + 'SetMaxLengthError', + 'FrozenSetMinLengthError', + 'FrozenSetMaxLengthError', + 'AnyStrMinLengthError', + 'AnyStrMaxLengthError', + 'StrError', + 'StrRegexError', + 'NumberNotGtError', + 'NumberNotGeError', + 'NumberNotLtError', + 'NumberNotLeError', + 'NumberNotMultipleError', + 'DecimalError', + 'DecimalIsNotFiniteError', + 'DecimalMaxDigitsError', + 'DecimalMaxPlacesError', + 'DecimalWholeDigitsError', + 'DateTimeError', + 'DateError', + 'DateNotInThePastError', + 'DateNotInTheFutureError', + 'TimeError', + 'DurationError', + 'HashableError', + 'UUIDError', + 'UUIDVersionError', + 'ArbitraryTypeError', + 'ClassError', + 'SubclassError', + 'JsonError', + 'JsonTypeError', + 'PatternError', + 'DataclassTypeError', + 'CallableError', + 'IPvAnyAddressError', + 'IPvAnyInterfaceError', + 'IPvAnyNetworkError', + 'IPv4AddressError', + 'IPv6AddressError', + 'IPv4NetworkError', + 'IPv6NetworkError', + 'IPv4InterfaceError', + 'IPv6InterfaceError', + 'ColorError', + 'StrictBoolError', + 
'NotDigitError', + 'LuhnValidationError', + 'InvalidLengthForBrand', + 'InvalidByteSize', + 'InvalidByteSizeUnit', + 'MissingDiscriminator', + 'InvalidDiscriminator', ) -# We use this URL to allow for future flexibility about how we host the docs, while allowing for Pydantic -# code in the while with "old" URLs to still work. -# 'u' refers to "user errors" - e.g. errors caused by developers using pydantic, as opposed to validation errors. -DEV_ERROR_DOCS_URL = f'https://errors.pydantic.dev/{version_short()}/u/' -PydanticErrorCodes = Literal[ - 'class-not-fully-defined', - 'custom-json-schema', - 'decorator-missing-field', - 'discriminator-no-field', - 'discriminator-alias-type', - 'discriminator-needs-literal', - 'discriminator-alias', - 'discriminator-validator', - 'callable-discriminator-no-tag', - 'typed-dict-version', - 'model-field-overridden', - 'model-field-missing-annotation', - 'config-both', - 'removed-kwargs', - 'invalid-for-json-schema', - 'json-schema-already-used', - 'base-model-instantiated', - 'undefined-annotation', - 'schema-for-unknown-type', - 'import-error', - 'create-model-field-definitions', - 'create-model-config-base', - 'validator-no-fields', - 'validator-invalid-fields', - 'validator-instance-method', - 'root-validator-pre-skip', - 'model-serializer-instance-method', - 'validator-field-config-info', - 'validator-v1-signature', - 'validator-signature', - 'field-serializer-signature', - 'model-serializer-signature', - 'multiple-field-serializers', - 'invalid_annotated_type', - 'type-adapter-config-unused', - 'root-model-extra', - 'unevaluable-type-annotation', - 'dataclass-init-false-extra-allow', - 'clashing-init-and-init-var', -] + +def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin': + """ + For built-in exceptions like ValueError or TypeError, we need to implement + __reduce__ to override the default behaviour (instead of __getstate__/__setstate__) + By default pickle protocol 2 calls 
`cls.__new__(cls, *args)`. + Since we only use kwargs, we need a little constructor to change that. + Note: the callable can't be a lambda as pickle looks in the namespace to find it + """ + return cls(**ctx) class PydanticErrorMixin: - """A mixin class for common functionality shared by all Pydantic-specific errors. + code: str + msg_template: str - Attributes: - message: A message describing the error. - code: An optional error code from PydanticErrorCodes enum. - """ - - def __init__(self, message: str, *, code: PydanticErrorCodes | None) -> None: - self.message = message - self.code = code + def __init__(self, **ctx: Any) -> None: + self.__dict__ = ctx def __str__(self) -> str: - if self.code is None: - return self.message - else: - return f'{self.message}\n\nFor further information visit {DEV_ERROR_DOCS_URL}{self.code}' + return self.msg_template.format(**self.__dict__) + + def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]: + return cls_kwargs, (self.__class__, self.__dict__) -class PydanticUserError(PydanticErrorMixin, TypeError): - """An error raised due to incorrect use of Pydantic.""" +class PydanticTypeError(PydanticErrorMixin, TypeError): + pass -class PydanticUndefinedAnnotation(PydanticErrorMixin, NameError): - """A subclass of `NameError` raised when handling undefined annotations during `CoreSchema` generation. - - Attributes: - name: Name of the error. - message: Description of the error. - """ - - def __init__(self, name: str, message: str) -> None: - self.name = name - super().__init__(message=message, code='undefined-annotation') - - @classmethod - def from_name_error(cls, name_error: NameError) -> Self: - """Convert a `NameError` to a `PydanticUndefinedAnnotation` error. - - Args: - name_error: `NameError` to be converted. - - Returns: - Converted `PydanticUndefinedAnnotation` error. 
- """ - try: - name = name_error.name # type: ignore # python > 3.10 - except AttributeError: - name = re.search(r".*'(.+?)'", str(name_error)).group(1) # type: ignore[union-attr] - return cls(name=name, message=str(name_error)) +class PydanticValueError(PydanticErrorMixin, ValueError): + pass -class PydanticImportError(PydanticErrorMixin, ImportError): - """An error raised when an import fails due to module changes between V1 and V2. - - Attributes: - message: Description of the error. - """ - - def __init__(self, message: str) -> None: - super().__init__(message, code='import-error') +class ConfigError(RuntimeError): + pass -class PydanticSchemaGenerationError(PydanticUserError): - """An error raised during failures to generate a `CoreSchema` for some type. - - Attributes: - message: Description of the error. - """ - - def __init__(self, message: str) -> None: - super().__init__(message, code='schema-for-unknown-type') +class MissingError(PydanticValueError): + msg_template = 'field required' -class PydanticInvalidForJsonSchema(PydanticUserError): - """An error raised during failures to generate a JSON schema for some `CoreSchema`. - - Attributes: - message: Description of the error. 
- """ - - def __init__(self, message: str) -> None: - super().__init__(message, code='invalid-for-json-schema') +class ExtraError(PydanticValueError): + msg_template = 'extra fields not permitted' -__getattr__ = getattr_migration(__name__) +class NoneIsNotAllowedError(PydanticTypeError): + code = 'none.not_allowed' + msg_template = 'none is not an allowed value' + + +class NoneIsAllowedError(PydanticTypeError): + code = 'none.allowed' + msg_template = 'value is not none' + + +class WrongConstantError(PydanticValueError): + code = 'const' + + def __str__(self) -> str: + permitted = ', '.join(repr(v) for v in self.permitted) # type: ignore + return f'unexpected value; permitted: {permitted}' + + +class NotNoneError(PydanticTypeError): + code = 'not_none' + msg_template = 'value is not None' + + +class BoolError(PydanticTypeError): + msg_template = 'value could not be parsed to a boolean' + + +class BytesError(PydanticTypeError): + msg_template = 'byte type expected' + + +class DictError(PydanticTypeError): + msg_template = 'value is not a valid dict' + + +class EmailError(PydanticValueError): + msg_template = 'value is not a valid email address' + + +class UrlError(PydanticValueError): + code = 'url' + + +class UrlSchemeError(UrlError): + code = 'url.scheme' + msg_template = 'invalid or missing URL scheme' + + +class UrlSchemePermittedError(UrlError): + code = 'url.scheme' + msg_template = 'URL scheme not permitted' + + def __init__(self, allowed_schemes: Set[str]): + super().__init__(allowed_schemes=allowed_schemes) + + +class UrlUserInfoError(UrlError): + code = 'url.userinfo' + msg_template = 'userinfo required in URL but missing' + + +class UrlHostError(UrlError): + code = 'url.host' + msg_template = 'URL host invalid' + + +class UrlHostTldError(UrlError): + code = 'url.host' + msg_template = 'URL host invalid, top level domain required' + + +class UrlPortError(UrlError): + code = 'url.port' + msg_template = 'URL port invalid, port cannot exceed 65535' + + +class 
UrlExtraError(UrlError): + code = 'url.extra' + msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}' + + +class EnumMemberError(PydanticTypeError): + code = 'enum' + + def __str__(self) -> str: + permitted = ', '.join(repr(v.value) for v in self.enum_values) # type: ignore + return f'value is not a valid enumeration member; permitted: {permitted}' + + +class IntegerError(PydanticTypeError): + msg_template = 'value is not a valid integer' + + +class FloatError(PydanticTypeError): + msg_template = 'value is not a valid float' + + +class PathError(PydanticTypeError): + msg_template = 'value is not a valid path' + + +class _PathValueError(PydanticValueError): + def __init__(self, *, path: Path) -> None: + super().__init__(path=str(path)) + + +class PathNotExistsError(_PathValueError): + code = 'path.not_exists' + msg_template = 'file or directory at path "{path}" does not exist' + + +class PathNotAFileError(_PathValueError): + code = 'path.not_a_file' + msg_template = 'path "{path}" does not point to a file' + + +class PathNotADirectoryError(_PathValueError): + code = 'path.not_a_directory' + msg_template = 'path "{path}" does not point to a directory' + + +class PyObjectError(PydanticTypeError): + msg_template = 'ensure this value contains valid import path or valid callable: {error_message}' + + +class SequenceError(PydanticTypeError): + msg_template = 'value is not a valid sequence' + + +class IterableError(PydanticTypeError): + msg_template = 'value is not a valid iterable' + + +class ListError(PydanticTypeError): + msg_template = 'value is not a valid list' + + +class SetError(PydanticTypeError): + msg_template = 'value is not a valid set' + + +class FrozenSetError(PydanticTypeError): + msg_template = 'value is not a valid frozenset' + + +class DequeError(PydanticTypeError): + msg_template = 'value is not a valid deque' + + +class TupleError(PydanticTypeError): + msg_template = 'value is not a valid tuple' + + +class 
TupleLengthError(PydanticValueError): + code = 'tuple.length' + msg_template = 'wrong tuple length {actual_length}, expected {expected_length}' + + def __init__(self, *, actual_length: int, expected_length: int) -> None: + super().__init__(actual_length=actual_length, expected_length=expected_length) + + +class ListMinLengthError(PydanticValueError): + code = 'list.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListMaxLengthError(PydanticValueError): + code = 'list.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListUniqueItemsError(PydanticValueError): + code = 'list.unique_items' + msg_template = 'the list has duplicated items' + + +class SetMinLengthError(PydanticValueError): + code = 'set.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class SetMaxLengthError(PydanticValueError): + code = 'set.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMinLengthError(PydanticValueError): + code = 'frozenset.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMaxLengthError(PydanticValueError): + code = 'frozenset.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMinLengthError(PydanticValueError): + code = 'any_str.min_length' + 
msg_template = 'ensure this value has at least {limit_value} characters' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMaxLengthError(PydanticValueError): + code = 'any_str.max_length' + msg_template = 'ensure this value has at most {limit_value} characters' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class StrError(PydanticTypeError): + msg_template = 'str type expected' + + +class StrRegexError(PydanticValueError): + code = 'str.regex' + msg_template = 'string does not match regex "{pattern}"' + + def __init__(self, *, pattern: str) -> None: + super().__init__(pattern=pattern) + + +class _NumberBoundError(PydanticValueError): + def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None: + super().__init__(limit_value=limit_value) + + +class NumberNotGtError(_NumberBoundError): + code = 'number.not_gt' + msg_template = 'ensure this value is greater than {limit_value}' + + +class NumberNotGeError(_NumberBoundError): + code = 'number.not_ge' + msg_template = 'ensure this value is greater than or equal to {limit_value}' + + +class NumberNotLtError(_NumberBoundError): + code = 'number.not_lt' + msg_template = 'ensure this value is less than {limit_value}' + + +class NumberNotLeError(_NumberBoundError): + code = 'number.not_le' + msg_template = 'ensure this value is less than or equal to {limit_value}' + + +class NumberNotFiniteError(PydanticValueError): + code = 'number.not_finite_number' + msg_template = 'ensure this value is a finite number' + + +class NumberNotMultipleError(PydanticValueError): + code = 'number.not_multiple' + msg_template = 'ensure this value is a multiple of {multiple_of}' + + def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None: + super().__init__(multiple_of=multiple_of) + + +class DecimalError(PydanticTypeError): + msg_template = 'value is not a valid decimal' + + +class 
DecimalIsNotFiniteError(PydanticValueError): + code = 'decimal.not_finite' + msg_template = 'value is not a valid decimal' + + +class DecimalMaxDigitsError(PydanticValueError): + code = 'decimal.max_digits' + msg_template = 'ensure that there are no more than {max_digits} digits in total' + + def __init__(self, *, max_digits: int) -> None: + super().__init__(max_digits=max_digits) + + +class DecimalMaxPlacesError(PydanticValueError): + code = 'decimal.max_places' + msg_template = 'ensure that there are no more than {decimal_places} decimal places' + + def __init__(self, *, decimal_places: int) -> None: + super().__init__(decimal_places=decimal_places) + + +class DecimalWholeDigitsError(PydanticValueError): + code = 'decimal.whole_digits' + msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point' + + def __init__(self, *, whole_digits: int) -> None: + super().__init__(whole_digits=whole_digits) + + +class DateTimeError(PydanticValueError): + msg_template = 'invalid datetime format' + + +class DateError(PydanticValueError): + msg_template = 'invalid date format' + + +class DateNotInThePastError(PydanticValueError): + code = 'date.not_in_the_past' + msg_template = 'date is not in the past' + + +class DateNotInTheFutureError(PydanticValueError): + code = 'date.not_in_the_future' + msg_template = 'date is not in the future' + + +class TimeError(PydanticValueError): + msg_template = 'invalid time format' + + +class DurationError(PydanticValueError): + msg_template = 'invalid duration format' + + +class HashableError(PydanticTypeError): + msg_template = 'value is not a valid hashable' + + +class UUIDError(PydanticTypeError): + msg_template = 'value is not a valid uuid' + + +class UUIDVersionError(PydanticValueError): + code = 'uuid.version' + msg_template = 'uuid version {required_version} expected' + + def __init__(self, *, required_version: int) -> None: + super().__init__(required_version=required_version) + + +class 
ArbitraryTypeError(PydanticTypeError): + code = 'arbitrary_type' + msg_template = 'instance of {expected_arbitrary_type} expected' + + def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None: + super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type)) + + +class ClassError(PydanticTypeError): + code = 'class' + msg_template = 'a class is expected' + + +class SubclassError(PydanticTypeError): + code = 'subclass' + msg_template = 'subclass of {expected_class} expected' + + def __init__(self, *, expected_class: Type[Any]) -> None: + super().__init__(expected_class=display_as_type(expected_class)) + + +class JsonError(PydanticValueError): + msg_template = 'Invalid JSON' + + +class JsonTypeError(PydanticTypeError): + code = 'json' + msg_template = 'JSON object must be str, bytes or bytearray' + + +class PatternError(PydanticValueError): + code = 'regex_pattern' + msg_template = 'Invalid regular expression' + + +class DataclassTypeError(PydanticTypeError): + code = 'dataclass' + msg_template = 'instance of {class_name}, tuple or dict expected' + + +class CallableError(PydanticTypeError): + msg_template = '{value} is not callable' + + +class EnumError(PydanticTypeError): + code = 'enum_instance' + msg_template = '{value} is not a valid Enum instance' + + +class IntEnumError(PydanticTypeError): + code = 'int_enum_instance' + msg_template = '{value} is not a valid IntEnum instance' + + +class IPvAnyAddressError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 address' + + +class IPvAnyInterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 interface' + + +class IPvAnyNetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 network' + + +class IPv4AddressError(PydanticValueError): + msg_template = 'value is not a valid IPv4 address' + + +class IPv6AddressError(PydanticValueError): + msg_template = 'value is not a valid IPv6 address' + + +class 
IPv4NetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv4 network' + + +class IPv6NetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv6 network' + + +class IPv4InterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv4 interface' + + +class IPv6InterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv6 interface' + + +class ColorError(PydanticValueError): + msg_template = 'value is not a valid color: {reason}' + + +class StrictBoolError(PydanticValueError): + msg_template = 'value is not a valid boolean' + + +class NotDigitError(PydanticValueError): + code = 'payment_card_number.digits' + msg_template = 'card number is not all digits' + + +class LuhnValidationError(PydanticValueError): + code = 'payment_card_number.luhn_check' + msg_template = 'card number is not luhn valid' + + +class InvalidLengthForBrand(PydanticValueError): + code = 'payment_card_number.invalid_length_for_brand' + msg_template = 'Length for a {brand} card must be {required_length}' + + +class InvalidByteSize(PydanticValueError): + msg_template = 'could not parse value and unit from byte string' + + +class InvalidByteSizeUnit(PydanticValueError): + msg_template = 'could not interpret byte unit: {unit}' + + +class MissingDiscriminator(PydanticValueError): + code = 'discriminated_union.missing_discriminator' + msg_template = 'Discriminator {discriminator_key!r} is missing in value' + + +class InvalidDiscriminator(PydanticValueError): + code = 'discriminated_union.invalid_discriminator' + msg_template = ( + 'No match for discriminator {discriminator_key!r} and value {discriminator_value!r} ' + '(allowed values: {allowed_values})' + ) + + def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None: + super().__init__( + discriminator_key=discriminator_key, + discriminator_value=discriminator_value, + allowed_values=', '.join(map(repr, allowed_values)), + ) diff 
--git a/lib/pydantic/fields.py b/lib/pydantic/fields.py index b416bb7d..cecd3d20 100644 --- a/lib/pydantic/fields.py +++ b/lib/pydantic/fields.py @@ -1,875 +1,1209 @@ -"""Defining fields on models.""" -from __future__ import annotations as _annotations +import copy +import re +from collections import Counter as CollectionCounter, defaultdict, deque +from collections.abc import Callable, Hashable as CollectionsHashable, Iterable as CollectionsIterable +from typing import ( + TYPE_CHECKING, + Any, + Counter, + DefaultDict, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, +) -import dataclasses -import inspect -import typing -from copy import copy -from dataclasses import Field as DataclassField -from functools import cached_property -from typing import Any, ClassVar -from warnings import warn +from typing_extensions import Annotated, Final -import annotated_types -import typing_extensions -from pydantic_core import PydanticUndefined -from typing_extensions import Literal, Unpack +from . import errors as errors_ +from .class_validators import Validator, make_generic_validator, prep_validators +from .error_wrappers import ErrorWrapper +from .errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError +from .types import Json, JsonWrapper +from .typing import ( + NoArgAnyCallable, + convert_generics, + display_as_type, + get_args, + get_origin, + is_finalvar, + is_literal_type, + is_new_type, + is_none_type, + is_typeddict, + is_typeddict_special, + is_union, + new_type_supertype, +) +from .utils import ( + PyObjectStr, + Representation, + ValueItems, + get_discriminator_alias_and_values, + get_unique_discriminator_alias, + lenient_isinstance, + lenient_issubclass, + sequence_like, + smart_deepcopy, +) +from .validators import constant_validator, dict_validator, find_validators, validate_json -from . 
import types -from ._internal import _decorators, _fields, _generics, _internal_dataclass, _repr, _typing_extra, _utils -from .aliases import AliasChoices, AliasPath -from .config import JsonDict -from .errors import PydanticUserError -from .warnings import PydanticDeprecatedSince20 +Required: Any = Ellipsis -if typing.TYPE_CHECKING: - from ._internal._repr import ReprArgs -else: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 +T = TypeVar('T') -_Unset: Any = PydanticUndefined +class UndefinedType: + def __repr__(self) -> str: + return 'PydanticUndefined' + + def __copy__(self: T) -> T: + return self + + def __reduce__(self) -> str: + return 'Undefined' + + def __deepcopy__(self: T, _: Any) -> T: + return self -class _FromFieldInfoInputs(typing_extensions.TypedDict, total=False): - """This class exists solely to add type checking for the `**kwargs` in `FieldInfo.from_field`.""" +Undefined = UndefinedType() - annotation: type[Any] | None - default_factory: typing.Callable[[], Any] | None - alias: str | None - alias_priority: int | None - validation_alias: str | AliasPath | AliasChoices | None - serialization_alias: str | None - title: str | None - description: str | None - examples: list[Any] | None - exclude: bool | None - gt: float | None - ge: float | None - lt: float | None - le: float | None - multiple_of: float | None - strict: bool | None - min_length: int | None - max_length: int | None - pattern: str | None - allow_inf_nan: bool | None - max_digits: int | None - decimal_places: int | None - union_mode: Literal['smart', 'left_to_right'] | None - discriminator: str | types.Discriminator | None - json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None - frozen: bool | None - validate_default: bool | None - repr: bool - init: bool | None - init_var: bool | None - kw_only: bool | None +if TYPE_CHECKING: + from 
.class_validators import ValidatorsList + from .config import BaseConfig + from .error_wrappers import ErrorList + from .types import ModelOrDc + from .typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs + + ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]] + LocStr = Union[Tuple[Union[int, str], ...], str] + BoolUndefined = Union[bool, UndefinedType] -class _FieldInfoInputs(_FromFieldInfoInputs, total=False): - """This class exists solely to add type checking for the `**kwargs` in `FieldInfo.__init__`.""" - - default: Any - - -class FieldInfo(_repr.Representation): - """This class holds information about a field. - - `FieldInfo` is used for any field definition regardless of whether the [`Field()`][pydantic.fields.Field] - function is explicitly used. - - !!! warning - You generally shouldn't be creating `FieldInfo` directly, you'll only need to use it when accessing - [`BaseModel`][pydantic.main.BaseModel] `.model_fields` internals. - - Attributes: - annotation: The type annotation of the field. - default: The default value of the field. - default_factory: The factory function used to construct the default for the field. - alias: The alias name of the field. - alias_priority: The priority of the field's alias. - validation_alias: The validation alias of the field. - serialization_alias: The serialization alias of the field. - title: The title of the field. - description: The description of the field. - examples: List of examples of the field. - exclude: Whether to exclude the field from the model serialization. - discriminator: Field name or Discriminator for discriminating the type in a tagged union. - json_schema_extra: A dict or callable to provide extra JSON schema properties. - frozen: Whether the field is frozen. - validate_default: Whether to validate the default value of the field. - repr: Whether to include the field in representation of the model. - init: Whether the field should be included in the constructor of the dataclass. 
- init_var: Whether the field should _only_ be included in the constructor of the dataclass, and not stored. - kw_only: Whether the field should be a keyword-only argument in the constructor of the dataclass. - metadata: List of metadata constraints. +class FieldInfo(Representation): + """ + Captures extra information about a field. """ - annotation: type[Any] | None - default: Any - default_factory: typing.Callable[[], Any] | None - alias: str | None - alias_priority: int | None - validation_alias: str | AliasPath | AliasChoices | None - serialization_alias: str | None - title: str | None - description: str | None - examples: list[Any] | None - exclude: bool | None - discriminator: str | types.Discriminator | None - json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None - frozen: bool | None - validate_default: bool | None - repr: bool - init: bool | None - init_var: bool | None - kw_only: bool | None - metadata: list[Any] - __slots__ = ( - 'annotation', 'default', 'default_factory', 'alias', 'alias_priority', - 'validation_alias', - 'serialization_alias', 'title', 'description', - 'examples', 'exclude', - 'discriminator', - 'json_schema_extra', - 'frozen', - 'validate_default', + 'include', + 'const', + 'gt', + 'ge', + 'lt', + 'le', + 'multiple_of', + 'allow_inf_nan', + 'max_digits', + 'decimal_places', + 'min_items', + 'max_items', + 'unique_items', + 'min_length', + 'max_length', + 'allow_mutation', 'repr', - 'init', - 'init_var', - 'kw_only', - 'metadata', - '_attributes_set', + 'regex', + 'discriminator', + 'extra', ) - # used to convert kwargs to metadata/constraints, - # None has a special meaning - these items are collected into a `PydanticGeneralMetadata` - metadata_lookup: ClassVar[dict[str, typing.Callable[[Any], Any] | None]] = { - 'strict': types.Strict, - 'gt': annotated_types.Gt, - 'ge': annotated_types.Ge, - 'lt': annotated_types.Lt, - 'le': annotated_types.Le, - 'multiple_of': annotated_types.MultipleOf, - 'min_length': 
annotated_types.MinLen, - 'max_length': annotated_types.MaxLen, - 'pattern': None, + # field constraints with the default value, it's also used in update_from_config below + __field_constraints__ = { + 'min_length': None, + 'max_length': None, + 'regex': None, + 'gt': None, + 'lt': None, + 'ge': None, + 'le': None, + 'multiple_of': None, 'allow_inf_nan': None, 'max_digits': None, 'decimal_places': None, - 'union_mode': None, + 'min_items': None, + 'max_items': None, + 'unique_items': None, + 'allow_mutation': True, } - def __init__(self, **kwargs: Unpack[_FieldInfoInputs]) -> None: - """This class should generally not be initialized directly; instead, use the `pydantic.fields.Field` function - or one of the constructor classmethods. - - See the signature of `pydantic.fields.Field` for more details about the expected arguments. - """ - self._attributes_set = {k: v for k, v in kwargs.items() if v is not _Unset} - kwargs = {k: _DefaultValues.get(k) if v is _Unset else v for k, v in kwargs.items()} # type: ignore - self.annotation, annotation_metadata = self._extract_metadata(kwargs.get('annotation')) - - default = kwargs.pop('default', PydanticUndefined) - if default is Ellipsis: - self.default = PydanticUndefined - else: - self.default = default - + def __init__(self, default: Any = Undefined, **kwargs: Any) -> None: + self.default = default self.default_factory = kwargs.pop('default_factory', None) - - if self.default is not PydanticUndefined and self.default_factory is not None: - raise TypeError('cannot specify both default and default_factory') - - self.title = kwargs.pop('title', None) self.alias = kwargs.pop('alias', None) - self.validation_alias = kwargs.pop('validation_alias', None) - self.serialization_alias = kwargs.pop('serialization_alias', None) - alias_is_set = any(alias is not None for alias in (self.alias, self.validation_alias, self.serialization_alias)) - self.alias_priority = kwargs.pop('alias_priority', None) or 2 if alias_is_set else None + 
self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias is not None else None) + self.title = kwargs.pop('title', None) self.description = kwargs.pop('description', None) - self.examples = kwargs.pop('examples', None) self.exclude = kwargs.pop('exclude', None) + self.include = kwargs.pop('include', None) + self.const = kwargs.pop('const', None) + self.gt = kwargs.pop('gt', None) + self.ge = kwargs.pop('ge', None) + self.lt = kwargs.pop('lt', None) + self.le = kwargs.pop('le', None) + self.multiple_of = kwargs.pop('multiple_of', None) + self.allow_inf_nan = kwargs.pop('allow_inf_nan', None) + self.max_digits = kwargs.pop('max_digits', None) + self.decimal_places = kwargs.pop('decimal_places', None) + self.min_items = kwargs.pop('min_items', None) + self.max_items = kwargs.pop('max_items', None) + self.unique_items = kwargs.pop('unique_items', None) + self.min_length = kwargs.pop('min_length', None) + self.max_length = kwargs.pop('max_length', None) + self.allow_mutation = kwargs.pop('allow_mutation', True) + self.regex = kwargs.pop('regex', None) self.discriminator = kwargs.pop('discriminator', None) self.repr = kwargs.pop('repr', True) - self.json_schema_extra = kwargs.pop('json_schema_extra', None) - self.validate_default = kwargs.pop('validate_default', None) - self.frozen = kwargs.pop('frozen', None) - # currently only used on dataclasses - self.init = kwargs.pop('init', None) - self.init_var = kwargs.pop('init_var', None) - self.kw_only = kwargs.pop('kw_only', None) + self.extra = kwargs - self.metadata = self._collect_metadata(kwargs) + annotation_metadata # type: ignore + def __repr_args__(self) -> 'ReprArgs': - @staticmethod - def from_field(default: Any = PydanticUndefined, **kwargs: Unpack[_FromFieldInfoInputs]) -> FieldInfo: - """Create a new `FieldInfo` object with the `Field` function. + field_defaults_to_hide: Dict[str, Any] = { + 'repr': True, + **self.__field_constraints__, + } - Args: - default: The default value for the field. 
Defaults to Undefined. - **kwargs: Additional arguments dictionary. + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)] - Raises: - TypeError: If 'annotation' is passed as a keyword argument. - - Returns: - A new FieldInfo object with the given parameters. - - Example: - This is how you can create a field with default value like this: - - ```python - import pydantic - - class MyModel(pydantic.BaseModel): - foo: int = pydantic.Field(4) - ``` + def get_constraints(self) -> Set[str]: """ - if 'annotation' in kwargs: - raise TypeError('"annotation" is not permitted as a Field keyword argument') - return FieldInfo(default=default, **kwargs) + Gets the constraints set on the field by comparing the constraint value with its default value - @staticmethod - def from_annotation(annotation: type[Any]) -> FieldInfo: - """Creates a `FieldInfo` instance from a bare annotation. - - This function is used internally to create a `FieldInfo` from a bare annotation like this: - - ```python - import pydantic - - class MyModel(pydantic.BaseModel): - foo: int # <-- like this - ``` - - We also account for the case where the annotation can be an instance of `Annotated` and where - one of the (not first) arguments in `Annotated` is an instance of `FieldInfo`, e.g.: - - ```python - import annotated_types - from typing_extensions import Annotated - - import pydantic - - class MyModel(pydantic.BaseModel): - foo: Annotated[int, annotated_types.Gt(42)] - bar: Annotated[int, pydantic.Field(gt=42)] - ``` - - Args: - annotation: An annotation object. - - Returns: - An instance of the field metadata. 
+ :return: the constraints set on field_info """ - final = False - if _typing_extra.is_finalvar(annotation): - final = True - if annotation is not typing_extensions.Final: - annotation = typing_extensions.get_args(annotation)[0] + return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default} - if _typing_extra.is_annotated(annotation): - first_arg, *extra_args = typing_extensions.get_args(annotation) - if _typing_extra.is_finalvar(first_arg): - final = True - field_info_annotations = [a for a in extra_args if isinstance(a, FieldInfo)] - field_info = FieldInfo.merge_field_infos(*field_info_annotations, annotation=first_arg) - if field_info: - new_field_info = copy(field_info) - new_field_info.annotation = first_arg - new_field_info.frozen = final or field_info.frozen - metadata: list[Any] = [] - for a in extra_args: - if not isinstance(a, FieldInfo): - metadata.append(a) - else: - metadata.extend(a.metadata) - new_field_info.metadata = metadata - return new_field_info - - return FieldInfo(annotation=annotation, frozen=final or None) - - @staticmethod - def from_annotated_attribute(annotation: type[Any], default: Any) -> FieldInfo: - """Create `FieldInfo` from an annotation with a default value. - - This is used in cases like the following: - - ```python - import annotated_types - from typing_extensions import Annotated - - import pydantic - - class MyModel(pydantic.BaseModel): - foo: int = 4 # <-- like this - bar: Annotated[int, annotated_types.Gt(4)] = 4 # <-- or this - spam: Annotated[int, pydantic.Field(gt=4)] = 4 # <-- or this - ``` - - Args: - annotation: The type annotation of the field. - default: The default value of the field. - - Returns: - A field object with the passed values. + def update_from_config(self, from_config: Dict[str, Any]) -> None: """ - if annotation is default: - raise PydanticUserError( - 'Error when building FieldInfo from annotated attribute. 
' - "Make sure you don't have any field name clashing with a type annotation ", - code='unevaluable-type-annotation', - ) - - final = False - if _typing_extra.is_finalvar(annotation): - final = True - if annotation is not typing_extensions.Final: - annotation = typing_extensions.get_args(annotation)[0] - - if isinstance(default, FieldInfo): - default.annotation, annotation_metadata = FieldInfo._extract_metadata(annotation) - default.metadata += annotation_metadata - default = default.merge_field_infos( - *[x for x in annotation_metadata if isinstance(x, FieldInfo)], default, annotation=default.annotation - ) - default.frozen = final or default.frozen - return default - elif isinstance(default, dataclasses.Field): - init_var = False - if annotation is dataclasses.InitVar: - init_var = True - annotation = Any - elif isinstance(annotation, dataclasses.InitVar): - init_var = True - annotation = annotation.type - pydantic_field = FieldInfo._from_dataclass_field(default) - pydantic_field.annotation, annotation_metadata = FieldInfo._extract_metadata(annotation) - pydantic_field.metadata += annotation_metadata - pydantic_field = pydantic_field.merge_field_infos( - *[x for x in annotation_metadata if isinstance(x, FieldInfo)], - pydantic_field, - annotation=pydantic_field.annotation, - ) - pydantic_field.frozen = final or pydantic_field.frozen - pydantic_field.init_var = init_var - pydantic_field.init = getattr(default, 'init', None) - pydantic_field.kw_only = getattr(default, 'kw_only', None) - return pydantic_field - else: - if _typing_extra.is_annotated(annotation): - first_arg, *extra_args = typing_extensions.get_args(annotation) - field_infos = [a for a in extra_args if isinstance(a, FieldInfo)] - field_info = FieldInfo.merge_field_infos(*field_infos, annotation=first_arg, default=default) - metadata: list[Any] = [] - for a in extra_args: - if not isinstance(a, FieldInfo): - metadata.append(a) - else: - metadata.extend(a.metadata) - field_info.metadata = metadata - 
return field_info - - return FieldInfo(annotation=annotation, default=default, frozen=final or None) - - @staticmethod - def merge_field_infos(*field_infos: FieldInfo, **overrides: Any) -> FieldInfo: - """Merge `FieldInfo` instances keeping only explicitly set attributes. - - Later `FieldInfo` instances override earlier ones. - - Returns: - FieldInfo: A merged FieldInfo instance. + Update this FieldInfo based on a dict from get_field_info, only fields which have not been set are dated. """ - flattened_field_infos: list[FieldInfo] = [] - for field_info in field_infos: - flattened_field_infos.extend(x for x in field_info.metadata if isinstance(x, FieldInfo)) - flattened_field_infos.append(field_info) - field_infos = tuple(flattened_field_infos) - if len(field_infos) == 1: - # No merging necessary, but we still need to make a copy and apply the overrides - field_info = copy(field_infos[0]) - field_info._attributes_set.update(overrides) - for k, v in overrides.items(): - setattr(field_info, k, v) - return field_info # type: ignore - - new_kwargs: dict[str, Any] = {} - metadata = {} - for field_info in field_infos: - new_kwargs.update(field_info._attributes_set) - for x in field_info.metadata: - if not isinstance(x, FieldInfo): - metadata[type(x)] = x - new_kwargs.update(overrides) - field_info = FieldInfo(**new_kwargs) - field_info.metadata = list(metadata.values()) - return field_info - - @staticmethod - def _from_dataclass_field(dc_field: DataclassField[Any]) -> FieldInfo: - """Return a new `FieldInfo` instance from a `dataclasses.Field` instance. - - Args: - dc_field: The `dataclasses.Field` instance to convert. - - Returns: - The corresponding `FieldInfo` instance. - - Raises: - TypeError: If any of the `FieldInfo` kwargs does not match the `dataclass.Field` kwargs. 
- """ - default = dc_field.default - if default is dataclasses.MISSING: - default = PydanticUndefined - - if dc_field.default_factory is dataclasses.MISSING: - default_factory: typing.Callable[[], Any] | None = None - else: - default_factory = dc_field.default_factory - - # use the `Field` function so in correct kwargs raise the correct `TypeError` - dc_field_metadata = {k: v for k, v in dc_field.metadata.items() if k in _FIELD_ARG_NAMES} - return Field(default=default, default_factory=default_factory, repr=dc_field.repr, **dc_field_metadata) - - @staticmethod - def _extract_metadata(annotation: type[Any] | None) -> tuple[type[Any] | None, list[Any]]: - """Tries to extract metadata/constraints from an annotation if it uses `Annotated`. - - Args: - annotation: The type hint annotation for which metadata has to be extracted. - - Returns: - A tuple containing the extracted metadata type and the list of extra arguments. - """ - if annotation is not None: - if _typing_extra.is_annotated(annotation): - first_arg, *extra_args = typing_extensions.get_args(annotation) - return first_arg, list(extra_args) - - return annotation, [] - - @staticmethod - def _collect_metadata(kwargs: dict[str, Any]) -> list[Any]: - """Collect annotations from kwargs. - - Args: - kwargs: Keyword arguments passed to the function. - - Returns: - A list of metadata objects - a combination of `annotated_types.BaseMetadata` and - `PydanticMetadata`. 
- """ - metadata: list[Any] = [] - general_metadata = {} - for key, value in list(kwargs.items()): + for attr_name, value in from_config.items(): try: - marker = FieldInfo.metadata_lookup[key] - except KeyError: - continue - - del kwargs[key] - if value is not None: - if marker is None: - general_metadata[key] = value - else: - metadata.append(marker(value)) - if general_metadata: - metadata.append(_fields.pydantic_general_metadata(**general_metadata)) - return metadata - - def get_default(self, *, call_default_factory: bool = False) -> Any: - """Get the default value. - - We expose an option for whether to call the default_factory (if present), as calling it may - result in side effects that we want to avoid. However, there are times when it really should - be called (namely, when instantiating a model via `model_construct`). - - Args: - call_default_factory: Whether to call the default_factory or not. Defaults to `False`. - - Returns: - The default value, calling the default factory if requested or `None` if not set. - """ - if self.default_factory is None: - return _utils.smart_deepcopy(self.default) - elif call_default_factory: - return self.default_factory() - else: - return None - - def is_required(self) -> bool: - """Check if the field is required (i.e., does not have a default value or factory). - - Returns: - `True` if the field is required, `False` otherwise. - """ - return self.default is PydanticUndefined and self.default_factory is None - - def rebuild_annotation(self) -> Any: - """Attempts to rebuild the original annotation for use in function signatures. - - If metadata is present, it adds it to the original annotation using - `Annotated`. Otherwise, it returns the original annotation as-is. - - Note that because the metadata has been flattened, the original annotation - may not be reconstructed exactly as originally provided, e.g. if the original - type had unrecognized annotations, or was annotated with a call to `pydantic.Field`. 
- - Returns: - The rebuilt annotation. - """ - if not self.metadata: - return self.annotation - else: - # Annotated arguments must be a tuple - return typing_extensions.Annotated[(self.annotation, *self.metadata)] # type: ignore - - def apply_typevars_map(self, typevars_map: dict[Any, Any] | None, types_namespace: dict[str, Any] | None) -> None: - """Apply a `typevars_map` to the annotation. - - This method is used when analyzing parametrized generic types to replace typevars with their concrete types. - - This method applies the `typevars_map` to the annotation in place. - - Args: - typevars_map: A dictionary mapping type variables to their concrete types. - types_namespace (dict | None): A dictionary containing related types to the annotated type. - - See Also: - pydantic._internal._generics.replace_types is used for replacing the typevars with - their concrete types. - """ - annotation = _typing_extra.eval_type_lenient(self.annotation, types_namespace) - self.annotation = _generics.replace_types(annotation, typevars_map) - - def __repr_args__(self) -> ReprArgs: - yield 'annotation', _repr.PlainRepr(_repr.display_as_type(self.annotation)) - yield 'required', self.is_required() - - for s in self.__slots__: - if s == '_attributes_set': - continue - if s == 'annotation': - continue - elif s == 'metadata' and not self.metadata: - continue - elif s == 'repr' and self.repr is True: - continue - if s == 'frozen' and self.frozen is False: - continue - if s == 'validation_alias' and self.validation_alias == self.alias: - continue - if s == 'serialization_alias' and self.serialization_alias == self.alias: - continue - if s == 'default_factory' and self.default_factory is not None: - yield 'default_factory', _repr.PlainRepr(_repr.display_as_type(self.default_factory)) + current_value = getattr(self, attr_name) + except AttributeError: + # attr_name is not an attribute of FieldInfo, it should therefore be added to extra + # (except if extra already has this value!) 
+ self.extra.setdefault(attr_name, value) else: - value = getattr(self, s) - if value is not None and value is not PydanticUndefined: - yield s, value + if current_value is self.__field_constraints__.get(attr_name, None): + setattr(self, attr_name, value) + elif attr_name == 'exclude': + self.exclude = ValueItems.merge(value, current_value) + elif attr_name == 'include': + self.include = ValueItems.merge(value, current_value, intersect=True) + + def _validate(self) -> None: + if self.default is not Undefined and self.default_factory is not None: + raise ValueError('cannot specify both default and default_factory') -class _EmptyKwargs(typing_extensions.TypedDict): - """This class exists solely to ensure that type checking warns about passing `**extra` in `Field`.""" - - -_DefaultValues = dict( - default=..., - default_factory=None, - alias=None, - alias_priority=None, - validation_alias=None, - serialization_alias=None, - title=None, - description=None, - examples=None, - exclude=None, - discriminator=None, - json_schema_extra=None, - frozen=None, - validate_default=None, - repr=True, - init=None, - init_var=None, - kw_only=None, - pattern=None, - strict=None, - gt=None, - ge=None, - lt=None, - le=None, - multiple_of=None, - allow_inf_nan=None, - max_digits=None, - decimal_places=None, - min_length=None, - max_length=None, -) - - -def Field( # noqa: C901 - default: Any = PydanticUndefined, +def Field( + default: Any = Undefined, *, - default_factory: typing.Callable[[], Any] | None = _Unset, - alias: str | None = _Unset, - alias_priority: int | None = _Unset, - validation_alias: str | AliasPath | AliasChoices | None = _Unset, - serialization_alias: str | None = _Unset, - title: str | None = _Unset, - description: str | None = _Unset, - examples: list[Any] | None = _Unset, - exclude: bool | None = _Unset, - discriminator: str | types.Discriminator | None = _Unset, - json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = _Unset, - frozen: bool | None 
= _Unset, - validate_default: bool | None = _Unset, - repr: bool = _Unset, - init: bool | None = _Unset, - init_var: bool | None = _Unset, - kw_only: bool | None = _Unset, - pattern: str | None = _Unset, - strict: bool | None = _Unset, - gt: float | None = _Unset, - ge: float | None = _Unset, - lt: float | None = _Unset, - le: float | None = _Unset, - multiple_of: float | None = _Unset, - allow_inf_nan: bool | None = _Unset, - max_digits: int | None = _Unset, - decimal_places: int | None = _Unset, - min_length: int | None = _Unset, - max_length: int | None = _Unset, - union_mode: Literal['smart', 'left_to_right'] = _Unset, - **extra: Unpack[_EmptyKwargs], + default_factory: Optional[NoArgAnyCallable] = None, + alias: str = None, + title: str = None, + description: str = None, + exclude: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None, + include: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None, + const: bool = None, + gt: float = None, + ge: float = None, + lt: float = None, + le: float = None, + multiple_of: float = None, + allow_inf_nan: bool = None, + max_digits: int = None, + decimal_places: int = None, + min_items: int = None, + max_items: int = None, + unique_items: bool = None, + min_length: int = None, + max_length: int = None, + allow_mutation: bool = True, + regex: str = None, + discriminator: str = None, + repr: bool = True, + **extra: Any, ) -> Any: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/fields - - Create a field for objects that can be configured. - - Used to provide extra information about a field, either for the model schema or complex validation. Some arguments - apply only to number fields (`int`, `float`, `Decimal`) and some apply only to `str`. - - Note: - - Any `_Unset` objects will be replaced by the corresponding value defined in the `_DefaultValues` dictionary. 
If a key for the `_Unset` object is not found in the `_DefaultValues` dictionary, it will default to `None` - - Args: - default: Default value if the field is not set. - default_factory: A callable to generate the default value, such as :func:`~datetime.utcnow`. - alias: The name to use for the attribute when validating or serializing by alias. - This is often used for things like converting between snake and camel case. - alias_priority: Priority of the alias. This affects whether an alias generator is used. - validation_alias: Like `alias`, but only affects validation, not serialization. - serialization_alias: Like `alias`, but only affects serialization, not validation. - title: Human-readable title. - description: Human-readable description. - examples: Example values for this field. - exclude: Whether to exclude the field from the model serialization. - discriminator: Field name or Discriminator for discriminating the type in a tagged union. - json_schema_extra: A dict or callable to provide extra JSON schema properties. - frozen: Whether the field is frozen. If true, attempts to change the value on an instance will raise an error. - validate_default: If `True`, apply validation to the default value every time you create an instance. - Otherwise, for performance reasons, the default value of the field is trusted and not validated. - repr: A boolean indicating whether to include the field in the `__repr__` output. - init: Whether the field should be included in the constructor of the dataclass. - (Only applies to dataclasses.) - init_var: Whether the field should _only_ be included in the constructor of the dataclass. - (Only applies to dataclasses.) - kw_only: Whether the field should be a keyword-only argument in the constructor of the dataclass. - (Only applies to dataclasses.) - strict: If `True`, strict validation is applied to the field. - See [Strict Mode](../concepts/strict_mode.md) for details. - gt: Greater than. 
If set, value must be greater than this. Only applicable to numbers. - ge: Greater than or equal. If set, value must be greater than or equal to this. Only applicable to numbers. - lt: Less than. If set, value must be less than this. Only applicable to numbers. - le: Less than or equal. If set, value must be less than or equal to this. Only applicable to numbers. - multiple_of: Value must be a multiple of this. Only applicable to numbers. - min_length: Minimum length for strings. - max_length: Maximum length for strings. - pattern: Pattern for strings (a regular expression). - allow_inf_nan: Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - max_digits: Maximum number of allow digits for strings. - decimal_places: Maximum number of decimal places allowed for numbers. - union_mode: The strategy to apply when validating a union. Can be `smart` (the default), or `left_to_right`. - See [Union Mode](standard_library_types.md#union-mode) for details. - extra: (Deprecated) Extra fields that will be included in the JSON schema. - - !!! warning Deprecated - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. - - Returns: - A new [`FieldInfo`][pydantic.fields.FieldInfo]. The return annotation is `Any` so `Field` can be used on - type-annotated fields without causing a type error. """ - # Check deprecated and removed params from V1. This logic should eventually be removed. - const = extra.pop('const', None) # type: ignore - if const is not None: - raise PydanticUserError('`const` is removed, use `Literal` instead', code='removed-kwargs') + Used to provide extra information about a field, either for the model schema or complex validation. Some arguments + apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``. 
- min_items = extra.pop('min_items', None) # type: ignore - if min_items is not None: - warn('`min_items` is deprecated and will be removed, use `min_length` instead', DeprecationWarning) - if min_length in (None, _Unset): - min_length = min_items # type: ignore - - max_items = extra.pop('max_items', None) # type: ignore - if max_items is not None: - warn('`max_items` is deprecated and will be removed, use `max_length` instead', DeprecationWarning) - if max_length in (None, _Unset): - max_length = max_items # type: ignore - - unique_items = extra.pop('unique_items', None) # type: ignore - if unique_items is not None: - raise PydanticUserError( - ( - '`unique_items` is removed, use `Set` instead' - '(this feature is discussed in https://github.com/pydantic/pydantic-core/issues/296)' - ), - code='removed-kwargs', - ) - - allow_mutation = extra.pop('allow_mutation', None) # type: ignore - if allow_mutation is not None: - warn('`allow_mutation` is deprecated and will be removed. use `frozen` instead', DeprecationWarning) - if allow_mutation is False: - frozen = True - - regex = extra.pop('regex', None) # type: ignore - if regex is not None: - raise PydanticUserError('`regex` is removed. use `pattern` instead', code='removed-kwargs') - - if extra: - warn( - 'Using extra keyword arguments on `Field` is deprecated and will be removed.' - ' Use `json_schema_extra` instead.' - f' (Extra keys: {", ".join(k.__repr__() for k in extra.keys())})', - DeprecationWarning, - ) - if not json_schema_extra or json_schema_extra is _Unset: - json_schema_extra = extra # type: ignore - - if ( - validation_alias - and validation_alias is not _Unset - and not isinstance(validation_alias, (str, AliasChoices, AliasPath)) - ): - raise TypeError('Invalid `validation_alias` type. 
it should be `str`, `AliasChoices`, or `AliasPath`') - - if serialization_alias in (_Unset, None) and isinstance(alias, str): - serialization_alias = alias - - if validation_alias in (_Unset, None): - validation_alias = alias - - include = extra.pop('include', None) # type: ignore - if include is not None: - warn('`include` is deprecated and does nothing. It will be removed, use `exclude` instead', DeprecationWarning) - - return FieldInfo.from_field( + :param default: since this is replacing the field’s default, its first argument is used + to set the default, use ellipsis (``...``) to indicate the field is required + :param default_factory: callable that will be called when a default value is needed for this field + If both `default` and `default_factory` are set, an error is raised. + :param alias: the public name of the field + :param title: can be any string, used in the schema + :param description: can be any string, used in the schema + :param exclude: exclude this field while dumping. + Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. + :param include: include this field while dumping. + Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. + :param const: this field is required and *must* take it's default value + :param gt: only applies to numbers, requires the field to be "greater than". The schema + will have an ``exclusiveMinimum`` validation keyword + :param ge: only applies to numbers, requires the field to be "greater than or equal to". The + schema will have a ``minimum`` validation keyword + :param lt: only applies to numbers, requires the field to be "less than". The schema + will have an ``exclusiveMaximum`` validation keyword + :param le: only applies to numbers, requires the field to be "less than or equal to". The + schema will have a ``maximum`` validation keyword + :param multiple_of: only applies to numbers, requires the field to be "a multiple of". 
The + schema will have a ``multipleOf`` validation keyword + :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf), + which is a valid Python float. Default True, set to False for compatibility with JSON. + :param max_digits: only applies to Decimals, requires the field to have a maximum number + of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes. + :param decimal_places: only applies to Decimals, requires the field to have at most a number of decimal places + allowed. It does not include trailing decimal zeroes. + :param min_items: only applies to lists, requires the field to have a minimum number of + elements. The schema will have a ``minItems`` validation keyword + :param max_items: only applies to lists, requires the field to have a maximum number of + elements. The schema will have a ``maxItems`` validation keyword + :param unique_items: only applies to lists, requires the field not to have duplicated + elements. The schema will have a ``uniqueItems`` validation keyword + :param min_length: only applies to strings, requires the field to have a minimum length. The + schema will have a ``maximum`` validation keyword + :param max_length: only applies to strings, requires the field to have a maximum length. The + schema will have a ``maxLength`` validation keyword + :param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is + assigned on an instance. The BaseModel Config must set validate_assignment to True + :param regex: only applies to strings, requires the field match against a regular expression + pattern string. The schema will have a ``pattern`` validation keyword + :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field. 
+ The `discriminator` is the name of this common field to shorten validation and improve generated schema + :param repr: show this field in the representation + :param **extra: any additional keyword arguments will be added as is to the schema + """ + field_info = FieldInfo( default, default_factory=default_factory, alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, title=title, description=description, - examples=examples, exclude=exclude, - discriminator=discriminator, - json_schema_extra=json_schema_extra, - frozen=frozen, - pattern=pattern, - validate_default=validate_default, - repr=repr, - init=init, - init_var=init_var, - kw_only=kw_only, - strict=strict, + include=include, + const=const, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, - min_length=min_length, - max_length=max_length, allow_inf_nan=allow_inf_nan, max_digits=max_digits, decimal_places=decimal_places, - union_mode=union_mode, + min_items=min_items, + max_items=max_items, + unique_items=unique_items, + min_length=min_length, + max_length=max_length, + allow_mutation=allow_mutation, + regex=regex, + discriminator=discriminator, + repr=repr, + **extra, + ) + field_info._validate() + return field_info + + +# used to be an enum but changed to int's for small performance improvement as less access overhead +SHAPE_SINGLETON = 1 +SHAPE_LIST = 2 +SHAPE_SET = 3 +SHAPE_MAPPING = 4 +SHAPE_TUPLE = 5 +SHAPE_TUPLE_ELLIPSIS = 6 +SHAPE_SEQUENCE = 7 +SHAPE_FROZENSET = 8 +SHAPE_ITERABLE = 9 +SHAPE_GENERIC = 10 +SHAPE_DEQUE = 11 +SHAPE_DICT = 12 +SHAPE_DEFAULTDICT = 13 +SHAPE_COUNTER = 14 +SHAPE_NAME_LOOKUP = { + SHAPE_LIST: 'List[{}]', + SHAPE_SET: 'Set[{}]', + SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]', + SHAPE_SEQUENCE: 'Sequence[{}]', + SHAPE_FROZENSET: 'FrozenSet[{}]', + SHAPE_ITERABLE: 'Iterable[{}]', + SHAPE_DEQUE: 'Deque[{}]', + SHAPE_DICT: 'Dict[{}]', + SHAPE_DEFAULTDICT: 'DefaultDict[{}]', + SHAPE_COUNTER: 'Counter[{}]', +} + 
+MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER} + + +class ModelField(Representation): + __slots__ = ( + 'type_', + 'outer_type_', + 'annotation', + 'sub_fields', + 'sub_fields_mapping', + 'key_field', + 'validators', + 'pre_validators', + 'post_validators', + 'default', + 'default_factory', + 'required', + 'final', + 'model_config', + 'name', + 'alias', + 'has_alias', + 'field_info', + 'discriminator_key', + 'discriminator_alias', + 'validate_always', + 'allow_none', + 'shape', + 'class_validators', + 'parse_json', ) - -_FIELD_ARG_NAMES = set(inspect.signature(Field).parameters) -_FIELD_ARG_NAMES.remove('extra') # do not include the varkwargs parameter - - -class ModelPrivateAttr(_repr.Representation): - """A descriptor for private attributes in class models. - - !!! warning - You generally shouldn't be creating `ModelPrivateAttr` instances directly, instead use - `pydantic.fields.PrivateAttr`. (This is similar to `FieldInfo` vs. `Field`.) - - Attributes: - default: The default value of the attribute if not provided. - default_factory: A callable function that generates the default value of the - attribute if not provided. 
- """ - - __slots__ = 'default', 'default_factory' - def __init__( - self, default: Any = PydanticUndefined, *, default_factory: typing.Callable[[], Any] | None = None + self, + *, + name: str, + type_: Type[Any], + class_validators: Optional[Dict[str, Validator]], + model_config: Type['BaseConfig'], + default: Any = None, + default_factory: Optional[NoArgAnyCallable] = None, + required: 'BoolUndefined' = Undefined, + final: bool = False, + alias: str = None, + field_info: Optional[FieldInfo] = None, ) -> None: + + self.name: str = name + self.has_alias: bool = alias is not None + self.alias: str = alias if alias is not None else name + self.annotation = type_ + self.type_: Any = convert_generics(type_) + self.outer_type_: Any = type_ + self.class_validators = class_validators or {} + self.default: Any = default + self.default_factory: Optional[NoArgAnyCallable] = default_factory + self.required: 'BoolUndefined' = required + self.final: bool = final + self.model_config = model_config + self.field_info: FieldInfo = field_info or FieldInfo(default) + self.discriminator_key: Optional[str] = self.field_info.discriminator + self.discriminator_alias: Optional[str] = self.discriminator_key + + self.allow_none: bool = False + self.validate_always: bool = False + self.sub_fields: Optional[List[ModelField]] = None + self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None # used for discriminated union + self.key_field: Optional[ModelField] = None + self.validators: 'ValidatorsList' = [] + self.pre_validators: Optional['ValidatorsList'] = None + self.post_validators: Optional['ValidatorsList'] = None + self.parse_json: bool = False + self.shape: int = SHAPE_SINGLETON + self.model_config.prepare_field(self) + self.prepare() + + def get_default(self) -> Any: + return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() + + @staticmethod + def _get_field_info( + field_name: str, annotation: Any, value: Any, config: Type['BaseConfig'] 
+ ) -> Tuple[FieldInfo, Any]: + """ + Get a FieldInfo from a root typing.Annotated annotation, value, or config default. + + The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contain + a FieldInfo, a new one will be created using the config. + + :param field_name: name of the field for use in error messages + :param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]` + :param value: the field's assigned value + :param config: the model's config object + :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config. + """ + field_info_from_config = config.get_field_info(field_name) + + field_info = None + if get_origin(annotation) is Annotated: + field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)] + if len(field_infos) > 1: + raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}') + field_info = next(iter(field_infos), None) + if field_info is not None: + field_info = copy.copy(field_info) + field_info.update_from_config(field_info_from_config) + if field_info.default not in (Undefined, Required): + raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}') + if value is not Undefined and value is not Required: + # check also `Required` because of `validate_arguments` that sets `...` as default value + field_info.default = value + + if isinstance(value, FieldInfo): + if field_info is not None: + raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}') + field_info = value + field_info.update_from_config(field_info_from_config) + elif field_info is None: + field_info = FieldInfo(value, **field_info_from_config) + value = None if field_info.default_factory is not None else field_info.default + field_info._validate() + return field_info, value + + @classmethod + def infer( + cls, + *, + name: str, + value: Any, + annotation: Any, + 
class_validators: Optional[Dict[str, Validator]], + config: Type['BaseConfig'], + ) -> 'ModelField': + from .schema import get_annotation_from_field_info + + field_info, value = cls._get_field_info(name, annotation, value, config) + required: 'BoolUndefined' = Undefined + if value is Required: + required = True + value = None + elif value is not Undefined: + required = False + annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment) + + return cls( + name=name, + type_=annotation, + alias=field_info.alias, + class_validators=class_validators, + default=value, + default_factory=field_info.default_factory, + required=required, + model_config=config, + field_info=field_info, + ) + + def set_config(self, config: Type['BaseConfig']) -> None: + self.model_config = config + info_from_config = config.get_field_info(self.name) + config.prepare_field(self) + new_alias = info_from_config.get('alias') + new_alias_priority = info_from_config.get('alias_priority') or 0 + if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0): + self.field_info.alias = new_alias + self.field_info.alias_priority = new_alias_priority + self.alias = new_alias + new_exclude = info_from_config.get('exclude') + if new_exclude is not None: + self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude) + new_include = info_from_config.get('include') + if new_include is not None: + self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True) + + @property + def alt_alias(self) -> bool: + return self.name != self.alias + + def prepare(self) -> None: + """ + Prepare the field but inspecting self.default, self.type_ etc. + + Note: this method is **not** idempotent (because _type_analysis is not idempotent), + e.g. calling it it multiple times may modify the field and configure it incorrectly. 
+ """ + self._set_default_and_type() + if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType: + # self.type_ is currently a ForwardRef and there's nothing we can do now, + # user will need to call model.update_forward_refs() + return + + self._type_analysis() + if self.required is Undefined: + self.required = True + if self.default is Undefined and self.default_factory is None: + self.default = None + self.populate_validators() + + def _set_default_and_type(self) -> None: + """ + Set the default value, infer the type if needed and check if `None` value is valid. + """ + if self.default_factory is not None: + if self.type_ is Undefined: + raise errors_.ConfigError( + f'you need to set the type of field {self.name!r} when using `default_factory`' + ) + return + + default_value = self.get_default() + + if default_value is not None and self.type_ is Undefined: + self.type_ = default_value.__class__ + self.outer_type_ = self.type_ + self.annotation = self.type_ + + if self.type_ is Undefined: + raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') + + if self.required is False and default_value is None: + self.allow_none = True + + def _type_analysis(self) -> None: # noqa: C901 (ignore complexity) + # typing interface is horrible, we have to do some ugly checks + if lenient_issubclass(self.type_, JsonWrapper): + self.type_ = self.type_.inner_type + self.parse_json = True + elif lenient_issubclass(self.type_, Json): + self.type_ = Any + self.parse_json = True + elif isinstance(self.type_, TypeVar): + if self.type_.__bound__: + self.type_ = self.type_.__bound__ + elif self.type_.__constraints__: + self.type_ = Union[self.type_.__constraints__] + else: + self.type_ = Any + elif is_new_type(self.type_): + self.type_ = new_type_supertype(self.type_) + + if self.type_ is Any or self.type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + return + elif self.type_ is Pattern or 
self.type_ is re.Pattern: + # python 3.7 only, Pattern is a typing object but without sub fields + return + elif is_literal_type(self.type_): + return + elif is_typeddict(self.type_): + return + + if is_finalvar(self.type_): + self.final = True + + if self.type_ is Final: + self.type_ = Any + else: + self.type_ = get_args(self.type_)[0] + + self._type_analysis() + return + + origin = get_origin(self.type_) + + if origin is Annotated or is_typeddict_special(origin): + self.type_ = get_args(self.type_)[0] + self._type_analysis() + return + + if self.discriminator_key is not None and not is_union(origin): + raise TypeError('`discriminator` can only be used with `Union` type with more than one variant') + + # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None` + if origin is None or origin is CollectionsHashable: + # field is not "typing" object eg. Union, Dict, List etc. + # allow None for virtual superclasses of NoneType, e.g. Hashable + if isinstance(self.type_, type) and isinstance(None, self.type_): + self.allow_none = True + return + elif origin is Callable: + return + elif is_union(origin): + types_ = [] + for type_ in get_args(self.type_): + if is_none_type(type_) or type_ is Any or type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + if is_none_type(type_): + continue + types_.append(type_) + + if len(types_) == 1: + # Optional[] + self.type_ = types_[0] + # this is the one case where the "outer type" isn't just the original type + self.outer_type_ = self.type_ + # re-run to correctly interpret the new self.type_ + self._type_analysis() + else: + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_] + + if self.discriminator_key is not None: + self.prepare_discriminated_union_sub_fields() + return + elif issubclass(origin, Tuple): # type: ignore + # origin == Tuple without item type + args = get_args(self.type_) + if not args: # 
plain tuple + self.type_ = Any + self.shape = SHAPE_TUPLE_ELLIPSIS + elif len(args) == 2 and args[1] is Ellipsis: # e.g. Tuple[int, ...] + self.type_ = args[0] + self.shape = SHAPE_TUPLE_ELLIPSIS + self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')] + elif args == ((),): # Tuple[()] means empty tuple + self.shape = SHAPE_TUPLE + self.type_ = Any + self.sub_fields = [] + else: + self.shape = SHAPE_TUPLE + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)] + return + elif issubclass(origin, List): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_LIST + elif issubclass(origin, Set): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SET + elif issubclass(origin, FrozenSet): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_FROZENSET + elif issubclass(origin, Deque): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_DEQUE + elif issubclass(origin, Sequence): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SEQUENCE + # priority to most common mapping: dict + elif origin is dict or origin is Dict: + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = 
get_args(self.type_)[1] + self.shape = SHAPE_DICT + elif issubclass(origin, DefaultDict): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_DEFAULTDICT + elif issubclass(origin, Counter): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = int + self.shape = SHAPE_COUNTER + elif issubclass(origin, Mapping): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_MAPPING + # Equality check as almost everything inherits form Iterable, including str + # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other + elif origin in {Iterable, CollectionsIterable}: + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_ITERABLE + self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')] + elif issubclass(origin, Type): # type: ignore + return + elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed: + # Is a Pydantic-compatible generic that handles itself + # or we have arbitrary_types_allowed = True + self.shape = SHAPE_GENERIC + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))] + self.type_ = origin + return + else: + raise TypeError(f'Fields of type "{origin}" are not supported.') + + # type_ has been refined eg. 
as the type of a List and sub_fields needs to be populated + self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)] + + def prepare_discriminated_union_sub_fields(self) -> None: + """ + Prepare the mapping -> and update `sub_fields` + Note that this process can be aborted if a `ForwardRef` is encountered + """ + assert self.discriminator_key is not None + + if self.type_.__class__ is DeferredType: + return + + assert self.sub_fields is not None + sub_fields_mapping: Dict[str, 'ModelField'] = {} + all_aliases: Set[str] = set() + + for sub_field in self.sub_fields: + t = sub_field.type_ + if t.__class__ is ForwardRef: + # Stopping everything...will need to call `update_forward_refs` + return + + alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key) + all_aliases.add(alias) + for discriminator_value in discriminator_values: + sub_fields_mapping[discriminator_value] = sub_field + + self.sub_fields_mapping = sub_fields_mapping + self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key) + + def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField': + if for_keys: + class_validators = None + else: + # validators for sub items should not have `each_item` as we want to check only the first sublevel + class_validators = { + k: Validator( + func=v.func, + pre=v.pre, + each_item=False, + always=v.always, + check_fields=v.check_fields, + skip_on_failure=v.skip_on_failure, + ) + for k, v in self.class_validators.items() + if v.each_item + } + + field_info, _ = self._get_field_info(name, type_, None, self.model_config) + + return self.__class__( + type_=type_, + name=name, + class_validators=class_validators, + model_config=self.model_config, + field_info=field_info, + ) + + def populate_validators(self) -> None: + """ + Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__ + and class 
validators. This method should be idempotent, e.g. it should be safe to call multiple times + without mis-configuring the field. + """ + self.validate_always = getattr(self.type_, 'validate_always', False) or any( + v.always for v in self.class_validators.values() + ) + + class_validators_ = self.class_validators.values() + if not self.sub_fields or self.shape == SHAPE_GENERIC: + get_validators = getattr(self.type_, '__get_validators__', None) + v_funcs = ( + *[v.func for v in class_validators_ if v.each_item and v.pre], + *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))), + *[v.func for v in class_validators_ if v.each_item and not v.pre], + ) + self.validators = prep_validators(v_funcs) + + self.pre_validators = [] + self.post_validators = [] + + if self.field_info and self.field_info.const: + self.post_validators.append(make_generic_validator(constant_validator)) + + if class_validators_: + self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) + self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) + + if self.parse_json: + self.pre_validators.append(make_generic_validator(validate_json)) + + self.pre_validators = self.pre_validators or None + self.post_validators = self.post_validators or None + + def validate( + self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None + ) -> 'ValidateReturn': + + assert self.type_.__class__ is not DeferredType + + if self.type_.__class__ is ForwardRef: + assert cls is not None + raise ConfigError( + f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' + f'you might need to call {cls.__name__}.update_forward_refs().' 
+ ) + + errors: Optional['ErrorList'] + if self.pre_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators) + if errors: + return v, errors + + if v is None: + if is_none_type(self.type_): + # keep validating + pass + elif self.allow_none: + if self.post_validators: + return self._apply_validators(v, values, loc, cls, self.post_validators) + else: + return None, None + else: + return v, ErrorWrapper(NoneIsNotAllowedError(), loc) + + if self.shape == SHAPE_SINGLETON: + v, errors = self._validate_singleton(v, values, loc, cls) + elif self.shape in MAPPING_LIKE_SHAPES: + v, errors = self._validate_mapping_like(v, values, loc, cls) + elif self.shape == SHAPE_TUPLE: + v, errors = self._validate_tuple(v, values, loc, cls) + elif self.shape == SHAPE_ITERABLE: + v, errors = self._validate_iterable(v, values, loc, cls) + elif self.shape == SHAPE_GENERIC: + v, errors = self._apply_validators(v, values, loc, cls, self.validators) + else: + # sequence, list, set, generator, tuple with ellipsis, frozen set + v, errors = self._validate_sequence_like(v, values, loc, cls) + + if not errors and self.post_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.post_validators) + return v, errors + + def _validate_sequence_like( # noqa: C901 (ignore complexity) + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + """ + Validate sequence-like containers: lists, tuples, sets and generators + Note that large if-else blocks are necessary to enable Cython + optimization, which is why we disable the complexity check above. 
+ """ + if not sequence_like(v): + e: errors_.PydanticTypeError + if self.shape == SHAPE_LIST: + e = errors_.ListError() + elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS): + e = errors_.TupleError() + elif self.shape == SHAPE_SET: + e = errors_.SetError() + elif self.shape == SHAPE_FROZENSET: + e = errors_.FrozenSetError() + else: + e = errors_.SequenceError() + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, v_ in enumerate(v): + v_loc = *loc, i + r, ee = self._validate_singleton(v_, values, v_loc, cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + + converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result + + if self.shape == SHAPE_SET: + converted = set(result) + elif self.shape == SHAPE_FROZENSET: + converted = frozenset(result) + elif self.shape == SHAPE_TUPLE_ELLIPSIS: + converted = tuple(result) + elif self.shape == SHAPE_DEQUE: + converted = deque(result) + elif self.shape == SHAPE_SEQUENCE: + if isinstance(v, tuple): + converted = tuple(result) + elif isinstance(v, set): + converted = set(result) + elif isinstance(v, Generator): + converted = iter(result) + elif isinstance(v, deque): + converted = deque(result) + return converted, None + + def _validate_iterable( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + """ + Validate Iterables. + + This intentionally doesn't validate values to allow infinite generators. 
+ """ + + try: + iterable = iter(v) + except TypeError: + return v, ErrorWrapper(errors_.IterableError(), loc) + return iterable, None + + def _validate_tuple( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + e: Optional[Exception] = None + if not sequence_like(v): + e = errors_.TupleError() + else: + actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore + if actual_length != expected_length: + e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length) + + if e: + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore + v_loc = *loc, i + r, ee = field.validate(v_, values, loc=v_loc, cls=cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + else: + return tuple(result), None + + def _validate_mapping_like( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + try: + v_iter = dict_validator(v) + except TypeError as exc: + return v, ErrorWrapper(exc, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result, errors = {}, [] + for k, v_ in v_iter.items(): + v_loc = *loc, '__key__' + key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore + if key_errors: + errors.append(key_errors) + continue + + v_loc = *loc, k + value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls) + if value_errors: + errors.append(value_errors) + continue + + result[key_result] = value_result + if errors: + return v, errors + elif self.shape == SHAPE_DICT: + return result, None + elif self.shape == SHAPE_DEFAULTDICT: + return defaultdict(self.type_, result), None + elif self.shape == SHAPE_COUNTER: + return CollectionCounter(result), None + else: + return 
self._get_mapping_value(v, result), None + + def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]: + """ + When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid + coercing to `dict` unwillingly. + """ + original_cls = original.__class__ + + if original_cls == dict or original_cls == Dict: + return converted + elif original_cls in {defaultdict, DefaultDict}: + return defaultdict(self.type_, converted) + else: + try: + # Counter, OrderedDict, UserDict, ... + return original_cls(converted) # type: ignore + except TypeError: + raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None + + def _validate_singleton( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + if self.sub_fields: + if self.discriminator_key is not None: + return self._validate_discriminated_union(v, values, loc, cls) + + errors = [] + + if self.model_config.smart_union and is_union(get_origin(self.type_)): + # 1st pass: check if the value is an exact instance of one of the Union types + # (e.g. 
to avoid coercing a bool into an int) + for field in self.sub_fields: + if v.__class__ is field.outer_type_: + return v, None + + # 2nd pass: check if the value is an instance of any subclass of the Union types + for field in self.sub_fields: + # This whole logic will be improved later on to support more complex `isinstance` checks + # It will probably be done once a strict mode is added and be something like: + # ``` + # value, error = field.validate(v, values, strict=True) + # if error is None: + # return value, None + # ``` + try: + if isinstance(v, field.outer_type_): + return v, None + except TypeError: + # compound type + if lenient_isinstance(v, get_origin(field.outer_type_)): + value, error = field.validate(v, values, loc=loc, cls=cls) + if not error: + return value, None + + # 1st pass by default or 3rd pass with `smart_union` enabled: + # check if the value can be coerced into one of the Union types + for field in self.sub_fields: + value, error = field.validate(v, values, loc=loc, cls=cls) + if error: + errors.append(error) + else: + return value, None + return v, errors + else: + return self._apply_validators(v, values, loc, cls, self.validators) + + def _validate_discriminated_union( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + assert self.discriminator_key is not None + assert self.discriminator_alias is not None + + try: + discriminator_value = v[self.discriminator_alias] + except KeyError: + return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) + except TypeError: + try: + # BaseModel or dataclass + discriminator_value = getattr(v, self.discriminator_key) + except (AttributeError, TypeError): + return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) + + try: + sub_field = self.sub_fields_mapping[discriminator_value] # type: ignore[index] + except TypeError: + assert cls is not None + raise ConfigError( + f'field 
"{self.name}" not yet prepared so type is still a ForwardRef, ' + f'you might need to call {cls.__name__}.update_forward_refs().' + ) + except KeyError: + assert self.sub_fields_mapping is not None + return v, ErrorWrapper( + InvalidDiscriminator( + discriminator_key=self.discriminator_key, + discriminator_value=discriminator_value, + allowed_values=list(self.sub_fields_mapping), + ), + loc, + ) + else: + if not isinstance(loc, tuple): + loc = (loc,) + return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls) + + def _apply_validators( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList' + ) -> 'ValidateReturn': + for validator in validators: + try: + v = validator(cls, v, values, self, self.model_config) + except (ValueError, TypeError, AssertionError) as exc: + return v, ErrorWrapper(exc, loc) + return v, None + + def is_complex(self) -> bool: + """ + Whether the field is "complex" eg. env variables should be parsed as JSON. 
+ """ + from .main import BaseModel + + return ( + self.shape != SHAPE_SINGLETON + or hasattr(self.type_, '__pydantic_model__') + or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict)) + ) + + def _type_display(self) -> PyObjectStr: + t = display_as_type(self.type_) + + if self.shape in MAPPING_LIKE_SHAPES: + t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]' # type: ignore + elif self.shape == SHAPE_TUPLE: + t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields)) # type: ignore + elif self.shape == SHAPE_GENERIC: + assert self.sub_fields + t = '{}[{}]'.format( + display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields) + ) + elif self.shape != SHAPE_SINGLETON: + t = SHAPE_NAME_LOOKUP[self.shape].format(t) + + if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields): + t = f'Optional[{t}]' + return PyObjectStr(t) + + def __repr_args__(self) -> 'ReprArgs': + args = [('name', self.name), ('type', self._type_display()), ('required', self.required)] + + if not self.required: + if self.default_factory is not None: + args.append(('default_factory', f'')) + else: + args.append(('default', self.default)) + + if self.alt_alias: + args.append(('alias', self.alias)) + return args + + +class ModelPrivateAttr(Representation): + __slots__ = ('default', 'default_factory') + + def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None: self.default = default self.default_factory = default_factory - if not typing.TYPE_CHECKING: - # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access - - def __getattr__(self, item: str) -> Any: - """This function improves compatibility with custom descriptors by ensuring delegation happens - as expected when the default value of a private attribute is a descriptor. 
- """ - if item in {'__get__', '__set__', '__delete__'}: - if hasattr(self.default, item): - return getattr(self.default, item) - raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') - - def __set_name__(self, cls: type[Any], name: str) -> None: - """Preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487.""" - if self.default is PydanticUndefined: - return - if not hasattr(self.default, '__set_name__'): - return - set_name = self.default.__set_name__ - if callable(set_name): - set_name(cls, name) - def get_default(self) -> Any: - """Retrieve the default value of the object. - - If `self.default_factory` is `None`, the method will return a deep copy of the `self.default` object. - - If `self.default_factory` is not `None`, it will call `self.default_factory` and return the value returned. - - Returns: - The default value of the object. - """ - return _utils.smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() + return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and (self.default, self.default_factory) == ( @@ -879,32 +1213,23 @@ class ModelPrivateAttr(_repr.Representation): def PrivateAttr( - default: Any = PydanticUndefined, + default: Any = Undefined, *, - default_factory: typing.Callable[[], Any] | None = None, + default_factory: Optional[NoArgAnyCallable] = None, ) -> Any: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/models/#private-model-attributes - - Indicates that an attribute is intended for private use and not handled during normal validation/serialization. - - Private attributes are not validated by Pydantic, so it's up to you to ensure they are used in a type-safe manner. - - Private attributes are stored in `__private_attributes__` on the model. - - Args: - default: The attribute's default value. Defaults to Undefined. 
- default_factory: Callable that will be - called when a default value is needed for this attribute. - If both `default` and `default_factory` are set, an error will be raised. - - Returns: - An instance of [`ModelPrivateAttr`][pydantic.fields.ModelPrivateAttr] class. - - Raises: - ValueError: If both `default` and `default_factory` are set. """ - if default is not PydanticUndefined and default_factory is not None: - raise TypeError('cannot specify both default and default_factory') + Indicates that attribute is only used internally and never mixed with regular fields. + + Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant. + + Private attrs are stored in model __slots__. + + :param default: the attribute’s default value + :param default_factory: callable that will be called when a default value is needed for this attribute + If both `default` and `default_factory` are set, an error is raised. + """ + if default is not Undefined and default_factory is not None: + raise ValueError('cannot specify both default and default_factory') return ModelPrivateAttr( default, @@ -912,243 +1237,11 @@ def PrivateAttr( ) -@dataclasses.dataclass(**_internal_dataclass.slots_true) -class ComputedFieldInfo: - """A container for data from `@computed_field` so that we can access it while building the pydantic-core schema. - - Attributes: - decorator_repr: A class variable representing the decorator string, '@computed_field'. - wrapped_property: The wrapped computed field property. - return_type: The type of the computed field property's return value. - alias: The alias of the property to be used during serialization. - alias_priority: The priority of the alias. This affects whether an alias generator is used. - title: Title of the computed field to include in the serialization JSON schema. - description: Description of the computed field to include in the serialization JSON schema. 
- examples: Example values of the computed field to include in the serialization JSON schema. - json_schema_extra: A dict or callable to provide extra JSON schema properties. - repr: A boolean indicating whether to include the field in the __repr__ output. +class DeferredType: + """ + Used to postpone field preparation, while creating recursive generic models. """ - decorator_repr: ClassVar[str] = '@computed_field' - wrapped_property: property - return_type: Any - alias: str | None - alias_priority: int | None - title: str | None - description: str | None - examples: list[Any] | None - json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None - repr: bool - -def _wrapped_property_is_private(property_: cached_property | property) -> bool: # type: ignore - """Returns true if provided property is private, False otherwise.""" - wrapped_name: str = '' - - if isinstance(property_, property): - wrapped_name = getattr(property_.fget, '__name__', '') - elif isinstance(property_, cached_property): # type: ignore - wrapped_name = getattr(property_.func, '__name__', '') # type: ignore - - return wrapped_name.startswith('_') and not wrapped_name.startswith('__') - - -# this should really be `property[T], cached_property[T]` but property is not generic unlike cached_property -# See https://github.com/python/typing/issues/985 and linked issues -PropertyT = typing.TypeVar('PropertyT') - - -@typing.overload -def computed_field( - *, - alias: str | None = None, - alias_priority: int | None = None, - title: str | None = None, - description: str | None = None, - examples: list[Any] | None = None, - json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = None, - repr: bool = True, - return_type: Any = PydanticUndefined, -) -> typing.Callable[[PropertyT], PropertyT]: - ... - - -@typing.overload -def computed_field(__func: PropertyT) -> PropertyT: - ... 
- - -def computed_field( - __f: PropertyT | None = None, - *, - alias: str | None = None, - alias_priority: int | None = None, - title: str | None = None, - description: str | None = None, - examples: list[Any] | None = None, - json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = None, - repr: bool | None = None, - return_type: Any = PydanticUndefined, -) -> PropertyT | typing.Callable[[PropertyT], PropertyT]: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/fields#the-computed_field-decorator - - Decorator to include `property` and `cached_property` when serializing models or dataclasses. - - This is useful for fields that are computed from other fields, or for fields that are expensive to compute and should be cached. - - ```py - from pydantic import BaseModel, computed_field - - class Rectangle(BaseModel): - width: int - length: int - - @computed_field - @property - def area(self) -> int: - return self.width * self.length - - print(Rectangle(width=3, length=2).model_dump()) - #> {'width': 3, 'length': 2, 'area': 6} - ``` - - If applied to functions not yet decorated with `@property` or `@cached_property`, the function is - automatically wrapped with `property`. Although this is more concise, you will lose IntelliSense in your IDE, - and confuse static type checkers, thus explicit use of `@property` is recommended. - - !!! warning "Mypy Warning" - Even with the `@property` or `@cached_property` applied to your function before `@computed_field`, - mypy may throw a `Decorated property not supported` error. - See [mypy issue #1362](https://github.com/python/mypy/issues/1362), for more information. - To avoid this error message, add `# type: ignore[misc]` to the `@computed_field` line. - - [pyright](https://github.com/microsoft/pyright) supports `@computed_field` without error. 
- - ```py - import random - - from pydantic import BaseModel, computed_field - - class Square(BaseModel): - width: float - - @computed_field - def area(self) -> float: # converted to a `property` by `computed_field` - return round(self.width**2, 2) - - @area.setter - def area(self, new_area: float) -> None: - self.width = new_area**0.5 - - @computed_field(alias='the magic number', repr=False) - def random_number(self) -> int: - return random.randint(0, 1_000) - - square = Square(width=1.3) - - # `random_number` does not appear in representation - print(repr(square)) - #> Square(width=1.3, area=1.69) - - print(square.random_number) - #> 3 - - square.area = 4 - - print(square.model_dump_json(by_alias=True)) - #> {"width":2.0,"area":4.0,"the magic number":3} - ``` - - !!! warning "Overriding with `computed_field`" - You can't override a field from a parent class with a `computed_field` in the child class. - `mypy` complains about this behavior if allowed, and `dataclasses` doesn't allow this pattern either. - See the example below: - - ```py - from pydantic import BaseModel, computed_field - - class Parent(BaseModel): - a: str - - try: - - class Child(Parent): - @computed_field - @property - def a(self) -> str: - return 'new a' - - except ValueError as e: - print(repr(e)) - #> ValueError("you can't override a field with a computed field") - ``` - - Private properties decorated with `@computed_field` have `repr=False` by default. - - ```py - from functools import cached_property - - from pydantic import BaseModel, computed_field - - class Model(BaseModel): - foo: int - - @computed_field - @cached_property - def _private_cached_property(self) -> int: - return -self.foo - - @computed_field - @property - def _private_property(self) -> int: - return -self.foo - - m = Model(foo=1) - print(repr(m)) - #> M(foo=1) - ``` - - Args: - __f: the function to wrap. 
- alias: alias to use when serializing this computed field, only used when `by_alias=True` - alias_priority: priority of the alias. This affects whether an alias generator is used - title: Title to use when including this computed field in JSON Schema - description: Description to use when including this computed field in JSON Schema, defaults to the function's - docstring - examples: Example values to use when including this computed field in JSON Schema - json_schema_extra: A dict or callable to provide extra JSON schema properties. - repr: whether to include this computed field in model repr. - Default is `False` for private properties and `True` for public properties. - return_type: optional return for serialization logic to expect when serializing to JSON, if included - this must be correct, otherwise a `TypeError` is raised. - If you don't include a return type Any is used, which does runtime introspection to handle arbitrary - objects. - - Returns: - A proxy wrapper for the property. 
- """ - - def dec(f: Any) -> Any: - nonlocal description, return_type, alias_priority - unwrapped = _decorators.unwrap_wrapped_function(f) - if description is None and unwrapped.__doc__: - description = inspect.cleandoc(unwrapped.__doc__) - - # if the function isn't already decorated with `@property` (or another descriptor), then we wrap it now - f = _decorators.ensure_property(f) - alias_priority = (alias_priority or 2) if alias is not None else None - - if repr is None: - repr_: bool = False if _wrapped_property_is_private(property_=f) else True - else: - repr_ = repr - - dec_info = ComputedFieldInfo( - f, return_type, alias, alias_priority, title, description, examples, json_schema_extra, repr_ - ) - return _decorators.PydanticDescriptorProxy(f, dec_info) - - if __f is None: - return dec - else: - return dec(__f) +def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool: + return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo) diff --git a/lib/pydantic/functional_serializers.py b/lib/pydantic/functional_serializers.py deleted file mode 100644 index 6e31bf67..00000000 --- a/lib/pydantic/functional_serializers.py +++ /dev/null @@ -1,395 +0,0 @@ -"""This module contains related classes and functions for serialization.""" -from __future__ import annotations - -import dataclasses -from functools import partialmethod -from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload - -from pydantic_core import PydanticUndefined, core_schema -from pydantic_core import core_schema as _core_schema -from typing_extensions import Annotated, Literal, TypeAlias - -from . import PydanticUndefinedAnnotation -from ._internal import _decorators, _internal_dataclass -from .annotated_handlers import GetCoreSchemaHandler - - -@dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) -class PlainSerializer: - """Plain serializers use a function to modify the output of serialization. 
- - This is particularly helpful when you want to customize the serialization for annotated types. - Consider an input of `list`, which will be serialized into a space-delimited string. - - ```python - from typing import List - - from typing_extensions import Annotated - - from pydantic import BaseModel, PlainSerializer - - CustomStr = Annotated[ - List, PlainSerializer(lambda x: ' '.join(x), return_type=str) - ] - - class StudentModel(BaseModel): - courses: CustomStr - - student = StudentModel(courses=['Math', 'Chemistry', 'English']) - print(student.model_dump()) - #> {'courses': 'Math Chemistry English'} - ``` - - Attributes: - func: The serializer function. - return_type: The return type for the function. If omitted it will be inferred from the type annotation. - when_used: Determines when this serializer should be used. Accepts a string with values `'always'`, - `'unless-none'`, `'json'`, and `'json-unless-none'`. Defaults to 'always'. - """ - - func: core_schema.SerializerFunction - return_type: Any = PydanticUndefined - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always' - - def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - """Gets the Pydantic core schema. - - Args: - source_type: The source type. - handler: The `GetCoreSchemaHandler` instance. - - Returns: - The Pydantic core schema. 
- """ - schema = handler(source_type) - try: - return_type = _decorators.get_function_return_type( - self.func, self.return_type, handler._get_types_namespace() - ) - except NameError as e: - raise PydanticUndefinedAnnotation.from_name_error(e) from e - return_schema = None if return_type is PydanticUndefined else handler.generate_schema(return_type) - schema['serialization'] = core_schema.plain_serializer_function_ser_schema( - function=self.func, - info_arg=_decorators.inspect_annotated_serializer(self.func, 'plain'), - return_schema=return_schema, - when_used=self.when_used, - ) - return schema - - -@dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) -class WrapSerializer: - """Wrap serializers receive the raw inputs along with a handler function that applies the standard serialization - logic, and can modify the resulting value before returning it as the final output of serialization. - - For example, here's a scenario in which a wrap serializer transforms timezones to UTC **and** utilizes the existing `datetime` serialization logic. - - ```python - from datetime import datetime, timezone - from typing import Any, Dict - - from typing_extensions import Annotated - - from pydantic import BaseModel, WrapSerializer - - class EventDatetime(BaseModel): - start: datetime - end: datetime - - def convert_to_utc(value: Any, handler, info) -> Dict[str, datetime]: - # Note that `helper` can actually help serialize the `value` for further custom serialization in case it's a subclass. 
- partial_result = handler(value, info) - if info.mode == 'json': - return { - k: datetime.fromisoformat(v).astimezone(timezone.utc) - for k, v in partial_result.items() - } - return {k: v.astimezone(timezone.utc) for k, v in partial_result.items()} - - UTCEventDatetime = Annotated[EventDatetime, WrapSerializer(convert_to_utc)] - - class EventModel(BaseModel): - event_datetime: UTCEventDatetime - - dt = EventDatetime( - start='2024-01-01T07:00:00-08:00', end='2024-01-03T20:00:00+06:00' - ) - event = EventModel(event_datetime=dt) - print(event.model_dump()) - ''' - { - 'event_datetime': { - 'start': datetime.datetime( - 2024, 1, 1, 15, 0, tzinfo=datetime.timezone.utc - ), - 'end': datetime.datetime( - 2024, 1, 3, 14, 0, tzinfo=datetime.timezone.utc - ), - } - } - ''' - - print(event.model_dump_json()) - ''' - {"event_datetime":{"start":"2024-01-01T15:00:00Z","end":"2024-01-03T14:00:00Z"}} - ''' - ``` - - Attributes: - func: The serializer function to be wrapped. - return_type: The return type for the function. If omitted it will be inferred from the type annotation. - when_used: Determines when this serializer should be used. Accepts a string with values `'always'`, - `'unless-none'`, `'json'`, and `'json-unless-none'`. Defaults to 'always'. - """ - - func: core_schema.WrapSerializerFunction - return_type: Any = PydanticUndefined - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always' - - def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - """This method is used to get the Pydantic core schema of the class. - - Args: - source_type: Source type. - handler: Core schema handler. - - Returns: - The generated core schema of the class. 
- """ - schema = handler(source_type) - try: - return_type = _decorators.get_function_return_type( - self.func, self.return_type, handler._get_types_namespace() - ) - except NameError as e: - raise PydanticUndefinedAnnotation.from_name_error(e) from e - return_schema = None if return_type is PydanticUndefined else handler.generate_schema(return_type) - schema['serialization'] = core_schema.wrap_serializer_function_ser_schema( - function=self.func, - info_arg=_decorators.inspect_annotated_serializer(self.func, 'wrap'), - return_schema=return_schema, - when_used=self.when_used, - ) - return schema - - -if TYPE_CHECKING: - _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]] - _PlainSerializationFunction = Union[_core_schema.SerializerFunction, _PartialClsOrStaticMethod] - _WrapSerializationFunction = Union[_core_schema.WrapSerializerFunction, _PartialClsOrStaticMethod] - _PlainSerializeMethodType = TypeVar('_PlainSerializeMethodType', bound=_PlainSerializationFunction) - _WrapSerializeMethodType = TypeVar('_WrapSerializeMethodType', bound=_WrapSerializationFunction) - - -@overload -def field_serializer( - __field: str, - *fields: str, - return_type: Any = ..., - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ..., - check_fields: bool | None = ..., -) -> Callable[[_PlainSerializeMethodType], _PlainSerializeMethodType]: - ... - - -@overload -def field_serializer( - __field: str, - *fields: str, - mode: Literal['plain'], - return_type: Any = ..., - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ..., - check_fields: bool | None = ..., -) -> Callable[[_PlainSerializeMethodType], _PlainSerializeMethodType]: - ... 
- - -@overload -def field_serializer( - __field: str, - *fields: str, - mode: Literal['wrap'], - return_type: Any = ..., - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ..., - check_fields: bool | None = ..., -) -> Callable[[_WrapSerializeMethodType], _WrapSerializeMethodType]: - ... - - -def field_serializer( - *fields: str, - mode: Literal['plain', 'wrap'] = 'plain', - return_type: Any = PydanticUndefined, - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always', - check_fields: bool | None = None, -) -> Callable[[Any], Any]: - """Decorator that enables custom field serialization. - - In the below example, a field of type `set` is used to mitigate duplication. A `field_serializer` is used to serialize the data as a sorted list. - - ```python - from typing import Set - - from pydantic import BaseModel, field_serializer - - class StudentModel(BaseModel): - name: str = 'Jane' - courses: Set[str] - - @field_serializer('courses', when_used='json') - def serialize_courses_in_order(courses: Set[str]): - return sorted(courses) - - student = StudentModel(courses={'Math', 'Chemistry', 'English'}) - print(student.model_dump_json()) - #> {"name":"Jane","courses":["Chemistry","English","Math"]} - ``` - - See [Custom serializers](../concepts/serialization.md#custom-serializers) for more information. - - Four signatures are supported: - - - `(self, value: Any, info: FieldSerializationInfo)` - - `(self, value: Any, nxt: SerializerFunctionWrapHandler, info: FieldSerializationInfo)` - - `(value: Any, info: SerializationInfo)` - - `(value: Any, nxt: SerializerFunctionWrapHandler, info: SerializationInfo)` - - Args: - fields: Which field(s) the method should be called on. - mode: The serialization mode. - - - `plain` means the function will be called instead of the default serialization logic, - - `wrap` means the function will be called with an argument to optionally call the - default serialization logic. 
- return_type: Optional return type for the function, if omitted it will be inferred from the type annotation. - when_used: Determines the serializer will be used for serialization. - check_fields: Whether to check that the fields actually exist on the model. - - Returns: - The decorator function. - """ - - def dec( - f: Callable[..., Any] | staticmethod[Any, Any] | classmethod[Any, Any, Any], - ) -> _decorators.PydanticDescriptorProxy[Any]: - dec_info = _decorators.FieldSerializerDecoratorInfo( - fields=fields, - mode=mode, - return_type=return_type, - when_used=when_used, - check_fields=check_fields, - ) - return _decorators.PydanticDescriptorProxy(f, dec_info) - - return dec - - -FuncType = TypeVar('FuncType', bound=Callable[..., Any]) - - -@overload -def model_serializer(__f: FuncType) -> FuncType: - ... - - -@overload -def model_serializer( - *, - mode: Literal['plain', 'wrap'] = ..., - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always', - return_type: Any = ..., -) -> Callable[[FuncType], FuncType]: - ... - - -def model_serializer( - __f: Callable[..., Any] | None = None, - *, - mode: Literal['plain', 'wrap'] = 'plain', - when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always', - return_type: Any = PydanticUndefined, -) -> Callable[[Any], Any]: - """Decorator that enables custom model serialization. - - This is useful when a model need to be serialized in a customized manner, allowing for flexibility beyond just specific fields. - - An example would be to serialize temperature to the same temperature scale, such as degrees Celsius. 
- - ```python - from typing import Literal - - from pydantic import BaseModel, model_serializer - - class TemperatureModel(BaseModel): - unit: Literal['C', 'F'] - value: int - - @model_serializer() - def serialize_model(self): - if self.unit == 'F': - return {'unit': 'C', 'value': int((self.value - 32) / 1.8)} - return {'unit': self.unit, 'value': self.value} - - temperature = TemperatureModel(unit='F', value=212) - print(temperature.model_dump()) - #> {'unit': 'C', 'value': 100} - ``` - - See [Custom serializers](../concepts/serialization.md#custom-serializers) for more information. - - Args: - __f: The function to be decorated. - mode: The serialization mode. - - - `'plain'` means the function will be called instead of the default serialization logic - - `'wrap'` means the function will be called with an argument to optionally call the default - serialization logic. - when_used: Determines when this serializer should be used. - return_type: The return type for the function. If omitted it will be inferred from the type annotation. - - Returns: - The decorator function. - """ - - def dec(f: Callable[..., Any]) -> _decorators.PydanticDescriptorProxy[Any]: - dec_info = _decorators.ModelSerializerDecoratorInfo(mode=mode, return_type=return_type, when_used=when_used) - return _decorators.PydanticDescriptorProxy(f, dec_info) - - if __f is None: - return dec - else: - return dec(__f) # type: ignore - - -AnyType = TypeVar('AnyType') - - -if TYPE_CHECKING: - SerializeAsAny = Annotated[AnyType, ...] # SerializeAsAny[list[str]] will be treated by type checkers as list[str] - """Force serialization to ignore whatever is defined in the schema and instead ask the object - itself how it should be serialized. - In particular, this means that when model subclasses are serialized, fields present in the subclass - but not in the original schema will be included. 
- """ -else: - - @dataclasses.dataclass(**_internal_dataclass.slots_true) - class SerializeAsAny: # noqa: D101 - def __class_getitem__(cls, item: Any) -> Any: - return Annotated[item, SerializeAsAny()] - - def __get_pydantic_core_schema__( - self, source_type: Any, handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - schema = handler(source_type) - schema_to_update = schema - while schema_to_update['type'] == 'definitions': - schema_to_update = schema_to_update.copy() - schema_to_update = schema_to_update['schema'] - schema_to_update['serialization'] = core_schema.wrap_serializer_function_ser_schema( - lambda x, h: h(x), schema=core_schema.any_schema() - ) - return schema - - __hash__ = object.__hash__ diff --git a/lib/pydantic/functional_validators.py b/lib/pydantic/functional_validators.py deleted file mode 100644 index b547755b..00000000 --- a/lib/pydantic/functional_validators.py +++ /dev/null @@ -1,706 +0,0 @@ -"""This module contains related classes and functions for validation.""" - -from __future__ import annotations as _annotations - -import dataclasses -import sys -from functools import partialmethod -from types import FunctionType -from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, cast, overload - -from pydantic_core import core_schema -from pydantic_core import core_schema as _core_schema -from typing_extensions import Annotated, Literal, TypeAlias - -from . 
import GetCoreSchemaHandler as _GetCoreSchemaHandler -from ._internal import _core_metadata, _decorators, _generics, _internal_dataclass -from .annotated_handlers import GetCoreSchemaHandler -from .errors import PydanticUserError - -if sys.version_info < (3, 11): - from typing_extensions import Protocol -else: - from typing import Protocol - -_inspect_validator = _decorators.inspect_validator - - -@dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true) -class AfterValidator: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/validators/#annotated-validators - - A metadata class that indicates that a validation should be applied **after** the inner validation logic. - - Attributes: - func: The validator function. - - Example: - ```py - from typing_extensions import Annotated - - from pydantic import AfterValidator, BaseModel, ValidationError - - MyInt = Annotated[int, AfterValidator(lambda v: v + 1)] - - class Model(BaseModel): - a: MyInt - - print(Model(a=1).a) - #> 2 - - try: - Model(a='a') - except ValidationError as e: - print(e.json(indent=2)) - ''' - [ - { - "type": "int_parsing", - "loc": [ - "a" - ], - "msg": "Input should be a valid integer, unable to parse string as an integer", - "input": "a", - "url": "https://errors.pydantic.dev/2/v/int_parsing" - } - ] - ''' - ``` - """ - - func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction - - def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: - schema = handler(source_type) - info_arg = _inspect_validator(self.func, 'after') - if info_arg: - func = cast(core_schema.WithInfoValidatorFunction, self.func) - return core_schema.with_info_after_validator_function(func, schema=schema, field_name=handler.field_name) - else: - func = cast(core_schema.NoInfoValidatorFunction, self.func) - return core_schema.no_info_after_validator_function(func, schema=schema) - - -@dataclasses.dataclass(frozen=True, 
**_internal_dataclass.slots_true) -class BeforeValidator: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/validators/#annotated-validators - - A metadata class that indicates that a validation should be applied **before** the inner validation logic. - - Attributes: - func: The validator function. - - Example: - ```py - from typing_extensions import Annotated - - from pydantic import BaseModel, BeforeValidator - - MyInt = Annotated[int, BeforeValidator(lambda v: v + 1)] - - class Model(BaseModel): - a: MyInt - - print(Model(a=1).a) - #> 2 - - try: - Model(a='a') - except TypeError as e: - print(e) - #> can only concatenate str (not "int") to str - ``` - """ - - func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction - - def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: - schema = handler(source_type) - info_arg = _inspect_validator(self.func, 'before') - if info_arg: - func = cast(core_schema.WithInfoValidatorFunction, self.func) - return core_schema.with_info_before_validator_function(func, schema=schema, field_name=handler.field_name) - else: - func = cast(core_schema.NoInfoValidatorFunction, self.func) - return core_schema.no_info_before_validator_function(func, schema=schema) - - -@dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true) -class PlainValidator: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/validators/#annotated-validators - - A metadata class that indicates that a validation should be applied **instead** of the inner validation logic. - - Attributes: - func: The validator function. 
- - Example: - ```py - from typing_extensions import Annotated - - from pydantic import BaseModel, PlainValidator - - MyInt = Annotated[int, PlainValidator(lambda v: int(v) + 1)] - - class Model(BaseModel): - a: MyInt - - print(Model(a='1').a) - #> 2 - ``` - """ - - func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction - - def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: - # Note that for some valid uses of PlainValidator, it is not possible to generate a core schema for the - # source_type, so calling `handler(source_type)` will error, which prevents us from generating a proper - # serialization schema. To work around this for use cases that will not involve serialization, we simply - # catch any PydanticSchemaGenerationError that may be raised while attempting to build the serialization schema - # and abort any attempts to handle special serialization. - from pydantic import PydanticSchemaGenerationError - - try: - schema = handler(source_type) - serialization = core_schema.wrap_serializer_function_ser_schema(function=lambda v, h: h(v), schema=schema) - except PydanticSchemaGenerationError: - serialization = None - - info_arg = _inspect_validator(self.func, 'plain') - if info_arg: - func = cast(core_schema.WithInfoValidatorFunction, self.func) - return core_schema.with_info_plain_validator_function( - func, field_name=handler.field_name, serialization=serialization - ) - else: - func = cast(core_schema.NoInfoValidatorFunction, self.func) - return core_schema.no_info_plain_validator_function(func, serialization=serialization) - - -@dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true) -class WrapValidator: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/validators/#annotated-validators - - A metadata class that indicates that a validation should be applied **around** the inner validation logic. - - Attributes: - func: The validator function. 
- - ```py - from datetime import datetime - - from typing_extensions import Annotated - - from pydantic import BaseModel, ValidationError, WrapValidator - - def validate_timestamp(v, handler): - if v == 'now': - # we don't want to bother with further validation, just return the new value - return datetime.now() - try: - return handler(v) - except ValidationError: - # validation failed, in this case we want to return a default value - return datetime(2000, 1, 1) - - MyTimestamp = Annotated[datetime, WrapValidator(validate_timestamp)] - - class Model(BaseModel): - a: MyTimestamp - - print(Model(a='now').a) - #> 2032-01-02 03:04:05.000006 - print(Model(a='invalid').a) - #> 2000-01-01 00:00:00 - ``` - """ - - func: core_schema.NoInfoWrapValidatorFunction | core_schema.WithInfoWrapValidatorFunction - - def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: - schema = handler(source_type) - info_arg = _inspect_validator(self.func, 'wrap') - if info_arg: - func = cast(core_schema.WithInfoWrapValidatorFunction, self.func) - return core_schema.with_info_wrap_validator_function(func, schema=schema, field_name=handler.field_name) - else: - func = cast(core_schema.NoInfoWrapValidatorFunction, self.func) - return core_schema.no_info_wrap_validator_function(func, schema=schema) - - -if TYPE_CHECKING: - - class _OnlyValueValidatorClsMethod(Protocol): - def __call__(self, cls: Any, value: Any, /) -> Any: - ... - - class _V2ValidatorClsMethod(Protocol): - def __call__(self, cls: Any, value: Any, info: _core_schema.ValidationInfo, /) -> Any: - ... - - class _V2WrapValidatorClsMethod(Protocol): - def __call__( - self, - cls: Any, - value: Any, - handler: _core_schema.ValidatorFunctionWrapHandler, - info: _core_schema.ValidationInfo, - /, - ) -> Any: - ... 
- - _V2Validator = Union[ - _V2ValidatorClsMethod, - _core_schema.WithInfoValidatorFunction, - _OnlyValueValidatorClsMethod, - _core_schema.NoInfoValidatorFunction, - ] - - _V2WrapValidator = Union[ - _V2WrapValidatorClsMethod, - _core_schema.WithInfoWrapValidatorFunction, - ] - - _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]] - - _V2BeforeAfterOrPlainValidatorType = TypeVar( - '_V2BeforeAfterOrPlainValidatorType', - _V2Validator, - _PartialClsOrStaticMethod, - ) - _V2WrapValidatorType = TypeVar('_V2WrapValidatorType', _V2WrapValidator, _PartialClsOrStaticMethod) - - -@overload -def field_validator( - __field: str, - *fields: str, - mode: Literal['before', 'after', 'plain'] = ..., - check_fields: bool | None = ..., -) -> Callable[[_V2BeforeAfterOrPlainValidatorType], _V2BeforeAfterOrPlainValidatorType]: - ... - - -@overload -def field_validator( - __field: str, - *fields: str, - mode: Literal['wrap'], - check_fields: bool | None = ..., -) -> Callable[[_V2WrapValidatorType], _V2WrapValidatorType]: - ... - - -FieldValidatorModes: TypeAlias = Literal['before', 'after', 'wrap', 'plain'] - - -def field_validator( - __field: str, - *fields: str, - mode: FieldValidatorModes = 'after', - check_fields: bool | None = None, -) -> Callable[[Any], Any]: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/validators/#field-validators - - Decorate methods on the class indicating that they should be used to validate fields. 
- - Example usage: - ```py - from typing import Any - - from pydantic import ( - BaseModel, - ValidationError, - field_validator, - ) - - class Model(BaseModel): - a: str - - @field_validator('a') - @classmethod - def ensure_foobar(cls, v: Any): - if 'foobar' not in v: - raise ValueError('"foobar" not found in a') - return v - - print(repr(Model(a='this is foobar good'))) - #> Model(a='this is foobar good') - - try: - Model(a='snap') - except ValidationError as exc_info: - print(exc_info) - ''' - 1 validation error for Model - a - Value error, "foobar" not found in a [type=value_error, input_value='snap', input_type=str] - ''' - ``` - - For more in depth examples, see [Field Validators](../concepts/validators.md#field-validators). - - Args: - __field: The first field the `field_validator` should be called on; this is separate - from `fields` to ensure an error is raised if you don't pass at least one. - *fields: Additional field(s) the `field_validator` should be called on. - mode: Specifies whether to validate the fields before or after validation. - check_fields: Whether to check that the fields actually exist on the model. - - Returns: - A decorator that can be used to decorate a function to be used as a field_validator. - - Raises: - PydanticUserError: - - If `@field_validator` is used bare (with no fields). - - If the args passed to `@field_validator` as fields are not strings. - - If `@field_validator` applied to instance methods. - """ - if isinstance(__field, FunctionType): - raise PydanticUserError( - '`@field_validator` should be used with fields and keyword arguments, not bare. ' - "E.g. usage should be `@validator('', ...)`", - code='validator-no-fields', - ) - fields = __field, *fields - if not all(isinstance(field, str) for field in fields): - raise PydanticUserError( - '`@field_validator` fields should be passed as separate string args. ' - "E.g. 
usage should be `@validator('', '', ...)`", - code='validator-invalid-fields', - ) - - def dec( - f: Callable[..., Any] | staticmethod[Any, Any] | classmethod[Any, Any, Any], - ) -> _decorators.PydanticDescriptorProxy[Any]: - if _decorators.is_instance_method_from_sig(f): - raise PydanticUserError( - '`@field_validator` cannot be applied to instance methods', code='validator-instance-method' - ) - - # auto apply the @classmethod decorator - f = _decorators.ensure_classmethod_based_on_signature(f) - - dec_info = _decorators.FieldValidatorDecoratorInfo(fields=fields, mode=mode, check_fields=check_fields) - return _decorators.PydanticDescriptorProxy(f, dec_info) - - return dec - - -_ModelType = TypeVar('_ModelType') -_ModelTypeCo = TypeVar('_ModelTypeCo', covariant=True) - - -class ModelWrapValidatorHandler(_core_schema.ValidatorFunctionWrapHandler, Protocol[_ModelTypeCo]): - """@model_validator decorated function handler argument type. This is used when `mode='wrap'`.""" - - def __call__( # noqa: D102 - self, - value: Any, - outer_location: str | int | None = None, - /, - ) -> _ModelTypeCo: # pragma: no cover - ... - - -class ModelWrapValidatorWithoutInfo(Protocol[_ModelType]): - """A @model_validator decorated function signature. - This is used when `mode='wrap'` and the function does not have info argument. - """ - - def __call__( # noqa: D102 - self, - cls: type[_ModelType], - # this can be a dict, a model instance - # or anything else that gets passed to validate_python - # thus validators _must_ handle all cases - value: Any, - handler: ModelWrapValidatorHandler[_ModelType], - /, - ) -> _ModelType: - ... - - -class ModelWrapValidator(Protocol[_ModelType]): - """A @model_validator decorated function signature. 
This is used when `mode='wrap'`.""" - - def __call__( # noqa: D102 - self, - cls: type[_ModelType], - # this can be a dict, a model instance - # or anything else that gets passed to validate_python - # thus validators _must_ handle all cases - value: Any, - handler: ModelWrapValidatorHandler[_ModelType], - info: _core_schema.ValidationInfo, - /, - ) -> _ModelType: - ... - - -class FreeModelBeforeValidatorWithoutInfo(Protocol): - """A @model_validator decorated function signature. - This is used when `mode='before'` and the function does not have info argument. - """ - - def __call__( # noqa: D102 - self, - # this can be a dict, a model instance - # or anything else that gets passed to validate_python - # thus validators _must_ handle all cases - value: Any, - /, - ) -> Any: - ... - - -class ModelBeforeValidatorWithoutInfo(Protocol): - """A @model_validator decorated function signature. - This is used when `mode='before'` and the function does not have info argument. - """ - - def __call__( # noqa: D102 - self, - cls: Any, - # this can be a dict, a model instance - # or anything else that gets passed to validate_python - # thus validators _must_ handle all cases - value: Any, - /, - ) -> Any: - ... - - -class FreeModelBeforeValidator(Protocol): - """A `@model_validator` decorated function signature. This is used when `mode='before'`.""" - - def __call__( # noqa: D102 - self, - # this can be a dict, a model instance - # or anything else that gets passed to validate_python - # thus validators _must_ handle all cases - value: Any, - info: _core_schema.ValidationInfo, - /, - ) -> Any: - ... - - -class ModelBeforeValidator(Protocol): - """A `@model_validator` decorated function signature. 
This is used when `mode='before'`.""" - - def __call__( # noqa: D102 - self, - cls: Any, - # this can be a dict, a model instance - # or anything else that gets passed to validate_python - # thus validators _must_ handle all cases - value: Any, - info: _core_schema.ValidationInfo, - /, - ) -> Any: - ... - - -ModelAfterValidatorWithoutInfo = Callable[[_ModelType], _ModelType] -"""A `@model_validator` decorated function signature. This is used when `mode='after'` and the function does not -have info argument. -""" - -ModelAfterValidator = Callable[[_ModelType, _core_schema.ValidationInfo], _ModelType] -"""A `@model_validator` decorated function signature. This is used when `mode='after'`.""" - -_AnyModelWrapValidator = Union[ModelWrapValidator[_ModelType], ModelWrapValidatorWithoutInfo[_ModelType]] -_AnyModeBeforeValidator = Union[ - FreeModelBeforeValidator, ModelBeforeValidator, FreeModelBeforeValidatorWithoutInfo, ModelBeforeValidatorWithoutInfo -] -_AnyModelAfterValidator = Union[ModelAfterValidator[_ModelType], ModelAfterValidatorWithoutInfo[_ModelType]] - - -@overload -def model_validator( - *, - mode: Literal['wrap'], -) -> Callable[ - [_AnyModelWrapValidator[_ModelType]], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo] -]: - ... - - -@overload -def model_validator( - *, - mode: Literal['before'], -) -> Callable[[_AnyModeBeforeValidator], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo]]: - ... - - -@overload -def model_validator( - *, - mode: Literal['after'], -) -> Callable[ - [_AnyModelAfterValidator[_ModelType]], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo] -]: - ... - - -def model_validator( - *, - mode: Literal['wrap', 'before', 'after'], -) -> Any: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/validators/#model-validators - - Decorate model methods for validation purposes. 
- - Example usage: - ```py - from typing_extensions import Self - - from pydantic import BaseModel, ValidationError, model_validator - - class Square(BaseModel): - width: float - height: float - - @model_validator(mode='after') - def verify_square(self) -> Self: - if self.width != self.height: - raise ValueError('width and height do not match') - return self - - s = Square(width=1, height=1) - print(repr(s)) - #> Square(width=1.0, height=1.0) - - try: - Square(width=1, height=2) - except ValidationError as e: - print(e) - ''' - 1 validation error for Square - Value error, width and height do not match [type=value_error, input_value={'width': 1, 'height': 2}, input_type=dict] - ''' - ``` - - For more in depth examples, see [Model Validators](../concepts/validators.md#model-validators). - - Args: - mode: A required string literal that specifies the validation mode. - It can be one of the following: 'wrap', 'before', or 'after'. - - Returns: - A decorator that can be used to decorate a function to be used as a model validator. - """ - - def dec(f: Any) -> _decorators.PydanticDescriptorProxy[Any]: - # auto apply the @classmethod decorator - f = _decorators.ensure_classmethod_based_on_signature(f) - dec_info = _decorators.ModelValidatorDecoratorInfo(mode=mode) - return _decorators.PydanticDescriptorProxy(f, dec_info) - - return dec - - -AnyType = TypeVar('AnyType') - - -if TYPE_CHECKING: - # If we add configurable attributes to IsInstance, we'd probably need to stop hiding it from type checkers like this - InstanceOf = Annotated[AnyType, ...] # `IsInstance[Sequence]` will be recognized by type checkers as `Sequence` - -else: - - @dataclasses.dataclass(**_internal_dataclass.slots_true) - class InstanceOf: - '''Generic type for annotating a type that is an instance of a given class. - - Example: - ```py - from pydantic import BaseModel, InstanceOf - - class Foo: - ... 
- - class Bar(BaseModel): - foo: InstanceOf[Foo] - - Bar(foo=Foo()) - try: - Bar(foo=42) - except ValidationError as e: - print(e) - """ - [ - │ { - │ │ 'type': 'is_instance_of', - │ │ 'loc': ('foo',), - │ │ 'msg': 'Input should be an instance of Foo', - │ │ 'input': 42, - │ │ 'ctx': {'class': 'Foo'}, - │ │ 'url': 'https://errors.pydantic.dev/0.38.0/v/is_instance_of' - │ } - ] - """ - ``` - ''' - - @classmethod - def __class_getitem__(cls, item: AnyType) -> AnyType: - return Annotated[item, cls()] - - @classmethod - def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - from pydantic import PydanticSchemaGenerationError - - # use the generic _origin_ as the second argument to isinstance when appropriate - instance_of_schema = core_schema.is_instance_schema(_generics.get_origin(source) or source) - - try: - # Try to generate the "standard" schema, which will be used when loading from JSON - original_schema = handler(source) - except PydanticSchemaGenerationError: - # If that fails, just produce a schema that can validate from python - return instance_of_schema - else: - # Use the "original" approach to serialization - instance_of_schema['serialization'] = core_schema.wrap_serializer_function_ser_schema( - function=lambda v, h: h(v), schema=original_schema - ) - return core_schema.json_or_python_schema(python_schema=instance_of_schema, json_schema=original_schema) - - __hash__ = object.__hash__ - - -if TYPE_CHECKING: - SkipValidation = Annotated[AnyType, ...] # SkipValidation[list[str]] will be treated by type checkers as list[str] -else: - - @dataclasses.dataclass(**_internal_dataclass.slots_true) - class SkipValidation: - """If this is applied as an annotation (e.g., via `x: Annotated[int, SkipValidation]`), validation will be - skipped. You can also use `SkipValidation[int]` as a shorthand for `Annotated[int, SkipValidation]`. 
- - This can be useful if you want to use a type annotation for documentation/IDE/type-checking purposes, - and know that it is safe to skip validation for one or more of the fields. - - Because this converts the validation schema to `any_schema`, subsequent annotation-applied transformations - may not have the expected effects. Therefore, when used, this annotation should generally be the final - annotation applied to a type. - """ - - def __class_getitem__(cls, item: Any) -> Any: - return Annotated[item, SkipValidation()] - - @classmethod - def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - original_schema = handler(source) - metadata = _core_metadata.build_metadata_dict(js_annotation_functions=[lambda _c, h: h(original_schema)]) - return core_schema.any_schema( - metadata=metadata, - serialization=core_schema.wrap_serializer_function_ser_schema( - function=lambda v, h: h(v), schema=original_schema - ), - ) - - __hash__ = object.__hash__ diff --git a/lib/pydantic/generics.py b/lib/pydantic/generics.py index 5f6f7f7a..a3f52bfe 100644 --- a/lib/pydantic/generics.py +++ b/lib/pydantic/generics.py @@ -1,4 +1,364 @@ -"""The `generics` module is a backport module from V1.""" -from ._migration import getattr_migration +import sys +import typing +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Dict, + Generic, + Iterator, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, +) -__getattr__ = getattr_migration(__name__) +from typing_extensions import Annotated + +from .class_validators import gather_all_validators +from .fields import DeferredType +from .main import BaseModel, create_model +from .types import JsonWrapper +from .typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base +from .utils import LimitedDict, all_identical, lenient_issubclass + +GenericModelT = TypeVar('GenericModelT', bound='GenericModel') +TypeVarType = Any # since mypy 
doesn't allow the use of TypeVar as a type + +Parametrization = Mapping[TypeVarType, Type[Any]] + +_generic_types_cache: LimitedDict[Tuple[Type[Any], Union[Any, Tuple[Any, ...]]], Type[BaseModel]] = LimitedDict() +# _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations +# as captured during construction of the class (not instances). +# E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created, +# `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`. +# (This information is only otherwise available after creation from the class name string). +_assigned_parameters: LimitedDict[Type[Any], Parametrization] = LimitedDict() + + +class GenericModel(BaseModel): + __slots__ = () + __concrete__: ClassVar[bool] = False + + if TYPE_CHECKING: + # Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with + # `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of + # `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below. + __parameters__: ClassVar[Tuple[TypeVarType, ...]] + + # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings + def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]: + """Instantiates a new class from a generic class `cls` and type variables `params`. + + :param params: Tuple of types the class . Given a generic class + `Model` with 2 type variables and a concrete model `Model[str, int]`, + the value `(str, int)` would be passed to `params`. + :return: New model class inheriting from `cls` with instantiated + types described by `params`. If no parameters are given, `cls` is + returned as is. 
+ + """ + + def _cache_key(_params: Any) -> Tuple[Type[GenericModelT], Any, Tuple[Any, ...]]: + return cls, _params, get_args(_params) + + cached = _generic_types_cache.get(_cache_key(params)) + if cached is not None: + return cached + if cls.__concrete__ and Generic not in cls.__bases__: + raise TypeError('Cannot parameterize a concrete instantiation of a generic model') + if not isinstance(params, tuple): + params = (params,) + if cls is GenericModel and any(isinstance(param, TypeVar) for param in params): + raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel') + if not hasattr(cls, '__parameters__'): + raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized') + + check_parameters_count(cls, params) + # Build map from generic typevars to passed params + typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params)) + if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map: + return cls # if arguments are equal to parameters it's the same object + + # Create new model with original model as parent inserting fields with DeferredType. 
+ model_name = cls.__concrete_name__(params) + validators = gather_all_validators(cls) + + type_hints = get_all_type_hints(cls).items() + instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar} + + fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__} + + model_module, called_globally = get_caller_frame_info() + created_model = cast( + Type[GenericModel], # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes + create_model( + model_name, + __module__=model_module or cls.__module__, + __base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)), + __config__=None, + __validators__=validators, + __cls_kwargs__=None, + **fields, + ), + ) + + _assigned_parameters[created_model] = typevars_map + + if called_globally: # create global reference and therefore allow pickling + object_by_reference = None + reference_name = model_name + reference_module_globals = sys.modules[created_model.__module__].__dict__ + while object_by_reference is not created_model: + object_by_reference = reference_module_globals.setdefault(reference_name, created_model) + reference_name += '_' + + created_model.Config = cls.Config + + # Find any typevars that are still present in the model. + # If none are left, the model is fully "concrete", otherwise the new + # class is a generic class as well taking the found typevars as + # parameters. + new_params = tuple( + {param: None for param in iter_contained_typevars(typevars_map.values())} + ) # use dict as ordered set + created_model.__concrete__ = not new_params + if new_params: + created_model.__parameters__ = new_params + + # Save created model in cache so we don't end up creating duplicate + # models that should be identical. 
+ _generic_types_cache[_cache_key(params)] = created_model + if len(params) == 1: + _generic_types_cache[_cache_key(params[0])] = created_model + + # Recursively walk class type hints and replace generic typevars + # with concrete types that were passed. + _prepare_model_fields(created_model, fields, instance_type_hints, typevars_map) + + return created_model + + @classmethod + def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: + """Compute class name for child classes. + + :param params: Tuple of types the class . Given a generic class + `Model` with 2 type variables and a concrete model `Model[str, int]`, + the value `(str, int)` would be passed to `params`. + :return: String representing a the new class where `params` are + passed to `cls` as type variables. + + This method can be overridden to achieve a custom naming scheme for GenericModels. + """ + param_names = [display_as_type(param) for param in params] + params_component = ', '.join(param_names) + return f'{cls.__name__}[{params_component}]' + + @classmethod + def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]: + """ + Returns unbound bases of cls parameterised to given type variables + + :param typevars_map: Dictionary of type applications for binding subclasses. + Given a generic class `Model` with 2 type variables [S, T] + and a concrete model `Model[str, int]`, + the value `{S: str, T: int}` would be passed to `typevars_map`. + :return: an iterator of generic sub classes, parameterised by `typevars_map` + and other assigned parameters of `cls` + + e.g.: + ``` + class A(GenericModel, Generic[T]): + ... + + class B(A[V], Generic[V]): + ... 
+ + assert A[int] in B.__parameterized_bases__({V: int}) + ``` + """ + + def build_base_model( + base_model: Type[GenericModel], mapped_types: Parametrization + ) -> Iterator[Type[GenericModel]]: + base_parameters = tuple(mapped_types[param] for param in base_model.__parameters__) + parameterized_base = base_model.__class_getitem__(base_parameters) + if parameterized_base is base_model or parameterized_base is cls: + # Avoid duplication in MRO + return + yield parameterized_base + + for base_model in cls.__bases__: + if not issubclass(base_model, GenericModel): + # not a class that can be meaningfully parameterized + continue + elif not getattr(base_model, '__parameters__', None): + # base_model is "GenericModel" (and has no __parameters__) + # or + # base_model is already concrete, and will be included transitively via cls. + continue + elif cls in _assigned_parameters: + if base_model in _assigned_parameters: + # cls is partially parameterised but not from base_model + # e.g. cls = B[S], base_model = A[S] + # B[S][int] should subclass A[int], (and will be transitively via B[int]) + # but it's not viable to consistently subclass types with arbitrary construction + # So don't attempt to include A[S][int] + continue + else: # base_model not in _assigned_parameters: + # cls is partially parameterized, base_model is original generic + # e.g. 
cls = B[str, T], base_model = B[S, T] + # Need to determine the mapping for the base_model parameters + mapped_types: Parametrization = { + key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items() + } + yield from build_base_model(base_model, mapped_types) + else: + # cls is base generic, so base_class has a distinct base + # can construct the Parameterised base model using typevars_map directly + yield from build_base_model(base_model, typevars_map) + + +def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any: + """Return type with all occurrences of `type_map` keys recursively replaced with their values. + + :param type_: Any type, class or generic alias + :param type_map: Mapping from `TypeVar` instance to concrete types. + :return: New type representing the basic structure of `type_` with all + `typevar_map` keys recursively replaced. + + >>> replace_types(Tuple[str, Union[List[str], float]], {str: int}) + Tuple[int, Union[List[int], float]] + + """ + if not type_map: + return type_ + + type_args = get_args(type_) + origin_type = get_origin(type_) + + if origin_type is Annotated: + annotated_type, *annotations = type_args + return Annotated[replace_types(annotated_type, type_map), tuple(annotations)] + + # Having type args is a good indicator that this is a typing module + # class instantiation or a generic alias of some sort. + if type_args: + resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args) + if all_identical(type_args, resolved_type_args): + # If all arguments are the same, there is no need to modify the + # type or create a new object at all + return type_ + if ( + origin_type is not None + and isinstance(type_, typing_base) + and not isinstance(origin_type, typing_base) + and getattr(type_, '_name', None) is not None + ): + # In python < 3.9 generic aliases don't exist so any of these like `list`, + # `type` or `collections.abc.Callable` need to be translated. 
+ # See: https://www.python.org/dev/peps/pep-0585 + origin_type = getattr(typing, type_._name) + assert origin_type is not None + return origin_type[resolved_type_args] + + # We handle pydantic generic models separately as they don't have the same + # semantics as "typing" classes or generic aliases + if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__: + type_args = type_.__parameters__ + resolved_type_args = tuple(replace_types(t, type_map) for t in type_args) + if all_identical(type_args, resolved_type_args): + return type_ + return type_[resolved_type_args] + + # Handle special case for typehints that can have lists as arguments. + # `typing.Callable[[int, str], int]` is an example for this. + if isinstance(type_, (List, list)): + resolved_list = list(replace_types(element, type_map) for element in type_) + if all_identical(type_, resolved_list): + return type_ + return resolved_list + + # For JsonWrapperValue, need to handle its inner type to allow correct parsing + # of generic Json arguments like Json[T] + if not origin_type and lenient_issubclass(type_, JsonWrapper): + type_.inner_type = replace_types(type_.inner_type, type_map) + return type_ + + # If all else fails, we try to resolve the type directly and otherwise just + # return the input with no modifications. 
+ return type_map.get(type_, type_) + + +def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None: + actual = len(parameters) + expected = len(cls.__parameters__) + if actual != expected: + description = 'many' if actual > expected else 'few' + raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}') + + +DictValues: Type[Any] = {}.values().__class__ + + +def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]: + """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.""" + if isinstance(v, TypeVar): + yield v + elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel): + yield from v.__parameters__ + elif isinstance(v, (DictValues, list)): + for var in v: + yield from iter_contained_typevars(var) + else: + args = get_args(v) + for arg in args: + yield from iter_contained_typevars(arg) + + +def get_caller_frame_info() -> Tuple[Optional[str], bool]: + """ + Used inside a function to check whether it was called globally + + Will only work against non-compiled code, therefore used only in pydantic.generics + + :returns Tuple[module_name, called_globally] + """ + try: + previous_caller_frame = sys._getframe(2) + except ValueError as e: + raise RuntimeError('This function must be used inside another function') from e + except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it + return None, False + frame_globals = previous_caller_frame.f_globals + return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals + + +def _prepare_model_fields( + created_model: Type[GenericModel], + fields: Mapping[str, Any], + instance_type_hints: Mapping[str, type], + typevars_map: Mapping[Any, type], +) -> None: + """ + Replace DeferredType fields with concrete type hints and prepare them. 
+ """ + + for key, field in created_model.__fields__.items(): + if key not in fields: + assert field.type_.__class__ is not DeferredType + # https://github.com/nedbat/coveragepy/issues/198 + continue # pragma: no cover + + assert field.type_.__class__ is DeferredType, field.type_.__class__ + + field_type_hint = instance_type_hints[key] + concrete_type = replace_types(field_type_hint, typevars_map) + field.type_ = concrete_type + field.outer_type_ = concrete_type + field.prepare() + created_model.__annotations__[key] = concrete_type diff --git a/lib/pydantic/json.py b/lib/pydantic/json.py index 020fb6d2..b358b850 100644 --- a/lib/pydantic/json.py +++ b/lib/pydantic/json.py @@ -1,4 +1,112 @@ -"""The `json` module is a backport module from V1.""" -from ._migration import getattr_migration +import datetime +from collections import deque +from decimal import Decimal +from enum import Enum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from re import Pattern +from types import GeneratorType +from typing import Any, Callable, Dict, Type, Union +from uuid import UUID -__getattr__ = getattr_migration(__name__) +from .color import Color +from .networks import NameEmail +from .types import SecretBytes, SecretStr + +__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat' + + +def isoformat(o: Union[datetime.date, datetime.time]) -> str: + return o.isoformat() + + +def decimal_encoder(dec_value: Decimal) -> Union[int, float]: + """ + Encodes a Decimal as int of there's no exponent, otherwise float + + This is useful when we use ConstrainedDecimal to represent Numeric(x,0) + where a integer (but not int typed) is used. Encoding this as a float + results in failed round-tripping between encode and parse. + Our Id type is a prime example of this. 
+ + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + """ + if dec_value.as_tuple().exponent >= 0: + return int(dec_value) + else: + return float(dec_value) + + +ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, +} + + +def pydantic_encoder(obj: Any) -> Any: + from dataclasses import asdict, is_dataclass + + from .main import BaseModel + + if isinstance(obj, BaseModel): + return obj.dict() + elif is_dataclass(obj): + return asdict(obj) + + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = ENCODERS_BY_TYPE[base] + except KeyError: + continue + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") + + +def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = type_encoders[base] + except KeyError: + continue + + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + return pydantic_encoder(obj) + + +def timedelta_isoformat(td: datetime.timedelta) -> str: + """ + ISO 8601 encoding for Python timedelta object. 
+ """ + minutes, seconds = divmod(td.seconds, 60) + hours, minutes = divmod(minutes, 60) + return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' diff --git a/lib/pydantic/json_schema.py b/lib/pydantic/json_schema.py deleted file mode 100644 index eee9e60e..00000000 --- a/lib/pydantic/json_schema.py +++ /dev/null @@ -1,2425 +0,0 @@ -""" -Usage docs: https://docs.pydantic.dev/2.5/concepts/json_schema/ - -The `json_schema` module contains classes and functions to allow the way [JSON Schema](https://json-schema.org/) -is generated to be customized. - -In general you shouldn't need to use this module directly; instead, you can -[`BaseModel.model_json_schema`][pydantic.BaseModel.model_json_schema] and -[`TypeAdapter.json_schema`][pydantic.TypeAdapter.json_schema]. -""" -from __future__ import annotations as _annotations - -import dataclasses -import inspect -import math -import re -import warnings -from collections import defaultdict -from copy import deepcopy -from dataclasses import is_dataclass -from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Counter, - Dict, - Hashable, - Iterable, - NewType, - Sequence, - Tuple, - TypeVar, - Union, - cast, -) - -import pydantic_core -from pydantic_core import CoreSchema, PydanticOmit, core_schema, to_jsonable_python -from pydantic_core.core_schema import ComputedField -from typing_extensions import Annotated, Literal, TypeAlias, assert_never, deprecated, final - -from pydantic.warnings import PydanticDeprecatedSince26 - -from ._internal import ( - _config, - _core_metadata, - _core_utils, - _decorators, - _internal_dataclass, - _mock_val_ser, - _schema_generation_shared, - _typing_extra, -) -from .annotated_handlers import GetJsonSchemaHandler -from .config import JsonDict, JsonSchemaExtraCallable, JsonValue -from .errors import PydanticInvalidForJsonSchema, PydanticUserError - -if TYPE_CHECKING: - from . 
import ConfigDict - from ._internal._core_utils import CoreSchemaField, CoreSchemaOrField - from ._internal._dataclasses import PydanticDataclass - from ._internal._schema_generation_shared import GetJsonSchemaFunction - from .main import BaseModel - - -CoreSchemaOrFieldType = Literal[core_schema.CoreSchemaType, core_schema.CoreSchemaFieldType] -""" -A type alias for defined schema types that represents a union of -`core_schema.CoreSchemaType` and -`core_schema.CoreSchemaFieldType`. -""" - -JsonSchemaValue = Dict[str, Any] -""" -A type alias for a JSON schema value. This is a dictionary of string keys to arbitrary JSON values. -""" - -JsonSchemaMode = Literal['validation', 'serialization'] -""" -A type alias that represents the mode of a JSON schema; either 'validation' or 'serialization'. - -For some types, the inputs to validation differ from the outputs of serialization. For example, -computed fields will only be present when serializing, and should not be provided when -validating. This flag provides a way to indicate whether you want the JSON schema required -for validation inputs, or that will be matched by serialization outputs. -""" - -_MODE_TITLE_MAPPING: dict[JsonSchemaMode, str] = {'validation': 'Input', 'serialization': 'Output'} - - -def update_json_schema(schema: JsonSchemaValue, updates: dict[str, Any]) -> JsonSchemaValue: - """Update a JSON schema in-place by providing a dictionary of updates. - - This function sets the provided key-value pairs in the schema and returns the updated schema. - - Args: - schema: The JSON schema to update. - updates: A dictionary of key-value pairs to set in the schema. - - Returns: - The updated JSON schema. - """ - schema.update(updates) - return schema - - -JsonSchemaWarningKind = Literal['skipped-choice', 'non-serializable-default'] -""" -A type alias representing the kinds of warnings that can be emitted during JSON schema generation. 
- -See [`GenerateJsonSchema.render_warning_message`][pydantic.json_schema.GenerateJsonSchema.render_warning_message] -for more details. -""" - - -class PydanticJsonSchemaWarning(UserWarning): - """This class is used to emit warnings produced during JSON schema generation. - See the [`GenerateJsonSchema.emit_warning`][pydantic.json_schema.GenerateJsonSchema.emit_warning] and - [`GenerateJsonSchema.render_warning_message`][pydantic.json_schema.GenerateJsonSchema.render_warning_message] - methods for more details; these can be overridden to control warning behavior. - """ - - -# ##### JSON Schema Generation ##### -DEFAULT_REF_TEMPLATE = '#/$defs/{model}' -"""The default format string used to generate reference names.""" - -# There are three types of references relevant to building JSON schemas: -# 1. core_schema "ref" values; these are not exposed as part of the JSON schema -# * these might look like the fully qualified path of a model, its id, or something similar -CoreRef = NewType('CoreRef', str) -# 2. keys of the "definitions" object that will eventually go into the JSON schema -# * by default, these look like "MyModel", though may change in the presence of collisions -# * eventually, we may want to make it easier to modify the way these names are generated -DefsRef = NewType('DefsRef', str) -# 3. 
the values corresponding to the "$ref" key in the schema -# * By default, these look like "#/$defs/MyModel", as in {"$ref": "#/$defs/MyModel"} -JsonRef = NewType('JsonRef', str) - -CoreModeRef = Tuple[CoreRef, JsonSchemaMode] -JsonSchemaKeyT = TypeVar('JsonSchemaKeyT', bound=Hashable) - - -@dataclasses.dataclass(**_internal_dataclass.slots_true) -class _DefinitionsRemapping: - defs_remapping: dict[DefsRef, DefsRef] - json_remapping: dict[JsonRef, JsonRef] - - @staticmethod - def from_prioritized_choices( - prioritized_choices: dict[DefsRef, list[DefsRef]], - defs_to_json: dict[DefsRef, JsonRef], - definitions: dict[DefsRef, JsonSchemaValue], - ) -> _DefinitionsRemapping: - """ - This function should produce a remapping that replaces complex DefsRef with the simpler ones from the - prioritized_choices such that applying the name remapping would result in an equivalent JSON schema. - """ - # We need to iteratively simplify the definitions until we reach a fixed point. - # The reason for this is that outer definitions may reference inner definitions that get simplified - # into an equivalent reference, and the outer definitions won't be equivalent until we've simplified - # the inner definitions. - copied_definitions = deepcopy(definitions) - definitions_schema = {'$defs': copied_definitions} - for _iter in range(100): # prevent an infinite loop in the case of a bug, 100 iterations should be enough - # For every possible remapped DefsRef, collect all schemas that that DefsRef might be used for: - schemas_for_alternatives: dict[DefsRef, list[JsonSchemaValue]] = defaultdict(list) - for defs_ref in copied_definitions: - alternatives = prioritized_choices[defs_ref] - for alternative in alternatives: - schemas_for_alternatives[alternative].append(copied_definitions[defs_ref]) - - # Deduplicate the schemas for each alternative; the idea is that we only want to remap to a new DefsRef - # if it introduces no ambiguity, i.e., there is only one distinct schema for that DefsRef. 
- for defs_ref, schemas in schemas_for_alternatives.items(): - schemas_for_alternatives[defs_ref] = _deduplicate_schemas(schemas_for_alternatives[defs_ref]) - - # Build the remapping - defs_remapping: dict[DefsRef, DefsRef] = {} - json_remapping: dict[JsonRef, JsonRef] = {} - for original_defs_ref in definitions: - alternatives = prioritized_choices[original_defs_ref] - # Pick the first alternative that has only one schema, since that means there is no collision - remapped_defs_ref = next(x for x in alternatives if len(schemas_for_alternatives[x]) == 1) - defs_remapping[original_defs_ref] = remapped_defs_ref - json_remapping[defs_to_json[original_defs_ref]] = defs_to_json[remapped_defs_ref] - remapping = _DefinitionsRemapping(defs_remapping, json_remapping) - new_definitions_schema = remapping.remap_json_schema({'$defs': copied_definitions}) - if definitions_schema == new_definitions_schema: - # We've reached the fixed point - return remapping - definitions_schema = new_definitions_schema - - raise PydanticInvalidForJsonSchema('Failed to simplify the JSON schema definitions') - - def remap_defs_ref(self, ref: DefsRef) -> DefsRef: - return self.defs_remapping.get(ref, ref) - - def remap_json_ref(self, ref: JsonRef) -> JsonRef: - return self.json_remapping.get(ref, ref) - - def remap_json_schema(self, schema: Any) -> Any: - """ - Recursively update the JSON schema replacing all $refs - """ - if isinstance(schema, str): - # Note: this may not really be a JsonRef; we rely on having no collisions between JsonRefs and other strings - return self.remap_json_ref(JsonRef(schema)) - elif isinstance(schema, list): - return [self.remap_json_schema(item) for item in schema] - elif isinstance(schema, dict): - for key, value in schema.items(): - if key == '$ref' and isinstance(value, str): - schema['$ref'] = self.remap_json_ref(JsonRef(value)) - elif key == '$defs': - schema['$defs'] = { - self.remap_defs_ref(DefsRef(key)): self.remap_json_schema(value) - for key, value in 
schema['$defs'].items() - } - else: - schema[key] = self.remap_json_schema(value) - return schema - - -class GenerateJsonSchema: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/json_schema/#customizing-the-json-schema-generation-process - - A class for generating JSON schemas. - - This class generates JSON schemas based on configured parameters. The default schema dialect - is [https://json-schema.org/draft/2020-12/schema](https://json-schema.org/draft/2020-12/schema). - The class uses `by_alias` to configure how fields with - multiple names are handled and `ref_template` to format reference names. - - Attributes: - schema_dialect: The JSON schema dialect used to generate the schema. See - [Declaring a Dialect](https://json-schema.org/understanding-json-schema/reference/schema.html#id4) - in the JSON Schema documentation for more information about dialects. - ignored_warning_kinds: Warnings to ignore when generating the schema. `self.render_warning_message` will - do nothing if its argument `kind` is in `ignored_warning_kinds`; - this value can be modified on subclasses to easily control which warnings are emitted. - by_alias: Whether to use field aliases when generating the schema. - ref_template: The format string used when generating reference names. - core_to_json_refs: A mapping of core refs to JSON refs. - core_to_defs_refs: A mapping of core refs to definition refs. - defs_to_core_refs: A mapping of definition refs to core refs. - json_to_defs_refs: A mapping of JSON refs to definition refs. - definitions: Definitions in the schema. - - Args: - by_alias: Whether to use field aliases in the generated schemas. - ref_template: The format string to use when generating reference names. - - Raises: - JsonSchemaError: If the instance of the class is inadvertently re-used after generating a schema. 
- """ - - schema_dialect = 'https://json-schema.org/draft/2020-12/schema' - - # `self.render_warning_message` will do nothing if its argument `kind` is in `ignored_warning_kinds`; - # this value can be modified on subclasses to easily control which warnings are emitted - ignored_warning_kinds: set[JsonSchemaWarningKind] = {'skipped-choice'} - - def __init__(self, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE): - self.by_alias = by_alias - self.ref_template = ref_template - - self.core_to_json_refs: dict[CoreModeRef, JsonRef] = {} - self.core_to_defs_refs: dict[CoreModeRef, DefsRef] = {} - self.defs_to_core_refs: dict[DefsRef, CoreModeRef] = {} - self.json_to_defs_refs: dict[JsonRef, DefsRef] = {} - - self.definitions: dict[DefsRef, JsonSchemaValue] = {} - self._config_wrapper_stack = _config.ConfigWrapperStack(_config.ConfigWrapper({})) - - self._mode: JsonSchemaMode = 'validation' - - # The following includes a mapping of a fully-unique defs ref choice to a list of preferred - # alternatives, which are generally simpler, such as only including the class name. - # At the end of schema generation, we use these to produce a JSON schema with more human-readable - # definitions, which would also work better in a generated OpenAPI client, etc. - self._prioritized_defsref_choices: dict[DefsRef, list[DefsRef]] = {} - self._collision_counter: dict[str, int] = defaultdict(int) - self._collision_index: dict[str, int] = {} - - self._schema_type_to_method = self.build_schema_type_to_method() - - # When we encounter definitions we need to try to build them immediately - # so that they are available schemas that reference them - # But it's possible that CoreSchema was never going to be used - # (e.g. 
because the CoreSchema that references short circuits is JSON schema generation without needing - # the reference) so instead of failing altogether if we can't build a definition we - # store the error raised and re-throw it if we end up needing that def - self._core_defs_invalid_for_json_schema: dict[DefsRef, PydanticInvalidForJsonSchema] = {} - - # This changes to True after generating a schema, to prevent issues caused by accidental re-use - # of a single instance of a schema generator - self._used = False - - @property - def _config(self) -> _config.ConfigWrapper: - return self._config_wrapper_stack.tail - - @property - def mode(self) -> JsonSchemaMode: - if self._config.json_schema_mode_override is not None: - return self._config.json_schema_mode_override - else: - return self._mode - - def build_schema_type_to_method( - self, - ) -> dict[CoreSchemaOrFieldType, Callable[[CoreSchemaOrField], JsonSchemaValue]]: - """Builds a dictionary mapping fields to methods for generating JSON schemas. - - Returns: - A dictionary containing the mapping of `CoreSchemaOrFieldType` to a handler method. - - Raises: - TypeError: If no method has been defined for generating a JSON schema for a given pydantic core schema type. 
- """ - mapping: dict[CoreSchemaOrFieldType, Callable[[CoreSchemaOrField], JsonSchemaValue]] = {} - core_schema_types: list[CoreSchemaOrFieldType] = _typing_extra.all_literal_values( - CoreSchemaOrFieldType # type: ignore - ) - for key in core_schema_types: - method_name = f"{key.replace('-', '_')}_schema" - try: - mapping[key] = getattr(self, method_name) - except AttributeError as e: # pragma: no cover - raise TypeError( - f'No method for generating JsonSchema for core_schema.type={key!r} ' - f'(expected: {type(self).__name__}.{method_name})' - ) from e - return mapping - - def generate_definitions( - self, inputs: Sequence[tuple[JsonSchemaKeyT, JsonSchemaMode, core_schema.CoreSchema]] - ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], dict[DefsRef, JsonSchemaValue]]: - """Generates JSON schema definitions from a list of core schemas, pairing the generated definitions with a - mapping that links the input keys to the definition references. - - Args: - inputs: A sequence of tuples, where: - - - The first element is a JSON schema key type. - - The second element is the JSON mode: either 'validation' or 'serialization'. - - The third element is a core schema. - - Returns: - A tuple where: - - - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and - whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have - JsonRef references to definitions that are defined in the second returned element.) - - The second element is a dictionary whose keys are definition references for the JSON schemas - from the first returned element, and whose values are the actual JSON schema definitions. - - Raises: - PydanticUserError: Raised if the JSON schema generator has already been used to generate a JSON schema. - """ - if self._used: - raise PydanticUserError( - 'This JSON schema generator has already been used to generate a JSON schema. 
' - f'You must create a new instance of {type(self).__name__} to generate a new JSON schema.', - code='json-schema-already-used', - ) - - for key, mode, schema in inputs: - self._mode = mode - self.generate_inner(schema) - - definitions_remapping = self._build_definitions_remapping() - - json_schemas_map: dict[tuple[JsonSchemaKeyT, JsonSchemaMode], DefsRef] = {} - for key, mode, schema in inputs: - self._mode = mode - json_schema = self.generate_inner(schema) - json_schemas_map[(key, mode)] = definitions_remapping.remap_json_schema(json_schema) - - json_schema = {'$defs': self.definitions} - json_schema = definitions_remapping.remap_json_schema(json_schema) - self._used = True - return json_schemas_map, _sort_json_schema(json_schema['$defs']) # type: ignore - - def generate(self, schema: CoreSchema, mode: JsonSchemaMode = 'validation') -> JsonSchemaValue: - """Generates a JSON schema for a specified schema in a specified mode. - - Args: - schema: A Pydantic model. - mode: The mode in which to generate the schema. Defaults to 'validation'. - - Returns: - A JSON schema representing the specified schema. - - Raises: - PydanticUserError: If the JSON schema generator has already been used to generate a JSON schema. - """ - self._mode = mode - if self._used: - raise PydanticUserError( - 'This JSON schema generator has already been used to generate a JSON schema. 
' - f'You must create a new instance of {type(self).__name__} to generate a new JSON schema.', - code='json-schema-already-used', - ) - - json_schema: JsonSchemaValue = self.generate_inner(schema) - json_ref_counts = self.get_json_ref_counts(json_schema) - - # Remove the top-level $ref if present; note that the _generate method already ensures there are no sibling keys - ref = cast(JsonRef, json_schema.get('$ref')) - while ref is not None: # may need to unpack multiple levels - ref_json_schema = self.get_schema_from_definitions(ref) - if json_ref_counts[ref] > 1 or ref_json_schema is None: - # Keep the ref, but use an allOf to remove the top level $ref - json_schema = {'allOf': [{'$ref': ref}]} - else: - # "Unpack" the ref since this is the only reference - json_schema = ref_json_schema.copy() # copy to prevent recursive dict reference - json_ref_counts[ref] -= 1 - ref = cast(JsonRef, json_schema.get('$ref')) - - self._garbage_collect_definitions(json_schema) - definitions_remapping = self._build_definitions_remapping() - - if self.definitions: - json_schema['$defs'] = self.definitions - - json_schema = definitions_remapping.remap_json_schema(json_schema) - - # For now, we will not set the $schema key. However, if desired, this can be easily added by overriding - # this method and adding the following line after a call to super().generate(schema): - # json_schema['$schema'] = self.schema_dialect - - self._used = True - return _sort_json_schema(json_schema) - - def generate_inner(self, schema: CoreSchemaOrField) -> JsonSchemaValue: # noqa: C901 - """Generates a JSON schema for a given core schema. - - Args: - schema: The given core schema. - - Returns: - The generated JSON schema. 
- """ - # If a schema with the same CoreRef has been handled, just return a reference to it - # Note that this assumes that it will _never_ be the case that the same CoreRef is used - # on types that should have different JSON schemas - if 'ref' in schema: - core_ref = CoreRef(schema['ref']) # type: ignore[typeddict-item] - core_mode_ref = (core_ref, self.mode) - if core_mode_ref in self.core_to_defs_refs and self.core_to_defs_refs[core_mode_ref] in self.definitions: - return {'$ref': self.core_to_json_refs[core_mode_ref]} - - # Generate the JSON schema, accounting for the json_schema_override and core_schema_override - metadata_handler = _core_metadata.CoreMetadataHandler(schema) - - def populate_defs(core_schema: CoreSchema, json_schema: JsonSchemaValue) -> JsonSchemaValue: - if 'ref' in core_schema: - core_ref = CoreRef(core_schema['ref']) # type: ignore[typeddict-item] - defs_ref, ref_json_schema = self.get_cache_defs_ref_schema(core_ref) - json_ref = JsonRef(ref_json_schema['$ref']) - self.json_to_defs_refs[json_ref] = defs_ref - # Replace the schema if it's not a reference to itself - # What we want to avoid is having the def be just a ref to itself - # which is what would happen if we blindly assigned any - if json_schema.get('$ref', None) != json_ref: - self.definitions[defs_ref] = json_schema - self._core_defs_invalid_for_json_schema.pop(defs_ref, None) - json_schema = ref_json_schema - return json_schema - - def convert_to_all_of(json_schema: JsonSchemaValue) -> JsonSchemaValue: - if '$ref' in json_schema and len(json_schema.keys()) > 1: - # technically you can't have any other keys next to a "$ref" - # but it's an easy mistake to make and not hard to correct automatically here - json_schema = json_schema.copy() - ref = json_schema.pop('$ref') - json_schema = {'allOf': [{'$ref': ref}], **json_schema} - return json_schema - - def handler_func(schema_or_field: CoreSchemaOrField) -> JsonSchemaValue: - """Generate a JSON schema based on the input schema. 
- - Args: - schema_or_field: The core schema to generate a JSON schema from. - - Returns: - The generated JSON schema. - - Raises: - TypeError: If an unexpected schema type is encountered. - """ - # Generate the core-schema-type-specific bits of the schema generation: - json_schema: JsonSchemaValue | None = None - if self.mode == 'serialization' and 'serialization' in schema_or_field: - ser_schema = schema_or_field['serialization'] # type: ignore - json_schema = self.ser_schema(ser_schema) - if json_schema is None: - if _core_utils.is_core_schema(schema_or_field) or _core_utils.is_core_schema_field(schema_or_field): - generate_for_schema_type = self._schema_type_to_method[schema_or_field['type']] - json_schema = generate_for_schema_type(schema_or_field) - else: - raise TypeError(f'Unexpected schema type: schema={schema_or_field}') - if _core_utils.is_core_schema(schema_or_field): - json_schema = populate_defs(schema_or_field, json_schema) - json_schema = convert_to_all_of(json_schema) - return json_schema - - current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, handler_func) - - for js_modify_function in metadata_handler.metadata.get('pydantic_js_functions', ()): - - def new_handler_func( - schema_or_field: CoreSchemaOrField, - current_handler: GetJsonSchemaHandler = current_handler, - js_modify_function: GetJsonSchemaFunction = js_modify_function, - ) -> JsonSchemaValue: - json_schema = js_modify_function(schema_or_field, current_handler) - if _core_utils.is_core_schema(schema_or_field): - json_schema = populate_defs(schema_or_field, json_schema) - original_schema = current_handler.resolve_ref_schema(json_schema) - ref = json_schema.pop('$ref', None) - if ref and json_schema: - original_schema.update(json_schema) - return original_schema - - current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, new_handler_func) - - for js_modify_function in metadata_handler.metadata.get('pydantic_js_annotation_functions', ()): - - def 
new_handler_func( - schema_or_field: CoreSchemaOrField, - current_handler: GetJsonSchemaHandler = current_handler, - js_modify_function: GetJsonSchemaFunction = js_modify_function, - ) -> JsonSchemaValue: - json_schema = js_modify_function(schema_or_field, current_handler) - if _core_utils.is_core_schema(schema_or_field): - json_schema = populate_defs(schema_or_field, json_schema) - json_schema = convert_to_all_of(json_schema) - return json_schema - - current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, new_handler_func) - - json_schema = current_handler(schema) - if _core_utils.is_core_schema(schema): - json_schema = populate_defs(schema, json_schema) - json_schema = convert_to_all_of(json_schema) - return json_schema - - # ### Schema generation methods - def any_schema(self, schema: core_schema.AnySchema) -> JsonSchemaValue: - """Generates a JSON schema that matches any value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return {} - - def none_schema(self, schema: core_schema.NoneSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches `None`. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return {'type': 'null'} - - def bool_schema(self, schema: core_schema.BoolSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a bool value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return {'type': 'boolean'} - - def int_schema(self, schema: core_schema.IntSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches an int value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - json_schema: dict[str, Any] = {'type': 'integer'} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.numeric) - json_schema = {k: v for k, v in json_schema.items() if v not in {math.inf, -math.inf}} - return json_schema - - def float_schema(self, schema: core_schema.FloatSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a float value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - json_schema: dict[str, Any] = {'type': 'number'} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.numeric) - json_schema = {k: v for k, v in json_schema.items() if v not in {math.inf, -math.inf}} - return json_schema - - def decimal_schema(self, schema: core_schema.DecimalSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a decimal value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - json_schema = self.str_schema(core_schema.str_schema()) - if self.mode == 'validation': - multiple_of = schema.get('multiple_of') - le = schema.get('le') - ge = schema.get('ge') - lt = schema.get('lt') - gt = schema.get('gt') - json_schema = { - 'anyOf': [ - self.float_schema( - core_schema.float_schema( - allow_inf_nan=schema.get('allow_inf_nan'), - multiple_of=None if multiple_of is None else float(multiple_of), - le=None if le is None else float(le), - ge=None if ge is None else float(ge), - lt=None if lt is None else float(lt), - gt=None if gt is None else float(gt), - ) - ), - json_schema, - ], - } - return json_schema - - def str_schema(self, schema: core_schema.StringSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a string value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - json_schema = {'type': 'string'} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.string) - return json_schema - - def bytes_schema(self, schema: core_schema.BytesSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a bytes value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - json_schema = {'type': 'string', 'format': 'base64url' if self._config.ser_json_bytes == 'base64' else 'binary'} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.bytes) - return json_schema - - def date_schema(self, schema: core_schema.DateSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a date value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - json_schema = {'type': 'string', 'format': 'date'} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.date) - return json_schema - - def time_schema(self, schema: core_schema.TimeSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a time value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return {'type': 'string', 'format': 'time'} - - def datetime_schema(self, schema: core_schema.DatetimeSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a datetime value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return {'type': 'string', 'format': 'date-time'} - - def timedelta_schema(self, schema: core_schema.TimedeltaSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a timedelta value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - if self._config.ser_json_timedelta == 'float': - return {'type': 'number'} - return {'type': 'string', 'format': 'duration'} - - def literal_schema(self, schema: core_schema.LiteralSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a literal value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - expected = [v.value if isinstance(v, Enum) else v for v in schema['expected']] - # jsonify the expected values - expected = [to_jsonable_python(v) for v in expected] - - if len(expected) == 1: - return {'const': expected[0]} - - types = {type(e) for e in expected} - if types == {str}: - return {'enum': expected, 'type': 'string'} - elif types == {int}: - return {'enum': expected, 'type': 'integer'} - elif types == {float}: - return {'enum': expected, 'type': 'number'} - elif types == {bool}: - return {'enum': expected, 'type': 'boolean'} - elif types == {list}: - return {'enum': expected, 'type': 'array'} - # there is not None case because if it's mixed it hits the final `else` - # if it's a single Literal[None] then it becomes a `const` schema above - else: - return {'enum': expected} - - def is_instance_schema(self, schema: core_schema.IsInstanceSchema) -> JsonSchemaValue: - """Handles JSON schema generation for a core schema that checks if a value is an instance of a class. - - Unless overridden in a subclass, this raises an error. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.handle_invalid_for_json_schema(schema, f'core_schema.IsInstanceSchema ({schema["cls"]})') - - def is_subclass_schema(self, schema: core_schema.IsSubclassSchema) -> JsonSchemaValue: - """Handles JSON schema generation for a core schema that checks if a value is a subclass of a class. - - For backwards compatibility with v1, this does not raise an error, but can be overridden to change this. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - # Note: This is for compatibility with V1; you can override if you want different behavior. - return {} - - def callable_schema(self, schema: core_schema.CallableSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a callable value. - - Unless overridden in a subclass, this raises an error. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.handle_invalid_for_json_schema(schema, 'core_schema.CallableSchema') - - def list_schema(self, schema: core_schema.ListSchema) -> JsonSchemaValue: - """Returns a schema that matches a list schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema']) - json_schema = {'type': 'array', 'items': items_schema} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) - return json_schema - - @deprecated('`tuple_positional_schema` is deprecated. Use `tuple_schema` instead.', category=None) - @final - def tuple_positional_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue: - """Replaced by `tuple_schema`.""" - warnings.warn( - '`tuple_positional_schema` is deprecated. Use `tuple_schema` instead.', - PydanticDeprecatedSince26, - stacklevel=2, - ) - return self.tuple_schema(schema) - - @deprecated('`tuple_variable_schema` is deprecated. Use `tuple_schema` instead.', category=None) - @final - def tuple_variable_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue: - """Replaced by `tuple_schema`.""" - warnings.warn( - '`tuple_variable_schema` is deprecated. Use `tuple_schema` instead.', - PydanticDeprecatedSince26, - stacklevel=2, - ) - return self.tuple_schema(schema) - - def tuple_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a tuple schema e.g. `Tuple[int, - str, bool]` or `Tuple[int, ...]`. 
- - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - json_schema: JsonSchemaValue = {'type': 'array'} - if 'variadic_item_index' in schema: - variadic_item_index = schema['variadic_item_index'] - if variadic_item_index > 0: - json_schema['minItems'] = variadic_item_index - json_schema['prefixItems'] = [ - self.generate_inner(item) for item in schema['items_schema'][:variadic_item_index] - ] - if variadic_item_index + 1 == len(schema['items_schema']): - # if the variadic item is the last item, then represent it faithfully - json_schema['items'] = self.generate_inner(schema['items_schema'][variadic_item_index]) - else: - # otherwise, 'items' represents the schema for the variadic - # item plus the suffix, so just allow anything for simplicity - # for now - json_schema['items'] = True - else: - prefixItems = [self.generate_inner(item) for item in schema['items_schema']] - if prefixItems: - json_schema['prefixItems'] = prefixItems - json_schema['minItems'] = len(prefixItems) - json_schema['maxItems'] = len(prefixItems) - self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) - return json_schema - - def set_schema(self, schema: core_schema.SetSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a set schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self._common_set_schema(schema) - - def frozenset_schema(self, schema: core_schema.FrozenSetSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a frozenset schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - return self._common_set_schema(schema) - - def _common_set_schema(self, schema: core_schema.SetSchema | core_schema.FrozenSetSchema) -> JsonSchemaValue: - items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema']) - json_schema = {'type': 'array', 'uniqueItems': True, 'items': items_schema} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) - return json_schema - - def generator_schema(self, schema: core_schema.GeneratorSchema) -> JsonSchemaValue: - """Returns a JSON schema that represents the provided GeneratorSchema. - - Args: - schema: The schema. - - Returns: - The generated JSON schema. - """ - items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema']) - json_schema = {'type': 'array', 'items': items_schema} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) - return json_schema - - def dict_schema(self, schema: core_schema.DictSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a dict schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - json_schema: JsonSchemaValue = {'type': 'object'} - - keys_schema = self.generate_inner(schema['keys_schema']).copy() if 'keys_schema' in schema else {} - keys_pattern = keys_schema.pop('pattern', None) - - values_schema = self.generate_inner(schema['values_schema']).copy() if 'values_schema' in schema else {} - values_schema.pop('title', None) # don't give a title to the additionalProperties - if values_schema or keys_pattern is not None: # don't add additionalProperties if it's empty - if keys_pattern is None: - json_schema['additionalProperties'] = values_schema - else: - json_schema['patternProperties'] = {keys_pattern: values_schema} - - self.update_with_validations(json_schema, schema, self.ValidationsMapping.object) - return json_schema - - def _function_schema( - self, - schema: _core_utils.AnyFunctionSchema, - ) -> JsonSchemaValue: - if _core_utils.is_function_with_inner_schema(schema): - # This could be wrong if the function's mode is 'before', but in practice will often be right, and when it - # isn't, I think it would be hard to automatically infer what the desired schema should be. - return self.generate_inner(schema['schema']) - - # function-plain - return self.handle_invalid_for_json_schema( - schema, f'core_schema.PlainValidatorFunctionSchema ({schema["function"]})' - ) - - def function_before_schema(self, schema: core_schema.BeforeValidatorFunctionSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a function-before schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self._function_schema(schema) - - def function_after_schema(self, schema: core_schema.AfterValidatorFunctionSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a function-after schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - return self._function_schema(schema) - - def function_plain_schema(self, schema: core_schema.PlainValidatorFunctionSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a function-plain schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self._function_schema(schema) - - def function_wrap_schema(self, schema: core_schema.WrapValidatorFunctionSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a function-wrap schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self._function_schema(schema) - - def default_schema(self, schema: core_schema.WithDefaultSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema with a default value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - json_schema = self.generate_inner(schema['schema']) - - if 'default' not in schema: - return json_schema - default = schema['default'] - # Note: if you want to include the value returned by the default_factory, - # override this method and replace the code above with: - # if 'default' in schema: - # default = schema['default'] - # elif 'default_factory' in schema: - # default = schema['default_factory']() - # else: - # return json_schema - - try: - encoded_default = self.encode_default(default) - except pydantic_core.PydanticSerializationError: - self.emit_warning( - 'non-serializable-default', - f'Default value {default} is not JSON serializable; excluding default from JSON schema', - ) - # Return the inner schema, as though there was no default - return json_schema - - if '$ref' in json_schema: - # Since reference schemas do not support child keys, we wrap the reference schema in a single-case allOf: - return {'allOf': [json_schema], 'default': encoded_default} - else: - json_schema['default'] = encoded_default - return json_schema - - def nullable_schema(self, schema: core_schema.NullableSchema) 
-> JsonSchemaValue: - """Generates a JSON schema that matches a schema that allows null values. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - null_schema = {'type': 'null'} - inner_json_schema = self.generate_inner(schema['schema']) - - if inner_json_schema == null_schema: - return null_schema - else: - # Thanks to the equality check against `null_schema` above, I think 'oneOf' would also be valid here; - # I'll use 'anyOf' for now, but it could be changed it if it would work better with some external tooling - return self.get_flattened_anyof([inner_json_schema, null_schema]) - - def union_schema(self, schema: core_schema.UnionSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that allows values matching any of the given schemas. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - generated: list[JsonSchemaValue] = [] - - choices = schema['choices'] - for choice in choices: - # choice will be a tuple if an explicit label was provided - choice_schema = choice[0] if isinstance(choice, tuple) else choice - try: - generated.append(self.generate_inner(choice_schema)) - except PydanticOmit: - continue - except PydanticInvalidForJsonSchema as exc: - self.emit_warning('skipped-choice', exc.message) - if len(generated) == 1: - return generated[0] - return self.get_flattened_anyof(generated) - - def tagged_union_schema(self, schema: core_schema.TaggedUnionSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that allows values matching any of the given schemas, where - the schemas are tagged with a discriminator field that indicates which schema should be used to validate - the value. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - generated: dict[str, JsonSchemaValue] = {} - for k, v in schema['choices'].items(): - if isinstance(k, Enum): - k = k.value - try: - # Use str(k) since keys must be strings for json; while not technically correct, - # it's the closest that can be represented in valid JSON - generated[str(k)] = self.generate_inner(v).copy() - except PydanticOmit: - continue - except PydanticInvalidForJsonSchema as exc: - self.emit_warning('skipped-choice', exc.message) - - one_of_choices = _deduplicate_schemas(generated.values()) - json_schema: JsonSchemaValue = {'oneOf': one_of_choices} - - # This reflects the v1 behavior; TODO: we should make it possible to exclude OpenAPI stuff from the JSON schema - openapi_discriminator = self._extract_discriminator(schema, one_of_choices) - if openapi_discriminator is not None: - json_schema['discriminator'] = { - 'propertyName': openapi_discriminator, - 'mapping': {k: v.get('$ref', v) for k, v in generated.items()}, - } - - return json_schema - - def _extract_discriminator( - self, schema: core_schema.TaggedUnionSchema, one_of_choices: list[JsonDict] - ) -> str | None: - """Extract a compatible OpenAPI discriminator from the schema and one_of choices that end up in the final - schema.""" - openapi_discriminator: str | None = None - - if isinstance(schema['discriminator'], str): - return schema['discriminator'] - - if isinstance(schema['discriminator'], list): - # If the discriminator is a single item list containing a string, that is equivalent to the string case - if len(schema['discriminator']) == 1 and isinstance(schema['discriminator'][0], str): - return schema['discriminator'][0] - # When an alias is used that is different from the field name, the discriminator will be a list of single - # str lists, one for the attribute and one for the actual alias. The logic here will work even if there is - # more than one possible attribute, and looks for whether a single alias choice is present as a documented - # property on all choices. 
If so, that property will be used as the OpenAPI discriminator. - for alias_path in schema['discriminator']: - if not isinstance(alias_path, list): - break # this means that the discriminator is not a list of alias paths - if len(alias_path) != 1: - continue # this means that the "alias" does not represent a single field - alias = alias_path[0] - if not isinstance(alias, str): - continue # this means that the "alias" does not represent a field - alias_is_present_on_all_choices = True - for choice in one_of_choices: - while '$ref' in choice: - assert isinstance(choice['$ref'], str) - choice = self.get_schema_from_definitions(JsonRef(choice['$ref'])) or {} - properties = choice.get('properties', {}) - if not isinstance(properties, dict) or alias not in properties: - alias_is_present_on_all_choices = False - break - if alias_is_present_on_all_choices: - openapi_discriminator = alias - break - return openapi_discriminator - - def chain_schema(self, schema: core_schema.ChainSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a core_schema.ChainSchema. - - When generating a schema for validation, we return the validation JSON schema for the first step in the chain. - For serialization, we return the serialization JSON schema for the last step in the chain. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - step_index = 0 if self.mode == 'validation' else -1 # use first step for validation, last for serialization - return self.generate_inner(schema['steps'][step_index]) - - def lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that allows values matching either the lax schema or the - strict schema. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - # TODO: Need to read the default value off of model config or whatever - use_strict = schema.get('strict', False) # TODO: replace this default False - # If your JSON schema fails to generate it is probably - # because one of the following two branches failed. - if use_strict: - return self.generate_inner(schema['strict_schema']) - else: - return self.generate_inner(schema['lax_schema']) - - def json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that allows values matching either the JSON schema or the - Python schema. - - The JSON schema is used instead of the Python schema. If you want to use the Python schema, you should override - this method. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.generate_inner(schema['json_schema']) - - def typed_dict_schema(self, schema: core_schema.TypedDictSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a typed dict. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - total = schema.get('total', True) - named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [ - (name, self.field_is_required(field, total), field) - for name, field in schema['fields'].items() - if self.field_is_present(field) - ] - if self.mode == 'serialization': - named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', []))) - - config = _get_typed_dict_config(schema) - with self._config_wrapper_stack.push(config): - json_schema = self._named_required_fields_schema(named_required_fields) - - extra = schema.get('extra_behavior') - if extra is None: - extra = config.get('extra', 'ignore') - if extra == 'forbid': - json_schema['additionalProperties'] = False - elif extra == 'allow': - json_schema['additionalProperties'] = True - - return json_schema - - @staticmethod - def _name_required_computed_fields( - computed_fields: list[ComputedField], - ) -> list[tuple[str, bool, core_schema.ComputedField]]: - return [(field['property_name'], True, field) for field in computed_fields] - - def _named_required_fields_schema( - self, named_required_fields: Sequence[tuple[str, bool, CoreSchemaField]] - ) -> JsonSchemaValue: - properties: dict[str, JsonSchemaValue] = {} - required_fields: list[str] = [] - for name, required, field in named_required_fields: - if self.by_alias: - name = self._get_alias_name(field, name) - try: - field_json_schema = self.generate_inner(field).copy() - except PydanticOmit: - continue - if 'title' not in field_json_schema and self.field_title_should_be_set(field): - title = self.get_title_from_name(name) - field_json_schema['title'] = title - field_json_schema = self.handle_ref_overrides(field_json_schema) - properties[name] = field_json_schema - if required: - required_fields.append(name) - - json_schema = {'type': 'object', 'properties': properties} - if required_fields: - json_schema['required'] = required_fields - return json_schema - - def _get_alias_name(self, field: CoreSchemaField, 
name: str) -> str: - if field['type'] == 'computed-field': - alias: Any = field.get('alias', name) - elif self.mode == 'validation': - alias = field.get('validation_alias', name) - else: - alias = field.get('serialization_alias', name) - if isinstance(alias, str): - name = alias - elif isinstance(alias, list): - alias = cast('list[str] | str', alias) - for path in alias: - if isinstance(path, list) and len(path) == 1 and isinstance(path[0], str): - # Use the first valid single-item string path; the code that constructs the alias array - # should ensure the first such item is what belongs in the JSON schema - name = path[0] - break - else: - assert_never(alias) - return name - - def typed_dict_field_schema(self, schema: core_schema.TypedDictField) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a typed dict field. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.generate_inner(schema['schema']) - - def dataclass_field_schema(self, schema: core_schema.DataclassField) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a dataclass field. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.generate_inner(schema['schema']) - - def model_field_schema(self, schema: core_schema.ModelField) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a model field. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.generate_inner(schema['schema']) - - def computed_field_schema(self, schema: core_schema.ComputedField) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a computed field. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - return self.generate_inner(schema['return_schema']) - - def model_schema(self, schema: core_schema.ModelSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a model. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - # We do not use schema['model'].model_json_schema() here - # because it could lead to inconsistent refs handling, etc. - cls = cast('type[BaseModel]', schema['cls']) - config = cls.model_config - title = config.get('title') - - with self._config_wrapper_stack.push(config): - json_schema = self.generate_inner(schema['schema']) - - json_schema_extra = config.get('json_schema_extra') - if cls.__pydantic_root_model__: - root_json_schema_extra = cls.model_fields['root'].json_schema_extra - if json_schema_extra and root_json_schema_extra: - raise ValueError( - '"model_config[\'json_schema_extra\']" and "Field.json_schema_extra" on "RootModel.root"' - ' field must not be set simultaneously' - ) - if root_json_schema_extra: - json_schema_extra = root_json_schema_extra - - json_schema = self._update_class_schema(json_schema, title, config.get('extra', None), cls, json_schema_extra) - - return json_schema - - def _update_class_schema( - self, - json_schema: JsonSchemaValue, - title: str | None, - extra: Literal['allow', 'ignore', 'forbid'] | None, - cls: type[Any], - json_schema_extra: JsonDict | JsonSchemaExtraCallable | None, - ) -> JsonSchemaValue: - if '$ref' in json_schema: - schema_to_update = self.get_schema_from_definitions(JsonRef(json_schema['$ref'])) or json_schema - else: - schema_to_update = json_schema - - if title is not None: - # referenced_schema['title'] = title - schema_to_update.setdefault('title', title) - - if 'additionalProperties' not in schema_to_update: - if extra == 'allow': - schema_to_update['additionalProperties'] = True - elif extra == 'forbid': - schema_to_update['additionalProperties'] = False - - if isinstance(json_schema_extra, (staticmethod, 
classmethod)): - # In older versions of python, this is necessary to ensure staticmethod/classmethods are callable - json_schema_extra = json_schema_extra.__get__(cls) - - if isinstance(json_schema_extra, dict): - schema_to_update.update(json_schema_extra) - elif callable(json_schema_extra): - if len(inspect.signature(json_schema_extra).parameters) > 1: - json_schema_extra(schema_to_update, cls) # type: ignore - else: - json_schema_extra(schema_to_update) # type: ignore - elif json_schema_extra is not None: - raise ValueError( - f"model_config['json_schema_extra']={json_schema_extra} should be a dict, callable, or None" - ) - - return json_schema - - def resolve_schema_to_update(self, json_schema: JsonSchemaValue) -> JsonSchemaValue: - """Resolve a JsonSchemaValue to the non-ref schema if it is a $ref schema. - - Args: - json_schema: The schema to resolve. - - Returns: - The resolved schema. - """ - if '$ref' in json_schema: - schema_to_update = self.get_schema_from_definitions(JsonRef(json_schema['$ref'])) - if schema_to_update is None: - raise RuntimeError(f'Cannot update undefined schema for $ref={json_schema["$ref"]}') - return self.resolve_schema_to_update(schema_to_update) - else: - schema_to_update = json_schema - return schema_to_update - - def model_fields_schema(self, schema: core_schema.ModelFieldsSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a model's fields. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [ - (name, self.field_is_required(field, total=True), field) - for name, field in schema['fields'].items() - if self.field_is_present(field) - ] - if self.mode == 'serialization': - named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', []))) - json_schema = self._named_required_fields_schema(named_required_fields) - extras_schema = schema.get('extras_schema', None) - if extras_schema is not None: - schema_to_update = self.resolve_schema_to_update(json_schema) - schema_to_update['additionalProperties'] = self.generate_inner(extras_schema) - return json_schema - - def field_is_present(self, field: CoreSchemaField) -> bool: - """Whether the field should be included in the generated JSON schema. - - Args: - field: The schema for the field itself. - - Returns: - `True` if the field should be included in the generated JSON schema, `False` otherwise. - """ - if self.mode == 'serialization': - # If you still want to include the field in the generated JSON schema, - # override this method and return True - return not field.get('serialization_exclude') - elif self.mode == 'validation': - return True - else: - assert_never(self.mode) - - def field_is_required( - self, - field: core_schema.ModelField | core_schema.DataclassField | core_schema.TypedDictField, - total: bool, - ) -> bool: - """Whether the field should be marked as required in the generated JSON schema. - (Note that this is irrelevant if the field is not present in the JSON schema.). - - Args: - field: The schema for the field itself. - total: Only applies to `TypedDictField`s. - Indicates if the `TypedDict` this field belongs to is total, in which case any fields that don't - explicitly specify `required=False` are required. - - Returns: - `True` if the field should be marked as required in the generated JSON schema, `False` otherwise. 
- """ - if self.mode == 'serialization' and self._config.json_schema_serialization_defaults_required: - return not field.get('serialization_exclude') - else: - if field['type'] == 'typed-dict-field': - return field.get('required', total) - else: - return field['schema']['type'] != 'default' - - def dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a dataclass's constructor arguments. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [ - (field['name'], self.field_is_required(field, total=True), field) - for field in schema['fields'] - if self.field_is_present(field) - ] - if self.mode == 'serialization': - named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', []))) - return self._named_required_fields_schema(named_required_fields) - - def dataclass_schema(self, schema: core_schema.DataclassSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a dataclass. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - cls = schema['cls'] - config: ConfigDict = getattr(cls, '__pydantic_config__', cast('ConfigDict', {})) - title = config.get('title') or cls.__name__ - - with self._config_wrapper_stack.push(config): - json_schema = self.generate_inner(schema['schema']).copy() - - json_schema_extra = config.get('json_schema_extra') - json_schema = self._update_class_schema(json_schema, title, config.get('extra', None), cls, json_schema_extra) - - # Dataclass-specific handling of description - if is_dataclass(cls) and not hasattr(cls, '__pydantic_validator__'): - # vanilla dataclass; don't use cls.__doc__ as it will contain the class signature by default - description = None - else: - description = None if cls.__doc__ is None else inspect.cleandoc(cls.__doc__) - if description: - json_schema['description'] = description - - return json_schema - - def arguments_schema(self, schema: core_schema.ArgumentsSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a function's arguments. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - metadata = _core_metadata.CoreMetadataHandler(schema).metadata - prefer_positional = metadata.get('pydantic_js_prefer_positional_arguments') - - arguments = schema['arguments_schema'] - kw_only_arguments = [a for a in arguments if a.get('mode') == 'keyword_only'] - kw_or_p_arguments = [a for a in arguments if a.get('mode') in {'positional_or_keyword', None}] - p_only_arguments = [a for a in arguments if a.get('mode') == 'positional_only'] - var_args_schema = schema.get('var_args_schema') - var_kwargs_schema = schema.get('var_kwargs_schema') - - if prefer_positional: - positional_possible = not kw_only_arguments and not var_kwargs_schema - if positional_possible: - return self.p_arguments_schema(p_only_arguments + kw_or_p_arguments, var_args_schema) - - keyword_possible = not p_only_arguments and not var_args_schema - if keyword_possible: - return self.kw_arguments_schema(kw_or_p_arguments + kw_only_arguments, var_kwargs_schema) - - if not prefer_positional: - positional_possible = not kw_only_arguments and not var_kwargs_schema - if positional_possible: - return self.p_arguments_schema(p_only_arguments + kw_or_p_arguments, var_args_schema) - - raise PydanticInvalidForJsonSchema( - 'Unable to generate JSON schema for arguments validator with positional-only and keyword-only arguments' - ) - - def kw_arguments_schema( - self, arguments: list[core_schema.ArgumentsParameter], var_kwargs_schema: CoreSchema | None - ) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a function's keyword arguments. - - Args: - arguments: The core schema. - - Returns: - The generated JSON schema. 
- """ - properties: dict[str, JsonSchemaValue] = {} - required: list[str] = [] - for argument in arguments: - name = self.get_argument_name(argument) - argument_schema = self.generate_inner(argument['schema']).copy() - argument_schema['title'] = self.get_title_from_name(name) - properties[name] = argument_schema - - if argument['schema']['type'] != 'default': - # This assumes that if the argument has a default value, - # the inner schema must be of type WithDefaultSchema. - # I believe this is true, but I am not 100% sure - required.append(name) - - json_schema: JsonSchemaValue = {'type': 'object', 'properties': properties} - if required: - json_schema['required'] = required - - if var_kwargs_schema: - additional_properties_schema = self.generate_inner(var_kwargs_schema) - if additional_properties_schema: - json_schema['additionalProperties'] = additional_properties_schema - else: - json_schema['additionalProperties'] = False - return json_schema - - def p_arguments_schema( - self, arguments: list[core_schema.ArgumentsParameter], var_args_schema: CoreSchema | None - ) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a function's positional arguments. - - Args: - arguments: The core schema. - - Returns: - The generated JSON schema. - """ - prefix_items: list[JsonSchemaValue] = [] - min_items = 0 - - for argument in arguments: - name = self.get_argument_name(argument) - - argument_schema = self.generate_inner(argument['schema']).copy() - argument_schema['title'] = self.get_title_from_name(name) - prefix_items.append(argument_schema) - - if argument['schema']['type'] != 'default': - # This assumes that if the argument has a default value, - # the inner schema must be of type WithDefaultSchema. 
- # I believe this is true, but I am not 100% sure - min_items += 1 - - json_schema: JsonSchemaValue = {'type': 'array', 'prefixItems': prefix_items} - if min_items: - json_schema['minItems'] = min_items - - if var_args_schema: - items_schema = self.generate_inner(var_args_schema) - if items_schema: - json_schema['items'] = items_schema - else: - json_schema['maxItems'] = len(prefix_items) - - return json_schema - - def get_argument_name(self, argument: core_schema.ArgumentsParameter) -> str: - """Retrieves the name of an argument. - - Args: - argument: The core schema. - - Returns: - The name of the argument. - """ - name = argument['name'] - if self.by_alias: - alias = argument.get('alias') - if isinstance(alias, str): - name = alias - else: - pass # might want to do something else? - return name - - def call_schema(self, schema: core_schema.CallSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a function call. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.generate_inner(schema['arguments_schema']) - - def custom_error_schema(self, schema: core_schema.CustomErrorSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a custom error. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return self.generate_inner(schema['schema']) - - def json_schema(self, schema: core_schema.JsonSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a JSON object. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - content_core_schema = schema.get('schema') or core_schema.any_schema() - content_json_schema = self.generate_inner(content_core_schema) - if self.mode == 'validation': - return {'type': 'string', 'contentMediaType': 'application/json', 'contentSchema': content_json_schema} - else: - # self.mode == 'serialization' - return content_json_schema - - def url_schema(self, schema: core_schema.UrlSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a URL. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - json_schema = {'type': 'string', 'format': 'uri', 'minLength': 1} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.string) - return json_schema - - def multi_host_url_schema(self, schema: core_schema.MultiHostUrlSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a URL that can be used with multiple hosts. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - # Note: 'multi-host-uri' is a custom/pydantic-specific format, not part of the JSON Schema spec - json_schema = {'type': 'string', 'format': 'multi-host-uri', 'minLength': 1} - self.update_with_validations(json_schema, schema, self.ValidationsMapping.string) - return json_schema - - def uuid_schema(self, schema: core_schema.UuidSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a UUID. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - return {'type': 'string', 'format': 'uuid'} - - def definitions_schema(self, schema: core_schema.DefinitionsSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that defines a JSON object with definitions. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. 
- """ - for definition in schema['definitions']: - try: - self.generate_inner(definition) - except PydanticInvalidForJsonSchema as e: - core_ref: CoreRef = CoreRef(definition['ref']) # type: ignore - self._core_defs_invalid_for_json_schema[self.get_defs_ref((core_ref, self.mode))] = e - continue - return self.generate_inner(schema['schema']) - - def definition_ref_schema(self, schema: core_schema.DefinitionReferenceSchema) -> JsonSchemaValue: - """Generates a JSON schema that matches a schema that references a definition. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - core_ref = CoreRef(schema['schema_ref']) - _, ref_json_schema = self.get_cache_defs_ref_schema(core_ref) - return ref_json_schema - - def ser_schema( - self, schema: core_schema.SerSchema | core_schema.IncExSeqSerSchema | core_schema.IncExDictSerSchema - ) -> JsonSchemaValue | None: - """Generates a JSON schema that matches a schema that defines a serialized object. - - Args: - schema: The core schema. - - Returns: - The generated JSON schema. - """ - schema_type = schema['type'] - if schema_type == 'function-plain' or schema_type == 'function-wrap': - # PlainSerializerFunctionSerSchema or WrapSerializerFunctionSerSchema - return_schema = schema.get('return_schema') - if return_schema is not None: - return self.generate_inner(return_schema) - elif schema_type == 'format' or schema_type == 'to-string': - # FormatSerSchema or ToStringSerSchema - return self.str_schema(core_schema.str_schema()) - elif schema['type'] == 'model': - # ModelSerSchema - return self.generate_inner(schema['schema']) - return None - - # ### Utility methods - - def get_title_from_name(self, name: str) -> str: - """Retrieves a title from a name. - - Args: - name: The name to retrieve a title from. - - Returns: - The title. 
- """ - return name.title().replace('_', ' ') - - def field_title_should_be_set(self, schema: CoreSchemaOrField) -> bool: - """Returns true if a field with the given schema should have a title set based on the field name. - - Intuitively, we want this to return true for schemas that wouldn't otherwise provide their own title - (e.g., int, float, str), and false for those that would (e.g., BaseModel subclasses). - - Args: - schema: The schema to check. - - Returns: - `True` if the field should have a title set, `False` otherwise. - """ - if _core_utils.is_core_schema_field(schema): - if schema['type'] == 'computed-field': - field_schema = schema['return_schema'] - else: - field_schema = schema['schema'] - return self.field_title_should_be_set(field_schema) - - elif _core_utils.is_core_schema(schema): - if schema.get('ref'): # things with refs, such as models and enums, should not have titles set - return False - if schema['type'] in {'default', 'nullable', 'definitions'}: - return self.field_title_should_be_set(schema['schema']) # type: ignore[typeddict-item] - if _core_utils.is_function_with_inner_schema(schema): - return self.field_title_should_be_set(schema['schema']) - if schema['type'] == 'definition-ref': - # Referenced schemas should not have titles set for the same reason - # schemas with refs should not - return False - return True # anything else should have title set - - else: - raise PydanticInvalidForJsonSchema(f'Unexpected schema type: schema={schema}') # pragma: no cover - - def normalize_name(self, name: str) -> str: - """Normalizes a name to be used as a key in a dictionary. - - Args: - name: The name to normalize. - - Returns: - The normalized name. - """ - return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name).replace('.', '__') - - def get_defs_ref(self, core_mode_ref: CoreModeRef) -> DefsRef: - """Override this method to change the way that definitions keys are generated from a core reference. - - Args: - core_mode_ref: The core reference. 
- - Returns: - The definitions key. - """ - # Split the core ref into "components"; generic origins and arguments are each separate components - core_ref, mode = core_mode_ref - components = re.split(r'([\][,])', core_ref) - # Remove IDs from each component - components = [x.rsplit(':', 1)[0] for x in components] - core_ref_no_id = ''.join(components) - # Remove everything before the last period from each "component" - components = [re.sub(r'(?:[^.[\]]+\.)+((?:[^.[\]]+))', r'\1', x) for x in components] - short_ref = ''.join(components) - - mode_title = _MODE_TITLE_MAPPING[mode] - - # It is important that the generated defs_ref values be such that at least one choice will not - # be generated for any other core_ref. Currently, this should be the case because we include - # the id of the source type in the core_ref - name = DefsRef(self.normalize_name(short_ref)) - name_mode = DefsRef(self.normalize_name(short_ref) + f'-{mode_title}') - module_qualname = DefsRef(self.normalize_name(core_ref_no_id)) - module_qualname_mode = DefsRef(f'{module_qualname}-{mode_title}') - module_qualname_id = DefsRef(self.normalize_name(core_ref)) - occurrence_index = self._collision_index.get(module_qualname_id) - if occurrence_index is None: - self._collision_counter[module_qualname] += 1 - occurrence_index = self._collision_index[module_qualname_id] = self._collision_counter[module_qualname] - - module_qualname_occurrence = DefsRef(f'{module_qualname}__{occurrence_index}') - module_qualname_occurrence_mode = DefsRef(f'{module_qualname_mode}__{occurrence_index}') - - self._prioritized_defsref_choices[module_qualname_occurrence_mode] = [ - name, - name_mode, - module_qualname, - module_qualname_mode, - module_qualname_occurrence, - module_qualname_occurrence_mode, - ] - - return module_qualname_occurrence_mode - - def get_cache_defs_ref_schema(self, core_ref: CoreRef) -> tuple[DefsRef, JsonSchemaValue]: - """This method wraps the get_defs_ref method with some cache-lookup/population 
logic, - and returns both the produced defs_ref and the JSON schema that will refer to the right definition. - - Args: - core_ref: The core reference to get the definitions reference for. - - Returns: - A tuple of the definitions reference and the JSON schema that will refer to it. - """ - core_mode_ref = (core_ref, self.mode) - maybe_defs_ref = self.core_to_defs_refs.get(core_mode_ref) - if maybe_defs_ref is not None: - json_ref = self.core_to_json_refs[core_mode_ref] - return maybe_defs_ref, {'$ref': json_ref} - - defs_ref = self.get_defs_ref(core_mode_ref) - - # populate the ref translation mappings - self.core_to_defs_refs[core_mode_ref] = defs_ref - self.defs_to_core_refs[defs_ref] = core_mode_ref - - json_ref = JsonRef(self.ref_template.format(model=defs_ref)) - self.core_to_json_refs[core_mode_ref] = json_ref - self.json_to_defs_refs[json_ref] = defs_ref - ref_json_schema = {'$ref': json_ref} - return defs_ref, ref_json_schema - - def handle_ref_overrides(self, json_schema: JsonSchemaValue) -> JsonSchemaValue: - """It is not valid for a schema with a top-level $ref to have sibling keys. - - During our own schema generation, we treat sibling keys as overrides to the referenced schema, - but this is not how the official JSON schema spec works. - - Because of this, we first remove any sibling keys that are redundant with the referenced schema, then if - any remain, we transform the schema from a top-level '$ref' to use allOf to move the $ref out of the top level. - (See bottom of https://swagger.io/docs/specification/using-ref/ for a reference about this behavior) - """ - if '$ref' in json_schema: - # prevent modifications to the input; this copy may be safe to drop if there is significant overhead - json_schema = json_schema.copy() - - referenced_json_schema = self.get_schema_from_definitions(JsonRef(json_schema['$ref'])) - if referenced_json_schema is None: - # This can happen when building schemas for models with not-yet-defined references. 
- # It may be a good idea to do a recursive pass at the end of the generation to remove - # any redundant override keys. - if len(json_schema) > 1: - # Make it an allOf to at least resolve the sibling keys issue - json_schema = json_schema.copy() - json_schema.setdefault('allOf', []) - json_schema['allOf'].append({'$ref': json_schema['$ref']}) - del json_schema['$ref'] - - return json_schema - for k, v in list(json_schema.items()): - if k == '$ref': - continue - if k in referenced_json_schema and referenced_json_schema[k] == v: - del json_schema[k] # redundant key - if len(json_schema) > 1: - # There is a remaining "override" key, so we need to move $ref out of the top level - json_ref = JsonRef(json_schema['$ref']) - del json_schema['$ref'] - assert 'allOf' not in json_schema # this should never happen, but just in case - json_schema['allOf'] = [{'$ref': json_ref}] - - return json_schema - - def get_schema_from_definitions(self, json_ref: JsonRef) -> JsonSchemaValue | None: - def_ref = self.json_to_defs_refs[json_ref] - if def_ref in self._core_defs_invalid_for_json_schema: - raise self._core_defs_invalid_for_json_schema[def_ref] - return self.definitions.get(def_ref, None) - - def encode_default(self, dft: Any) -> Any: - """Encode a default value to a JSON-serializable value. - - This is used to encode default values for fields in the generated JSON schema. - - Args: - dft: The default value to encode. - - Returns: - The encoded default value. - """ - config = self._config - return pydantic_core.to_jsonable_python( - dft, - timedelta_mode=config.ser_json_timedelta, - bytes_mode=config.ser_json_bytes, - ) - - def update_with_validations( - self, json_schema: JsonSchemaValue, core_schema: CoreSchema, mapping: dict[str, str] - ) -> None: - """Update the json_schema with the corresponding validations specified in the core_schema, - using the provided mapping to translate keys in core_schema to the appropriate keys for a JSON schema. 
- - Args: - json_schema: The JSON schema to update. - core_schema: The core schema to get the validations from. - mapping: A mapping from core_schema attribute names to the corresponding JSON schema attribute names. - """ - for core_key, json_schema_key in mapping.items(): - if core_key in core_schema: - json_schema[json_schema_key] = core_schema[core_key] - - class ValidationsMapping: - """This class just contains mappings from core_schema attribute names to the corresponding - JSON schema attribute names. While I suspect it is unlikely to be necessary, you can in - principle override this class in a subclass of GenerateJsonSchema (by inheriting from - GenerateJsonSchema.ValidationsMapping) to change these mappings. - """ - - numeric = { - 'multiple_of': 'multipleOf', - 'le': 'maximum', - 'ge': 'minimum', - 'lt': 'exclusiveMaximum', - 'gt': 'exclusiveMinimum', - } - bytes = { - 'min_length': 'minLength', - 'max_length': 'maxLength', - } - string = { - 'min_length': 'minLength', - 'max_length': 'maxLength', - 'pattern': 'pattern', - } - array = { - 'min_length': 'minItems', - 'max_length': 'maxItems', - } - object = { - 'min_length': 'minProperties', - 'max_length': 'maxProperties', - } - date = { - 'le': 'maximum', - 'ge': 'minimum', - 'lt': 'exclusiveMaximum', - 'gt': 'exclusiveMinimum', - } - - def get_flattened_anyof(self, schemas: list[JsonSchemaValue]) -> JsonSchemaValue: - members = [] - for schema in schemas: - if len(schema) == 1 and 'anyOf' in schema: - members.extend(schema['anyOf']) - else: - members.append(schema) - members = _deduplicate_schemas(members) - if len(members) == 1: - return members[0] - return {'anyOf': members} - - def get_json_ref_counts(self, json_schema: JsonSchemaValue) -> dict[JsonRef, int]: - """Get all values corresponding to the key '$ref' anywhere in the json_schema.""" - json_refs: dict[JsonRef, int] = Counter() - - def _add_json_refs(schema: Any) -> None: - if isinstance(schema, dict): - if '$ref' in schema: - json_ref = 
JsonRef(schema['$ref']) - if not isinstance(json_ref, str): - return # in this case, '$ref' might have been the name of a property - already_visited = json_ref in json_refs - json_refs[json_ref] += 1 - if already_visited: - return # prevent recursion on a definition that was already visited - defs_ref = self.json_to_defs_refs[json_ref] - if defs_ref in self._core_defs_invalid_for_json_schema: - raise self._core_defs_invalid_for_json_schema[defs_ref] - _add_json_refs(self.definitions[defs_ref]) - - for v in schema.values(): - _add_json_refs(v) - elif isinstance(schema, list): - for v in schema: - _add_json_refs(v) - - _add_json_refs(json_schema) - return json_refs - - def handle_invalid_for_json_schema(self, schema: CoreSchemaOrField, error_info: str) -> JsonSchemaValue: - raise PydanticInvalidForJsonSchema(f'Cannot generate a JsonSchema for {error_info}') - - def emit_warning(self, kind: JsonSchemaWarningKind, detail: str) -> None: - """This method simply emits PydanticJsonSchemaWarnings based on handling in the `warning_message` method.""" - message = self.render_warning_message(kind, detail) - if message is not None: - warnings.warn(message, PydanticJsonSchemaWarning) - - def render_warning_message(self, kind: JsonSchemaWarningKind, detail: str) -> str | None: - """This method is responsible for ignoring warnings as desired, and for formatting the warning messages. - - You can override the value of `ignored_warning_kinds` in a subclass of GenerateJsonSchema - to modify what warnings are generated. If you want more control, you can override this method; - just return None in situations where you don't want warnings to be emitted. - - Args: - kind: The kind of warning to render. It can be one of the following: - - - 'skipped-choice': A choice field was skipped because it had no valid choices. - - 'non-serializable-default': A default value was skipped because it was not JSON-serializable. - detail: A string with additional details about the warning. 
- - Returns: - The formatted warning message, or `None` if no warning should be emitted. - """ - if kind in self.ignored_warning_kinds: - return None - return f'{detail} [{kind}]' - - def _build_definitions_remapping(self) -> _DefinitionsRemapping: - defs_to_json: dict[DefsRef, JsonRef] = {} - for defs_refs in self._prioritized_defsref_choices.values(): - for defs_ref in defs_refs: - json_ref = JsonRef(self.ref_template.format(model=defs_ref)) - defs_to_json[defs_ref] = json_ref - - return _DefinitionsRemapping.from_prioritized_choices( - self._prioritized_defsref_choices, defs_to_json, self.definitions - ) - - def _garbage_collect_definitions(self, schema: JsonSchemaValue) -> None: - visited_defs_refs: set[DefsRef] = set() - unvisited_json_refs = _get_all_json_refs(schema) - while unvisited_json_refs: - next_json_ref = unvisited_json_refs.pop() - next_defs_ref = self.json_to_defs_refs[next_json_ref] - if next_defs_ref in visited_defs_refs: - continue - visited_defs_refs.add(next_defs_ref) - unvisited_json_refs.update(_get_all_json_refs(self.definitions[next_defs_ref])) - - self.definitions = {k: v for k, v in self.definitions.items() if k in visited_defs_refs} - - -# ##### Start JSON Schema Generation Functions ##### - - -def model_json_schema( - cls: type[BaseModel] | type[PydanticDataclass], - by_alias: bool = True, - ref_template: str = DEFAULT_REF_TEMPLATE, - schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, - mode: JsonSchemaMode = 'validation', -) -> dict[str, Any]: - """Utility function to generate a JSON Schema for a model. - - Args: - cls: The model class to generate a JSON Schema for. - by_alias: If `True` (the default), fields will be serialized according to their alias. - If `False`, fields will be serialized according to their attribute name. - ref_template: The template to use for generating JSON Schema references. - schema_generator: The class to use for generating the JSON Schema. 
- mode: The mode to use for generating the JSON Schema. It can be one of the following: - - - 'validation': Generate a JSON Schema for validating data. - - 'serialization': Generate a JSON Schema for serializing data. - - Returns: - The generated JSON Schema. - """ - schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) - if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer): - cls.__pydantic_validator__.rebuild() - assert '__pydantic_core_schema__' in cls.__dict__, 'this is a bug! please report it' - return schema_generator_instance.generate(cls.__pydantic_core_schema__, mode=mode) - - -def models_json_schema( - models: Sequence[tuple[type[BaseModel] | type[PydanticDataclass], JsonSchemaMode]], - *, - by_alias: bool = True, - title: str | None = None, - description: str | None = None, - ref_template: str = DEFAULT_REF_TEMPLATE, - schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, -) -> tuple[dict[tuple[type[BaseModel] | type[PydanticDataclass], JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]: - """Utility function to generate a JSON Schema for multiple models. - - Args: - models: A sequence of tuples of the form (model, mode). - by_alias: Whether field aliases should be used as keys in the generated JSON Schema. - title: The title of the generated JSON Schema. - description: The description of the generated JSON Schema. - ref_template: The reference template to use for generating JSON Schema references. - schema_generator: The schema generator to use for generating the JSON Schema. - - Returns: - A tuple where: - - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and - whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have - JsonRef references to definitions that are defined in the second returned element.) 
- - The second element is a JSON schema containing all definitions referenced in the first returned - element, along with the optional title and description keys. - """ - for cls, _ in models: - if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer): - cls.__pydantic_validator__.rebuild() - - instance = schema_generator(by_alias=by_alias, ref_template=ref_template) - inputs = [(m, mode, m.__pydantic_core_schema__) for m, mode in models] - json_schemas_map, definitions = instance.generate_definitions(inputs) - - json_schema: dict[str, Any] = {} - if definitions: - json_schema['$defs'] = definitions - if title: - json_schema['title'] = title - if description: - json_schema['description'] = description - - return json_schemas_map, json_schema - - -# ##### End JSON Schema Generation Functions ##### - - -_HashableJsonValue: TypeAlias = Union[ - int, float, str, bool, None, Tuple['_HashableJsonValue', ...], Tuple[Tuple[str, '_HashableJsonValue'], ...] -] - - -def _deduplicate_schemas(schemas: Iterable[JsonDict]) -> list[JsonDict]: - return list({_make_json_hashable(schema): schema for schema in schemas}.values()) - - -def _make_json_hashable(value: JsonValue) -> _HashableJsonValue: - if isinstance(value, dict): - return tuple(sorted((k, _make_json_hashable(v)) for k, v in value.items())) - elif isinstance(value, list): - return tuple(_make_json_hashable(v) for v in value) - else: - return value - - -def _sort_json_schema(value: JsonSchemaValue, parent_key: str | None = None) -> JsonSchemaValue: - if isinstance(value, dict): - sorted_dict: dict[str, JsonSchemaValue] = {} - keys = value.keys() - if (parent_key != 'properties') and (parent_key != 'default'): - keys = sorted(keys) - for key in keys: - sorted_dict[key] = _sort_json_schema(value[key], parent_key=key) - return sorted_dict - elif isinstance(value, list): - sorted_list: list[JsonSchemaValue] = [] - for item in value: # type: ignore - sorted_list.append(_sort_json_schema(item, parent_key)) - return 
sorted_list # type: ignore - else: - return value - - -@dataclasses.dataclass(**_internal_dataclass.slots_true) -class WithJsonSchema: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/json_schema/#withjsonschema-annotation - - Add this as an annotation on a field to override the (base) JSON schema that would be generated for that field. - This provides a way to set a JSON schema for types that would otherwise raise errors when producing a JSON schema, - such as Callable, or types that have an is-instance core schema, without needing to go so far as creating a - custom subclass of pydantic.json_schema.GenerateJsonSchema. - Note that any _modifications_ to the schema that would normally be made (such as setting the title for model fields) - will still be performed. - - If `mode` is set this will only apply to that schema generation mode, allowing you - to set different json schemas for validation and serialization. - """ - - json_schema: JsonSchemaValue | None - mode: Literal['validation', 'serialization'] | None = None - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - mode = self.mode or handler.mode - if mode != handler.mode: - return handler(core_schema) - if self.json_schema is None: - # This exception is handled in pydantic.json_schema.GenerateJsonSchema._named_required_fields_schema - raise PydanticOmit - else: - return self.json_schema - - def __hash__(self) -> int: - return hash(type(self.mode)) - - -@dataclasses.dataclass(**_internal_dataclass.slots_true) -class Examples: - """Add examples to a JSON schema. - - Examples should be a map of example names (strings) - to example values (any valid JSON). - - If `mode` is set this will only apply to that schema generation mode, - allowing you to add different examples for validation and serialization. 
- """ - - examples: dict[str, Any] - mode: Literal['validation', 'serialization'] | None = None - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - mode = self.mode or handler.mode - json_schema = handler(core_schema) - if mode != handler.mode: - return json_schema - examples = json_schema.get('examples', {}) - examples.update(to_jsonable_python(self.examples)) - json_schema['examples'] = examples - return json_schema - - def __hash__(self) -> int: - return hash(type(self.mode)) - - -def _get_all_json_refs(item: Any) -> set[JsonRef]: - """Get all the definitions references from a JSON schema.""" - refs: set[JsonRef] = set() - if isinstance(item, dict): - for key, value in item.items(): - if key == '$ref' and isinstance(value, str): - # the isinstance check ensures that '$ref' isn't the name of a property, etc. - refs.add(JsonRef(value)) - elif isinstance(value, dict): - refs.update(_get_all_json_refs(value)) - elif isinstance(value, list): - for item in value: - refs.update(_get_all_json_refs(item)) - elif isinstance(item, list): - for item in item: - refs.update(_get_all_json_refs(item)) - return refs - - -AnyType = TypeVar('AnyType') - -if TYPE_CHECKING: - SkipJsonSchema = Annotated[AnyType, ...] -else: - - @dataclasses.dataclass(**_internal_dataclass.slots_true) - class SkipJsonSchema: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/json_schema/#skipjsonschema-annotation - - Add this as an annotation on a field to skip generating a JSON schema for that field. - - Example: - ```py - from typing import Union - - from pydantic import BaseModel - from pydantic.json_schema import SkipJsonSchema - - from pprint import pprint - - - class Model(BaseModel): - a: Union[int, None] = None # (1)! - b: Union[int, SkipJsonSchema[None]] = None # (2)! - c: SkipJsonSchema[Union[int, None]] = None # (3)! 
- - - pprint(Model.model_json_schema()) - ''' - { - 'properties': { - 'a': { - 'anyOf': [ - {'type': 'integer'}, - {'type': 'null'} - ], - 'default': None, - 'title': 'A' - }, - 'b': { - 'default': None, - 'title': 'B', - 'type': 'integer' - } - }, - 'title': 'Model', - 'type': 'object' - } - ''' - ``` - - 1. The integer and null types are both included in the schema for `a`. - 2. The integer type is the only type included in the schema for `b`. - 3. The entirety of the `c` field is omitted from the schema. - """ - - def __class_getitem__(cls, item: AnyType) -> AnyType: - return Annotated[item, cls()] - - def __get_pydantic_json_schema__( - self, core_schema: CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - raise PydanticOmit - - def __hash__(self) -> int: - return hash(type(self)) - - -def _get_typed_dict_config(schema: core_schema.TypedDictSchema) -> ConfigDict: - metadata = _core_metadata.CoreMetadataHandler(schema).metadata - cls = metadata.get('pydantic_typed_dict_cls') - if cls is not None: - try: - return _decorators.get_attribute_from_bases(cls, '__pydantic_config__') - except AttributeError: - pass - return {} diff --git a/lib/pydantic/main.py b/lib/pydantic/main.py index 8c7ebbbf..69f3b751 100644 --- a/lib/pydantic/main.py +++ b/lib/pydantic/main.py @@ -1,1434 +1,980 @@ -"""Logic for creating models.""" -from __future__ import annotations as _annotations - -import operator -import sys -import types -import typing import warnings -from copy import copy, deepcopy -from typing import Any, ClassVar - -import pydantic_core -import typing_extensions -from pydantic_core import PydanticUndefined - -from ._internal import ( - _config, - _decorators, - _fields, - _forward_ref, - _generics, - _mock_val_ser, - _model_construction, - _repr, - _typing_extra, - _utils, +from abc import ABCMeta +from copy import deepcopy +from enum import Enum +from functools import partial +from pathlib import Path +from types import FunctionType, prepare_class, 
resolve_bases +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + Callable, + ClassVar, + Dict, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, + no_type_check, + overload, ) -from ._migration import getattr_migration -from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler -from .config import ConfigDict -from .errors import PydanticUndefinedAnnotation, PydanticUserError -from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue, model_json_schema -from .warnings import PydanticDeprecatedSince20 -if typing.TYPE_CHECKING: +from typing_extensions import dataclass_transform + +from .class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators +from .config import BaseConfig, Extra, inherit_config, prepare_config +from .error_wrappers import ErrorWrapper, ValidationError +from .errors import ConfigError, DictError, ExtraError, MissingError +from .fields import ( + MAPPING_LIKE_SHAPES, + Field, + FieldInfo, + ModelField, + ModelPrivateAttr, + PrivateAttr, + Undefined, + is_finalvar_with_default_val, +) +from .json import custom_pydantic_encoder, pydantic_encoder +from .parse import Protocol, load_file, load_str_bytes +from .schema import default_ref_template, model_schema +from .types import PyObject, StrBytes +from .typing import ( + AnyCallable, + get_args, + get_origin, + is_classvar, + is_namedtuple, + is_union, + resolve_annotations, + update_model_forward_refs, +) +from .utils import ( + DUNDER_ATTRIBUTES, + ROOT_KEY, + ClassAttribute, + GetterDict, + Representation, + ValueItems, + generate_model_signature, + is_valid_field, + is_valid_private_name, + lenient_issubclass, + sequence_like, + smart_deepcopy, + unique_list, + validate_field_name, +) + +if TYPE_CHECKING: from inspect import Signature - from pathlib import Path - from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator - from typing_extensions 
import Literal, Unpack - - from ._internal._utils import AbstractSetIntStr, MappingIntStrAny - from .deprecated.parse import Protocol as DeprecatedParseProtocol - from .fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr - from .fields import Field as _Field - - TupleGenerator = typing.Generator[typing.Tuple[str, Any], None, None] - Model = typing.TypeVar('Model', bound='BaseModel') - # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope - IncEx: typing_extensions.TypeAlias = 'set[int] | set[str] | dict[int, Any] | dict[str, Any] | None' -else: - # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 - # and https://youtrack.jetbrains.com/issue/PY-51428 - DeprecationWarning = PydanticDeprecatedSince20 - -__all__ = 'BaseModel', 'create_model' - -_object_setattr = _model_construction.object_setattr - - -class BaseModel(metaclass=_model_construction.ModelMetaclass): - """Usage docs: https://docs.pydantic.dev/2.6/concepts/models/ - - A base class for creating Pydantic models. - - Attributes: - __class_vars__: The names of classvars defined on the model. - __private_attributes__: Metadata about the private attributes of the model. - __signature__: The signature for instantiating the model. - - __pydantic_complete__: Whether model building is completed, or if there are still undefined fields. - __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer. - __pydantic_custom_init__: Whether the model has a custom `__init__` function. - __pydantic_decorators__: Metadata containing the decorators defined on the model. - This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1. - __pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to - __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these. 
- __pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models. - __pydantic_post_init__: The name of the post-init method for the model, if defined. - __pydantic_root_model__: Whether the model is a `RootModel`. - __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model. - __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model. - - __pydantic_extra__: An instance attribute with the values of extra fields from validation when - `model_config['extra'] == 'allow'`. - __pydantic_fields_set__: An instance attribute with the names of fields explicitly set. - __pydantic_private__: Instance attribute with the values of private attributes set on the model instance. - """ - - if typing.TYPE_CHECKING: - # Here we provide annotations for the attributes of BaseModel. - # Many of these are populated by the metaclass, which is why this section is in a `TYPE_CHECKING` block. - # However, for the sake of easy review, we have included type annotations of all class and instance attributes - # of `BaseModel` here: - - # Class attributes - model_config: ClassVar[ConfigDict] - """ - Configuration for the model, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict]. - """ - - model_fields: ClassVar[dict[str, FieldInfo]] - """ - Metadata about the fields defined on the model, - mapping of field names to [`FieldInfo`][pydantic.fields.FieldInfo]. - - This replaces `Model.__fields__` from Pydantic V1. 
- """ - - model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]] - """A dictionary of computed field names and their corresponding `ComputedFieldInfo` objects.""" - - __class_vars__: ClassVar[set[str]] - __private_attributes__: ClassVar[dict[str, ModelPrivateAttr]] - __signature__: ClassVar[Signature] - - __pydantic_complete__: ClassVar[bool] - __pydantic_core_schema__: ClassVar[CoreSchema] - __pydantic_custom_init__: ClassVar[bool] - __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] - __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata] - __pydantic_parent_namespace__: ClassVar[dict[str, Any] | None] - __pydantic_post_init__: ClassVar[None | Literal['model_post_init']] - __pydantic_root_model__: ClassVar[bool] - __pydantic_serializer__: ClassVar[SchemaSerializer] - __pydantic_validator__: ClassVar[SchemaValidator] - - # Instance attributes - # Note: we use the non-existent kwarg `init=False` in pydantic.fields.Field below so that @dataclass_transform - # doesn't think these are valid as keyword arguments to the class initializer. 
- __pydantic_extra__: dict[str, Any] | None = _Field(init=False) # type: ignore - __pydantic_fields_set__: set[str] = _Field(init=False) # type: ignore - __pydantic_private__: dict[str, Any] | None = _Field(init=False) # type: ignore - - else: - # `model_fields` and `__pydantic_decorators__` must be set for - # pydantic._internal._generate_schema.GenerateSchema.model_schema to work for a plain BaseModel annotation - model_fields = {} - model_computed_fields = {} - - __pydantic_decorators__ = _decorators.DecoratorInfos() - __pydantic_parent_namespace__ = None - # Prevent `BaseModel` from being instantiated directly: - __pydantic_validator__ = _mock_val_ser.MockValSer( - 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly', - val_or_ser='validator', - code='base-model-instantiated', - ) - __pydantic_serializer__ = _mock_val_ser.MockValSer( - 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly', - val_or_ser='serializer', - code='base-model-instantiated', - ) - - __slots__ = '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__' - - model_config = ConfigDict() - __pydantic_complete__ = False - __pydantic_root_model__ = False - - def __init__(self, /, **data: Any) -> None: # type: ignore - """Create a new model by parsing and validating input data from keyword arguments. - - Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be - validated to form a valid model. - - `self` is explicitly positional-only to allow `self` as a field name. 
- """ - # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks - __tracebackhide__ = True - self.__pydantic_validator__.validate_python(data, self_instance=self) - - # The following line sets a flag that we use to determine when `__init__` gets overridden by the user - __init__.__pydantic_base_init__ = True # pyright: ignore[reportFunctionMemberAccess] - - @property - def model_extra(self) -> dict[str, Any] | None: - """Get extra fields set during validation. - - Returns: - A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`. - """ - return self.__pydantic_extra__ - - @property - def model_fields_set(self) -> set[str]: - """Returns the set of fields that have been explicitly set on this model instance. - - Returns: - A set of strings representing the fields that have been set, - i.e. that were not filled from defaults. - """ - return self.__pydantic_fields_set__ - - @classmethod - def model_construct(cls: type[Model], _fields_set: set[str] | None = None, **values: Any) -> Model: - """Creates a new instance of the `Model` class with validated data. - - Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. - Default values are respected, but no other validation is performed. - Behaves as if `Config.extra = 'allow'` was set since it adds all passed values - - Args: - _fields_set: The set of field names accepted for the Model instance. - values: Trusted or pre-validated data dictionary. - - Returns: - A new instance of the `Model` class with validated data. 
- """ - m = cls.__new__(cls) - fields_values: dict[str, Any] = {} - fields_set = set() - - for name, field in cls.model_fields.items(): - if field.alias and field.alias in values: - fields_values[name] = values.pop(field.alias) - fields_set.add(name) - elif name in values: - fields_values[name] = values.pop(name) - fields_set.add(name) - elif not field.is_required(): - fields_values[name] = field.get_default(call_default_factory=True) - if _fields_set is None: - _fields_set = fields_set - - _extra: dict[str, Any] | None = None - if cls.model_config.get('extra') == 'allow': - _extra = {} - for k, v in values.items(): - _extra[k] = v - else: - fields_values.update(values) - _object_setattr(m, '__dict__', fields_values) - _object_setattr(m, '__pydantic_fields_set__', _fields_set) - if not cls.__pydantic_root_model__: - _object_setattr(m, '__pydantic_extra__', _extra) - - if cls.__pydantic_post_init__: - m.model_post_init(None) - # update private attributes with values set - if hasattr(m, '__pydantic_private__') and m.__pydantic_private__ is not None: - for k, v in values.items(): - if k in m.__private_attributes__: - m.__pydantic_private__[k] = v - - elif not cls.__pydantic_root_model__: - # Note: if there are any private attributes, cls.__pydantic_post_init__ would exist - # Since it doesn't, that means that `__pydantic_private__` should be set to None - _object_setattr(m, '__pydantic_private__', None) - - return m - - def model_copy(self: Model, *, update: dict[str, Any] | None = None, deep: bool = False) -> Model: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#model_copy - - Returns a copy of the model. - - Args: - update: Values to change/add in the new model. Note: the data is not validated - before creating the new model. You should trust this data. - deep: Set to `True` to make a deep copy of the model. - - Returns: - New model instance. 
- """ - copied = self.__deepcopy__() if deep else self.__copy__() - if update: - if self.model_config.get('extra') == 'allow': - for k, v in update.items(): - if k in self.model_fields: - copied.__dict__[k] = v - else: - if copied.__pydantic_extra__ is None: - copied.__pydantic_extra__ = {} - copied.__pydantic_extra__[k] = v - else: - copied.__dict__.update(update) - copied.__pydantic_fields_set__.update(update.keys()) - return copied - - def model_dump( - self, - *, - mode: Literal['json', 'python'] | str = 'python', - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool = True, - ) -> dict[str, Any]: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump - - Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. - - Args: - mode: The mode in which `to_python` should run. - If mode is 'json', the output will only contain JSON serializable types. - If mode is 'python', the output may contain non-JSON-serializable Python objects. - include: A list of fields to include in the output. - exclude: A list of fields to exclude from the output. - by_alias: Whether to use the field's alias in the dictionary key if defined. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that are set to their default value. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T]. - warnings: Whether to log warnings when invalid fields are encountered. - - Returns: - A dictionary representation of the model. 
- """ - return self.__pydantic_serializer__.to_python( - self, - mode=mode, - by_alias=by_alias, - include=include, - exclude=exclude, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - round_trip=round_trip, - warnings=warnings, - ) - - def model_dump_json( - self, - *, - indent: int | None = None, - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool = True, - ) -> str: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json - - Generates a JSON representation of the model using Pydantic's `to_json` method. - - Args: - indent: Indentation to use in the JSON output. If None is passed, the output will be compact. - include: Field(s) to include in the JSON output. - exclude: Field(s) to exclude from the JSON output. - by_alias: Whether to serialize using field aliases. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that are set to their default value. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T]. - warnings: Whether to log warnings when invalid fields are encountered. - - Returns: - A JSON string representation of the model. 
- """ - return self.__pydantic_serializer__.to_json( - self, - indent=indent, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - round_trip=round_trip, - warnings=warnings, - ).decode() - - @classmethod - def model_json_schema( - cls, - by_alias: bool = True, - ref_template: str = DEFAULT_REF_TEMPLATE, - schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, - mode: JsonSchemaMode = 'validation', - ) -> dict[str, Any]: - """Generates a JSON schema for a model class. - - Args: - by_alias: Whether to use attribute aliases or not. - ref_template: The reference template. - schema_generator: To override the logic used to generate the JSON schema, as a subclass of - `GenerateJsonSchema` with your desired modifications - mode: The mode in which to generate the schema. - - Returns: - The JSON schema for the given model class. - """ - return model_json_schema( - cls, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator, mode=mode - ) - - @classmethod - def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str: - """Compute the class name for parametrizations of generic classes. - - This method can be overridden to achieve a custom naming scheme for generic BaseModels. - - Args: - params: Tuple of types of the class. Given a generic class - `Model` with 2 type variables and a concrete model `Model[str, int]`, - the value `(str, int)` would be passed to `params`. - - Returns: - String representing the new class where `params` are passed to `cls` as type variables. - - Raises: - TypeError: Raised when trying to generate concrete names for non-generic models. - """ - if not issubclass(cls, typing.Generic): - raise TypeError('Concrete names should only be generated for generic models.') - - # Any strings received should represent forward references, so we handle them specially below. 
- # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future, - # we may be able to remove this special case. - param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params] - params_component = ', '.join(param_names) - return f'{cls.__name__}[{params_component}]' - - def model_post_init(self, __context: Any) -> None: - """Override this method to perform additional initialization after `__init__` and `model_construct`. - This is useful if you want to do some validation that requires the entire model to be initialized. - """ - pass - - @classmethod - def model_rebuild( - cls, - *, - force: bool = False, - raise_errors: bool = True, - _parent_namespace_depth: int = 2, - _types_namespace: dict[str, Any] | None = None, - ) -> bool | None: - """Try to rebuild the pydantic-core schema for the model. - - This may be necessary when one of the annotations is a ForwardRef which could not be resolved during - the initial attempt to build the schema, and automatic rebuilding fails. - - Args: - force: Whether to force the rebuilding of the model schema, defaults to `False`. - raise_errors: Whether to raise errors, defaults to `True`. - _parent_namespace_depth: The depth level of the parent namespace, defaults to 2. - _types_namespace: The types namespace, defaults to `None`. - - Returns: - Returns `None` if the schema is already "complete" and rebuilding was not required. - If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. 
- """ - if not force and cls.__pydantic_complete__: - return None - else: - if '__pydantic_core_schema__' in cls.__dict__: - delattr(cls, '__pydantic_core_schema__') # delete cached value to ensure full rebuild happens - if _types_namespace is not None: - types_namespace: dict[str, Any] | None = _types_namespace.copy() - else: - if _parent_namespace_depth > 0: - frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {} - cls_parent_ns = ( - _model_construction.unpack_lenient_weakvaluedict(cls.__pydantic_parent_namespace__) or {} + from .class_validators import ValidatorListDict + from .types import ModelOrDc + from .typing import ( + AbstractSetIntStr, + AnyClassMethod, + CallableGenerator, + DictAny, + DictStrAny, + MappingIntStrAny, + ReprArgs, + SetStr, + TupleGenerator, + ) + + Model = TypeVar('Model', bound='BaseModel') + +__all__ = 'BaseModel', 'create_model', 'validate_model' + +_T = TypeVar('_T') + + +def validate_custom_root_type(fields: Dict[str, ModelField]) -> None: + if len(fields) > 1: + raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields') + + +def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]: + def hash_function(self_: Any) -> int: + return hash(self_.__class__) + hash(tuple(self_.__dict__.values())) + + return hash_function if frozen else None + + +# If a field is of type `Callable`, its default value should be a function and cannot to ignored. +ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod) +# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model +UNTOUCHED_TYPES: Tuple[Any, ...] 
= (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES +# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra +# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's +# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for +# the `BaseModel` class, since that's defined immediately after the metaclass. +_is_base_model_class_defined = False + + +@dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) +class ModelMetaclass(ABCMeta): + @no_type_check # noqa C901 + def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 + fields: Dict[str, ModelField] = {} + config = BaseConfig + validators: 'ValidatorListDict' = {} + + pre_root_validators, post_root_validators = [], [] + private_attributes: Dict[str, ModelPrivateAttr] = {} + base_private_attributes: Dict[str, ModelPrivateAttr] = {} + slots: SetStr = namespace.get('__slots__', ()) + slots = {slots} if isinstance(slots, str) else set(slots) + class_vars: SetStr = set() + hash_func: Optional[Callable[[Any], int]] = None + + for base in reversed(bases): + if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel: + fields.update(smart_deepcopy(base.__fields__)) + config = inherit_config(base.__config__, config) + validators = inherit_validators(base.__validators__, validators) + pre_root_validators += base.__pre_root_validators__ + post_root_validators += base.__post_root_validators__ + base_private_attributes.update(base.__private_attributes__) + class_vars.update(base.__class_vars__) + hash_func = base.__hash__ + + resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True) + allowed_config_kwargs: SetStr = { + key + for key in dir(config) + if not (key.startswith('__') and key.endswith('__')) # skip dunder methods and attributes + } + config_kwargs = {key: 
kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs} + config_from_namespace = namespace.get('Config') + if config_kwargs and config_from_namespace: + raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs') + config = inherit_config(config_from_namespace, config, **config_kwargs) + + validators = inherit_validators(extract_validators(namespace), validators) + vg = ValidatorGroup(validators) + + for f in fields.values(): + f.set_config(config) + extra_validators = vg.get_validators(f.name) + if extra_validators: + f.class_validators.update(extra_validators) + # re-run prepare to add extra validators + f.populate_validators() + + prepare_config(config, name) + + untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES + + def is_untouched(v: Any) -> bool: + return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method' + + if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'): + annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None)) + # annotation only fields need to come first in fields + for ann_name, ann_type in annotations.items(): + if is_classvar(ann_type): + class_vars.add(ann_name) + elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)): + class_vars.add(ann_name) + elif is_valid_field(ann_name): + validate_field_name(bases, ann_name) + value = namespace.get(ann_name, Undefined) + allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,) + if ( + is_untouched(value) + and ann_type != PyObject + and not any( + lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types + ) + ): + continue + fields[ann_name] = ModelField.infer( + name=ann_name, + value=value, + annotation=ann_type, + class_validators=vg.get_validators(ann_name), + config=config, ) - types_namespace = {**cls_parent_ns, 
**frame_parent_ns} - cls.__pydantic_parent_namespace__ = _model_construction.build_lenient_weakvaluedict(types_namespace) - else: - types_namespace = _model_construction.unpack_lenient_weakvaluedict( - cls.__pydantic_parent_namespace__ + elif ann_name not in namespace and config.underscore_attrs_are_private: + private_attributes[ann_name] = PrivateAttr() + + untouched_types = UNTOUCHED_TYPES + config.keep_untouched + for var_name, value in namespace.items(): + can_be_changed = var_name not in class_vars and not is_untouched(value) + if isinstance(value, ModelPrivateAttr): + if not is_valid_private_name(var_name): + raise NameError( + f'Private attributes "{var_name}" must not be a valid field name; ' + f'Use sunder or dunder names, e. g. "_{var_name}" or "__{var_name}__"' + ) + private_attributes[var_name] = value + elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed: + private_attributes[var_name] = PrivateAttr(default=value) + elif is_valid_field(var_name) and var_name not in annotations and can_be_changed: + validate_field_name(bases, var_name) + inferred = ModelField.infer( + name=var_name, + value=value, + annotation=annotations.get(var_name, Undefined), + class_validators=vg.get_validators(var_name), + config=config, ) + if var_name in fields: + if lenient_issubclass(inferred.type_, fields[var_name].type_): + inferred.type_ = fields[var_name].type_ + else: + raise TypeError( + f'The type of {name}.{var_name} differs from the new default value; ' + f'if you wish to change the type of this field, please use a type annotation' + ) + fields[var_name] = inferred - types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace) - - # manually override defer_build so complete_model_class doesn't skip building the model again - config = {**cls.model_config, 'defer_build': False} - return _model_construction.complete_model_class( - cls, - cls.__name__, - _config.ConfigWrapper(config, check=False), - 
raise_errors=raise_errors, - types_namespace=types_namespace, - ) - - @classmethod - def model_validate( - cls: type[Model], - obj: Any, - *, - strict: bool | None = None, - from_attributes: bool | None = None, - context: dict[str, Any] | None = None, - ) -> Model: - """Validate a pydantic model instance. - - Args: - obj: The object to validate. - strict: Whether to enforce types strictly. - from_attributes: Whether to extract data from object attributes. - context: Additional context to pass to the validator. - - Raises: - ValidationError: If the object could not be validated. - - Returns: - The validated model instance. - """ - # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks - __tracebackhide__ = True - return cls.__pydantic_validator__.validate_python( - obj, strict=strict, from_attributes=from_attributes, context=context - ) - - @classmethod - def model_validate_json( - cls: type[Model], - json_data: str | bytes | bytearray, - *, - strict: bool | None = None, - context: dict[str, Any] | None = None, - ) -> Model: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/json/#json-parsing - - Validate the given JSON data against the Pydantic model. - - Args: - json_data: The JSON data to validate. - strict: Whether to enforce types strictly. - context: Extra variables to pass to the validator. - - Returns: - The validated Pydantic model. - - Raises: - ValueError: If `json_data` is not a JSON string. - """ - # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks - __tracebackhide__ = True - return cls.__pydantic_validator__.validate_json(json_data, strict=strict, context=context) - - @classmethod - def model_validate_strings( - cls: type[Model], - obj: Any, - *, - strict: bool | None = None, - context: dict[str, Any] | None = None, - ) -> Model: - """Validate the given object contains string data against the Pydantic model. 
- - Args: - obj: The object contains string data to validate. - strict: Whether to enforce types strictly. - context: Extra variables to pass to the validator. - - Returns: - The validated Pydantic model. - """ - # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks - __tracebackhide__ = True - return cls.__pydantic_validator__.validate_strings(obj, strict=strict, context=context) - - @classmethod - def __get_pydantic_core_schema__(cls, __source: type[BaseModel], __handler: GetCoreSchemaHandler) -> CoreSchema: - """Hook into generating the model's CoreSchema. - - Args: - __source: The class we are generating a schema for. - This will generally be the same as the `cls` argument if this is a classmethod. - __handler: Call into Pydantic's internal JSON schema generation. - A callable that calls into Pydantic's internal CoreSchema generation logic. - - Returns: - A `pydantic-core` `CoreSchema`. - """ - # Only use the cached value from this _exact_ class; we don't want one from a parent class - # This is why we check `cls.__dict__` and don't use `cls.__pydantic_core_schema__` or similar. - if '__pydantic_core_schema__' in cls.__dict__: - # Due to the way generic classes are built, it's possible that an invalid schema may be temporarily - # set on generic classes. I think we could resolve this to ensure that we get proper schema caching - # for generics, but for simplicity for now, we just always rebuild if the class has a generic origin. - if not cls.__pydantic_generic_metadata__['origin']: - return cls.__pydantic_core_schema__ - - return __handler(__source) - - @classmethod - def __get_pydantic_json_schema__( - cls, - __core_schema: CoreSchema, - __handler: GetJsonSchemaHandler, - ) -> JsonSchemaValue: - """Hook into generating the model's JSON schema. - - Args: - __core_schema: A `pydantic-core` CoreSchema. 
- You can ignore this argument and call the handler with a new CoreSchema, - wrap this CoreSchema (`{'type': 'nullable', 'schema': current_schema}`), - or just call the handler with the original schema. - __handler: Call into Pydantic's internal JSON schema generation. - This will raise a `pydantic.errors.PydanticInvalidForJsonSchema` if JSON schema - generation fails. - Since this gets called by `BaseModel.model_json_schema` you can override the - `schema_generator` argument to that function to change JSON schema generation globally - for a type. - - Returns: - A JSON schema, as a Python object. - """ - return __handler(__core_schema) - - @classmethod - def __pydantic_init_subclass__(cls, **kwargs: Any) -> None: - """This is intended to behave just like `__init_subclass__`, but is called by `ModelMetaclass` - only after the class is actually fully initialized. In particular, attributes like `model_fields` will - be present when this is called. - - This is necessary because `__init_subclass__` will always be called by `type.__new__`, - and it would require a prohibitively large refactor to the `ModelMetaclass` to ensure that - `type.__new__` was called in such a manner that the class would already be sufficiently initialized. - - This will receive the same `kwargs` that would be passed to the standard `__init_subclass__`, namely, - any kwargs passed to the class definition that aren't used internally by pydantic. - - Args: - **kwargs: Any keyword arguments passed to the class definition that aren't used internally - by pydantic. - """ - pass - - def __class_getitem__( - cls, typevar_values: type[Any] | tuple[type[Any], ...] 
- ) -> type[BaseModel] | _forward_ref.PydanticRecursiveRef: - cached = _generics.get_cached_generic_type_early(cls, typevar_values) - if cached is not None: - return cached - - if cls is BaseModel: - raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel') - if not hasattr(cls, '__parameters__'): - raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic') - if not cls.__pydantic_generic_metadata__['parameters'] and typing.Generic not in cls.__bases__: - raise TypeError(f'{cls} is not a generic class') - - if not isinstance(typevar_values, tuple): - typevar_values = (typevar_values,) - _generics.check_parameters_count(cls, typevar_values) - - # Build map from generic typevars to passed params - typevars_map: dict[_typing_extra.TypeVarType, type[Any]] = dict( - zip(cls.__pydantic_generic_metadata__['parameters'], typevar_values) - ) - - if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map: - submodel = cls # if arguments are equal to parameters it's the same object - _generics.set_cached_generic_type(cls, typevar_values, submodel) + _custom_root_type = ROOT_KEY in fields + if _custom_root_type: + validate_custom_root_type(fields) + vg.check_for_unused() + if config.json_encoders: + json_encoder = partial(custom_pydantic_encoder, config.json_encoders) else: - parent_args = cls.__pydantic_generic_metadata__['args'] - if not parent_args: - args = typevar_values - else: - args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args) + json_encoder = pydantic_encoder + pre_rv_new, post_rv_new = extract_root_validators(namespace) - origin = cls.__pydantic_generic_metadata__['origin'] or cls - model_name = origin.model_parametrized_name(args) - params = tuple( - {param: None for param in _generics.iter_contained_typevars(typevars_map.values())} - ) # use dict as ordered set + if hash_func is None: + hash_func = generate_hash_function(config.frozen) - with 
_generics.generic_recursion_self_type(origin, args) as maybe_self_type: - if maybe_self_type is not None: - return maybe_self_type + exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'} + new_namespace = { + '__config__': config, + '__fields__': fields, + '__exclude_fields__': { + name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None + } + or None, + '__include_fields__': { + name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None + } + or None, + '__validators__': vg.validators, + '__pre_root_validators__': unique_list( + pre_root_validators + pre_rv_new, + name_factory=lambda v: v.__name__, + ), + '__post_root_validators__': unique_list( + post_root_validators + post_rv_new, + name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__, + ), + '__schema_cache__': {}, + '__json_encoder__': staticmethod(json_encoder), + '__custom_root_type__': _custom_root_type, + '__private_attributes__': {**base_private_attributes, **private_attributes}, + '__slots__': slots | private_attributes.keys(), + '__hash__': hash_func, + '__class_vars__': class_vars, + **{n: v for n, v in namespace.items() if n not in exclude_from_namespace}, + } - cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args) - if cached is not None: - return cached + cls = super().__new__(mcs, name, bases, new_namespace, **kwargs) + # set __signature__ attr only for model class, but not for its instances + cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config)) + if resolve_forward_refs: + cls.__try_update_forward_refs__() - # Attempt to rebuild the origin in case new types have been defined - try: - # depth 3 gets you above this __class_getitem__ call - origin.model_rebuild(_parent_namespace_depth=3) - except PydanticUndefinedAnnotation: - # It's okay if it fails, it just means there are still 
undefined types - # that could be evaluated later. - # TODO: Make sure validation fails if there are still undefined types, perhaps using MockValidator - pass + # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 + # for attributes not in `new_namespace` (e.g. private attributes) + for name, obj in namespace.items(): + if name not in new_namespace: + set_name = getattr(obj, '__set_name__', None) + if callable(set_name): + set_name(cls, name) - submodel = _generics.create_generic_submodel(model_name, origin, args, params) + return cls - # Update cache - _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args) + def __instancecheck__(self, instance: Any) -> bool: + """ + Avoid calling ABC _abc_subclasscheck unless we're pretty sure. - return submodel + See #3829 and python/cpython#92810 + """ + return hasattr(instance, '__fields__') and super().__instancecheck__(instance) - def __copy__(self: Model) -> Model: - """Returns a shallow copy of the model.""" - cls = type(self) - m = cls.__new__(cls) - _object_setattr(m, '__dict__', copy(self.__dict__)) - _object_setattr(m, '__pydantic_extra__', copy(self.__pydantic_extra__)) - _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) - if self.__pydantic_private__ is None: - _object_setattr(m, '__pydantic_private__', None) - else: - _object_setattr( - m, - '__pydantic_private__', - {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, - ) +object_setattr = object.__setattr__ - return m - def __deepcopy__(self: Model, memo: dict[int, Any] | None = None) -> Model: - """Returns a deep copy of the model.""" - cls = type(self) - m = cls.__new__(cls) - _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo)) - _object_setattr(m, '__pydantic_extra__', deepcopy(self.__pydantic_extra__, memo=memo)) - # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str], - # and attempting a deepcopy 
would be marginally slower. - _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) +class BaseModel(Representation, metaclass=ModelMetaclass): + if TYPE_CHECKING: + # populated by the metaclass, defined here to help IDEs only + __fields__: ClassVar[Dict[str, ModelField]] = {} + __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __validators__: ClassVar[Dict[str, AnyCallable]] = {} + __pre_root_validators__: ClassVar[List[AnyCallable]] + __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]] + __config__: ClassVar[Type[BaseConfig]] = BaseConfig + __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x + __schema_cache__: ClassVar['DictAny'] = {} + __custom_root_type__: ClassVar[bool] = False + __signature__: ClassVar['Signature'] + __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] + __class_vars__: ClassVar[SetStr] + __fields_set__: ClassVar[SetStr] = set() - if self.__pydantic_private__ is None: - _object_setattr(m, '__pydantic_private__', None) - else: - _object_setattr( - m, - '__pydantic_private__', - deepcopy({k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, memo=memo), - ) + Config = BaseConfig + __slots__ = ('__dict__', '__fields_set__') + __doc__ = '' # Null out the Representation docstring - return m + def __init__(__pydantic_self__, **data: Any) -> None: + """ + Create a new model by parsing and validating input data from keyword arguments. - if not typing.TYPE_CHECKING: - # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access + Raises ValidationError if the input data cannot be parsed to form a valid model. 
+ """ + # Uses something other than `self` the first arg to allow "self" as a settable attribute + values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data) + if validation_error: + raise validation_error + try: + object_setattr(__pydantic_self__, '__dict__', values) + except TypeError as e: + raise TypeError( + 'Model values must be a dict; you may not have returned a dictionary from a root validator' + ) from e + object_setattr(__pydantic_self__, '__fields_set__', fields_set) + __pydantic_self__._init_private_attributes() - def __getattr__(self, item: str) -> Any: - private_attributes = object.__getattribute__(self, '__private_attributes__') - if item in private_attributes: - attribute = private_attributes[item] - if hasattr(attribute, '__get__'): - return attribute.__get__(self, type(self)) # type: ignore + @no_type_check + def __setattr__(self, name, value): # noqa: C901 (ignore complexity) + if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES: + return object_setattr(self, name, value) - try: - # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items - return self.__pydantic_private__[item] # type: ignore - except KeyError as exc: - raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc - else: - # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized. 
- # See `BaseModel.__repr_args__` for more details - try: - pydantic_extra = object.__getattribute__(self, '__pydantic_extra__') - except AttributeError: - pydantic_extra = None - - if pydantic_extra is not None: - try: - return pydantic_extra[item] - except KeyError as exc: - raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc - else: - if hasattr(self.__class__, item): - return super().__getattribute__(item) # Raises AttributeError if appropriate - else: - # this is the current error - raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') - - def __setattr__(self, name: str, value: Any) -> None: - if name in self.__class_vars__: - raise AttributeError( - f'{name!r} is a ClassVar of `{self.__class__.__name__}` and cannot be set on an instance. ' - f'If you want to set a value on the class, use `{self.__class__.__name__}.{name} = value`.' - ) - elif not _fields.is_valid_field_name(name): - if self.__pydantic_private__ is None or name not in self.__private_attributes__: - _object_setattr(self, name, value) - else: - attribute = self.__private_attributes__[name] - if hasattr(attribute, '__set__'): - attribute.__set__(self, value) # type: ignore - else: - self.__pydantic_private__[name] = value - return - - self._check_frozen(name, value) - - attr = getattr(self.__class__, name, None) - if isinstance(attr, property): - attr.__set__(self, value) - elif self.model_config.get('validate_assignment', None): - self.__pydantic_validator__.validate_assignment(self, name, value) - elif self.model_config.get('extra') != 'allow' and name not in self.model_fields: - # TODO - matching error + if self.__config__.extra is not Extra.allow and name not in self.__fields__: raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') - elif self.model_config.get('extra') == 'allow' and name not in self.model_fields: - if self.model_extra and name in self.model_extra: - self.__pydantic_extra__[name] = 
value # type: ignore - else: + elif not self.__config__.allow_mutation or self.__config__.frozen: + raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment') + elif name in self.__fields__ and self.__fields__[name].final: + raise TypeError( + f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment' + ) + elif self.__config__.validate_assignment: + new_values = {**self.__dict__, name: value} + + for validator in self.__pre_root_validators__: try: - getattr(self, name) - except AttributeError: - # attribute does not already exist on instance, so put it in extra - self.__pydantic_extra__[name] = value # type: ignore + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__) + + known_field = self.__fields__.get(name, None) + if known_field: + # We want to + # - make sure validators are called without the current value for this field inside `values` + # - keep other values (e.g. 
submodels) untouched (using `BaseModel.dict()` will change them into dicts) + # - keep the order of the fields + if not known_field.field_info.allow_mutation: + raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned') + dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name} + value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__) + if error_: + raise ValidationError([error_], self.__class__) else: - # attribute _does_ already exist on instance, and was not in extra, so update it - _object_setattr(self, name, value) + new_values[name] = value + + errors = [] + for skip_on_failure, validator in self.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + if errors: + raise ValidationError(errors, self.__class__) + + # update the whole __dict__ as other values than just `value` + # may be changed (e.g. 
with `root_validator`) + object_setattr(self, '__dict__', new_values) else: self.__dict__[name] = value - self.__pydantic_fields_set__.add(name) - def __delattr__(self, item: str) -> Any: - if item in self.__private_attributes__: - attribute = self.__private_attributes__[item] - if hasattr(attribute, '__delete__'): - attribute.__delete__(self) # type: ignore - return + self.__fields_set__.add(name) - try: - # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items - del self.__pydantic_private__[item] # type: ignore - return - except KeyError as exc: - raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc - - self._check_frozen(item, None) - - if item in self.model_fields: - object.__delattr__(self, item) - elif self.__pydantic_extra__ is not None and item in self.__pydantic_extra__: - del self.__pydantic_extra__[item] - else: - try: - object.__delattr__(self, item) - except AttributeError: - raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') - - def _check_frozen(self, name: str, value: Any) -> None: - if self.model_config.get('frozen', None): - typ = 'frozen_instance' - elif getattr(self.model_fields.get(name), 'frozen', False): - typ = 'frozen_field' - else: - return - error: pydantic_core.InitErrorDetails = { - 'type': typ, - 'loc': (name,), - 'input': value, - } - raise pydantic_core.ValidationError.from_exception_data(self.__class__.__name__, [error]) - - def __getstate__(self) -> dict[Any, Any]: - private = self.__pydantic_private__ - if private: - private = {k: v for k, v in private.items() if v is not PydanticUndefined} + def __getstate__(self) -> 'DictAny': + private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__) return { '__dict__': self.__dict__, - '__pydantic_extra__': self.__pydantic_extra__, - '__pydantic_fields_set__': self.__pydantic_fields_set__, - '__pydantic_private__': private, + '__fields_set__': 
self.__fields_set__, + '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined}, } - def __setstate__(self, state: dict[Any, Any]) -> None: - _object_setattr(self, '__pydantic_fields_set__', state['__pydantic_fields_set__']) - _object_setattr(self, '__pydantic_extra__', state['__pydantic_extra__']) - _object_setattr(self, '__pydantic_private__', state['__pydantic_private__']) - _object_setattr(self, '__dict__', state['__dict__']) + def __setstate__(self, state: 'DictAny') -> None: + object_setattr(self, '__dict__', state['__dict__']) + object_setattr(self, '__fields_set__', state['__fields_set__']) + for name, value in state.get('__private_attribute_values__', {}).items(): + object_setattr(self, name, value) - def __eq__(self, other: Any) -> bool: - if isinstance(other, BaseModel): - # When comparing instances of generic types for equality, as long as all field values are equal, - # only require their generic origin types to be equal, rather than exact type equality. - # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1). - self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__ - other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__ + def _init_private_attributes(self) -> None: + for name, private_attr in self.__private_attributes__.items(): + default = private_attr.get_default() + if default is not Undefined: + object_setattr(self, name, default) - # Perform common checks first - if not ( - self_type == other_type - and self.__pydantic_private__ == other.__pydantic_private__ - and self.__pydantic_extra__ == other.__pydantic_extra__ - ): - return False - - # We only want to compare pydantic fields but ignoring fields is costly. 
- # We'll perform a fast check first, and fallback only when needed - # See GH-7444 and GH-7825 for rationale and a performance benchmark - - # First, do the fast (and sometimes faulty) __dict__ comparison - if self.__dict__ == other.__dict__: - # If the check above passes, then pydantic fields are equal, we can return early - return True - - # We don't want to trigger unnecessary costly filtering of __dict__ on all unequal objects, so we return - # early if there are no keys to ignore (we would just return False later on anyway) - model_fields = type(self).model_fields.keys() - if self.__dict__.keys() <= model_fields and other.__dict__.keys() <= model_fields: - return False - - # If we reach here, there are non-pydantic-fields keys, mapped to unequal values, that we need to ignore - # Resort to costly filtering of the __dict__ objects - # We use operator.itemgetter because it is much faster than dict comprehensions - # NOTE: Contrary to standard python class and instances, when the Model class has a default value for an - # attribute and the model instance doesn't have a corresponding attribute, accessing the missing attribute - # raises an error in BaseModel.__getattr__ instead of returning the class attribute - # So we can use operator.itemgetter() instead of operator.attrgetter() - getter = operator.itemgetter(*model_fields) if model_fields else lambda _: _utils._SENTINEL - try: - return getter(self.__dict__) == getter(other.__dict__) - except KeyError: - # In rare cases (such as when using the deprecated BaseModel.copy() method), - # the __dict__ may not contain all model fields, which is how we can get here. - # getter(self.__dict__) is much faster than any 'safe' method that accounts - # for missing keys, and wrapping it in a `try` doesn't slow things down much - # in the common case. 
- self_fields_proxy = _utils.SafeGetItemProxy(self.__dict__) - other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__) - return getter(self_fields_proxy) == getter(other_fields_proxy) - - # other instance is not a BaseModel - else: - return NotImplemented # delegate to the other item in the comparison - - if typing.TYPE_CHECKING: - # We put `__init_subclass__` in a TYPE_CHECKING block because, even though we want the type-checking benefits - # described in the signature of `__init_subclass__` below, we don't want to modify the default behavior of - # subclass initialization. - - def __init_subclass__(cls, **kwargs: Unpack[ConfigDict]): - """This signature is included purely to help type-checkers check arguments to class declaration, which - provides a way to conveniently set model_config key/value pairs. - - ```py - from pydantic import BaseModel - - class MyModel(BaseModel, extra='allow'): - ... - ``` - - However, this may be deceiving, since the _actual_ calls to `__init_subclass__` will not receive any - of the config arguments, and will only receive any keyword arguments passed during class initialization - that are _not_ expected keys in ConfigDict. (This is due to the way `ModelMetaclass.__new__` works.) - - Args: - **kwargs: Keyword arguments passed to the class definition, which set model_config - - Note: - You may want to override `__pydantic_init_subclass__` instead, which behaves similarly but is called - *after* the class is fully initialized. 
- """ - - def __iter__(self) -> TupleGenerator: - """So `dict(model)` works.""" - yield from [(k, v) for (k, v) in self.__dict__.items() if not k.startswith('_')] - extra = self.__pydantic_extra__ - if extra: - yield from extra.items() - - def __repr__(self) -> str: - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' - - def __repr_args__(self) -> _repr.ReprArgs: - for k, v in self.__dict__.items(): - field = self.model_fields.get(k) - if field and field.repr: - yield k, v - - # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized. - # This can happen if a `ValidationError` is raised during initialization and the instance's - # repr is generated as part of the exception handling. Therefore, we use `getattr` here - # with a fallback, even though the type hints indicate the attribute will always be present. - try: - pydantic_extra = object.__getattribute__(self, '__pydantic_extra__') - except AttributeError: - pydantic_extra = None - - if pydantic_extra is not None: - yield from ((k, v) for k, v in pydantic_extra.items()) - yield from ((k, getattr(self, k)) for k, v in self.model_computed_fields.items() if v.repr) - - # take logic from `_repr.Representation` without the side effects of inheritance, see #5740 - __repr_name__ = _repr.Representation.__repr_name__ - __repr_str__ = _repr.Representation.__repr_str__ - __pretty__ = _repr.Representation.__pretty__ - __rich_repr__ = _repr.Representation.__rich_repr__ - - def __str__(self) -> str: - return self.__repr_str__(' ') - - # ##### Deprecated methods from v1 ##### - @property - @typing_extensions.deprecated( - 'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None - ) - def __fields__(self) -> dict[str, FieldInfo]: - warnings.warn( - 'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=PydanticDeprecatedSince20 - ) - return self.model_fields - - @property - @typing_extensions.deprecated( - 'The `__fields_set__` 
attribute is deprecated, use `model_fields_set` instead.', - category=None, - ) - def __fields_set__(self) -> set[str]: - warnings.warn( - 'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.', - category=PydanticDeprecatedSince20, - ) - return self.__pydantic_fields_set__ - - @typing_extensions.deprecated('The `dict` method is deprecated; use `model_dump` instead.', category=None) - def dict( # noqa: D102 + def dict( self, *, - include: IncEx = None, - exclude: IncEx = None, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, by_alias: bool = False, + skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, - ) -> typing.Dict[str, Any]: # noqa UP006 - warnings.warn('The `dict` method is deprecated; use `model_dump` instead.', category=PydanticDeprecatedSince20) - return self.model_dump( - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, + ) -> 'DictStrAny': + """ + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. 
+ + """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + + return dict( + self._iter( + to_dict=True, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) ) - @typing_extensions.deprecated('The `json` method is deprecated; use `model_dump_json` instead.', category=None) - def json( # noqa: D102 + def json( self, *, - include: IncEx = None, - exclude: IncEx = None, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, by_alias: bool = False, + skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, - encoder: typing.Callable[[Any], Any] | None = PydanticUndefined, # type: ignore[assignment] - models_as_dict: bool = PydanticUndefined, # type: ignore[assignment] + encoder: Optional[Callable[[Any], Any]] = None, + models_as_dict: bool = True, **dumps_kwargs: Any, ) -> str: - warnings.warn( - 'The `json` method is deprecated; use `model_dump_json` instead.', category=PydanticDeprecatedSince20 - ) - if encoder is not PydanticUndefined: - raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.') - if models_as_dict is not PydanticUndefined: - raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.') - if dumps_kwargs: - raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.') - return self.model_dump_json( - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, + """ + Generate a JSON representation of the model, `include` and `exclude` 
arguments as per `dict()`. + + `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`. + """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__) + + # We don't directly call `self.dict()`, which does exactly this with `to_dict=True` + # because we want to be able to keep raw `BaseModel` instances and not as `dict`. + # This allows users to write custom JSON encoders for given `BaseModel` classes. + data = dict( + self._iter( + to_dict=models_as_dict, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) ) + if self.__custom_root_type__: + data = data[ROOT_KEY] + return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs) @classmethod - @typing_extensions.deprecated('The `parse_obj` method is deprecated; use `model_validate` instead.', category=None) - def parse_obj(cls: type[Model], obj: Any) -> Model: # noqa: D102 - warnings.warn( - 'The `parse_obj` method is deprecated; use `model_validate` instead.', category=PydanticDeprecatedSince20 - ) - return cls.model_validate(obj) + def _enforce_dict_if_root(cls, obj: Any) -> Any: + if cls.__custom_root_type__ and ( + not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY}) + or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES + ): + return {ROOT_KEY: obj} + else: + return obj @classmethod - @typing_extensions.deprecated( - 'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, ' - 'otherwise load the data then use `model_validate` instead.', - category=None, - ) - def parse_raw( # noqa: D102 - cls: type[Model], - b: str | bytes, + def parse_obj(cls: Type['Model'], obj: Any) -> 'Model': 
+ obj = cls._enforce_dict_if_root(obj) + if not isinstance(obj, dict): + try: + obj = dict(obj) + except (TypeError, ValueError) as e: + exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}') + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e + return cls(**obj) + + @classmethod + def parse_raw( + cls: Type['Model'], + b: StrBytes, *, - content_type: str | None = None, + content_type: str = None, encoding: str = 'utf8', - proto: DeprecatedParseProtocol | None = None, + proto: Protocol = None, allow_pickle: bool = False, - ) -> Model: # pragma: no cover - warnings.warn( - 'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, ' - 'otherwise load the data then use `model_validate` instead.', - category=PydanticDeprecatedSince20, - ) - from .deprecated import parse - + ) -> 'Model': try: - obj = parse.load_str_bytes( + obj = load_str_bytes( b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, ) - except (ValueError, TypeError) as exc: - import json - - # try to match V1 - if isinstance(exc, UnicodeDecodeError): - type_str = 'value_error.unicodedecode' - elif isinstance(exc, json.JSONDecodeError): - type_str = 'value_error.jsondecode' - elif isinstance(exc, ValueError): - type_str = 'value_error' - else: - type_str = 'type_error' - - # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same - error: pydantic_core.InitErrorDetails = { - # The type: ignore on the next line is to ignore the requirement of LiteralString - 'type': pydantic_core.PydanticCustomError(type_str, str(exc)), # type: ignore - 'loc': ('__root__',), - 'input': b, - } - raise pydantic_core.ValidationError.from_exception_data(cls.__name__, [error]) - return cls.model_validate(obj) + except (ValueError, TypeError, UnicodeDecodeError) as e: + raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls) + return 
cls.parse_obj(obj) @classmethod - @typing_extensions.deprecated( - 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON ' - 'use `model_validate_json`, otherwise `model_validate` instead.', - category=None, - ) - def parse_file( # noqa: D102 - cls: type[Model], - path: str | Path, + def parse_file( + cls: Type['Model'], + path: Union[str, Path], *, - content_type: str | None = None, + content_type: str = None, encoding: str = 'utf8', - proto: DeprecatedParseProtocol | None = None, + proto: Protocol = None, allow_pickle: bool = False, - ) -> Model: - warnings.warn( - 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON ' - 'use `model_validate_json`, otherwise `model_validate` instead.', - category=PydanticDeprecatedSince20, - ) - from .deprecated import parse - - obj = parse.load_file( + ) -> 'Model': + obj = load_file( path, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, ) return cls.parse_obj(obj) @classmethod - @typing_extensions.deprecated( - 'The `from_orm` method is deprecated; set ' - "`model_config['from_attributes']=True` and use `model_validate` instead.", - category=None, - ) - def from_orm(cls: type[Model], obj: Any) -> Model: # noqa: D102 - warnings.warn( - 'The `from_orm` method is deprecated; set ' - "`model_config['from_attributes']=True` and use `model_validate` instead.", - category=PydanticDeprecatedSince20, - ) - if not cls.model_config.get('from_attributes', None): - raise PydanticUserError( - 'You must set the config attribute `from_attributes=True` to use from_orm', code=None - ) - return cls.model_validate(obj) + def from_orm(cls: Type['Model'], obj: Any) -> 'Model': + if not cls.__config__.orm_mode: + raise ConfigError('You must have the config attribute orm_mode=True to use from_orm') + obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj) + m = 
cls.__new__(cls) + values, fields_set, validation_error = validate_model(cls, obj) + if validation_error: + raise validation_error + object_setattr(m, '__dict__', values) + object_setattr(m, '__fields_set__', fields_set) + m._init_private_attributes() + return m @classmethod - @typing_extensions.deprecated('The `construct` method is deprecated; use `model_construct` instead.', category=None) - def construct(cls: type[Model], _fields_set: set[str] | None = None, **values: Any) -> Model: # noqa: D102 - warnings.warn( - 'The `construct` method is deprecated; use `model_construct` instead.', category=PydanticDeprecatedSince20 - ) - return cls.model_construct(_fields_set=_fields_set, **values) - - @typing_extensions.deprecated( - 'The `copy` method is deprecated; use `model_copy` instead. ' - 'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.', - category=None, - ) - def copy( - self: Model, - *, - include: AbstractSetIntStr | MappingIntStrAny | None = None, - exclude: AbstractSetIntStr | MappingIntStrAny | None = None, - update: typing.Dict[str, Any] | None = None, # noqa UP006 - deep: bool = False, - ) -> Model: # pragma: no cover - """Returns a copy of the model. - - !!! warning "Deprecated" - This method is now deprecated; use `model_copy` instead. - - If you need `include` or `exclude`, use: - - ```py - data = self.model_dump(include=include, exclude=exclude, round_trip=True) - data = {**data, **(update or {})} - copied = self.model_validate(data) - ``` - - Args: - include: Optional set or mapping specifying which fields to include in the copied model. - exclude: Optional set or mapping specifying which fields to exclude in the copied model. - update: Optional dictionary of field-value pairs to override field values in the copied model. - deep: If True, the values of fields that are Pydantic models will be deep-copied. - - Returns: - A copy of the model with included, excluded and updated fields as specified. 
+ def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model': + """ + Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. + Default values are respected, but no other validation is performed. + Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + """ + m = cls.__new__(cls) + fields_values: Dict[str, Any] = {} + for name, field in cls.__fields__.items(): + if field.alt_alias and field.alias in values: + fields_values[name] = values[field.alias] + elif name in values: + fields_values[name] = values[name] + elif not field.required: + fields_values[name] = field.get_default() + fields_values.update(values) + object_setattr(m, '__dict__', fields_values) + if _fields_set is None: + _fields_set = set(values.keys()) + object_setattr(m, '__fields_set__', _fields_set) + m._init_private_attributes() + return m + + def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model': + if deep: + # chances of having empty dict here are quite low for using smart_deepcopy + values = deepcopy(values) + + cls = self.__class__ + m = cls.__new__(cls) + object_setattr(m, '__dict__', values) + object_setattr(m, '__fields_set__', fields_set) + for name in self.__private_attributes__: + value = getattr(self, name, Undefined) + if value is not Undefined: + if deep: + value = deepcopy(value) + object_setattr(m, name, value) + + return m + + def copy( + self: 'Model', + *, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + update: Optional['DictStrAny'] = None, + deep: bool = False, + ) -> 'Model': + """ + Duplicate a model, optionally choose which fields to include, exclude and change. 
+ + :param include: fields to include in new model + :param exclude: fields to exclude from new model, as with values this takes precedence over include + :param update: values to change/add in the new model. Note: the data is not validated before creating + the new model: you should trust this data + :param deep: set to `True` to make a deep copy of the model + :return: new model instance """ - warnings.warn( - 'The `copy` method is deprecated; use `model_copy` instead. ' - 'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.', - category=PydanticDeprecatedSince20, - ) - from .deprecated import copy_internals values = dict( - copy_internals._iter( - self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False - ), + self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False), **(update or {}), ) - if self.__pydantic_private__ is None: - private = None - else: - private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined} - if self.__pydantic_extra__ is None: - extra: dict[str, Any] | None = None - else: - extra = self.__pydantic_extra__.copy() - for k in list(self.__pydantic_extra__): - if k not in values: # k was in the exclude - extra.pop(k) - for k in list(values): - if k in self.__pydantic_extra__: # k must have come from extra - extra[k] = values.pop(k) - - # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg + # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg if update: - fields_set = self.__pydantic_fields_set__ | update.keys() + fields_set = self.__fields_set__ | update.keys() else: - fields_set = set(self.__pydantic_fields_set__) + fields_set = set(self.__fields_set__) + + return self._copy_and_set_values(values, fields_set, deep=deep) + + @classmethod + def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) 
-> 'DictStrAny': + cached = cls.__schema_cache__.get((by_alias, ref_template)) + if cached is not None: + return cached + s = model_schema(cls, by_alias=by_alias, ref_template=ref_template) + cls.__schema_cache__[(by_alias, ref_template)] = s + return s + + @classmethod + def schema_json( + cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any + ) -> str: + from .json import pydantic_encoder + + return cls.__config__.json_dumps( + cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls: Type['Model'], value: Any) -> 'Model': + if isinstance(value, cls): + copy_on_model_validation = cls.__config__.copy_on_model_validation + # whether to deep or shallow copy the model on validation, None means do not copy + deep_copy: Optional[bool] = None + if copy_on_model_validation not in {'deep', 'shallow', 'none'}: + # Warn about deprecated behavior + warnings.warn( + "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning + ) + if copy_on_model_validation: + deep_copy = False + + if copy_on_model_validation == 'shallow': + # shallow copy + deep_copy = False + elif copy_on_model_validation == 'deep': + # deep copy + deep_copy = True + + if deep_copy is None: + return value + else: + return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy) + + value = cls._enforce_dict_if_root(value) + + if isinstance(value, dict): + return cls(**value) + elif cls.__config__.orm_mode: + return cls.from_orm(value) + else: + try: + value_as_dict = dict(value) + except (TypeError, ValueError) as e: + raise DictError() from e + return cls(**value_as_dict) + + @classmethod + def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict: + if isinstance(obj, GetterDict): + return obj + return 
cls.__config__.getter_dict(obj) + + @classmethod + @no_type_check + def _get_value( + cls, + v: Any, + to_dict: bool, + by_alias: bool, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], + exclude_unset: bool, + exclude_defaults: bool, + exclude_none: bool, + ) -> Any: + + if isinstance(v, BaseModel): + if to_dict: + v_dict = v.dict( + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=include, + exclude=exclude, + exclude_none=exclude_none, + ) + if ROOT_KEY in v_dict: + return v_dict[ROOT_KEY] + return v_dict + else: + return v.copy(include=include, exclude=exclude) + + value_exclude = ValueItems(v, exclude) if exclude else None + value_include = ValueItems(v, include) if include else None + + if isinstance(v, dict): + return { + k_: cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(k_), + exclude=value_exclude and value_exclude.for_element(k_), + exclude_none=exclude_none, + ) + for k_, v_ in v.items() + if (not value_exclude or not value_exclude.is_excluded(k_)) + and (not value_include or value_include.is_included(k_)) + } + + elif sequence_like(v): + seq_args = ( + cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(i), + exclude=value_exclude and value_exclude.for_element(i), + exclude_none=exclude_none, + ) + for i, v_ in enumerate(v) + if (not value_exclude or not value_exclude.is_excluded(i)) + and (not value_include or value_include.is_included(i)) + ) + + return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args) + + elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False): + return v.value + + else: + return v + + 
@classmethod + def __try_update_forward_refs__(cls, **localns: Any) -> None: + """ + Same as update_forward_refs but will not raise exception + when forward references are not defined. + """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,)) + + @classmethod + def update_forward_refs(cls, **localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this Model, globalns and localns. + """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns) + + def __iter__(self) -> 'TupleGenerator': + """ + so `dict(model)` works + """ + yield from self.__dict__.items() + + def _iter( + self, + to_dict: bool = False, + by_alias: bool = False, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> 'TupleGenerator': + + # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. + # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. 
+ if exclude is not None or self.__exclude_fields__ is not None: + exclude = ValueItems.merge(self.__exclude_fields__, exclude) + + if include is not None or self.__include_fields__ is not None: + include = ValueItems.merge(self.__include_fields__, include, intersect=True) + + allowed_keys = self._calculate_keys( + include=include, exclude=exclude, exclude_unset=exclude_unset # type: ignore + ) + if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): + # huge boost for plain _iter() + yield from self.__dict__.items() + return + + value_exclude = ValueItems(self, exclude) if exclude is not None else None + value_include = ValueItems(self, include) if include is not None else None + + for field_key, v in self.__dict__.items(): + if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): + continue + + if exclude_defaults: + model_field = self.__fields__.get(field_key) + if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v: + continue + + if by_alias and field_key in self.__fields__: + dict_key = self.__fields__[field_key].alias + else: + dict_key = field_key + + if to_dict or value_include or value_exclude: + v = self._get_value( + v, + to_dict=to_dict, + by_alias=by_alias, + include=value_include and value_include.for_element(field_key), + exclude=value_exclude and value_exclude.for_element(field_key), + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + yield dict_key, v + + def _calculate_keys( + self, + include: Optional['MappingIntStrAny'], + exclude: Optional['MappingIntStrAny'], + exclude_unset: bool, + update: Optional['DictStrAny'] = None, + ) -> Optional[AbstractSet[str]]: + if include is None and exclude is None and exclude_unset is False: + return None + + keys: AbstractSet[str] + if exclude_unset: + keys = self.__fields_set__.copy() + else: + keys = self.__dict__.keys() + 
+ if include is not None: + keys &= include.keys() + + if update: + keys -= update.keys() - # removing excluded fields from `__pydantic_fields_set__` if exclude: - fields_set -= set(exclude) + keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)} - return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep) + return keys - @classmethod - @typing_extensions.deprecated('The `schema` method is deprecated; use `model_json_schema` instead.', category=None) - def schema( # noqa: D102 - cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE - ) -> typing.Dict[str, Any]: # noqa UP006 - warnings.warn( - 'The `schema` method is deprecated; use `model_json_schema` instead.', category=PydanticDeprecatedSince20 - ) - return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template) + def __eq__(self, other: Any) -> bool: + if isinstance(other, BaseModel): + return self.dict() == other.dict() + else: + return self.dict() == other - @classmethod - @typing_extensions.deprecated( - 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.', - category=None, - ) - def schema_json( # noqa: D102 - cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any - ) -> str: # pragma: no cover - warnings.warn( - 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.', - category=PydanticDeprecatedSince20, - ) - import json - - from .deprecated.json import pydantic_encoder - - return json.dumps( - cls.model_json_schema(by_alias=by_alias, ref_template=ref_template), - default=pydantic_encoder, - **dumps_kwargs, - ) - - @classmethod - @typing_extensions.deprecated('The `validate` method is deprecated; use `model_validate` instead.', category=None) - def validate(cls: type[Model], value: Any) -> Model: # noqa: D102 - warnings.warn( - 'The `validate` method is deprecated; use `model_validate` instead.', 
category=PydanticDeprecatedSince20 - ) - return cls.model_validate(value) - - @classmethod - @typing_extensions.deprecated( - 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.', - category=None, - ) - def update_forward_refs(cls, **localns: Any) -> None: # noqa: D102 - warnings.warn( - 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.', - category=PydanticDeprecatedSince20, - ) - if localns: # pragma: no cover - raise TypeError('`localns` arguments are not longer accepted.') - cls.model_rebuild(force=True) - - @typing_extensions.deprecated( - 'The private method `_iter` will be removed and should no longer be used.', category=None - ) - def _iter(self, *args: Any, **kwargs: Any) -> Any: - warnings.warn( - 'The private method `_iter` will be removed and should no longer be used.', - category=PydanticDeprecatedSince20, - ) - from .deprecated import copy_internals - - return copy_internals._iter(self, *args, **kwargs) - - @typing_extensions.deprecated( - 'The private method `_copy_and_set_values` will be removed and should no longer be used.', - category=None, - ) - def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any: - warnings.warn( - 'The private method `_copy_and_set_values` will be removed and should no longer be used.', - category=PydanticDeprecatedSince20, - ) - from .deprecated import copy_internals - - return copy_internals._copy_and_set_values(self, *args, **kwargs) - - @classmethod - @typing_extensions.deprecated( - 'The private method `_get_value` will be removed and should no longer be used.', - category=None, - ) - def _get_value(cls, *args: Any, **kwargs: Any) -> Any: - warnings.warn( - 'The private method `_get_value` will be removed and should no longer be used.', - category=PydanticDeprecatedSince20, - ) - from .deprecated import copy_internals - - return copy_internals._get_value(cls, *args, **kwargs) - - @typing_extensions.deprecated( - 'The private method `_calculate_keys` will 
be removed and should no longer be used.', - category=None, - ) - def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any: - warnings.warn( - 'The private method `_calculate_keys` will be removed and should no longer be used.', - category=PydanticDeprecatedSince20, - ) - from .deprecated import copy_internals - - return copy_internals._calculate_keys(self, *args, **kwargs) + def __repr_args__(self) -> 'ReprArgs': + return [ + (k, v) + for k, v in self.__dict__.items() + if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr) + ] -@typing.overload +_is_base_model_class_defined = True + + +@overload def create_model( __model_name: str, *, - __config__: ConfigDict | None = None, - __doc__: str | None = None, + __config__: Optional[Type[BaseConfig]] = None, __base__: None = None, __module__: str = __name__, - __validators__: dict[str, classmethod] | None = None, - __cls_kwargs__: dict[str, Any] | None = None, + __validators__: Dict[str, 'AnyClassMethod'] = None, + __cls_kwargs__: Dict[str, Any] = None, **field_definitions: Any, -) -> type[BaseModel]: +) -> Type['BaseModel']: ... -@typing.overload +@overload def create_model( __model_name: str, *, - __config__: ConfigDict | None = None, - __doc__: str | None = None, - __base__: type[Model] | tuple[type[Model], ...], + __config__: Optional[Type[BaseConfig]] = None, + __base__: Union[Type['Model'], Tuple[Type['Model'], ...]], __module__: str = __name__, - __validators__: dict[str, classmethod] | None = None, - __cls_kwargs__: dict[str, Any] | None = None, + __validators__: Dict[str, 'AnyClassMethod'] = None, + __cls_kwargs__: Dict[str, Any] = None, **field_definitions: Any, -) -> type[Model]: +) -> Type['Model']: ... -def create_model( # noqa: C901 +def create_model( __model_name: str, *, - __config__: ConfigDict | None = None, - __doc__: str | None = None, - __base__: type[Model] | tuple[type[Model], ...] 
| None = None, - __module__: str | None = None, - __validators__: dict[str, classmethod] | None = None, - __cls_kwargs__: dict[str, Any] | None = None, - __slots__: tuple[str, ...] | None = None, + __config__: Optional[Type[BaseConfig]] = None, + __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None, + __module__: str = __name__, + __validators__: Dict[str, 'AnyClassMethod'] = None, + __cls_kwargs__: Dict[str, Any] = None, + __slots__: Optional[Tuple[str, ...]] = None, **field_definitions: Any, -) -> type[Model]: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/models/#dynamic-model-creation - - Dynamically creates and returns a new Pydantic model, in other words, `create_model` dynamically creates a - subclass of [`BaseModel`][pydantic.BaseModel]. - - Args: - __model_name: The name of the newly created model. - __config__: The configuration of the new model. - __doc__: The docstring of the new model. - __base__: The base class or classes for the new model. - __module__: The name of the module that the model belongs to; - if `None`, the value is taken from `sys._getframe(1)` - __validators__: A dictionary of methods that validate fields. - __cls_kwargs__: A dictionary of keyword arguments for class creation, such as `metaclass`. - __slots__: Deprecated. Should not be passed to `create_model`. - **field_definitions: Attributes of the new model. They should be passed in the format: - `=(, )` or `=(, )`. - - Returns: - The new [model][pydantic.BaseModel]. - - Raises: - PydanticUserError: If `__base__` and `__config__` are both passed. +) -> Type['Model']: + """ + Dynamically create a model. 
+ :param __model_name: name of the created model + :param __config__: config class to use for the new model + :param __base__: base class for the new model to inherit from + :param __module__: module of the created model + :param __validators__: a dict of method names and @validator class methods + :param __cls_kwargs__: a dict for class creation + :param __slots__: Deprecated, `__slots__` should not be passed to `create_model` + :param field_definitions: fields of the model (or extra fields if a base is supplied) + in the format `=(, )` or `=, e.g. + `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format + `=` or `=(, )`, e.g. + `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or + `foo=(str, FieldInfo(title='Foo'))` """ if __slots__ is not None: # __slots__ will be ignored from here on @@ -1436,14 +982,11 @@ def create_model( # noqa: C901 if __base__ is not None: if __config__ is not None: - raise PydanticUserError( - 'to avoid confusion `__config__` and `__base__` cannot be used together', - code='create-model-config-base', - ) + raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together') if not isinstance(__base__, tuple): __base__ = (__base__,) else: - __base__ = (typing.cast(typing.Type['Model'], BaseModel),) + __base__ = (cast(Type['Model'], BaseModel),) __cls_kwargs__ = __cls_kwargs__ or {} @@ -1451,16 +994,16 @@ def create_model( # noqa: C901 annotations = {} for f_name, f_def in field_definitions.items(): - if not _fields.is_valid_field_name(f_name): + if not is_valid_field(f_name): warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning) if isinstance(f_def, tuple): - f_def = typing.cast('tuple[str, Any]', f_def) try: f_annotation, f_value = f_def except ValueError as e: - raise PydanticUserError( - 'Field definitions should be a `(, )`.', - code='create-model-field-definitions', + raise ConfigError( + 'field definitions should either be a tuple of 
(, ) or just a ' + 'default value, unfortunately this means tuples as ' + 'default values are not allowed' ) from e else: f_annotation, f_value = None, f_def @@ -1469,32 +1012,98 @@ def create_model( # noqa: C901 annotations[f_name] = f_annotation fields[f_name] = f_value - if __module__ is None: - f = sys._getframe(1) - __module__ = f.f_globals['__name__'] - - namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__} - if __doc__: - namespace.update({'__doc__': __doc__}) + namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__} if __validators__: namespace.update(__validators__) namespace.update(fields) if __config__: - namespace['model_config'] = _config.ConfigWrapper(__config__).config_dict - resolved_bases = types.resolve_bases(__base__) - meta, ns, kwds = types.prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__) + namespace['Config'] = inherit_config(__config__, BaseConfig) + resolved_bases = resolve_bases(__base__) + meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__) if resolved_bases is not __base__: ns['__orig_bases__'] = __base__ namespace.update(ns) - - return meta( - __model_name, - resolved_bases, - namespace, - __pydantic_reset_parent_namespace__=False, - _create_model_module=__module__, - **kwds, - ) + return meta(__model_name, resolved_bases, namespace, **kwds) -__getattr__ = getattr_migration(__name__) +_missing = object() + + +def validate_model( # noqa: C901 (ignore complexity) + model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None +) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]: + """ + validate data against a model. 
+ """ + values = {} + errors = [] + # input_data names, possibly alias + names_used = set() + # field names, never aliases + fields_set = set() + config = model.__config__ + check_extra = config.extra is not Extra.ignore + cls_ = cls or model + + for validator in model.__pre_root_validators__: + try: + input_data = validator(cls_, input_data) + except (ValueError, TypeError, AssertionError) as exc: + return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_) + + for name, field in model.__fields__.items(): + value = input_data.get(field.alias, _missing) + using_name = False + if value is _missing and config.allow_population_by_field_name and field.alt_alias: + value = input_data.get(field.name, _missing) + using_name = True + + if value is _missing: + if field.required: + errors.append(ErrorWrapper(MissingError(), loc=field.alias)) + continue + + value = field.get_default() + + if not config.validate_all and not field.validate_always: + values[name] = value + continue + else: + fields_set.add(name) + if check_extra: + names_used.add(field.name if using_name else field.alias) + + v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[name] = v_ + + if check_extra: + if isinstance(input_data, GetterDict): + extra = input_data.extra_keys() - names_used + else: + extra = input_data.keys() - names_used + if extra: + fields_set |= extra + if config.extra is Extra.allow: + for f in extra: + values[f] = input_data[f] + else: + for f in sorted(extra): + errors.append(ErrorWrapper(ExtraError(), loc=f)) + + for skip_on_failure, validator in model.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + values = validator(cls_, values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + + if errors: + return values, fields_set, 
ValidationError(errors, cls_) + else: + return values, fields_set, None diff --git a/lib/pydantic/mypy.py b/lib/pydantic/mypy.py index 0e70eab5..6bd9db18 100644 --- a/lib/pydantic/mypy.py +++ b/lib/pydantic/mypy.py @@ -1,13 +1,8 @@ -"""This module includes classes and functions designed specifically for use with the mypy plugin.""" - -from __future__ import annotations - import sys from configparser import ConfigParser -from typing import Any, Callable, Iterator +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union from mypy.errorcodes import ErrorCode -from mypy.expandtype import expand_type, expand_type_by_instance from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -22,23 +17,21 @@ from mypy.nodes import ( ClassDef, Context, Decorator, - DictExpr, EllipsisExpr, - Expression, + FuncBase, FuncDef, - IfStmt, JsonDict, MemberExpr, NameExpr, PassStmt, PlaceholderNode, RefExpr, - Statement, StrExpr, + SymbolNode, SymbolTableNode, TempNode, - TypeAlias, TypeInfo, + TypeVarExpr, Var, ) from mypy.options import Options @@ -48,17 +41,11 @@ from mypy.plugin import ( FunctionContext, MethodContext, Plugin, - ReportConfigContext, SemanticAnalyzerPluginInterface, ) from mypy.plugins import dataclasses -from mypy.plugins.common import ( - deserialize_and_fixup_type, -) -from mypy.semanal import set_callable_name +from mypy.semanal import set_callable_name # type: ignore from mypy.server.trigger import make_wildcard_trigger -from mypy.state import state -from mypy.typeops import map_type_from_supertype from mypy.types import ( AnyType, CallableType, @@ -76,131 +63,76 @@ from mypy.typevars import fill_typevars from mypy.util import get_unique_redefinition_name from mypy.version import __version__ as mypy_version -from pydantic._internal import _fields -from pydantic.version import parse_mypy_version +from pydantic.utils import is_valid_field try: from mypy.types import TypeVarDef # type: ignore[attr-defined] except ImportError: # 
pragma: no cover - # Backward-compatible with TypeVarDef from Mypy 0.930. + # Backward-compatible with TypeVarDef from Mypy 0.910. from mypy.types import TypeVarType as TypeVarDef CONFIGFILE_KEY = 'pydantic-mypy' METADATA_KEY = 'pydantic-mypy-metadata' BASEMODEL_FULLNAME = 'pydantic.main.BaseModel' -BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings' -ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel' -MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass' +BASESETTINGS_FULLNAME = 'pydantic.env_settings.BaseSettings' FIELD_FULLNAME = 'pydantic.fields.Field' DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass' -MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator' -DECORATOR_FULLNAMES = { - 'pydantic.functional_validators.field_validator', - 'pydantic.functional_validators.model_validator', - 'pydantic.functional_serializers.serializer', - 'pydantic.functional_serializers.model_serializer', - 'pydantic.deprecated.class_validators.validator', - 'pydantic.deprecated.class_validators.root_validator', -} + + +def parse_mypy_version(version: str) -> Tuple[int, ...]: + return tuple(int(part) for part in version.split('+', 1)[0].split('.')) MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version) BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__' -# Increment version if plugin changes and mypy caches should be invalidated -__version__ = 2 - -def plugin(version: str) -> type[Plugin]: - """`version` is the mypy version string. +def plugin(version: str) -> 'TypingType[Plugin]': + """ + `version` is the mypy version string We might want to use this to print a warning if the mypy version being used is newer, or especially older, than we expect (or need). - - Args: - version: The mypy version string. - - Return: - The Pydantic mypy plugin type. 
""" return PydanticPlugin -class _DeferAnalysis(Exception): - pass - - class PydanticPlugin(Plugin): - """The Pydantic mypy plugin.""" - def __init__(self, options: Options) -> None: self.plugin_config = PydanticPluginConfig(options) - self._plugin_data = self.plugin_config.to_data() super().__init__(options) - def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], bool] | None: - """Update Pydantic model class.""" + def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]': sym = self.lookup_fully_qualified(fullname) if sym and isinstance(sym.node, TypeInfo): # pragma: no branch # No branching may occur if the mypy cache has not been cleared - if any(base.fullname == BASEMODEL_FULLNAME for base in sym.node.mro): + if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro): return self._pydantic_model_class_maker_callback return None - def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: - """Update Pydantic `ModelMetaclass` definition.""" - if fullname == MODEL_METACLASS_FULLNAME: - return self._pydantic_model_metaclass_marker_callback - return None - - def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: - """Adjust the return type of the `Field` function.""" + def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]': sym = self.lookup_fully_qualified(fullname) if sym and sym.fullname == FIELD_FULLNAME: return self._pydantic_field_callback return None - def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: - """Adjust return type of `from_orm` method call.""" + def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]: if fullname.endswith('.from_orm'): - return from_attributes_callback + return from_orm_callback return None - def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: - """Mark 
pydantic.dataclasses as dataclass. - - Mypy version 1.1.1 added support for `@dataclass_transform` decorator. - """ - if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1): + def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: + if fullname == DATACLASS_FULLNAME: return dataclasses.dataclass_class_maker_callback # type: ignore[return-value] return None - def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]: - """Return all plugin config data. + def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None: + transformer = PydanticModelTransformer(ctx, self.plugin_config) + transformer.transform() - Used by mypy to determine if cache needs to be discarded. + def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type': """ - return self._plugin_data - - def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> bool: - transformer = PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config) - return transformer.transform() - - def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None: - """Reset dataclass_transform_spec attribute of ModelMetaclass. - - Let the plugin handle it. This behavior can be disabled - if 'debug_dataclass_transform' is set to True', for testing purposes. - """ - if self.plugin_config.debug_dataclass_transform: - return - info_metaclass = ctx.cls.info.declared_metaclass - assert info_metaclass, "callback not passed from 'get_metaclass_hook'" - if getattr(info_metaclass.type, 'dataclass_transform_spec', None): - info_metaclass.type.dataclass_transform_spec = None - - def _pydantic_field_callback(self, ctx: FunctionContext) -> Type: - """Extract the type of the `default` argument from the Field function, and use it as the return type. + Extract the type of the `default` argument from the Field function, and use it as the return type. 
In particular: * Check whether the default and default_factory argument is specified. @@ -232,7 +164,11 @@ class PydanticPlugin(Plugin): # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter # Pydantic calls the default factory without any argument, so we retrieve the first item if isinstance(default_factory_type, Overloaded): - default_factory_type = default_factory_type.items[0] + if MYPY_VERSION_TUPLE > (0, 910): + default_factory_type = default_factory_type.items[0] + else: + # Mypy0.910 exposes the items of overloaded types in a function + default_factory_type = default_factory_type.items()[0] # type: ignore[operator] if isinstance(default_factory_type, CallableType): ret_type = default_factory_type.ret_type @@ -249,26 +185,11 @@ class PydanticPlugin(Plugin): class PydanticPluginConfig: - """A Pydantic mypy plugin config holder. - - Attributes: - init_forbid_extra: Whether to add a `**kwargs` at the end of the generated `__init__` signature. - init_typed: Whether to annotate fields in the generated `__init__`. - warn_required_dynamic_aliases: Whether to raise required dynamic aliases error. - debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute - of `ModelMetaclass` for testing purposes. 
- """ - - __slots__ = ( - 'init_forbid_extra', - 'init_typed', - 'warn_required_dynamic_aliases', - 'debug_dataclass_transform', - ) + __slots__ = ('init_forbid_extra', 'init_typed', 'warn_required_dynamic_aliases', 'warn_untyped_fields') init_forbid_extra: bool init_typed: bool warn_required_dynamic_aliases: bool - debug_dataclass_transform: bool # undocumented + warn_untyped_fields: bool def __init__(self, options: Options) -> None: if options.config_file is None: # pragma: no cover @@ -289,724 +210,343 @@ class PydanticPluginConfig: setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False) setattr(self, key, setting) - def to_data(self) -> dict[str, Any]: - """Returns a dict of config names to their values.""" - return {key: getattr(self, key) for key in self.__slots__} - -def from_attributes_callback(ctx: MethodContext) -> Type: - """Raise an error if from_attributes is not enabled.""" +def from_orm_callback(ctx: MethodContext) -> Type: + """ + Raise an error if orm_mode is not enabled + """ model_type: Instance - ctx_type = ctx.type - if isinstance(ctx_type, TypeType): - ctx_type = ctx_type.item - if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance): - model_type = ctx_type.ret_type # called on the class - elif isinstance(ctx_type, Instance): - model_type = ctx_type # called on an instance (unusual, but still valid) + if isinstance(ctx.type, CallableType) and isinstance(ctx.type.ret_type, Instance): + model_type = ctx.type.ret_type # called on the class + elif isinstance(ctx.type, Instance): + model_type = ctx.type # called on an instance (unusual, but still valid) else: # pragma: no cover - detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})' + detail = f'ctx.type: {ctx.type} (of type {ctx.type.__class__.__name__})' error_unexpected_behavior(detail, ctx.api, ctx.context) return ctx.default_return_type pydantic_metadata = model_type.type.metadata.get(METADATA_KEY) if pydantic_metadata is None: 
return ctx.default_return_type - from_attributes = pydantic_metadata.get('config', {}).get('from_attributes') - if from_attributes is not True: - error_from_attributes(model_type.type.name, ctx.api, ctx.context) + orm_mode = pydantic_metadata.get('config', {}).get('orm_mode') + if orm_mode is not True: + error_from_orm(get_name(model_type.type), ctx.api, ctx.context) return ctx.default_return_type -class PydanticModelField: - """Based on mypy.plugins.dataclasses.DataclassAttribute.""" - - def __init__( - self, - name: str, - alias: str | None, - has_dynamic_alias: bool, - has_default: bool, - line: int, - column: int, - type: Type | None, - info: TypeInfo, - ): - self.name = name - self.alias = alias - self.has_dynamic_alias = has_dynamic_alias - self.has_default = has_default - self.line = line - self.column = column - self.type = type - self.info = info - - def to_argument( - self, - current_info: TypeInfo, - typed: bool, - force_optional: bool, - use_alias: bool, - api: SemanticAnalyzerPluginInterface, - ) -> Argument: - """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument.""" - variable = self.to_var(current_info, api, use_alias) - type_annotation = self.expand_type(current_info, api) if typed else AnyType(TypeOfAny.explicit) - return Argument( - variable=variable, - type_annotation=type_annotation, - initializer=None, - kind=ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED, - ) - - def expand_type(self, current_info: TypeInfo, api: SemanticAnalyzerPluginInterface) -> Type | None: - """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type.""" - # The getattr in the next line is used to prevent errors in legacy versions of mypy without this attribute - if self.type is not None and getattr(self.info, 'self_type', None) is not None: - # In general, it is not safe to call `expand_type()` during semantic analyzis, - # however this plugin is called very late, so all types should be fully ready. 
- # Also, it is tricky to avoid eager expansion of Self types here (e.g. because - # we serialize attributes). - expanded_type = expand_type(self.type, {self.info.self_type.id: fill_typevars(current_info)}) - if isinstance(self.type, UnionType) and not isinstance(expanded_type, UnionType): - if not api.final_iteration: - raise _DeferAnalysis() - return expanded_type - return self.type - - def to_var(self, current_info: TypeInfo, api: SemanticAnalyzerPluginInterface, use_alias: bool) -> Var: - """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var.""" - if use_alias and self.alias is not None: - name = self.alias - else: - name = self.name - - return Var(name, self.expand_type(current_info, api)) - - def serialize(self) -> JsonDict: - """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.""" - assert self.type - return { - 'name': self.name, - 'alias': self.alias, - 'has_dynamic_alias': self.has_dynamic_alias, - 'has_default': self.has_default, - 'line': self.line, - 'column': self.column, - 'type': self.type.serialize(), - } - - @classmethod - def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField: - """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.""" - data = data.copy() - typ = deserialize_and_fixup_type(data.pop('type'), api) - return cls(type=typ, info=info, **data) - - def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None: - """Expands type vars in the context of a subtype when an attribute is inherited - from a generic super type. - """ - if self.type is not None: - self.type = map_type_from_supertype(self.type, sub_type, self.info) - - -class PydanticModelClassVar: - """Based on mypy.plugins.dataclasses.DataclassAttribute. - - ClassVars are ignored by subclasses. 
- - Attributes: - name: the ClassVar name - """ - - def __init__(self, name): - self.name = name - - @classmethod - def deserialize(cls, data: JsonDict) -> PydanticModelClassVar: - """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.""" - data = data.copy() - return cls(**data) - - def serialize(self) -> JsonDict: - """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.""" - return { - 'name': self.name, - } - - class PydanticModelTransformer: - """Transform the BaseModel subclass according to the plugin settings. - - Attributes: - tracked_config_fields: A set of field configs that the plugin has to track their value. - """ - - tracked_config_fields: set[str] = { + tracked_config_fields: Set[str] = { 'extra', + 'allow_mutation', 'frozen', - 'from_attributes', - 'populate_by_name', + 'orm_mode', + 'allow_population_by_field_name', 'alias_generator', } - def __init__( - self, - cls: ClassDef, - reason: Expression | Statement, - api: SemanticAnalyzerPluginInterface, - plugin_config: PydanticPluginConfig, - ) -> None: - self._cls = cls - self._reason = reason - self._api = api - + def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None: + self._ctx = ctx self.plugin_config = plugin_config - def transform(self) -> bool: - """Configures the BaseModel subclass according to the plugin settings. + def transform(self) -> None: + """ + Configures the BaseModel subclass according to the plugin settings. 
In particular: - * determines the model config and fields, * adds a fields-aware signature for the initializer and construct methods - * freezes the class if frozen = True + * freezes the class if allow_mutation = False or frozen = True * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses """ - info = self._cls.info - is_root_model = any(ROOT_MODEL_FULLNAME in base.fullname for base in info.mro[:-1]) + ctx = self._ctx + info = self._ctx.cls.info + + self.adjust_validator_signatures() config = self.collect_config() - fields, class_vars = self.collect_fields_and_class_vars(config, is_root_model) - if fields is None or class_vars is None: - # Some definitions are not ready. We need another pass. - return False + fields = self.collect_fields(config) for field in fields: - if field.type is None: - return False - - is_settings = any(base.fullname == BASESETTINGS_FULLNAME for base in info.mro[:-1]) - try: - self.add_initializer(fields, config, is_settings, is_root_model) - self.add_model_construct_method(fields, config, is_settings) - self.set_frozen(fields, self._api, frozen=config.frozen is True) - except _DeferAnalysis: - if not self._api.final_iteration: - self._api.defer() - - self.adjust_decorator_signatures() - + if info[field.name].type is None: + if not ctx.api.final_iteration: + ctx.api.defer() + is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1]) + self.add_initializer(fields, config, is_settings) + self.add_construct_method(fields) + self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True) info.metadata[METADATA_KEY] = { 'fields': {field.name: field.serialize() for field in fields}, - 'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars}, - 'config': config.get_values_dict(), + 'config': config.set_values_dict(), } - return True + def adjust_validator_signatures(self) -> None: + """When we decorate a function `f` 
with `pydantic.validator(...), mypy sees + `f` as a regular method taking a `self` instance, even though pydantic + internally wraps `f` with `classmethod` if necessary. - def adjust_decorator_signatures(self) -> None: - """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator` - or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance, - even though pydantic internally wraps `f` with `classmethod` if necessary. - - Teach mypy this by marking any function whose outermost decorator is a `validator()`, - `field_validator()` or `serializer()` call as a `classmethod`. + Teach mypy this by marking any function whose outermost decorator is a + `validator()` call as a classmethod. """ - for name, sym in self._cls.info.names.items(): + for name, sym in self._ctx.cls.info.names.items(): if isinstance(sym.node, Decorator): first_dec = sym.node.original_decorators[0] if ( isinstance(first_dec, CallExpr) and isinstance(first_dec.callee, NameExpr) - and first_dec.callee.fullname in DECORATOR_FULLNAMES - # @model_validator(mode="after") is an exception, it expects a regular method - and not ( - first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME - and any( - first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after' - for i, arg in enumerate(first_dec.args) - ) - ) + and first_dec.callee.fullname == 'pydantic.class_validators.validator' ): - # TODO: Only do this if the first argument of the decorated function is `cls` sym.node.func.is_class = True - def collect_config(self) -> ModelConfigData: # noqa: C901 (ignore complexity) - """Collects the values of the config attributes that are used by the plugin, accounting for parent classes.""" - cls = self._cls + def collect_config(self) -> 'ModelConfigData': + """ + Collects the values of the config attributes that are used by the plugin, accounting for parent classes. 
+ """ + ctx = self._ctx + cls = ctx.cls config = ModelConfigData() - - has_config_kwargs = False - has_config_from_namespace = False - - # Handle `class MyModel(BaseModel, =, ...):` - for name, expr in cls.keywords.items(): - config_data = self.get_config_update(name, expr) - if config_data: - has_config_kwargs = True - config.update(config_data) - - # Handle `model_config` - stmt: Statement | None = None for stmt in cls.defs.body: - if not isinstance(stmt, (AssignmentStmt, ClassDef)): + if not isinstance(stmt, ClassDef): continue - - if isinstance(stmt, AssignmentStmt): - lhs = stmt.lvalues[0] - if not isinstance(lhs, NameExpr) or lhs.name != 'model_config': - continue - - if isinstance(stmt.rvalue, CallExpr): # calls to `dict` or `ConfigDict` - for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args): - if arg_name is None: - continue - config.update(self.get_config_update(arg_name, arg)) - elif isinstance(stmt.rvalue, DictExpr): # dict literals - for key_expr, value_expr in stmt.rvalue.items: - if not isinstance(key_expr, StrExpr): - continue - config.update(self.get_config_update(key_expr.value, value_expr)) - - elif isinstance(stmt, ClassDef): - if stmt.name != 'Config': # 'deprecated' Config-class - continue + if stmt.name == 'Config': for substmt in stmt.defs.body: if not isinstance(substmt, AssignmentStmt): continue - lhs = substmt.lvalues[0] - if not isinstance(lhs, NameExpr): - continue - config.update(self.get_config_update(lhs.name, substmt.rvalue)) - - if has_config_kwargs: - self._api.fail( - 'Specifying config in two places is ambiguous, use either Config attribute or class kwargs', - cls, - ) - break - - has_config_from_namespace = True - - if has_config_kwargs or has_config_from_namespace: - if ( - stmt - and config.has_alias_generator - and not config.populate_by_name - and self.plugin_config.warn_required_dynamic_aliases - ): - error_required_dynamic_aliases(self._api, stmt) - + config.update(self.get_config_update(substmt)) + if ( + 
config.has_alias_generator + and not config.allow_population_by_field_name + and self.plugin_config.warn_required_dynamic_aliases + ): + error_required_dynamic_aliases(ctx.api, stmt) for info in cls.info.mro[1:]: # 0 is the current class if METADATA_KEY not in info.metadata: continue # Each class depends on the set of fields in its ancestors - self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) for name, value in info.metadata[METADATA_KEY]['config'].items(): config.setdefault(name, value) return config - def collect_fields_and_class_vars( - self, model_config: ModelConfigData, is_root_model: bool - ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]: - """Collects the fields for the model, accounting for parent classes.""" - cls = self._cls + def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']: + """ + Collects the fields for the model, accounting for parent classes + """ + # First, collect fields belonging to the current class. + ctx = self._ctx + cls = self._ctx.cls + fields = [] # type: List[PydanticModelField] + known_fields = set() # type: Set[str] + for stmt in cls.defs.body: + if not isinstance(stmt, AssignmentStmt): # `and stmt.new_syntax` to require annotation + continue - # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates. - # - # We iterate through the MRO in reverse because attrs defined in the parent must appear - # earlier in the attributes list than attrs defined in the child. See: - # https://docs.python.org/3/library/dataclasses.html#inheritance - # - # However, we also want fields defined in the subtype to override ones defined - # in the parent. We can implement this via a dict without disrupting the attr order - # because dicts preserve insertion order in Python 3.7+. 
- found_fields: dict[str, PydanticModelField] = {} - found_class_vars: dict[str, PydanticModelClassVar] = {} - for info in reversed(cls.info.mro[1:-1]): # 0 is the current class, -2 is BaseModel, -1 is object - # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata: - # # We haven't processed the base class yet. Need another pass. - # return None, None + lhs = stmt.lvalues[0] + if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name): + continue + + if not stmt.new_syntax and self.plugin_config.warn_untyped_fields: + error_untyped_fields(ctx.api, stmt) + + # if lhs.name == '__config__': # BaseConfig not well handled; I'm not sure why yet + # continue + + sym = cls.info.names.get(lhs.name) + if sym is None: # pragma: no cover + # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) + # This is the same logic used in the dataclasses plugin + continue + + node = sym.node + if isinstance(node, PlaceholderNode): # pragma: no cover + # See the PlaceholderNode docstring for more detail about how this can occur + # Basically, it is an edge case when dealing with complex import logic + # This is the same logic used in the dataclasses plugin + continue + if not isinstance(node, Var): # pragma: no cover + # Don't know if this edge case still happens with the `is_valid_field` check above + # but better safe than sorry + continue + + # x: ClassVar[int] is ignored by dataclasses. 
+ if node.is_classvar: + continue + + is_required = self.get_is_required(cls, stmt, lhs) + alias, has_dynamic_alias = self.get_alias_info(stmt) + if ( + has_dynamic_alias + and not model_config.allow_population_by_field_name + and self.plugin_config.warn_required_dynamic_aliases + ): + error_required_dynamic_aliases(ctx.api, stmt) + fields.append( + PydanticModelField( + name=lhs.name, + is_required=is_required, + alias=alias, + has_dynamic_alias=has_dynamic_alias, + line=stmt.line, + column=stmt.column, + ) + ) + known_fields.add(lhs.name) + all_fields = fields.copy() + for info in cls.info.mro[1:]: # 0 is the current class, -2 is BaseModel, -1 is object if METADATA_KEY not in info.metadata: continue - # Each class depends on the set of attributes in its dataclass ancestors. - self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + superclass_fields = [] + # Each class depends on the set of fields in its ancestors + ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) for name, data in info.metadata[METADATA_KEY]['fields'].items(): - field = PydanticModelField.deserialize(info, data, self._api) - # (The following comment comes directly from the dataclasses plugin) - # TODO: We shouldn't be performing type operations during the main - # semantic analysis pass, since some TypeInfo attributes might - # still be in flux. This should be performed in a later phase. 
- with state.strict_optional_set(self._api.options.strict_optional): - field.expand_typevar_from_subtype(cls.info) - found_fields[name] = field - - sym_node = cls.info.names.get(name) - if sym_node and sym_node.node and not isinstance(sym_node.node, Var): - self._api.fail( - 'BaseModel field may only be overridden by another field', - sym_node.node, - ) - # Collect ClassVars - for name, data in info.metadata[METADATA_KEY]['class_vars'].items(): - found_class_vars[name] = PydanticModelClassVar.deserialize(data) - - # Second, collect fields and ClassVars belonging to the current class. - current_field_names: set[str] = set() - current_class_vars_names: set[str] = set() - for stmt in self._get_assignment_statements_from_block(cls.defs): - maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars) - if isinstance(maybe_field, PydanticModelField): - lhs = stmt.lvalues[0] - if is_root_model and lhs.name != 'root': - error_extra_fields_on_root_model(self._api, stmt) + if name not in known_fields: + field = PydanticModelField.deserialize(info, data) + known_fields.add(name) + superclass_fields.append(field) else: - current_field_names.add(lhs.name) - found_fields[lhs.name] = maybe_field - elif isinstance(maybe_field, PydanticModelClassVar): - lhs = stmt.lvalues[0] - current_class_vars_names.add(lhs.name) - found_class_vars[lhs.name] = maybe_field + (field,) = (a for a in all_fields if a.name == name) + all_fields.remove(field) + superclass_fields.append(field) + all_fields = superclass_fields + all_fields + return all_fields - return list(found_fields.values()), list(found_class_vars.values()) - - def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]: - for body in stmt.body: - if not body.is_unreachable: - yield from self._get_assignment_statements_from_block(body) - if stmt.else_body is not None and not stmt.else_body.is_unreachable: - yield from 
self._get_assignment_statements_from_block(stmt.else_body) - - def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: - for stmt in block.body: - if isinstance(stmt, AssignmentStmt): - yield stmt - elif isinstance(stmt, IfStmt): - yield from self._get_assignment_statements_from_if_statement(stmt) - - def collect_field_or_class_var_from_stmt( # noqa C901 - self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar] - ) -> PydanticModelField | PydanticModelClassVar | None: - """Get pydantic model field from statement. - - Args: - stmt: The statement. - model_config: Configuration settings for the model. - class_vars: ClassVars already known to be defined on the model. - - Returns: - A pydantic model field if it could find the field in statement. Otherwise, `None`. + def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None: """ - cls = self._cls - - lhs = stmt.lvalues[0] - if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': - return None - - if not stmt.new_syntax: - if ( - isinstance(stmt.rvalue, CallExpr) - and isinstance(stmt.rvalue.callee, CallExpr) - and isinstance(stmt.rvalue.callee.callee, NameExpr) - and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES - ): - # This is a (possibly-reused) validator or serializer, not a field - # In particular, it looks something like: my_validator = validator('my_field')(f) - # Eventually, we may want to attempt to respect model_config['ignored_types'] - return None - - if lhs.name in class_vars: - # Class vars are not fields and are not required to be annotated - return None - - # The assignment does not have an annotation, and it's not anything else we recognize - error_untyped_fields(self._api, stmt) - return None - - lhs = stmt.lvalues[0] - if not isinstance(lhs, NameExpr): - return None - - if not 
_fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': - return None - - sym = cls.info.names.get(lhs.name) - if sym is None: # pragma: no cover - # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) - # This is the same logic used in the dataclasses plugin - return None - - node = sym.node - if isinstance(node, PlaceholderNode): # pragma: no cover - # See the PlaceholderNode docstring for more detail about how this can occur - # Basically, it is an edge case when dealing with complex import logic - - # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. - return None - - if isinstance(node, TypeAlias): - self._api.fail( - 'Type aliases inside BaseModel definitions are not supported at runtime', - node, - ) - # Skip processing this node. This doesn't match the runtime behaviour, - # but the only alternative would be to modify the SymbolTable, - # and it's a little hairy to do that in a plugin. - return None - - if not isinstance(node, Var): # pragma: no cover - # Don't know if this edge case still happens with the `is_valid_field` check above - # but better safe than sorry - - # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. - return None - - # x: ClassVar[int] is not a field - if node.is_classvar: - return PydanticModelClassVar(lhs.name) - - # x: InitVar[int] is not supported in BaseModel - node_type = get_proper_type(node.type) - if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar': - self._api.fail( - 'InitVar is not supported in BaseModel', - node, - ) - - has_default = self.get_has_default(stmt) - - if sym.type is None and node.is_final and node.is_inferred: - # This follows the logic from the dataclasses plugin. 
The following comment is taken verbatim: - # - # This is a special case, assignment like x: Final = 42 is classified - # annotated above, but mypy strips the `Final` turning it into x = 42. - # We do not support inferred types in dataclasses, so we can try inferring - # type for simple literals, and otherwise require an explicit type - # argument for Final[...]. - typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True) - if typ: - node.type = typ - else: - self._api.fail( - 'Need type argument for Final[...] with non-literal default in BaseModel', - stmt, - ) - node.type = AnyType(TypeOfAny.from_error) - - alias, has_dynamic_alias = self.get_alias_info(stmt) - if has_dynamic_alias and not model_config.populate_by_name and self.plugin_config.warn_required_dynamic_aliases: - error_required_dynamic_aliases(self._api, stmt) - - init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) - return PydanticModelField( - name=lhs.name, - has_dynamic_alias=has_dynamic_alias, - has_default=has_default, - alias=alias, - line=stmt.line, - column=stmt.column, - type=init_type, - info=cls.info, - ) - - def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None: - """Infer __init__ argument type for an attribute. - - In particular, possibly use the signature of __set__. - """ - default = sym.type - if sym.implicit: - return default - t = get_proper_type(sym.type) - - # Perform a simple-minded inference from the signature of __set__, if present. - # We can't use mypy.checkmember here, since this plugin runs before type checking. - # We only support some basic scanerios here, which is hopefully sufficient for - # the vast majority of use cases. 
- if not isinstance(t, Instance): - return default - setter = t.type.get('__set__') - if setter: - if isinstance(setter.node, FuncDef): - super_info = t.type.get_containing_type_info('__set__') - assert super_info - if setter.type: - setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info)) - else: - return AnyType(TypeOfAny.unannotated) - if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [ - ARG_POS, - ARG_POS, - ARG_POS, - ]: - return expand_type_by_instance(setter_type.arg_types[2], t) - else: - self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context) - else: - self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context) - - return default - - def add_initializer( - self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool - ) -> None: - """Adds a fields-aware `__init__` method to the class. + Adds a fields-aware `__init__` method to the class. The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. 
""" - if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated: - return # Don't generate an __init__ if one already exists - + ctx = self._ctx typed = self.plugin_config.init_typed - use_alias = config.populate_by_name is not True - requires_dynamic_aliases = bool(config.has_alias_generator and not config.populate_by_name) - with state.strict_optional_set(self._api.options.strict_optional): - args = self.get_field_arguments( - fields, - typed=typed, - requires_dynamic_aliases=requires_dynamic_aliases, - use_alias=use_alias, - is_settings=is_settings, - ) - - if is_root_model and MYPY_VERSION_TUPLE <= (1, 0, 1): - # convert root argument to positional argument - # This is needed because mypy support for `dataclass_transform` isn't complete on 1.0.1 - args[0].kind = ARG_POS if args[0].kind == ARG_NAMED else ARG_OPT - - if is_settings: - base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node - if '__init__' in base_settings_node.names: - base_settings_init_node = base_settings_node.names['__init__'].node - if base_settings_init_node is not None and base_settings_init_node.type is not None: - func_type = base_settings_init_node.type - for arg_idx, arg_name in enumerate(func_type.arg_names): - if arg_name.startswith('__') or not arg_name.startswith('_'): - continue - analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx]) - variable = Var(arg_name, analyzed_variable_type) - args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT)) - + use_alias = config.allow_population_by_field_name is not True + force_all_optional = is_settings or bool( + config.has_alias_generator and not config.allow_population_by_field_name + ) + init_arguments = self.get_field_arguments( + fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias + ) if not self.should_init_forbid_extra(fields, config): var = Var('kwargs') - args.append(Argument(var, AnyType(TypeOfAny.explicit), 
None, ARG_STAR2)) + init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) - add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType()) + if '__init__' not in ctx.cls.info.names: + add_method(ctx, '__init__', init_arguments, NoneType()) - def add_model_construct_method( - self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool - ) -> None: - """Adds a fully typed `model_construct` classmethod to the class. + def add_construct_method(self, fields: List['PydanticModelField']) -> None: + """ + Adds a fully typed `construct` classmethod to the class. Similar to the fields-aware __init__ method, but always uses the field names (not aliases), and does not treat settings fields as optional. """ - set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')]) + ctx = self._ctx + set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')]) optional_set_str = UnionType([set_str, NoneType()]) fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) - with state.strict_optional_set(self._api.options.strict_optional): - args = self.get_field_arguments( - fields, typed=True, requires_dynamic_aliases=False, use_alias=False, is_settings=is_settings - ) - if not self.should_init_forbid_extra(fields, config): - var = Var('kwargs') - args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) + construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False) + construct_arguments = [fields_set_argument] + construct_arguments - args = [fields_set_argument] + args + obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object') + self_tvar_name = '_PydanticBaseModel' # Make sure it does not conflict with other names in the class + tvar_fullname = ctx.cls.fullname + '.' 
+ self_tvar_name + tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type) + self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type) + ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr) + + # Backward-compatible with TypeVarDef from Mypy 0.910. + if isinstance(tvd, TypeVarType): + self_type = tvd + else: + self_type = TypeVarType(tvd) # type: ignore[call-arg] add_method( - self._api, - self._cls, - 'model_construct', - args=args, - return_type=fill_typevars(self._cls.info), + ctx, + 'construct', + construct_arguments, + return_type=self_type, + self_type=self_type, + tvar_def=tvd, is_classmethod=True, ) - def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None: - """Marks all fields as properties so that attempts to set them trigger mypy errors. + def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None: + """ + Marks all fields as properties so that attempts to set them trigger mypy errors. This is the same approach used by the attrs and dataclasses plugins. """ - info = self._cls.info + info = self._ctx.cls.info for field in fields: sym_node = info.names.get(field.name) if sym_node is not None: var = sym_node.node - if isinstance(var, Var): - var.is_property = frozen - elif isinstance(var, PlaceholderNode) and not self._api.final_iteration: - # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage - self._api.defer() - else: # pragma: no cover - # I don't know whether it's possible to hit this branch, but I've added it for safety - try: - var_str = str(var) - except TypeError: - # This happens for PlaceholderNode; perhaps it will happen for other types in the future.. 
- var_str = repr(var) - detail = f'sym_node.node: {var_str} (of type {var.__class__})' - error_unexpected_behavior(detail, self._api, self._cls) + assert isinstance(var, Var) + var.is_property = frozen else: - var = field.to_var(info, api, use_alias=False) + var = field.to_var(info, use_alias=False) var.info = info var.is_property = frozen - var._fullname = info.fullname + '.' + var.name - info.names[var.name] = SymbolTableNode(MDEF, var) + var._fullname = get_fullname(info) + '.' + get_name(var) + info.names[get_name(var)] = SymbolTableNode(MDEF, var) - def get_config_update(self, name: str, arg: Expression) -> ModelConfigData | None: - """Determines the config update due to a single kwarg in the ConfigDict definition. + def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']: + """ + Determines the config update due to a single statement in the Config class definition. Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) """ - if name not in self.tracked_config_fields: + lhs = substmt.lvalues[0] + if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields): return None - if name == 'extra': - if isinstance(arg, StrExpr): - forbid_extra = arg.value == 'forbid' - elif isinstance(arg, MemberExpr): - forbid_extra = arg.name == 'forbid' + if lhs.name == 'extra': + if isinstance(substmt.rvalue, StrExpr): + forbid_extra = substmt.rvalue.value == 'forbid' + elif isinstance(substmt.rvalue, MemberExpr): + forbid_extra = substmt.rvalue.name == 'forbid' else: - error_invalid_config_value(name, self._api, arg) + error_invalid_config_value(lhs.name, self._ctx.api, substmt) return None return ModelConfigData(forbid_extra=forbid_extra) - if name == 'alias_generator': + if lhs.name == 'alias_generator': has_alias_generator = True - if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None': + if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 
'builtins.None': has_alias_generator = False return ModelConfigData(has_alias_generator=has_alias_generator) - if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'): - return ModelConfigData(**{name: arg.fullname == 'builtins.True'}) - error_invalid_config_value(name, self._api, arg) + if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'): + return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'}) + error_invalid_config_value(lhs.name, self._ctx.api, substmt) return None @staticmethod - def get_has_default(stmt: AssignmentStmt) -> bool: - """Returns a boolean indicating whether the field defined in `stmt` is a required field.""" + def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool: + """ + Returns a boolean indicating whether the field defined in `stmt` is a required field. + """ expr = stmt.rvalue if isinstance(expr, TempNode): - # TempNode means annotation-only, so has no default - return False + # TempNode means annotation-only, so only non-required if Optional + value_type = get_proper_type(cls.info[lhs.name].type) + if isinstance(value_type, UnionType) and any(isinstance(item, NoneType) for item in value_type.items): + # Annotated as Optional, or otherwise having NoneType in the union + return False + return True if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: - # The "default value" is a call to `Field`; at this point, the field has a default if and only if: - # * there is a positional argument that is not `...` - # * there is a keyword argument named "default" that is not `...` - # * there is a "default_factory" that is not `None` + # The "default value" is a call to `Field`; at this point, the field is + # only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`) or if default_factory + # is specified. 
for arg, name in zip(expr.args, expr.arg_names): - # If name is None, then this arg is the default because it is the only positional argument. + # If name is None, then this arg is the default because it is the only positonal argument. if name is None or name == 'default': - return arg.__class__ is not EllipsisExpr + return arg.__class__ is EllipsisExpr if name == 'default_factory': - return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None') - return False - # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) - return not isinstance(expr, EllipsisExpr) + return False + return True + # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) + return isinstance(expr, EllipsisExpr) @staticmethod - def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]: - """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. + def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]: + """ + Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. If `has_dynamic_alias` is True, `alias` will be None. @@ -1033,38 +573,29 @@ class PydanticModelTransformer: return None, False def get_field_arguments( - self, - fields: list[PydanticModelField], - typed: bool, - use_alias: bool, - requires_dynamic_aliases: bool, - is_settings: bool, - ) -> list[Argument]: - """Helper function used during the construction of the `__init__` and `model_construct` method signatures. + self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool + ) -> List[Argument]: + """ + Helper function used during the construction of the `__init__` and `construct` method signatures. Returns a list of mypy Argument instances for use in the generated signatures. 
""" - info = self._cls.info + info = self._ctx.cls.info arguments = [ - field.to_argument( - info, - typed=typed, - force_optional=requires_dynamic_aliases or is_settings, - use_alias=use_alias, - api=self._api, - ) + field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias) for field in fields if not (use_alias and field.has_dynamic_alias) ] return arguments - def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool: - """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature. + def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool: + """ + Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, *unless* a required dynamic alias is present (since then we can't determine a valid signature). """ - if not config.populate_by_name: + if not config.allow_population_by_field_name: if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): return False if config.forbid_extra: @@ -1072,8 +603,9 @@ class PydanticModelTransformer: return self.plugin_config.init_forbid_extra @staticmethod - def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool: - """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be + def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool: + """ + Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be determined during static analysis. 
""" for field in fields: @@ -1086,74 +618,95 @@ class PydanticModelTransformer: return False -class ModelConfigData: - """Pydantic mypy plugin model config class.""" +class PydanticModelField: + def __init__( + self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int + ): + self.name = name + self.is_required = is_required + self.alias = alias + self.has_dynamic_alias = has_dynamic_alias + self.line = line + self.column = column + def to_var(self, info: TypeInfo, use_alias: bool) -> Var: + name = self.name + if use_alias and self.alias is not None: + name = self.alias + return Var(name, info[self.name].type) + + def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument: + if typed and info[self.name].type is not None: + type_annotation = info[self.name].type + else: + type_annotation = AnyType(TypeOfAny.explicit) + return Argument( + variable=self.to_var(info, use_alias), + type_annotation=type_annotation, + initializer=None, + kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED, + ) + + def serialize(self) -> JsonDict: + return self.__dict__ + + @classmethod + def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField': + return cls(**data) + + +class ModelConfigData: def __init__( self, - forbid_extra: bool | None = None, - frozen: bool | None = None, - from_attributes: bool | None = None, - populate_by_name: bool | None = None, - has_alias_generator: bool | None = None, + forbid_extra: Optional[bool] = None, + allow_mutation: Optional[bool] = None, + frozen: Optional[bool] = None, + orm_mode: Optional[bool] = None, + allow_population_by_field_name: Optional[bool] = None, + has_alias_generator: Optional[bool] = None, ): self.forbid_extra = forbid_extra + self.allow_mutation = allow_mutation self.frozen = frozen - self.from_attributes = from_attributes - self.populate_by_name = populate_by_name + self.orm_mode = orm_mode + 
self.allow_population_by_field_name = allow_population_by_field_name self.has_alias_generator = has_alias_generator - def get_values_dict(self) -> dict[str, Any]: - """Returns a dict of Pydantic model config names to their values. - - It includes the config if config value is not `None`. - """ + def set_values_dict(self) -> Dict[str, Any]: return {k: v for k, v in self.__dict__.items() if v is not None} - def update(self, config: ModelConfigData | None) -> None: - """Update Pydantic model config values.""" + def update(self, config: Optional['ModelConfigData']) -> None: if config is None: return - for k, v in config.get_values_dict().items(): + for k, v in config.set_values_dict().items(): setattr(self, k, v) def setdefault(self, key: str, value: Any) -> None: - """Set default value for Pydantic model config if config value is `None`.""" if getattr(self, key) is None: setattr(self, key, value) -ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic') +ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic') ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic') -ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic') -def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None: - """Emits an error when the model does not have `from_attributes=True`.""" - api.fail(f'"{model_name}" does not have from_attributes=True', context, code=ERROR_ORM) +def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None: + api.fail(f'"{model_name}" does not have 
orm_mode=True', context, code=ERROR_ORM) def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: - """Emits an error when the config value is invalid.""" api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: - """Emits required dynamic aliases error. - - This will be called when `warn_required_dynamic_aliases=True`. - """ api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS) -def error_unexpected_behavior( - detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context -) -> None: # pragma: no cover - """Emits unexpected behavior error.""" +def error_unexpected_behavior(detail: str, api: CheckerPluginInterface, context: Context) -> None: # pragma: no cover # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path link = 'https://github.com/pydantic/pydantic/issues/new/choose' full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' @@ -1162,70 +715,55 @@ def error_unexpected_behavior( def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: - """Emits an error when there is an untyped field in the model.""" api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) -def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None: - """Emits an error when there is more than just a root field defined for a subclass of RootModel.""" - api.fail('Only `root` is allowed as a field of a `RootModel`', context, code=ERROR_EXTRA_FIELD_ROOT_MODEL) - - def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None: - """Emits an error when `Field` has both `default` and `default_factory` together.""" api.fail('Field default and default_factory cannot be specified 
together', context, code=ERROR_FIELD_DEFAULTS) def add_method( - api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, - cls: ClassDef, + ctx: ClassDefContext, name: str, - args: list[Argument], + args: List[Argument], return_type: Type, - self_type: Type | None = None, - tvar_def: TypeVarDef | None = None, + self_type: Optional[Type] = None, + tvar_def: Optional[TypeVarDef] = None, is_classmethod: bool = False, + is_new: bool = False, + # is_staticmethod: bool = False, ) -> None: - """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes.""" - info = cls.info + """ + Adds a new method to a class. + + This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged + """ + info = ctx.cls.info # First remove any previously generated methods with the same name # to avoid clashes and problems in the semantic analyzer. if name in info.names: sym = info.names[name] if sym.plugin_generated and isinstance(sym.node, FuncDef): - cls.defs.body.remove(sym.node) # pragma: no cover + ctx.cls.defs.body.remove(sym.node) # pragma: no cover - if isinstance(api, SemanticAnalyzerPluginInterface): - function_type = api.named_type('builtins.function') - else: - function_type = api.named_generic_type('builtins.function', []) - - if is_classmethod: - self_type = self_type or TypeType(fill_typevars(info)) - first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)] + self_type = self_type or fill_typevars(info) + if is_classmethod or is_new: + first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)] + # elif is_staticmethod: + # first = [] else: self_type = self_type or fill_typevars(info) - # `self` is positional *ONLY* here, but this can't be expressed - # fully in the mypy internal API. ARG_POS is the closest we can get. 
- # Using ARG_POS will, however, give mypy errors if a `self` field - # is present on a model: - # - # Name "self" already defined (possibly by an import) [no-redef] - # - # As a workaround, we give this argument a name that will - # never conflict. By its positional nature, this name will not - # be used or exposed to users. first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)] args = first + args - arg_types, arg_names, arg_kinds = [], [], [] for arg in args: assert arg.type_annotation, 'All arguments must be fully typed.' arg_types.append(arg.type_annotation) - arg_names.append(arg.variable.name) + arg_names.append(get_name(arg.variable)) arg_kinds.append(arg.kind) + function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function') signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) if tvar_def: signature.variables = [tvar_def] @@ -1234,7 +772,8 @@ def add_method( func.info = info func.type = set_callable_name(signature, func) func.is_class = is_classmethod - func._fullname = info.fullname + '.' + name + # func.is_static = is_staticmethod + func._fullname = get_fullname(info) + '.' + name func.line = info.line # NOTE: we would like the plugin generated node to dominate, but we still @@ -1244,44 +783,68 @@ def add_method( r_name = get_unique_redefinition_name(name, info.names) info.names[r_name] = info.names[name] - # Add decorator for is_classmethod - # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a - # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel. 
- if is_classmethod: + if is_classmethod: # or is_staticmethod: func.is_decorated = True v = Var(name, func.type) v.info = info v._fullname = func._fullname + # if is_classmethod: v.is_classmethod = True dec = Decorator(func, [NameExpr('classmethod')], v) + # else: + # v.is_staticmethod = True + # dec = Decorator(func, [NameExpr('staticmethod')], v) + dec.line = info.line sym = SymbolTableNode(MDEF, dec) else: sym = SymbolTableNode(MDEF, func) sym.plugin_generated = True - info.names[name] = sym + info.names[name] = sym info.defn.defs.body.append(func) -def parse_toml(config_file: str) -> dict[str, Any] | None: - """Returns a dict of config keys to values. - - It reads configs from toml file and returns `None` if the file is not a toml file. +def get_fullname(x: Union[FuncBase, SymbolNode]) -> str: """ + Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. + """ + fn = x.fullname + if callable(fn): # pragma: no cover + return fn() + return fn + + +def get_name(x: Union[FuncBase, SymbolNode]) -> str: + """ + Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. 
+ """ + fn = x.name + if callable(fn): # pragma: no cover + return fn() + return fn + + +def parse_toml(config_file: str) -> Optional[Dict[str, Any]]: if not config_file.endswith('.toml'): return None + read_mode = 'rb' if sys.version_info >= (3, 11): import tomllib as toml_ else: try: import tomli as toml_ - except ImportError: # pragma: no cover - import warnings + except ImportError: + # older versions of mypy have toml as a dependency, not tomli + read_mode = 'r' + try: + import toml as toml_ # type: ignore[no-redef] + except ImportError: # pragma: no cover + import warnings - warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') - return None + warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') + return None - with open(config_file, 'rb') as rf: - return toml_.load(rf) + with open(config_file, read_mode) as rf: + return toml_.load(rf) # type: ignore[arg-type] diff --git a/lib/pydantic/networks.py b/lib/pydantic/networks.py index 6d9d292f..c7d97186 100644 --- a/lib/pydantic/networks.py +++ b/lib/pydantic/networks.py @@ -1,35 +1,80 @@ -"""The networks module contains types for common network-related fields.""" -from __future__ import annotations as _annotations - -import dataclasses as _dataclasses import re -from importlib.metadata import version -from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network -from typing import TYPE_CHECKING, Any +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, + _BaseAddress, + _BaseNetwork, +) +from typing import ( + TYPE_CHECKING, + Any, + Collection, + Dict, + Generator, + List, + Match, + Optional, + Pattern, + Set, + Tuple, + Type, + Union, + cast, + no_type_check, +) -from pydantic_core import MultiHostUrl, PydanticCustomError, Url, core_schema -from typing_extensions import Annotated, TypeAlias - -from ._internal import _fields, 
_repr, _schema_generation_shared -from ._migration import getattr_migration -from .annotated_handlers import GetCoreSchemaHandler -from .json_schema import JsonSchemaValue +from . import errors +from .utils import Representation, update_not_none +from .validators import constr_length_validator, str_validator if TYPE_CHECKING: import email_validator + from typing_extensions import TypedDict - NetworkType: TypeAlias = 'str | bytes | int | tuple[str | bytes | int, str | int]' + from .config import BaseConfig + from .fields import ModelField + from .typing import AnyCallable + + CallableGenerator = Generator[AnyCallable, None, None] + + class Parts(TypedDict, total=False): + scheme: str + user: Optional[str] + password: Optional[str] + ipv4: Optional[str] + ipv6: Optional[str] + domain: Optional[str] + port: Optional[str] + path: Optional[str] + query: Optional[str] + fragment: Optional[str] + + class HostParts(TypedDict, total=False): + host: str + tld: Optional[str] + host_type: Optional[str] + port: Optional[str] + rebuild: bool else: email_validator = None + class Parts(dict): + pass + + +NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]] __all__ = [ 'AnyUrl', 'AnyHttpUrl', 'FileUrl', 'HttpUrl', - 'UrlConstraints', + 'stricturl', 'EmailStr', 'NameEmail', 'IPvAnyAddress', @@ -41,321 +86,489 @@ __all__ = [ 'RedisDsn', 'MongoDsn', 'KafkaDsn', - 'NatsDsn', 'validate_email', - 'MySQLDsn', - 'MariaDBDsn', ] +_url_regex_cache = None +_multi_host_url_regex_cache = None +_ascii_domain_regex_cache = None +_int_domain_regex_cache = None +_host_regex_cache = None -@_dataclasses.dataclass -class UrlConstraints(_fields.PydanticMetadata): - """Url constraints. +_host_regex = ( + r'(?:' + r'(?P(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|' # ipv4 + r'(?P\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|' # ipv6 + r'(?P[^\s/:?#]+)' # domain, validation occurs later + r')?' + r'(?::(?P\d+))?' # port +) +_scheme_regex = r'(?:(?P[a-z][a-z0-9+\-.]+)://)?' 
# scheme https://tools.ietf.org/html/rfc3986#appendix-A +_user_info_regex = r'(?:(?P[^\s:/]*)(?::(?P[^\s/]*))?@)?' +_path_regex = r'(?P/[^\s?#]*)?' +_query_regex = r'(?:\?(?P[^\s#]*))?' +_fragment_regex = r'(?:#(?P[^\s#]*))?' - Attributes: - max_length: The maximum length of the url. Defaults to `None`. - allowed_schemes: The allowed schemes. Defaults to `None`. - host_required: Whether the host is required. Defaults to `None`. - default_host: The default host. Defaults to `None`. - default_port: The default port. Defaults to `None`. - default_path: The default path. Defaults to `None`. + +def url_regex() -> Pattern[str]: + global _url_regex_cache + if _url_regex_cache is None: + _url_regex_cache = re.compile( + rf'{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}', + re.IGNORECASE, + ) + return _url_regex_cache + + +def multi_host_url_regex() -> Pattern[str]: """ + Compiled multi host url regex. - max_length: int | None = None - allowed_schemes: list[str] | None = None - host_required: bool | None = None - default_host: str | None = None - default_port: int | None = None - default_path: str | None = None + Additionally to `url_regex` it allows to match multiple hosts. + E.g. host1.db.net,host2.db.net + """ + global _multi_host_url_regex_cache + if _multi_host_url_regex_cache is None: + _multi_host_url_regex_cache = re.compile( + rf'{_scheme_regex}{_user_info_regex}' + r'(?P([^/]*))' # validation occurs later + rf'{_path_regex}{_query_regex}{_fragment_regex}', + re.IGNORECASE, + ) + return _multi_host_url_regex_cache - def __hash__(self) -> int: - return hash( - ( - self.max_length, - tuple(self.allowed_schemes) if self.allowed_schemes is not None else None, - self.host_required, - self.default_host, - self.default_port, - self.default_path, - ) + +def ascii_domain_regex() -> Pattern[str]: + global _ascii_domain_regex_cache + if _ascii_domain_regex_cache is None: + ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?' 
+ ascii_domain_ending = r'(?P\.[a-z]{2,63})?\.?' + _ascii_domain_regex_cache = re.compile( + fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE + ) + return _ascii_domain_regex_cache + + +def int_domain_regex() -> Pattern[str]: + global _int_domain_regex_cache + if _int_domain_regex_cache is None: + int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?' + int_domain_ending = r'(?P(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?' + _int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE) + return _int_domain_regex_cache + + +def host_regex() -> Pattern[str]: + global _host_regex_cache + if _host_regex_cache is None: + _host_regex_cache = re.compile( + _host_regex, + re.IGNORECASE, + ) + return _host_regex_cache + + +class AnyUrl(str): + strip_whitespace = True + min_length = 1 + max_length = 2**16 + allowed_schemes: Optional[Collection[str]] = None + tld_required: bool = False + user_required: bool = False + host_required: bool = True + hidden_parts: Set[str] = set() + + __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment') + + @no_type_check + def __new__(cls, url: Optional[str], **kwargs) -> object: + return str.__new__(cls, cls.build(**kwargs) if url is None else url) + + def __init__( + self, + url: str, + *, + scheme: str, + user: Optional[str] = None, + password: Optional[str] = None, + host: Optional[str] = None, + tld: Optional[str] = None, + host_type: str = 'domain', + port: Optional[str] = None, + path: Optional[str] = None, + query: Optional[str] = None, + fragment: Optional[str] = None, + ) -> None: + str.__init__(url) + self.scheme = scheme + self.user = user + self.password = password + self.host = host + self.tld = tld + self.host_type = host_type + self.port = port + self.path = path + self.query = query + self.fragment = fragment + + @classmethod + def build( + cls, + *, + scheme: str, + 
user: Optional[str] = None, + password: Optional[str] = None, + host: str, + port: Optional[str] = None, + path: Optional[str] = None, + query: Optional[str] = None, + fragment: Optional[str] = None, + **_kwargs: str, + ) -> str: + parts = Parts( + scheme=scheme, + user=user, + password=password, + host=host, + port=port, + path=path, + query=query, + fragment=fragment, + **_kwargs, # type: ignore[misc] ) + url = scheme + '://' + if user: + url += user + if password: + url += ':' + password + if user or password: + url += '@' + url += host + if port and ('port' not in cls.hidden_parts or cls.get_default_parts(parts).get('port') != port): + url += ':' + port + if path: + url += path + if query: + url += '?' + query + if fragment: + url += '#' + fragment + return url -AnyUrl = Url -"""Base type for all URLs. + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri') -* Any scheme allowed -* Top-level domain (TLD) not required -* Host required + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate -Assuming an input URL of `http://samuel:pass@example.com:8000/the/path/?query=here#fragment=is;this=bit`, -the types export the following properties: + @classmethod + def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl': + if value.__class__ == cls: + return value + value = str_validator(value) + if cls.strip_whitespace: + value = value.strip() + url: str = cast(str, constr_length_validator(value, field, config)) -- `scheme`: the URL scheme (`http`), always set. -- `host`: the URL host (`example.com`), always set. -- `username`: optional username if included (`samuel`). -- `password`: optional password if included (`pass`). -- `port`: optional port (`8000`). -- `path`: optional path (`/the/path/`). 
-- `query`: optional URL query (for example, `GET` arguments or "search string", such as `query=here`). -- `fragment`: optional fragment (`fragment=is;this=bit`). -""" -AnyHttpUrl = Annotated[Url, UrlConstraints(allowed_schemes=['http', 'https'])] -"""A type that will accept any http or https URL. + m = cls._match_url(url) + # the regex should always match, if it doesn't please report with details of the URL tried + assert m, 'URL regex failed unexpectedly' -* TLD not required -* Host required -""" -HttpUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=['http', 'https'])] -"""A type that will accept any http or https URL. + original_parts = cast('Parts', m.groupdict()) + parts = cls.apply_default_parts(original_parts) + parts = cls.validate_parts(parts) -* TLD required -* Host required -* Max length 2083 + if m.end() != len(url): + raise errors.UrlExtraError(extra=url[m.end() :]) -```py -from pydantic import BaseModel, HttpUrl, ValidationError + return cls._build_url(m, url, parts) -class MyModel(BaseModel): - url: HttpUrl + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'AnyUrl': + """ + Validate hosts and build the AnyUrl object. Split from `validate` so this method + can be altered in `MultiHostDsn`. 
+ """ + host, tld, host_type, rebuild = cls.validate_host(parts) -m = MyModel(url='http://www.example.com') -print(m.url) -#> http://www.example.com/ + return cls( + None if rebuild else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + host=host, + tld=tld, + host_type=host_type, + port=parts['port'], + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + ) -try: - MyModel(url='ftp://invalid.url') -except ValidationError as e: - print(e) - ''' - 1 validation error for MyModel - url - URL scheme should be 'http' or 'https' [type=url_scheme, input_value='ftp://invalid.url', input_type=str] - ''' + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return url_regex().match(url) -try: - MyModel(url='not a url') -except ValidationError as e: - print(e) - ''' - 1 validation error for MyModel - url - Input should be a valid URL, relative URL without a base [type=url_parsing, input_value='not a url', input_type=str] - ''' -``` + @staticmethod + def _validate_port(port: Optional[str]) -> None: + if port is not None and int(port) > 65_535: + raise errors.UrlPortError() -"International domains" (e.g. a URL where the host or TLD includes non-ascii characters) will be encoded via -[punycode](https://en.wikipedia.org/wiki/Punycode) (see -[this article](https://www.xudongz.com/blog/2017/idn-phishing/) for a good description of why this is important): + @classmethod + def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': + """ + A method used to validate parts of a URL. 
+ Could be overridden to set default values for parts if missing + """ + scheme = parts['scheme'] + if scheme is None: + raise errors.UrlSchemeError() -```py -from pydantic import BaseModel, HttpUrl + if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes: + raise errors.UrlSchemePermittedError(set(cls.allowed_schemes)) -class MyModel(BaseModel): - url: HttpUrl + if validate_port: + cls._validate_port(parts['port']) -m1 = MyModel(url='http://puny£code.com') -print(m1.url) -#> http://xn--punycode-eja.com/ -m2 = MyModel(url='https://www.аррÓе.com/') -print(m2.url) -#> https://www.xn--80ak6aa92e.com/ -m3 = MyModel(url='https://www.example.ç å®/') -print(m3.url) -#> https://www.example.xn--pbt977c/ -``` + user = parts['user'] + if cls.user_required and user is None: + raise errors.UrlUserInfoError() + + return parts + + @classmethod + def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]: + tld, host_type, rebuild = None, None, False + for f in ('domain', 'ipv4', 'ipv6'): + host = parts[f] # type: ignore[literal-required] + if host: + host_type = f + break + + if host is None: + if cls.host_required: + raise errors.UrlHostError() + elif host_type == 'domain': + is_international = False + d = ascii_domain_regex().fullmatch(host) + if d is None: + d = int_domain_regex().fullmatch(host) + if d is None: + raise errors.UrlHostError() + is_international = True + + tld = d.group('tld') + if tld is None and not is_international: + d = int_domain_regex().fullmatch(host) + assert d is not None + tld = d.group('tld') + is_international = True + + if tld is not None: + tld = tld[1:] + elif cls.tld_required: + raise errors.UrlHostTldError() + + if is_international: + host_type = 'int_domain' + rebuild = True + host = host.encode('idna').decode('ascii') + if tld is not None: + tld = tld.encode('idna').decode('ascii') + + return host, tld, host_type, rebuild # type: ignore + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + 
return {} + + @classmethod + def apply_default_parts(cls, parts: 'Parts') -> 'Parts': + for key, value in cls.get_default_parts(parts).items(): + if not parts[key]: # type: ignore[literal-required] + parts[key] = value # type: ignore[literal-required] + return parts + + def __repr__(self) -> str: + extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None) + return f'{self.__class__.__name__}({super().__repr__()}, {extra})' -!!! warning "Underscores in Hostnames" - In Pydantic, underscores are allowed in all parts of a domain except the TLD. - Technically this might be wrong - in theory the hostname cannot have underscores, but subdomains can. +class AnyHttpUrl(AnyUrl): + allowed_schemes = {'http', 'https'} - To explain this; consider the following two cases: + __slots__ = () - - `exam_ple.co.uk`: the hostname is `exam_ple`, which should not be allowed since it contains an underscore. - - `foo_bar.example.com` the hostname is `example`, which should be allowed since the underscore is in the subdomain. - Without having an exhaustive list of TLDs, it would be impossible to differentiate between these two. Therefore - underscores are allowed, but you can always do further validation in a validator if desired. +class HttpUrl(AnyHttpUrl): + tld_required = True + # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers + max_length = 2083 + hidden_parts = {'port'} - Also, Chrome, Firefox, and Safari all currently accept `http://exam_ple.com` as a URL, so we're in good - (or at least big) company. -""" -FileUrl = Annotated[Url, UrlConstraints(allowed_schemes=['file'])] -"""A type that will accept any file URL. 
+ @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return {'port': '80' if parts['scheme'] == 'http' else '443'} -* Host not required -""" -PostgresDsn = Annotated[ - MultiHostUrl, - UrlConstraints( - host_required=True, - allowed_schemes=[ - 'postgres', - 'postgresql', - 'postgresql+asyncpg', - 'postgresql+pg8000', - 'postgresql+psycopg', - 'postgresql+psycopg2', - 'postgresql+psycopg2cffi', - 'postgresql+py-postgresql', - 'postgresql+pygresql', - ], - ), -] -"""A type that will accept any Postgres DSN. -* User info required -* TLD not required -* Host required -* Supports multiple hosts +class FileUrl(AnyUrl): + allowed_schemes = {'file'} + host_required = False -If further validation is required, these properties can be used by validators to enforce specific behaviour: + __slots__ = () -```py -from pydantic import ( - BaseModel, - HttpUrl, - PostgresDsn, - ValidationError, - field_validator, -) -class MyModel(BaseModel): - url: HttpUrl +class MultiHostDsn(AnyUrl): + __slots__ = AnyUrl.__slots__ + ('hosts',) -m = MyModel(url='http://www.example.com') + def __init__(self, *args: Any, hosts: Optional[List['HostParts']] = None, **kwargs: Any): + super().__init__(*args, **kwargs) + self.hosts = hosts -# the repr() method for a url will display all properties of the url -print(repr(m.url)) -#> Url('http://www.example.com/') -print(m.url.scheme) -#> http -print(m.url.host) -#> www.example.com -print(m.url.port) -#> 80 + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return multi_host_url_regex().match(url) -class MyDatabaseModel(BaseModel): - db: PostgresDsn + @classmethod + def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': + return super().validate_parts(parts, validate_port=False) - @field_validator('db') - def check_db_name(cls, v): - assert v.path and len(v.path) > 1, 'database must be provided' - return v + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 
'MultiHostDsn': + hosts_parts: List['HostParts'] = [] + host_re = host_regex() + for host in m.groupdict()['hosts'].split(','): + d: Parts = host_re.match(host).groupdict() # type: ignore + host, tld, host_type, rebuild = cls.validate_host(d) + port = d.get('port') + cls._validate_port(port) + hosts_parts.append( + { + 'host': host, + 'host_type': host_type, + 'tld': tld, + 'rebuild': rebuild, + 'port': port, + } + ) -m = MyDatabaseModel(db='postgres://user:pass@localhost:5432/foobar') -print(m.db) -#> postgres://user:pass@localhost:5432/foobar + if len(hosts_parts) > 1: + return cls( + None if any([hp['rebuild'] for hp in hosts_parts]) else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + host_type=None, + hosts=hosts_parts, + ) + else: + # backwards compatibility with single host + host_part = hosts_parts[0] + return cls( + None if host_part['rebuild'] else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + host=host_part['host'], + tld=host_part['tld'], + host_type=host_part['host_type'], + port=host_part.get('port'), + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + ) -try: - MyDatabaseModel(db='postgres://user:pass@localhost:5432') -except ValidationError as e: - print(e) - ''' - 1 validation error for MyDatabaseModel - db - Assertion failed, database must be provided - assert (None) - + where None = MultiHostUrl('postgres://user:pass@localhost:5432').path [type=assertion_error, input_value='postgres://user:pass@localhost:5432', input_type=str] - ''' -``` -""" -CockroachDsn = Annotated[ - Url, - UrlConstraints( - host_required=True, - allowed_schemes=[ - 'cockroachdb', - 'cockroachdb+psycopg2', - 'cockroachdb+asyncpg', - ], - ), -] -"""A type that will accept any Cockroach DSN. 
+class PostgresDsn(MultiHostDsn): + allowed_schemes = { + 'postgres', + 'postgresql', + 'postgresql+asyncpg', + 'postgresql+pg8000', + 'postgresql+psycopg2', + 'postgresql+psycopg2cffi', + 'postgresql+py-postgresql', + 'postgresql+pygresql', + } + user_required = True -* User info required -* TLD not required -* Host required -""" -AmqpDsn = Annotated[Url, UrlConstraints(allowed_schemes=['amqp', 'amqps'])] -"""A type that will accept any AMQP DSN. + __slots__ = () -* User info required -* TLD not required -* Host required -""" -RedisDsn = Annotated[ - Url, - UrlConstraints(allowed_schemes=['redis', 'rediss'], default_host='localhost', default_port=6379, default_path='/0'), -] -"""A type that will accept any Redis DSN. -* User info required -* TLD not required -* Host required (e.g., `rediss://:pass@localhost`) -""" -MongoDsn = Annotated[MultiHostUrl, UrlConstraints(allowed_schemes=['mongodb', 'mongodb+srv'], default_port=27017)] -"""A type that will accept any MongoDB DSN. +class CockroachDsn(AnyUrl): + allowed_schemes = { + 'cockroachdb', + 'cockroachdb+psycopg2', + 'cockroachdb+asyncpg', + } + user_required = True -* User info not required -* Database name not required -* Port not required -* User info may be passed without user part (e.g., `mongodb://mongodb0.example.com:27017`). -""" -KafkaDsn = Annotated[Url, UrlConstraints(allowed_schemes=['kafka'], default_host='localhost', default_port=9092)] -"""A type that will accept any Kafka DSN. -* User info required -* TLD not required -* Host required -""" -NatsDsn = Annotated[ - MultiHostUrl, UrlConstraints(allowed_schemes=['nats', 'tls', 'ws'], default_host='localhost', default_port=4222) -] -"""A type that will accept any NATS DSN. +class AmqpDsn(AnyUrl): + allowed_schemes = {'amqp', 'amqps'} + host_required = False -NATS is a connective technology built for the ever increasingly hyper-connected world. 
-It is a single technology that enables applications to securely communicate across -any combination of cloud vendors, on-premise, edge, web and mobile, and devices. -More: https://nats.io -""" -MySQLDsn = Annotated[ - Url, - UrlConstraints( - allowed_schemes=[ - 'mysql', - 'mysql+mysqlconnector', - 'mysql+aiomysql', - 'mysql+asyncmy', - 'mysql+mysqldb', - 'mysql+pymysql', - 'mysql+cymysql', - 'mysql+pyodbc', - ], - default_port=3306, - ), -] -"""A type that will accept any MySQL DSN. -* User info required -* TLD not required -* Host required -""" -MariaDBDsn = Annotated[ - Url, - UrlConstraints( - allowed_schemes=['mariadb', 'mariadb+mariadbconnector', 'mariadb+pymysql'], - default_port=3306, - ), -] -"""A type that will accept any MariaDB DSN. +class RedisDsn(AnyUrl): + __slots__ = () + allowed_schemes = {'redis', 'rediss'} + host_required = False -* User info required -* TLD not required -* Host required -""" + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '', + 'port': '6379', + 'path': '/0', + } + + +class MongoDsn(AnyUrl): + allowed_schemes = {'mongodb'} + + # TODO: Needed to generic "Parts" for "Replica Set", "Sharded Cluster", and other mongodb deployment modes + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'port': '27017', + } + + +class KafkaDsn(AnyUrl): + allowed_schemes = {'kafka'} + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'domain': 'localhost', + 'port': '9092', + } + + +def stricturl( + *, + strip_whitespace: bool = True, + min_length: int = 1, + max_length: int = 2**16, + tld_required: bool = True, + host_required: bool = True, + allowed_schemes: Optional[Collection[str]] = None, +) -> Type[AnyUrl]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + min_length=min_length, + max_length=max_length, + 
tld_required=tld_required, + host_required=host_required, + allowed_schemes=allowed_schemes, + ) + return type('UrlValue', (AnyUrl,), namespace) def import_email_validator() -> None: @@ -364,95 +577,27 @@ def import_email_validator() -> None: import email_validator except ImportError as e: raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e - if not version('email-validator').partition('.')[0] == '2': - raise ImportError('email-validator version >= 2.0 required, run pip install -U email-validator') -if TYPE_CHECKING: - EmailStr = Annotated[str, ...] -else: +class EmailStr(str): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='email') - class EmailStr: - """ - Info: - To use this type, you need to install the optional - [`email-validator`](https://github.com/JoshData/python-email-validator) package: + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + # included here and below so the error happens straight away + import_email_validator() - ```bash - pip install email-validator - ``` + yield str_validator + yield cls.validate - Validate email addresses. 
- - ```py - from pydantic import BaseModel, EmailStr - - class Model(BaseModel): - email: EmailStr - - print(Model(email='contact@mail.com')) - #> email='contact@mail.com' - ``` - """ # noqa: D212 - - @classmethod - def __get_pydantic_core_schema__( - cls, - _source: type[Any], - _handler: GetCoreSchemaHandler, - ) -> core_schema.CoreSchema: - import_email_validator() - return core_schema.no_info_after_validator_function(cls._validate, core_schema.str_schema()) - - @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = handler(core_schema) - field_schema.update(type='string', format='email') - return field_schema - - @classmethod - def _validate(cls, __input_value: str) -> str: - return validate_email(__input_value)[1] + @classmethod + def validate(cls, value: Union[str]) -> str: + return validate_email(value)[1] -class NameEmail(_repr.Representation): - """ - Info: - To use this type, you need to install the optional - [`email-validator`](https://github.com/JoshData/python-email-validator) package: - - ```bash - pip install email-validator - ``` - - Validate a name and email address combination, as specified by - [RFC 5322](https://datatracker.ietf.org/doc/html/rfc5322#section-3.4). - - The `NameEmail` has two properties: `name` and `email`. - In case the `name` is not provided, it's inferred from the email address. 
- - ```py - from pydantic import BaseModel, NameEmail - - class User(BaseModel): - email: NameEmail - - user = User(email='Fred Bloggs ') - print(user.email) - #> Fred Bloggs - print(user.email.name) - #> Fred Bloggs - - user = User(email='fred.bloggs@example.com') - print(user.email) - #> fred.bloggs - print(user.email.name) - #> fred.bloggs - ``` - """ # noqa: D212 - +class NameEmail(Representation): __slots__ = 'name', 'email' def __init__(self, name: str, email: str): @@ -463,76 +608,39 @@ class NameEmail(_repr.Representation): return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email) @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = handler(core_schema) + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='name-email') - return field_schema @classmethod - def __get_pydantic_core_schema__( - cls, - _source: type[Any], - _handler: GetCoreSchemaHandler, - ) -> core_schema.CoreSchema: + def __get_validators__(cls) -> 'CallableGenerator': import_email_validator() - return core_schema.no_info_after_validator_function( - cls._validate, - core_schema.union_schema( - [core_schema.is_instance_schema(cls), core_schema.str_schema()], - custom_error_type='name_email_type', - custom_error_message='Input is not a valid NameEmail', - ), - serialization=core_schema.to_string_ser_schema(), - ) + + yield cls.validate @classmethod - def _validate(cls, __input_value: NameEmail | str) -> NameEmail: - if isinstance(__input_value, cls): - return __input_value - else: - name, email = validate_email(__input_value) # type: ignore[arg-type] - return cls(name, email) + def validate(cls, value: Any) -> 'NameEmail': + if value.__class__ == cls: + return value + value = str_validator(value) + return cls(*validate_email(value)) def __str__(self) -> str: return 
f'{self.name} <{self.email}>' -class IPvAnyAddress: - """Validate an IPv4 or IPv6 address. - - ```py - from pydantic import BaseModel - from pydantic.networks import IPvAnyAddress - - class IpModel(BaseModel): - ip: IPvAnyAddress - - print(IpModel(ip='127.0.0.1')) - #> ip=IPv4Address('127.0.0.1') - - try: - IpModel(ip='http://www.example.com') - except ValueError as e: - print(e.errors()) - ''' - [ - { - 'type': 'ip_any_address', - 'loc': ('ip',), - 'msg': 'value is not a valid IPv4 or IPv6 address', - 'input': 'http://www.example.com', - } - ] - ''' - ``` - """ - +class IPvAnyAddress(_BaseAddress): __slots__ = () - def __new__(cls, value: Any) -> IPv4Address | IPv6Address: - """Validate an IPv4 or IPv6 address.""" + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanyaddress') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]: try: return IPv4Address(value) except ValueError: @@ -541,38 +649,22 @@ class IPvAnyAddress: try: return IPv6Address(value) except ValueError: - raise PydanticCustomError('ip_any_address', 'value is not a valid IPv4 or IPv6 address') - - @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = {} - field_schema.update(type='string', format='ipvanyaddress') - return field_schema - - @classmethod - def __get_pydantic_core_schema__( - cls, - _source: type[Any], - _handler: GetCoreSchemaHandler, - ) -> core_schema.CoreSchema: - return core_schema.no_info_plain_validator_function( - cls._validate, serialization=core_schema.to_string_ser_schema() - ) - - @classmethod - def _validate(cls, __input_value: Any) -> IPv4Address | IPv6Address: - return cls(__input_value) # type: ignore[return-value] + 
raise errors.IPvAnyAddressError() -class IPvAnyInterface: - """Validate an IPv4 or IPv6 interface.""" - +class IPvAnyInterface(_BaseAddress): __slots__ = () - def __new__(cls, value: NetworkType) -> IPv4Interface | IPv6Interface: - """Validate an IPv4 or IPv6 interface.""" + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanyinterface') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]: try: return IPv4Interface(value) except ValueError: @@ -581,39 +673,21 @@ class IPvAnyInterface: try: return IPv6Interface(value) except ValueError: - raise PydanticCustomError('ip_any_interface', 'value is not a valid IPv4 or IPv6 interface') + raise errors.IPvAnyInterfaceError() + + +class IPvAnyNetwork(_BaseNetwork): # type: ignore + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanynetwork') @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = {} - field_schema.update(type='string', format='ipvanyinterface') - return field_schema + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate @classmethod - def __get_pydantic_core_schema__( - cls, - _source: type[Any], - _handler: GetCoreSchemaHandler, - ) -> core_schema.CoreSchema: - return core_schema.no_info_plain_validator_function( - cls._validate, serialization=core_schema.to_string_ser_schema() - ) - - @classmethod - def _validate(cls, __input_value: NetworkType) -> IPv4Interface | IPv6Interface: - return cls(__input_value) # type: ignore[return-value] - - -class IPvAnyNetwork: - """Validate an IPv4 or IPv6 network.""" - - __slots__ = () - - def __new__(cls, value: NetworkType) -> 
IPv4Network | IPv6Network: - """Validate an IPv4 or IPv6 network.""" - # Assume IP Network is defined with a default value for `strict` argument. + def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]: + # Assume IP Network is defined with a default value for ``strict`` argument. # Define your own class if you want to specify network address check strictness. try: return IPv4Network(value) @@ -623,86 +697,40 @@ class IPvAnyNetwork: try: return IPv6Network(value) except ValueError: - raise PydanticCustomError('ip_any_network', 'value is not a valid IPv4 or IPv6 network') - - @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = {} - field_schema.update(type='string', format='ipvanynetwork') - return field_schema - - @classmethod - def __get_pydantic_core_schema__( - cls, - _source: type[Any], - _handler: GetCoreSchemaHandler, - ) -> core_schema.CoreSchema: - return core_schema.no_info_plain_validator_function( - cls._validate, serialization=core_schema.to_string_ser_schema() - ) - - @classmethod - def _validate(cls, __input_value: NetworkType) -> IPv4Network | IPv6Network: - return cls(__input_value) # type: ignore[return-value] + raise errors.IPvAnyNetworkError() -def _build_pretty_email_regex() -> re.Pattern[str]: - name_chars = r'[\w!#$%&\'*+\-/=?^_`{|}~]' - unquoted_name_group = rf'((?:{name_chars}+\s+)*{name_chars}+)' - quoted_name_group = r'"((?:[^"]|\")+)"' - email_group = r'<\s*(.+)\s*>' - return re.compile(rf'\s*(?:{unquoted_name_group}|{quoted_name_group})?\s*{email_group}\s*') +pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *') -pretty_email_regex = _build_pretty_email_regex() +def validate_email(value: Union[str]) -> Tuple[str, str]: + """ + Brutally simple email address validation. Note unlike most email address validation + * raw ip address (literal) domain parts are not allowed. 
+ * "John Doe " style "pretty" email addresses are processed + * the local part check is extremely basic. This raises the possibility of unicode spoofing, but no better + solution is really possible. + * spaces are striped from the beginning and end of addresses but no error is raised -MAX_EMAIL_LENGTH = 2048 -"""Maximum length for an email. -A somewhat arbitrary but very generous number compared to what is allowed by most implementations. -""" - - -def validate_email(value: str) -> tuple[str, str]: - """Email address validation using [email-validator](https://pypi.org/project/email-validator/). - - Note: - Note that: - - * Raw IP address (literal) domain parts are not allowed. - * `"John Doe "` style "pretty" email addresses are processed. - * Spaces are striped from the beginning and end of addresses, but no error is raised. + See RFC 5322 but treat it with suspicion, there seems to exist no universally acknowledged test for a valid email! """ if email_validator is None: import_email_validator() - if len(value) > MAX_EMAIL_LENGTH: - raise PydanticCustomError( - 'value_error', - 'value is not a valid email address: {reason}', - {'reason': f'Length must not exceed {MAX_EMAIL_LENGTH} characters'}, - ) - m = pretty_email_regex.fullmatch(value) - name: str | None = None + name: Optional[str] = None if m: - unquoted_name, quoted_name, value = m.groups() - name = unquoted_name or quoted_name + name, value = m.groups() email = value.strip() try: - parts = email_validator.validate_email(email, check_deliverability=False) + email_validator.validate_email(email, check_deliverability=False) except email_validator.EmailNotValidError as e: - raise PydanticCustomError( - 'value_error', 'value is not a valid email address: {reason}', {'reason': str(e.args[0])} - ) from e + raise errors.EmailError() from e - email = parts.normalized - assert email is not None - name = name or parts.local_part - return name, email + at_index = email.index('@') + local_part = email[:at_index] # RFC 
5321, local part must be case-sensitive. + global_part = email[at_index:].lower() - -__getattr__ = getattr_migration(__name__) + return name or local_part, local_part + global_part diff --git a/lib/pydantic/parse.py b/lib/pydantic/parse.py index ceee6342..7ac330ca 100644 --- a/lib/pydantic/parse.py +++ b/lib/pydantic/parse.py @@ -1,4 +1,66 @@ -"""The `parse` module is a backport module from V1.""" -from ._migration import getattr_migration +import json +import pickle +from enum import Enum +from pathlib import Path +from typing import Any, Callable, Union -__getattr__ = getattr_migration(__name__) +from .types import StrBytes + + +class Protocol(str, Enum): + json = 'json' + pickle = 'pickle' + + +def load_str_bytes( + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + if proto is None and content_type: + if content_type.endswith(('json', 'javascript')): + pass + elif allow_pickle and content_type.endswith('pickle'): + proto = Protocol.pickle + else: + raise TypeError(f'Unknown content-type: {content_type}') + + proto = proto or Protocol.json + + if proto == Protocol.json: + if isinstance(b, bytes): + b = b.decode(encoding) + return json_loads(b) + elif proto == Protocol.pickle: + if not allow_pickle: + raise RuntimeError('Trying to decode with pickle with allow_pickle=False') + bb = b if isinstance(b, bytes) else b.encode() + return pickle.loads(bb) + else: + raise TypeError(f'Unknown protocol: {proto}') + + +def load_file( + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + path = Path(path) + b = path.read_bytes() + if content_type is None: + if path.suffix in ('.js', '.json'): + proto = Protocol.json + elif path.suffix == '.pkl': + proto = Protocol.pickle + + return 
load_str_bytes( + b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads + ) diff --git a/lib/pydantic/plugin/__init__.py b/lib/pydantic/plugin/__init__.py deleted file mode 100644 index 84197006..00000000 --- a/lib/pydantic/plugin/__init__.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Usage docs: https://docs.pydantic.dev/2.6/concepts/plugins#build-a-plugin - -Plugin interface for Pydantic plugins, and related types. -""" -from __future__ import annotations - -from typing import Any, Callable, NamedTuple - -from pydantic_core import CoreConfig, CoreSchema, ValidationError -from typing_extensions import Literal, Protocol, TypeAlias - -__all__ = ( - 'PydanticPluginProtocol', - 'BaseValidateHandlerProtocol', - 'ValidatePythonHandlerProtocol', - 'ValidateJsonHandlerProtocol', - 'ValidateStringsHandlerProtocol', - 'NewSchemaReturns', - 'SchemaTypePath', - 'SchemaKind', -) - -NewSchemaReturns: TypeAlias = 'tuple[ValidatePythonHandlerProtocol | None, ValidateJsonHandlerProtocol | None, ValidateStringsHandlerProtocol | None]' - - -class SchemaTypePath(NamedTuple): - """Path defining where `schema_type` was defined, or where `TypeAdapter` was called.""" - - module: str - name: str - - -SchemaKind: TypeAlias = Literal['BaseModel', 'TypeAdapter', 'dataclass', 'create_model', 'validate_call'] - - -class PydanticPluginProtocol(Protocol): - """Protocol defining the interface for Pydantic plugins.""" - - def new_schema_validator( - self, - schema: CoreSchema, - schema_type: Any, - schema_type_path: SchemaTypePath, - schema_kind: SchemaKind, - config: CoreConfig | None, - plugin_settings: dict[str, object], - ) -> tuple[ - ValidatePythonHandlerProtocol | None, ValidateJsonHandlerProtocol | None, ValidateStringsHandlerProtocol | None - ]: - """This method is called for each plugin every time a new [`SchemaValidator`][pydantic_core.SchemaValidator] - is created. 
- - It should return an event handler for each of the three validation methods, or `None` if the plugin does not - implement that method. - - Args: - schema: The schema to validate against. - schema_type: The original type which the schema was created from, e.g. the model class. - schema_type_path: Path defining where `schema_type` was defined, or where `TypeAdapter` was called. - schema_kind: The kind of schema to validate against. - config: The config to use for validation. - plugin_settings: Any plugin settings. - - Returns: - A tuple of optional event handlers for each of the three validation methods - - `validate_python`, `validate_json`, `validate_strings`. - """ - raise NotImplementedError('Pydantic plugins should implement `new_schema_validator`.') - - -class BaseValidateHandlerProtocol(Protocol): - """Base class for plugin callbacks protocols. - - You shouldn't implement this protocol directly, instead use one of the subclasses with adds the correctly - typed `on_error` method. - """ - - on_enter: Callable[..., None] - """`on_enter` is changed to be more specific on all subclasses""" - - def on_success(self, result: Any) -> None: - """Callback to be notified of successful validation. - - Args: - result: The result of the validation. - """ - return - - def on_error(self, error: ValidationError) -> None: - """Callback to be notified of validation errors. - - Args: - error: The validation error. - """ - return - - def on_exception(self, exception: Exception) -> None: - """Callback to be notified of validation exceptions. - - Args: - exception: The exception raised during validation. 
- """ - return - - -class ValidatePythonHandlerProtocol(BaseValidateHandlerProtocol, Protocol): - """Event handler for `SchemaValidator.validate_python`.""" - - def on_enter( - self, - input: Any, - *, - strict: bool | None = None, - from_attributes: bool | None = None, - context: dict[str, Any] | None = None, - self_instance: Any | None = None, - ) -> None: - """Callback to be notified of validation start, and create an instance of the event handler. - - Args: - input: The input to be validated. - strict: Whether to validate the object in strict mode. - from_attributes: Whether to validate objects as inputs by extracting attributes. - context: The context to use for validation, this is passed to functional validators. - self_instance: An instance of a model to set attributes on from validation, this is used when running - validation from the `__init__` method of a model. - """ - pass - - -class ValidateJsonHandlerProtocol(BaseValidateHandlerProtocol, Protocol): - """Event handler for `SchemaValidator.validate_json`.""" - - def on_enter( - self, - input: str | bytes | bytearray, - *, - strict: bool | None = None, - context: dict[str, Any] | None = None, - self_instance: Any | None = None, - ) -> None: - """Callback to be notified of validation start, and create an instance of the event handler. - - Args: - input: The JSON data to be validated. - strict: Whether to validate the object in strict mode. - context: The context to use for validation, this is passed to functional validators. - self_instance: An instance of a model to set attributes on from validation, this is used when running - validation from the `__init__` method of a model. 
- """ - pass - - -StringInput: TypeAlias = 'dict[str, StringInput]' - - -class ValidateStringsHandlerProtocol(BaseValidateHandlerProtocol, Protocol): - """Event handler for `SchemaValidator.validate_strings`.""" - - def on_enter( - self, input: StringInput, *, strict: bool | None = None, context: dict[str, Any] | None = None - ) -> None: - """Callback to be notified of validation start, and create an instance of the event handler. - - Args: - input: The string data to be validated. - strict: Whether to validate the object in strict mode. - context: The context to use for validation, this is passed to functional validators. - """ - pass diff --git a/lib/pydantic/plugin/_loader.py b/lib/pydantic/plugin/_loader.py deleted file mode 100644 index 9e0e33ca..00000000 --- a/lib/pydantic/plugin/_loader.py +++ /dev/null @@ -1,50 +0,0 @@ -from __future__ import annotations - -import importlib.metadata as importlib_metadata -import warnings -from typing import TYPE_CHECKING, Final, Iterable - -if TYPE_CHECKING: - from . import PydanticPluginProtocol - - -PYDANTIC_ENTRY_POINT_GROUP: Final[str] = 'pydantic' - -# cache of plugins -_plugins: dict[str, PydanticPluginProtocol] | None = None -# return no plugins while loading plugins to avoid recursion and errors while import plugins -# this means that if plugins use pydantic -_loading_plugins: bool = False - - -def get_plugins() -> Iterable[PydanticPluginProtocol]: - """Load plugins for Pydantic. 
- - Inspired by: https://github.com/pytest-dev/pluggy/blob/1.3.0/src/pluggy/_manager.py#L376-L402 - """ - global _plugins, _loading_plugins - if _loading_plugins: - # this happens when plugins themselves use pydantic, we return no plugins - return () - elif _plugins is None: - _plugins = {} - # set _loading_plugins so any plugins that use pydantic don't themselves use plugins - _loading_plugins = True - try: - for dist in importlib_metadata.distributions(): - for entry_point in dist.entry_points: - if entry_point.group != PYDANTIC_ENTRY_POINT_GROUP: - continue - if entry_point.value in _plugins: - continue - try: - _plugins[entry_point.value] = entry_point.load() - except (ImportError, AttributeError) as e: - warnings.warn( - f'{e.__class__.__name__} while loading the `{entry_point.name}` Pydantic plugin, ' - f'this plugin will not be installed.\n\n{e!r}' - ) - finally: - _loading_plugins = False - - return _plugins.values() diff --git a/lib/pydantic/plugin/_schema_validator.py b/lib/pydantic/plugin/_schema_validator.py deleted file mode 100644 index 7186ece6..00000000 --- a/lib/pydantic/plugin/_schema_validator.py +++ /dev/null @@ -1,138 +0,0 @@ -"""Pluggable schema validator for pydantic.""" -from __future__ import annotations - -import functools -from typing import TYPE_CHECKING, Any, Callable, Iterable, TypeVar - -from pydantic_core import CoreConfig, CoreSchema, SchemaValidator, ValidationError -from typing_extensions import Literal, ParamSpec - -if TYPE_CHECKING: - from . 
import BaseValidateHandlerProtocol, PydanticPluginProtocol, SchemaKind, SchemaTypePath - - -P = ParamSpec('P') -R = TypeVar('R') -Event = Literal['on_validate_python', 'on_validate_json', 'on_validate_strings'] -events: list[Event] = list(Event.__args__) # type: ignore - - -def create_schema_validator( - schema: CoreSchema, - schema_type: Any, - schema_type_module: str, - schema_type_name: str, - schema_kind: SchemaKind, - config: CoreConfig | None = None, - plugin_settings: dict[str, Any] | None = None, -) -> SchemaValidator: - """Create a `SchemaValidator` or `PluggableSchemaValidator` if plugins are installed. - - Returns: - If plugins are installed then return `PluggableSchemaValidator`, otherwise return `SchemaValidator`. - """ - from . import SchemaTypePath - from ._loader import get_plugins - - plugins = get_plugins() - if plugins: - return PluggableSchemaValidator( - schema, - schema_type, - SchemaTypePath(schema_type_module, schema_type_name), - schema_kind, - config, - plugins, - plugin_settings or {}, - ) # type: ignore - else: - return SchemaValidator(schema, config) - - -class PluggableSchemaValidator: - """Pluggable schema validator.""" - - __slots__ = '_schema_validator', 'validate_json', 'validate_python', 'validate_strings' - - def __init__( - self, - schema: CoreSchema, - schema_type: Any, - schema_type_path: SchemaTypePath, - schema_kind: SchemaKind, - config: CoreConfig | None, - plugins: Iterable[PydanticPluginProtocol], - plugin_settings: dict[str, Any], - ) -> None: - self._schema_validator = SchemaValidator(schema, config) - - python_event_handlers: list[BaseValidateHandlerProtocol] = [] - json_event_handlers: list[BaseValidateHandlerProtocol] = [] - strings_event_handlers: list[BaseValidateHandlerProtocol] = [] - for plugin in plugins: - try: - p, j, s = plugin.new_schema_validator( - schema, schema_type, schema_type_path, schema_kind, config, plugin_settings - ) - except TypeError as e: # pragma: no cover - raise TypeError(f'Error using 
plugin `{plugin.__module__}:{plugin.__class__.__name__}`: {e}') from e - if p is not None: - python_event_handlers.append(p) - if j is not None: - json_event_handlers.append(j) - if s is not None: - strings_event_handlers.append(s) - - self.validate_python = build_wrapper(self._schema_validator.validate_python, python_event_handlers) - self.validate_json = build_wrapper(self._schema_validator.validate_json, json_event_handlers) - self.validate_strings = build_wrapper(self._schema_validator.validate_strings, strings_event_handlers) - - def __getattr__(self, name: str) -> Any: - return getattr(self._schema_validator, name) - - -def build_wrapper(func: Callable[P, R], event_handlers: list[BaseValidateHandlerProtocol]) -> Callable[P, R]: - if not event_handlers: - return func - else: - on_enters = tuple(h.on_enter for h in event_handlers if filter_handlers(h, 'on_enter')) - on_successes = tuple(h.on_success for h in event_handlers if filter_handlers(h, 'on_success')) - on_errors = tuple(h.on_error for h in event_handlers if filter_handlers(h, 'on_error')) - on_exceptions = tuple(h.on_exception for h in event_handlers if filter_handlers(h, 'on_exception')) - - @functools.wraps(func) - def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - for on_enter_handler in on_enters: - on_enter_handler(*args, **kwargs) - - try: - result = func(*args, **kwargs) - except ValidationError as error: - for on_error_handler in on_errors: - on_error_handler(error) - raise - except Exception as exception: - for on_exception_handler in on_exceptions: - on_exception_handler(exception) - raise - else: - for on_success_handler in on_successes: - on_success_handler(result) - return result - - return wrapper - - -def filter_handlers(handler_cls: BaseValidateHandlerProtocol, method_name: str) -> bool: - """Filter out handler methods which are not implemented by the plugin directly - e.g. are missing - or are inherited from the protocol. 
- """ - handler = getattr(handler_cls, method_name, None) - if handler is None: - return False - elif handler.__module__ == 'pydantic.plugin': - # this is the original handler, from the protocol due to runtime inheritance - # we don't want to call it - return False - else: - return True diff --git a/lib/pydantic/root_model.py b/lib/pydantic/root_model.py deleted file mode 100644 index 42186b9d..00000000 --- a/lib/pydantic/root_model.py +++ /dev/null @@ -1,149 +0,0 @@ -"""RootModel class and type definitions.""" - -from __future__ import annotations as _annotations - -import typing -from copy import copy, deepcopy - -from pydantic_core import PydanticUndefined - -from . import PydanticUserError -from ._internal import _model_construction, _repr -from .main import BaseModel, _object_setattr - -if typing.TYPE_CHECKING: - from typing import Any - - from typing_extensions import Literal, dataclass_transform - - from .fields import Field as PydanticModelField - - # dataclass_transform could be applied to RootModel directly, but `ModelMetaclass`'s dataclass_transform - # takes priority (at least with pyright). We trick type checkers into thinking we apply dataclass_transform - # on a new metaclass. - @dataclass_transform(kw_only_default=False, field_specifiers=(PydanticModelField,)) - class _RootModelMetaclass(_model_construction.ModelMetaclass): - ... - - Model = typing.TypeVar('Model', bound='BaseModel') -else: - _RootModelMetaclass = _model_construction.ModelMetaclass - -__all__ = ('RootModel',) - - -RootModelRootType = typing.TypeVar('RootModelRootType') - - -class RootModel(BaseModel, typing.Generic[RootModelRootType], metaclass=_RootModelMetaclass): - """Usage docs: https://docs.pydantic.dev/2.6/concepts/models/#rootmodel-and-custom-root-types - - A Pydantic `BaseModel` for the root object of the model. - - Attributes: - root: The root object of the model. - __pydantic_root_model__: Whether the model is a RootModel. 
- __pydantic_private__: Private fields in the model. - __pydantic_extra__: Extra fields in the model. - - """ - - __pydantic_root_model__ = True - __pydantic_private__ = None - __pydantic_extra__ = None - - root: RootModelRootType - - def __init_subclass__(cls, **kwargs): - extra = cls.model_config.get('extra') - if extra is not None: - raise PydanticUserError( - "`RootModel` does not support setting `model_config['extra']`", code='root-model-extra' - ) - super().__init_subclass__(**kwargs) - - def __init__(self, /, root: RootModelRootType = PydanticUndefined, **data) -> None: # type: ignore - __tracebackhide__ = True - if data: - if root is not PydanticUndefined: - raise ValueError( - '"RootModel.__init__" accepts either a single positional argument or arbitrary keyword arguments' - ) - root = data # type: ignore - self.__pydantic_validator__.validate_python(root, self_instance=self) - - __init__.__pydantic_base_init__ = True # pyright: ignore[reportFunctionMemberAccess] - - @classmethod - def model_construct(cls: type[Model], root: RootModelRootType, _fields_set: set[str] | None = None) -> Model: # type: ignore - """Create a new model using the provided root object and update fields set. - - Args: - root: The root object of the model. - _fields_set: The set of fields to be updated. - - Returns: - The new model. - - Raises: - NotImplemented: If the model is not a subclass of `RootModel`. 
- """ - return super().model_construct(root=root, _fields_set=_fields_set) - - def __getstate__(self) -> dict[Any, Any]: - return { - '__dict__': self.__dict__, - '__pydantic_fields_set__': self.__pydantic_fields_set__, - } - - def __setstate__(self, state: dict[Any, Any]) -> None: - _object_setattr(self, '__pydantic_fields_set__', state['__pydantic_fields_set__']) - _object_setattr(self, '__dict__', state['__dict__']) - - def __copy__(self: Model) -> Model: - """Returns a shallow copy of the model.""" - cls = type(self) - m = cls.__new__(cls) - _object_setattr(m, '__dict__', copy(self.__dict__)) - _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) - return m - - def __deepcopy__(self: Model, memo: dict[int, Any] | None = None) -> Model: - """Returns a deep copy of the model.""" - cls = type(self) - m = cls.__new__(cls) - _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo)) - # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str], - # and attempting a deepcopy would be marginally slower. - _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) - return m - - if typing.TYPE_CHECKING: - - def model_dump( # type: ignore - self, - *, - mode: Literal['json', 'python'] | str = 'python', - include: Any = None, - exclude: Any = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool = True, - ) -> RootModelRootType: - """This method is included just to get a more accurate return type for type checkers. - It is included in this `if TYPE_CHECKING:` block since no override is actually necessary. - - See the documentation of `BaseModel.model_dump` for more details about the arguments. - """ - ... 
- - def __eq__(self, other: Any) -> bool: - if not isinstance(other, RootModel): - return NotImplemented - return self.model_fields['root'].annotation == other.model_fields['root'].annotation and super().__eq__(other) - - def __repr_args__(self) -> _repr.ReprArgs: - yield 'root', self.root diff --git a/lib/pydantic/schema.py b/lib/pydantic/schema.py index e290aed9..e7af56f1 100644 --- a/lib/pydantic/schema.py +++ b/lib/pydantic/schema.py @@ -1,4 +1,1153 @@ -"""The `schema` module is a backport module from V1.""" -from ._migration import getattr_migration +import re +import warnings +from collections import defaultdict +from datetime import date, datetime, time, timedelta +from decimal import Decimal +from enum import Enum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + ForwardRef, + FrozenSet, + Generic, + Iterable, + List, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from uuid import UUID -__getattr__ = getattr_migration(__name__) +from typing_extensions import Annotated, Literal + +from .fields import ( + MAPPING_LIKE_SHAPES, + SHAPE_DEQUE, + SHAPE_FROZENSET, + SHAPE_GENERIC, + SHAPE_ITERABLE, + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_SINGLETON, + SHAPE_TUPLE, + SHAPE_TUPLE_ELLIPSIS, + FieldInfo, + ModelField, +) +from .json import pydantic_encoder +from .networks import AnyUrl, EmailStr +from .types import ( + ConstrainedDecimal, + ConstrainedFloat, + ConstrainedFrozenSet, + ConstrainedInt, + ConstrainedList, + ConstrainedSet, + SecretBytes, + SecretStr, + StrictBytes, + StrictStr, + conbytes, + condecimal, + confloat, + confrozenset, + conint, + conlist, + conset, + constr, +) +from .typing import ( + all_literal_values, + get_args, + get_origin, + get_sub_types, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_union, +) +from 
.utils import ROOT_KEY, get_model, lenient_issubclass + +if TYPE_CHECKING: + from .dataclasses import Dataclass + from .main import BaseModel + +default_prefix = '#/definitions/' +default_ref_template = '#/definitions/{model}' + +TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]] +TypeModelSet = Set[TypeModelOrEnum] + + +def _apply_modify_schema( + modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any] +) -> None: + from inspect import signature + + sig = signature(modify_schema) + args = set(sig.parameters.keys()) + if 'field' in args or 'kwargs' in args: + modify_schema(field_schema, field=field) + else: + modify_schema(field_schema) + + +def schema( + models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]], + *, + by_alias: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions`` + top-level JSON key, including their sub-models. + + :param models: a list of models to include in the generated JSON Schema + :param by_alias: generate the schemas using the aliases defined, if any + :param title: title for the generated schema that includes the definitions + :param description: description for the generated schema + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. 
This can be useful + for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For + a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for + the models and sub-models passed in ``models``. + """ + clean_models = [get_model(model) for model in models] + flat_models = get_flat_models_from_models(clean_models) + model_name_map = get_model_name_map(flat_models) + definitions = {} + output_schema: Dict[str, Any] = {} + if title: + output_schema['title'] = title + if description: + output_schema['description'] = description + for model in clean_models: + m_schema, m_definitions, m_nested_models = model_process_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + ) + definitions.update(m_definitions) + model_name = model_name_map[model] + definitions[model_name] = m_schema + if definitions: + output_schema['definitions'] = definitions + return output_schema + + +def model_schema( + model: Union[Type['BaseModel'], Type['Dataclass']], + by_alias: bool = True, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level + JSON key. + + :param model: a Pydantic model (a class that inherits from BaseModel) + :param by_alias: generate the schemas using the aliases defined, if any + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. 
But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema for the passed ``model`` + """ + model = get_model(model) + flat_models = get_flat_models_from_model(model) + model_name_map = get_model_name_map(flat_models) + model_name = model_name_map[model] + m_schema, m_definitions, nested_models = model_process_schema( + model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template + ) + if model_name in nested_models: + # model_name is in Nested models, it has circular references + m_definitions[model_name] = m_schema + m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False) + if m_definitions: + m_schema.update({'definitions': m_definitions}) + return m_schema + + +def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]: + + # If no title is explicitly set, we don't set title in the schema for enums. + # The behaviour is the same as `BaseModel` reference, where the default title + # is in the definitions part of the schema. 
+ schema_: Dict[str, Any] = {} + if field.field_info.title or not lenient_issubclass(field.type_, Enum): + schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ') + + if field.field_info.title: + schema_overrides = True + + if field.field_info.description: + schema_['description'] = field.field_info.description + schema_overrides = True + + if not field.required and field.default is not None and not is_callable_type(field.outer_type_): + schema_['default'] = encode_default(field.default) + schema_overrides = True + + return schema_, schema_overrides + + +def field_schema( + field: ModelField, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: TypeModelSet = None, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Process a Pydantic field and return a tuple with a JSON Schema for it as the first item. + Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field + is a model and has sub-models, and those sub-models don't have overrides (as ``title``, ``default``, etc), they + will be included in the definitions and referenced in the schema instead of included recursively. + + :param field: a Pydantic ``ModelField`` + :param by_alias: use the defined alias (if any) in the returned schema + :param model_name_map: used to generate the JSON Schema references to other models included in the definitions + :param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of + #/definitions/ will be used + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. 
+ :param known_models: used to solve circular references + :return: tuple of the schema for this field and additional definitions + """ + s, schema_overrides = get_field_info_schema(field) + + validation_schema = get_field_schema_validations(field) + if validation_schema: + s.update(validation_schema) + schema_overrides = True + + f_schema, f_definitions, f_nested_models = field_type_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models or set(), + ) + + # $ref will only be returned when there are no schema_overrides + if '$ref' in f_schema: + return f_schema, f_definitions, f_nested_models + else: + s.update(f_schema) + return s, f_definitions, f_nested_models + + +numeric_types = (int, float, Decimal) +_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ('max_length', numeric_types, 'maxLength'), + ('min_length', numeric_types, 'minLength'), + ('regex', str, 'pattern'), +) + +_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ('gt', numeric_types, 'exclusiveMinimum'), + ('lt', numeric_types, 'exclusiveMaximum'), + ('ge', numeric_types, 'minimum'), + ('le', numeric_types, 'maximum'), + ('multiple_of', numeric_types, 'multipleOf'), +) + + +def get_field_schema_validations(field: ModelField) -> Dict[str, Any]: + """ + Get the JSON Schema validation keywords for a ``field`` with an annotation of + a Pydantic ``FieldInfo`` with validation arguments. 
+ """ + f_schema: Dict[str, Any] = {} + + if lenient_issubclass(field.type_, Enum): + # schema is already updated by `enum_process_schema`; just update with field extra + if field.field_info.extra: + f_schema.update(field.field_info.extra) + return f_schema + + if lenient_issubclass(field.type_, (str, bytes)): + for attr_name, t, keyword in _str_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool): + for attr_name, t, keyword in _numeric_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if field.field_info is not None and field.field_info.const: + f_schema['const'] = field.default + if field.field_info.extra: + f_schema.update(field.field_info.extra) + modify_schema = getattr(field.outer_type_, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema + + +def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]: + """ + Process a set of models and generate unique names for them to be used as keys in the JSON Schema + definitions. By default the names are the same as the class name. But if two models in different Python + modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be + based on the Python module path for those conflicting models to prevent name collisions. 
+ + :param unique_models: a Python set of models + :return: dict mapping models to names + """ + name_model_map = {} + conflicting_names: Set[str] = set() + for model in unique_models: + model_name = normalize_name(model.__name__) + if model_name in conflicting_names: + model_name = get_long_model_name(model) + name_model_map[model_name] = model + elif model_name in name_model_map: + conflicting_names.add(model_name) + conflicting_model = name_model_map.pop(model_name) + name_model_map[get_long_model_name(conflicting_model)] = conflicting_model + name_model_map[get_long_model_name(model)] = model + else: + name_model_map[model_name] = model + return {v: k for k, v in name_model_map.items()} + + +def get_flat_models_from_model(model: Type['BaseModel'], known_models: TypeModelSet = None) -> TypeModelSet: + """ + Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass + model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also + subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), + the return value will be ``set([Foo, Bar, Baz])``. + + :param model: a Pydantic ``BaseModel`` subclass + :param known_models: used to solve circular references + :return: a set with the initial model and all its sub-models + """ + known_models = known_models or set() + flat_models: TypeModelSet = set() + flat_models.add(model) + known_models |= flat_models + fields = cast(Sequence[ModelField], model.__fields__.values()) + flat_models |= get_flat_models_from_fields(fields, known_models=known_models) + return flat_models + + +def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet: + """ + Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a sublcass of BaseModel + (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree. 
+ I.e. if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that + model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of + type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. + + :param field: a Pydantic ``ModelField`` + :param known_models: used to solve circular references + :return: a set with the model used in the declaration for this field, if any, and all its sub-models + """ + from .main import BaseModel + + flat_models: TypeModelSet = set() + + field_type = field.type_ + if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): + field_type = field_type.__pydantic_model__ + + if field.sub_fields and not lenient_issubclass(field_type, BaseModel): + flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models) + elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models: + flat_models |= get_flat_models_from_model(field_type, known_models=known_models) + elif lenient_issubclass(field_type, Enum): + flat_models.add(field_type) + return flat_models + + +def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet: + """ + Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel`` + (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree. + I.e. if you pass a the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and on of them has a + field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also + subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. 
+ + :param fields: a list of Pydantic ``ModelField``s + :param known_models: used to solve circular references + :return: a set with any model declared in the fields, and all their sub-models + """ + flat_models: TypeModelSet = set() + for field in fields: + flat_models |= get_flat_models_from_field(field, known_models=known_models) + return flat_models + + +def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet: + """ + Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass + a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has + a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. + """ + flat_models: TypeModelSet = set() + for model in models: + flat_models |= get_flat_models_from_model(model) + return flat_models + + +def get_long_model_name(model: TypeModelOrEnum) -> str: + return f'{model.__module__}__{model.__qualname__}'.replace('.', '__') + + +def field_type_schema( + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Used by ``field_schema()``, you probably should be using that function. + + Take a single ``field`` and generate the schema for its type only, not including additional + information as title, etc. Also return additional schema definitions, from sub-models. 
+ """ + from .main import BaseModel # noqa: F811 + + definitions = {} + nested_models: Set[str] = set() + f_schema: Dict[str, Any] + if field.shape in { + SHAPE_LIST, + SHAPE_TUPLE_ELLIPSIS, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_FROZENSET, + SHAPE_ITERABLE, + SHAPE_DEQUE, + }: + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + f_schema = {'type': 'array', 'items': items_schema} + if field.shape in {SHAPE_SET, SHAPE_FROZENSET}: + f_schema['uniqueItems'] = True + + elif field.shape in MAPPING_LIKE_SHAPES: + f_schema = {'type': 'object'} + key_field = cast(ModelField, field.key_field) + regex = getattr(key_field.type_, 'regex', None) + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if regex: + # Dict keys have a regex pattern + # items_schema might be a schema or empty dict, add it either way + f_schema['patternProperties'] = {regex.pattern: items_schema} + elif items_schema: + # The dict values are not simply Any, so they need a schema + f_schema['additionalProperties'] = items_schema + elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)): + sub_schema = [] + sub_fields = cast(List[ModelField], field.sub_fields) + for sf in sub_fields: + sf_schema, sf_definitions, sf_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sf_definitions) + 
nested_models.update(sf_nested_models) + sub_schema.append(sf_schema) + + sub_fields_len = len(sub_fields) + if field.shape == SHAPE_GENERIC: + all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema} + f_schema = {'allOf': [all_of_schemas]} + else: + f_schema = { + 'type': 'array', + 'minItems': sub_fields_len, + 'maxItems': sub_fields_len, + } + if sub_fields_len >= 1: + f_schema['items'] = sub_schema + else: + assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape + f_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + + # check field type to avoid repeated calls to the same __modify_schema__ method + if field.type_ != field.outer_type_: + if field.shape == SHAPE_GENERIC: + field_type = field.type_ + else: + field_type = field.outer_type_ + modify_schema = getattr(field_type, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema, definitions, nested_models + + +def model_process_schema( + model: TypeModelOrEnum, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: TypeModelSet = None, + field: Optional[ModelField] = None, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Used by ``model_schema()``, you probably should be using that function. + + Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The + sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All + the definitions are returned as the second value. 
+ """ + from inspect import getdoc, signature + + known_models = known_models or set() + if lenient_issubclass(model, Enum): + model = cast(Type[Enum], model) + s = enum_process_schema(model, field=field) + return s, {}, set() + model = cast(Type['BaseModel'], model) + s = {'title': model.__config__.title or model.__name__} + doc = getdoc(model) + if doc: + s['description'] = doc + known_models.add(model) + m_schema, m_definitions, nested_models = model_type_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + s.update(m_schema) + schema_extra = model.__config__.schema_extra + if callable(schema_extra): + if len(signature(schema_extra).parameters) == 1: + schema_extra(s) + else: + schema_extra(s, model) + else: + s.update(schema_extra) + return s, m_definitions, nested_models + + +def model_type_schema( + model: Type['BaseModel'], + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + You probably should be using ``model_schema()``, this function is indirectly used by that function. + + Take a single ``model`` and generate the schema for its type only, not including additional + information as title, etc. Also return additional schema definitions, from sub-models. 
+ """ + properties = {} + required = [] + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + for k, f in model.__fields__.items(): + try: + f_schema, f_definitions, f_nested_models = field_schema( + f, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + except SkipField as skip: + warnings.warn(skip.message, UserWarning) + continue + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if by_alias: + properties[f.alias] = f_schema + if f.required: + required.append(f.alias) + else: + properties[k] = f_schema + if f.required: + required.append(k) + if ROOT_KEY in properties: + out_schema = properties[ROOT_KEY] + out_schema['title'] = model.__config__.title or model.__name__ + else: + out_schema = {'type': 'object', 'properties': properties} + if required: + out_schema['required'] = required + if model.__config__.extra == 'forbid': + out_schema['additionalProperties'] = False + return out_schema, definitions, nested_models + + +def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]: + """ + Take a single `enum` and generate its schema. + + This is similar to the `model_process_schema` function, but applies to ``Enum`` objects. + """ + schema_: Dict[str, Any] = { + 'title': enum.__name__, + # Python assigns all enums a default docstring value of 'An enumeration', so + # all enums will have a description field even if not explicitly provided. + 'description': enum.__doc__ or 'An enumeration.', + # Add enum values and the enum field type to the schema. 
+ 'enum': [item.value for item in cast(Iterable[Enum], enum)], + } + + add_field_type_to_schema(enum, schema_) + + modify_schema = getattr(enum, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, schema_) + + return schema_ + + +def field_singleton_sub_fields_schema( + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you probably should be using that function. + + Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their + schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``. + """ + sub_fields = cast(List[ModelField], field.sub_fields) + definitions = {} + nested_models: Set[str] = set() + if len(sub_fields) == 1: + return field_type_schema( + sub_fields[0], + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + else: + s: Dict[str, Any] = {} + # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object + field_has_discriminator: bool = field.discriminator_key is not None + if field_has_discriminator: + assert field.sub_fields_mapping is not None + + discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {} + + for discriminator_value, sub_field in field.sub_fields_mapping.items(): + # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many + if is_union(get_origin(sub_field.type_)): + sub_models = get_sub_types(sub_field.type_) + discriminator_models_refs[discriminator_value] = { + model_name_map[sub_model]: get_schema_ref( + model_name_map[sub_model], ref_prefix, 
ref_template, False + ) + for sub_model in sub_models + } + else: + sub_field_type = sub_field.type_ + if hasattr(sub_field_type, '__pydantic_model__'): + sub_field_type = sub_field_type.__pydantic_model__ + + discriminator_model_name = model_name_map[sub_field_type] + discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False) + discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref'] + + s['discriminator'] = { + 'propertyName': field.discriminator_alias, + 'mapping': discriminator_models_refs, + } + + sub_field_schemas = [] + for sf in sub_fields: + sub_schema, sub_definitions, sub_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sub_definitions) + if schema_overrides and 'allOf' in sub_schema: + # if the sub_field is a referenced schema we only need the referenced + # object. Otherwise we will end up with several allOf inside anyOf/oneOf. + # See https://github.com/pydantic/pydantic/issues/1209 + sub_schema = sub_schema['allOf'][0] + + if sub_schema.keys() == {'discriminator', 'oneOf'}: + # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere + sub_schema.pop('discriminator') + sub_field_schemas.append(sub_schema) + nested_models.update(sub_nested_models) + s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas + return s, definitions, nested_models + + +# Order is important, e.g. subclasses of str must go before str +# this is used only for standard library types, custom types should use __modify_schema__ instead +field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] 
= ( + (Path, {'type': 'string', 'format': 'path'}), + (datetime, {'type': 'string', 'format': 'date-time'}), + (date, {'type': 'string', 'format': 'date'}), + (time, {'type': 'string', 'format': 'time'}), + (timedelta, {'type': 'number', 'format': 'time-delta'}), + (IPv4Network, {'type': 'string', 'format': 'ipv4network'}), + (IPv6Network, {'type': 'string', 'format': 'ipv6network'}), + (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}), + (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}), + (IPv4Address, {'type': 'string', 'format': 'ipv4'}), + (IPv6Address, {'type': 'string', 'format': 'ipv6'}), + (Pattern, {'type': 'string', 'format': 'regex'}), + (str, {'type': 'string'}), + (bytes, {'type': 'string', 'format': 'binary'}), + (bool, {'type': 'boolean'}), + (int, {'type': 'integer'}), + (float, {'type': 'number'}), + (Decimal, {'type': 'number'}), + (UUID, {'type': 'string', 'format': 'uuid'}), + (dict, {'type': 'object'}), + (list, {'type': 'array', 'items': {}}), + (tuple, {'type': 'array', 'items': {}}), + (set, {'type': 'array', 'items': {}, 'uniqueItems': True}), + (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}), +) + +json_scheme = {'type': 'string', 'format': 'json-string'} + + +def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None: + """ + Update the given `schema` with the type-specific metadata for the given `field_type`. + + This function looks through `field_class_to_schema` for a class that matches the given `field_type`, + and then modifies the given `schema` with the information from that type. 
+ """ + for type_, t_schema in field_class_to_schema: + # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class + if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern: + schema_.update(t_schema) + break + + +def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]: + if ref_prefix: + schema_ref = {'$ref': ref_prefix + name} + else: + schema_ref = {'$ref': ref_template.format(model=name)} + return {'allOf': [schema_ref]} if schema_overrides else schema_ref + + +def field_singleton_schema( # noqa: C901 (ignore complexity) + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you should probably be using that function. + + Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models. 
+ """ + from .main import BaseModel + + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + field_type = field.type_ + + # Recurse into this field if it contains sub_fields and is NOT a + # BaseModel OR that BaseModel is a const + if field.sub_fields and ( + (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel) + ): + return field_singleton_sub_fields_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type: + return {}, definitions, nested_models # no restrictions + if is_none_type(field_type): + return {'type': 'null'}, definitions, nested_models + if is_callable_type(field_type): + raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.') + f_schema: Dict[str, Any] = {} + if field.field_info is not None and field.field_info.const: + f_schema['const'] = field.default + + if is_literal_type(field_type): + values = all_literal_values(field_type) + + if len({v.__class__ for v in values}) > 1: + return field_schema( + multitypes_literal_field_for_schema(values, field), + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + + # All values have the same type + field_type = values[0].__class__ + f_schema['enum'] = list(values) + add_field_type_to_schema(field_type, f_schema) + elif lenient_issubclass(field_type, Enum): + enum_name = model_name_map[field_type] + f_schema, schema_overrides = get_field_info_schema(field, schema_overrides) + f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides)) + definitions[enum_name] = enum_process_schema(field_type, field=field) + elif is_namedtuple(field_type): 
+ sub_schema, *_ = model_process_schema( + field_type.__pydantic_model__, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + items_schemas = list(sub_schema['properties'].values()) + f_schema.update( + { + 'type': 'array', + 'items': items_schemas, + 'minItems': len(items_schemas), + 'maxItems': len(items_schemas), + } + ) + elif not hasattr(field_type, '__pydantic_model__'): + add_field_type_to_schema(field_type, f_schema) + + modify_schema = getattr(field_type, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + + if f_schema: + return f_schema, definitions, nested_models + + # Handle dataclass-based models + if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): + field_type = field_type.__pydantic_model__ + + if issubclass(field_type, BaseModel): + model_name = model_name_map[field_type] + if field_type not in known_models: + sub_schema, sub_definitions, sub_nested_models = model_process_schema( + field_type, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + definitions.update(sub_definitions) + definitions[model_name] = sub_schema + nested_models.update(sub_nested_models) + else: + nested_models.add(model_name) + schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides) + return schema_ref, definitions, nested_models + + # For generics with no args + args = get_args(field_type) + if args is not None and not args and Generic in field_type.__bases__: + return f_schema, definitions, nested_models + + raise ValueError(f'Value not declarable with JSON Schema, field: {field}') + + +def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField: + """ + To support `Literal` with values of different types, we split it 
into multiple `Literal` with same type + e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]` + """ + literal_distinct_types = defaultdict(list) + for v in values: + literal_distinct_types[v.__class__].append(v) + distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values()) + + return ModelField( + name=field.name, + type_=Union[tuple(distinct_literals)], # type: ignore + class_validators=field.class_validators, + model_config=field.model_config, + default=field.default, + required=field.required, + alias=field.alias, + field_info=field.field_info, + ) + + +def encode_default(dft: Any) -> Any: + if isinstance(dft, Enum): + return dft.value + elif isinstance(dft, (int, float, str)): + return dft + elif isinstance(dft, (list, tuple)): + t = dft.__class__ + seq_args = (encode_default(v) for v in dft) + return t(*seq_args) if is_namedtuple(t) else t(seq_args) + elif isinstance(dft, dict): + return {encode_default(k): encode_default(v) for k, v in dft.items()} + elif dft is None: + return None + else: + return pydantic_encoder(dft) + + +_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal} + + +def get_annotation_from_field_info( + annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False +) -> Type[Any]: + """ + Get an annotation with validation implemented for numbers and strings based on the field_info. 
+ :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :param field_name: name of the field for use in error messages + :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment + :return: the same ``annotation`` if unmodified or a new annotation with validation in place + """ + constraints = field_info.get_constraints() + used_constraints: Set[str] = set() + if constraints: + annotation, used_constraints = get_annotation_with_constraints(annotation, field_info) + if validate_assignment: + used_constraints.add('allow_mutation') + + unused_constraints = constraints - used_constraints + if unused_constraints: + raise ValueError( + f'On field "{field_name}" the following field constraints are set but not enforced: ' + f'{", ".join(unused_constraints)}. ' + f'\nFor more details see https://pydantic-docs.helpmanual.io/usage/schema/#unenforced-field-constraints' + ) + + return annotation + + +def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901 + """ + Get an annotation with used constraints implemented for numbers and strings based on the field_info. + + :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints. + """ + used_constraints: Set[str] = set() + + def go(type_: Any) -> Type[Any]: + if ( + is_literal_type(type_) + or isinstance(type_, ForwardRef) + or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet)) + ): + return type_ + origin = get_origin(type_) + if origin is not None: + args: Tuple[Any, ...] 
= get_args(type_) + if any(isinstance(a, ForwardRef) for a in args): + # forward refs cause infinite recursion below + return type_ + + if origin is Annotated: + return go(args[0]) + if is_union(origin): + return Union[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, List) and ( + field_info.min_items is not None + or field_info.max_items is not None + or field_info.unique_items is not None + ): + used_constraints.update({'min_items', 'max_items', 'unique_items'}) + return conlist( + go(args[0]), + min_items=field_info.min_items, + max_items=field_info.max_items, + unique_items=field_info.unique_items, + ) + + if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({'min_items', 'max_items'}) + return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({'min_items', 'max_items'}) + return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + for t in (Tuple, List, Set, FrozenSet, Sequence): + if issubclass(origin, t): # type: ignore + return t[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, Dict): + return Dict[args[0], go(args[1])] # type: ignore + + attrs: Optional[Tuple[str, ...]] = None + constraint_func: Optional[Callable[..., type]] = None + if isinstance(type_, type): + if issubclass(type_, (SecretStr, SecretBytes)): + attrs = ('max_length', 'min_length') + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)): + attrs = ('max_length', 'min_length', 'regex') + if issubclass(type_, StrictStr): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = constr + elif 
issubclass(type_, bytes): + attrs = ('max_length', 'min_length', 'regex') + if issubclass(type_, StrictBytes): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = conbytes + elif issubclass(type_, numeric_types) and not issubclass( + type_, + ( + ConstrainedInt, + ConstrainedFloat, + ConstrainedDecimal, + ConstrainedList, + ConstrainedSet, + ConstrainedFrozenSet, + bool, + ), + ): + # Is numeric type + attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of') + if issubclass(type_, float): + attrs += ('allow_inf_nan',) + if issubclass(type_, Decimal): + attrs += ('max_digits', 'decimal_places') + numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch + constraint_func = _map_types_constraint[numeric_type] + + if attrs: + used_constraints.update(set(attrs)) + kwargs = { + attr_name: attr + for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs) + if attr is not None + } + if kwargs: + constraint_func = cast(Callable[..., type], constraint_func) + return constraint_func(**kwargs) + return type_ + + return go(annotation), used_constraints + + +def normalize_name(name: str) -> str: + """ + Normalizes the given name. This can be applied to either a model *or* enum. + """ + return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name) + + +class SkipField(Exception): + """ + Utility exception used to exclude fields from schema. 
+ """ + + def __init__(self, message: str) -> None: + self.message = message diff --git a/lib/pydantic/tools.py b/lib/pydantic/tools.py index 8e317c92..9cdb4538 100644 --- a/lib/pydantic/tools.py +++ b/lib/pydantic/tools.py @@ -1,4 +1,92 @@ -"""The `tools` module is a backport module from V1.""" -from ._migration import getattr_migration +import json +from functools import lru_cache +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union -__getattr__ = getattr_migration(__name__) +from .parse import Protocol, load_file, load_str_bytes +from .types import StrBytes +from .typing import display_as_type + +__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of') + +NameFactory = Union[str, Callable[[Type[Any]], str]] + +if TYPE_CHECKING: + from .typing import DictStrAny + + +def _generate_parsing_type_name(type_: Any) -> str: + return f'ParsingModel[{display_as_type(type_)}]' + + +@lru_cache(maxsize=2048) +def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any: + from pydantic.main import create_model + + if type_name is None: + type_name = _generate_parsing_type_name + if not isinstance(type_name, str): + type_name = type_name(type_) + return create_model(type_name, __root__=(type_, ...)) + + +T = TypeVar('T') + + +def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T: + model_type = _get_parsing_type(type_, type_name=type_name) # type: ignore[arg-type] + return model_type(__root__=obj).__root__ + + +def parse_file_as( + type_: Type[T], + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, + type_name: Optional[NameFactory] = None, +) -> T: + obj = load_file( + path, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=json_loads, 
+ ) + return parse_obj_as(type_, obj, type_name=type_name) + + +def parse_raw_as( + type_: Type[T], + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, + type_name: Optional[NameFactory] = None, +) -> T: + obj = load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=json_loads, + ) + return parse_obj_as(type_, obj, type_name=type_name) + + +def schema_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_kwargs: Any) -> 'DictStrAny': + """Generate a JSON schema (as dict) for the passed model or dynamically generated one""" + return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs) + + +def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str: + """Generate a JSON schema (as JSON) for the passed model or dynamically generated one""" + return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs) diff --git a/lib/pydantic/type_adapter.py b/lib/pydantic/type_adapter.py deleted file mode 100644 index 366262fe..00000000 --- a/lib/pydantic/type_adapter.py +++ /dev/null @@ -1,460 +0,0 @@ -"""Type adapter specification.""" -from __future__ import annotations as _annotations - -import sys -from dataclasses import is_dataclass -from typing import TYPE_CHECKING, Any, Dict, Generic, Iterable, Set, TypeVar, Union, cast, final, overload - -from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator, Some -from typing_extensions import Literal, get_args, is_typeddict - -from pydantic.errors import PydanticUserError -from pydantic.main import BaseModel - -from ._internal import _config, _generate_schema, _typing_extra -from .config import ConfigDict -from .json_schema import ( - DEFAULT_REF_TEMPLATE, - GenerateJsonSchema, - JsonSchemaKeyT, - JsonSchemaMode, - JsonSchemaValue, -) 
-from .plugin._schema_validator import create_schema_validator - -T = TypeVar('T') - - -if TYPE_CHECKING: - # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope - IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any]] - - -def _get_schema(type_: Any, config_wrapper: _config.ConfigWrapper, parent_depth: int) -> CoreSchema: - """`BaseModel` uses its own `__module__` to find out where it was defined - and then looks for symbols to resolve forward references in those globals. - On the other hand this function can be called with arbitrary objects, - including type aliases, where `__module__` (always `typing.py`) is not useful. - So instead we look at the globals in our parent stack frame. - - This works for the case where this function is called in a module that - has the target of forward references in its scope, but - does not always work for more complex cases. - - For example, take the following: - - a.py - ```python - from typing import Dict, List - - IntList = List[int] - OuterDict = Dict[str, 'IntList'] - ``` - - b.py - ```python test="skip" - from a import OuterDict - - from pydantic import TypeAdapter - - IntList = int # replaces the symbol the forward reference is looking for - v = TypeAdapter(OuterDict) - v({'x': 1}) # should fail but doesn't - ``` - - If `OuterDict` were a `BaseModel`, this would work because it would resolve - the forward reference within the `a.py` namespace. - But `TypeAdapter(OuterDict)` can't determine what module `OuterDict` came from. - - In other words, the assumption that _all_ forward references exist in the - module we are being called from is not technically always true. - Although most of the time it is and it works fine for recursive models and such, - `BaseModel`'s behavior isn't perfect either and _can_ break in similar ways, - so there is no right or wrong between the two. - - But at the very least this behavior is _subtly_ different from `BaseModel`'s. 
- """ - local_ns = _typing_extra.parent_frame_namespace(parent_depth=parent_depth) - global_ns = sys._getframe(max(parent_depth - 1, 1)).f_globals.copy() - global_ns.update(local_ns or {}) - gen = _generate_schema.GenerateSchema(config_wrapper, types_namespace=global_ns, typevars_map={}) - schema = gen.generate_schema(type_) - schema = gen.clean_schema(schema) - return schema - - -def _getattr_no_parents(obj: Any, attribute: str) -> Any: - """Returns the attribute value without attempting to look up attributes from parent types.""" - if hasattr(obj, '__dict__'): - try: - return obj.__dict__[attribute] - except KeyError: - pass - - slots = getattr(obj, '__slots__', None) - if slots is not None and attribute in slots: - return getattr(obj, attribute) - else: - raise AttributeError(attribute) - - -def _type_has_config(type_: Any) -> bool: - """Returns whether the type has config.""" - try: - return issubclass(type_, BaseModel) or is_dataclass(type_) or is_typeddict(type_) - except TypeError: - # type is not a class - return False - - -@final -class TypeAdapter(Generic[T]): - """Usage docs: https://docs.pydantic.dev/2.6/concepts/type_adapter/ - - Type adapters provide a flexible way to perform validation and serialization based on a Python type. - - A `TypeAdapter` instance exposes some of the functionality from `BaseModel` instance methods - for types that do not have such methods (such as dataclasses, primitive types, and more). - - **Note:** `TypeAdapter` instances are not types, and cannot be used as type annotations for fields. - - Attributes: - core_schema: The core schema for the type. - validator (SchemaValidator): The schema validator for the type. - serializer: The schema serializer for the type. - """ - - @overload - def __init__( - self, - type: type[T], - *, - config: ConfigDict | None = ..., - _parent_depth: int = ..., - module: str | None = ..., - ) -> None: - ... - - # This second overload is for unsupported special forms (such as Union). 
`pyright` handles them fine, but `mypy` does not match - # them against `type: type[T]`, so an explicit overload with `type: T` is needed. - @overload - def __init__( # pyright: ignore[reportOverlappingOverload] - self, - type: T, - *, - config: ConfigDict | None = ..., - _parent_depth: int = ..., - module: str | None = ..., - ) -> None: - ... - - def __init__( - self, - type: type[T] | T, - *, - config: ConfigDict | None = None, - _parent_depth: int = 2, - module: str | None = None, - ) -> None: - """Initializes the TypeAdapter object. - - Args: - type: The type associated with the `TypeAdapter`. - config: Configuration for the `TypeAdapter`, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict]. - _parent_depth: depth at which to search the parent namespace to construct the local namespace. - module: The module that passes to plugin if provided. - - !!! note - You cannot use the `config` argument when instantiating a `TypeAdapter` if the type you're using has its own - config that cannot be overridden (ex: `BaseModel`, `TypedDict`, and `dataclass`). A - [`type-adapter-config-unused`](../errors/usage_errors.md#type-adapter-config-unused) error will be raised in this case. - - !!! note - The `_parent_depth` argument is named with an underscore to suggest its private nature and discourage use. - It may be deprecated in a minor version, so we only recommend using it if you're - comfortable with potential change in behavior / support. - - ??? tip "Compatibility with `mypy`" - Depending on the type used, `mypy` might raise an error when instantiating a `TypeAdapter`. As a workaround, you can explicitly - annotate your variable: - - ```py - from typing import Union - - from pydantic import TypeAdapter - - ta: TypeAdapter[Union[str, int]] = TypeAdapter(Union[str, int]) # type: ignore[arg-type] - ``` - - Returns: - A type adapter configured for the specified `type`. 
- """ - type_is_annotated: bool = _typing_extra.is_annotated(type) - annotated_type: Any = get_args(type)[0] if type_is_annotated else None - type_has_config: bool = _type_has_config(annotated_type if type_is_annotated else type) - - if type_has_config and config is not None: - raise PydanticUserError( - 'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict.' - ' These types can have their own config and setting the config via the `config`' - ' parameter to TypeAdapter will not override it, thus the `config` you passed to' - ' TypeAdapter becomes meaningless, which is probably not what you want.', - code='type-adapter-config-unused', - ) - - config_wrapper = _config.ConfigWrapper(config) - - core_schema: CoreSchema - try: - core_schema = _getattr_no_parents(type, '__pydantic_core_schema__') - except AttributeError: - core_schema = _get_schema(type, config_wrapper, parent_depth=_parent_depth + 1) - - core_config = config_wrapper.core_config(None) - validator: SchemaValidator - try: - validator = _getattr_no_parents(type, '__pydantic_validator__') - except AttributeError: - if module is None: - f = sys._getframe(1) - module = cast(str, f.f_globals.get('__name__', '')) - validator = create_schema_validator( - core_schema, type, module, str(type), 'TypeAdapter', core_config, config_wrapper.plugin_settings - ) # type: ignore - - serializer: SchemaSerializer - try: - serializer = _getattr_no_parents(type, '__pydantic_serializer__') - except AttributeError: - serializer = SchemaSerializer(core_schema, core_config) - - self.core_schema = core_schema - self.validator = validator - self.serializer = serializer - - def validate_python( - self, - __object: Any, - *, - strict: bool | None = None, - from_attributes: bool | None = None, - context: dict[str, Any] | None = None, - ) -> T: - """Validate a Python object against the model. - - Args: - __object: The Python object to validate against the model. - strict: Whether to strictly check types. 
- from_attributes: Whether to extract data from object attributes. - context: Additional context to pass to the validator. - - !!! note - When using `TypeAdapter` with a Pydantic `dataclass`, the use of the `from_attributes` - argument is not supported. - - Returns: - The validated object. - """ - return self.validator.validate_python(__object, strict=strict, from_attributes=from_attributes, context=context) - - def validate_json( - self, __data: str | bytes, *, strict: bool | None = None, context: dict[str, Any] | None = None - ) -> T: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/json/#json-parsing - - Validate a JSON string or bytes against the model. - - Args: - __data: The JSON data to validate against the model. - strict: Whether to strictly check types. - context: Additional context to use during validation. - - Returns: - The validated object. - """ - return self.validator.validate_json(__data, strict=strict, context=context) - - def validate_strings(self, __obj: Any, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> T: - """Validate object contains string data against the model. - - Args: - __obj: The object contains string data to validate. - strict: Whether to strictly check types. - context: Additional context to use during validation. - - Returns: - The validated object. - """ - return self.validator.validate_strings(__obj, strict=strict, context=context) - - def get_default_value(self, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> Some[T] | None: - """Get the default value for the wrapped type. - - Args: - strict: Whether to strictly check types. - context: Additional context to pass to the validator. - - Returns: - The default value wrapped in a `Some` if there is one or None if not. 
- """ - return self.validator.get_default_value(strict=strict, context=context) - - def dump_python( - self, - __instance: T, - *, - mode: Literal['json', 'python'] = 'python', - include: IncEx | None = None, - exclude: IncEx | None = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool = True, - ) -> Any: - """Dump an instance of the adapted type to a Python object. - - Args: - __instance: The Python object to serialize. - mode: The output format. - include: Fields to include in the output. - exclude: Fields to exclude from the output. - by_alias: Whether to use alias names for field names. - exclude_unset: Whether to exclude unset fields. - exclude_defaults: Whether to exclude fields with default values. - exclude_none: Whether to exclude fields with None values. - round_trip: Whether to output the serialized data in a way that is compatible with deserialization. - warnings: Whether to display serialization warnings. - - Returns: - The serialized object. - """ - return self.serializer.to_python( - __instance, - mode=mode, - by_alias=by_alias, - include=include, - exclude=exclude, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - round_trip=round_trip, - warnings=warnings, - ) - - def dump_json( - self, - __instance: T, - *, - indent: int | None = None, - include: IncEx | None = None, - exclude: IncEx | None = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool = True, - ) -> bytes: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/json/#json-serialization - - Serialize an instance of the adapted type to JSON. - - Args: - __instance: The instance to be serialized. - indent: Number of spaces for JSON indentation. - include: Fields to include. - exclude: Fields to exclude. 
- by_alias: Whether to use alias names for field names. - exclude_unset: Whether to exclude unset fields. - exclude_defaults: Whether to exclude fields with default values. - exclude_none: Whether to exclude fields with a value of `None`. - round_trip: Whether to serialize and deserialize the instance to ensure round-tripping. - warnings: Whether to emit serialization warnings. - - Returns: - The JSON representation of the given instance as bytes. - """ - return self.serializer.to_json( - __instance, - indent=indent, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - round_trip=round_trip, - warnings=warnings, - ) - - def json_schema( - self, - *, - by_alias: bool = True, - ref_template: str = DEFAULT_REF_TEMPLATE, - schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, - mode: JsonSchemaMode = 'validation', - ) -> dict[str, Any]: - """Generate a JSON schema for the adapted type. - - Args: - by_alias: Whether to use alias names for field names. - ref_template: The format string used for generating $ref strings. - schema_generator: The generator class used for creating the schema. - mode: The mode to use for schema generation. - - Returns: - The JSON schema for the model as a dictionary. - """ - schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) - return schema_generator_instance.generate(self.core_schema, mode=mode) - - @staticmethod - def json_schemas( - __inputs: Iterable[tuple[JsonSchemaKeyT, JsonSchemaMode, TypeAdapter[Any]]], - *, - by_alias: bool = True, - title: str | None = None, - description: str | None = None, - ref_template: str = DEFAULT_REF_TEMPLATE, - schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, - ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]: - """Generate a JSON schema including definitions from multiple type adapters. 
- - Args: - __inputs: Inputs to schema generation. The first two items will form the keys of the (first) - output mapping; the type adapters will provide the core schemas that get converted into - definitions in the output JSON schema. - by_alias: Whether to use alias names. - title: The title for the schema. - description: The description for the schema. - ref_template: The format string used for generating $ref strings. - schema_generator: The generator class used for creating the schema. - - Returns: - A tuple where: - - - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and - whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have - JsonRef references to definitions that are defined in the second returned element.) - - The second element is a JSON schema containing all definitions referenced in the first returned - element, along with the optional title and description keys. - - """ - schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) - - inputs = [(key, mode, adapter.core_schema) for key, mode, adapter in __inputs] - - json_schemas_map, definitions = schema_generator_instance.generate_definitions(inputs) - - json_schema: dict[str, Any] = {} - if definitions: - json_schema['$defs'] = definitions - if title: - json_schema['title'] = title - if description: - json_schema['description'] = description - - return json_schemas_map, json_schema diff --git a/lib/pydantic/types.py b/lib/pydantic/types.py index c2534c88..f98dba3d 100644 --- a/lib/pydantic/types.py +++ b/lib/pydantic/types.py @@ -1,14 +1,12 @@ -"""The types module contains custom types used by pydantic.""" -from __future__ import annotations as _annotations - -import base64 -import dataclasses as _dataclasses +import abc +import math import re -from datetime import date, datetime +import warnings +from datetime import date from decimal import Decimal from enum import Enum from 
pathlib import Path -from types import ModuleType +from types import new_class from typing import ( TYPE_CHECKING, Any, @@ -16,56 +14,78 @@ from typing import ( ClassVar, Dict, FrozenSet, - Generic, - Hashable, - Iterator, List, + Optional, + Pattern, Set, + Tuple, + Type, TypeVar, Union, cast, + overload, ) from uuid import UUID +from weakref import WeakSet -import annotated_types -from annotated_types import BaseMetadata, MaxLen, MinLen -from pydantic_core import CoreSchema, PydanticCustomError, core_schema -from typing_extensions import Annotated, Literal, Protocol, TypeAlias, TypeAliasType, deprecated - -from ._internal import ( - _core_utils, - _fields, - _internal_dataclass, - _typing_extra, - _utils, - _validators, +from . import errors +from .datetime_parse import parse_date +from .utils import import_string, update_not_none +from .validators import ( + bytes_validator, + constr_length_validator, + constr_lower, + constr_strip_whitespace, + constr_upper, + decimal_validator, + float_finite_validator, + float_validator, + frozenset_validator, + int_validator, + list_validator, + number_multiple_validator, + number_size_validator, + path_exists_validator, + path_validator, + set_validator, + str_validator, + strict_bytes_validator, + strict_float_validator, + strict_int_validator, + strict_str_validator, ) -from ._migration import getattr_migration -from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler -from .errors import PydanticUserError -from .json_schema import JsonSchemaValue -from .warnings import PydanticDeprecatedSince20 -__all__ = ( - 'Strict', +__all__ = [ + 'NoneStr', + 'NoneBytes', + 'StrBytes', + 'NoneStrBytes', 'StrictStr', + 'ConstrainedBytes', 'conbytes', + 'ConstrainedList', 'conlist', + 'ConstrainedSet', 'conset', + 'ConstrainedFrozenSet', 'confrozenset', + 'ConstrainedStr', 'constr', - 'ImportString', + 'PyObject', + 'ConstrainedInt', 'conint', 'PositiveInt', 'NegativeInt', 'NonNegativeInt', 'NonPositiveInt', + 
'ConstrainedFloat', 'confloat', 'PositiveFloat', 'NegativeFloat', 'NonNegativeFloat', 'NonPositiveFloat', 'FiniteFloat', + 'ConstrainedDecimal', 'condecimal', 'UUID1', 'UUID3', @@ -73,8 +93,9 @@ __all__ = ( 'UUID5', 'FilePath', 'DirectoryPath', - 'NewPath', 'Json', + 'JsonWrapper', + 'SecretField', 'SecretStr', 'SecretBytes', 'StrictBool', @@ -85,1512 +106,845 @@ __all__ = ( 'ByteSize', 'PastDate', 'FutureDate', - 'PastDatetime', - 'FutureDatetime', + 'ConstrainedDate', 'condate', - 'AwareDatetime', - 'NaiveDatetime', - 'AllowInfNan', - 'EncoderProtocol', - 'EncodedBytes', - 'EncodedStr', - 'Base64Encoder', - 'Base64Bytes', - 'Base64Str', - 'Base64UrlBytes', - 'Base64UrlStr', - 'GetPydanticSchema', - 'StringConstraints', - 'Tag', - 'Discriminator', - 'JsonValue', - 'OnErrorOmit', -) +] +NoneStr = Optional[str] +NoneBytes = Optional[bytes] +StrBytes = Union[str, bytes] +NoneStrBytes = Optional[StrBytes] +OptionalInt = Optional[int] +OptionalIntFloat = Union[OptionalInt, float] +OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal] +OptionalDate = Optional[date] +StrIntFloat = Union[str, int, float] + +if TYPE_CHECKING: + from typing_extensions import Annotated + + from .dataclasses import Dataclass + from .main import BaseModel + from .typing import CallableGenerator + + ModelOrDc = Type[Union[BaseModel, Dataclass]] T = TypeVar('T') +_DEFINED_TYPES: 'WeakSet[type]' = WeakSet() -@_dataclasses.dataclass -class Strict(_fields.PydanticMetadata, BaseMetadata): - """Usage docs: https://docs.pydantic.dev/2.6/concepts/strict_mode/#strict-mode-with-annotated-strict +@overload +def _registered(typ: Type[T]) -> Type[T]: + pass - A field metadata class to indicate that a field should be validated in strict mode. - Attributes: - strict: Whether to validate the field in strict mode. 
+@overload +def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta': + pass - Example: - ```python - from typing_extensions import Annotated - from pydantic.types import Strict +def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']: + # In order to generate valid examples of constrained types, Hypothesis needs + # to inspect the type object - so we keep a weakref to each contype object + # until it can be registered. When (or if) our Hypothesis plugin is loaded, + # it monkeypatches this function. + # If Hypothesis is never used, the total effect is to keep a weak reference + # which has minimal memory usage and doesn't even affect garbage collection. + _DEFINED_TYPES.add(typ) + return typ - StrictBool = Annotated[bool, Strict()] - ``` - """ - strict: bool = True +class ConstrainedNumberMeta(type): + def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore + new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) - def __hash__(self) -> int: - return hash(self.strict) + if new_cls.gt is not None and new_cls.ge is not None: + raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') + if new_cls.lt is not None and new_cls.le is not None: + raise errors.ConfigError('bounds lt and le cannot be specified at the same time') + + return _registered(new_cls) # type: ignore # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -StrictBool = Annotated[bool, Strict()] -"""A boolean that must be either ``True`` or ``False``.""" +if TYPE_CHECKING: + StrictBool = bool +else: + + class StrictBool(int): + """ + StrictBool to allow for bools which are not type-coerced. 
+ """ + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='boolean') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> bool: + """ + Ensure that we only allow bools. + """ + if isinstance(value, bool): + return value + + raise errors.StrictBoolError() + # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +class ConstrainedInt(int, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalInt = None + ge: OptionalInt = None + lt: OptionalInt = None + le: OptionalInt = None + multiple_of: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_int_validator if cls.strict else int_validator + yield number_size_validator + yield number_multiple_validator + + def conint( - *, - strict: bool | None = None, - gt: int | None = None, - ge: int | None = None, - lt: int | None = None, - le: int | None = None, - multiple_of: int | None = None, -) -> type[int]: - """ - !!! warning "Discouraged" - This function is **discouraged** in favor of using - [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with - [`Field`][pydantic.fields.Field] instead. - - This function will be **deprecated** in Pydantic 3.0. - - The reason is that `conint` returns a type, which doesn't play well with static analysis tools. 
- - === ":x: Don't do this" - ```py - from pydantic import BaseModel, conint - - class Foo(BaseModel): - bar: conint(strict=True, gt=0) - ``` - - === ":white_check_mark: Do this" - ```py - from typing_extensions import Annotated - - from pydantic import BaseModel, Field - - class Foo(BaseModel): - bar: Annotated[int, Field(strict=True, gt=0)] - ``` - - A wrapper around `int` that allows for additional constraints. - - Args: - strict: Whether to validate the integer in strict mode. Defaults to `None`. - gt: The value must be greater than this. - ge: The value must be greater than or equal to this. - lt: The value must be less than this. - le: The value must be less than or equal to this. - multiple_of: The value must be a multiple of this. - - Returns: - The wrapped integer type. - - ```py - from pydantic import BaseModel, ValidationError, conint - - class ConstrainedExample(BaseModel): - constrained_int: conint(gt=1) - - m = ConstrainedExample(constrained_int=2) - print(repr(m)) - #> ConstrainedExample(constrained_int=2) - - try: - ConstrainedExample(constrained_int=0) - except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'greater_than', - 'loc': ('constrained_int',), - 'msg': 'Input should be greater than 1', - 'input': 0, - 'ctx': {'gt': 1}, - 'url': 'https://errors.pydantic.dev/2/v/greater_than', - } - ] - ''' - ``` - - """ # noqa: D212 - return Annotated[ - int, - Strict(strict) if strict is not None else None, - annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), - annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None, - ] + *, strict: bool = False, gt: int = None, ge: int = None, lt: int = None, le: int = None, multiple_of: int = None +) -> Type[int]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) + return type('ConstrainedIntValue', (ConstrainedInt,), namespace) -PositiveInt = Annotated[int, 
annotated_types.Gt(0)] -"""An integer that must be greater than zero. +if TYPE_CHECKING: + PositiveInt = int + NegativeInt = int + NonPositiveInt = int + NonNegativeInt = int + StrictInt = int +else: -```py -from pydantic import BaseModel, PositiveInt, ValidationError + class PositiveInt(ConstrainedInt): + gt = 0 -class Model(BaseModel): - positive_int: PositiveInt + class NegativeInt(ConstrainedInt): + lt = 0 -m = Model(positive_int=1) -print(repr(m)) -#> Model(positive_int=1) + class NonPositiveInt(ConstrainedInt): + le = 0 -try: - Model(positive_int=-1) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'greater_than', - 'loc': ('positive_int',), - 'msg': 'Input should be greater than 0', - 'input': -1, - 'ctx': {'gt': 0}, - 'url': 'https://errors.pydantic.dev/2/v/greater_than', - } - ] - ''' -``` -""" -NegativeInt = Annotated[int, annotated_types.Lt(0)] -"""An integer that must be less than zero. + class NonNegativeInt(ConstrainedInt): + ge = 0 -```py -from pydantic import BaseModel, NegativeInt, ValidationError + class StrictInt(ConstrainedInt): + strict = True -class Model(BaseModel): - negative_int: NegativeInt - -m = Model(negative_int=-1) -print(repr(m)) -#> Model(negative_int=-1) - -try: - Model(negative_int=1) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'less_than', - 'loc': ('negative_int',), - 'msg': 'Input should be less than 0', - 'input': 1, - 'ctx': {'lt': 0}, - 'url': 'https://errors.pydantic.dev/2/v/less_than', - } - ] - ''' -``` -""" -NonPositiveInt = Annotated[int, annotated_types.Le(0)] -"""An integer that must be less than or equal to zero. 
- -```py -from pydantic import BaseModel, NonPositiveInt, ValidationError - -class Model(BaseModel): - non_positive_int: NonPositiveInt - -m = Model(non_positive_int=0) -print(repr(m)) -#> Model(non_positive_int=0) - -try: - Model(non_positive_int=1) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'less_than_equal', - 'loc': ('non_positive_int',), - 'msg': 'Input should be less than or equal to 0', - 'input': 1, - 'ctx': {'le': 0}, - 'url': 'https://errors.pydantic.dev/2/v/less_than_equal', - } - ] - ''' -``` -""" -NonNegativeInt = Annotated[int, annotated_types.Ge(0)] -"""An integer that must be greater than or equal to zero. - -```py -from pydantic import BaseModel, NonNegativeInt, ValidationError - -class Model(BaseModel): - non_negative_int: NonNegativeInt - -m = Model(non_negative_int=0) -print(repr(m)) -#> Model(non_negative_int=0) - -try: - Model(non_negative_int=-1) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'greater_than_equal', - 'loc': ('non_negative_int',), - 'msg': 'Input should be greater than or equal to 0', - 'input': -1, - 'ctx': {'ge': 0}, - 'url': 'https://errors.pydantic.dev/2/v/greater_than_equal', - } - ] - ''' -``` -""" -StrictInt = Annotated[int, Strict()] -"""An integer that must be validated in strict mode. 
- -```py -from pydantic import BaseModel, StrictInt, ValidationError - -class StrictIntModel(BaseModel): - strict_int: StrictInt - -try: - StrictIntModel(strict_int=3.14159) -except ValidationError as e: - print(e) - ''' - 1 validation error for StrictIntModel - strict_int - Input should be a valid integer [type=int_type, input_value=3.14159, input_type=float] - ''' -``` -""" # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -@_dataclasses.dataclass -class AllowInfNan(_fields.PydanticMetadata): - """A field metadata class to indicate that a field should allow ``-inf``, ``inf``, and ``nan``.""" +class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalIntFloat = None + ge: OptionalIntFloat = None + lt: OptionalIntFloat = None + le: OptionalIntFloat = None + multiple_of: OptionalIntFloat = None + allow_inf_nan: Optional[bool] = None - allow_inf_nan: bool = True + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + # Modify constraints to account for differences between IEEE floats and JSON + if field_schema.get('exclusiveMinimum') == -math.inf: + del field_schema['exclusiveMinimum'] + if field_schema.get('minimum') == -math.inf: + del field_schema['minimum'] + if field_schema.get('exclusiveMaximum') == math.inf: + del field_schema['exclusiveMaximum'] + if field_schema.get('maximum') == math.inf: + del field_schema['maximum'] - def __hash__(self) -> int: - return hash(self.allow_inf_nan) + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_float_validator if cls.strict else float_validator + yield number_size_validator + yield number_multiple_validator + yield float_finite_validator def confloat( *, - strict: bool | None = None, - gt: float | None = None, - ge: float | None = 
None, - lt: float | None = None, - le: float | None = None, - multiple_of: float | None = None, - allow_inf_nan: bool | None = None, -) -> type[float]: - """ - !!! warning "Discouraged" - This function is **discouraged** in favor of using - [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with - [`Field`][pydantic.fields.Field] instead. - - This function will be **deprecated** in Pydantic 3.0. - - The reason is that `confloat` returns a type, which doesn't play well with static analysis tools. - - === ":x: Don't do this" - ```py - from pydantic import BaseModel, confloat - - class Foo(BaseModel): - bar: confloat(strict=True, gt=0) - ``` - - === ":white_check_mark: Do this" - ```py - from typing_extensions import Annotated - - from pydantic import BaseModel, Field - - class Foo(BaseModel): - bar: Annotated[float, Field(strict=True, gt=0)] - ``` - - A wrapper around `float` that allows for additional constraints. - - Args: - strict: Whether to validate the float in strict mode. - gt: The value must be greater than this. - ge: The value must be greater than or equal to this. - lt: The value must be less than this. - le: The value must be less than or equal to this. - multiple_of: The value must be a multiple of this. - allow_inf_nan: Whether to allow `-inf`, `inf`, and `nan`. - - Returns: - The wrapped float type. 
- - ```py - from pydantic import BaseModel, ValidationError, confloat - - class ConstrainedExample(BaseModel): - constrained_float: confloat(gt=1.0) - - m = ConstrainedExample(constrained_float=1.1) - print(repr(m)) - #> ConstrainedExample(constrained_float=1.1) - - try: - ConstrainedExample(constrained_float=0.9) - except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'greater_than', - 'loc': ('constrained_float',), - 'msg': 'Input should be greater than 1', - 'input': 0.9, - 'ctx': {'gt': 1.0}, - 'url': 'https://errors.pydantic.dev/2/v/greater_than', - } - ] - ''' - ``` - """ # noqa: D212 - return Annotated[ - float, - Strict(strict) if strict is not None else None, - annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), - annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None, - AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None, - ] + strict: bool = False, + gt: float = None, + ge: float = None, + lt: float = None, + le: float = None, + multiple_of: float = None, + allow_inf_nan: Optional[bool] = None, +) -> Type[float]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan) + return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace) -PositiveFloat = Annotated[float, annotated_types.Gt(0)] -"""A float that must be greater than zero. 
+if TYPE_CHECKING: + PositiveFloat = float + NegativeFloat = float + NonPositiveFloat = float + NonNegativeFloat = float + StrictFloat = float + FiniteFloat = float +else: -```py -from pydantic import BaseModel, PositiveFloat, ValidationError + class PositiveFloat(ConstrainedFloat): + gt = 0 -class Model(BaseModel): - positive_float: PositiveFloat + class NegativeFloat(ConstrainedFloat): + lt = 0 -m = Model(positive_float=1.0) -print(repr(m)) -#> Model(positive_float=1.0) + class NonPositiveFloat(ConstrainedFloat): + le = 0 -try: - Model(positive_float=-1.0) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'greater_than', - 'loc': ('positive_float',), - 'msg': 'Input should be greater than 0', - 'input': -1.0, - 'ctx': {'gt': 0.0}, - 'url': 'https://errors.pydantic.dev/2/v/greater_than', - } - ] - ''' -``` -""" -NegativeFloat = Annotated[float, annotated_types.Lt(0)] -"""A float that must be less than zero. + class NonNegativeFloat(ConstrainedFloat): + ge = 0 -```py -from pydantic import BaseModel, NegativeFloat, ValidationError + class StrictFloat(ConstrainedFloat): + strict = True -class Model(BaseModel): - negative_float: NegativeFloat - -m = Model(negative_float=-1.0) -print(repr(m)) -#> Model(negative_float=-1.0) - -try: - Model(negative_float=1.0) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'less_than', - 'loc': ('negative_float',), - 'msg': 'Input should be less than 0', - 'input': 1.0, - 'ctx': {'lt': 0.0}, - 'url': 'https://errors.pydantic.dev/2/v/less_than', - } - ] - ''' -``` -""" -NonPositiveFloat = Annotated[float, annotated_types.Le(0)] -"""A float that must be less than or equal to zero. 
- -```py -from pydantic import BaseModel, NonPositiveFloat, ValidationError - -class Model(BaseModel): - non_positive_float: NonPositiveFloat - -m = Model(non_positive_float=0.0) -print(repr(m)) -#> Model(non_positive_float=0.0) - -try: - Model(non_positive_float=1.0) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'less_than_equal', - 'loc': ('non_positive_float',), - 'msg': 'Input should be less than or equal to 0', - 'input': 1.0, - 'ctx': {'le': 0.0}, - 'url': 'https://errors.pydantic.dev/2/v/less_than_equal', - } - ] - ''' -``` -""" -NonNegativeFloat = Annotated[float, annotated_types.Ge(0)] -"""A float that must be greater than or equal to zero. - -```py -from pydantic import BaseModel, NonNegativeFloat, ValidationError - -class Model(BaseModel): - non_negative_float: NonNegativeFloat - -m = Model(non_negative_float=0.0) -print(repr(m)) -#> Model(non_negative_float=0.0) - -try: - Model(non_negative_float=-1.0) -except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'greater_than_equal', - 'loc': ('non_negative_float',), - 'msg': 'Input should be greater than or equal to 0', - 'input': -1.0, - 'ctx': {'ge': 0.0}, - 'url': 'https://errors.pydantic.dev/2/v/greater_than_equal', - } - ] - ''' -``` -""" -StrictFloat = Annotated[float, Strict(True)] -"""A float that must be validated in strict mode. - -```py -from pydantic import BaseModel, StrictFloat, ValidationError - -class StrictFloatModel(BaseModel): - strict_float: StrictFloat - -try: - StrictFloatModel(strict_float='1.0') -except ValidationError as e: - print(e) - ''' - 1 validation error for StrictFloatModel - strict_float - Input should be a valid number [type=float_type, input_value='1.0', input_type=str] - ''' -``` -""" -FiniteFloat = Annotated[float, AllowInfNan(False)] -"""A float that must be finite (not ``-inf``, ``inf``, or ``nan``). 
- -```py -from pydantic import BaseModel, FiniteFloat - -class Model(BaseModel): - finite: FiniteFloat - -m = Model(finite=1.0) -print(m) -#> finite=1.0 -``` -""" + class FiniteFloat(ConstrainedFloat): + allow_inf_nan = False # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +class ConstrainedBytes(bytes): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + max_length: OptionalInt = None + strict: bool = False + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_bytes_validator if cls.strict else bytes_validator + yield constr_strip_whitespace + yield constr_upper + yield constr_lower + yield constr_length_validator + + def conbytes( *, - min_length: int | None = None, - max_length: int | None = None, - strict: bool | None = None, -) -> type[bytes]: - """A wrapper around `bytes` that allows for additional constraints. - - Args: - min_length: The minimum length of the bytes. - max_length: The maximum length of the bytes. - strict: Whether to validate the bytes in strict mode. - - Returns: - The wrapped bytes type. 
- """ - return Annotated[ - bytes, - Strict(strict) if strict is not None else None, - annotated_types.Len(min_length or 0, max_length), - ] + strip_whitespace: bool = False, + to_upper: bool = False, + to_lower: bool = False, + min_length: int = None, + max_length: int = None, + strict: bool = False, +) -> Type[bytes]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + min_length=min_length, + max_length=max_length, + strict=strict, + ) + return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace)) -StrictBytes = Annotated[bytes, Strict()] -"""A bytes that must be validated in strict mode.""" +if TYPE_CHECKING: + StrictBytes = bytes +else: + + class StrictBytes(ConstrainedBytes): + strict = True # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -@_dataclasses.dataclass(frozen=True) -class StringConstraints(annotated_types.GroupedMetadata): - """Usage docs: https://docs.pydantic.dev/2.6/concepts/fields/#string-constraints +class ConstrainedStr(str): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + max_length: OptionalInt = None + curtail_length: OptionalInt = None + regex: Optional[Pattern[str]] = None + strict = False - Apply constraints to `str` types. + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + minLength=cls.min_length, + maxLength=cls.max_length, + pattern=cls.regex and cls.regex.pattern, + ) - Attributes: - strip_whitespace: Whether to strip whitespace from the string. - to_upper: Whether to convert the string to uppercase. - to_lower: Whether to convert the string to lowercase. - strict: Whether to validate the string in strict mode. - min_length: The minimum length of the string. - max_length: The maximum length of the string. 
- pattern: A regex pattern that the string must match. - """ + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_str_validator if cls.strict else str_validator + yield constr_strip_whitespace + yield constr_upper + yield constr_lower + yield constr_length_validator + yield cls.validate - strip_whitespace: bool | None = None - to_upper: bool | None = None - to_lower: bool | None = None - strict: bool | None = None - min_length: int | None = None - max_length: int | None = None - pattern: str | None = None + @classmethod + def validate(cls, value: Union[str]) -> Union[str]: + if cls.curtail_length and len(value) > cls.curtail_length: + value = value[: cls.curtail_length] - def __iter__(self) -> Iterator[BaseMetadata]: - if self.min_length is not None: - yield MinLen(self.min_length) - if self.max_length is not None: - yield MaxLen(self.max_length) - if self.strict is not None: - yield Strict() - if ( - self.strip_whitespace is not None - or self.pattern is not None - or self.to_lower is not None - or self.to_upper is not None - ): - yield _fields.pydantic_general_metadata( - strip_whitespace=self.strip_whitespace, - to_upper=self.to_upper, - to_lower=self.to_lower, - pattern=self.pattern, - ) + if cls.regex: + if not cls.regex.match(value): + raise errors.StrRegexError(pattern=cls.regex.pattern) + + return value def constr( *, - strip_whitespace: bool | None = None, - to_upper: bool | None = None, - to_lower: bool | None = None, - strict: bool | None = None, - min_length: int | None = None, - max_length: int | None = None, - pattern: str | None = None, -) -> type[str]: - """ - !!! warning "Discouraged" - This function is **discouraged** in favor of using - [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with - [`StringConstraints`][pydantic.types.StringConstraints] instead. - - This function will be **deprecated** in Pydantic 3.0. 
- - The reason is that `constr` returns a type, which doesn't play well with static analysis tools. - - === ":x: Don't do this" - ```py - from pydantic import BaseModel, constr - - class Foo(BaseModel): - bar: constr(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$') - ``` - - === ":white_check_mark: Do this" - ```py - from typing_extensions import Annotated - - from pydantic import BaseModel, StringConstraints - - class Foo(BaseModel): - bar: Annotated[str, StringConstraints(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$')] - ``` - - A wrapper around `str` that allows for additional constraints. - - ```py - from pydantic import BaseModel, constr - - class Foo(BaseModel): - bar: constr(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$') + strip_whitespace: bool = False, + to_upper: bool = False, + to_lower: bool = False, + strict: bool = False, + min_length: int = None, + max_length: int = None, + curtail_length: int = None, + regex: str = None, +) -> Type[str]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + strict=strict, + min_length=min_length, + max_length=max_length, + curtail_length=curtail_length, + regex=regex and re.compile(regex), + ) + return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace)) - foo = Foo(bar=' hello ') - print(foo) - #> bar='HELLO' - ``` +if TYPE_CHECKING: + StrictStr = str +else: - Args: - strip_whitespace: Whether to remove leading and trailing whitespace. - to_upper: Whether to turn all characters to uppercase. - to_lower: Whether to turn all characters to lowercase. - strict: Whether to validate the string in strict mode. - min_length: The minimum length of the string. - max_length: The maximum length of the string. - pattern: A regex pattern to validate the string against. - - Returns: - The wrapped string type. 
- """ # noqa: D212 - return Annotated[ - str, - StringConstraints( - strip_whitespace=strip_whitespace, - to_upper=to_upper, - to_lower=to_lower, - strict=strict, - min_length=min_length, - max_length=max_length, - pattern=pattern, - ), - ] + class StrictStr(ConstrainedStr): + strict = True -StrictStr = Annotated[str, Strict()] -"""A string that must be validated in strict mode.""" +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# This types superclass should be Set[T], but cython chokes on that... +class ConstrainedSet(set): # type: ignore + # Needed for pydantic to detect that this is a set + __origin__ = set + __args__: Set[Type[T]] # type: ignore + + min_items: Optional[int] = None + max_items: Optional[int] = None + item_type: Type[T] # type: ignore + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.set_length_validator + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) + + @classmethod + def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]': + if v is None: + return None + + v = set_validator(v) + v_len = len(v) + + if cls.min_items is not None and v_len < cls.min_items: + raise errors.SetMinLengthError(limit_value=cls.min_items) + + if cls.max_items is not None and v_len > cls.max_items: + raise errors.SetMaxLengthError(limit_value=cls.max_items) + + return v -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ COLLECTION TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -HashableItemType = TypeVar('HashableItemType', bound=Hashable) +def conset(item_type: Type[T], *, min_items: int = None, max_items: int = None) -> Type[Set[T]]: + # __args__ is needed to conform to typing generics api + namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} + # We use new_class to be able to deal with Generic types + return 
new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace)) -def conset( - item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None -) -> type[set[HashableItemType]]: - """A wrapper around `typing.Set` that allows for additional constraints. +# This types superclass should be FrozenSet[T], but cython chokes on that... +class ConstrainedFrozenSet(frozenset): # type: ignore + # Needed for pydantic to detect that this is a set + __origin__ = frozenset + __args__: FrozenSet[Type[T]] # type: ignore - Args: - item_type: The type of the items in the set. - min_length: The minimum length of the set. - max_length: The maximum length of the set. + min_items: Optional[int] = None + max_items: Optional[int] = None + item_type: Type[T] # type: ignore - Returns: - The wrapped set type. - """ - return Annotated[Set[item_type], annotated_types.Len(min_length or 0, max_length)] + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.frozenset_length_validator + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) + + @classmethod + def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]': + if v is None: + return None + + v = frozenset_validator(v) + v_len = len(v) + + if cls.min_items is not None and v_len < cls.min_items: + raise errors.FrozenSetMinLengthError(limit_value=cls.min_items) + + if cls.max_items is not None and v_len > cls.max_items: + raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items) + + return v -def confrozenset( - item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None -) -> type[frozenset[HashableItemType]]: - """A wrapper around `typing.FrozenSet` that allows for additional constraints. - - Args: - item_type: The type of the items in the frozenset. 
- min_length: The minimum length of the frozenset. - max_length: The maximum length of the frozenset. - - Returns: - The wrapped frozenset type. - """ - return Annotated[FrozenSet[item_type], annotated_types.Len(min_length or 0, max_length)] +def confrozenset(item_type: Type[T], *, min_items: int = None, max_items: int = None) -> Type[FrozenSet[T]]: + # __args__ is needed to conform to typing generics api + namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} + # We use new_class to be able to deal with Generic types + return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace)) -AnyItemType = TypeVar('AnyItemType') +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# This types superclass should be List[T], but cython chokes on that... +class ConstrainedList(list): # type: ignore + # Needed for pydantic to detect that this is a list + __origin__ = list + __args__: Tuple[Type[T], ...] 
# type: ignore + + min_items: Optional[int] = None + max_items: Optional[int] = None + unique_items: Optional[bool] = None + item_type: Type[T] # type: ignore + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.list_length_validator + if cls.unique_items: + yield cls.unique_items_validator + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items) + + @classmethod + def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]': + if v is None: + return None + + v = list_validator(v) + v_len = len(v) + + if cls.min_items is not None and v_len < cls.min_items: + raise errors.ListMinLengthError(limit_value=cls.min_items) + + if cls.max_items is not None and v_len > cls.max_items: + raise errors.ListMaxLengthError(limit_value=cls.max_items) + + return v + + @classmethod + def unique_items_validator(cls, v: 'List[T]') -> 'List[T]': + for i, value in enumerate(v, start=1): + if value in v[i:]: + raise errors.ListUniqueItemsError() + + return v def conlist( - item_type: type[AnyItemType], - *, - min_length: int | None = None, - max_length: int | None = None, - unique_items: bool | None = None, -) -> type[list[AnyItemType]]: - """A wrapper around typing.List that adds validation. - - Args: - item_type: The type of the items in the list. - min_length: The minimum length of the list. Defaults to None. - max_length: The maximum length of the list. Defaults to None. - unique_items: Whether the items in the list must be unique. Defaults to None. - !!! warning Deprecated - The `unique_items` parameter is deprecated, use `Set` instead. - See [this issue](https://github.com/pydantic/pydantic-core/issues/296) for more details. - - Returns: - The wrapped list type. 
- """ - if unique_items is not None: - raise PydanticUserError( - ( - '`unique_items` is removed, use `Set` instead' - '(this feature is discussed in https://github.com/pydantic/pydantic-core/issues/296)' - ), - code='removed-kwargs', - ) - return Annotated[List[item_type], annotated_types.Len(min_length or 0, max_length)] + item_type: Type[T], *, min_items: int = None, max_items: int = None, unique_items: bool = None +) -> Type[List[T]]: + # __args__ is needed to conform to typing generics api + namespace = dict( + min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,) + ) + # We use new_class to be able to deal with Generic types + return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace)) -# ~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT STRING TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + -AnyType = TypeVar('AnyType') if TYPE_CHECKING: - ImportString = Annotated[AnyType, ...] + PyObject = Callable[..., Any] else: - class ImportString: - """A type that can be used to import a type from a string. - - `ImportString` expects a string and loads the Python object importable at that dotted path. - Attributes of modules may be separated from the module by `:` or `.`, e.g. if `'math:cos'` was provided, - the resulting field value would be the function`cos`. If a `.` is used and both an attribute and submodule - are present at the same path, the module will be preferred. - - On model instantiation, pointers will be evaluated and imported. There is - some nuance to this behavior, demonstrated in the examples below. 
- - **Good behavior:** - ```py - from math import cos - - from pydantic import BaseModel, Field, ImportString, ValidationError - - - class ImportThings(BaseModel): - obj: ImportString - - - # A string value will cause an automatic import - my_cos = ImportThings(obj='math.cos') - - # You can use the imported function as you would expect - cos_of_0 = my_cos.obj(0) - assert cos_of_0 == 1 - - - # A string whose value cannot be imported will raise an error - try: - ImportThings(obj='foo.bar') - except ValidationError as e: - print(e) - ''' - 1 validation error for ImportThings - obj - Invalid python path: No module named 'foo.bar' [type=import_error, input_value='foo.bar', input_type=str] - ''' - - - # Actual python objects can be assigned as well - my_cos = ImportThings(obj=cos) - my_cos_2 = ImportThings(obj='math.cos') - my_cos_3 = ImportThings(obj='math:cos') - assert my_cos == my_cos_2 == my_cos_3 - - - # You can set default field value either as Python object: - class ImportThingsDefaultPyObj(BaseModel): - obj: ImportString = math.cos - - - # or as a string value (but only if used with `validate_default=True`) - class ImportThingsDefaultString(BaseModel): - obj: ImportString = Field(default='math.cos', validate_default=True) - - - my_cos_default1 = ImportThingsDefaultPyObj() - my_cos_default2 = ImportThingsDefaultString() - assert my_cos_default1.obj == my_cos_default2.obj == math.cos - - - # note: this will not work! - class ImportThingsMissingValidateDefault(BaseModel): - obj: ImportString = 'math.cos' - - my_cos_default3 = ImportThingsMissingValidateDefault() - assert my_cos_default3.obj == 'math.cos' # just string, not evaluated - ``` - - Serializing an `ImportString` type to json is also possible. 
- - ```py - from pydantic import BaseModel, ImportString - - - class ImportThings(BaseModel): - obj: ImportString - - - # Create an instance - m = ImportThings(obj='math.cos') - print(m) - #> obj= - print(m.model_dump_json()) - #> {"obj":"math.cos"} - ``` - """ + class PyObject: + validate_always = True @classmethod - def __class_getitem__(cls, item: AnyType) -> AnyType: - return Annotated[item, cls()] + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate @classmethod - def __get_pydantic_core_schema__( - cls, source: type[Any], handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - serializer = core_schema.plain_serializer_function_ser_schema(cls._serialize, when_used='json') - if cls is source: - # Treat bare usage of ImportString (`schema is None`) as the same as ImportString[Any] - return core_schema.no_info_plain_validator_function( - function=_validators.import_string, serialization=serializer - ) - else: - return core_schema.no_info_before_validator_function( - function=_validators.import_string, schema=handler(source), serialization=serializer - ) + def validate(cls, value: Any) -> Any: + if isinstance(value, Callable): + return value - @staticmethod - def _serialize(v: Any) -> str: - if isinstance(v, ModuleType): - return v.__name__ - elif hasattr(v, '__module__') and hasattr(v, '__name__'): - return f'{v.__module__}.{v.__name__}' - else: - return v + try: + value = str_validator(value) + except errors.StrError: + raise errors.PyObjectError(error_message='value is neither a valid import path not a valid callable') - def __repr__(self) -> str: - return 'ImportString' + try: + return import_string(value) + except ImportError as e: + raise errors.PyObjectError(error_message=str(e)) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta): + gt: OptionalIntFloatDecimal = None + ge: OptionalIntFloatDecimal = None + lt: OptionalIntFloatDecimal 
= None + le: OptionalIntFloatDecimal = None + max_digits: OptionalInt = None + decimal_places: OptionalInt = None + multiple_of: OptionalIntFloatDecimal = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield decimal_validator + yield number_size_validator + yield number_multiple_validator + yield cls.validate + + @classmethod + def validate(cls, value: Decimal) -> Decimal: + digit_tuple, exponent = value.as_tuple()[1:] + if exponent in {'F', 'n', 'N'}: + raise errors.DecimalIsNotFiniteError() + + if exponent >= 0: + # A positive exponent adds that many trailing zeros. + digits = len(digit_tuple) + exponent + decimals = 0 + else: + # If the absolute value of the negative exponent is larger than the + # number of digits, then it's the same as the number of digits, + # because it'll consume all of the digits in digit_tuple and then + # add abs(exponent) - len(digit_tuple) leading zeros after the + # decimal point. 
+ if abs(exponent) > len(digit_tuple): + digits = decimals = abs(exponent) + else: + digits = len(digit_tuple) + decimals = abs(exponent) + whole_digits = digits - decimals + + if cls.max_digits is not None and digits > cls.max_digits: + raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits) + + if cls.decimal_places is not None and decimals > cls.decimal_places: + raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places) + + if cls.max_digits is not None and cls.decimal_places is not None: + expected = cls.max_digits - cls.decimal_places + if whole_digits > expected: + raise errors.DecimalWholeDigitsError(whole_digits=expected) + + return value + + def condecimal( *, - strict: bool | None = None, - gt: int | Decimal | None = None, - ge: int | Decimal | None = None, - lt: int | Decimal | None = None, - le: int | Decimal | None = None, - multiple_of: int | Decimal | None = None, - max_digits: int | None = None, - decimal_places: int | None = None, - allow_inf_nan: bool | None = None, -) -> type[Decimal]: - """ - !!! warning "Discouraged" - This function is **discouraged** in favor of using - [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with - [`Field`][pydantic.fields.Field] instead. - - This function will be **deprecated** in Pydantic 3.0. - - The reason is that `condecimal` returns a type, which doesn't play well with static analysis tools. - - === ":x: Don't do this" - ```py - from pydantic import BaseModel, condecimal - - class Foo(BaseModel): - bar: condecimal(strict=True, allow_inf_nan=True) - ``` - - === ":white_check_mark: Do this" - ```py - from decimal import Decimal - - from typing_extensions import Annotated - - from pydantic import BaseModel, Field - - class Foo(BaseModel): - bar: Annotated[Decimal, Field(strict=True, allow_inf_nan=True)] - ``` - - A wrapper around Decimal that adds validation. - - Args: - strict: Whether to validate the value in strict mode. Defaults to `None`. 
- gt: The value must be greater than this. Defaults to `None`. - ge: The value must be greater than or equal to this. Defaults to `None`. - lt: The value must be less than this. Defaults to `None`. - le: The value must be less than or equal to this. Defaults to `None`. - multiple_of: The value must be a multiple of this. Defaults to `None`. - max_digits: The maximum number of digits. Defaults to `None`. - decimal_places: The number of decimal places. Defaults to `None`. - allow_inf_nan: Whether to allow infinity and NaN. Defaults to `None`. - - ```py - from decimal import Decimal - - from pydantic import BaseModel, ValidationError, condecimal - - class ConstrainedExample(BaseModel): - constrained_decimal: condecimal(gt=Decimal('1.0')) - - m = ConstrainedExample(constrained_decimal=Decimal('1.1')) - print(repr(m)) - #> ConstrainedExample(constrained_decimal=Decimal('1.1')) - - try: - ConstrainedExample(constrained_decimal=Decimal('0.9')) - except ValidationError as e: - print(e.errors()) - ''' - [ - { - 'type': 'greater_than', - 'loc': ('constrained_decimal',), - 'msg': 'Input should be greater than 1.0', - 'input': Decimal('0.9'), - 'ctx': {'gt': Decimal('1.0')}, - 'url': 'https://errors.pydantic.dev/2/v/greater_than', - } - ] - ''' - ``` - """ # noqa: D212 - return Annotated[ - Decimal, - Strict(strict) if strict is not None else None, - annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), - annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None, - _fields.pydantic_general_metadata(max_digits=max_digits, decimal_places=decimal_places), - AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None, - ] + gt: Decimal = None, + ge: Decimal = None, + lt: Decimal = None, + le: Decimal = None, + max_digits: int = None, + decimal_places: int = None, + multiple_of: Decimal = None, +) -> Type[Decimal]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, 
decimal_places=decimal_places, multiple_of=multiple_of + ) + return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +if TYPE_CHECKING: + UUID1 = UUID + UUID3 = UUID + UUID4 = UUID + UUID5 = UUID +else: -@_dataclasses.dataclass(**_internal_dataclass.slots_true) -class UuidVersion: - """A field metadata class to indicate a [UUID](https://docs.python.org/3/library/uuid.html) version.""" + class UUID1(UUID): + _required_version = 1 - uuid_version: Literal[1, 3, 4, 5] + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format=f'uuid{cls._required_version}') - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = handler(core_schema) - field_schema.pop('anyOf', None) # remove the bytes/str union - field_schema.update(type='string', format=f'uuid{self.uuid_version}') - return field_schema + class UUID3(UUID1): + _required_version = 3 - def __get_pydantic_core_schema__(self, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - if isinstance(self, source): - # used directly as a type - return core_schema.uuid_schema(version=self.uuid_version) - else: - # update existing schema with self.uuid_version - schema = handler(source) - _check_annotated_type(schema['type'], 'uuid', self.__class__.__name__) - schema['version'] = self.uuid_version # type: ignore - return schema + class UUID4(UUID1): + _required_version = 4 - def __hash__(self) -> int: - return hash(type(self.uuid_version)) - - -UUID1 = Annotated[UUID, UuidVersion(1)] -"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 1. 
- -```py -import uuid - -from pydantic import UUID1, BaseModel - -class Model(BaseModel): - uuid1: UUID1 - -Model(uuid1=uuid.uuid1()) -``` -""" -UUID3 = Annotated[UUID, UuidVersion(3)] -"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 3. - -```py -import uuid - -from pydantic import UUID3, BaseModel - -class Model(BaseModel): - uuid3: UUID3 - -Model(uuid3=uuid.uuid3(uuid.NAMESPACE_DNS, 'pydantic.org')) -``` -""" -UUID4 = Annotated[UUID, UuidVersion(4)] -"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 4. - -```py -import uuid - -from pydantic import UUID4, BaseModel - -class Model(BaseModel): - uuid4: UUID4 - -Model(uuid4=uuid.uuid4()) -``` -""" -UUID5 = Annotated[UUID, UuidVersion(5)] -"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 5. - -```py -import uuid - -from pydantic import UUID5, BaseModel - -class Model(BaseModel): - uuid5: UUID5 - -Model(uuid5=uuid.uuid5(uuid.NAMESPACE_DNS, 'pydantic.org')) -``` -""" + class UUID5(UUID1): + _required_version = 5 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +if TYPE_CHECKING: + FilePath = Path + DirectoryPath = Path +else: -@_dataclasses.dataclass -class PathType: - path_type: Literal['file', 'dir', 'new'] + class FilePath(Path): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(format='file-path') - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = handler(core_schema) - format_conversion = {'file': 'file-path', 'dir': 'directory-path'} - field_schema.update(format=format_conversion.get(self.path_type, 'path'), type='string') - return field_schema + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield path_validator + yield path_exists_validator + yield cls.validate - def __get_pydantic_core_schema__(self, source: Any, 
handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - function_lookup = { - 'file': cast(core_schema.WithInfoValidatorFunction, self.validate_file), - 'dir': cast(core_schema.WithInfoValidatorFunction, self.validate_directory), - 'new': cast(core_schema.WithInfoValidatorFunction, self.validate_new), - } + @classmethod + def validate(cls, value: Path) -> Path: + if not value.is_file(): + raise errors.PathNotAFileError(path=value) - return core_schema.with_info_after_validator_function( - function_lookup[self.path_type], - handler(source), - ) + return value - @staticmethod - def validate_file(path: Path, _: core_schema.ValidationInfo) -> Path: - if path.is_file(): - return path - else: - raise PydanticCustomError('path_not_file', 'Path does not point to a file') + class DirectoryPath(Path): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(format='directory-path') - @staticmethod - def validate_directory(path: Path, _: core_schema.ValidationInfo) -> Path: - if path.is_dir(): - return path - else: - raise PydanticCustomError('path_not_directory', 'Path does not point to a directory') + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield path_validator + yield path_exists_validator + yield cls.validate - @staticmethod - def validate_new(path: Path, _: core_schema.ValidationInfo) -> Path: - if path.exists(): - raise PydanticCustomError('path_exists', 'Path already exists') - elif not path.parent.exists(): - raise PydanticCustomError('parent_does_not_exist', 'Parent directory does not exist') - else: - return path + @classmethod + def validate(cls, value: Path) -> Path: + if not value.is_dir(): + raise errors.PathNotADirectoryError(path=value) - def __hash__(self) -> int: - return hash(type(self.path_type)) - - -FilePath = Annotated[Path, PathType('file')] -"""A path that must point to a file. 
- -```py -from pathlib import Path - -from pydantic import BaseModel, FilePath, ValidationError - -class Model(BaseModel): - f: FilePath - -path = Path('text.txt') -path.touch() -m = Model(f='text.txt') -print(m.model_dump()) -#> {'f': PosixPath('text.txt')} -path.unlink() - -path = Path('directory') -path.mkdir(exist_ok=True) -try: - Model(f='directory') # directory -except ValidationError as e: - print(e) - ''' - 1 validation error for Model - f - Path does not point to a file [type=path_not_file, input_value='directory', input_type=str] - ''' -path.rmdir() - -try: - Model(f='not-exists-file') -except ValidationError as e: - print(e) - ''' - 1 validation error for Model - f - Path does not point to a file [type=path_not_file, input_value='not-exists-file', input_type=str] - ''' -``` -""" -DirectoryPath = Annotated[Path, PathType('dir')] -"""A path that must point to a directory. - -```py -from pathlib import Path - -from pydantic import BaseModel, DirectoryPath, ValidationError - -class Model(BaseModel): - f: DirectoryPath - -path = Path('directory/') -path.mkdir() -m = Model(f='directory/') -print(m.model_dump()) -#> {'f': PosixPath('directory')} -path.rmdir() - -path = Path('file.txt') -path.touch() -try: - Model(f='file.txt') # file -except ValidationError as e: - print(e) - ''' - 1 validation error for Model - f - Path does not point to a directory [type=path_not_directory, input_value='file.txt', input_type=str] - ''' -path.unlink() - -try: - Model(f='not-exists-directory') -except ValidationError as e: - print(e) - ''' - 1 validation error for Model - f - Path does not point to a directory [type=path_not_directory, input_value='not-exists-directory', input_type=str] - ''' -``` -""" -NewPath = Annotated[Path, PathType('new')] -"""A path for a new file or directory that must not already exist.""" + return value # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +class JsonWrapper: + pass + + +class JsonMeta(type): + def 
__getitem__(self, t: Type[Any]) -> Type[JsonWrapper]: + if t is Any: + return Json # allow Json[Any] to replecate plain Json + return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t})) + + if TYPE_CHECKING: - Json = Annotated[AnyType, ...] # Json[list[str]] will be recognized by type checkers as list[str] + Json = Annotated[T, ...] # Json[list[str]] will be recognized by type checkers as list[str] else: - class Json: - """A special type wrapper which loads JSON before parsing. - - You can use the `Json` data type to make Pydantic first load a raw JSON string before - validating the loaded data into the parametrized type: - - ```py - from typing import Any, List - - from pydantic import BaseModel, Json, ValidationError - - - class AnyJsonModel(BaseModel): - json_obj: Json[Any] - - - class ConstrainedJsonModel(BaseModel): - json_obj: Json[List[int]] - - - print(AnyJsonModel(json_obj='{"b": 1}')) - #> json_obj={'b': 1} - print(ConstrainedJsonModel(json_obj='[1, 2, 3]')) - #> json_obj=[1, 2, 3] - - try: - ConstrainedJsonModel(json_obj=12) - except ValidationError as e: - print(e) - ''' - 1 validation error for ConstrainedJsonModel - json_obj - JSON input should be string, bytes or bytearray [type=json_type, input_value=12, input_type=int] - ''' - - try: - ConstrainedJsonModel(json_obj='[a, b]') - except ValidationError as e: - print(e) - ''' - 1 validation error for ConstrainedJsonModel - json_obj - Invalid JSON: expected value at line 1 column 2 [type=json_invalid, input_value='[a, b]', input_type=str] - ''' - - try: - ConstrainedJsonModel(json_obj='["a", "b"]') - except ValidationError as e: - print(e) - ''' - 2 validation errors for ConstrainedJsonModel - json_obj.0 - Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str] - json_obj.1 - Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='b', input_type=str] - ''' - ``` - - When 
you dump the model using `model_dump` or `model_dump_json`, the dumped value will be the result of validation, - not the original JSON string. However, you can use the argument `round_trip=True` to get the original JSON string back: - - ```py - from typing import List - - from pydantic import BaseModel, Json - - - class ConstrainedJsonModel(BaseModel): - json_obj: Json[List[int]] - - - print(ConstrainedJsonModel(json_obj='[1, 2, 3]').model_dump_json()) - #> {"json_obj":[1,2,3]} - print( - ConstrainedJsonModel(json_obj='[1, 2, 3]').model_dump_json(round_trip=True) - ) - #> {"json_obj":"[1,2,3]"} - ``` - """ - + class Json(metaclass=JsonMeta): @classmethod - def __class_getitem__(cls, item: AnyType) -> AnyType: - return Annotated[item, cls()] - - @classmethod - def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - if cls is source: - return core_schema.json_schema(None) - else: - return core_schema.json_schema(handler(source)) - - def __repr__(self) -> str: - return 'Json' - - def __hash__(self) -> int: - return hash(type(self)) - - def __eq__(self, other: Any) -> bool: - return type(other) == type(self) + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='json-string') # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -SecretType = TypeVar('SecretType', str, bytes) +class SecretField(abc.ABC): + """ + Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`, + the `__init__()` should be part of the abstract class and the + `get_secret_value()` method should use the generic `T` type. -class _SecretField(Generic[SecretType]): - def __init__(self, secret_value: SecretType) -> None: - self._secret_value: SecretType = secret_value - - def get_secret_value(self) -> SecretType: - """Get the secret value. - - Returns: - The secret value. 
- """ - return self._secret_value + However Cython doesn't support very well generics at the moment and + the generated code fails to be imported (see + https://github.com/cython/cython/issues/2753). + """ def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value() + def __str__(self) -> str: + return '**********' if self.get_secret_value() else '' + def __hash__(self) -> int: return hash(self.get_secret_value()) + @abc.abstractmethod + def get_secret_value(self) -> Any: # pragma: no cover + ... + + +class SecretStr(SecretField): + min_length: OptionalInt = None + max_length: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + type='string', + writeOnly=True, + format='password', + minLength=cls.min_length, + maxLength=cls.max_length, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + yield constr_length_validator + + @classmethod + def validate(cls, value: Any) -> 'SecretStr': + if isinstance(value, cls): + return value + value = str_validator(value) + return cls(value) + + def __init__(self, value: str): + self._secret_value = value + + def __repr__(self) -> str: + return f"SecretStr('{self}')" + def __len__(self) -> int: return len(self._secret_value) - def __str__(self) -> str: - return str(self._display()) + def display(self) -> str: + warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning) + return str(self) - def __repr__(self) -> str: - return f'{self.__class__.__name__}({self._display()!r})' + def get_secret_value(self) -> str: + return self._secret_value - def _display(self) -> SecretType: - raise NotImplementedError + +class SecretBytes(SecretField): + min_length: OptionalInt = None + max_length: OptionalInt = None @classmethod - def __get_pydantic_core_schema__(cls, source: type[Any], handler: 
GetCoreSchemaHandler) -> core_schema.CoreSchema: - if issubclass(source, SecretStr): - field_type = str - inner_schema = core_schema.str_schema() - else: - assert issubclass(source, SecretBytes) - field_type = bytes - inner_schema = core_schema.bytes_schema() - error_kind = 'string_type' if field_type is str else 'bytes_type' - - def serialize( - value: _SecretField[SecretType], info: core_schema.SerializationInfo - ) -> str | _SecretField[SecretType]: - if info.mode == 'json': - # we want the output to always be string without the `b'` prefix for bytes, - # hence we just use `secret_display` - return _secret_display(value.get_secret_value()) - else: - return value - - def get_json_schema(_core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue: - json_schema = handler(inner_schema) - _utils.update_not_none( - json_schema, - type='string', - writeOnly=True, - format='password', - ) - return json_schema - - json_schema = core_schema.no_info_after_validator_function( - source, # construct the type - inner_schema, + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + type='string', + writeOnly=True, + format='password', + minLength=cls.min_length, + maxLength=cls.max_length, ) - s = core_schema.json_or_python_schema( - python_schema=core_schema.union_schema( - [ - core_schema.is_instance_schema(source), - json_schema, - ], - strict=True, - custom_error_type=error_kind, - ), - json_schema=json_schema, - serialization=core_schema.plain_serializer_function_ser_schema( - serialize, - info_arg=True, - return_schema=core_schema.str_schema(), - when_used='json', - ), - ) - s.setdefault('metadata', {}).setdefault('pydantic_js_functions', []).append(get_json_schema) - return s + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + yield constr_length_validator -def _secret_display(value: str | bytes) -> str: - return '**********' if value else '' + 
@classmethod + def validate(cls, value: Any) -> 'SecretBytes': + if isinstance(value, cls): + return value + value = bytes_validator(value) + return cls(value) + def __init__(self, value: bytes): + self._secret_value = value -class SecretStr(_SecretField[str]): - """A string used for storing sensitive information that you do not want to be visible in logging or tracebacks. + def __repr__(self) -> str: + return f"SecretBytes(b'{self}')" - When the secret value is nonempty, it is displayed as `'**********'` instead of the underlying value in - calls to `repr()` and `str()`. If the value _is_ empty, it is displayed as `''`. + def __len__(self) -> int: + return len(self._secret_value) - ```py - from pydantic import BaseModel, SecretStr + def display(self) -> str: + warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning) + return str(self) - class User(BaseModel): - username: str - password: SecretStr - - user = User(username='scolvin', password='password1') - - print(user) - #> username='scolvin' password=SecretStr('**********') - print(user.password.get_secret_value()) - #> password1 - print((SecretStr('password'), SecretStr(''))) - #> (SecretStr('**********'), SecretStr('')) - ``` - """ - - def _display(self) -> str: - return _secret_display(self.get_secret_value()) - - -class SecretBytes(_SecretField[bytes]): - """A bytes used for storing sensitive information that you do not want to be visible in logging or tracebacks. - - It displays `b'**********'` instead of the string value on `repr()` and `str()` calls. - When the secret value is nonempty, it is displayed as `b'**********'` instead of the underlying value in - calls to `repr()` and `str()`. If the value _is_ empty, it is displayed as `b''`. 
- - ```py - from pydantic import BaseModel, SecretBytes - - class User(BaseModel): - username: str - password: SecretBytes - - user = User(username='scolvin', password=b'password1') - #> username='scolvin' password=SecretBytes(b'**********') - print(user.password.get_secret_value()) - #> b'password1' - print((SecretBytes(b'password'), SecretBytes(b''))) - #> (SecretBytes(b'**********'), SecretBytes(b'')) - ``` - """ - - def _display(self) -> bytes: - return _secret_display(self.get_secret_value()).encode() + def get_secret_value(self) -> bytes: + return self._secret_value # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class PaymentCardBrand(str, Enum): + # If you add another card type, please also add it to the + # Hypothesis strategy in `pydantic._hypothesis_plugin`. amex = 'American Express' mastercard = 'Mastercard' visa = 'Visa' @@ -1600,13 +954,10 @@ class PaymentCardBrand(str, Enum): return self.value -@deprecated( - 'The `PaymentCardNumber` class is deprecated, use `pydantic_extra_types` instead. 
' - 'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_payment/#pydantic_extra_types.payment.PaymentCardNumber.', - category=PydanticDeprecatedSince20, -) class PaymentCardNumber(str): - """Based on: https://en.wikipedia.org/wiki/Payment_card_number.""" + """ + Based on: https://en.wikipedia.org/wiki/Payment_card_number + """ strip_whitespace: ClassVar[bool] = True min_length: ClassVar[int] = 12 @@ -1616,47 +967,36 @@ class PaymentCardNumber(str): brand: PaymentCardBrand def __init__(self, card_number: str): - self.validate_digits(card_number) - - card_number = self.validate_luhn_check_digit(card_number) - self.bin = card_number[:6] self.last4 = card_number[-4:] - self.brand = self.validate_brand(card_number) + self.brand = self._get_brand(card_number) @classmethod - def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - return core_schema.with_info_after_validator_function( - cls.validate, - core_schema.str_schema( - min_length=cls.min_length, max_length=cls.max_length, strip_whitespace=cls.strip_whitespace - ), - ) - - @classmethod - def validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> PaymentCardNumber: - """Validate the card number and return a `PaymentCardNumber` instance.""" - return cls(__input_value) + def __get_validators__(cls) -> 'CallableGenerator': + yield str_validator + yield constr_strip_whitespace + yield constr_length_validator + yield cls.validate_digits + yield cls.validate_luhn_check_digit + yield cls + yield cls.validate_length_for_brand @property def masked(self) -> str: - """Mask all but the last 4 digits of the card number. - - Returns: - A masked card number string. 
- """ num_masked = len(self) - 10 # len(bin) + len(last4) == 10 return f'{self.bin}{"*" * num_masked}{self.last4}' @classmethod - def validate_digits(cls, card_number: str) -> None: - """Validate that the card number is all digits.""" + def validate_digits(cls, card_number: str) -> str: if not card_number.isdigit(): - raise PydanticCustomError('payment_card_number_digits', 'Card number is not all digits') + raise errors.NotDigitError + return card_number @classmethod def validate_luhn_check_digit(cls, card_number: str) -> str: - """Based on: https://en.wikipedia.org/wiki/Luhn_algorithm.""" + """ + Based on: https://en.wikipedia.org/wiki/Luhn_algorithm + """ sum_ = int(card_number[-1]) length = len(card_number) parity = length % 2 @@ -1669,14 +1009,33 @@ class PaymentCardNumber(str): sum_ += digit valid = sum_ % 10 == 0 if not valid: - raise PydanticCustomError('payment_card_number_luhn', 'Card number is not luhn valid') + raise errors.LuhnValidationError + return card_number + + @classmethod + def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber': + """ + Validate length based on BIN for major brands: + https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN) + """ + required_length: Union[None, int, str] = None + if card_number.brand in PaymentCardBrand.mastercard: + required_length = 16 + valid = len(card_number) == required_length + elif card_number.brand == PaymentCardBrand.visa: + required_length = '13, 16 or 19' + valid = len(card_number) in {13, 16, 19} + elif card_number.brand == PaymentCardBrand.amex: + required_length = 15 + valid = len(card_number) == required_length + else: + valid = True + if not valid: + raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length) return card_number @staticmethod - def validate_brand(card_number: str) -> PaymentCardBrand: - """Validate length based on BIN for major brands: - 
https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN). - """ + def _get_brand(card_number: str) -> PaymentCardBrand: if card_number[0] == '4': brand = PaymentCardBrand.visa elif 51 <= int(card_number[:2]) <= 55: @@ -1685,1195 +1044,144 @@ class PaymentCardNumber(str): brand = PaymentCardBrand.amex else: brand = PaymentCardBrand.other - - required_length: None | int | str = None - if brand in PaymentCardBrand.mastercard: - required_length = 16 - valid = len(card_number) == required_length - elif brand == PaymentCardBrand.visa: - required_length = '13, 16 or 19' - valid = len(card_number) in {13, 16, 19} - elif brand == PaymentCardBrand.amex: - required_length = 15 - valid = len(card_number) == required_length - else: - valid = True - - if not valid: - raise PydanticCustomError( - 'payment_card_number_brand', - 'Length for a {brand} card must be {required_length}', - {'brand': brand, 'required_length': required_length}, - ) return brand # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +BYTE_SIZES = { + 'b': 1, + 'kb': 10**3, + 'mb': 10**6, + 'gb': 10**9, + 'tb': 10**12, + 'pb': 10**15, + 'eb': 10**18, + 'kib': 2**10, + 'mib': 2**20, + 'gib': 2**30, + 'tib': 2**40, + 'pib': 2**50, + 'eib': 2**60, +} +BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) +byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) + class ByteSize(int): - """Converts a string representing a number of bytes with units (such as `'1KB'` or `'11.5MiB'`) into an integer. - - You can use the `ByteSize` data type to (case-insensitively) convert a string representation of a number of bytes into - an integer, and also to print out human-readable strings representing a number of bytes. - - In conformance with [IEC 80000-13 Standard](https://en.wikipedia.org/wiki/ISO/IEC_80000) we interpret `'1KB'` to mean 1000 bytes, - and `'1KiB'` to mean 1024 bytes. 
In general, including a middle `'i'` will cause the unit to be interpreted as a power of 2, - rather than a power of 10 (so, for example, `'1 MB'` is treated as `1_000_000` bytes, whereas `'1 MiB'` is treated as `1_048_576` bytes). - - !!! info - Note that `1b` will be parsed as "1 byte" and not "1 bit". - - ```py - from pydantic import BaseModel, ByteSize - - class MyModel(BaseModel): - size: ByteSize - - print(MyModel(size=52000).size) - #> 52000 - print(MyModel(size='3000 KiB').size) - #> 3072000 - - m = MyModel(size='50 PB') - print(m.size.human_readable()) - #> 44.4PiB - print(m.size.human_readable(decimal=True)) - #> 50.0PB - - print(m.size.to('TiB')) - #> 45474.73508864641 - ``` - """ - - byte_sizes = { - 'b': 1, - 'kb': 10**3, - 'mb': 10**6, - 'gb': 10**9, - 'tb': 10**12, - 'pb': 10**15, - 'eb': 10**18, - 'kib': 2**10, - 'mib': 2**20, - 'gib': 2**30, - 'tib': 2**40, - 'pib': 2**50, - 'eib': 2**60, - 'bit': 1 / 8, - 'kbit': 10**3 / 8, - 'mbit': 10**6 / 8, - 'gbit': 10**9 / 8, - 'tbit': 10**12 / 8, - 'pbit': 10**15 / 8, - 'ebit': 10**18 / 8, - 'kibit': 2**10 / 8, - 'mibit': 2**20 / 8, - 'gibit': 2**30 / 8, - 'tibit': 2**40 / 8, - 'pibit': 2**50 / 8, - 'eibit': 2**60 / 8, - } - byte_sizes.update({k.lower()[0]: v for k, v in byte_sizes.items() if 'i' not in k}) - - byte_string_pattern = r'^\s*(\d*\.?\d+)\s*(\w+)?' 
- byte_string_re = re.compile(byte_string_pattern, re.IGNORECASE) + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate @classmethod - def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - return core_schema.with_info_after_validator_function( - function=cls._validate, - schema=core_schema.union_schema( - [ - core_schema.str_schema(pattern=cls.byte_string_pattern), - core_schema.int_schema(ge=0), - ], - custom_error_type='byte_size', - custom_error_message='could not parse value and unit from byte string', - ), - serialization=core_schema.plain_serializer_function_ser_schema( - int, return_schema=core_schema.int_schema(ge=0) - ), - ) + def validate(cls, v: StrIntFloat) -> 'ByteSize': - @classmethod - def _validate(cls, __input_value: Any, _: core_schema.ValidationInfo) -> ByteSize: try: - return cls(int(__input_value)) + return cls(int(v)) except ValueError: pass - str_match = cls.byte_string_re.match(str(__input_value)) + str_match = byte_string_re.match(str(v)) if str_match is None: - raise PydanticCustomError('byte_size', 'could not parse value and unit from byte string') + raise errors.InvalidByteSize() scalar, unit = str_match.groups() if unit is None: unit = 'b' try: - unit_mult = cls.byte_sizes[unit.lower()] + unit_mult = BYTE_SIZES[unit.lower()] except KeyError: - raise PydanticCustomError('byte_size_unit', 'could not interpret byte unit: {unit}', {'unit': unit}) + raise errors.InvalidByteSizeUnit(unit=unit) return cls(int(float(scalar) * unit_mult)) def human_readable(self, decimal: bool = False) -> str: - """Converts a byte size to a human readable string. - Args: - decimal: If True, use decimal units (e.g. 1000 bytes per KB). If False, use binary units - (e.g. 1024 bytes per KiB). - - Returns: - A human readable string representation of the byte size. 
- """ if decimal: divisor = 1000 - units = 'B', 'KB', 'MB', 'GB', 'TB', 'PB' + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] final_unit = 'EB' else: divisor = 1024 - units = 'B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB' + units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] final_unit = 'EiB' num = float(self) for unit in units: if abs(num) < divisor: - if unit == 'B': - return f'{num:0.0f}{unit}' - else: - return f'{num:0.1f}{unit}' + return f'{num:0.1f}{unit}' num /= divisor return f'{num:0.1f}{final_unit}' def to(self, unit: str) -> float: - """Converts a byte size to another unit, including both byte and bit units. - Args: - unit: The unit to convert to. Must be one of the following: B, KB, MB, GB, TB, PB, EB, - KiB, MiB, GiB, TiB, PiB, EiB (byte units) and - bit, kbit, mbit, gbit, tbit, pbit, ebit, - kibit, mibit, gibit, tibit, pibit, eibit (bit units). - - Returns: - The byte size in the new unit. - """ try: - unit_div = self.byte_sizes[unit.lower()] + unit_div = BYTE_SIZES[unit.lower()] except KeyError: - raise PydanticCustomError('byte_size_unit', 'Could not interpret byte unit: {unit}', {'unit': unit}) + raise errors.InvalidByteSizeUnit(unit=unit) return self / unit_div # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -def _check_annotated_type(annotated_type: str, expected_type: str, annotation: str) -> None: - if annotated_type != expected_type: - raise PydanticUserError(f"'{annotation}' cannot annotate '{annotated_type}'.", code='invalid_annotated_type') - - if TYPE_CHECKING: - PastDate = Annotated[date, ...] - FutureDate = Annotated[date, ...] 
+ PastDate = date + FutureDate = date else: - class PastDate: - """A date in the past.""" + class PastDate(date): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield cls.validate @classmethod - def __get_pydantic_core_schema__( - cls, source: type[Any], handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - if cls is source: - # used directly as a type - return core_schema.date_schema(now_op='past') - else: - schema = handler(source) - _check_annotated_type(schema['type'], 'date', cls.__name__) - schema['now_op'] = 'past' - return schema + def validate(cls, value: date) -> date: + if value >= date.today(): + raise errors.DateNotInThePastError() - def __repr__(self) -> str: - return 'PastDate' + return value - class FutureDate: - """A date in the future.""" + class FutureDate(date): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield cls.validate @classmethod - def __get_pydantic_core_schema__( - cls, source: type[Any], handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - if cls is source: - # used directly as a type - return core_schema.date_schema(now_op='future') - else: - schema = handler(source) - _check_annotated_type(schema['type'], 'date', cls.__name__) - schema['now_op'] = 'future' - return schema + def validate(cls, value: date) -> date: + if value <= date.today(): + raise errors.DateNotInTheFutureError() - def __repr__(self) -> str: - return 'FutureDate' + return value + + +class ConstrainedDate(date, metaclass=ConstrainedNumberMeta): + gt: OptionalDate = None + ge: OptionalDate = None + lt: OptionalDate = None + le: OptionalDate = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield number_size_validator 
def condate( *, - strict: bool | None = None, - gt: date | None = None, - ge: date | None = None, - lt: date | None = None, - le: date | None = None, -) -> type[date]: - """A wrapper for date that adds constraints. - - Args: - strict: Whether to validate the date value in strict mode. Defaults to `None`. - gt: The value must be greater than this. Defaults to `None`. - ge: The value must be greater than or equal to this. Defaults to `None`. - lt: The value must be less than this. Defaults to `None`. - le: The value must be less than or equal to this. Defaults to `None`. - - Returns: - A date type with the specified constraints. - """ - return Annotated[ - date, - Strict(strict) if strict is not None else None, - annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), - ] - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATETIME TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -if TYPE_CHECKING: - AwareDatetime = Annotated[datetime, ...] - NaiveDatetime = Annotated[datetime, ...] - PastDatetime = Annotated[datetime, ...] - FutureDatetime = Annotated[datetime, ...] 
- -else: - - class AwareDatetime: - """A datetime that requires timezone info.""" - - @classmethod - def __get_pydantic_core_schema__( - cls, source: type[Any], handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - if cls is source: - # used directly as a type - return core_schema.datetime_schema(tz_constraint='aware') - else: - schema = handler(source) - _check_annotated_type(schema['type'], 'datetime', cls.__name__) - schema['tz_constraint'] = 'aware' - return schema - - def __repr__(self) -> str: - return 'AwareDatetime' - - class NaiveDatetime: - """A datetime that doesn't require timezone info.""" - - @classmethod - def __get_pydantic_core_schema__( - cls, source: type[Any], handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - if cls is source: - # used directly as a type - return core_schema.datetime_schema(tz_constraint='naive') - else: - schema = handler(source) - _check_annotated_type(schema['type'], 'datetime', cls.__name__) - schema['tz_constraint'] = 'naive' - return schema - - def __repr__(self) -> str: - return 'NaiveDatetime' - - class PastDatetime: - """A datetime that must be in the past.""" - - @classmethod - def __get_pydantic_core_schema__( - cls, source: type[Any], handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - if cls is source: - # used directly as a type - return core_schema.datetime_schema(now_op='past') - else: - schema = handler(source) - _check_annotated_type(schema['type'], 'datetime', cls.__name__) - schema['now_op'] = 'past' - return schema - - def __repr__(self) -> str: - return 'PastDatetime' - - class FutureDatetime: - """A datetime that must be in the future.""" - - @classmethod - def __get_pydantic_core_schema__( - cls, source: type[Any], handler: GetCoreSchemaHandler - ) -> core_schema.CoreSchema: - if cls is source: - # used directly as a type - return core_schema.datetime_schema(now_op='future') - else: - schema = handler(source) - _check_annotated_type(schema['type'], 'datetime', 
cls.__name__) - schema['now_op'] = 'future' - return schema - - def __repr__(self) -> str: - return 'FutureDatetime' - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Encoded TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class EncoderProtocol(Protocol): - """Protocol for encoding and decoding data to and from bytes.""" - - @classmethod - def decode(cls, data: bytes) -> bytes: - """Decode the data using the encoder. - - Args: - data: The data to decode. - - Returns: - The decoded data. - """ - ... - - @classmethod - def encode(cls, value: bytes) -> bytes: - """Encode the data using the encoder. - - Args: - value: The data to encode. - - Returns: - The encoded data. - """ - ... - - @classmethod - def get_json_format(cls) -> str: - """Get the JSON format for the encoded data. - - Returns: - The JSON format for the encoded data. - """ - ... - - -class Base64Encoder(EncoderProtocol): - """Standard (non-URL-safe) Base64 encoder.""" - - @classmethod - def decode(cls, data: bytes) -> bytes: - """Decode the data from base64 encoded bytes to original bytes data. - - Args: - data: The data to decode. - - Returns: - The decoded data. - """ - try: - return base64.decodebytes(data) - except ValueError as e: - raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)}) - - @classmethod - def encode(cls, value: bytes) -> bytes: - """Encode the data from bytes to a base64 encoded bytes. - - Args: - value: The data to encode. - - Returns: - The encoded data. - """ - return base64.encodebytes(value) - - @classmethod - def get_json_format(cls) -> Literal['base64']: - """Get the JSON format for the encoded data. - - Returns: - The JSON format for the encoded data. - """ - return 'base64' - - -class Base64UrlEncoder(EncoderProtocol): - """URL-safe Base64 encoder.""" - - @classmethod - def decode(cls, data: bytes) -> bytes: - """Decode the data from base64 encoded bytes to original bytes data. - - Args: - data: The data to decode. 
- - Returns: - The decoded data. - """ - try: - return base64.urlsafe_b64decode(data) - except ValueError as e: - raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)}) - - @classmethod - def encode(cls, value: bytes) -> bytes: - """Encode the data from bytes to a base64 encoded bytes. - - Args: - value: The data to encode. - - Returns: - The encoded data. - """ - return base64.urlsafe_b64encode(value) - - @classmethod - def get_json_format(cls) -> Literal['base64url']: - """Get the JSON format for the encoded data. - - Returns: - The JSON format for the encoded data. - """ - return 'base64url' - - -@_dataclasses.dataclass(**_internal_dataclass.slots_true) -class EncodedBytes: - """A bytes type that is encoded and decoded using the specified encoder. - - `EncodedBytes` needs an encoder that implements `EncoderProtocol` to operate. - - ```py - from typing_extensions import Annotated - - from pydantic import BaseModel, EncodedBytes, EncoderProtocol, ValidationError - - class MyEncoder(EncoderProtocol): - @classmethod - def decode(cls, data: bytes) -> bytes: - if data == b'**undecodable**': - raise ValueError('Cannot decode data') - return data[13:] - - @classmethod - def encode(cls, value: bytes) -> bytes: - return b'**encoded**: ' + value - - @classmethod - def get_json_format(cls) -> str: - return 'my-encoder' - - MyEncodedBytes = Annotated[bytes, EncodedBytes(encoder=MyEncoder)] - - class Model(BaseModel): - my_encoded_bytes: MyEncodedBytes - - # Initialize the model with encoded data - m = Model(my_encoded_bytes=b'**encoded**: some bytes') - - # Access decoded value - print(m.my_encoded_bytes) - #> b'some bytes' - - # Serialize into the encoded form - print(m.model_dump()) - #> {'my_encoded_bytes': b'**encoded**: some bytes'} - - # Validate encoded data - try: - Model(my_encoded_bytes=b'**undecodable**') - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - my_encoded_bytes - Value error, 
Cannot decode data [type=value_error, input_value=b'**undecodable**', input_type=bytes] - ''' - ``` - """ - - encoder: type[EncoderProtocol] - - def __get_pydantic_json_schema__( - self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - field_schema = handler(core_schema) - field_schema.update(type='string', format=self.encoder.get_json_format()) - return field_schema - - def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - return core_schema.with_info_after_validator_function( - function=self.decode, - schema=core_schema.bytes_schema(), - serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode), - ) - - def decode(self, data: bytes, _: core_schema.ValidationInfo) -> bytes: - """Decode the data using the specified encoder. - - Args: - data: The data to decode. - - Returns: - The decoded data. - """ - return self.encoder.decode(data) - - def encode(self, value: bytes) -> bytes: - """Encode the data using the specified encoder. - - Args: - value: The data to encode. - - Returns: - The encoded data. - """ - return self.encoder.encode(value) - - def __hash__(self) -> int: - return hash(self.encoder) - - -@_dataclasses.dataclass(**_internal_dataclass.slots_true) -class EncodedStr(EncodedBytes): - """A str type that is encoded and decoded using the specified encoder. - - `EncodedStr` needs an encoder that implements `EncoderProtocol` to operate. 
- - ```py - from typing_extensions import Annotated - - from pydantic import BaseModel, EncodedStr, EncoderProtocol, ValidationError - - class MyEncoder(EncoderProtocol): - @classmethod - def decode(cls, data: bytes) -> bytes: - if data == b'**undecodable**': - raise ValueError('Cannot decode data') - return data[13:] - - @classmethod - def encode(cls, value: bytes) -> bytes: - return b'**encoded**: ' + value - - @classmethod - def get_json_format(cls) -> str: - return 'my-encoder' - - MyEncodedStr = Annotated[str, EncodedStr(encoder=MyEncoder)] - - class Model(BaseModel): - my_encoded_str: MyEncodedStr - - # Initialize the model with encoded data - m = Model(my_encoded_str='**encoded**: some str') - - # Access decoded value - print(m.my_encoded_str) - #> some str - - # Serialize into the encoded form - print(m.model_dump()) - #> {'my_encoded_str': '**encoded**: some str'} - - # Validate encoded data - try: - Model(my_encoded_str='**undecodable**') - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - my_encoded_str - Value error, Cannot decode data [type=value_error, input_value='**undecodable**', input_type=str] - ''' - ``` - """ - - def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - return core_schema.with_info_after_validator_function( - function=self.decode_str, - schema=super(EncodedStr, self).__get_pydantic_core_schema__(source=source, handler=handler), # noqa: UP008 - serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode_str), - ) - - def decode_str(self, data: bytes, _: core_schema.ValidationInfo) -> str: - """Decode the data using the specified encoder. - - Args: - data: The data to decode. - - Returns: - The decoded data. - """ - return data.decode() - - def encode_str(self, value: str) -> str: - """Encode the data using the specified encoder. - - Args: - value: The data to encode. - - Returns: - The encoded data. 
- """ - return super(EncodedStr, self).encode(value=value.encode()).decode() # noqa: UP008 - - def __hash__(self) -> int: - return hash(self.encoder) - - -Base64Bytes = Annotated[bytes, EncodedBytes(encoder=Base64Encoder)] -"""A bytes type that is encoded and decoded using the standard (non-URL-safe) base64 encoder. - -Note: - Under the hood, `Base64Bytes` use standard library `base64.encodebytes` and `base64.decodebytes` functions. - - As a result, attempting to decode url-safe base64 data using the `Base64Bytes` type may fail or produce an incorrect - decoding. - -```py -from pydantic import Base64Bytes, BaseModel, ValidationError - -class Model(BaseModel): - base64_bytes: Base64Bytes - -# Initialize the model with base64 data -m = Model(base64_bytes=b'VGhpcyBpcyB0aGUgd2F5') - -# Access decoded value -print(m.base64_bytes) -#> b'This is the way' - -# Serialize into the base64 form -print(m.model_dump()) -#> {'base64_bytes': b'VGhpcyBpcyB0aGUgd2F5\n'} - -# Validate base64 data -try: - print(Model(base64_bytes=b'undecodable').base64_bytes) -except ValidationError as e: - print(e) - ''' - 1 validation error for Model - base64_bytes - Base64 decoding error: 'Incorrect padding' [type=base64_decode, input_value=b'undecodable', input_type=bytes] - ''' -``` -""" -Base64Str = Annotated[str, EncodedStr(encoder=Base64Encoder)] -"""A str type that is encoded and decoded using the standard (non-URL-safe) base64 encoder. - -Note: - Under the hood, `Base64Bytes` use standard library `base64.encodebytes` and `base64.decodebytes` functions. - - As a result, attempting to decode url-safe base64 data using the `Base64Str` type may fail or produce an incorrect - decoding. 
- -```py -from pydantic import Base64Str, BaseModel, ValidationError - -class Model(BaseModel): - base64_str: Base64Str - -# Initialize the model with base64 data -m = Model(base64_str='VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y') - -# Access decoded value -print(m.base64_str) -#> These aren't the droids you're looking for - -# Serialize into the base64 form -print(m.model_dump()) -#> {'base64_str': 'VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y\n'} - -# Validate base64 data -try: - print(Model(base64_str='undecodable').base64_str) -except ValidationError as e: - print(e) - ''' - 1 validation error for Model - base64_str - Base64 decoding error: 'Incorrect padding' [type=base64_decode, input_value='undecodable', input_type=str] - ''' -``` -""" -Base64UrlBytes = Annotated[bytes, EncodedBytes(encoder=Base64UrlEncoder)] -"""A bytes type that is encoded and decoded using the URL-safe base64 encoder. - -Note: - Under the hood, `Base64UrlBytes` use standard library `base64.urlsafe_b64encode` and `base64.urlsafe_b64decode` - functions. - - As a result, the `Base64UrlBytes` type can be used to faithfully decode "vanilla" base64 data - (using `'+'` and `'/'`). - -```py -from pydantic import Base64UrlBytes, BaseModel - -class Model(BaseModel): - base64url_bytes: Base64UrlBytes - -# Initialize the model with base64 data -m = Model(base64url_bytes=b'SHc_dHc-TXc==') -print(m) -#> base64url_bytes=b'Hw?tw>Mw' -``` -""" -Base64UrlStr = Annotated[str, EncodedStr(encoder=Base64UrlEncoder)] -"""A str type that is encoded and decoded using the URL-safe base64 encoder. - -Note: - Under the hood, `Base64UrlStr` use standard library `base64.urlsafe_b64encode` and `base64.urlsafe_b64decode` - functions. - - As a result, the `Base64UrlStr` type can be used to faithfully decode "vanilla" base64 data (using `'+'` and `'/'`). 
- -```py -from pydantic import Base64UrlStr, BaseModel - -class Model(BaseModel): - base64url_str: Base64UrlStr - -# Initialize the model with base64 data -m = Model(base64url_str='SHc_dHc-TXc==') -print(m) -#> base64url_str='Hw?tw>Mw' -``` -""" - - -__getattr__ = getattr_migration(__name__) - - -@_dataclasses.dataclass(**_internal_dataclass.slots_true) -class GetPydanticSchema: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/types/#using-getpydanticschema-to-reduce-boilerplate - - A convenience class for creating an annotation that provides pydantic custom type hooks. - - This class is intended to eliminate the need to create a custom "marker" which defines the - `__get_pydantic_core_schema__` and `__get_pydantic_json_schema__` custom hook methods. - - For example, to have a field treated by type checkers as `int`, but by pydantic as `Any`, you can do: - ```python - from typing import Any - - from typing_extensions import Annotated - - from pydantic import BaseModel, GetPydanticSchema - - HandleAsAny = GetPydanticSchema(lambda _s, h: h(Any)) - - class Model(BaseModel): - x: Annotated[int, HandleAsAny] # pydantic sees `x: Any` - - print(repr(Model(x='abc').x)) - #> 'abc' - ``` - """ - - get_pydantic_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema] | None = None - get_pydantic_json_schema: Callable[[Any, GetJsonSchemaHandler], JsonSchemaValue] | None = None - - # Note: we may want to consider adding a convenience staticmethod `def for_type(type_: Any) -> GetPydanticSchema:` - # which returns `GetPydanticSchema(lambda _s, h: h(type_))` - - if not TYPE_CHECKING: - # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access - - def __getattr__(self, item: str) -> Any: - """Use this rather than defining `__get_pydantic_core_schema__` etc. 
to reduce the number of nested calls.""" - if item == '__get_pydantic_core_schema__' and self.get_pydantic_core_schema: - return self.get_pydantic_core_schema - elif item == '__get_pydantic_json_schema__' and self.get_pydantic_json_schema: - return self.get_pydantic_json_schema - else: - return object.__getattribute__(self, item) - - __hash__ = object.__hash__ - - -@_dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) -class Tag: - """Provides a way to specify the expected tag to use for a case of a (callable) discriminated union. - - Also provides a way to label a union case in error messages. - - When using a callable `Discriminator`, attach a `Tag` to each case in the `Union` to specify the tag that - should be used to identify that case. For example, in the below example, the `Tag` is used to specify that - if `get_discriminator_value` returns `'apple'`, the input should be validated as an `ApplePie`, and if it - returns `'pumpkin'`, the input should be validated as a `PumpkinPie`. - - The primary role of the `Tag` here is to map the return value from the callable `Discriminator` function to - the appropriate member of the `Union` in question. 
- - ```py - from typing import Any, Union - - from typing_extensions import Annotated, Literal - - from pydantic import BaseModel, Discriminator, Tag - - class Pie(BaseModel): - time_to_cook: int - num_ingredients: int - - class ApplePie(Pie): - fruit: Literal['apple'] = 'apple' - - class PumpkinPie(Pie): - filling: Literal['pumpkin'] = 'pumpkin' - - def get_discriminator_value(v: Any) -> str: - if isinstance(v, dict): - return v.get('fruit', v.get('filling')) - return getattr(v, 'fruit', getattr(v, 'filling', None)) - - class ThanksgivingDinner(BaseModel): - dessert: Annotated[ - Union[ - Annotated[ApplePie, Tag('apple')], - Annotated[PumpkinPie, Tag('pumpkin')], - ], - Discriminator(get_discriminator_value), - ] - - apple_variation = ThanksgivingDinner.model_validate( - {'dessert': {'fruit': 'apple', 'time_to_cook': 60, 'num_ingredients': 8}} - ) - print(repr(apple_variation)) - ''' - ThanksgivingDinner(dessert=ApplePie(time_to_cook=60, num_ingredients=8, fruit='apple')) - ''' - - pumpkin_variation = ThanksgivingDinner.model_validate( - { - 'dessert': { - 'filling': 'pumpkin', - 'time_to_cook': 40, - 'num_ingredients': 6, - } - } - ) - print(repr(pumpkin_variation)) - ''' - ThanksgivingDinner(dessert=PumpkinPie(time_to_cook=40, num_ingredients=6, filling='pumpkin')) - ''' - ``` - - !!! note - You must specify a `Tag` for every case in a `Tag` that is associated with a - callable `Discriminator`. Failing to do so will result in a `PydanticUserError` with code - [`callable-discriminator-no-tag`](../errors/usage_errors.md#callable-discriminator-no-tag). - - See the [Discriminated Unions] concepts docs for more details on how to use `Tag`s. 
- - [Discriminated Unions]: ../concepts/unions.md#discriminated-unions - """ - - tag: str - - def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: - schema = handler(source_type) - metadata = schema.setdefault('metadata', {}) - assert isinstance(metadata, dict) - metadata[_core_utils.TAGGED_UNION_TAG_KEY] = self.tag - return schema - - -@_dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) -class Discriminator: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/unions/#discriminated-unions-with-callable-discriminator - - Provides a way to use a custom callable as the way to extract the value of a union discriminator. - - This allows you to get validation behavior like you'd get from `Field(discriminator=)`, - but without needing to have a single shared field across all the union choices. This also makes it - possible to handle unions of models and primitive types with discriminated-union-style validation errors. - Finally, this allows you to use a custom callable as the way to identify which member of a union a value - belongs to, while still seeing all the performance benefits of a discriminated union. - - Consider this example, which is much more performant with the use of `Discriminator` and thus a `TaggedUnion` - than it would be as a normal `Union`. 
- - ```py - from typing import Any, Union - - from typing_extensions import Annotated, Literal - - from pydantic import BaseModel, Discriminator, Tag - - class Pie(BaseModel): - time_to_cook: int - num_ingredients: int - - class ApplePie(Pie): - fruit: Literal['apple'] = 'apple' - - class PumpkinPie(Pie): - filling: Literal['pumpkin'] = 'pumpkin' - - def get_discriminator_value(v: Any) -> str: - if isinstance(v, dict): - return v.get('fruit', v.get('filling')) - return getattr(v, 'fruit', getattr(v, 'filling', None)) - - class ThanksgivingDinner(BaseModel): - dessert: Annotated[ - Union[ - Annotated[ApplePie, Tag('apple')], - Annotated[PumpkinPie, Tag('pumpkin')], - ], - Discriminator(get_discriminator_value), - ] - - apple_variation = ThanksgivingDinner.model_validate( - {'dessert': {'fruit': 'apple', 'time_to_cook': 60, 'num_ingredients': 8}} - ) - print(repr(apple_variation)) - ''' - ThanksgivingDinner(dessert=ApplePie(time_to_cook=60, num_ingredients=8, fruit='apple')) - ''' - - pumpkin_variation = ThanksgivingDinner.model_validate( - { - 'dessert': { - 'filling': 'pumpkin', - 'time_to_cook': 40, - 'num_ingredients': 6, - } - } - ) - print(repr(pumpkin_variation)) - ''' - ThanksgivingDinner(dessert=PumpkinPie(time_to_cook=40, num_ingredients=6, filling='pumpkin')) - ''' - ``` - - See the [Discriminated Unions] concepts docs for more details on how to use `Discriminator`s. - - [Discriminated Unions]: ../concepts/unions.md#discriminated-unions - """ - - discriminator: str | Callable[[Any], Hashable] - """The callable or field name for discriminating the type in a tagged union. - - A `Callable` discriminator must extract the value of the discriminator from the input. - A `str` discriminator must be the name of a field to discriminate against. - """ - custom_error_type: str | None = None - """Type to use in [custom errors](../errors/errors.md#custom-errors) replacing the standard discriminated union - validation errors. 
- """ - custom_error_message: str | None = None - """Message to use in custom errors.""" - custom_error_context: dict[str, int | str | float] | None = None - """Context to use in custom errors.""" - - def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: - origin = _typing_extra.get_origin(source_type) - if not origin or not _typing_extra.origin_is_union(origin): - raise TypeError(f'{type(self).__name__} must be used with a Union type, not {source_type}') - - if isinstance(self.discriminator, str): - from pydantic import Field - - return handler(Annotated[source_type, Field(discriminator=self.discriminator)]) - else: - original_schema = handler(source_type) - return self._convert_schema(original_schema) - - def _convert_schema(self, original_schema: core_schema.CoreSchema) -> core_schema.TaggedUnionSchema: - if original_schema['type'] != 'union': - # This likely indicates that the schema was a single-item union that was simplified. - # In this case, we do the same thing we do in - # `pydantic._internal._discriminated_union._ApplyInferredDiscriminator._apply_to_root`, namely, - # package the generated schema back into a single-item union. 
- original_schema = core_schema.union_schema([original_schema]) - - tagged_union_choices = {} - for i, choice in enumerate(original_schema['choices']): - tag = None - if isinstance(choice, tuple): - choice, tag = choice - metadata = choice.get('metadata') - if metadata is not None: - metadata_tag = metadata.get(_core_utils.TAGGED_UNION_TAG_KEY) - if metadata_tag is not None: - tag = metadata_tag - if tag is None: - raise PydanticUserError( - f'`Tag` not provided for choice {choice} used with `Discriminator`', - code='callable-discriminator-no-tag', - ) - tagged_union_choices[tag] = choice - - # Have to do these verbose checks to ensure falsy values ('' and {}) don't get ignored - custom_error_type = self.custom_error_type - if custom_error_type is None: - custom_error_type = original_schema.get('custom_error_type') - - custom_error_message = self.custom_error_message - if custom_error_message is None: - custom_error_message = original_schema.get('custom_error_message') - - custom_error_context = self.custom_error_context - if custom_error_context is None: - custom_error_context = original_schema.get('custom_error_context') - - custom_error_type = original_schema.get('custom_error_type') if custom_error_type is None else custom_error_type - return core_schema.tagged_union_schema( - tagged_union_choices, - self.discriminator, - custom_error_type=custom_error_type, - custom_error_message=custom_error_message, - custom_error_context=custom_error_context, - strict=original_schema.get('strict'), - ref=original_schema.get('ref'), - metadata=original_schema.get('metadata'), - serialization=original_schema.get('serialization'), - ) - - -_JSON_TYPES = {int, float, str, bool, list, dict, type(None)} - - -def _get_type_name(x: Any) -> str: - type_ = type(x) - if type_ in _JSON_TYPES: - return type_.__name__ - - # Handle proper subclasses; note we don't need to handle None or bool here - if isinstance(x, int): - return 'int' - if isinstance(x, float): - return 'float' - if 
isinstance(x, str): - return 'str' - if isinstance(x, list): - return 'list' - if isinstance(x, dict): - return 'dict' - - # Fail by returning the type's actual name - return getattr(type_, '__name__', '') - - -class _AllowAnyJson: - @classmethod - def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: - python_schema = handler(source_type) - return core_schema.json_or_python_schema(json_schema=core_schema.any_schema(), python_schema=python_schema) - - -if TYPE_CHECKING: - # This seems to only be necessary for mypy - JsonValue: TypeAlias = Union[ - List['JsonValue'], - Dict[str, 'JsonValue'], - str, - bool, - int, - float, - None, - ] - """A `JsonValue` is used to represent a value that can be serialized to JSON. - - It may be one of: - - * `List['JsonValue']` - * `Dict[str, 'JsonValue']` - * `str` - * `bool` - * `int` - * `float` - * `None` - - The following example demonstrates how to use `JsonValue` to validate JSON data, - and what kind of errors to expect when input data is not json serializable. 
- - ```py - import json - - from pydantic import BaseModel, JsonValue, ValidationError - - class Model(BaseModel): - j: JsonValue - - valid_json_data = {'j': {'a': {'b': {'c': 1, 'd': [2, None]}}}} - invalid_json_data = {'j': {'a': {'b': ...}}} - - print(repr(Model.model_validate(valid_json_data))) - #> Model(j={'a': {'b': {'c': 1, 'd': [2, None]}}}) - print(repr(Model.model_validate_json(json.dumps(valid_json_data)))) - #> Model(j={'a': {'b': {'c': 1, 'd': [2, None]}}}) - - try: - Model.model_validate(invalid_json_data) - except ValidationError as e: - print(e) - ''' - 1 validation error for Model - j.dict.a.dict.b - input was not a valid JSON value [type=invalid-json-value, input_value=Ellipsis, input_type=ellipsis] - ''' - ``` - """ - -else: - JsonValue = TypeAliasType( - 'JsonValue', - Annotated[ - Union[ - Annotated[List['JsonValue'], Tag('list')], - Annotated[Dict[str, 'JsonValue'], Tag('dict')], - Annotated[str, Tag('str')], - Annotated[bool, Tag('bool')], - Annotated[int, Tag('int')], - Annotated[float, Tag('float')], - Annotated[None, Tag('NoneType')], - ], - Discriminator( - _get_type_name, - custom_error_type='invalid-json-value', - custom_error_message='input was not a valid JSON value', - ), - _AllowAnyJson, - ], - ) - - -class _OnErrorOmit: - @classmethod - def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: - # there is no actual default value here but we use with_default_schema since it already has the on_error - # behavior implemented and it would be no more efficient to implement it on every other validator - # or as a standalone validator - return core_schema.with_default_schema(schema=handler(source_type), on_error='omit') - - -OnErrorOmit = Annotated[T, _OnErrorOmit] -""" -When used as an item in a list, the key type in a dict, optional values of a TypedDict, etc. -this annotation omits the item from the iteration if there is any error validating it. 
-That is, instead of a [`ValidationError`][pydantic_core.ValidationError] being propagated up and the entire iterable being discarded -any invalid items are discarded and the valid ones are returned. -""" + gt: date = None, + ge: date = None, + lt: date = None, + le: date = None, +) -> Type[date]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(gt=gt, ge=ge, lt=lt, le=le) + return type('ConstrainedDateValue', (ConstrainedDate,), namespace) diff --git a/lib/pydantic/typing.py b/lib/pydantic/typing.py index f1b32ba2..5ccf266c 100644 --- a/lib/pydantic/typing.py +++ b/lib/pydantic/typing.py @@ -1,4 +1,602 @@ -"""`typing` module is a backport module from V1.""" -from ._migration import getattr_migration +import sys +from collections.abc import Callable +from os import PathLike +from typing import ( # type: ignore + TYPE_CHECKING, + AbstractSet, + Any, + Callable as TypingCallable, + ClassVar, + Dict, + ForwardRef, + Generator, + Iterable, + List, + Mapping, + NewType, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + _eval_type, + cast, + get_type_hints, +) -__getattr__ = getattr_migration(__name__) +from typing_extensions import ( + Annotated, + Final, + Literal, + NotRequired as TypedDictNotRequired, + Required as TypedDictRequired, +) + +try: + from typing import _TypingBase as typing_base # type: ignore +except ImportError: + from typing import _Final as typing_base # type: ignore + +try: + from typing import GenericAlias as TypingGenericAlias # type: ignore +except ImportError: + # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] 
and so on) + TypingGenericAlias = () + +try: + from types import UnionType as TypesUnionType # type: ignore +except ImportError: + # python < 3.10 does not have UnionType (str | int, byte | bool and so on) + TypesUnionType = () + + +if sys.version_info < (3, 9): + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + return type_._evaluate(globalns, localns) + +else: + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + # Even though it is the right signature for python 3.9, mypy complains with + # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast... + return cast(Any, type_)._evaluate(globalns, localns, set()) + + +if sys.version_info < (3, 9): + # Ensure we always get all the whole `Annotated` hint, not just the annotated type. + # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`, + # so it already returns the full annotation + get_all_type_hints = get_type_hints + +else: + + def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any: + return get_type_hints(obj, globalns, localns, include_extras=True) + + +_T = TypeVar('_T') + +AnyCallable = TypingCallable[..., Any] +NoArgAnyCallable = TypingCallable[[], Any] + +# workaround for https://github.com/python/mypy/issues/9496 +AnyArgTCallable = TypingCallable[..., _T] + + +# Annotated[...] is implemented by returning an instance of one of these classes, depending on +# python/typing_extensions version. 
+AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'} + + +if sys.version_info < (3, 8): + + def get_origin(t: Type[Any]) -> Optional[Type[Any]]: + if type(t).__name__ in AnnotatedTypeNames: + # weirdly this is a runtime requirement, as well as for mypy + return cast(Type[Any], Annotated) + return getattr(t, '__origin__', None) + +else: + from typing import get_origin as _typing_get_origin + + def get_origin(tp: Type[Any]) -> Optional[Type[Any]]: + """ + We can't directly use `typing.get_origin` since we need a fallback to support + custom generic classes like `ConstrainedList` + It should be useless once https://github.com/cython/cython/issues/3537 is + solved and https://github.com/pydantic/pydantic/pull/1753 is merged. + """ + if type(tp).__name__ in AnnotatedTypeNames: + return cast(Type[Any], Annotated) # mypy complains about _SpecialForm + return _typing_get_origin(tp) or getattr(tp, '__origin__', None) + + +if sys.version_info < (3, 8): + from typing import _GenericAlias + + def get_args(t: Type[Any]) -> Tuple[Any, ...]: + """Compatibility version of get_args for python 3.7. + + Mostly compatible with the python 3.8 `typing` module version + and able to handle almost all use cases. + """ + if type(t).__name__ in AnnotatedTypeNames: + return t.__args__ + t.__metadata__ + if isinstance(t, _GenericAlias): + res = t.__args__ + if t.__origin__ is Callable and res and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return getattr(t, '__args__', ()) + +else: + from typing import get_args as _typing_get_args + + def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]: + """ + In python 3.9, `typing.Dict`, `typing.List`, ... + do have an empty `__args__` by default (instead of the generic ~T for example). + In order to still support `Dict` for example and consider it as `Dict[Any, Any]`, + we retrieve the `_nparams` value that tells us how many parameters it needs. 
+ """ + if hasattr(tp, '_nparams'): + return (Any,) * tp._nparams + # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple` + # in python 3.10- but now returns () for `tuple` and `Tuple`. + # This will probably be clarified in pydantic v2 + try: + if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]: # type: ignore[misc] + return ((),) + # there is a TypeError when compiled with cython + except TypeError: # pragma: no cover + pass + return () + + def get_args(tp: Type[Any]) -> Tuple[Any, ...]: + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if type(tp).__name__ in AnnotatedTypeNames: + return tp.__args__ + tp.__metadata__ + # the fallback is needed for the same reasons as `get_origin` (see above) + return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp) + + +if sys.version_info < (3, 9): + + def convert_generics(tp: Type[Any]) -> Type[Any]: + """Python 3.9 and older only supports generics from `typing` module. + They convert strings to ForwardRef automatically. + + Examples:: + typing.List['Hero'] == typing.List[ForwardRef('Hero')] + """ + return tp + +else: + from typing import _UnionGenericAlias # type: ignore + + from typing_extensions import _AnnotatedAlias + + def convert_generics(tp: Type[Any]) -> Type[Any]: + """ + Recursively searches for `str` type hints and replaces them with ForwardRef. 
+ + Examples:: + convert_generics(list['Hero']) == list[ForwardRef('Hero')] + convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int + """ + origin = get_origin(tp) + if not origin or not hasattr(tp, '__args__'): + return tp + + args = get_args(tp) + + # typing.Annotated needs special treatment + if origin is Annotated: + return _AnnotatedAlias(convert_generics(args[0]), args[1:]) + + # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)` + converted = tuple( + ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg) + for arg in args + ) + + if converted == args: + return tp + elif isinstance(tp, TypingGenericAlias): + return TypingGenericAlias(origin, converted) + elif isinstance(tp, TypesUnionType): + # recreate types.UnionType (PEP604, Python >= 3.10) + return _UnionGenericAlias(origin, converted) + else: + try: + setattr(tp, '__args__', converted) + except AttributeError: + pass + return tp + + +if sys.version_info < (3, 10): + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union + + WithArgsTypes = (TypingGenericAlias,) + +else: + import types + import typing + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union or tp is types.UnionType # noqa: E721 + + WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType) + + +if sys.version_info < (3, 9): + StrPath = Union[str, PathLike] +else: + StrPath = Union[str, PathLike] + # TODO: Once we switch to Cython 3 to handle generics properly + # (https://github.com/cython/cython/issues/2753), use following lines instead + # of the one above + # # os.PathLike only becomes subscriptable from Python 3.9 onwards + # StrPath = Union[str, PathLike[str]] + + +if TYPE_CHECKING: + 
from .fields import ModelField + + TupleGenerator = Generator[Tuple[str, Any], None, None] + DictStrAny = Dict[str, Any] + DictAny = Dict[Any, Any] + SetStr = Set[str] + ListStr = List[str] + IntStr = Union[int, str] + AbstractSetIntStr = AbstractSet[IntStr] + DictIntStrAny = Dict[IntStr, Any] + MappingIntStrAny = Mapping[IntStr, Any] + CallableGenerator = Generator[AnyCallable, None, None] + ReprArgs = Sequence[Tuple[Optional[str], Any]] + AnyClassMethod = classmethod[Any] + +__all__ = ( + 'AnyCallable', + 'NoArgAnyCallable', + 'NoneType', + 'is_none_type', + 'display_as_type', + 'resolve_annotations', + 'is_callable_type', + 'is_literal_type', + 'all_literal_values', + 'is_namedtuple', + 'is_typeddict', + 'is_typeddict_special', + 'is_new_type', + 'new_type_supertype', + 'is_classvar', + 'is_finalvar', + 'update_field_forward_refs', + 'update_model_forward_refs', + 'TupleGenerator', + 'DictStrAny', + 'DictAny', + 'SetStr', + 'ListStr', + 'IntStr', + 'AbstractSetIntStr', + 'DictIntStrAny', + 'CallableGenerator', + 'ReprArgs', + 'AnyClassMethod', + 'CallableGenerator', + 'WithArgsTypes', + 'get_args', + 'get_origin', + 'get_sub_types', + 'typing_base', + 'get_all_type_hints', + 'is_union', + 'StrPath', + 'MappingIntStrAny', +) + + +NoneType = None.__class__ + + +NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None]) + + +if sys.version_info < (3, 8): + # Even though this implementation is slower, we need it for python 3.7: + # In python 3.7 "Literal" is not a builtin type and uses a different + # mechanism. + # for this reason `Literal[None] is Literal[None]` evaluates to `False`, + # breaking the faster implementation used for the other python versions. 
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+
+elif sys.version_info[:2] == (3, 8):
+
+    def is_none_type(type_: Any) -> bool:
+        for none_type in NONE_TYPES:
+            if type_ is none_type:
+                return True
+        # With python 3.8, specifically 3.8.10, Literal "is" checks are very flaky and
+        # can change on very subtle changes like use of types in other modules,
+        # hopefully this check avoids that issue.
+        if is_literal_type(type_):  # pragma: no cover
+            return all_literal_values(type_) == (None,)
+        return False
+
+else:
+
+    def is_none_type(type_: Any) -> bool:
+        for none_type in NONE_TYPES:
+            if type_ is none_type:
+                return True
+        return False
+
+
+def display_as_type(v: Type[Any]) -> str:
+    if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type):
+        v = v.__class__
+
+    if is_union(get_origin(v)):
+        return f'Union[{", ".join(map(display_as_type, get_args(v)))}]'
+
+    if isinstance(v, WithArgsTypes):
+        # Generic aliases are constructs like `list[int]`
+        return str(v).replace('typing.', '')
+
+    try:
+        return v.__name__
+    except AttributeError:
+        # happens with typing objects
+        return str(v).replace('typing.', '')
+
+
+def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]:
+    """
+    Partially taken from typing.get_type_hints.
+
+    Resolve string or ForwardRef annotations into type objects if possible.
+ """ + base_globals: Optional[Dict[str, Any]] = None + if module_name: + try: + module = sys.modules[module_name] + except KeyError: + # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 + pass + else: + base_globals = module.__dict__ + + annotations = {} + for name, value in raw_annotations.items(): + if isinstance(value, str): + if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1): + value = ForwardRef(value, is_argument=False, is_class=True) + else: + value = ForwardRef(value, is_argument=False) + try: + value = _eval_type(value, base_globals, None) + except NameError: + # this is ok, it can be fixed with update_forward_refs + pass + annotations[name] = value + return annotations + + +def is_callable_type(type_: Type[Any]) -> bool: + return type_ is Callable or get_origin(type_) is Callable + + +def is_literal_type(type_: Type[Any]) -> bool: + return Literal is not None and get_origin(type_) is Literal + + +def literal_values(type_: Type[Any]) -> Tuple[Any, ...]: + return get_args(type_) + + +def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]: + """ + This method is used to retrieve all Literal values as + Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586) + e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]` + """ + if not is_literal_type(type_): + return (type_,) + + values = literal_values(type_) + return tuple(x for value in values for x in all_literal_values(value)) + + +def is_namedtuple(type_: Type[Any]) -> bool: + """ + Check if a given class is a named tuple. 
+ It can be either a `typing.NamedTuple` or `collections.namedtuple` + """ + from .utils import lenient_issubclass + + return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields') + + +def is_typeddict(type_: Type[Any]) -> bool: + """ + Check if a given class is a typed dict (from `typing` or `typing_extensions`) + In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict) + """ + from .utils import lenient_issubclass + + return lenient_issubclass(type_, dict) and hasattr(type_, '__total__') + + +def _check_typeddict_special(type_: Any) -> bool: + return type_ is TypedDictRequired or type_ is TypedDictNotRequired + + +def is_typeddict_special(type_: Any) -> bool: + """ + Check if type is a TypedDict special form (Required or NotRequired). + """ + return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_)) + + +test_type = NewType('test_type', str) + + +def is_new_type(type_: Type[Any]) -> bool: + """ + Check whether type_ was created using typing.NewType + """ + return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__') # type: ignore + + +def new_type_supertype(type_: Type[Any]) -> Type[Any]: + while hasattr(type_, '__supertype__'): + type_ = type_.__supertype__ + return type_ + + +def _check_classvar(v: Optional[Type[Any]]) -> bool: + if v is None: + return False + + return v.__class__ == ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar' + + +def _check_finalvar(v: Optional[Type[Any]]) -> bool: + """ + Check if a given type is a `typing.Final` type. 
+ """ + if v is None: + return False + + return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final') + + +def is_classvar(ann_type: Type[Any]) -> bool: + if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)): + return True + + # this is an ugly workaround for class vars that contain forward references and are therefore themselves + # forward references, see #3679 + if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): + return True + + return False + + +def is_finalvar(ann_type: Type[Any]) -> bool: + return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type)) + + +def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this ModelField, globalns and localns. + """ + prepare = False + if field.type_.__class__ == ForwardRef: + prepare = True + field.type_ = evaluate_forwardref(field.type_, globalns, localns or None) + if field.outer_type_.__class__ == ForwardRef: + prepare = True + field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None) + if prepare: + field.prepare() + + if field.sub_fields: + for sub_f in field.sub_fields: + update_field_forward_refs(sub_f, globalns=globalns, localns=localns) + + if field.discriminator_key is not None: + field.prepare_discriminated_union_sub_fields() + + +def update_model_forward_refs( + model: Type[Any], + fields: Iterable['ModelField'], + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable], + localns: 'DictStrAny', + exc_to_suppress: Tuple[Type[BaseException], ...] = (), +) -> None: + """ + Try to update model fields ForwardRefs based on model and localns. 
+ """ + if model.__module__ in sys.modules: + globalns = sys.modules[model.__module__].__dict__.copy() + else: + globalns = {} + + globalns.setdefault(model.__name__, model) + + for f in fields: + try: + update_field_forward_refs(f, globalns=globalns, localns=localns) + except exc_to_suppress: + pass + + for key in set(json_encoders.keys()): + if isinstance(key, str): + fr: ForwardRef = ForwardRef(key) + elif isinstance(key, ForwardRef): + fr = key + else: + continue + + try: + new_key = evaluate_forwardref(fr, globalns, localns or None) + except exc_to_suppress: # pragma: no cover + continue + + json_encoders[new_key] = json_encoders.pop(key) + + +def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]: + """ + Tries to get the class of a Type[T] annotation. Returns True if Type is used + without brackets. Otherwise returns None. + """ + if type_ is type: + return True + + if get_origin(type_) is None: + return None + + args = get_args(type_) + if not args or not isinstance(args[0], type): + return True + else: + return args[0] + + +def get_sub_types(tp: Any) -> List[Any]: + """ + Return all the types that are allowed by type `tp` + `tp` can be a `Union` of allowed types or an `Annotated` type + """ + origin = get_origin(tp) + if origin is Annotated: + return get_sub_types(get_args(tp)[0]) + elif is_union(origin): + return [x for t in get_args(tp) for x in get_sub_types(t)] + else: + return [tp] diff --git a/lib/pydantic/utils.py b/lib/pydantic/utils.py index 1619d1db..1d016c0e 100644 --- a/lib/pydantic/utils.py +++ b/lib/pydantic/utils.py @@ -1,4 +1,841 @@ -"""The `utils` module is a backport module from V1.""" -from ._migration import getattr_migration +import keyword +import warnings +import weakref +from collections import OrderedDict, defaultdict, deque +from copy import deepcopy +from itertools import islice, zip_longest +from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType +from typing import ( + 
TYPE_CHECKING, + AbstractSet, + Any, + Callable, + Collection, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) -__getattr__ = getattr_migration(__name__) +from typing_extensions import Annotated + +from .errors import ConfigError +from .typing import ( + NoneType, + WithArgsTypes, + all_literal_values, + display_as_type, + get_args, + get_origin, + is_literal_type, + is_union, +) +from .version import version_info + +if TYPE_CHECKING: + from inspect import Signature + from pathlib import Path + + from .config import BaseConfig + from .dataclasses import Dataclass + from .fields import ModelField + from .main import BaseModel + from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs + + RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] + +__all__ = ( + 'import_string', + 'sequence_like', + 'validate_field_name', + 'lenient_isinstance', + 'lenient_issubclass', + 'in_ipython', + 'is_valid_identifier', + 'deep_update', + 'update_not_none', + 'almost_equal_floats', + 'get_model', + 'to_camel', + 'is_valid_field', + 'smart_deepcopy', + 'PyObjectStr', + 'Representation', + 'GetterDict', + 'ValueItems', + 'version_info', # required here to match behaviour in v1.3 + 'ClassAttribute', + 'path_type', + 'ROOT_KEY', + 'get_unique_discriminator_alias', + 'get_discriminator_alias_and_values', + 'DUNDER_ATTRIBUTES', + 'LimitedDict', +) + +ROOT_KEY = '__root__' +# these are types that are returned unchanged by deepcopy +IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = { + int, + float, + complex, + str, + bool, + bytes, + type, + NoneType, + FunctionType, + BuiltinFunctionType, + LambdaType, + weakref.ref, + CodeType, + # note: including ModuleType will differ from behaviour of deepcopy by not producing error. 
+ # It might be not a good idea in general, but considering that this function used only internally + # against default values of fields, this will allow to actually have a field with module as default value + ModuleType, + NotImplemented.__class__, + Ellipsis.__class__, +} + +# these are types that if empty, might be copied with simple copy() instead of deepcopy() +BUILTIN_COLLECTIONS: Set[Type[Any]] = { + list, + set, + tuple, + frozenset, + dict, + OrderedDict, + defaultdict, + deque, +} + + +def import_string(dotted_path: str) -> Any: + """ + Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the + last name in the path. Raise ImportError if the import fails. + """ + from importlib import import_module + + try: + module_path, class_name = dotted_path.strip(' ').rsplit('.', 1) + except ValueError as e: + raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e + + module = import_module(module_path) + try: + return getattr(module, class_name) + except AttributeError as e: + raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e + + +def truncate(v: Union[str], *, max_len: int = 80) -> str: + """ + Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long + """ + warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning) + if isinstance(v, str) and len(v) > (max_len - 2): + # -3 so quote + string + … + quote has correct length + return (v[: (max_len - 3)] + '…').__repr__() + try: + v = v.__repr__() + except TypeError: + v = v.__class__.__repr__(v) # in case v is a type + if len(v) > max_len: + v = v[: max_len - 1] + '…' + return v + + +def sequence_like(v: Any) -> bool: + return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) + + +def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None: + """ + Ensure that the field's name does not shadow 
an existing attribute of the model. + """ + for base in bases: + if getattr(base, field_name, None): + raise NameError( + f'Field name "{field_name}" shadows a BaseModel attribute; ' + f'use a different field name with "alias=\'{field_name}\'".' + ) + + +def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(o, class_or_tuple) # type: ignore[arg-type] + except TypeError: + return False + + +def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type] + except TypeError: + if isinstance(cls, WithArgsTypes): + return False + raise # pragma: no cover + + +def in_ipython() -> bool: + """ + Check whether we're in an ipython environment, including jupyter notebooks. + """ + try: + eval('__IPYTHON__') + except NameError: + return False + else: # pragma: no cover + return True + + +def is_valid_identifier(identifier: str) -> bool: + """ + Checks that a string is a valid identifier and not a Python keyword. + :param identifier: The identifier to test. + :return: True if the identifier is valid. 
+ """ + return identifier.isidentifier() and not keyword.iskeyword(identifier) + + +KeyType = TypeVar('KeyType') + + +def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]: + updated_mapping = mapping.copy() + for updating_mapping in updating_mappings: + for k, v in updating_mapping.items(): + if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): + updated_mapping[k] = deep_update(updated_mapping[k], v) + else: + updated_mapping[k] = v + return updated_mapping + + +def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None: + mapping.update({k: v for k, v in update.items() if v is not None}) + + +def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool: + """ + Return True if two floats are almost equal + """ + return abs(value_1 - value_2) <= delta + + +def generate_model_signature( + init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig'] +) -> 'Signature': + """ + Generate signature for model based on its fields + """ + from inspect import Parameter, Signature, signature + + from .config import Extra + + present_params = signature(init).parameters.values() + merged_params: Dict[str, Parameter] = {} + var_kw = None + use_var_kw = False + + for param in islice(present_params, 1, None): # skip self arg + if param.kind is param.VAR_KEYWORD: + var_kw = param + continue + merged_params[param.name] = param + + if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through + allow_names = config.allow_population_by_field_name + for field_name, field in fields.items(): + param_name = field.alias + if field_name in merged_params or param_name in merged_params: + continue + elif not is_valid_identifier(param_name): + if allow_names and is_valid_identifier(field_name): + param_name = field_name + else: + use_var_kw = True + continue + + # TODO: replace annotation with actual 
expected types once #1055 solved + kwargs = {'default': field.default} if not field.required else {} + merged_params[param_name] = Parameter( + param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs + ) + + if config.extra is Extra.allow: + use_var_kw = True + + if var_kw and use_var_kw: + # Make sure the parameter for extra kwargs + # does not have the same name as a field + default_model_signature = [ + ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD), + ('data', Parameter.VAR_KEYWORD), + ] + if [(p.name, p.kind) for p in present_params] == default_model_signature: + # if this is the standard model signature, use extra_data as the extra args name + var_kw_name = 'extra_data' + else: + # else start from var_kw + var_kw_name = var_kw.name + + # generate a name that's definitely unique + while var_kw_name in fields: + var_kw_name += '_' + merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) + + return Signature(parameters=list(merged_params.values()), return_annotation=None) + + +def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']: + from .main import BaseModel + + try: + model_cls = obj.__pydantic_model__ # type: ignore + except AttributeError: + model_cls = obj + + if not issubclass(model_cls, BaseModel): + raise TypeError('Unsupported type, must be either BaseModel or dataclass') + return model_cls + + +def to_camel(string: str) -> str: + return ''.join(word.capitalize() for word in string.split('_')) + + +def to_lower_camel(string: str) -> str: + if len(string) >= 1: + pascal_string = to_camel(string) + return pascal_string[0].lower() + pascal_string[1:] + return string.lower() + + +T = TypeVar('T') + + +def unique_list( + input_list: Union[List[T], Tuple[T, ...]], + *, + name_factory: Callable[[T], str] = str, +) -> List[T]: + """ + Make a list unique while maintaining order. + We update the list if another one with the same name is set + (e.g. 
root validator overridden in subclass)
+    """
+    result: List[T] = []
+    result_names: List[str] = []
+    for v in input_list:
+        v_name = name_factory(v)
+        if v_name not in result_names:
+            result_names.append(v_name)
+            result.append(v)
+        else:
+            result[result_names.index(v_name)] = v
+
+    return result
+
+
+class PyObjectStr(str):
+    """
+    String class where repr doesn't include quotes. Useful with Representation when you want to return a string
+    representation of something that is valid (or pseudo-valid) python.
+    """
+
+    def __repr__(self) -> str:
+        return str(self)
+
+
+class Representation:
+    """
+    Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details.
+
+    __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations
+    of objects.
+    """
+
+    __slots__: Tuple[str, ...] = tuple()
+
+    def __repr_args__(self) -> 'ReprArgs':
+        """
+        Returns the attributes to show in __str__, __repr__, and __pretty__; this is generally overridden.
+
+        Can either return:
+        * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`
+        * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`
+        """
+        attrs = ((s, getattr(self, s)) for s in self.__slots__)
+        return [(a, v) for a, v in attrs if v is not None]
+
+    def __repr_name__(self) -> str:
+        """
+        Name of the instance's class, used in __repr__.
+        """
+        return self.__class__.__name__
+
+    def __repr_str__(self, join_str: str) -> str:
+        return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())
+
+    def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]:
+        """
+        Used by devtools (https://python-devtools.helpmanual.io/) to provide human readable representations of objects
+        """
+        yield self.__repr_name__() + '('
+        yield 1
+        for name, value in self.__repr_args__():
+            if name is not None:
+                yield name + '='
+            yield fmt(value)
+            yield ','
+        yield 0
+        yield -1
+        yield ')'
+
+    def __str__(self) -> str:
+        return self.__repr_str__(' ')
+
+    def __repr__(self) -> str:
+        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
+
+    def __rich_repr__(self) -> 'RichReprResult':
+        """Get fields for Rich library"""
+        for name, field_repr in self.__repr_args__():
+            if name is None:
+                yield field_repr
+            else:
+                yield name, field_repr
+
+
+class GetterDict(Representation):
+    """
+    Hack to make objects smell just enough like dicts for validate_model.
+
+    We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves.
+    """
+
+    __slots__ = ('_obj',)
+
+    def __init__(self, obj: Any):
+        self._obj = obj
+
+    def __getitem__(self, key: str) -> Any:
+        try:
+            return getattr(self._obj, key)
+        except AttributeError as e:
+            raise KeyError(key) from e
+
+    def get(self, key: Any, default: Any = None) -> Any:
+        return getattr(self._obj, key, default)
+
+    def extra_keys(self) -> Set[Any]:
+        """
+        We don't want to get any other attributes of obj if the model didn't explicitly ask for them
+        """
+        return set()
+
+    def keys(self) -> List[Any]:
+        """
+        Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python
+        dictionaries.
+ """ + return list(self) + + def values(self) -> List[Any]: + return [self[k] for k in self] + + def items(self) -> Iterator[Tuple[str, Any]]: + for k in self: + yield k, self.get(k) + + def __iter__(self) -> Iterator[str]: + for name in dir(self._obj): + if not name.startswith('_'): + yield name + + def __len__(self) -> int: + return sum(1 for _ in self) + + def __contains__(self, item: Any) -> bool: + return item in self.keys() + + def __eq__(self, other: Any) -> bool: + return dict(self) == dict(other.items()) + + def __repr_args__(self) -> 'ReprArgs': + return [(None, dict(self))] + + def __repr_name__(self) -> str: + return f'GetterDict[{display_as_type(self._obj)}]' + + +class ValueItems(Representation): + """ + Class for more convenient calculation of excluded or included fields on values. + """ + + __slots__ = ('_items', '_type') + + def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None: + items = self._coerce_items(items) + + if isinstance(value, (list, tuple)): + items = self._normalize_indexes(items, len(value)) + + self._items: 'MappingIntStrAny' = items + + def is_excluded(self, item: Any) -> bool: + """ + Check if item is fully excluded. 
+ + :param item: key or index of a value + """ + return self.is_true(self._items.get(item)) + + def is_included(self, item: Any) -> bool: + """ + Check if value is contained in self._items + + :param item: key or index of value + """ + return item in self._items + + def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]: + """ + :param e: key or index of element on value + :return: raw values for element if self._items is dict and contain needed element + """ + + item = self._items.get(e) + return item if not self.is_true(item) else None + + def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny': + """ + :param items: dict or set of indexes which will be normalized + :param v_length: length of sequence indexes of which will be + + >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) + {0: True, 2: True, 3: True} + >>> self._normalize_indexes({'__all__': True}, 4) + {0: True, 1: True, 2: True, 3: True} + """ + + normalized_items: 'DictIntStrAny' = {} + all_items = None + for i, v in items.items(): + if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)): + raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') + if i == '__all__': + all_items = self._coerce_value(v) + continue + if not isinstance(i, int): + raise TypeError( + 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: ' + 'expected integer keys or keyword "__all__"' + ) + normalized_i = v_length + i if i < 0 else i + normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) + + if not all_items: + return normalized_items + if self.is_true(all_items): + for i in range(v_length): + normalized_items.setdefault(i, ...) 
+ return normalized_items + for i in range(v_length): + normalized_item = normalized_items.setdefault(i, {}) + if not self.is_true(normalized_item): + normalized_items[i] = self.merge(all_items, normalized_item) + return normalized_items + + @classmethod + def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: + """ + Merge a ``base`` item with an ``override`` item. + + Both ``base`` and ``override`` are converted to dictionaries if possible. + Sets are converted to dictionaries with the sets entries as keys and + Ellipsis as values. + + Each key-value pair existing in ``base`` is merged with ``override``, + while the rest of the key-value pairs are updated recursively with this function. + + Merging takes place based on the "union" of keys if ``intersect`` is + set to ``False`` (default) and on the intersection of keys if + ``intersect`` is set to ``True``. + """ + override = cls._coerce_value(override) + base = cls._coerce_value(base) + if override is None: + return base + if cls.is_true(base) or base is None: + return override + if cls.is_true(override): + return base if intersect else override + + # intersection or union of keys while preserving ordering: + if intersect: + merge_keys = [k for k in base if k in override] + [k for k in override if k in base] + else: + merge_keys = list(base) + [k for k in override if k not in base] + + merged: 'DictIntStrAny' = {} + for k in merge_keys: + merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) + if merged_item is not None: + merged[k] = merged_item + + return merged + + @staticmethod + def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny': + if isinstance(items, Mapping): + pass + elif isinstance(items, AbstractSet): + items = dict.fromkeys(items, ...) 
+ else: + class_name = getattr(items, '__class__', '???') + assert_never( + items, + f'Unexpected type of exclude value {class_name}', + ) + return items + + @classmethod + def _coerce_value(cls, value: Any) -> Any: + if value is None or cls.is_true(value): + return value + return cls._coerce_items(value) + + @staticmethod + def is_true(v: Any) -> bool: + return v is True or v is ... + + def __repr_args__(self) -> 'ReprArgs': + return [(None, self._items)] + + +class ClassAttribute: + """ + Hide class attribute from its instances + """ + + __slots__ = ( + 'name', + 'value', + ) + + def __init__(self, name: str, value: Any) -> None: + self.name = name + self.value = value + + def __get__(self, instance: Any, owner: Type[Any]) -> None: + if instance is None: + return self.value + raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') + + +path_types = { + 'is_dir': 'directory', + 'is_file': 'file', + 'is_mount': 'mount point', + 'is_symlink': 'symlink', + 'is_block_device': 'block device', + 'is_char_device': 'char device', + 'is_fifo': 'FIFO', + 'is_socket': 'socket', +} + + +def path_type(p: 'Path') -> str: + """ + Find out what sort of thing a path is. 
+ """ + assert p.exists(), 'path does not exist' + for method, name in path_types.items(): + if getattr(p, method)(): + return name + + return 'unknown' + + +Obj = TypeVar('Obj') + + +def smart_deepcopy(obj: Obj) -> Obj: + """ + Return type as is for immutable built-in types + Use obj.copy() for built-in empty collections + Use copy.deepcopy() for non-empty collections and unknown objects + """ + + obj_type = obj.__class__ + if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: + return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway + try: + if not obj and obj_type in BUILTIN_COLLECTIONS: + # faster way for empty collections, no need to copy its members + return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method + except (TypeError, ValueError, RuntimeError): + # do we really dare to catch ALL errors? Seems a bit risky + pass + + return deepcopy(obj) # slowest way when we actually might need a deepcopy + + +def is_valid_field(name: str) -> bool: + if not name.startswith('_'): + return True + return ROOT_KEY == name + + +DUNDER_ATTRIBUTES = { + '__annotations__', + '__classcell__', + '__doc__', + '__module__', + '__orig_bases__', + '__orig_class__', + '__qualname__', +} + + +def is_valid_private_name(name: str) -> bool: + return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES + + +_EMPTY = object() + + +def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool: + """ + Check that the items of `left` are the same objects as those in `right`. 
+ + >>> a, b = object(), object() + >>> all_identical([a, b, a], [a, b, a]) + True + >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" + False + """ + for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY): + if left_item is not right_item: + return False + return True + + +def assert_never(obj: NoReturn, msg: str) -> NoReturn: + """ + Helper to make sure that we have covered all possible types. + + This is mostly useful for ``mypy``, docs: + https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks + """ + raise TypeError(msg) + + +def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str: + """Validate that all aliases are the same and if that's the case return the alias""" + unique_aliases = set(all_aliases) + if len(unique_aliases) > 1: + raise ConfigError( + f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})' + ) + return unique_aliases.pop() + + +def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]: + """ + Get alias and all valid values in the `Literal` type of the discriminator field + `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many. 
+ """ + is_root_model = getattr(tp, '__custom_root_type__', False) + + if get_origin(tp) is Annotated: + tp = get_args(tp)[0] + + if hasattr(tp, '__pydantic_model__'): + tp = tp.__pydantic_model__ + + if is_union(get_origin(tp)): + alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key) + return alias, tuple(v for values in all_values for v in values) + elif is_root_model: + union_type = tp.__fields__[ROOT_KEY].type_ + alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key) + + if len(set(all_values)) > 1: + raise ConfigError( + f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}' + ) + + return alias, all_values[0] + + else: + try: + t_discriminator_type = tp.__fields__[discriminator_key].type_ + except AttributeError as e: + raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e + except KeyError as e: + raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e + + if not is_literal_type(t_discriminator_type): + raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`') + + return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type) + + +def _get_union_alias_and_all_values( + union_type: Type[Any], discriminator_key: str +) -> Tuple[str, Tuple[Tuple[str, ...], ...]]: + zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)] + # unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))] + all_aliases, all_values = zip(*zipped_aliases_values) + return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values + + +KT = TypeVar('KT') +VT = TypeVar('VT') +if TYPE_CHECKING: + # Annoying inheriting from `MutableMapping` and `dict` breaks cython, hence this work around + class LimitedDict(dict, MutableMapping[KT, 
VT]): # type: ignore[type-arg] + def __init__(self, size_limit: int = 1000): + ... + +else: + + class LimitedDict(dict): + """ + Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage. + + Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache. + + Annoying inheriting from `MutableMapping` breaks cython. + """ + + def __init__(self, size_limit: int = 1000): + self.size_limit = size_limit + super().__init__() + + def __setitem__(self, __key: Any, __value: Any) -> None: + super().__setitem__(__key, __value) + if len(self) > self.size_limit: + excess = len(self) - self.size_limit + self.size_limit // 10 + to_remove = list(self.keys())[:excess] + for key in to_remove: + del self[key] + + def __class_getitem__(cls, *args: Any) -> Any: + # to avoid errors with 3.7 + pass diff --git a/lib/pydantic/v1/__init__.py b/lib/pydantic/v1/__init__.py deleted file mode 100644 index 3bf1418f..00000000 --- a/lib/pydantic/v1/__init__.py +++ /dev/null @@ -1,131 +0,0 @@ -# flake8: noqa -from . import dataclasses -from .annotated_types import create_model_from_namedtuple, create_model_from_typeddict -from .class_validators import root_validator, validator -from .config import BaseConfig, ConfigDict, Extra -from .decorator import validate_arguments -from .env_settings import BaseSettings -from .error_wrappers import ValidationError -from .errors import * -from .fields import Field, PrivateAttr, Required -from .main import * -from .networks import * -from .parse import Protocol -from .tools import * -from .types import * -from .version import VERSION, compiled - -__version__ = VERSION - -# WARNING __all__ from .errors is not included here, it will be removed as an export here in v2 -# please use "from pydantic.errors import ..." 
instead -__all__ = [ - # annotated types utils - 'create_model_from_namedtuple', - 'create_model_from_typeddict', - # dataclasses - 'dataclasses', - # class_validators - 'root_validator', - 'validator', - # config - 'BaseConfig', - 'ConfigDict', - 'Extra', - # decorator - 'validate_arguments', - # env_settings - 'BaseSettings', - # error_wrappers - 'ValidationError', - # fields - 'Field', - 'Required', - # main - 'BaseModel', - 'create_model', - 'validate_model', - # network - 'AnyUrl', - 'AnyHttpUrl', - 'FileUrl', - 'HttpUrl', - 'stricturl', - 'EmailStr', - 'NameEmail', - 'IPvAnyAddress', - 'IPvAnyInterface', - 'IPvAnyNetwork', - 'PostgresDsn', - 'CockroachDsn', - 'AmqpDsn', - 'RedisDsn', - 'MongoDsn', - 'KafkaDsn', - 'validate_email', - # parse - 'Protocol', - # tools - 'parse_file_as', - 'parse_obj_as', - 'parse_raw_as', - 'schema_of', - 'schema_json_of', - # types - 'NoneStr', - 'NoneBytes', - 'StrBytes', - 'NoneStrBytes', - 'StrictStr', - 'ConstrainedBytes', - 'conbytes', - 'ConstrainedList', - 'conlist', - 'ConstrainedSet', - 'conset', - 'ConstrainedFrozenSet', - 'confrozenset', - 'ConstrainedStr', - 'constr', - 'PyObject', - 'ConstrainedInt', - 'conint', - 'PositiveInt', - 'NegativeInt', - 'NonNegativeInt', - 'NonPositiveInt', - 'ConstrainedFloat', - 'confloat', - 'PositiveFloat', - 'NegativeFloat', - 'NonNegativeFloat', - 'NonPositiveFloat', - 'FiniteFloat', - 'ConstrainedDecimal', - 'condecimal', - 'ConstrainedDate', - 'condate', - 'UUID1', - 'UUID3', - 'UUID4', - 'UUID5', - 'FilePath', - 'DirectoryPath', - 'Json', - 'JsonWrapper', - 'SecretField', - 'SecretStr', - 'SecretBytes', - 'StrictBool', - 'StrictBytes', - 'StrictInt', - 'StrictFloat', - 'PaymentCardNumber', - 'PrivateAttr', - 'ByteSize', - 'PastDate', - 'FutureDate', - # version - 'compiled', - 'VERSION', -] diff --git a/lib/pydantic/v1/class_validators.py b/lib/pydantic/v1/class_validators.py deleted file mode 100644 index 71e66509..00000000 --- a/lib/pydantic/v1/class_validators.py +++ /dev/null 
@@ -1,361 +0,0 @@ -import warnings -from collections import ChainMap -from functools import partial, partialmethod, wraps -from itertools import chain -from types import FunctionType -from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload - -from .errors import ConfigError -from .typing import AnyCallable -from .utils import ROOT_KEY, in_ipython - -if TYPE_CHECKING: - from .typing import AnyClassMethod - - -class Validator: - __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure' - - def __init__( - self, - func: AnyCallable, - pre: bool = False, - each_item: bool = False, - always: bool = False, - check_fields: bool = False, - skip_on_failure: bool = False, - ): - self.func = func - self.pre = pre - self.each_item = each_item - self.always = always - self.check_fields = check_fields - self.skip_on_failure = skip_on_failure - - -if TYPE_CHECKING: - from inspect import Signature - - from .config import BaseConfig - from .fields import ModelField - from .types import ModelOrDc - - ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any] - ValidatorsList = List[ValidatorCallable] - ValidatorListDict = Dict[str, List[Validator]] - -_FUNCS: Set[str] = set() -VALIDATOR_CONFIG_KEY = '__validator_config__' -ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__' - - -def validator( - *fields: str, - pre: bool = False, - each_item: bool = False, - always: bool = False, - check_fields: bool = True, - whole: Optional[bool] = None, - allow_reuse: bool = False, -) -> Callable[[AnyCallable], 'AnyClassMethod']: - """ - Decorate methods on the class indicating that they should be used to validate fields - :param fields: which field(s) the method should be called on - :param pre: whether or not this validator should be called before the standard validators (else after) - :param each_item: for complex objects (sets, lists etc.) 
whether to validate individual elements rather than the - whole object - :param always: whether this method and other validators should be called even if the value is missing - :param check_fields: whether to check that the fields actually exist on the model - :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function - """ - if not fields: - raise ConfigError('validator with no fields specified') - elif isinstance(fields[0], FunctionType): - raise ConfigError( - "validators should be used with fields and keyword arguments, not bare. " # noqa: Q000 - "E.g. usage should be `@validator('', ...)`" - ) - elif not all(isinstance(field, str) for field in fields): - raise ConfigError( - "validator fields should be passed as separate string args. " # noqa: Q000 - "E.g. usage should be `@validator('', '', ...)`" - ) - - if whole is not None: - warnings.warn( - 'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead', - DeprecationWarning, - ) - assert each_item is False, '"each_item" and "whole" conflict, remove "whole"' - each_item = not whole - - def dec(f: AnyCallable) -> 'AnyClassMethod': - f_cls = _prepare_validator(f, allow_reuse) - setattr( - f_cls, - VALIDATOR_CONFIG_KEY, - ( - fields, - Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields), - ), - ) - return f_cls - - return dec - - -@overload -def root_validator(_func: AnyCallable) -> 'AnyClassMethod': - ... - - -@overload -def root_validator( - *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False -) -> Callable[[AnyCallable], 'AnyClassMethod']: - ... 
- - -def root_validator( - _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False -) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]: - """ - Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either - before or after standard model parsing/validation is performed. - """ - if _func: - f_cls = _prepare_validator(_func, allow_reuse) - setattr( - f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure) - ) - return f_cls - - def dec(f: AnyCallable) -> 'AnyClassMethod': - f_cls = _prepare_validator(f, allow_reuse) - setattr( - f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure) - ) - return f_cls - - return dec - - -def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod': - """ - Avoid validators with duplicated names since without this, validators can be overwritten silently - which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False. - """ - f_cls = function if isinstance(function, classmethod) else classmethod(function) - if not in_ipython() and not allow_reuse: - ref = ( - getattr(f_cls.__func__, '__module__', '') - + '.' 
- + getattr(f_cls.__func__, '__qualname__', f'') - ) - if ref in _FUNCS: - raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`') - _FUNCS.add(ref) - return f_cls - - -class ValidatorGroup: - def __init__(self, validators: 'ValidatorListDict') -> None: - self.validators = validators - self.used_validators = {'*'} - - def get_validators(self, name: str) -> Optional[Dict[str, Validator]]: - self.used_validators.add(name) - validators = self.validators.get(name, []) - if name != ROOT_KEY: - validators += self.validators.get('*', []) - if validators: - return {getattr(v.func, '__name__', f''): v for v in validators} - else: - return None - - def check_for_unused(self) -> None: - unused_validators = set( - chain.from_iterable( - ( - getattr(v.func, '__name__', f'') - for v in self.validators[f] - if v.check_fields - ) - for f in (self.validators.keys() - self.used_validators) - ) - ) - if unused_validators: - fn = ', '.join(unused_validators) - raise ConfigError( - f"Validators defined with incorrect fields: {fn} " # noqa: Q000 - f"(use check_fields=False if you're inheriting from the model and intended this)" - ) - - -def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]: - validators: Dict[str, List[Validator]] = {} - for var_name, value in namespace.items(): - validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None) - if validator_config: - fields, v = validator_config - for field in fields: - if field in validators: - validators[field].append(v) - else: - validators[field] = [v] - return validators - - -def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]: - from inspect import signature - - pre_validators: List[AnyCallable] = [] - post_validators: List[Tuple[bool, AnyCallable]] = [] - for name, value in namespace.items(): - validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None) - if 
validator_config: - sig = signature(validator_config.func) - args = list(sig.parameters.keys()) - if args[0] == 'self': - raise ConfigError( - f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, ' - f'should be: (cls, values).' - ) - if len(args) != 2: - raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).') - # check function signature - if validator_config.pre: - pre_validators.append(validator_config.func) - else: - post_validators.append((validator_config.skip_on_failure, validator_config.func)) - return pre_validators, post_validators - - -def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict': - for field, field_validators in base_validators.items(): - if field not in validators: - validators[field] = [] - validators[field] += field_validators - return validators - - -def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable': - """ - Make a generic function which calls a validator with the right arguments. - - Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow, - hence this laborious way of doing things. - - It's done like this so validators don't all need **kwargs in their signature, eg. any combination of - the arguments "values", "fields" and/or "config" are permitted. 
- """ - from inspect import signature - - if not isinstance(validator, (partial, partialmethod)): - # This should be the default case, so overhead is reduced - sig = signature(validator) - args = list(sig.parameters.keys()) - else: - # Fix the generated argument lists of partial methods - sig = signature(validator.func) - args = [ - k - for k in signature(validator.func).parameters.keys() - if k not in validator.args | validator.keywords.keys() - ] - - first_arg = args.pop(0) - if first_arg == 'self': - raise ConfigError( - f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, ' - f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.' - ) - elif first_arg == 'cls': - # assume the second argument is value - return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) - else: - # assume the first argument was value which has already been removed - return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) - - -def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList': - return [make_generic_validator(f) for f in v_funcs if f] - - -all_kwargs = {'values', 'field', 'config'} - - -def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': - # assume the first argument is value - has_kwargs = False - if 'kwargs' in args: - has_kwargs = True - args -= {'kwargs'} - - if not args.issubset(all_kwargs): - raise ConfigError( - f'Invalid signature for validator {validator}: {sig}, should be: ' - f'(cls, value, values, config, field), "values", "config" and "field" are all optional.' 
- ) - - if has_kwargs: - return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) - elif args == set(): - return lambda cls, v, values, field, config: validator(cls, v) - elif args == {'values'}: - return lambda cls, v, values, field, config: validator(cls, v, values=values) - elif args == {'field'}: - return lambda cls, v, values, field, config: validator(cls, v, field=field) - elif args == {'config'}: - return lambda cls, v, values, field, config: validator(cls, v, config=config) - elif args == {'values', 'field'}: - return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field) - elif args == {'values', 'config'}: - return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config) - elif args == {'field', 'config'}: - return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config) - else: - # args == {'values', 'field', 'config'} - return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) - - -def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': - has_kwargs = False - if 'kwargs' in args: - has_kwargs = True - args -= {'kwargs'} - - if not args.issubset(all_kwargs): - raise ConfigError( - f'Invalid signature for validator {validator}: {sig}, should be: ' - f'(value, values, config, field), "values", "config" and "field" are all optional.' 
- ) - - if has_kwargs: - return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) - elif args == set(): - return lambda cls, v, values, field, config: validator(v) - elif args == {'values'}: - return lambda cls, v, values, field, config: validator(v, values=values) - elif args == {'field'}: - return lambda cls, v, values, field, config: validator(v, field=field) - elif args == {'config'}: - return lambda cls, v, values, field, config: validator(v, config=config) - elif args == {'values', 'field'}: - return lambda cls, v, values, field, config: validator(v, values=values, field=field) - elif args == {'values', 'config'}: - return lambda cls, v, values, field, config: validator(v, values=values, config=config) - elif args == {'field', 'config'}: - return lambda cls, v, values, field, config: validator(v, field=field, config=config) - else: - # args == {'values', 'field', 'config'} - return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) - - -def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']: - all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__]) # type: ignore[arg-type,var-annotated] - return { - k: v - for k, v in all_attributes.items() - if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY) - } diff --git a/lib/pydantic/v1/color.py b/lib/pydantic/v1/color.py deleted file mode 100644 index 6fdc9fb1..00000000 --- a/lib/pydantic/v1/color.py +++ /dev/null @@ -1,494 +0,0 @@ -""" -Color definitions are used as per CSS3 specification: -http://www.w3.org/TR/css3-color/#svg-color - -A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`. - -In these cases the LAST color when sorted alphabetically takes preferences, -eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua". 
-""" -import math -import re -from colorsys import hls_to_rgb, rgb_to_hls -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast - -from .errors import ColorError -from .utils import Representation, almost_equal_floats - -if TYPE_CHECKING: - from .typing import CallableGenerator, ReprArgs - -ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]] -ColorType = Union[ColorTuple, str] -HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]] - - -class RGBA: - """ - Internal use only as a representation of a color. - """ - - __slots__ = 'r', 'g', 'b', 'alpha', '_tuple' - - def __init__(self, r: float, g: float, b: float, alpha: Optional[float]): - self.r = r - self.g = g - self.b = b - self.alpha = alpha - - self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha) - - def __getitem__(self, item: Any) -> Any: - return self._tuple[item] - - -# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached -r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*' -r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*' -_r_255 = r'(\d{1,3}(?:\.\d+)?)' -_r_comma = r'\s*,\s*' -r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*' -_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)' -r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*' -_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?' 
-_r_sl = r'(\d{1,3}(?:\.\d+)?)%' -r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*' -r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*' - -# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used -repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'} -rads = 2 * math.pi - - -class Color(Representation): - __slots__ = '_original', '_rgba' - - def __init__(self, value: ColorType) -> None: - self._rgba: RGBA - self._original: ColorType - if isinstance(value, (tuple, list)): - self._rgba = parse_tuple(value) - elif isinstance(value, str): - self._rgba = parse_str(value) - elif isinstance(value, Color): - self._rgba = value._rgba - value = value._original - else: - raise ColorError(reason='value must be a tuple, list or string') - - # if we've got here value must be a valid color - self._original = value - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format='color') - - def original(self) -> ColorType: - """ - Original value passed to Color - """ - return self._original - - def as_named(self, *, fallback: bool = False) -> str: - if self._rgba.alpha is None: - rgb = cast(Tuple[int, int, int], self.as_rgb_tuple()) - try: - return COLORS_BY_VALUE[rgb] - except KeyError as e: - if fallback: - return self.as_hex() - else: - raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e - else: - return self.as_hex() - - def as_hex(self) -> str: - """ - Hex string representing the color can be 3, 4, 6 or 8 characters depending on whether the string - a "short" representation of the color is possible and whether there's an alpha channel. 
- """ - values = [float_to_255(c) for c in self._rgba[:3]] - if self._rgba.alpha is not None: - values.append(float_to_255(self._rgba.alpha)) - - as_hex = ''.join(f'{v:02x}' for v in values) - if all(c in repeat_colors for c in values): - as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2)) - return '#' + as_hex - - def as_rgb(self) -> str: - """ - Color as an rgb(, , ) or rgba(, , , ) string. - """ - if self._rgba.alpha is None: - return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})' - else: - return ( - f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, ' - f'{round(self._alpha_float(), 2)})' - ) - - def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple: - """ - Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is - in the range 0 to 1. - - :param alpha: whether to include the alpha channel, options are - None - (default) include alpha only if it's set (e.g. not None) - True - always include alpha, - False - always omit alpha, - """ - r, g, b = (float_to_255(c) for c in self._rgba[:3]) - if alpha is None: - if self._rgba.alpha is None: - return r, g, b - else: - return r, g, b, self._alpha_float() - elif alpha: - return r, g, b, self._alpha_float() - else: - # alpha is False - return r, g, b - - def as_hsl(self) -> str: - """ - Color as an hsl(, , ) or hsl(, , , ) string. - """ - if self._rgba.alpha is None: - h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore - return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})' - else: - h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore - return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})' - - def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple: - """ - Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in - the range 0 to 1. 
- - NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys. - - :param alpha: whether to include the alpha channel, options are - None - (default) include alpha only if it's set (e.g. not None) - True - always include alpha, - False - always omit alpha, - """ - h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) - if alpha is None: - if self._rgba.alpha is None: - return h, s, l - else: - return h, s, l, self._alpha_float() - if alpha: - return h, s, l, self._alpha_float() - else: - # alpha is False - return h, s, l - - def _alpha_float(self) -> float: - return 1 if self._rgba.alpha is None else self._rgba.alpha - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls - - def __str__(self) -> str: - return self.as_named(fallback=True) - - def __repr_args__(self) -> 'ReprArgs': - return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] # type: ignore - - def __eq__(self, other: Any) -> bool: - return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple() - - def __hash__(self) -> int: - return hash(self.as_rgb_tuple()) - - -def parse_tuple(value: Tuple[Any, ...]) -> RGBA: - """ - Parse a tuple or list as a color. - """ - if len(value) == 3: - r, g, b = (parse_color_value(v) for v in value) - return RGBA(r, g, b, None) - elif len(value) == 4: - r, g, b = (parse_color_value(v) for v in value[:3]) - return RGBA(r, g, b, parse_float_alpha(value[3])) - else: - raise ColorError(reason='tuples must have length 3 or 4') - - -def parse_str(value: str) -> RGBA: - """ - Parse a string to an RGBA tuple, trying the following formats (in this order): - * named color, see COLORS_BY_NAME below - * hex short eg. `fff` (prefix can be `#`, `0x` or nothing) - * hex long eg. 
`ffffff` (prefix can be `#`, `0x` or nothing) - * `rgb(, , ) ` - * `rgba(, , , )` - """ - value_lower = value.lower() - try: - r, g, b = COLORS_BY_NAME[value_lower] - except KeyError: - pass - else: - return ints_to_rgba(r, g, b, None) - - m = re.fullmatch(r_hex_short, value_lower) - if m: - *rgb, a = m.groups() - r, g, b = (int(v * 2, 16) for v in rgb) - if a: - alpha: Optional[float] = int(a * 2, 16) / 255 - else: - alpha = None - return ints_to_rgba(r, g, b, alpha) - - m = re.fullmatch(r_hex_long, value_lower) - if m: - *rgb, a = m.groups() - r, g, b = (int(v, 16) for v in rgb) - if a: - alpha = int(a, 16) / 255 - else: - alpha = None - return ints_to_rgba(r, g, b, alpha) - - m = re.fullmatch(r_rgb, value_lower) - if m: - return ints_to_rgba(*m.groups(), None) # type: ignore - - m = re.fullmatch(r_rgba, value_lower) - if m: - return ints_to_rgba(*m.groups()) # type: ignore - - m = re.fullmatch(r_hsl, value_lower) - if m: - h, h_units, s, l_ = m.groups() - return parse_hsl(h, h_units, s, l_) - - m = re.fullmatch(r_hsla, value_lower) - if m: - h, h_units, s, l_, a = m.groups() - return parse_hsl(h, h_units, s, l_, parse_float_alpha(a)) - - raise ColorError(reason='string not recognised as a valid color') - - -def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA: - return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha)) - - -def parse_color_value(value: Union[int, str], max_val: int = 255) -> float: - """ - Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number - in the range 0 to 1 - """ - try: - color = float(value) - except ValueError: - raise ColorError(reason='color values must be a valid number') - if 0 <= color <= max_val: - return color / max_val - else: - raise ColorError(reason=f'color values must be in the range 0 to {max_val}') - - -def parse_float_alpha(value: Union[None, str, float, int]) -> 
Optional[float]: - """ - Parse a value checking it's a valid float in the range 0 to 1 - """ - if value is None: - return None - try: - if isinstance(value, str) and value.endswith('%'): - alpha = float(value[:-1]) / 100 - else: - alpha = float(value) - except ValueError: - raise ColorError(reason='alpha values must be a valid float') - - if almost_equal_floats(alpha, 1): - return None - elif 0 <= alpha <= 1: - return alpha - else: - raise ColorError(reason='alpha values must be in the range 0 to 1') - - -def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA: - """ - Parse raw hue, saturation, lightness and alpha values and convert to RGBA. - """ - s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100) - - h_value = float(h) - if h_units in {None, 'deg'}: - h_value = h_value % 360 / 360 - elif h_units == 'rad': - h_value = h_value % rads / rads - else: - # turns - h_value = h_value % 1 - - r, g, b = hls_to_rgb(h_value, l_value, s_value) - return RGBA(r, g, b, alpha) - - -def float_to_255(c: float) -> int: - return int(round(c * 255)) - - -COLORS_BY_NAME = { - 'aliceblue': (240, 248, 255), - 'antiquewhite': (250, 235, 215), - 'aqua': (0, 255, 255), - 'aquamarine': (127, 255, 212), - 'azure': (240, 255, 255), - 'beige': (245, 245, 220), - 'bisque': (255, 228, 196), - 'black': (0, 0, 0), - 'blanchedalmond': (255, 235, 205), - 'blue': (0, 0, 255), - 'blueviolet': (138, 43, 226), - 'brown': (165, 42, 42), - 'burlywood': (222, 184, 135), - 'cadetblue': (95, 158, 160), - 'chartreuse': (127, 255, 0), - 'chocolate': (210, 105, 30), - 'coral': (255, 127, 80), - 'cornflowerblue': (100, 149, 237), - 'cornsilk': (255, 248, 220), - 'crimson': (220, 20, 60), - 'cyan': (0, 255, 255), - 'darkblue': (0, 0, 139), - 'darkcyan': (0, 139, 139), - 'darkgoldenrod': (184, 134, 11), - 'darkgray': (169, 169, 169), - 'darkgreen': (0, 100, 0), - 'darkgrey': (169, 169, 169), - 'darkkhaki': (189, 183, 107), - 'darkmagenta': (139, 
0, 139), - 'darkolivegreen': (85, 107, 47), - 'darkorange': (255, 140, 0), - 'darkorchid': (153, 50, 204), - 'darkred': (139, 0, 0), - 'darksalmon': (233, 150, 122), - 'darkseagreen': (143, 188, 143), - 'darkslateblue': (72, 61, 139), - 'darkslategray': (47, 79, 79), - 'darkslategrey': (47, 79, 79), - 'darkturquoise': (0, 206, 209), - 'darkviolet': (148, 0, 211), - 'deeppink': (255, 20, 147), - 'deepskyblue': (0, 191, 255), - 'dimgray': (105, 105, 105), - 'dimgrey': (105, 105, 105), - 'dodgerblue': (30, 144, 255), - 'firebrick': (178, 34, 34), - 'floralwhite': (255, 250, 240), - 'forestgreen': (34, 139, 34), - 'fuchsia': (255, 0, 255), - 'gainsboro': (220, 220, 220), - 'ghostwhite': (248, 248, 255), - 'gold': (255, 215, 0), - 'goldenrod': (218, 165, 32), - 'gray': (128, 128, 128), - 'green': (0, 128, 0), - 'greenyellow': (173, 255, 47), - 'grey': (128, 128, 128), - 'honeydew': (240, 255, 240), - 'hotpink': (255, 105, 180), - 'indianred': (205, 92, 92), - 'indigo': (75, 0, 130), - 'ivory': (255, 255, 240), - 'khaki': (240, 230, 140), - 'lavender': (230, 230, 250), - 'lavenderblush': (255, 240, 245), - 'lawngreen': (124, 252, 0), - 'lemonchiffon': (255, 250, 205), - 'lightblue': (173, 216, 230), - 'lightcoral': (240, 128, 128), - 'lightcyan': (224, 255, 255), - 'lightgoldenrodyellow': (250, 250, 210), - 'lightgray': (211, 211, 211), - 'lightgreen': (144, 238, 144), - 'lightgrey': (211, 211, 211), - 'lightpink': (255, 182, 193), - 'lightsalmon': (255, 160, 122), - 'lightseagreen': (32, 178, 170), - 'lightskyblue': (135, 206, 250), - 'lightslategray': (119, 136, 153), - 'lightslategrey': (119, 136, 153), - 'lightsteelblue': (176, 196, 222), - 'lightyellow': (255, 255, 224), - 'lime': (0, 255, 0), - 'limegreen': (50, 205, 50), - 'linen': (250, 240, 230), - 'magenta': (255, 0, 255), - 'maroon': (128, 0, 0), - 'mediumaquamarine': (102, 205, 170), - 'mediumblue': (0, 0, 205), - 'mediumorchid': (186, 85, 211), - 'mediumpurple': (147, 112, 219), - 'mediumseagreen': (60, 179, 
113), - 'mediumslateblue': (123, 104, 238), - 'mediumspringgreen': (0, 250, 154), - 'mediumturquoise': (72, 209, 204), - 'mediumvioletred': (199, 21, 133), - 'midnightblue': (25, 25, 112), - 'mintcream': (245, 255, 250), - 'mistyrose': (255, 228, 225), - 'moccasin': (255, 228, 181), - 'navajowhite': (255, 222, 173), - 'navy': (0, 0, 128), - 'oldlace': (253, 245, 230), - 'olive': (128, 128, 0), - 'olivedrab': (107, 142, 35), - 'orange': (255, 165, 0), - 'orangered': (255, 69, 0), - 'orchid': (218, 112, 214), - 'palegoldenrod': (238, 232, 170), - 'palegreen': (152, 251, 152), - 'paleturquoise': (175, 238, 238), - 'palevioletred': (219, 112, 147), - 'papayawhip': (255, 239, 213), - 'peachpuff': (255, 218, 185), - 'peru': (205, 133, 63), - 'pink': (255, 192, 203), - 'plum': (221, 160, 221), - 'powderblue': (176, 224, 230), - 'purple': (128, 0, 128), - 'red': (255, 0, 0), - 'rosybrown': (188, 143, 143), - 'royalblue': (65, 105, 225), - 'saddlebrown': (139, 69, 19), - 'salmon': (250, 128, 114), - 'sandybrown': (244, 164, 96), - 'seagreen': (46, 139, 87), - 'seashell': (255, 245, 238), - 'sienna': (160, 82, 45), - 'silver': (192, 192, 192), - 'skyblue': (135, 206, 235), - 'slateblue': (106, 90, 205), - 'slategray': (112, 128, 144), - 'slategrey': (112, 128, 144), - 'snow': (255, 250, 250), - 'springgreen': (0, 255, 127), - 'steelblue': (70, 130, 180), - 'tan': (210, 180, 140), - 'teal': (0, 128, 128), - 'thistle': (216, 191, 216), - 'tomato': (255, 99, 71), - 'turquoise': (64, 224, 208), - 'violet': (238, 130, 238), - 'wheat': (245, 222, 179), - 'white': (255, 255, 255), - 'whitesmoke': (245, 245, 245), - 'yellow': (255, 255, 0), - 'yellowgreen': (154, 205, 50), -} - -COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()} diff --git a/lib/pydantic/v1/config.py b/lib/pydantic/v1/config.py deleted file mode 100644 index a25973af..00000000 --- a/lib/pydantic/v1/config.py +++ /dev/null @@ -1,191 +0,0 @@ -import json -from enum import Enum -from typing import 
TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union - -from typing_extensions import Literal, Protocol - -from .typing import AnyArgTCallable, AnyCallable -from .utils import GetterDict -from .version import compiled - -if TYPE_CHECKING: - from typing import overload - - from .fields import ModelField - from .main import BaseModel - - ConfigType = Type['BaseConfig'] - - class SchemaExtraCallable(Protocol): - @overload - def __call__(self, schema: Dict[str, Any]) -> None: - pass - - @overload - def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None: - pass - -else: - SchemaExtraCallable = Callable[..., None] - -__all__ = 'BaseConfig', 'ConfigDict', 'get_config', 'Extra', 'inherit_config', 'prepare_config' - - -class Extra(str, Enum): - allow = 'allow' - ignore = 'ignore' - forbid = 'forbid' - - -# https://github.com/cython/cython/issues/4003 -# Fixed in Cython 3 and Pydantic v1 won't support Cython 3. -# Pydantic v2 doesn't depend on Cython at all. -if not compiled: - from typing_extensions import TypedDict - - class ConfigDict(TypedDict, total=False): - title: Optional[str] - anystr_lower: bool - anystr_strip_whitespace: bool - min_anystr_length: int - max_anystr_length: Optional[int] - validate_all: bool - extra: Extra - allow_mutation: bool - frozen: bool - allow_population_by_field_name: bool - use_enum_values: bool - fields: Dict[str, Union[str, Dict[str, str]]] - validate_assignment: bool - error_msg_templates: Dict[str, str] - arbitrary_types_allowed: bool - orm_mode: bool - getter_dict: Type[GetterDict] - alias_generator: Optional[Callable[[str], str]] - keep_untouched: Tuple[type, ...] 
- schema_extra: Union[Dict[str, object], 'SchemaExtraCallable'] - json_loads: Callable[[str], object] - json_dumps: AnyArgTCallable[str] - json_encoders: Dict[Type[object], AnyCallable] - underscore_attrs_are_private: bool - allow_inf_nan: bool - copy_on_model_validation: Literal['none', 'deep', 'shallow'] - # whether dataclass `__post_init__` should be run after validation - post_init_call: Literal['before_validation', 'after_validation'] - -else: - ConfigDict = dict # type: ignore - - -class BaseConfig: - title: Optional[str] = None - anystr_lower: bool = False - anystr_upper: bool = False - anystr_strip_whitespace: bool = False - min_anystr_length: int = 0 - max_anystr_length: Optional[int] = None - validate_all: bool = False - extra: Extra = Extra.ignore - allow_mutation: bool = True - frozen: bool = False - allow_population_by_field_name: bool = False - use_enum_values: bool = False - fields: Dict[str, Union[str, Dict[str, str]]] = {} - validate_assignment: bool = False - error_msg_templates: Dict[str, str] = {} - arbitrary_types_allowed: bool = False - orm_mode: bool = False - getter_dict: Type[GetterDict] = GetterDict - alias_generator: Optional[Callable[[str], str]] = None - keep_untouched: Tuple[type, ...] 
= () - schema_extra: Union[Dict[str, Any], 'SchemaExtraCallable'] = {} - json_loads: Callable[[str], Any] = json.loads - json_dumps: Callable[..., str] = json.dumps - json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {} - underscore_attrs_are_private: bool = False - allow_inf_nan: bool = True - - # whether inherited models as fields should be reconstructed as base model, - # and whether such a copy should be shallow or deep - copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow' - - # whether `Union` should check all allowed types before even trying to coerce - smart_union: bool = False - # whether dataclass `__post_init__` should be run before or after validation - post_init_call: Literal['before_validation', 'after_validation'] = 'before_validation' - - @classmethod - def get_field_info(cls, name: str) -> Dict[str, Any]: - """ - Get properties of FieldInfo from the `fields` property of the config class. - """ - - fields_value = cls.fields.get(name) - - if isinstance(fields_value, str): - field_info: Dict[str, Any] = {'alias': fields_value} - elif isinstance(fields_value, dict): - field_info = fields_value - else: - field_info = {} - - if 'alias' in field_info: - field_info.setdefault('alias_priority', 2) - - if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator: - alias = cls.alias_generator(name) - if not isinstance(alias, str): - raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}') - field_info.update(alias=alias, alias_priority=1) - return field_info - - @classmethod - def prepare_field(cls, field: 'ModelField') -> None: - """ - Optional hook to check or modify fields during model creation. 
- """ - pass - - -def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]: - if config is None: - return BaseConfig - - else: - config_dict = ( - config - if isinstance(config, dict) - else {k: getattr(config, k) for k in dir(config) if not k.startswith('__')} - ) - - class Config(BaseConfig): - ... - - for k, v in config_dict.items(): - setattr(Config, k, v) - return Config - - -def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType': - if not self_config: - base_classes: Tuple['ConfigType', ...] = (parent_config,) - elif self_config == parent_config: - base_classes = (self_config,) - else: - base_classes = self_config, parent_config - - namespace['json_encoders'] = { - **getattr(parent_config, 'json_encoders', {}), - **getattr(self_config, 'json_encoders', {}), - **namespace.get('json_encoders', {}), - } - - return type('Config', base_classes, namespace) - - -def prepare_config(config: Type[BaseConfig], cls_name: str) -> None: - if not isinstance(config.extra, Extra): - try: - config.extra = Extra(config.extra) - except ValueError: - raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"') diff --git a/lib/pydantic/v1/dataclasses.py b/lib/pydantic/v1/dataclasses.py deleted file mode 100644 index 2df3987a..00000000 --- a/lib/pydantic/v1/dataclasses.py +++ /dev/null @@ -1,500 +0,0 @@ -""" -The main purpose is to enhance stdlib dataclasses by adding validation -A pydantic dataclass can be generated from scratch or from a stdlib one. - -Behind the scene, a pydantic dataclass is just like a regular one on which we attach -a `BaseModel` and magic methods to trigger the validation of the data. -`__init__` and `__post_init__` are hence overridden and have extra logic to be -able to validate input data. 
- -When a pydantic dataclass is generated from scratch, it's just a plain dataclass -with validation triggered at initialization - -The tricky part if for stdlib dataclasses that are converted after into pydantic ones e.g. - -```py -@dataclasses.dataclass -class M: - x: int - -ValidatedM = pydantic.dataclasses.dataclass(M) -``` - -We indeed still want to support equality, hashing, repr, ... as if it was the stdlib one! - -```py -assert isinstance(ValidatedM(x=1), M) -assert ValidatedM(x=1) == M(x=1) -``` - -This means we **don't want to create a new dataclass that inherits from it** -The trick is to create a wrapper around `M` that will act as a proxy to trigger -validation without altering default `M` behaviour. -""" -import copy -import dataclasses -import sys -from contextlib import contextmanager -from functools import wraps - -try: - from functools import cached_property -except ImportError: - # cached_property available only for python3.8+ - pass - -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload - -from typing_extensions import dataclass_transform - -from .class_validators import gather_all_validators -from .config import BaseConfig, ConfigDict, Extra, get_config -from .error_wrappers import ValidationError -from .errors import DataclassTypeError -from .fields import Field, FieldInfo, Required, Undefined -from .main import create_model, validate_model -from .utils import ClassAttribute - -if TYPE_CHECKING: - from .main import BaseModel - from .typing import CallableGenerator, NoArgAnyCallable - - DataclassT = TypeVar('DataclassT', bound='Dataclass') - - DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy'] - - class Dataclass: - # stdlib attributes - __dataclass_fields__: ClassVar[Dict[str, Any]] - __dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams` - __post_init__: ClassVar[Callable[..., None]] - - # Added by pydantic - 
__pydantic_run_validation__: ClassVar[bool] - __post_init_post_parse__: ClassVar[Callable[..., None]] - __pydantic_initialised__: ClassVar[bool] - __pydantic_model__: ClassVar[Type[BaseModel]] - __pydantic_validate_values__: ClassVar[Callable[['Dataclass'], None]] - __pydantic_has_field_info_default__: ClassVar[bool] # whether a `pydantic.Field` is used as default value - - def __init__(self, *args: object, **kwargs: object) -> None: - pass - - @classmethod - def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator': - pass - - @classmethod - def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT': - pass - - -__all__ = [ - 'dataclass', - 'set_validation', - 'create_pydantic_model_from_dataclass', - 'is_builtin_dataclass', - 'make_dataclass_validator', -] - -_T = TypeVar('_T') - -if sys.version_info >= (3, 10): - - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) - @overload - def dataclass( - *, - init: bool = True, - repr: bool = True, - eq: bool = True, - order: bool = False, - unsafe_hash: bool = False, - frozen: bool = False, - config: Union[ConfigDict, Type[object], None] = None, - validate_on_init: Optional[bool] = None, - use_proxy: Optional[bool] = None, - kw_only: bool = ..., - ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: - ... - - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) - @overload - def dataclass( - _cls: Type[_T], - *, - init: bool = True, - repr: bool = True, - eq: bool = True, - order: bool = False, - unsafe_hash: bool = False, - frozen: bool = False, - config: Union[ConfigDict, Type[object], None] = None, - validate_on_init: Optional[bool] = None, - use_proxy: Optional[bool] = None, - kw_only: bool = ..., - ) -> 'DataclassClassOrWrapper': - ... 
- -else: - - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) - @overload - def dataclass( - *, - init: bool = True, - repr: bool = True, - eq: bool = True, - order: bool = False, - unsafe_hash: bool = False, - frozen: bool = False, - config: Union[ConfigDict, Type[object], None] = None, - validate_on_init: Optional[bool] = None, - use_proxy: Optional[bool] = None, - ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: - ... - - @dataclass_transform(field_specifiers=(dataclasses.field, Field)) - @overload - def dataclass( - _cls: Type[_T], - *, - init: bool = True, - repr: bool = True, - eq: bool = True, - order: bool = False, - unsafe_hash: bool = False, - frozen: bool = False, - config: Union[ConfigDict, Type[object], None] = None, - validate_on_init: Optional[bool] = None, - use_proxy: Optional[bool] = None, - ) -> 'DataclassClassOrWrapper': - ... - - -@dataclass_transform(field_specifiers=(dataclasses.field, Field)) -def dataclass( - _cls: Optional[Type[_T]] = None, - *, - init: bool = True, - repr: bool = True, - eq: bool = True, - order: bool = False, - unsafe_hash: bool = False, - frozen: bool = False, - config: Union[ConfigDict, Type[object], None] = None, - validate_on_init: Optional[bool] = None, - use_proxy: Optional[bool] = None, - kw_only: bool = False, -) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']: - """ - Like the python standard lib dataclasses but with type validation. 
- The result is either a pydantic dataclass that will validate input data - or a wrapper that will trigger validation around a stdlib dataclass - to avoid modifying it directly - """ - the_config = get_config(config) - - def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper': - should_use_proxy = ( - use_proxy - if use_proxy is not None - else ( - is_builtin_dataclass(cls) - and (cls.__bases__[0] is object or set(dir(cls)) == set(dir(cls.__bases__[0]))) - ) - ) - if should_use_proxy: - dc_cls_doc = '' - dc_cls = DataclassProxy(cls) - default_validate_on_init = False - else: - dc_cls_doc = cls.__doc__ or '' # needs to be done before generating dataclass - if sys.version_info >= (3, 10): - dc_cls = dataclasses.dataclass( - cls, - init=init, - repr=repr, - eq=eq, - order=order, - unsafe_hash=unsafe_hash, - frozen=frozen, - kw_only=kw_only, - ) - else: - dc_cls = dataclasses.dataclass( # type: ignore - cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen - ) - default_validate_on_init = True - - should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init - _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc) - dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls}) - return dc_cls - - if _cls is None: - return wrap - - return wrap(_cls) - - -@contextmanager -def set_validation(cls: Type['DataclassT'], value: bool) -> Generator[Type['DataclassT'], None, None]: - original_run_validation = cls.__pydantic_run_validation__ - try: - cls.__pydantic_run_validation__ = value - yield cls - finally: - cls.__pydantic_run_validation__ = original_run_validation - - -class DataclassProxy: - __slots__ = '__dataclass__' - - def __init__(self, dc_cls: Type['Dataclass']) -> None: - object.__setattr__(self, '__dataclass__', dc_cls) - - def __call__(self, *args: Any, **kwargs: Any) -> Any: - with set_validation(self.__dataclass__, True): - return 
self.__dataclass__(*args, **kwargs) - - def __getattr__(self, name: str) -> Any: - return getattr(self.__dataclass__, name) - - def __setattr__(self, __name: str, __value: Any) -> None: - return setattr(self.__dataclass__, __name, __value) - - def __instancecheck__(self, instance: Any) -> bool: - return isinstance(instance, self.__dataclass__) - - def __copy__(self) -> 'DataclassProxy': - return DataclassProxy(copy.copy(self.__dataclass__)) - - def __deepcopy__(self, memo: Any) -> 'DataclassProxy': - return DataclassProxy(copy.deepcopy(self.__dataclass__, memo)) - - -def _add_pydantic_validation_attributes( # noqa: C901 (ignore complexity) - dc_cls: Type['Dataclass'], - config: Type[BaseConfig], - validate_on_init: bool, - dc_cls_doc: str, -) -> None: - """ - We need to replace the right method. If no `__post_init__` has been set in the stdlib dataclass - it won't even exist (code is generated on the fly by `dataclasses`) - By default, we run validation after `__init__` or `__post_init__` if defined - """ - init = dc_cls.__init__ - - @wraps(init) - def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: - if config.extra == Extra.ignore: - init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) - - elif config.extra == Extra.allow: - for k, v in kwargs.items(): - self.__dict__.setdefault(k, v) - init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) - - else: - init(self, *args, **kwargs) - - if hasattr(dc_cls, '__post_init__'): - try: - post_init = dc_cls.__post_init__.__wrapped__ # type: ignore[attr-defined] - except AttributeError: - post_init = dc_cls.__post_init__ - - @wraps(post_init) - def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: - if config.post_init_call == 'before_validation': - post_init(self, *args, **kwargs) - - if self.__class__.__pydantic_run_validation__: - self.__pydantic_validate_values__() - if hasattr(self, 
'__post_init_post_parse__'): - self.__post_init_post_parse__(*args, **kwargs) - - if config.post_init_call == 'after_validation': - post_init(self, *args, **kwargs) - - setattr(dc_cls, '__init__', handle_extra_init) - setattr(dc_cls, '__post_init__', new_post_init) - - else: - - @wraps(init) - def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: - handle_extra_init(self, *args, **kwargs) - - if self.__class__.__pydantic_run_validation__: - self.__pydantic_validate_values__() - - if hasattr(self, '__post_init_post_parse__'): - # We need to find again the initvars. To do that we use `__dataclass_fields__` instead of - # public method `dataclasses.fields` - - # get all initvars and their default values - initvars_and_values: Dict[str, Any] = {} - for i, f in enumerate(self.__class__.__dataclass_fields__.values()): - if f._field_type is dataclasses._FIELD_INITVAR: # type: ignore[attr-defined] - try: - # set arg value by default - initvars_and_values[f.name] = args[i] - except IndexError: - initvars_and_values[f.name] = kwargs.get(f.name, f.default) - - self.__post_init_post_parse__(**initvars_and_values) - - setattr(dc_cls, '__init__', new_init) - - setattr(dc_cls, '__pydantic_run_validation__', ClassAttribute('__pydantic_run_validation__', validate_on_init)) - setattr(dc_cls, '__pydantic_initialised__', False) - setattr(dc_cls, '__pydantic_model__', create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc)) - setattr(dc_cls, '__pydantic_validate_values__', _dataclass_validate_values) - setattr(dc_cls, '__validate__', classmethod(_validate_dataclass)) - setattr(dc_cls, '__get_validators__', classmethod(_get_validators)) - - if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen: - setattr(dc_cls, '__setattr__', _dataclass_validate_assignment_setattr) - - -def _get_validators(cls: 'DataclassClassOrWrapper') -> 'CallableGenerator': - yield cls.__validate__ - - -def _validate_dataclass(cls: 
Type['DataclassT'], v: Any) -> 'DataclassT': - with set_validation(cls, True): - if isinstance(v, cls): - v.__pydantic_validate_values__() - return v - elif isinstance(v, (list, tuple)): - return cls(*v) - elif isinstance(v, dict): - return cls(**v) - else: - raise DataclassTypeError(class_name=cls.__name__) - - -def create_pydantic_model_from_dataclass( - dc_cls: Type['Dataclass'], - config: Type[Any] = BaseConfig, - dc_cls_doc: Optional[str] = None, -) -> Type['BaseModel']: - field_definitions: Dict[str, Any] = {} - for field in dataclasses.fields(dc_cls): - default: Any = Undefined - default_factory: Optional['NoArgAnyCallable'] = None - field_info: FieldInfo - - if field.default is not dataclasses.MISSING: - default = field.default - elif field.default_factory is not dataclasses.MISSING: - default_factory = field.default_factory - else: - default = Required - - if isinstance(default, FieldInfo): - field_info = default - dc_cls.__pydantic_has_field_info_default__ = True - else: - field_info = Field(default=default, default_factory=default_factory, **field.metadata) - - field_definitions[field.name] = (field.type, field_info) - - validators = gather_all_validators(dc_cls) - model: Type['BaseModel'] = create_model( - dc_cls.__name__, - __config__=config, - __module__=dc_cls.__module__, - __validators__=validators, - __cls_kwargs__={'__resolve_forward_refs__': False}, - **field_definitions, - ) - model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or '' - return model - - -if sys.version_info >= (3, 8): - - def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool: - return isinstance(getattr(type(obj), k, None), cached_property) - -else: - - def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool: - return False - - -def _dataclass_validate_values(self: 'Dataclass') -> None: - # validation errors can occur if this function is called twice on an already initialised dataclass. 
- # for example if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property - if getattr(self, '__pydantic_initialised__'): - return - if getattr(self, '__pydantic_has_field_info_default__', False): - # We need to remove `FieldInfo` values since they are not valid as input - # It's ok to do that because they are obviously the default values! - input_data = { - k: v - for k, v in self.__dict__.items() - if not (isinstance(v, FieldInfo) or _is_field_cached_property(self, k)) - } - else: - input_data = {k: v for k, v in self.__dict__.items() if not _is_field_cached_property(self, k)} - d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__) - if validation_error: - raise validation_error - self.__dict__.update(d) - object.__setattr__(self, '__pydantic_initialised__', True) - - -def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value: Any) -> None: - if self.__pydantic_initialised__: - d = dict(self.__dict__) - d.pop(name, None) - known_field = self.__pydantic_model__.__fields__.get(name, None) - if known_field: - value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__) - if error_: - raise ValidationError([error_], self.__class__) - - object.__setattr__(self, name, value) - - -def is_builtin_dataclass(_cls: Type[Any]) -> bool: - """ - Whether a class is a stdlib dataclass - (useful to discriminated a pydantic dataclass that is actually a wrapper around a stdlib dataclass) - - we check that - - `_cls` is a dataclass - - `_cls` is not a processed pydantic dataclass (with a basemodel attached) - - `_cls` is not a pydantic dataclass inheriting directly from a stdlib dataclass - e.g. 
- ``` - @dataclasses.dataclass - class A: - x: int - - @pydantic.dataclasses.dataclass - class B(A): - y: int - ``` - In this case, when we first check `B`, we make an extra check and look at the annotations ('y'), - which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x') - """ - return ( - dataclasses.is_dataclass(_cls) - and not hasattr(_cls, '__pydantic_model__') - and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {}))) - ) - - -def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]) -> 'CallableGenerator': - """ - Create a pydantic.dataclass from a builtin dataclass to add type validation - and yield the validators - It retrieves the parameters of the dataclass and forwards them to the newly created dataclass - """ - yield from _get_validators(dataclass(dc_cls, config=config, use_proxy=True)) diff --git a/lib/pydantic/v1/datetime_parse.py b/lib/pydantic/v1/datetime_parse.py deleted file mode 100644 index cfd54593..00000000 --- a/lib/pydantic/v1/datetime_parse.py +++ /dev/null @@ -1,248 +0,0 @@ -""" -Functions to parse datetime objects. - -We're using regular expressions rather than time.strptime because: -- They provide both validation and parsing. -- They're more flexible for datetimes. -- The date/datetime/time constructors produce friendlier error messages. - -Stolen from https://raw.githubusercontent.com/django/django/main/django/utils/dateparse.py at -9718fa2e8abe430c3526a9278dd976443d4ae3c6 - -Changed to: -* use standard python datetime types not django.utils.timezone -* raise ValueError when regex doesn't match rather than returning None -* support parsing unix timestamps for dates and datetimes -""" -import re -from datetime import date, datetime, time, timedelta, timezone -from typing import Dict, Optional, Type, Union - -from . 
import errors - -date_expr = r'(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})' -time_expr = ( - r'(?P\d{1,2}):(?P\d{1,2})' - r'(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?' - r'(?PZ|[+-]\d{2}(?::?\d{2})?)?$' -) - -date_re = re.compile(f'{date_expr}$') -time_re = re.compile(time_expr) -datetime_re = re.compile(f'{date_expr}[T ]{time_expr}') - -standard_duration_re = re.compile( - r'^' - r'(?:(?P-?\d+) (days?, )?)?' - r'((?:(?P-?\d+):)(?=\d+:\d+))?' - r'(?:(?P-?\d+):)?' - r'(?P-?\d+)' - r'(?:\.(?P\d{1,6})\d{0,6})?' - r'$' -) - -# Support the sections of ISO 8601 date representation that are accepted by timedelta -iso8601_duration_re = re.compile( - r'^(?P[-+]?)' - r'P' - r'(?:(?P\d+(.\d+)?)D)?' - r'(?:T' - r'(?:(?P\d+(.\d+)?)H)?' - r'(?:(?P\d+(.\d+)?)M)?' - r'(?:(?P\d+(.\d+)?)S)?' - r')?' - r'$' -) - -EPOCH = datetime(1970, 1, 1) -# if greater than this, the number is in ms, if less than or equal it's in seconds -# (in seconds this is 11th October 2603, in ms it's 20th August 1970) -MS_WATERSHED = int(2e10) -# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 -MAX_NUMBER = int(3e20) -StrBytesIntFloat = Union[str, bytes, int, float] - - -def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: - if isinstance(value, (int, float)): - return value - try: - return float(value) - except ValueError: - return None - except TypeError: - raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float') - - -def from_unix_seconds(seconds: Union[int, float]) -> datetime: - if seconds > MAX_NUMBER: - return datetime.max - elif seconds < -MAX_NUMBER: - return datetime.min - - while abs(seconds) > MS_WATERSHED: - seconds /= 1000 - dt = EPOCH + timedelta(seconds=seconds) - return dt.replace(tzinfo=timezone.utc) - - -def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]: - if value == 'Z': - return timezone.utc - elif value is not None: - offset_mins = 
int(value[-2:]) if len(value) > 3 else 0 - offset = 60 * int(value[1:3]) + offset_mins - if value[0] == '-': - offset = -offset - try: - return timezone(timedelta(minutes=offset)) - except ValueError: - raise error() - else: - return None - - -def parse_date(value: Union[date, StrBytesIntFloat]) -> date: - """ - Parse a date/int/float/string and return a datetime.date. - - Raise ValueError if the input is well formatted but not a valid date. - Raise ValueError if the input isn't well formatted. - """ - if isinstance(value, date): - if isinstance(value, datetime): - return value.date() - else: - return value - - number = get_numeric(value, 'date') - if number is not None: - return from_unix_seconds(number).date() - - if isinstance(value, bytes): - value = value.decode() - - match = date_re.match(value) # type: ignore - if match is None: - raise errors.DateError() - - kw = {k: int(v) for k, v in match.groupdict().items()} - - try: - return date(**kw) - except ValueError: - raise errors.DateError() - - -def parse_time(value: Union[time, StrBytesIntFloat]) -> time: - """ - Parse a time/string and return a datetime.time. - - Raise ValueError if the input is well formatted but not a valid time. - Raise ValueError if the input isn't well formatted, in particular if it contains an offset. 
- """ - if isinstance(value, time): - return value - - number = get_numeric(value, 'time') - if number is not None: - if number >= 86400: - # doesn't make sense since the time time loop back around to 0 - raise errors.TimeError() - return (datetime.min + timedelta(seconds=number)).time() - - if isinstance(value, bytes): - value = value.decode() - - match = time_re.match(value) # type: ignore - if match is None: - raise errors.TimeError() - - kw = match.groupdict() - if kw['microsecond']: - kw['microsecond'] = kw['microsecond'].ljust(6, '0') - - tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError) - kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} - kw_['tzinfo'] = tzinfo - - try: - return time(**kw_) # type: ignore - except ValueError: - raise errors.TimeError() - - -def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: - """ - Parse a datetime/int/float/string and return a datetime.datetime. - - This function supports time zone offsets. When the input contains one, - the output uses a timezone with a fixed offset from UTC. - - Raise ValueError if the input is well formatted but not a valid datetime. - Raise ValueError if the input isn't well formatted. 
- """ - if isinstance(value, datetime): - return value - - number = get_numeric(value, 'datetime') - if number is not None: - return from_unix_seconds(number) - - if isinstance(value, bytes): - value = value.decode() - - match = datetime_re.match(value) # type: ignore - if match is None: - raise errors.DateTimeError() - - kw = match.groupdict() - if kw['microsecond']: - kw['microsecond'] = kw['microsecond'].ljust(6, '0') - - tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError) - kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} - kw_['tzinfo'] = tzinfo - - try: - return datetime(**kw_) # type: ignore - except ValueError: - raise errors.DateTimeError() - - -def parse_duration(value: StrBytesIntFloat) -> timedelta: - """ - Parse a duration int/float/string and return a datetime.timedelta. - - The preferred format for durations in Django is '%d %H:%M:%S.%f'. - - Also supports ISO 8601 representation. - """ - if isinstance(value, timedelta): - return value - - if isinstance(value, (int, float)): - # below code requires a string - value = f'{value:f}' - elif isinstance(value, bytes): - value = value.decode() - - try: - match = standard_duration_re.match(value) or iso8601_duration_re.match(value) - except TypeError: - raise TypeError('invalid type; expected timedelta, string, bytes, int or float') - - if not match: - raise errors.DurationError() - - kw = match.groupdict() - sign = -1 if kw.pop('sign', '+') == '-' else 1 - if kw.get('microseconds'): - kw['microseconds'] = kw['microseconds'].ljust(6, '0') - - if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'): - kw['microseconds'] = '-' + kw['microseconds'] - - kw_ = {k: float(v) for k, v in kw.items() if v is not None} - - return sign * timedelta(**kw_) diff --git a/lib/pydantic/v1/decorator.py b/lib/pydantic/v1/decorator.py deleted file mode 100644 index 089aab65..00000000 --- a/lib/pydantic/v1/decorator.py +++ /dev/null @@ -1,264 
+0,0 @@ -from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload - -from . import validator -from .config import Extra -from .errors import ConfigError -from .main import BaseModel, create_model -from .typing import get_all_type_hints -from .utils import to_camel - -__all__ = ('validate_arguments',) - -if TYPE_CHECKING: - from .typing import AnyCallable - - AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable) - ConfigType = Union[None, Type[Any], Dict[str, Any]] - - -@overload -def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']: - ... - - -@overload -def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': - ... - - -def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any: - """ - Decorator to validate the arguments passed to a function. - """ - - def validate(_func: 'AnyCallable') -> 'AnyCallable': - vd = ValidatedFunction(_func, config) - - @wraps(_func) - def wrapper_function(*args: Any, **kwargs: Any) -> Any: - return vd.call(*args, **kwargs) - - wrapper_function.vd = vd # type: ignore - wrapper_function.validate = vd.init_model_instance # type: ignore - wrapper_function.raw_function = vd.raw_function # type: ignore - wrapper_function.model = vd.model # type: ignore - return wrapper_function - - if func: - return validate(func) - else: - return validate - - -ALT_V_ARGS = 'v__args' -ALT_V_KWARGS = 'v__kwargs' -V_POSITIONAL_ONLY_NAME = 'v__positional_only' -V_DUPLICATE_KWARGS = 'v__duplicate_kwargs' - - -class ValidatedFunction: - def __init__(self, function: 'AnyCallableT', config: 'ConfigType'): # noqa C901 - from inspect import Parameter, signature - - parameters: Mapping[str, Parameter] = signature(function).parameters - - if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}: - raise ConfigError( - 
f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" ' - f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator' - ) - - self.raw_function = function - self.arg_mapping: Dict[int, str] = {} - self.positional_only_args = set() - self.v_args_name = 'args' - self.v_kwargs_name = 'kwargs' - - type_hints = get_all_type_hints(function) - takes_args = False - takes_kwargs = False - fields: Dict[str, Tuple[Any, Any]] = {} - for i, (name, p) in enumerate(parameters.items()): - if p.annotation is p.empty: - annotation = Any - else: - annotation = type_hints[name] - - default = ... if p.default is p.empty else p.default - if p.kind == Parameter.POSITIONAL_ONLY: - self.arg_mapping[i] = name - fields[name] = annotation, default - fields[V_POSITIONAL_ONLY_NAME] = List[str], None - self.positional_only_args.add(name) - elif p.kind == Parameter.POSITIONAL_OR_KEYWORD: - self.arg_mapping[i] = name - fields[name] = annotation, default - fields[V_DUPLICATE_KWARGS] = List[str], None - elif p.kind == Parameter.KEYWORD_ONLY: - fields[name] = annotation, default - elif p.kind == Parameter.VAR_POSITIONAL: - self.v_args_name = name - fields[name] = Tuple[annotation, ...], None - takes_args = True - else: - assert p.kind == Parameter.VAR_KEYWORD, p.kind - self.v_kwargs_name = name - fields[name] = Dict[str, annotation], None # type: ignore - takes_kwargs = True - - # these checks avoid a clash between "args" and a field with that name - if not takes_args and self.v_args_name in fields: - self.v_args_name = ALT_V_ARGS - - # same with "kwargs" - if not takes_kwargs and self.v_kwargs_name in fields: - self.v_kwargs_name = ALT_V_KWARGS - - if not takes_args: - # we add the field so validation below can raise the correct exception - fields[self.v_args_name] = List[Any], None - - if not takes_kwargs: - # same with kwargs - fields[self.v_kwargs_name] = Dict[Any, Any], None - - self.create_model(fields, takes_args, 
takes_kwargs, config) - - def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel: - values = self.build_values(args, kwargs) - return self.model(**values) - - def call(self, *args: Any, **kwargs: Any) -> Any: - m = self.init_model_instance(*args, **kwargs) - return self.execute(m) - - def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]: - values: Dict[str, Any] = {} - if args: - arg_iter = enumerate(args) - while True: - try: - i, a = next(arg_iter) - except StopIteration: - break - arg_name = self.arg_mapping.get(i) - if arg_name is not None: - values[arg_name] = a - else: - values[self.v_args_name] = [a] + [a for _, a in arg_iter] - break - - var_kwargs: Dict[str, Any] = {} - wrong_positional_args = [] - duplicate_kwargs = [] - fields_alias = [ - field.alias - for name, field in self.model.__fields__.items() - if name not in (self.v_args_name, self.v_kwargs_name) - ] - non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name} - for k, v in kwargs.items(): - if k in non_var_fields or k in fields_alias: - if k in self.positional_only_args: - wrong_positional_args.append(k) - if k in values: - duplicate_kwargs.append(k) - values[k] = v - else: - var_kwargs[k] = v - - if var_kwargs: - values[self.v_kwargs_name] = var_kwargs - if wrong_positional_args: - values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args - if duplicate_kwargs: - values[V_DUPLICATE_KWARGS] = duplicate_kwargs - return values - - def execute(self, m: BaseModel) -> Any: - d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory} - var_kwargs = d.pop(self.v_kwargs_name, {}) - - if self.v_args_name in d: - args_: List[Any] = [] - in_kwargs = False - kwargs = {} - for name, value in d.items(): - if in_kwargs: - kwargs[name] = value - elif name == self.v_args_name: - args_ += value - in_kwargs = True - else: - args_.append(value) - return self.raw_function(*args_, **kwargs, **var_kwargs) - elif 
self.positional_only_args: - args_ = [] - kwargs = {} - for name, value in d.items(): - if name in self.positional_only_args: - args_.append(value) - else: - kwargs[name] = value - return self.raw_function(*args_, **kwargs, **var_kwargs) - else: - return self.raw_function(**d, **var_kwargs) - - def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None: - pos_args = len(self.arg_mapping) - - class CustomConfig: - pass - - if not TYPE_CHECKING: # pragma: no branch - if isinstance(config, dict): - CustomConfig = type('Config', (), config) # noqa: F811 - elif config is not None: - CustomConfig = config # noqa: F811 - - if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'): - raise ConfigError( - 'Setting the "fields" and "alias_generator" property on custom Config for ' - '@validate_arguments is not yet supported, please remove.' - ) - - class DecoratorBaseModel(BaseModel): - @validator(self.v_args_name, check_fields=False, allow_reuse=True) - def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]: - if takes_args or v is None: - return v - - raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given') - - @validator(self.v_kwargs_name, check_fields=False, allow_reuse=True) - def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: - if takes_kwargs or v is None: - return v - - plural = '' if len(v) == 1 else 's' - keys = ', '.join(map(repr, v.keys())) - raise TypeError(f'unexpected keyword argument{plural}: {keys}') - - @validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True) - def check_positional_only(cls, v: Optional[List[str]]) -> None: - if v is None: - return - - plural = '' if len(v) == 1 else 's' - keys = ', '.join(map(repr, v)) - raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}') - - @validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True) - 
def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None: - if v is None: - return - - plural = '' if len(v) == 1 else 's' - keys = ', '.join(map(repr, v)) - raise TypeError(f'multiple values for argument{plural}: {keys}') - - class Config(CustomConfig): - extra = getattr(CustomConfig, 'extra', Extra.forbid) - - self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields) diff --git a/lib/pydantic/v1/env_settings.py b/lib/pydantic/v1/env_settings.py deleted file mode 100644 index 6c446e51..00000000 --- a/lib/pydantic/v1/env_settings.py +++ /dev/null @@ -1,350 +0,0 @@ -import os -import warnings -from pathlib import Path -from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union - -from .config import BaseConfig, Extra -from .fields import ModelField -from .main import BaseModel -from .types import JsonWrapper -from .typing import StrPath, display_as_type, get_origin, is_union -from .utils import deep_update, lenient_issubclass, path_type, sequence_like - -env_file_sentinel = str(object()) - -SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]] -DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]] - - -class SettingsError(ValueError): - pass - - -class BaseSettings(BaseModel): - """ - Base class for settings, allowing values to be overridden by environment variables. - - This is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose), - Heroku and any 12 factor app design. 
- """ - - def __init__( - __pydantic_self__, - _env_file: Optional[DotenvType] = env_file_sentinel, - _env_file_encoding: Optional[str] = None, - _env_nested_delimiter: Optional[str] = None, - _secrets_dir: Optional[StrPath] = None, - **values: Any, - ) -> None: - # Uses something other than `self` the first arg to allow "self" as a settable attribute - super().__init__( - **__pydantic_self__._build_values( - values, - _env_file=_env_file, - _env_file_encoding=_env_file_encoding, - _env_nested_delimiter=_env_nested_delimiter, - _secrets_dir=_secrets_dir, - ) - ) - - def _build_values( - self, - init_kwargs: Dict[str, Any], - _env_file: Optional[DotenvType] = None, - _env_file_encoding: Optional[str] = None, - _env_nested_delimiter: Optional[str] = None, - _secrets_dir: Optional[StrPath] = None, - ) -> Dict[str, Any]: - # Configure built-in sources - init_settings = InitSettingsSource(init_kwargs=init_kwargs) - env_settings = EnvSettingsSource( - env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file), - env_file_encoding=( - _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding - ), - env_nested_delimiter=( - _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter - ), - env_prefix_len=len(self.__config__.env_prefix), - ) - file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir) - # Provide a hook to set built-in sources priority and add / remove sources - sources = self.__config__.customise_sources( - init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings - ) - if sources: - return deep_update(*reversed([source(self) for source in sources])) - else: - # no one should mean to do this, but I think returning an empty dict is marginally preferable - # to an informative error and much better than a confusing error - return {} - - class Config(BaseConfig): - env_prefix: 
str = '' - env_file: Optional[DotenvType] = None - env_file_encoding: Optional[str] = None - env_nested_delimiter: Optional[str] = None - secrets_dir: Optional[StrPath] = None - validate_all: bool = True - extra: Extra = Extra.forbid - arbitrary_types_allowed: bool = True - case_sensitive: bool = False - - @classmethod - def prepare_field(cls, field: ModelField) -> None: - env_names: Union[List[str], AbstractSet[str]] - field_info_from_config = cls.get_field_info(field.name) - - env = field_info_from_config.get('env') or field.field_info.extra.get('env') - if env is None: - if field.has_alias: - warnings.warn( - 'aliases are no longer used by BaseSettings to define which environment variables to read. ' - 'Instead use the "env" field setting. ' - 'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names', - FutureWarning, - ) - env_names = {cls.env_prefix + field.name} - elif isinstance(env, str): - env_names = {env} - elif isinstance(env, (set, frozenset)): - env_names = env - elif sequence_like(env): - env_names = list(env) - else: - raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set') - - if not cls.case_sensitive: - env_names = env_names.__class__(n.lower() for n in env_names) - field.field_info.extra['env_names'] = env_names - - @classmethod - def customise_sources( - cls, - init_settings: SettingsSourceCallable, - env_settings: SettingsSourceCallable, - file_secret_settings: SettingsSourceCallable, - ) -> Tuple[SettingsSourceCallable, ...]: - return init_settings, env_settings, file_secret_settings - - @classmethod - def parse_env_var(cls, field_name: str, raw_val: str) -> Any: - return cls.json_loads(raw_val) - - # populated by the metaclass using the Config class defined above, annotated here to help IDEs only - __config__: ClassVar[Type[Config]] - - -class InitSettingsSource: - __slots__ = ('init_kwargs',) - - def __init__(self, init_kwargs: Dict[str, Any]): - self.init_kwargs = 
init_kwargs - - def __call__(self, settings: BaseSettings) -> Dict[str, Any]: - return self.init_kwargs - - def __repr__(self) -> str: - return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})' - - -class EnvSettingsSource: - __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter', 'env_prefix_len') - - def __init__( - self, - env_file: Optional[DotenvType], - env_file_encoding: Optional[str], - env_nested_delimiter: Optional[str] = None, - env_prefix_len: int = 0, - ): - self.env_file: Optional[DotenvType] = env_file - self.env_file_encoding: Optional[str] = env_file_encoding - self.env_nested_delimiter: Optional[str] = env_nested_delimiter - self.env_prefix_len: int = env_prefix_len - - def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901 - """ - Build environment variables suitable for passing to the Model. - """ - d: Dict[str, Any] = {} - - if settings.__config__.case_sensitive: - env_vars: Mapping[str, Optional[str]] = os.environ - else: - env_vars = {k.lower(): v for k, v in os.environ.items()} - - dotenv_vars = self._read_env_files(settings.__config__.case_sensitive) - if dotenv_vars: - env_vars = {**dotenv_vars, **env_vars} - - for field in settings.__fields__.values(): - env_val: Optional[str] = None - for env_name in field.field_info.extra['env_names']: - env_val = env_vars.get(env_name) - if env_val is not None: - break - - is_complex, allow_parse_failure = self.field_is_complex(field) - if is_complex: - if env_val is None: - # field is complex but no value found so far, try explode_env_vars - env_val_built = self.explode_env_vars(field, env_vars) - if env_val_built: - d[field.alias] = env_val_built - else: - # field is complex and there's a value, decode that as JSON, then add explode_env_vars - try: - env_val = settings.__config__.parse_env_var(field.name, env_val) - except ValueError as e: - if not allow_parse_failure: - raise SettingsError(f'error parsing env var "{env_name}"') from e - - if isinstance(env_val, 
dict): - d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars)) - else: - d[field.alias] = env_val - elif env_val is not None: - # simplest case, field is not complex, we only need to add the value if it was found - d[field.alias] = env_val - - return d - - def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]: - env_files = self.env_file - if env_files is None: - return {} - - if isinstance(env_files, (str, os.PathLike)): - env_files = [env_files] - - dotenv_vars = {} - for env_file in env_files: - env_path = Path(env_file).expanduser() - if env_path.is_file(): - dotenv_vars.update( - read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive) - ) - - return dotenv_vars - - def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]: - """ - Find out if a field is complex, and if so whether JSON errors should be ignored - """ - if lenient_issubclass(field.annotation, JsonWrapper): - return False, False - - if field.is_complex(): - allow_parse_failure = False - elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields): - allow_parse_failure = True - else: - return False, False - - return True, allow_parse_failure - - def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]: - """ - Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries. - - This is applied to a single field, hence filtering by env_var prefix. 
- """ - prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']] - result: Dict[str, Any] = {} - for env_name, env_val in env_vars.items(): - if not any(env_name.startswith(prefix) for prefix in prefixes): - continue - # we remove the prefix before splitting in case the prefix has characters in common with the delimiter - env_name_without_prefix = env_name[self.env_prefix_len :] - _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter) - env_var = result - for key in keys: - env_var = env_var.setdefault(key, {}) - env_var[last_key] = env_val - - return result - - def __repr__(self) -> str: - return ( - f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, ' - f'env_nested_delimiter={self.env_nested_delimiter!r})' - ) - - -class SecretsSettingsSource: - __slots__ = ('secrets_dir',) - - def __init__(self, secrets_dir: Optional[StrPath]): - self.secrets_dir: Optional[StrPath] = secrets_dir - - def __call__(self, settings: BaseSettings) -> Dict[str, Any]: - """ - Build fields from "secrets" files. 
- """ - secrets: Dict[str, Optional[str]] = {} - - if self.secrets_dir is None: - return secrets - - secrets_path = Path(self.secrets_dir).expanduser() - - if not secrets_path.exists(): - warnings.warn(f'directory "{secrets_path}" does not exist') - return secrets - - if not secrets_path.is_dir(): - raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}') - - for field in settings.__fields__.values(): - for env_name in field.field_info.extra['env_names']: - path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive) - if not path: - # path does not exist, we currently don't return a warning for this - continue - - if path.is_file(): - secret_value = path.read_text().strip() - if field.is_complex(): - try: - secret_value = settings.__config__.parse_env_var(field.name, secret_value) - except ValueError as e: - raise SettingsError(f'error parsing env var "{env_name}"') from e - - secrets[field.alias] = secret_value - else: - warnings.warn( - f'attempted to load secret file "{path}" but found a {path_type(path)} instead.', - stacklevel=4, - ) - return secrets - - def __repr__(self) -> str: - return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})' - - -def read_env_file( - file_path: StrPath, *, encoding: str = None, case_sensitive: bool = False -) -> Dict[str, Optional[str]]: - try: - from dotenv import dotenv_values - except ImportError as e: - raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e - - file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8') - if not case_sensitive: - return {k.lower(): v for k, v in file_vars.items()} - else: - return file_vars - - -def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]: - """ - Find a file within path's directory matching filename, optionally ignoring case. 
- """ - for f in dir_path.iterdir(): - if f.name == file_name: - return f - elif not case_sensitive and f.name.lower() == file_name.lower(): - return f - return None diff --git a/lib/pydantic/v1/error_wrappers.py b/lib/pydantic/v1/error_wrappers.py deleted file mode 100644 index d89a500c..00000000 --- a/lib/pydantic/v1/error_wrappers.py +++ /dev/null @@ -1,161 +0,0 @@ -import json -from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union - -from .json import pydantic_encoder -from .utils import Representation - -if TYPE_CHECKING: - from typing_extensions import TypedDict - - from .config import BaseConfig - from .types import ModelOrDc - from .typing import ReprArgs - - Loc = Tuple[Union[int, str], ...] - - class _ErrorDictRequired(TypedDict): - loc: Loc - msg: str - type: str - - class ErrorDict(_ErrorDictRequired, total=False): - ctx: Dict[str, Any] - - -__all__ = 'ErrorWrapper', 'ValidationError' - - -class ErrorWrapper(Representation): - __slots__ = 'exc', '_loc' - - def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: - self.exc = exc - self._loc = loc - - def loc_tuple(self) -> 'Loc': - if isinstance(self._loc, tuple): - return self._loc - else: - return (self._loc,) - - def __repr_args__(self) -> 'ReprArgs': - return [('exc', self.exc), ('loc', self.loc_tuple())] - - -# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] -# but recursive, therefore just use: -ErrorList = Union[Sequence[Any], ErrorWrapper] - - -class ValidationError(Representation, ValueError): - __slots__ = 'raw_errors', 'model', '_error_cache' - - def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None: - self.raw_errors = errors - self.model = model - self._error_cache: Optional[List['ErrorDict']] = None - - def errors(self) -> List['ErrorDict']: - if self._error_cache is None: - try: - config = self.model.__config__ # type: ignore - except AttributeError: - config = 
self.model.__pydantic_model__.__config__ # type: ignore - self._error_cache = list(flatten_errors(self.raw_errors, config)) - return self._error_cache - - def json(self, *, indent: Union[None, int, str] = 2) -> str: - return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) - - def __str__(self) -> str: - errors = self.errors() - no_errors = len(errors) - return ( - f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n' - f'{display_errors(errors)}' - ) - - def __repr_args__(self) -> 'ReprArgs': - return [('model', self.model.__name__), ('errors', self.errors())] - - -def display_errors(errors: List['ErrorDict']) -> str: - return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) - - -def _display_error_loc(error: 'ErrorDict') -> str: - return ' -> '.join(str(e) for e in error['loc']) - - -def _display_error_type_and_ctx(error: 'ErrorDict') -> str: - t = 'type=' + error['type'] - ctx = error.get('ctx') - if ctx: - return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) - else: - return t - - -def flatten_errors( - errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None -) -> Generator['ErrorDict', None, None]: - for error in errors: - if isinstance(error, ErrorWrapper): - if loc: - error_loc = loc + error.loc_tuple() - else: - error_loc = error.loc_tuple() - - if isinstance(error.exc, ValidationError): - yield from flatten_errors(error.exc.raw_errors, config, error_loc) - else: - yield error_dict(error.exc, config, error_loc) - elif isinstance(error, list): - yield from flatten_errors(error, config, loc=loc) - else: - raise RuntimeError(f'Unknown error object: {error}') - - -def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict': - type_ = get_exc_type(exc.__class__) - msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None) - ctx = exc.__dict__ - if msg_template: - msg = 
msg_template.format(**ctx) - else: - msg = str(exc) - - d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_} - - if ctx: - d['ctx'] = ctx - - return d - - -_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} - - -def get_exc_type(cls: Type[Exception]) -> str: - # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up - try: - return _EXC_TYPE_CACHE[cls] - except KeyError: - r = _get_exc_type(cls) - _EXC_TYPE_CACHE[cls] = r - return r - - -def _get_exc_type(cls: Type[Exception]) -> str: - if issubclass(cls, AssertionError): - return 'assertion_error' - - base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' - if cls in (TypeError, ValueError): - # just TypeError or ValueError, no extra code - return base_name - - # if it's not a TypeError or ValueError, we just take the lowercase of the exception name - # no chaining or snake case logic, use "code" for more complex error types. - code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() - return base_name + '.' + code diff --git a/lib/pydantic/v1/errors.py b/lib/pydantic/v1/errors.py deleted file mode 100644 index 7bdafdd1..00000000 --- a/lib/pydantic/v1/errors.py +++ /dev/null @@ -1,646 +0,0 @@ -from decimal import Decimal -from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union - -from .typing import display_as_type - -if TYPE_CHECKING: - from .typing import DictStrAny - -# explicitly state exports to avoid "from .errors import *" also importing Decimal, Path etc. 
-__all__ = ( - 'PydanticTypeError', - 'PydanticValueError', - 'ConfigError', - 'MissingError', - 'ExtraError', - 'NoneIsNotAllowedError', - 'NoneIsAllowedError', - 'WrongConstantError', - 'NotNoneError', - 'BoolError', - 'BytesError', - 'DictError', - 'EmailError', - 'UrlError', - 'UrlSchemeError', - 'UrlSchemePermittedError', - 'UrlUserInfoError', - 'UrlHostError', - 'UrlHostTldError', - 'UrlPortError', - 'UrlExtraError', - 'EnumError', - 'IntEnumError', - 'EnumMemberError', - 'IntegerError', - 'FloatError', - 'PathError', - 'PathNotExistsError', - 'PathNotAFileError', - 'PathNotADirectoryError', - 'PyObjectError', - 'SequenceError', - 'ListError', - 'SetError', - 'FrozenSetError', - 'TupleError', - 'TupleLengthError', - 'ListMinLengthError', - 'ListMaxLengthError', - 'ListUniqueItemsError', - 'SetMinLengthError', - 'SetMaxLengthError', - 'FrozenSetMinLengthError', - 'FrozenSetMaxLengthError', - 'AnyStrMinLengthError', - 'AnyStrMaxLengthError', - 'StrError', - 'StrRegexError', - 'NumberNotGtError', - 'NumberNotGeError', - 'NumberNotLtError', - 'NumberNotLeError', - 'NumberNotMultipleError', - 'DecimalError', - 'DecimalIsNotFiniteError', - 'DecimalMaxDigitsError', - 'DecimalMaxPlacesError', - 'DecimalWholeDigitsError', - 'DateTimeError', - 'DateError', - 'DateNotInThePastError', - 'DateNotInTheFutureError', - 'TimeError', - 'DurationError', - 'HashableError', - 'UUIDError', - 'UUIDVersionError', - 'ArbitraryTypeError', - 'ClassError', - 'SubclassError', - 'JsonError', - 'JsonTypeError', - 'PatternError', - 'DataclassTypeError', - 'CallableError', - 'IPvAnyAddressError', - 'IPvAnyInterfaceError', - 'IPvAnyNetworkError', - 'IPv4AddressError', - 'IPv6AddressError', - 'IPv4NetworkError', - 'IPv6NetworkError', - 'IPv4InterfaceError', - 'IPv6InterfaceError', - 'ColorError', - 'StrictBoolError', - 'NotDigitError', - 'LuhnValidationError', - 'InvalidLengthForBrand', - 'InvalidByteSize', - 'InvalidByteSizeUnit', - 'MissingDiscriminator', - 'InvalidDiscriminator', -) - - 
-def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin': - """ - For built-in exceptions like ValueError or TypeError, we need to implement - __reduce__ to override the default behaviour (instead of __getstate__/__setstate__) - By default pickle protocol 2 calls `cls.__new__(cls, *args)`. - Since we only use kwargs, we need a little constructor to change that. - Note: the callable can't be a lambda as pickle looks in the namespace to find it - """ - return cls(**ctx) - - -class PydanticErrorMixin: - code: str - msg_template: str - - def __init__(self, **ctx: Any) -> None: - self.__dict__ = ctx - - def __str__(self) -> str: - return self.msg_template.format(**self.__dict__) - - def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]: - return cls_kwargs, (self.__class__, self.__dict__) - - -class PydanticTypeError(PydanticErrorMixin, TypeError): - pass - - -class PydanticValueError(PydanticErrorMixin, ValueError): - pass - - -class ConfigError(RuntimeError): - pass - - -class MissingError(PydanticValueError): - msg_template = 'field required' - - -class ExtraError(PydanticValueError): - msg_template = 'extra fields not permitted' - - -class NoneIsNotAllowedError(PydanticTypeError): - code = 'none.not_allowed' - msg_template = 'none is not an allowed value' - - -class NoneIsAllowedError(PydanticTypeError): - code = 'none.allowed' - msg_template = 'value is not none' - - -class WrongConstantError(PydanticValueError): - code = 'const' - - def __str__(self) -> str: - permitted = ', '.join(repr(v) for v in self.permitted) # type: ignore - return f'unexpected value; permitted: {permitted}' - - -class NotNoneError(PydanticTypeError): - code = 'not_none' - msg_template = 'value is not None' - - -class BoolError(PydanticTypeError): - msg_template = 'value could not be parsed to a boolean' - - -class BytesError(PydanticTypeError): - msg_template = 'byte type expected' - - -class 
DictError(PydanticTypeError): - msg_template = 'value is not a valid dict' - - -class EmailError(PydanticValueError): - msg_template = 'value is not a valid email address' - - -class UrlError(PydanticValueError): - code = 'url' - - -class UrlSchemeError(UrlError): - code = 'url.scheme' - msg_template = 'invalid or missing URL scheme' - - -class UrlSchemePermittedError(UrlError): - code = 'url.scheme' - msg_template = 'URL scheme not permitted' - - def __init__(self, allowed_schemes: Set[str]): - super().__init__(allowed_schemes=allowed_schemes) - - -class UrlUserInfoError(UrlError): - code = 'url.userinfo' - msg_template = 'userinfo required in URL but missing' - - -class UrlHostError(UrlError): - code = 'url.host' - msg_template = 'URL host invalid' - - -class UrlHostTldError(UrlError): - code = 'url.host' - msg_template = 'URL host invalid, top level domain required' - - -class UrlPortError(UrlError): - code = 'url.port' - msg_template = 'URL port invalid, port cannot exceed 65535' - - -class UrlExtraError(UrlError): - code = 'url.extra' - msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}' - - -class EnumMemberError(PydanticTypeError): - code = 'enum' - - def __str__(self) -> str: - permitted = ', '.join(repr(v.value) for v in self.enum_values) # type: ignore - return f'value is not a valid enumeration member; permitted: {permitted}' - - -class IntegerError(PydanticTypeError): - msg_template = 'value is not a valid integer' - - -class FloatError(PydanticTypeError): - msg_template = 'value is not a valid float' - - -class PathError(PydanticTypeError): - msg_template = 'value is not a valid path' - - -class _PathValueError(PydanticValueError): - def __init__(self, *, path: Path) -> None: - super().__init__(path=str(path)) - - -class PathNotExistsError(_PathValueError): - code = 'path.not_exists' - msg_template = 'file or directory at path "{path}" does not exist' - - -class PathNotAFileError(_PathValueError): - code = 'path.not_a_file' 
- msg_template = 'path "{path}" does not point to a file' - - -class PathNotADirectoryError(_PathValueError): - code = 'path.not_a_directory' - msg_template = 'path "{path}" does not point to a directory' - - -class PyObjectError(PydanticTypeError): - msg_template = 'ensure this value contains valid import path or valid callable: {error_message}' - - -class SequenceError(PydanticTypeError): - msg_template = 'value is not a valid sequence' - - -class IterableError(PydanticTypeError): - msg_template = 'value is not a valid iterable' - - -class ListError(PydanticTypeError): - msg_template = 'value is not a valid list' - - -class SetError(PydanticTypeError): - msg_template = 'value is not a valid set' - - -class FrozenSetError(PydanticTypeError): - msg_template = 'value is not a valid frozenset' - - -class DequeError(PydanticTypeError): - msg_template = 'value is not a valid deque' - - -class TupleError(PydanticTypeError): - msg_template = 'value is not a valid tuple' - - -class TupleLengthError(PydanticValueError): - code = 'tuple.length' - msg_template = 'wrong tuple length {actual_length}, expected {expected_length}' - - def __init__(self, *, actual_length: int, expected_length: int) -> None: - super().__init__(actual_length=actual_length, expected_length=expected_length) - - -class ListMinLengthError(PydanticValueError): - code = 'list.min_items' - msg_template = 'ensure this value has at least {limit_value} items' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class ListMaxLengthError(PydanticValueError): - code = 'list.max_items' - msg_template = 'ensure this value has at most {limit_value} items' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class ListUniqueItemsError(PydanticValueError): - code = 'list.unique_items' - msg_template = 'the list has duplicated items' - - -class SetMinLengthError(PydanticValueError): - code = 'set.min_items' - msg_template 
= 'ensure this value has at least {limit_value} items' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class SetMaxLengthError(PydanticValueError): - code = 'set.max_items' - msg_template = 'ensure this value has at most {limit_value} items' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class FrozenSetMinLengthError(PydanticValueError): - code = 'frozenset.min_items' - msg_template = 'ensure this value has at least {limit_value} items' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class FrozenSetMaxLengthError(PydanticValueError): - code = 'frozenset.max_items' - msg_template = 'ensure this value has at most {limit_value} items' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class AnyStrMinLengthError(PydanticValueError): - code = 'any_str.min_length' - msg_template = 'ensure this value has at least {limit_value} characters' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class AnyStrMaxLengthError(PydanticValueError): - code = 'any_str.max_length' - msg_template = 'ensure this value has at most {limit_value} characters' - - def __init__(self, *, limit_value: int) -> None: - super().__init__(limit_value=limit_value) - - -class StrError(PydanticTypeError): - msg_template = 'str type expected' - - -class StrRegexError(PydanticValueError): - code = 'str.regex' - msg_template = 'string does not match regex "{pattern}"' - - def __init__(self, *, pattern: str) -> None: - super().__init__(pattern=pattern) - - -class _NumberBoundError(PydanticValueError): - def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None: - super().__init__(limit_value=limit_value) - - -class NumberNotGtError(_NumberBoundError): - code = 'number.not_gt' - msg_template = 'ensure this value is greater than 
{limit_value}' - - -class NumberNotGeError(_NumberBoundError): - code = 'number.not_ge' - msg_template = 'ensure this value is greater than or equal to {limit_value}' - - -class NumberNotLtError(_NumberBoundError): - code = 'number.not_lt' - msg_template = 'ensure this value is less than {limit_value}' - - -class NumberNotLeError(_NumberBoundError): - code = 'number.not_le' - msg_template = 'ensure this value is less than or equal to {limit_value}' - - -class NumberNotFiniteError(PydanticValueError): - code = 'number.not_finite_number' - msg_template = 'ensure this value is a finite number' - - -class NumberNotMultipleError(PydanticValueError): - code = 'number.not_multiple' - msg_template = 'ensure this value is a multiple of {multiple_of}' - - def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None: - super().__init__(multiple_of=multiple_of) - - -class DecimalError(PydanticTypeError): - msg_template = 'value is not a valid decimal' - - -class DecimalIsNotFiniteError(PydanticValueError): - code = 'decimal.not_finite' - msg_template = 'value is not a valid decimal' - - -class DecimalMaxDigitsError(PydanticValueError): - code = 'decimal.max_digits' - msg_template = 'ensure that there are no more than {max_digits} digits in total' - - def __init__(self, *, max_digits: int) -> None: - super().__init__(max_digits=max_digits) - - -class DecimalMaxPlacesError(PydanticValueError): - code = 'decimal.max_places' - msg_template = 'ensure that there are no more than {decimal_places} decimal places' - - def __init__(self, *, decimal_places: int) -> None: - super().__init__(decimal_places=decimal_places) - - -class DecimalWholeDigitsError(PydanticValueError): - code = 'decimal.whole_digits' - msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point' - - def __init__(self, *, whole_digits: int) -> None: - super().__init__(whole_digits=whole_digits) - - -class DateTimeError(PydanticValueError): - msg_template = 'invalid 
datetime format' - - -class DateError(PydanticValueError): - msg_template = 'invalid date format' - - -class DateNotInThePastError(PydanticValueError): - code = 'date.not_in_the_past' - msg_template = 'date is not in the past' - - -class DateNotInTheFutureError(PydanticValueError): - code = 'date.not_in_the_future' - msg_template = 'date is not in the future' - - -class TimeError(PydanticValueError): - msg_template = 'invalid time format' - - -class DurationError(PydanticValueError): - msg_template = 'invalid duration format' - - -class HashableError(PydanticTypeError): - msg_template = 'value is not a valid hashable' - - -class UUIDError(PydanticTypeError): - msg_template = 'value is not a valid uuid' - - -class UUIDVersionError(PydanticValueError): - code = 'uuid.version' - msg_template = 'uuid version {required_version} expected' - - def __init__(self, *, required_version: int) -> None: - super().__init__(required_version=required_version) - - -class ArbitraryTypeError(PydanticTypeError): - code = 'arbitrary_type' - msg_template = 'instance of {expected_arbitrary_type} expected' - - def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None: - super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type)) - - -class ClassError(PydanticTypeError): - code = 'class' - msg_template = 'a class is expected' - - -class SubclassError(PydanticTypeError): - code = 'subclass' - msg_template = 'subclass of {expected_class} expected' - - def __init__(self, *, expected_class: Type[Any]) -> None: - super().__init__(expected_class=display_as_type(expected_class)) - - -class JsonError(PydanticValueError): - msg_template = 'Invalid JSON' - - -class JsonTypeError(PydanticTypeError): - code = 'json' - msg_template = 'JSON object must be str, bytes or bytearray' - - -class PatternError(PydanticValueError): - code = 'regex_pattern' - msg_template = 'Invalid regular expression' - - -class DataclassTypeError(PydanticTypeError): - code = 'dataclass' - 
msg_template = 'instance of {class_name}, tuple or dict expected' - - -class CallableError(PydanticTypeError): - msg_template = '{value} is not callable' - - -class EnumError(PydanticTypeError): - code = 'enum_instance' - msg_template = '{value} is not a valid Enum instance' - - -class IntEnumError(PydanticTypeError): - code = 'int_enum_instance' - msg_template = '{value} is not a valid IntEnum instance' - - -class IPvAnyAddressError(PydanticValueError): - msg_template = 'value is not a valid IPv4 or IPv6 address' - - -class IPvAnyInterfaceError(PydanticValueError): - msg_template = 'value is not a valid IPv4 or IPv6 interface' - - -class IPvAnyNetworkError(PydanticValueError): - msg_template = 'value is not a valid IPv4 or IPv6 network' - - -class IPv4AddressError(PydanticValueError): - msg_template = 'value is not a valid IPv4 address' - - -class IPv6AddressError(PydanticValueError): - msg_template = 'value is not a valid IPv6 address' - - -class IPv4NetworkError(PydanticValueError): - msg_template = 'value is not a valid IPv4 network' - - -class IPv6NetworkError(PydanticValueError): - msg_template = 'value is not a valid IPv6 network' - - -class IPv4InterfaceError(PydanticValueError): - msg_template = 'value is not a valid IPv4 interface' - - -class IPv6InterfaceError(PydanticValueError): - msg_template = 'value is not a valid IPv6 interface' - - -class ColorError(PydanticValueError): - msg_template = 'value is not a valid color: {reason}' - - -class StrictBoolError(PydanticValueError): - msg_template = 'value is not a valid boolean' - - -class NotDigitError(PydanticValueError): - code = 'payment_card_number.digits' - msg_template = 'card number is not all digits' - - -class LuhnValidationError(PydanticValueError): - code = 'payment_card_number.luhn_check' - msg_template = 'card number is not luhn valid' - - -class InvalidLengthForBrand(PydanticValueError): - code = 'payment_card_number.invalid_length_for_brand' - msg_template = 'Length for a {brand} card must 
be {required_length}' - - -class InvalidByteSize(PydanticValueError): - msg_template = 'could not parse value and unit from byte string' - - -class InvalidByteSizeUnit(PydanticValueError): - msg_template = 'could not interpret byte unit: {unit}' - - -class MissingDiscriminator(PydanticValueError): - code = 'discriminated_union.missing_discriminator' - msg_template = 'Discriminator {discriminator_key!r} is missing in value' - - -class InvalidDiscriminator(PydanticValueError): - code = 'discriminated_union.invalid_discriminator' - msg_template = ( - 'No match for discriminator {discriminator_key!r} and value {discriminator_value!r} ' - '(allowed values: {allowed_values})' - ) - - def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None: - super().__init__( - discriminator_key=discriminator_key, - discriminator_value=discriminator_value, - allowed_values=', '.join(map(repr, allowed_values)), - ) diff --git a/lib/pydantic/v1/fields.py b/lib/pydantic/v1/fields.py deleted file mode 100644 index 60d260e9..00000000 --- a/lib/pydantic/v1/fields.py +++ /dev/null @@ -1,1253 +0,0 @@ -import copy -import re -from collections import Counter as CollectionCounter, defaultdict, deque -from collections.abc import Callable, Hashable as CollectionsHashable, Iterable as CollectionsIterable -from typing import ( - TYPE_CHECKING, - Any, - Counter, - DefaultDict, - Deque, - Dict, - ForwardRef, - FrozenSet, - Generator, - Iterable, - Iterator, - List, - Mapping, - Optional, - Pattern, - Sequence, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -from typing_extensions import Annotated, Final - -from . 
import errors as errors_ -from .class_validators import Validator, make_generic_validator, prep_validators -from .error_wrappers import ErrorWrapper -from .errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError -from .types import Json, JsonWrapper -from .typing import ( - NoArgAnyCallable, - convert_generics, - display_as_type, - get_args, - get_origin, - is_finalvar, - is_literal_type, - is_new_type, - is_none_type, - is_typeddict, - is_typeddict_special, - is_union, - new_type_supertype, -) -from .utils import ( - PyObjectStr, - Representation, - ValueItems, - get_discriminator_alias_and_values, - get_unique_discriminator_alias, - lenient_isinstance, - lenient_issubclass, - sequence_like, - smart_deepcopy, -) -from .validators import constant_validator, dict_validator, find_validators, validate_json - -Required: Any = Ellipsis - -T = TypeVar('T') - - -class UndefinedType: - def __repr__(self) -> str: - return 'PydanticUndefined' - - def __copy__(self: T) -> T: - return self - - def __reduce__(self) -> str: - return 'Undefined' - - def __deepcopy__(self: T, _: Any) -> T: - return self - - -Undefined = UndefinedType() - -if TYPE_CHECKING: - from .class_validators import ValidatorsList - from .config import BaseConfig - from .error_wrappers import ErrorList - from .types import ModelOrDc - from .typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs - - ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]] - LocStr = Union[Tuple[Union[int, str], ...], str] - BoolUndefined = Union[bool, UndefinedType] - - -class FieldInfo(Representation): - """ - Captures extra information about a field. 
- """ - - __slots__ = ( - 'default', - 'default_factory', - 'alias', - 'alias_priority', - 'title', - 'description', - 'exclude', - 'include', - 'const', - 'gt', - 'ge', - 'lt', - 'le', - 'multiple_of', - 'allow_inf_nan', - 'max_digits', - 'decimal_places', - 'min_items', - 'max_items', - 'unique_items', - 'min_length', - 'max_length', - 'allow_mutation', - 'repr', - 'regex', - 'discriminator', - 'extra', - ) - - # field constraints with the default value, it's also used in update_from_config below - __field_constraints__ = { - 'min_length': None, - 'max_length': None, - 'regex': None, - 'gt': None, - 'lt': None, - 'ge': None, - 'le': None, - 'multiple_of': None, - 'allow_inf_nan': None, - 'max_digits': None, - 'decimal_places': None, - 'min_items': None, - 'max_items': None, - 'unique_items': None, - 'allow_mutation': True, - } - - def __init__(self, default: Any = Undefined, **kwargs: Any) -> None: - self.default = default - self.default_factory = kwargs.pop('default_factory', None) - self.alias = kwargs.pop('alias', None) - self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias is not None else None) - self.title = kwargs.pop('title', None) - self.description = kwargs.pop('description', None) - self.exclude = kwargs.pop('exclude', None) - self.include = kwargs.pop('include', None) - self.const = kwargs.pop('const', None) - self.gt = kwargs.pop('gt', None) - self.ge = kwargs.pop('ge', None) - self.lt = kwargs.pop('lt', None) - self.le = kwargs.pop('le', None) - self.multiple_of = kwargs.pop('multiple_of', None) - self.allow_inf_nan = kwargs.pop('allow_inf_nan', None) - self.max_digits = kwargs.pop('max_digits', None) - self.decimal_places = kwargs.pop('decimal_places', None) - self.min_items = kwargs.pop('min_items', None) - self.max_items = kwargs.pop('max_items', None) - self.unique_items = kwargs.pop('unique_items', None) - self.min_length = kwargs.pop('min_length', None) - self.max_length = kwargs.pop('max_length', None) - self.allow_mutation = 
kwargs.pop('allow_mutation', True) - self.regex = kwargs.pop('regex', None) - self.discriminator = kwargs.pop('discriminator', None) - self.repr = kwargs.pop('repr', True) - self.extra = kwargs - - def __repr_args__(self) -> 'ReprArgs': - field_defaults_to_hide: Dict[str, Any] = { - 'repr': True, - **self.__field_constraints__, - } - - attrs = ((s, getattr(self, s)) for s in self.__slots__) - return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)] - - def get_constraints(self) -> Set[str]: - """ - Gets the constraints set on the field by comparing the constraint value with its default value - - :return: the constraints set on field_info - """ - return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default} - - def update_from_config(self, from_config: Dict[str, Any]) -> None: - """ - Update this FieldInfo based on a dict from get_field_info, only fields which have not been set are dated. - """ - for attr_name, value in from_config.items(): - try: - current_value = getattr(self, attr_name) - except AttributeError: - # attr_name is not an attribute of FieldInfo, it should therefore be added to extra - # (except if extra already has this value!) 
- self.extra.setdefault(attr_name, value) - else: - if current_value is self.__field_constraints__.get(attr_name, None): - setattr(self, attr_name, value) - elif attr_name == 'exclude': - self.exclude = ValueItems.merge(value, current_value) - elif attr_name == 'include': - self.include = ValueItems.merge(value, current_value, intersect=True) - - def _validate(self) -> None: - if self.default is not Undefined and self.default_factory is not None: - raise ValueError('cannot specify both default and default_factory') - - -def Field( - default: Any = Undefined, - *, - default_factory: Optional[NoArgAnyCallable] = None, - alias: Optional[str] = None, - title: Optional[str] = None, - description: Optional[str] = None, - exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None, - include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None, - const: Optional[bool] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - multiple_of: Optional[float] = None, - allow_inf_nan: Optional[bool] = None, - max_digits: Optional[int] = None, - decimal_places: Optional[int] = None, - min_items: Optional[int] = None, - max_items: Optional[int] = None, - unique_items: Optional[bool] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - allow_mutation: bool = True, - regex: Optional[str] = None, - discriminator: Optional[str] = None, - repr: bool = True, - **extra: Any, -) -> Any: - """ - Used to provide extra information about a field, either for the model schema or complex validation. Some arguments - apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``. 
- - :param default: since this is replacing the field’s default, its first argument is used - to set the default, use ellipsis (``...``) to indicate the field is required - :param default_factory: callable that will be called when a default value is needed for this field - If both `default` and `default_factory` are set, an error is raised. - :param alias: the public name of the field - :param title: can be any string, used in the schema - :param description: can be any string, used in the schema - :param exclude: exclude this field while dumping. - Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. - :param include: include this field while dumping. - Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. - :param const: this field is required and *must* take it's default value - :param gt: only applies to numbers, requires the field to be "greater than". The schema - will have an ``exclusiveMinimum`` validation keyword - :param ge: only applies to numbers, requires the field to be "greater than or equal to". The - schema will have a ``minimum`` validation keyword - :param lt: only applies to numbers, requires the field to be "less than". The schema - will have an ``exclusiveMaximum`` validation keyword - :param le: only applies to numbers, requires the field to be "less than or equal to". The - schema will have a ``maximum`` validation keyword - :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The - schema will have a ``multipleOf`` validation keyword - :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf), - which is a valid Python float. Default True, set to False for compatibility with JSON. - :param max_digits: only applies to Decimals, requires the field to have a maximum number - of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes. 
- :param decimal_places: only applies to Decimals, requires the field to have at most a number of decimal places - allowed. It does not include trailing decimal zeroes. - :param min_items: only applies to lists, requires the field to have a minimum number of - elements. The schema will have a ``minItems`` validation keyword - :param max_items: only applies to lists, requires the field to have a maximum number of - elements. The schema will have a ``maxItems`` validation keyword - :param unique_items: only applies to lists, requires the field not to have duplicated - elements. The schema will have a ``uniqueItems`` validation keyword - :param min_length: only applies to strings, requires the field to have a minimum length. The - schema will have a ``minLength`` validation keyword - :param max_length: only applies to strings, requires the field to have a maximum length. The - schema will have a ``maxLength`` validation keyword - :param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is - assigned on an instance. The BaseModel Config must set validate_assignment to True - :param regex: only applies to strings, requires the field match against a regular expression - pattern string. The schema will have a ``pattern`` validation keyword - :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field. 
- The `discriminator` is the name of this common field to shorten validation and improve generated schema - :param repr: show this field in the representation - :param **extra: any additional keyword arguments will be added as is to the schema - """ - field_info = FieldInfo( - default, - default_factory=default_factory, - alias=alias, - title=title, - description=description, - exclude=exclude, - include=include, - const=const, - gt=gt, - ge=ge, - lt=lt, - le=le, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - min_items=min_items, - max_items=max_items, - unique_items=unique_items, - min_length=min_length, - max_length=max_length, - allow_mutation=allow_mutation, - regex=regex, - discriminator=discriminator, - repr=repr, - **extra, - ) - field_info._validate() - return field_info - - -# used to be an enum but changed to int's for small performance improvement as less access overhead -SHAPE_SINGLETON = 1 -SHAPE_LIST = 2 -SHAPE_SET = 3 -SHAPE_MAPPING = 4 -SHAPE_TUPLE = 5 -SHAPE_TUPLE_ELLIPSIS = 6 -SHAPE_SEQUENCE = 7 -SHAPE_FROZENSET = 8 -SHAPE_ITERABLE = 9 -SHAPE_GENERIC = 10 -SHAPE_DEQUE = 11 -SHAPE_DICT = 12 -SHAPE_DEFAULTDICT = 13 -SHAPE_COUNTER = 14 -SHAPE_NAME_LOOKUP = { - SHAPE_LIST: 'List[{}]', - SHAPE_SET: 'Set[{}]', - SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]', - SHAPE_SEQUENCE: 'Sequence[{}]', - SHAPE_FROZENSET: 'FrozenSet[{}]', - SHAPE_ITERABLE: 'Iterable[{}]', - SHAPE_DEQUE: 'Deque[{}]', - SHAPE_DICT: 'Dict[{}]', - SHAPE_DEFAULTDICT: 'DefaultDict[{}]', - SHAPE_COUNTER: 'Counter[{}]', -} - -MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER} - - -class ModelField(Representation): - __slots__ = ( - 'type_', - 'outer_type_', - 'annotation', - 'sub_fields', - 'sub_fields_mapping', - 'key_field', - 'validators', - 'pre_validators', - 'post_validators', - 'default', - 'default_factory', - 'required', - 'final', - 'model_config', - 'name', - 'alias', - 
'has_alias', - 'field_info', - 'discriminator_key', - 'discriminator_alias', - 'validate_always', - 'allow_none', - 'shape', - 'class_validators', - 'parse_json', - ) - - def __init__( - self, - *, - name: str, - type_: Type[Any], - class_validators: Optional[Dict[str, Validator]], - model_config: Type['BaseConfig'], - default: Any = None, - default_factory: Optional[NoArgAnyCallable] = None, - required: 'BoolUndefined' = Undefined, - final: bool = False, - alias: Optional[str] = None, - field_info: Optional[FieldInfo] = None, - ) -> None: - self.name: str = name - self.has_alias: bool = alias is not None - self.alias: str = alias if alias is not None else name - self.annotation = type_ - self.type_: Any = convert_generics(type_) - self.outer_type_: Any = type_ - self.class_validators = class_validators or {} - self.default: Any = default - self.default_factory: Optional[NoArgAnyCallable] = default_factory - self.required: 'BoolUndefined' = required - self.final: bool = final - self.model_config = model_config - self.field_info: FieldInfo = field_info or FieldInfo(default) - self.discriminator_key: Optional[str] = self.field_info.discriminator - self.discriminator_alias: Optional[str] = self.discriminator_key - - self.allow_none: bool = False - self.validate_always: bool = False - self.sub_fields: Optional[List[ModelField]] = None - self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None # used for discriminated union - self.key_field: Optional[ModelField] = None - self.validators: 'ValidatorsList' = [] - self.pre_validators: Optional['ValidatorsList'] = None - self.post_validators: Optional['ValidatorsList'] = None - self.parse_json: bool = False - self.shape: int = SHAPE_SINGLETON - self.model_config.prepare_field(self) - self.prepare() - - def get_default(self) -> Any: - return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() - - @staticmethod - def _get_field_info( - field_name: str, annotation: Any, value: Any, 
config: Type['BaseConfig'] - ) -> Tuple[FieldInfo, Any]: - """ - Get a FieldInfo from a root typing.Annotated annotation, value, or config default. - - The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contain - a FieldInfo, a new one will be created using the config. - - :param field_name: name of the field for use in error messages - :param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]` - :param value: the field's assigned value - :param config: the model's config object - :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config. - """ - field_info_from_config = config.get_field_info(field_name) - - field_info = None - if get_origin(annotation) is Annotated: - field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)] - if len(field_infos) > 1: - raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}') - field_info = next(iter(field_infos), None) - if field_info is not None: - field_info = copy.copy(field_info) - field_info.update_from_config(field_info_from_config) - if field_info.default not in (Undefined, Required): - raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}') - if value is not Undefined and value is not Required: - # check also `Required` because of `validate_arguments` that sets `...` as default value - field_info.default = value - - if isinstance(value, FieldInfo): - if field_info is not None: - raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}') - field_info = value - field_info.update_from_config(field_info_from_config) - elif field_info is None: - field_info = FieldInfo(value, **field_info_from_config) - value = None if field_info.default_factory is not None else field_info.default - field_info._validate() - return field_info, value - - @classmethod - def infer( - cls, - *, - name: str, - value: Any, - 
annotation: Any, - class_validators: Optional[Dict[str, Validator]], - config: Type['BaseConfig'], - ) -> 'ModelField': - from .schema import get_annotation_from_field_info - - field_info, value = cls._get_field_info(name, annotation, value, config) - required: 'BoolUndefined' = Undefined - if value is Required: - required = True - value = None - elif value is not Undefined: - required = False - annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment) - - return cls( - name=name, - type_=annotation, - alias=field_info.alias, - class_validators=class_validators, - default=value, - default_factory=field_info.default_factory, - required=required, - model_config=config, - field_info=field_info, - ) - - def set_config(self, config: Type['BaseConfig']) -> None: - self.model_config = config - info_from_config = config.get_field_info(self.name) - config.prepare_field(self) - new_alias = info_from_config.get('alias') - new_alias_priority = info_from_config.get('alias_priority') or 0 - if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0): - self.field_info.alias = new_alias - self.field_info.alias_priority = new_alias_priority - self.alias = new_alias - new_exclude = info_from_config.get('exclude') - if new_exclude is not None: - self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude) - new_include = info_from_config.get('include') - if new_include is not None: - self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True) - - @property - def alt_alias(self) -> bool: - return self.name != self.alias - - def prepare(self) -> None: - """ - Prepare the field but inspecting self.default, self.type_ etc. - - Note: this method is **not** idempotent (because _type_analysis is not idempotent), - e.g. calling it it multiple times may modify the field and configure it incorrectly. 
- """ - self._set_default_and_type() - if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType: - # self.type_ is currently a ForwardRef and there's nothing we can do now, - # user will need to call model.update_forward_refs() - return - - self._type_analysis() - if self.required is Undefined: - self.required = True - if self.default is Undefined and self.default_factory is None: - self.default = None - self.populate_validators() - - def _set_default_and_type(self) -> None: - """ - Set the default value, infer the type if needed and check if `None` value is valid. - """ - if self.default_factory is not None: - if self.type_ is Undefined: - raise errors_.ConfigError( - f'you need to set the type of field {self.name!r} when using `default_factory`' - ) - return - - default_value = self.get_default() - - if default_value is not None and self.type_ is Undefined: - self.type_ = default_value.__class__ - self.outer_type_ = self.type_ - self.annotation = self.type_ - - if self.type_ is Undefined: - raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') - - if self.required is False and default_value is None: - self.allow_none = True - - def _type_analysis(self) -> None: # noqa: C901 (ignore complexity) - # typing interface is horrible, we have to do some ugly checks - if lenient_issubclass(self.type_, JsonWrapper): - self.type_ = self.type_.inner_type - self.parse_json = True - elif lenient_issubclass(self.type_, Json): - self.type_ = Any - self.parse_json = True - elif isinstance(self.type_, TypeVar): - if self.type_.__bound__: - self.type_ = self.type_.__bound__ - elif self.type_.__constraints__: - self.type_ = Union[self.type_.__constraints__] - else: - self.type_ = Any - elif is_new_type(self.type_): - self.type_ = new_type_supertype(self.type_) - - if self.type_ is Any or self.type_ is object: - if self.required is Undefined: - self.required = False - self.allow_none = True - return - elif self.type_ is Pattern or 
self.type_ is re.Pattern: - # python 3.7 only, Pattern is a typing object but without sub fields - return - elif is_literal_type(self.type_): - return - elif is_typeddict(self.type_): - return - - if is_finalvar(self.type_): - self.final = True - - if self.type_ is Final: - self.type_ = Any - else: - self.type_ = get_args(self.type_)[0] - - self._type_analysis() - return - - origin = get_origin(self.type_) - - if origin is Annotated or is_typeddict_special(origin): - self.type_ = get_args(self.type_)[0] - self._type_analysis() - return - - if self.discriminator_key is not None and not is_union(origin): - raise TypeError('`discriminator` can only be used with `Union` type with more than one variant') - - # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None` - if origin is None or origin is CollectionsHashable: - # field is not "typing" object eg. Union, Dict, List etc. - # allow None for virtual superclasses of NoneType, e.g. Hashable - if isinstance(self.type_, type) and isinstance(None, self.type_): - self.allow_none = True - return - elif origin is Callable: - return - elif is_union(origin): - types_ = [] - for type_ in get_args(self.type_): - if is_none_type(type_) or type_ is Any or type_ is object: - if self.required is Undefined: - self.required = False - self.allow_none = True - if is_none_type(type_): - continue - types_.append(type_) - - if len(types_) == 1: - # Optional[] - self.type_ = types_[0] - # this is the one case where the "outer type" isn't just the original type - self.outer_type_ = self.type_ - # re-run to correctly interpret the new self.type_ - self._type_analysis() - else: - self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_] - - if self.discriminator_key is not None: - self.prepare_discriminated_union_sub_fields() - return - elif issubclass(origin, Tuple): # type: ignore - # origin == Tuple without item type - args = get_args(self.type_) - if not args: # 
plain tuple - self.type_ = Any - self.shape = SHAPE_TUPLE_ELLIPSIS - elif len(args) == 2 and args[1] is Ellipsis: # e.g. Tuple[int, ...] - self.type_ = args[0] - self.shape = SHAPE_TUPLE_ELLIPSIS - self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')] - elif args == ((),): # Tuple[()] means empty tuple - self.shape = SHAPE_TUPLE - self.type_ = Any - self.sub_fields = [] - else: - self.shape = SHAPE_TUPLE - self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)] - return - elif issubclass(origin, List): - # Create self validators - get_validators = getattr(self.type_, '__get_validators__', None) - if get_validators: - self.class_validators.update( - {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} - ) - - self.type_ = get_args(self.type_)[0] - self.shape = SHAPE_LIST - elif issubclass(origin, Set): - # Create self validators - get_validators = getattr(self.type_, '__get_validators__', None) - if get_validators: - self.class_validators.update( - {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} - ) - - self.type_ = get_args(self.type_)[0] - self.shape = SHAPE_SET - elif issubclass(origin, FrozenSet): - # Create self validators - get_validators = getattr(self.type_, '__get_validators__', None) - if get_validators: - self.class_validators.update( - {f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} - ) - - self.type_ = get_args(self.type_)[0] - self.shape = SHAPE_FROZENSET - elif issubclass(origin, Deque): - self.type_ = get_args(self.type_)[0] - self.shape = SHAPE_DEQUE - elif issubclass(origin, Sequence): - self.type_ = get_args(self.type_)[0] - self.shape = SHAPE_SEQUENCE - # priority to most common mapping: dict - elif origin is dict or origin is Dict: - self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) - self.type_ = 
get_args(self.type_)[1] - self.shape = SHAPE_DICT - elif issubclass(origin, DefaultDict): - self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) - self.type_ = get_args(self.type_)[1] - self.shape = SHAPE_DEFAULTDICT - elif issubclass(origin, Counter): - self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) - self.type_ = int - self.shape = SHAPE_COUNTER - elif issubclass(origin, Mapping): - self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) - self.type_ = get_args(self.type_)[1] - self.shape = SHAPE_MAPPING - # Equality check as almost everything inherits form Iterable, including str - # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other - elif origin in {Iterable, CollectionsIterable}: - self.type_ = get_args(self.type_)[0] - self.shape = SHAPE_ITERABLE - self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')] - elif issubclass(origin, Type): # type: ignore - return - elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed: - # Is a Pydantic-compatible generic that handles itself - # or we have arbitrary_types_allowed = True - self.shape = SHAPE_GENERIC - self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))] - self.type_ = origin - return - else: - raise TypeError(f'Fields of type "{origin}" are not supported.') - - # type_ has been refined eg. 
as the type of a List and sub_fields needs to be populated - self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)] - - def prepare_discriminated_union_sub_fields(self) -> None: - """ - Prepare the mapping -> and update `sub_fields` - Note that this process can be aborted if a `ForwardRef` is encountered - """ - assert self.discriminator_key is not None - - if self.type_.__class__ is DeferredType: - return - - assert self.sub_fields is not None - sub_fields_mapping: Dict[str, 'ModelField'] = {} - all_aliases: Set[str] = set() - - for sub_field in self.sub_fields: - t = sub_field.type_ - if t.__class__ is ForwardRef: - # Stopping everything...will need to call `update_forward_refs` - return - - alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key) - all_aliases.add(alias) - for discriminator_value in discriminator_values: - sub_fields_mapping[discriminator_value] = sub_field - - self.sub_fields_mapping = sub_fields_mapping - self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key) - - def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField': - if for_keys: - class_validators = None - else: - # validators for sub items should not have `each_item` as we want to check only the first sublevel - class_validators = { - k: Validator( - func=v.func, - pre=v.pre, - each_item=False, - always=v.always, - check_fields=v.check_fields, - skip_on_failure=v.skip_on_failure, - ) - for k, v in self.class_validators.items() - if v.each_item - } - - field_info, _ = self._get_field_info(name, type_, None, self.model_config) - - return self.__class__( - type_=type_, - name=name, - class_validators=class_validators, - model_config=self.model_config, - field_info=field_info, - ) - - def populate_validators(self) -> None: - """ - Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__ - and class 
validators. This method should be idempotent, e.g. it should be safe to call multiple times - without mis-configuring the field. - """ - self.validate_always = getattr(self.type_, 'validate_always', False) or any( - v.always for v in self.class_validators.values() - ) - - class_validators_ = self.class_validators.values() - if not self.sub_fields or self.shape == SHAPE_GENERIC: - get_validators = getattr(self.type_, '__get_validators__', None) - v_funcs = ( - *[v.func for v in class_validators_ if v.each_item and v.pre], - *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))), - *[v.func for v in class_validators_ if v.each_item and not v.pre], - ) - self.validators = prep_validators(v_funcs) - - self.pre_validators = [] - self.post_validators = [] - - if self.field_info and self.field_info.const: - self.post_validators.append(make_generic_validator(constant_validator)) - - if class_validators_: - self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) - self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) - - if self.parse_json: - self.pre_validators.append(make_generic_validator(validate_json)) - - self.pre_validators = self.pre_validators or None - self.post_validators = self.post_validators or None - - def validate( - self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None - ) -> 'ValidateReturn': - assert self.type_.__class__ is not DeferredType - - if self.type_.__class__ is ForwardRef: - assert cls is not None - raise ConfigError( - f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' - f'you might need to call {cls.__name__}.update_forward_refs().' 
- ) - - errors: Optional['ErrorList'] - if self.pre_validators: - v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators) - if errors: - return v, errors - - if v is None: - if is_none_type(self.type_): - # keep validating - pass - elif self.allow_none: - if self.post_validators: - return self._apply_validators(v, values, loc, cls, self.post_validators) - else: - return None, None - else: - return v, ErrorWrapper(NoneIsNotAllowedError(), loc) - - if self.shape == SHAPE_SINGLETON: - v, errors = self._validate_singleton(v, values, loc, cls) - elif self.shape in MAPPING_LIKE_SHAPES: - v, errors = self._validate_mapping_like(v, values, loc, cls) - elif self.shape == SHAPE_TUPLE: - v, errors = self._validate_tuple(v, values, loc, cls) - elif self.shape == SHAPE_ITERABLE: - v, errors = self._validate_iterable(v, values, loc, cls) - elif self.shape == SHAPE_GENERIC: - v, errors = self._apply_validators(v, values, loc, cls, self.validators) - else: - # sequence, list, set, generator, tuple with ellipsis, frozen set - v, errors = self._validate_sequence_like(v, values, loc, cls) - - if not errors and self.post_validators: - v, errors = self._apply_validators(v, values, loc, cls, self.post_validators) - return v, errors - - def _validate_sequence_like( # noqa: C901 (ignore complexity) - self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] - ) -> 'ValidateReturn': - """ - Validate sequence-like containers: lists, tuples, sets and generators - Note that large if-else blocks are necessary to enable Cython - optimization, which is why we disable the complexity check above. 
- """ - if not sequence_like(v): - e: errors_.PydanticTypeError - if self.shape == SHAPE_LIST: - e = errors_.ListError() - elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS): - e = errors_.TupleError() - elif self.shape == SHAPE_SET: - e = errors_.SetError() - elif self.shape == SHAPE_FROZENSET: - e = errors_.FrozenSetError() - else: - e = errors_.SequenceError() - return v, ErrorWrapper(e, loc) - - loc = loc if isinstance(loc, tuple) else (loc,) - result = [] - errors: List[ErrorList] = [] - for i, v_ in enumerate(v): - v_loc = *loc, i - r, ee = self._validate_singleton(v_, values, v_loc, cls) - if ee: - errors.append(ee) - else: - result.append(r) - - if errors: - return v, errors - - converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result - - if self.shape == SHAPE_SET: - converted = set(result) - elif self.shape == SHAPE_FROZENSET: - converted = frozenset(result) - elif self.shape == SHAPE_TUPLE_ELLIPSIS: - converted = tuple(result) - elif self.shape == SHAPE_DEQUE: - converted = deque(result, maxlen=getattr(v, 'maxlen', None)) - elif self.shape == SHAPE_SEQUENCE: - if isinstance(v, tuple): - converted = tuple(result) - elif isinstance(v, set): - converted = set(result) - elif isinstance(v, Generator): - converted = iter(result) - elif isinstance(v, deque): - converted = deque(result, maxlen=getattr(v, 'maxlen', None)) - return converted, None - - def _validate_iterable( - self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] - ) -> 'ValidateReturn': - """ - Validate Iterables. - - This intentionally doesn't validate values to allow infinite generators. 
- """ - - try: - iterable = iter(v) - except TypeError: - return v, ErrorWrapper(errors_.IterableError(), loc) - return iterable, None - - def _validate_tuple( - self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] - ) -> 'ValidateReturn': - e: Optional[Exception] = None - if not sequence_like(v): - e = errors_.TupleError() - else: - actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore - if actual_length != expected_length: - e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length) - - if e: - return v, ErrorWrapper(e, loc) - - loc = loc if isinstance(loc, tuple) else (loc,) - result = [] - errors: List[ErrorList] = [] - for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore - v_loc = *loc, i - r, ee = field.validate(v_, values, loc=v_loc, cls=cls) - if ee: - errors.append(ee) - else: - result.append(r) - - if errors: - return v, errors - else: - return tuple(result), None - - def _validate_mapping_like( - self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] - ) -> 'ValidateReturn': - try: - v_iter = dict_validator(v) - except TypeError as exc: - return v, ErrorWrapper(exc, loc) - - loc = loc if isinstance(loc, tuple) else (loc,) - result, errors = {}, [] - for k, v_ in v_iter.items(): - v_loc = *loc, '__key__' - key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore - if key_errors: - errors.append(key_errors) - continue - - v_loc = *loc, k - value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls) - if value_errors: - errors.append(value_errors) - continue - - result[key_result] = value_result - if errors: - return v, errors - elif self.shape == SHAPE_DICT: - return result, None - elif self.shape == SHAPE_DEFAULTDICT: - return defaultdict(self.type_, result), None - elif self.shape == SHAPE_COUNTER: - return CollectionCounter(result), None - else: - return 
self._get_mapping_value(v, result), None - - def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]: - """ - When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid - coercing to `dict` unwillingly. - """ - original_cls = original.__class__ - - if original_cls == dict or original_cls == Dict: - return converted - elif original_cls in {defaultdict, DefaultDict}: - return defaultdict(self.type_, converted) - else: - try: - # Counter, OrderedDict, UserDict, ... - return original_cls(converted) # type: ignore - except TypeError: - raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None - - def _validate_singleton( - self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] - ) -> 'ValidateReturn': - if self.sub_fields: - if self.discriminator_key is not None: - return self._validate_discriminated_union(v, values, loc, cls) - - errors = [] - - if self.model_config.smart_union and is_union(get_origin(self.type_)): - # 1st pass: check if the value is an exact instance of one of the Union types - # (e.g. 
to avoid coercing a bool into an int) - for field in self.sub_fields: - if v.__class__ is field.outer_type_: - return v, None - - # 2nd pass: check if the value is an instance of any subclass of the Union types - for field in self.sub_fields: - # This whole logic will be improved later on to support more complex `isinstance` checks - # It will probably be done once a strict mode is added and be something like: - # ``` - # value, error = field.validate(v, values, strict=True) - # if error is None: - # return value, None - # ``` - try: - if isinstance(v, field.outer_type_): - return v, None - except TypeError: - # compound type - if lenient_isinstance(v, get_origin(field.outer_type_)): - value, error = field.validate(v, values, loc=loc, cls=cls) - if not error: - return value, None - - # 1st pass by default or 3rd pass with `smart_union` enabled: - # check if the value can be coerced into one of the Union types - for field in self.sub_fields: - value, error = field.validate(v, values, loc=loc, cls=cls) - if error: - errors.append(error) - else: - return value, None - return v, errors - else: - return self._apply_validators(v, values, loc, cls, self.validators) - - def _validate_discriminated_union( - self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] - ) -> 'ValidateReturn': - assert self.discriminator_key is not None - assert self.discriminator_alias is not None - - try: - try: - discriminator_value = v[self.discriminator_alias] - except KeyError: - if self.model_config.allow_population_by_field_name: - discriminator_value = v[self.discriminator_key] - else: - raise - except KeyError: - return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) - except TypeError: - try: - # BaseModel or dataclass - discriminator_value = getattr(v, self.discriminator_key) - except (AttributeError, TypeError): - return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) - - if 
self.sub_fields_mapping is None: - assert cls is not None - raise ConfigError( - f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' - f'you might need to call {cls.__name__}.update_forward_refs().' - ) - - try: - sub_field = self.sub_fields_mapping[discriminator_value] - except (KeyError, TypeError): - # KeyError: `discriminator_value` is not in the dictionary. - # TypeError: `discriminator_value` is unhashable. - assert self.sub_fields_mapping is not None - return v, ErrorWrapper( - InvalidDiscriminator( - discriminator_key=self.discriminator_key, - discriminator_value=discriminator_value, - allowed_values=list(self.sub_fields_mapping), - ), - loc, - ) - else: - if not isinstance(loc, tuple): - loc = (loc,) - return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls) - - def _apply_validators( - self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList' - ) -> 'ValidateReturn': - for validator in validators: - try: - v = validator(cls, v, values, self, self.model_config) - except (ValueError, TypeError, AssertionError) as exc: - return v, ErrorWrapper(exc, loc) - return v, None - - def is_complex(self) -> bool: - """ - Whether the field is "complex" eg. env variables should be parsed as JSON. 
- """ - from .main import BaseModel - - return ( - self.shape != SHAPE_SINGLETON - or hasattr(self.type_, '__pydantic_model__') - or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict)) - ) - - def _type_display(self) -> PyObjectStr: - t = display_as_type(self.type_) - - if self.shape in MAPPING_LIKE_SHAPES: - t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]' # type: ignore - elif self.shape == SHAPE_TUPLE: - t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields)) # type: ignore - elif self.shape == SHAPE_GENERIC: - assert self.sub_fields - t = '{}[{}]'.format( - display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields) - ) - elif self.shape != SHAPE_SINGLETON: - t = SHAPE_NAME_LOOKUP[self.shape].format(t) - - if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields): - t = f'Optional[{t}]' - return PyObjectStr(t) - - def __repr_args__(self) -> 'ReprArgs': - args = [('name', self.name), ('type', self._type_display()), ('required', self.required)] - - if not self.required: - if self.default_factory is not None: - args.append(('default_factory', f'')) - else: - args.append(('default', self.default)) - - if self.alt_alias: - args.append(('alias', self.alias)) - return args - - -class ModelPrivateAttr(Representation): - __slots__ = ('default', 'default_factory') - - def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None: - self.default = default - self.default_factory = default_factory - - def get_default(self) -> Any: - return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() - - def __eq__(self, other: Any) -> bool: - return isinstance(other, self.__class__) and (self.default, self.default_factory) == ( - other.default, - other.default_factory, - ) - - -def PrivateAttr( - default: Any = Undefined, - *, - default_factory: Optional[NoArgAnyCallable] = None, -) -> Any: - 
""" - Indicates that attribute is only used internally and never mixed with regular fields. - - Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant. - - Private attrs are stored in model __slots__. - - :param default: the attribute’s default value - :param default_factory: callable that will be called when a default value is needed for this attribute - If both `default` and `default_factory` are set, an error is raised. - """ - if default is not Undefined and default_factory is not None: - raise ValueError('cannot specify both default and default_factory') - - return ModelPrivateAttr( - default, - default_factory=default_factory, - ) - - -class DeferredType: - """ - Used to postpone field preparation, while creating recursive generic models. - """ - - -def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool: - return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo) diff --git a/lib/pydantic/v1/generics.py b/lib/pydantic/v1/generics.py deleted file mode 100644 index a75b6b98..00000000 --- a/lib/pydantic/v1/generics.py +++ /dev/null @@ -1,400 +0,0 @@ -import sys -import types -import typing -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - ForwardRef, - Generic, - Iterator, - List, - Mapping, - Optional, - Tuple, - Type, - TypeVar, - Union, - cast, -) -from weakref import WeakKeyDictionary, WeakValueDictionary - -from typing_extensions import Annotated, Literal as ExtLiteral - -from .class_validators import gather_all_validators -from .fields import DeferredType -from .main import BaseModel, create_model -from .types import JsonWrapper -from .typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base -from .utils import all_identical, lenient_issubclass - -if sys.version_info >= (3, 10): - from typing import _UnionGenericAlias -if sys.version_info >= (3, 8): - from typing import Literal - -GenericModelT = TypeVar('GenericModelT', 
bound='GenericModel') -TypeVarType = Any # since mypy doesn't allow the use of TypeVar as a type - -CacheKey = Tuple[Type[Any], Any, Tuple[Any, ...]] -Parametrization = Mapping[TypeVarType, Type[Any]] - -# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected -# once they are no longer referenced by the caller. -if sys.version_info >= (3, 9): # Typing for weak dictionaries available at 3.9 - GenericTypesCache = WeakValueDictionary[CacheKey, Type[BaseModel]] - AssignedParameters = WeakKeyDictionary[Type[BaseModel], Parametrization] -else: - GenericTypesCache = WeakValueDictionary - AssignedParameters = WeakKeyDictionary - -# _generic_types_cache is a Mapping from __class_getitem__ arguments to the parametrized version of generic models. -# This ensures multiple calls of e.g. A[B] return always the same class. -_generic_types_cache = GenericTypesCache() - -# _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations -# as captured during construction of the class (not instances). -# E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created, -# `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`. -# (This information is only otherwise available after creation from the class name string). -_assigned_parameters = AssignedParameters() - - -class GenericModel(BaseModel): - __slots__ = () - __concrete__: ClassVar[bool] = False - - if TYPE_CHECKING: - # Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with - # `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of - # `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below. 
- __parameters__: ClassVar[Tuple[TypeVarType, ...]] - - # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings - def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]: - """Instantiates a new class from a generic class `cls` and type variables `params`. - - :param params: Tuple of types the class . Given a generic class - `Model` with 2 type variables and a concrete model `Model[str, int]`, - the value `(str, int)` would be passed to `params`. - :return: New model class inheriting from `cls` with instantiated - types described by `params`. If no parameters are given, `cls` is - returned as is. - - """ - - def _cache_key(_params: Any) -> CacheKey: - args = get_args(_params) - # python returns a list for Callables, which is not hashable - if len(args) == 2 and isinstance(args[0], list): - args = (tuple(args[0]), args[1]) - return cls, _params, args - - cached = _generic_types_cache.get(_cache_key(params)) - if cached is not None: - return cached - if cls.__concrete__ and Generic not in cls.__bases__: - raise TypeError('Cannot parameterize a concrete instantiation of a generic model') - if not isinstance(params, tuple): - params = (params,) - if cls is GenericModel and any(isinstance(param, TypeVar) for param in params): - raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel') - if not hasattr(cls, '__parameters__'): - raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized') - - check_parameters_count(cls, params) - # Build map from generic typevars to passed params - typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params)) - if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map: - return cls # if arguments are equal to parameters it's the same object - - # Create new model with original model as parent inserting fields with DeferredType. 
- model_name = cls.__concrete_name__(params) - validators = gather_all_validators(cls) - - type_hints = get_all_type_hints(cls).items() - instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar} - - fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__} - - model_module, called_globally = get_caller_frame_info() - created_model = cast( - Type[GenericModel], # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes - create_model( - model_name, - __module__=model_module or cls.__module__, - __base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)), - __config__=None, - __validators__=validators, - __cls_kwargs__=None, - **fields, - ), - ) - - _assigned_parameters[created_model] = typevars_map - - if called_globally: # create global reference and therefore allow pickling - object_by_reference = None - reference_name = model_name - reference_module_globals = sys.modules[created_model.__module__].__dict__ - while object_by_reference is not created_model: - object_by_reference = reference_module_globals.setdefault(reference_name, created_model) - reference_name += '_' - - created_model.Config = cls.Config - - # Find any typevars that are still present in the model. - # If none are left, the model is fully "concrete", otherwise the new - # class is a generic class as well taking the found typevars as - # parameters. - new_params = tuple( - {param: None for param in iter_contained_typevars(typevars_map.values())} - ) # use dict as ordered set - created_model.__concrete__ = not new_params - if new_params: - created_model.__parameters__ = new_params - - # Save created model in cache so we don't end up creating duplicate - # models that should be identical. 
- _generic_types_cache[_cache_key(params)] = created_model - if len(params) == 1: - _generic_types_cache[_cache_key(params[0])] = created_model - - # Recursively walk class type hints and replace generic typevars - # with concrete types that were passed. - _prepare_model_fields(created_model, fields, instance_type_hints, typevars_map) - - return created_model - - @classmethod - def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: - """Compute class name for child classes. - - :param params: Tuple of types the class . Given a generic class - `Model` with 2 type variables and a concrete model `Model[str, int]`, - the value `(str, int)` would be passed to `params`. - :return: String representing a the new class where `params` are - passed to `cls` as type variables. - - This method can be overridden to achieve a custom naming scheme for GenericModels. - """ - param_names = [display_as_type(param) for param in params] - params_component = ', '.join(param_names) - return f'{cls.__name__}[{params_component}]' - - @classmethod - def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]: - """ - Returns unbound bases of cls parameterised to given type variables - - :param typevars_map: Dictionary of type applications for binding subclasses. - Given a generic class `Model` with 2 type variables [S, T] - and a concrete model `Model[str, int]`, - the value `{S: str, T: int}` would be passed to `typevars_map`. - :return: an iterator of generic sub classes, parameterised by `typevars_map` - and other assigned parameters of `cls` - - e.g.: - ``` - class A(GenericModel, Generic[T]): - ... - - class B(A[V], Generic[V]): - ... 
- - assert A[int] in B.__parameterized_bases__({V: int}) - ``` - """ - - def build_base_model( - base_model: Type[GenericModel], mapped_types: Parametrization - ) -> Iterator[Type[GenericModel]]: - base_parameters = tuple(mapped_types[param] for param in base_model.__parameters__) - parameterized_base = base_model.__class_getitem__(base_parameters) - if parameterized_base is base_model or parameterized_base is cls: - # Avoid duplication in MRO - return - yield parameterized_base - - for base_model in cls.__bases__: - if not issubclass(base_model, GenericModel): - # not a class that can be meaningfully parameterized - continue - elif not getattr(base_model, '__parameters__', None): - # base_model is "GenericModel" (and has no __parameters__) - # or - # base_model is already concrete, and will be included transitively via cls. - continue - elif cls in _assigned_parameters: - if base_model in _assigned_parameters: - # cls is partially parameterised but not from base_model - # e.g. cls = B[S], base_model = A[S] - # B[S][int] should subclass A[int], (and will be transitively via B[int]) - # but it's not viable to consistently subclass types with arbitrary construction - # So don't attempt to include A[S][int] - continue - else: # base_model not in _assigned_parameters: - # cls is partially parameterized, base_model is original generic - # e.g. 
cls = B[str, T], base_model = B[S, T] - # Need to determine the mapping for the base_model parameters - mapped_types: Parametrization = { - key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items() - } - yield from build_base_model(base_model, mapped_types) - else: - # cls is base generic, so base_class has a distinct base - # can construct the Parameterised base model using typevars_map directly - yield from build_base_model(base_model, typevars_map) - - -def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any: - """Return type with all occurrences of `type_map` keys recursively replaced with their values. - - :param type_: Any type, class or generic alias - :param type_map: Mapping from `TypeVar` instance to concrete types. - :return: New type representing the basic structure of `type_` with all - `typevar_map` keys recursively replaced. - - >>> replace_types(Tuple[str, Union[List[str], float]], {str: int}) - Tuple[int, Union[List[int], float]] - - """ - if not type_map: - return type_ - - type_args = get_args(type_) - origin_type = get_origin(type_) - - if origin_type is Annotated: - annotated_type, *annotations = type_args - return Annotated[replace_types(annotated_type, type_map), tuple(annotations)] - - if (origin_type is ExtLiteral) or (sys.version_info >= (3, 8) and origin_type is Literal): - return type_map.get(type_, type_) - # Having type args is a good indicator that this is a typing module - # class instantiation or a generic alias of some sort. 
- if type_args: - resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args) - if all_identical(type_args, resolved_type_args): - # If all arguments are the same, there is no need to modify the - # type or create a new object at all - return type_ - if ( - origin_type is not None - and isinstance(type_, typing_base) - and not isinstance(origin_type, typing_base) - and getattr(type_, '_name', None) is not None - ): - # In python < 3.9 generic aliases don't exist so any of these like `list`, - # `type` or `collections.abc.Callable` need to be translated. - # See: https://www.python.org/dev/peps/pep-0585 - origin_type = getattr(typing, type_._name) - assert origin_type is not None - # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__. - # We also cannot use isinstance() since we have to compare types. - if sys.version_info >= (3, 10) and origin_type is types.UnionType: # noqa: E721 - return _UnionGenericAlias(origin_type, resolved_type_args) - return origin_type[resolved_type_args] - - # We handle pydantic generic models separately as they don't have the same - # semantics as "typing" classes or generic aliases - if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__: - type_args = type_.__parameters__ - resolved_type_args = tuple(replace_types(t, type_map) for t in type_args) - if all_identical(type_args, resolved_type_args): - return type_ - return type_[resolved_type_args] - - # Handle special case for typehints that can have lists as arguments. - # `typing.Callable[[int, str], int]` is an example for this. 
- if isinstance(type_, (List, list)): - resolved_list = list(replace_types(element, type_map) for element in type_) - if all_identical(type_, resolved_list): - return type_ - return resolved_list - - # For JsonWrapperValue, need to handle its inner type to allow correct parsing - # of generic Json arguments like Json[T] - if not origin_type and lenient_issubclass(type_, JsonWrapper): - type_.inner_type = replace_types(type_.inner_type, type_map) - return type_ - - # If all else fails, we try to resolve the type directly and otherwise just - # return the input with no modifications. - new_type = type_map.get(type_, type_) - # Convert string to ForwardRef - if isinstance(new_type, str): - return ForwardRef(new_type) - else: - return new_type - - -def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None: - actual = len(parameters) - expected = len(cls.__parameters__) - if actual != expected: - description = 'many' if actual > expected else 'few' - raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}') - - -DictValues: Type[Any] = {}.values().__class__ - - -def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]: - """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.""" - if isinstance(v, TypeVar): - yield v - elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel): - yield from v.__parameters__ - elif isinstance(v, (DictValues, list)): - for var in v: - yield from iter_contained_typevars(var) - else: - args = get_args(v) - for arg in args: - yield from iter_contained_typevars(arg) - - -def get_caller_frame_info() -> Tuple[Optional[str], bool]: - """ - Used inside a function to check whether it was called globally - - Will only work against non-compiled code, therefore used only in pydantic.generics - - :returns Tuple[module_name, called_globally] - """ - try: - previous_caller_frame = 
sys._getframe(2) - except ValueError as e: - raise RuntimeError('This function must be used inside another function') from e - except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it - return None, False - frame_globals = previous_caller_frame.f_globals - return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals - - -def _prepare_model_fields( - created_model: Type[GenericModel], - fields: Mapping[str, Any], - instance_type_hints: Mapping[str, type], - typevars_map: Mapping[Any, type], -) -> None: - """ - Replace DeferredType fields with concrete type hints and prepare them. - """ - - for key, field in created_model.__fields__.items(): - if key not in fields: - assert field.type_.__class__ is not DeferredType - # https://github.com/nedbat/coveragepy/issues/198 - continue # pragma: no cover - - assert field.type_.__class__ is DeferredType, field.type_.__class__ - - field_type_hint = instance_type_hints[key] - concrete_type = replace_types(field_type_hint, typevars_map) - field.type_ = concrete_type - field.outer_type_ = concrete_type - field.prepare() - created_model.__annotations__[key] = concrete_type diff --git a/lib/pydantic/v1/json.py b/lib/pydantic/v1/json.py deleted file mode 100644 index b358b850..00000000 --- a/lib/pydantic/v1/json.py +++ /dev/null @@ -1,112 +0,0 @@ -import datetime -from collections import deque -from decimal import Decimal -from enum import Enum -from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network -from pathlib import Path -from re import Pattern -from types import GeneratorType -from typing import Any, Callable, Dict, Type, Union -from uuid import UUID - -from .color import Color -from .networks import NameEmail -from .types import SecretBytes, SecretStr - -__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat' - - -def isoformat(o: Union[datetime.date, datetime.time]) -> str: - return 
o.isoformat() - - -def decimal_encoder(dec_value: Decimal) -> Union[int, float]: - """ - Encodes a Decimal as int of there's no exponent, otherwise float - - This is useful when we use ConstrainedDecimal to represent Numeric(x,0) - where a integer (but not int typed) is used. Encoding this as a float - results in failed round-tripping between encode and parse. - Our Id type is a prime example of this. - - >>> decimal_encoder(Decimal("1.0")) - 1.0 - - >>> decimal_encoder(Decimal("1")) - 1 - """ - if dec_value.as_tuple().exponent >= 0: - return int(dec_value) - else: - return float(dec_value) - - -ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { - bytes: lambda o: o.decode(), - Color: str, - datetime.date: isoformat, - datetime.datetime: isoformat, - datetime.time: isoformat, - datetime.timedelta: lambda td: td.total_seconds(), - Decimal: decimal_encoder, - Enum: lambda o: o.value, - frozenset: list, - deque: list, - GeneratorType: list, - IPv4Address: str, - IPv4Interface: str, - IPv4Network: str, - IPv6Address: str, - IPv6Interface: str, - IPv6Network: str, - NameEmail: str, - Path: str, - Pattern: lambda o: o.pattern, - SecretBytes: str, - SecretStr: str, - set: list, - UUID: str, -} - - -def pydantic_encoder(obj: Any) -> Any: - from dataclasses import asdict, is_dataclass - - from .main import BaseModel - - if isinstance(obj, BaseModel): - return obj.dict() - elif is_dataclass(obj): - return asdict(obj) - - # Check the class type and its superclasses for a matching encoder - for base in obj.__class__.__mro__[:-1]: - try: - encoder = ENCODERS_BY_TYPE[base] - except KeyError: - continue - return encoder(obj) - else: # We have exited the for loop without finding a suitable encoder - raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") - - -def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: - # Check the class type and its superclasses for a matching encoder - for base in 
obj.__class__.__mro__[:-1]: - try: - encoder = type_encoders[base] - except KeyError: - continue - - return encoder(obj) - else: # We have exited the for loop without finding a suitable encoder - return pydantic_encoder(obj) - - -def timedelta_isoformat(td: datetime.timedelta) -> str: - """ - ISO 8601 encoding for Python timedelta object. - """ - minutes, seconds = divmod(td.seconds, 60) - hours, minutes = divmod(minutes, 60) - return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' diff --git a/lib/pydantic/v1/main.py b/lib/pydantic/v1/main.py deleted file mode 100644 index 08b8af55..00000000 --- a/lib/pydantic/v1/main.py +++ /dev/null @@ -1,1107 +0,0 @@ -import warnings -from abc import ABCMeta -from copy import deepcopy -from enum import Enum -from functools import partial -from pathlib import Path -from types import FunctionType, prepare_class, resolve_bases -from typing import ( - TYPE_CHECKING, - AbstractSet, - Any, - Callable, - ClassVar, - Dict, - List, - Mapping, - Optional, - Tuple, - Type, - TypeVar, - Union, - cast, - no_type_check, - overload, -) - -from typing_extensions import dataclass_transform - -from .class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators -from .config import BaseConfig, Extra, inherit_config, prepare_config -from .error_wrappers import ErrorWrapper, ValidationError -from .errors import ConfigError, DictError, ExtraError, MissingError -from .fields import ( - MAPPING_LIKE_SHAPES, - Field, - ModelField, - ModelPrivateAttr, - PrivateAttr, - Undefined, - is_finalvar_with_default_val, -) -from .json import custom_pydantic_encoder, pydantic_encoder -from .parse import Protocol, load_file, load_str_bytes -from .schema import default_ref_template, model_schema -from .types import PyObject, StrBytes -from .typing import ( - AnyCallable, - get_args, - get_origin, - is_classvar, - is_namedtuple, - is_union, - resolve_annotations, - 
update_model_forward_refs, -) -from .utils import ( - DUNDER_ATTRIBUTES, - ROOT_KEY, - ClassAttribute, - GetterDict, - Representation, - ValueItems, - generate_model_signature, - is_valid_field, - is_valid_private_name, - lenient_issubclass, - sequence_like, - smart_deepcopy, - unique_list, - validate_field_name, -) - -if TYPE_CHECKING: - from inspect import Signature - - from .class_validators import ValidatorListDict - from .types import ModelOrDc - from .typing import ( - AbstractSetIntStr, - AnyClassMethod, - CallableGenerator, - DictAny, - DictStrAny, - MappingIntStrAny, - ReprArgs, - SetStr, - TupleGenerator, - ) - - Model = TypeVar('Model', bound='BaseModel') - -__all__ = 'BaseModel', 'create_model', 'validate_model' - -_T = TypeVar('_T') - - -def validate_custom_root_type(fields: Dict[str, ModelField]) -> None: - if len(fields) > 1: - raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields') - - -def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]: - def hash_function(self_: Any) -> int: - return hash(self_.__class__) + hash(tuple(self_.__dict__.values())) - - return hash_function if frozen else None - - -# If a field is of type `Callable`, its default value should be a function and cannot to ignored. -ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod) -# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model -UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES -# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra -# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's -# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for -# the `BaseModel` class, since that's defined immediately after the metaclass. 
-_is_base_model_class_defined = False - - -@dataclass_transform(kw_only_default=True, field_specifiers=(Field,)) -class ModelMetaclass(ABCMeta): - @no_type_check # noqa C901 - def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 - fields: Dict[str, ModelField] = {} - config = BaseConfig - validators: 'ValidatorListDict' = {} - - pre_root_validators, post_root_validators = [], [] - private_attributes: Dict[str, ModelPrivateAttr] = {} - base_private_attributes: Dict[str, ModelPrivateAttr] = {} - slots: SetStr = namespace.get('__slots__', ()) - slots = {slots} if isinstance(slots, str) else set(slots) - class_vars: SetStr = set() - hash_func: Optional[Callable[[Any], int]] = None - - for base in reversed(bases): - if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel: - fields.update(smart_deepcopy(base.__fields__)) - config = inherit_config(base.__config__, config) - validators = inherit_validators(base.__validators__, validators) - pre_root_validators += base.__pre_root_validators__ - post_root_validators += base.__post_root_validators__ - base_private_attributes.update(base.__private_attributes__) - class_vars.update(base.__class_vars__) - hash_func = base.__hash__ - - resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True) - allowed_config_kwargs: SetStr = { - key - for key in dir(config) - if not (key.startswith('__') and key.endswith('__')) # skip dunder methods and attributes - } - config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs} - config_from_namespace = namespace.get('Config') - if config_kwargs and config_from_namespace: - raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs') - config = inherit_config(config_from_namespace, config, **config_kwargs) - - validators = inherit_validators(extract_validators(namespace), validators) - vg = ValidatorGroup(validators) - - for f in fields.values(): - f.set_config(config) 
- extra_validators = vg.get_validators(f.name) - if extra_validators: - f.class_validators.update(extra_validators) - # re-run prepare to add extra validators - f.populate_validators() - - prepare_config(config, name) - - untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES - - def is_untouched(v: Any) -> bool: - return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method' - - if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'): - annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None)) - # annotation only fields need to come first in fields - for ann_name, ann_type in annotations.items(): - if is_classvar(ann_type): - class_vars.add(ann_name) - elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)): - class_vars.add(ann_name) - elif is_valid_field(ann_name): - validate_field_name(bases, ann_name) - value = namespace.get(ann_name, Undefined) - allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,) - if ( - is_untouched(value) - and ann_type != PyObject - and not any( - lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types - ) - ): - continue - fields[ann_name] = ModelField.infer( - name=ann_name, - value=value, - annotation=ann_type, - class_validators=vg.get_validators(ann_name), - config=config, - ) - elif ann_name not in namespace and config.underscore_attrs_are_private: - private_attributes[ann_name] = PrivateAttr() - - untouched_types = UNTOUCHED_TYPES + config.keep_untouched - for var_name, value in namespace.items(): - can_be_changed = var_name not in class_vars and not is_untouched(value) - if isinstance(value, ModelPrivateAttr): - if not is_valid_private_name(var_name): - raise NameError( - f'Private attributes "{var_name}" must not be a valid field name; ' - f'Use sunder or dunder names, e. g. 
"_{var_name}" or "__{var_name}__"' - ) - private_attributes[var_name] = value - elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed: - private_attributes[var_name] = PrivateAttr(default=value) - elif is_valid_field(var_name) and var_name not in annotations and can_be_changed: - validate_field_name(bases, var_name) - inferred = ModelField.infer( - name=var_name, - value=value, - annotation=annotations.get(var_name, Undefined), - class_validators=vg.get_validators(var_name), - config=config, - ) - if var_name in fields: - if lenient_issubclass(inferred.type_, fields[var_name].type_): - inferred.type_ = fields[var_name].type_ - else: - raise TypeError( - f'The type of {name}.{var_name} differs from the new default value; ' - f'if you wish to change the type of this field, please use a type annotation' - ) - fields[var_name] = inferred - - _custom_root_type = ROOT_KEY in fields - if _custom_root_type: - validate_custom_root_type(fields) - vg.check_for_unused() - if config.json_encoders: - json_encoder = partial(custom_pydantic_encoder, config.json_encoders) - else: - json_encoder = pydantic_encoder - pre_rv_new, post_rv_new = extract_root_validators(namespace) - - if hash_func is None: - hash_func = generate_hash_function(config.frozen) - - exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'} - new_namespace = { - '__config__': config, - '__fields__': fields, - '__exclude_fields__': { - name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None - } - or None, - '__include_fields__': { - name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None - } - or None, - '__validators__': vg.validators, - '__pre_root_validators__': unique_list( - pre_root_validators + pre_rv_new, - name_factory=lambda v: v.__name__, - ), - '__post_root_validators__': unique_list( - post_root_validators + post_rv_new, - name_factory=lambda 
skip_on_failure_and_v: skip_on_failure_and_v[1].__name__, - ), - '__schema_cache__': {}, - '__json_encoder__': staticmethod(json_encoder), - '__custom_root_type__': _custom_root_type, - '__private_attributes__': {**base_private_attributes, **private_attributes}, - '__slots__': slots | private_attributes.keys(), - '__hash__': hash_func, - '__class_vars__': class_vars, - **{n: v for n, v in namespace.items() if n not in exclude_from_namespace}, - } - - cls = super().__new__(mcs, name, bases, new_namespace, **kwargs) - # set __signature__ attr only for model class, but not for its instances - cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config)) - if resolve_forward_refs: - cls.__try_update_forward_refs__() - - # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 - # for attributes not in `new_namespace` (e.g. private attributes) - for name, obj in namespace.items(): - if name not in new_namespace: - set_name = getattr(obj, '__set_name__', None) - if callable(set_name): - set_name(cls, name) - - return cls - - def __instancecheck__(self, instance: Any) -> bool: - """ - Avoid calling ABC _abc_subclasscheck unless we're pretty sure. 
- - See #3829 and python/cpython#92810 - """ - return hasattr(instance, '__fields__') and super().__instancecheck__(instance) - - -object_setattr = object.__setattr__ - - -class BaseModel(Representation, metaclass=ModelMetaclass): - if TYPE_CHECKING: - # populated by the metaclass, defined here to help IDEs only - __fields__: ClassVar[Dict[str, ModelField]] = {} - __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None - __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None - __validators__: ClassVar[Dict[str, AnyCallable]] = {} - __pre_root_validators__: ClassVar[List[AnyCallable]] - __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]] - __config__: ClassVar[Type[BaseConfig]] = BaseConfig - __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x - __schema_cache__: ClassVar['DictAny'] = {} - __custom_root_type__: ClassVar[bool] = False - __signature__: ClassVar['Signature'] - __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] - __class_vars__: ClassVar[SetStr] - __fields_set__: ClassVar[SetStr] = set() - - Config = BaseConfig - __slots__ = ('__dict__', '__fields_set__') - __doc__ = '' # Null out the Representation docstring - - def __init__(__pydantic_self__, **data: Any) -> None: - """ - Create a new model by parsing and validating input data from keyword arguments. - - Raises ValidationError if the input data cannot be parsed to form a valid model. 
- """ - # Uses something other than `self` the first arg to allow "self" as a settable attribute - values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data) - if validation_error: - raise validation_error - try: - object_setattr(__pydantic_self__, '__dict__', values) - except TypeError as e: - raise TypeError( - 'Model values must be a dict; you may not have returned a dictionary from a root validator' - ) from e - object_setattr(__pydantic_self__, '__fields_set__', fields_set) - __pydantic_self__._init_private_attributes() - - @no_type_check - def __setattr__(self, name, value): # noqa: C901 (ignore complexity) - if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES: - return object_setattr(self, name, value) - - if self.__config__.extra is not Extra.allow and name not in self.__fields__: - raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') - elif not self.__config__.allow_mutation or self.__config__.frozen: - raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment') - elif name in self.__fields__ and self.__fields__[name].final: - raise TypeError( - f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment' - ) - elif self.__config__.validate_assignment: - new_values = {**self.__dict__, name: value} - - for validator in self.__pre_root_validators__: - try: - new_values = validator(self.__class__, new_values) - except (ValueError, TypeError, AssertionError) as exc: - raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__) - - known_field = self.__fields__.get(name, None) - if known_field: - # We want to - # - make sure validators are called without the current value for this field inside `values` - # - keep other values (e.g. 
submodels) untouched (using `BaseModel.dict()` will change them into dicts) - # - keep the order of the fields - if not known_field.field_info.allow_mutation: - raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned') - dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name} - value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__) - if error_: - raise ValidationError([error_], self.__class__) - else: - new_values[name] = value - - errors = [] - for skip_on_failure, validator in self.__post_root_validators__: - if skip_on_failure and errors: - continue - try: - new_values = validator(self.__class__, new_values) - except (ValueError, TypeError, AssertionError) as exc: - errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) - if errors: - raise ValidationError(errors, self.__class__) - - # update the whole __dict__ as other values than just `value` - # may be changed (e.g. with `root_validator`) - object_setattr(self, '__dict__', new_values) - else: - self.__dict__[name] = value - - self.__fields_set__.add(name) - - def __getstate__(self) -> 'DictAny': - private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__) - return { - '__dict__': self.__dict__, - '__fields_set__': self.__fields_set__, - '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined}, - } - - def __setstate__(self, state: 'DictAny') -> None: - object_setattr(self, '__dict__', state['__dict__']) - object_setattr(self, '__fields_set__', state['__fields_set__']) - for name, value in state.get('__private_attribute_values__', {}).items(): - object_setattr(self, name, value) - - def _init_private_attributes(self) -> None: - for name, private_attr in self.__private_attributes__.items(): - default = private_attr.get_default() - if default is not Undefined: - object_setattr(self, name, default) - - def dict( - self, - *, - include: 
Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - by_alias: bool = False, - skip_defaults: Optional[bool] = None, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - ) -> 'DictStrAny': - """ - Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. - - """ - if skip_defaults is not None: - warnings.warn( - f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"', - DeprecationWarning, - ) - exclude_unset = skip_defaults - - return dict( - self._iter( - to_dict=True, - by_alias=by_alias, - include=include, - exclude=exclude, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - ) - - def json( - self, - *, - include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - by_alias: bool = False, - skip_defaults: Optional[bool] = None, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - encoder: Optional[Callable[[Any], Any]] = None, - models_as_dict: bool = True, - **dumps_kwargs: Any, - ) -> str: - """ - Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`. - - `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`. 
- """ - if skip_defaults is not None: - warnings.warn( - f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"', - DeprecationWarning, - ) - exclude_unset = skip_defaults - encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__) - - # We don't directly call `self.dict()`, which does exactly this with `to_dict=True` - # because we want to be able to keep raw `BaseModel` instances and not as `dict`. - # This allows users to write custom JSON encoders for given `BaseModel` classes. - data = dict( - self._iter( - to_dict=models_as_dict, - by_alias=by_alias, - include=include, - exclude=exclude, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - ) - if self.__custom_root_type__: - data = data[ROOT_KEY] - return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs) - - @classmethod - def _enforce_dict_if_root(cls, obj: Any) -> Any: - if cls.__custom_root_type__ and ( - not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY}) - and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY}) - or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES - ): - return {ROOT_KEY: obj} - else: - return obj - - @classmethod - def parse_obj(cls: Type['Model'], obj: Any) -> 'Model': - obj = cls._enforce_dict_if_root(obj) - if not isinstance(obj, dict): - try: - obj = dict(obj) - except (TypeError, ValueError) as e: - exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}') - raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e - return cls(**obj) - - @classmethod - def parse_raw( - cls: Type['Model'], - b: StrBytes, - *, - content_type: str = None, - encoding: str = 'utf8', - proto: Protocol = None, - allow_pickle: bool = False, - ) -> 'Model': - try: - obj = load_str_bytes( - b, - proto=proto, - content_type=content_type, - encoding=encoding, - allow_pickle=allow_pickle, - json_loads=cls.__config__.json_loads, - 
) - except (ValueError, TypeError, UnicodeDecodeError) as e: - raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls) - return cls.parse_obj(obj) - - @classmethod - def parse_file( - cls: Type['Model'], - path: Union[str, Path], - *, - content_type: str = None, - encoding: str = 'utf8', - proto: Protocol = None, - allow_pickle: bool = False, - ) -> 'Model': - obj = load_file( - path, - proto=proto, - content_type=content_type, - encoding=encoding, - allow_pickle=allow_pickle, - json_loads=cls.__config__.json_loads, - ) - return cls.parse_obj(obj) - - @classmethod - def from_orm(cls: Type['Model'], obj: Any) -> 'Model': - if not cls.__config__.orm_mode: - raise ConfigError('You must have the config attribute orm_mode=True to use from_orm') - obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj) - m = cls.__new__(cls) - values, fields_set, validation_error = validate_model(cls, obj) - if validation_error: - raise validation_error - object_setattr(m, '__dict__', values) - object_setattr(m, '__fields_set__', fields_set) - m._init_private_attributes() - return m - - @classmethod - def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model': - """ - Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. - Default values are respected, but no other validation is performed. 
- Behaves as if `Config.extra = 'allow'` was set since it adds all passed values - """ - m = cls.__new__(cls) - fields_values: Dict[str, Any] = {} - for name, field in cls.__fields__.items(): - if field.alt_alias and field.alias in values: - fields_values[name] = values[field.alias] - elif name in values: - fields_values[name] = values[name] - elif not field.required: - fields_values[name] = field.get_default() - fields_values.update(values) - object_setattr(m, '__dict__', fields_values) - if _fields_set is None: - _fields_set = set(values.keys()) - object_setattr(m, '__fields_set__', _fields_set) - m._init_private_attributes() - return m - - def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model': - if deep: - # chances of having empty dict here are quite low for using smart_deepcopy - values = deepcopy(values) - - cls = self.__class__ - m = cls.__new__(cls) - object_setattr(m, '__dict__', values) - object_setattr(m, '__fields_set__', fields_set) - for name in self.__private_attributes__: - value = getattr(self, name, Undefined) - if value is not Undefined: - if deep: - value = deepcopy(value) - object_setattr(m, name, value) - - return m - - def copy( - self: 'Model', - *, - include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - update: Optional['DictStrAny'] = None, - deep: bool = False, - ) -> 'Model': - """ - Duplicate a model, optionally choose which fields to include, exclude and change. - - :param include: fields to include in new model - :param exclude: fields to exclude from new model, as with values this takes precedence over include - :param update: values to change/add in the new model. 
Note: the data is not validated before creating - the new model: you should trust this data - :param deep: set to `True` to make a deep copy of the model - :return: new model instance - """ - - values = dict( - self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False), - **(update or {}), - ) - - # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg - if update: - fields_set = self.__fields_set__ | update.keys() - else: - fields_set = set(self.__fields_set__) - - return self._copy_and_set_values(values, fields_set, deep=deep) - - @classmethod - def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny': - cached = cls.__schema_cache__.get((by_alias, ref_template)) - if cached is not None: - return cached - s = model_schema(cls, by_alias=by_alias, ref_template=ref_template) - cls.__schema_cache__[(by_alias, ref_template)] = s - return s - - @classmethod - def schema_json( - cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any - ) -> str: - from .json import pydantic_encoder - - return cls.__config__.json_dumps( - cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs - ) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - def validate(cls: Type['Model'], value: Any) -> 'Model': - if isinstance(value, cls): - copy_on_model_validation = cls.__config__.copy_on_model_validation - # whether to deep or shallow copy the model on validation, None means do not copy - deep_copy: Optional[bool] = None - if copy_on_model_validation not in {'deep', 'shallow', 'none'}: - # Warn about deprecated behavior - warnings.warn( - "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning - ) - if copy_on_model_validation: - deep_copy = False - - if copy_on_model_validation == 'shallow': - # shallow 
copy - deep_copy = False - elif copy_on_model_validation == 'deep': - # deep copy - deep_copy = True - - if deep_copy is None: - return value - else: - return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy) - - value = cls._enforce_dict_if_root(value) - - if isinstance(value, dict): - return cls(**value) - elif cls.__config__.orm_mode: - return cls.from_orm(value) - else: - try: - value_as_dict = dict(value) - except (TypeError, ValueError) as e: - raise DictError() from e - return cls(**value_as_dict) - - @classmethod - def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict: - if isinstance(obj, GetterDict): - return obj - return cls.__config__.getter_dict(obj) - - @classmethod - @no_type_check - def _get_value( - cls, - v: Any, - to_dict: bool, - by_alias: bool, - include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], - exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], - exclude_unset: bool, - exclude_defaults: bool, - exclude_none: bool, - ) -> Any: - if isinstance(v, BaseModel): - if to_dict: - v_dict = v.dict( - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - include=include, - exclude=exclude, - exclude_none=exclude_none, - ) - if ROOT_KEY in v_dict: - return v_dict[ROOT_KEY] - return v_dict - else: - return v.copy(include=include, exclude=exclude) - - value_exclude = ValueItems(v, exclude) if exclude else None - value_include = ValueItems(v, include) if include else None - - if isinstance(v, dict): - return { - k_: cls._get_value( - v_, - to_dict=to_dict, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - include=value_include and value_include.for_element(k_), - exclude=value_exclude and value_exclude.for_element(k_), - exclude_none=exclude_none, - ) - for k_, v_ in v.items() - if (not value_exclude or not value_exclude.is_excluded(k_)) - and (not value_include or value_include.is_included(k_)) - } - - elif 
sequence_like(v): - seq_args = ( - cls._get_value( - v_, - to_dict=to_dict, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - include=value_include and value_include.for_element(i), - exclude=value_exclude and value_exclude.for_element(i), - exclude_none=exclude_none, - ) - for i, v_ in enumerate(v) - if (not value_exclude or not value_exclude.is_excluded(i)) - and (not value_include or value_include.is_included(i)) - ) - - return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args) - - elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False): - return v.value - - else: - return v - - @classmethod - def __try_update_forward_refs__(cls, **localns: Any) -> None: - """ - Same as update_forward_refs but will not raise exception - when forward references are not defined. - """ - update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,)) - - @classmethod - def update_forward_refs(cls, **localns: Any) -> None: - """ - Try to update ForwardRefs on fields based on this Model, globalns and localns. - """ - update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns) - - def __iter__(self) -> 'TupleGenerator': - """ - so `dict(model)` works - """ - yield from self.__dict__.items() - - def _iter( - self, - to_dict: bool = False, - by_alias: bool = False, - include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - ) -> 'TupleGenerator': - # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. - # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. 
- if exclude is not None or self.__exclude_fields__ is not None: - exclude = ValueItems.merge(self.__exclude_fields__, exclude) - - if include is not None or self.__include_fields__ is not None: - include = ValueItems.merge(self.__include_fields__, include, intersect=True) - - allowed_keys = self._calculate_keys( - include=include, exclude=exclude, exclude_unset=exclude_unset # type: ignore - ) - if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): - # huge boost for plain _iter() - yield from self.__dict__.items() - return - - value_exclude = ValueItems(self, exclude) if exclude is not None else None - value_include = ValueItems(self, include) if include is not None else None - - for field_key, v in self.__dict__.items(): - if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): - continue - - if exclude_defaults: - model_field = self.__fields__.get(field_key) - if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v: - continue - - if by_alias and field_key in self.__fields__: - dict_key = self.__fields__[field_key].alias - else: - dict_key = field_key - - if to_dict or value_include or value_exclude: - v = self._get_value( - v, - to_dict=to_dict, - by_alias=by_alias, - include=value_include and value_include.for_element(field_key), - exclude=value_exclude and value_exclude.for_element(field_key), - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - yield dict_key, v - - def _calculate_keys( - self, - include: Optional['MappingIntStrAny'], - exclude: Optional['MappingIntStrAny'], - exclude_unset: bool, - update: Optional['DictStrAny'] = None, - ) -> Optional[AbstractSet[str]]: - if include is None and exclude is None and exclude_unset is False: - return None - - keys: AbstractSet[str] - if exclude_unset: - keys = self.__fields_set__.copy() - else: - keys = self.__dict__.keys() - 
- if include is not None: - keys &= include.keys() - - if update: - keys -= update.keys() - - if exclude: - keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)} - - return keys - - def __eq__(self, other: Any) -> bool: - if isinstance(other, BaseModel): - return self.dict() == other.dict() - else: - return self.dict() == other - - def __repr_args__(self) -> 'ReprArgs': - return [ - (k, v) - for k, v in self.__dict__.items() - if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr) - ] - - -_is_base_model_class_defined = True - - -@overload -def create_model( - __model_name: str, - *, - __config__: Optional[Type[BaseConfig]] = None, - __base__: None = None, - __module__: str = __name__, - __validators__: Dict[str, 'AnyClassMethod'] = None, - __cls_kwargs__: Dict[str, Any] = None, - **field_definitions: Any, -) -> Type['BaseModel']: - ... - - -@overload -def create_model( - __model_name: str, - *, - __config__: Optional[Type[BaseConfig]] = None, - __base__: Union[Type['Model'], Tuple[Type['Model'], ...]], - __module__: str = __name__, - __validators__: Dict[str, 'AnyClassMethod'] = None, - __cls_kwargs__: Dict[str, Any] = None, - **field_definitions: Any, -) -> Type['Model']: - ... - - -def create_model( - __model_name: str, - *, - __config__: Optional[Type[BaseConfig]] = None, - __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None, - __module__: str = __name__, - __validators__: Dict[str, 'AnyClassMethod'] = None, - __cls_kwargs__: Dict[str, Any] = None, - __slots__: Optional[Tuple[str, ...]] = None, - **field_definitions: Any, -) -> Type['Model']: - """ - Dynamically create a model. 
- :param __model_name: name of the created model - :param __config__: config class to use for the new model - :param __base__: base class for the new model to inherit from - :param __module__: module of the created model - :param __validators__: a dict of method names and @validator class methods - :param __cls_kwargs__: a dict for class creation - :param __slots__: Deprecated, `__slots__` should not be passed to `create_model` - :param field_definitions: fields of the model (or extra fields if a base is supplied) - in the format `=(, )` or `=, e.g. - `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format - `=` or `=(, )`, e.g. - `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or - `foo=(str, FieldInfo(title='Foo'))` - """ - if __slots__ is not None: - # __slots__ will be ignored from here on - warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning) - - if __base__ is not None: - if __config__ is not None: - raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together') - if not isinstance(__base__, tuple): - __base__ = (__base__,) - else: - __base__ = (cast(Type['Model'], BaseModel),) - - __cls_kwargs__ = __cls_kwargs__ or {} - - fields = {} - annotations = {} - - for f_name, f_def in field_definitions.items(): - if not is_valid_field(f_name): - warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning) - if isinstance(f_def, tuple): - try: - f_annotation, f_value = f_def - except ValueError as e: - raise ConfigError( - 'field definitions should either be a tuple of (, ) or just a ' - 'default value, unfortunately this means tuples as ' - 'default values are not allowed' - ) from e - else: - f_annotation, f_value = None, f_def - - if f_annotation: - annotations[f_name] = f_annotation - fields[f_name] = f_value - - namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__} - if __validators__: - 
namespace.update(__validators__) - namespace.update(fields) - if __config__: - namespace['Config'] = inherit_config(__config__, BaseConfig) - resolved_bases = resolve_bases(__base__) - meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__) - if resolved_bases is not __base__: - ns['__orig_bases__'] = __base__ - namespace.update(ns) - return meta(__model_name, resolved_bases, namespace, **kwds) - - -_missing = object() - - -def validate_model( # noqa: C901 (ignore complexity) - model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None -) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]: - """ - validate data against a model. - """ - values = {} - errors = [] - # input_data names, possibly alias - names_used = set() - # field names, never aliases - fields_set = set() - config = model.__config__ - check_extra = config.extra is not Extra.ignore - cls_ = cls or model - - for validator in model.__pre_root_validators__: - try: - input_data = validator(cls_, input_data) - except (ValueError, TypeError, AssertionError) as exc: - return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_) - - for name, field in model.__fields__.items(): - value = input_data.get(field.alias, _missing) - using_name = False - if value is _missing and config.allow_population_by_field_name and field.alt_alias: - value = input_data.get(field.name, _missing) - using_name = True - - if value is _missing: - if field.required: - errors.append(ErrorWrapper(MissingError(), loc=field.alias)) - continue - - value = field.get_default() - - if not config.validate_all and not field.validate_always: - values[name] = value - continue - else: - fields_set.add(name) - if check_extra: - names_used.add(field.name if using_name else field.alias) - - v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_) - if isinstance(errors_, ErrorWrapper): - errors.append(errors_) - elif isinstance(errors_, list): - errors.extend(errors_) - 
else: - values[name] = v_ - - if check_extra: - if isinstance(input_data, GetterDict): - extra = input_data.extra_keys() - names_used - else: - extra = input_data.keys() - names_used - if extra: - fields_set |= extra - if config.extra is Extra.allow: - for f in extra: - values[f] = input_data[f] - else: - for f in sorted(extra): - errors.append(ErrorWrapper(ExtraError(), loc=f)) - - for skip_on_failure, validator in model.__post_root_validators__: - if skip_on_failure and errors: - continue - try: - values = validator(cls_, values) - except (ValueError, TypeError, AssertionError) as exc: - errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) - - if errors: - return values, fields_set, ValidationError(errors, cls_) - else: - return values, fields_set, None diff --git a/lib/pydantic/v1/mypy.py b/lib/pydantic/v1/mypy.py deleted file mode 100644 index 1d6d5ae2..00000000 --- a/lib/pydantic/v1/mypy.py +++ /dev/null @@ -1,944 +0,0 @@ -import sys -from configparser import ConfigParser -from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union - -from mypy.errorcodes import ErrorCode -from mypy.nodes import ( - ARG_NAMED, - ARG_NAMED_OPT, - ARG_OPT, - ARG_POS, - ARG_STAR2, - MDEF, - Argument, - AssignmentStmt, - Block, - CallExpr, - ClassDef, - Context, - Decorator, - EllipsisExpr, - FuncBase, - FuncDef, - JsonDict, - MemberExpr, - NameExpr, - PassStmt, - PlaceholderNode, - RefExpr, - StrExpr, - SymbolNode, - SymbolTableNode, - TempNode, - TypeInfo, - TypeVarExpr, - Var, -) -from mypy.options import Options -from mypy.plugin import ( - CheckerPluginInterface, - ClassDefContext, - FunctionContext, - MethodContext, - Plugin, - ReportConfigContext, - SemanticAnalyzerPluginInterface, -) -from mypy.plugins import dataclasses -from mypy.semanal import set_callable_name # type: ignore -from mypy.server.trigger import make_wildcard_trigger -from mypy.types import ( - AnyType, - CallableType, - Instance, - NoneType, - Overloaded, - ProperType, - Type, - 
TypeOfAny, - TypeType, - TypeVarType, - UnionType, - get_proper_type, -) -from mypy.typevars import fill_typevars -from mypy.util import get_unique_redefinition_name -from mypy.version import __version__ as mypy_version - -from pydantic.utils import is_valid_field - -try: - from mypy.types import TypeVarDef # type: ignore[attr-defined] -except ImportError: # pragma: no cover - # Backward-compatible with TypeVarDef from Mypy 0.910. - from mypy.types import TypeVarType as TypeVarDef - -CONFIGFILE_KEY = 'pydantic-mypy' -METADATA_KEY = 'pydantic-mypy-metadata' -_NAMESPACE = __name__[:-5] # 'pydantic' in 1.10.X, 'pydantic.v1' in v2.X -BASEMODEL_FULLNAME = f'{_NAMESPACE}.main.BaseModel' -BASESETTINGS_FULLNAME = f'{_NAMESPACE}.env_settings.BaseSettings' -MODEL_METACLASS_FULLNAME = f'{_NAMESPACE}.main.ModelMetaclass' -FIELD_FULLNAME = f'{_NAMESPACE}.fields.Field' -DATACLASS_FULLNAME = f'{_NAMESPACE}.dataclasses.dataclass' - - -def parse_mypy_version(version: str) -> Tuple[int, ...]: - return tuple(map(int, version.partition('+')[0].split('.'))) - - -MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version) -BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__' - -# Increment version if plugin changes and mypy caches should be invalidated -__version__ = 2 - - -def plugin(version: str) -> 'TypingType[Plugin]': - """ - `version` is the mypy version string - - We might want to use this to print a warning if the mypy version being used is - newer, or especially older, than we expect (or need). 
- """ - return PydanticPlugin - - -class PydanticPlugin(Plugin): - def __init__(self, options: Options) -> None: - self.plugin_config = PydanticPluginConfig(options) - self._plugin_data = self.plugin_config.to_data() - super().__init__(options) - - def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]': - sym = self.lookup_fully_qualified(fullname) - if sym and isinstance(sym.node, TypeInfo): # pragma: no branch - # No branching may occur if the mypy cache has not been cleared - if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro): - return self._pydantic_model_class_maker_callback - return None - - def get_metaclass_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: - if fullname == MODEL_METACLASS_FULLNAME: - return self._pydantic_model_metaclass_marker_callback - return None - - def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]': - sym = self.lookup_fully_qualified(fullname) - if sym and sym.fullname == FIELD_FULLNAME: - return self._pydantic_field_callback - return None - - def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]: - if fullname.endswith('.from_orm'): - return from_orm_callback - return None - - def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: - """Mark pydantic.dataclasses as dataclass. - - Mypy version 1.1.1 added support for `@dataclass_transform` decorator. - """ - if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1): - return dataclasses.dataclass_class_maker_callback # type: ignore[return-value] - return None - - def report_config_data(self, ctx: ReportConfigContext) -> Dict[str, Any]: - """Return all plugin config data. - - Used by mypy to determine if cache needs to be discarded. 
- """ - return self._plugin_data - - def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None: - transformer = PydanticModelTransformer(ctx, self.plugin_config) - transformer.transform() - - def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None: - """Reset dataclass_transform_spec attribute of ModelMetaclass. - - Let the plugin handle it. This behavior can be disabled - if 'debug_dataclass_transform' is set to True', for testing purposes. - """ - if self.plugin_config.debug_dataclass_transform: - return - info_metaclass = ctx.cls.info.declared_metaclass - assert info_metaclass, "callback not passed from 'get_metaclass_hook'" - if getattr(info_metaclass.type, 'dataclass_transform_spec', None): - info_metaclass.type.dataclass_transform_spec = None # type: ignore[attr-defined] - - def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type': - """ - Extract the type of the `default` argument from the Field function, and use it as the return type. - - In particular: - * Check whether the default and default_factory argument is specified. - * Output an error if both are specified. - * Retrieve the type of the argument which is specified, and use it as return type for the function. 
- """ - default_any_type = ctx.default_return_type - - assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()' - assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()' - default_args = ctx.args[0] - default_factory_args = ctx.args[1] - - if default_args and default_factory_args: - error_default_and_default_factory_specified(ctx.api, ctx.context) - return default_any_type - - if default_args: - default_type = ctx.arg_types[0][0] - default_arg = default_args[0] - - # Fallback to default Any type if the field is required - if not isinstance(default_arg, EllipsisExpr): - return default_type - - elif default_factory_args: - default_factory_type = ctx.arg_types[1][0] - - # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter - # Pydantic calls the default factory without any argument, so we retrieve the first item - if isinstance(default_factory_type, Overloaded): - if MYPY_VERSION_TUPLE > (0, 910): - default_factory_type = default_factory_type.items[0] - else: - # Mypy0.910 exposes the items of overloaded types in a function - default_factory_type = default_factory_type.items()[0] # type: ignore[operator] - - if isinstance(default_factory_type, CallableType): - ret_type = default_factory_type.ret_type - # mypy doesn't think `ret_type` has `args`, you'd think mypy should know, - # add this check in case it varies by version - args = getattr(ret_type, 'args', None) - if args: - if all(isinstance(arg, TypeVarType) for arg in args): - # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any` - ret_type.args = tuple(default_any_type for _ in args) # type: ignore[attr-defined] - return ret_type - - return default_any_type - - -class PydanticPluginConfig: - __slots__ = ( - 'init_forbid_extra', - 'init_typed', - 'warn_required_dynamic_aliases', - 'warn_untyped_fields', - 'debug_dataclass_transform', - ) 
- init_forbid_extra: bool - init_typed: bool - warn_required_dynamic_aliases: bool - warn_untyped_fields: bool - debug_dataclass_transform: bool # undocumented - - def __init__(self, options: Options) -> None: - if options.config_file is None: # pragma: no cover - return - - toml_config = parse_toml(options.config_file) - if toml_config is not None: - config = toml_config.get('tool', {}).get('pydantic-mypy', {}) - for key in self.__slots__: - setting = config.get(key, False) - if not isinstance(setting, bool): - raise ValueError(f'Configuration value must be a boolean for key: {key}') - setattr(self, key, setting) - else: - plugin_config = ConfigParser() - plugin_config.read(options.config_file) - for key in self.__slots__: - setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False) - setattr(self, key, setting) - - def to_data(self) -> Dict[str, Any]: - return {key: getattr(self, key) for key in self.__slots__} - - -def from_orm_callback(ctx: MethodContext) -> Type: - """ - Raise an error if orm_mode is not enabled - """ - model_type: Instance - ctx_type = ctx.type - if isinstance(ctx_type, TypeType): - ctx_type = ctx_type.item - if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance): - model_type = ctx_type.ret_type # called on the class - elif isinstance(ctx_type, Instance): - model_type = ctx_type # called on an instance (unusual, but still valid) - else: # pragma: no cover - detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})' - error_unexpected_behavior(detail, ctx.api, ctx.context) - return ctx.default_return_type - pydantic_metadata = model_type.type.metadata.get(METADATA_KEY) - if pydantic_metadata is None: - return ctx.default_return_type - orm_mode = pydantic_metadata.get('config', {}).get('orm_mode') - if orm_mode is not True: - error_from_orm(get_name(model_type.type), ctx.api, ctx.context) - return ctx.default_return_type - - -class PydanticModelTransformer: - tracked_config_fields: Set[str] 
= { - 'extra', - 'allow_mutation', - 'frozen', - 'orm_mode', - 'allow_population_by_field_name', - 'alias_generator', - } - - def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None: - self._ctx = ctx - self.plugin_config = plugin_config - - def transform(self) -> None: - """ - Configures the BaseModel subclass according to the plugin settings. - - In particular: - * determines the model config and fields, - * adds a fields-aware signature for the initializer and construct methods - * freezes the class if allow_mutation = False or frozen = True - * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses - """ - ctx = self._ctx - info = ctx.cls.info - - self.adjust_validator_signatures() - config = self.collect_config() - fields = self.collect_fields(config) - is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1]) - self.add_initializer(fields, config, is_settings) - self.add_construct_method(fields) - self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True) - info.metadata[METADATA_KEY] = { - 'fields': {field.name: field.serialize() for field in fields}, - 'config': config.set_values_dict(), - } - - def adjust_validator_signatures(self) -> None: - """When we decorate a function `f` with `pydantic.validator(...), mypy sees - `f` as a regular method taking a `self` instance, even though pydantic - internally wraps `f` with `classmethod` if necessary. - - Teach mypy this by marking any function whose outermost decorator is a - `validator()` call as a classmethod. 
- """ - for name, sym in self._ctx.cls.info.names.items(): - if isinstance(sym.node, Decorator): - first_dec = sym.node.original_decorators[0] - if ( - isinstance(first_dec, CallExpr) - and isinstance(first_dec.callee, NameExpr) - and first_dec.callee.fullname == f'{_NAMESPACE}.class_validators.validator' - ): - sym.node.func.is_class = True - - def collect_config(self) -> 'ModelConfigData': - """ - Collects the values of the config attributes that are used by the plugin, accounting for parent classes. - """ - ctx = self._ctx - cls = ctx.cls - config = ModelConfigData() - for stmt in cls.defs.body: - if not isinstance(stmt, ClassDef): - continue - if stmt.name == 'Config': - for substmt in stmt.defs.body: - if not isinstance(substmt, AssignmentStmt): - continue - config.update(self.get_config_update(substmt)) - if ( - config.has_alias_generator - and not config.allow_population_by_field_name - and self.plugin_config.warn_required_dynamic_aliases - ): - error_required_dynamic_aliases(ctx.api, stmt) - for info in cls.info.mro[1:]: # 0 is the current class - if METADATA_KEY not in info.metadata: - continue - - # Each class depends on the set of fields in its ancestors - ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) - for name, value in info.metadata[METADATA_KEY]['config'].items(): - config.setdefault(name, value) - return config - - def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']: - """ - Collects the fields for the model, accounting for parent classes - """ - # First, collect fields belonging to the current class. 
- ctx = self._ctx - cls = self._ctx.cls - fields = [] # type: List[PydanticModelField] - known_fields = set() # type: Set[str] - for stmt in cls.defs.body: - if not isinstance(stmt, AssignmentStmt): # `and stmt.new_syntax` to require annotation - continue - - lhs = stmt.lvalues[0] - if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name): - continue - - if not stmt.new_syntax and self.plugin_config.warn_untyped_fields: - error_untyped_fields(ctx.api, stmt) - - # if lhs.name == '__config__': # BaseConfig not well handled; I'm not sure why yet - # continue - - sym = cls.info.names.get(lhs.name) - if sym is None: # pragma: no cover - # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) - # This is the same logic used in the dataclasses plugin - continue - - node = sym.node - if isinstance(node, PlaceholderNode): # pragma: no cover - # See the PlaceholderNode docstring for more detail about how this can occur - # Basically, it is an edge case when dealing with complex import logic - # This is the same logic used in the dataclasses plugin - continue - if not isinstance(node, Var): # pragma: no cover - # Don't know if this edge case still happens with the `is_valid_field` check above - # but better safe than sorry - continue - - # x: ClassVar[int] is ignored by dataclasses. 
- if node.is_classvar: - continue - - is_required = self.get_is_required(cls, stmt, lhs) - alias, has_dynamic_alias = self.get_alias_info(stmt) - if ( - has_dynamic_alias - and not model_config.allow_population_by_field_name - and self.plugin_config.warn_required_dynamic_aliases - ): - error_required_dynamic_aliases(ctx.api, stmt) - fields.append( - PydanticModelField( - name=lhs.name, - is_required=is_required, - alias=alias, - has_dynamic_alias=has_dynamic_alias, - line=stmt.line, - column=stmt.column, - ) - ) - known_fields.add(lhs.name) - all_fields = fields.copy() - for info in cls.info.mro[1:]: # 0 is the current class, -2 is BaseModel, -1 is object - if METADATA_KEY not in info.metadata: - continue - - superclass_fields = [] - # Each class depends on the set of fields in its ancestors - ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) - - for name, data in info.metadata[METADATA_KEY]['fields'].items(): - if name not in known_fields: - field = PydanticModelField.deserialize(info, data) - known_fields.add(name) - superclass_fields.append(field) - else: - (field,) = (a for a in all_fields if a.name == name) - all_fields.remove(field) - superclass_fields.append(field) - all_fields = superclass_fields + all_fields - return all_fields - - def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None: - """ - Adds a fields-aware `__init__` method to the class. - - The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. 
- """ - ctx = self._ctx - typed = self.plugin_config.init_typed - use_alias = config.allow_population_by_field_name is not True - force_all_optional = is_settings or bool( - config.has_alias_generator and not config.allow_population_by_field_name - ) - init_arguments = self.get_field_arguments( - fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias - ) - if not self.should_init_forbid_extra(fields, config): - var = Var('kwargs') - init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) - - if '__init__' not in ctx.cls.info.names: - add_method(ctx, '__init__', init_arguments, NoneType()) - - def add_construct_method(self, fields: List['PydanticModelField']) -> None: - """ - Adds a fully typed `construct` classmethod to the class. - - Similar to the fields-aware __init__ method, but always uses the field names (not aliases), - and does not treat settings fields as optional. - """ - ctx = self._ctx - set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')]) - optional_set_str = UnionType([set_str, NoneType()]) - fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) - construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False) - construct_arguments = [fields_set_argument] + construct_arguments - - obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object') - self_tvar_name = '_PydanticBaseModel' # Make sure it does not conflict with other names in the class - tvar_fullname = ctx.cls.fullname + '.' 
+ self_tvar_name - if MYPY_VERSION_TUPLE >= (1, 4): - tvd = TypeVarType( - self_tvar_name, - tvar_fullname, - -1, - [], - obj_type, - AnyType(TypeOfAny.from_omitted_generics), # type: ignore[arg-type] - ) - self_tvar_expr = TypeVarExpr( - self_tvar_name, - tvar_fullname, - [], - obj_type, - AnyType(TypeOfAny.from_omitted_generics), # type: ignore[arg-type] - ) - else: - tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type) - self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type) - ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr) - - # Backward-compatible with TypeVarDef from Mypy 0.910. - if isinstance(tvd, TypeVarType): - self_type = tvd - else: - self_type = TypeVarType(tvd) - - add_method( - ctx, - 'construct', - construct_arguments, - return_type=self_type, - self_type=self_type, - tvar_def=tvd, - is_classmethod=True, - ) - - def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None: - """ - Marks all fields as properties so that attempts to set them trigger mypy errors. - - This is the same approach used by the attrs and dataclasses plugins. - """ - ctx = self._ctx - info = ctx.cls.info - for field in fields: - sym_node = info.names.get(field.name) - if sym_node is not None: - var = sym_node.node - if isinstance(var, Var): - var.is_property = frozen - elif isinstance(var, PlaceholderNode) and not ctx.api.final_iteration: - # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage - ctx.api.defer() - else: # pragma: no cover - # I don't know whether it's possible to hit this branch, but I've added it for safety - try: - var_str = str(var) - except TypeError: - # This happens for PlaceholderNode; perhaps it will happen for other types in the future.. 
- var_str = repr(var) - detail = f'sym_node.node: {var_str} (of type {var.__class__})' - error_unexpected_behavior(detail, ctx.api, ctx.cls) - else: - var = field.to_var(info, use_alias=False) - var.info = info - var.is_property = frozen - var._fullname = get_fullname(info) + '.' + get_name(var) - info.names[get_name(var)] = SymbolTableNode(MDEF, var) - - def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']: - """ - Determines the config update due to a single statement in the Config class definition. - - Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) - """ - lhs = substmt.lvalues[0] - if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields): - return None - if lhs.name == 'extra': - if isinstance(substmt.rvalue, StrExpr): - forbid_extra = substmt.rvalue.value == 'forbid' - elif isinstance(substmt.rvalue, MemberExpr): - forbid_extra = substmt.rvalue.name == 'forbid' - else: - error_invalid_config_value(lhs.name, self._ctx.api, substmt) - return None - return ModelConfigData(forbid_extra=forbid_extra) - if lhs.name == 'alias_generator': - has_alias_generator = True - if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None': - has_alias_generator = False - return ModelConfigData(has_alias_generator=has_alias_generator) - if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'): - return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'}) - error_invalid_config_value(lhs.name, self._ctx.api, substmt) - return None - - @staticmethod - def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool: - """ - Returns a boolean indicating whether the field defined in `stmt` is a required field. 
- """ - expr = stmt.rvalue - if isinstance(expr, TempNode): - # TempNode means annotation-only, so only non-required if Optional - value_type = get_proper_type(cls.info[lhs.name].type) - return not PydanticModelTransformer.type_has_implicit_default(value_type) - if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: - # The "default value" is a call to `Field`; at this point, the field is - # only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`) or if default_factory - # is specified. - for arg, name in zip(expr.args, expr.arg_names): - # If name is None, then this arg is the default because it is the only positional argument. - if name is None or name == 'default': - return arg.__class__ is EllipsisExpr - if name == 'default_factory': - return False - # In this case, default and default_factory are not specified, so we need to look at the annotation - value_type = get_proper_type(cls.info[lhs.name].type) - return not PydanticModelTransformer.type_has_implicit_default(value_type) - # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) - return isinstance(expr, EllipsisExpr) - - @staticmethod - def type_has_implicit_default(type_: Optional[ProperType]) -> bool: - """ - Returns True if the passed type will be given an implicit default value. - - In pydantic v1, this is the case for Optional types and Any (with default value None). - """ - if isinstance(type_, AnyType): - # Annotated as Any - return True - if isinstance(type_, UnionType) and any( - isinstance(item, NoneType) or isinstance(item, AnyType) for item in type_.items - ): - # Annotated as Optional, or otherwise having NoneType or AnyType in the union - return True - return False - - @staticmethod - def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]: - """ - Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. 
- - `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. - If `has_dynamic_alias` is True, `alias` will be None. - """ - expr = stmt.rvalue - if isinstance(expr, TempNode): - # TempNode means annotation-only - return None, False - - if not ( - isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME - ): - # Assigned value is not a call to pydantic.fields.Field - return None, False - - for i, arg_name in enumerate(expr.arg_names): - if arg_name != 'alias': - continue - arg = expr.args[i] - if isinstance(arg, StrExpr): - return arg.value, False - else: - return None, True - return None, False - - def get_field_arguments( - self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool - ) -> List[Argument]: - """ - Helper function used during the construction of the `__init__` and `construct` method signatures. - - Returns a list of mypy Argument instances for use in the generated signatures. - """ - info = self._ctx.cls.info - arguments = [ - field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias) - for field in fields - if not (use_alias and field.has_dynamic_alias) - ] - return arguments - - def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool: - """ - Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature - - We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, - *unless* a required dynamic alias is present (since then we can't determine a valid signature). 
- """ - if not config.allow_population_by_field_name: - if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): - return False - if config.forbid_extra: - return True - return self.plugin_config.init_forbid_extra - - @staticmethod - def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool: - """ - Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be - determined during static analysis. - """ - for field in fields: - if field.has_dynamic_alias: - return True - if has_alias_generator: - for field in fields: - if field.alias is None: - return True - return False - - -class PydanticModelField: - def __init__( - self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int - ): - self.name = name - self.is_required = is_required - self.alias = alias - self.has_dynamic_alias = has_dynamic_alias - self.line = line - self.column = column - - def to_var(self, info: TypeInfo, use_alias: bool) -> Var: - name = self.name - if use_alias and self.alias is not None: - name = self.alias - return Var(name, info[self.name].type) - - def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument: - if typed and info[self.name].type is not None: - type_annotation = info[self.name].type - else: - type_annotation = AnyType(TypeOfAny.explicit) - return Argument( - variable=self.to_var(info, use_alias), - type_annotation=type_annotation, - initializer=None, - kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED, - ) - - def serialize(self) -> JsonDict: - return self.__dict__ - - @classmethod - def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField': - return cls(**data) - - -class ModelConfigData: - def __init__( - self, - forbid_extra: Optional[bool] = None, - allow_mutation: Optional[bool] = None, - frozen: Optional[bool] = None, - orm_mode: Optional[bool] = 
None, - allow_population_by_field_name: Optional[bool] = None, - has_alias_generator: Optional[bool] = None, - ): - self.forbid_extra = forbid_extra - self.allow_mutation = allow_mutation - self.frozen = frozen - self.orm_mode = orm_mode - self.allow_population_by_field_name = allow_population_by_field_name - self.has_alias_generator = has_alias_generator - - def set_values_dict(self) -> Dict[str, Any]: - return {k: v for k, v in self.__dict__.items() if v is not None} - - def update(self, config: Optional['ModelConfigData']) -> None: - if config is None: - return - for k, v in config.set_values_dict().items(): - setattr(self, k, v) - - def setdefault(self, key: str, value: Any) -> None: - if getattr(self, key) is None: - setattr(self, key, value) - - -ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic') -ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') -ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') -ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') -ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') -ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic') - - -def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None: - api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM) - - -def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: - api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) - - -def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: - api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS) - - -def error_unexpected_behavior( - detail: str, api: Union[CheckerPluginInterface, SemanticAnalyzerPluginInterface], context: Context -) -> None: # pragma: no cover - # Can't think 
of a good way to test this, but I confirmed it renders as desired by adding to a non-error path - link = 'https://github.com/pydantic/pydantic/issues/new/choose' - full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' - full_message += f'Please consider reporting this bug at {link} so we can try to fix it!' - api.fail(full_message, context, code=ERROR_UNEXPECTED) - - -def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: - api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) - - -def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None: - api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS) - - -def add_method( - ctx: ClassDefContext, - name: str, - args: List[Argument], - return_type: Type, - self_type: Optional[Type] = None, - tvar_def: Optional[TypeVarDef] = None, - is_classmethod: bool = False, - is_new: bool = False, - # is_staticmethod: bool = False, -) -> None: - """ - Adds a new method to a class. - - This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged - """ - info = ctx.cls.info - - # First remove any previously generated methods with the same name - # to avoid clashes and problems in the semantic analyzer. 
- if name in info.names: - sym = info.names[name] - if sym.plugin_generated and isinstance(sym.node, FuncDef): - ctx.cls.defs.body.remove(sym.node) # pragma: no cover - - self_type = self_type or fill_typevars(info) - if is_classmethod or is_new: - first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)] - # elif is_staticmethod: - # first = [] - else: - self_type = self_type or fill_typevars(info) - first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)] - args = first + args - arg_types, arg_names, arg_kinds = [], [], [] - for arg in args: - assert arg.type_annotation, 'All arguments must be fully typed.' - arg_types.append(arg.type_annotation) - arg_names.append(get_name(arg.variable)) - arg_kinds.append(arg.kind) - - function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function') - signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) - if tvar_def: - signature.variables = [tvar_def] - - func = FuncDef(name, args, Block([PassStmt()])) - func.info = info - func.type = set_callable_name(signature, func) - func.is_class = is_classmethod - # func.is_static = is_staticmethod - func._fullname = get_fullname(info) + '.' + name - func.line = info.line - - # NOTE: we would like the plugin generated node to dominate, but we still - # need to keep any existing definitions so they get semantically analyzed. - if name in info.names: - # Get a nice unique name instead. 
- r_name = get_unique_redefinition_name(name, info.names) - info.names[r_name] = info.names[name] - - if is_classmethod: # or is_staticmethod: - func.is_decorated = True - v = Var(name, func.type) - v.info = info - v._fullname = func._fullname - # if is_classmethod: - v.is_classmethod = True - dec = Decorator(func, [NameExpr('classmethod')], v) - # else: - # v.is_staticmethod = True - # dec = Decorator(func, [NameExpr('staticmethod')], v) - - dec.line = info.line - sym = SymbolTableNode(MDEF, dec) - else: - sym = SymbolTableNode(MDEF, func) - sym.plugin_generated = True - - info.names[name] = sym - info.defn.defs.body.append(func) - - -def get_fullname(x: Union[FuncBase, SymbolNode]) -> str: - """ - Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. - """ - fn = x.fullname - if callable(fn): # pragma: no cover - return fn() - return fn - - -def get_name(x: Union[FuncBase, SymbolNode]) -> str: - """ - Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. 
- """ - fn = x.name - if callable(fn): # pragma: no cover - return fn() - return fn - - -def parse_toml(config_file: str) -> Optional[Dict[str, Any]]: - if not config_file.endswith('.toml'): - return None - - read_mode = 'rb' - if sys.version_info >= (3, 11): - import tomllib as toml_ - else: - try: - import tomli as toml_ - except ImportError: - # older versions of mypy have toml as a dependency, not tomli - read_mode = 'r' - try: - import toml as toml_ # type: ignore[no-redef] - except ImportError: # pragma: no cover - import warnings - - warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') - return None - - with open(config_file, read_mode) as rf: - return toml_.load(rf) # type: ignore[arg-type] diff --git a/lib/pydantic/v1/networks.py b/lib/pydantic/v1/networks.py deleted file mode 100644 index cfebe588..00000000 --- a/lib/pydantic/v1/networks.py +++ /dev/null @@ -1,747 +0,0 @@ -import re -from ipaddress import ( - IPv4Address, - IPv4Interface, - IPv4Network, - IPv6Address, - IPv6Interface, - IPv6Network, - _BaseAddress, - _BaseNetwork, -) -from typing import ( - TYPE_CHECKING, - Any, - Collection, - Dict, - Generator, - List, - Match, - Optional, - Pattern, - Set, - Tuple, - Type, - Union, - cast, - no_type_check, -) - -from . 
import errors -from .utils import Representation, update_not_none -from .validators import constr_length_validator, str_validator - -if TYPE_CHECKING: - import email_validator - from typing_extensions import TypedDict - - from .config import BaseConfig - from .fields import ModelField - from .typing import AnyCallable - - CallableGenerator = Generator[AnyCallable, None, None] - - class Parts(TypedDict, total=False): - scheme: str - user: Optional[str] - password: Optional[str] - ipv4: Optional[str] - ipv6: Optional[str] - domain: Optional[str] - port: Optional[str] - path: Optional[str] - query: Optional[str] - fragment: Optional[str] - - class HostParts(TypedDict, total=False): - host: str - tld: Optional[str] - host_type: Optional[str] - port: Optional[str] - rebuild: bool - -else: - email_validator = None - - class Parts(dict): - pass - - -NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]] - -__all__ = [ - 'AnyUrl', - 'AnyHttpUrl', - 'FileUrl', - 'HttpUrl', - 'stricturl', - 'EmailStr', - 'NameEmail', - 'IPvAnyAddress', - 'IPvAnyInterface', - 'IPvAnyNetwork', - 'PostgresDsn', - 'CockroachDsn', - 'AmqpDsn', - 'RedisDsn', - 'MongoDsn', - 'KafkaDsn', - 'validate_email', -] - -_url_regex_cache = None -_multi_host_url_regex_cache = None -_ascii_domain_regex_cache = None -_int_domain_regex_cache = None -_host_regex_cache = None - -_host_regex = ( - r'(?:' - r'(?P(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|' # ipv4 - r'(?P\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|' # ipv6 - r'(?P[^\s/:?#]+)' # domain, validation occurs later - r')?' - r'(?::(?P\d+))?' # port -) -_scheme_regex = r'(?:(?P[a-z][a-z0-9+\-.]+)://)?' # scheme https://tools.ietf.org/html/rfc3986#appendix-A -_user_info_regex = r'(?:(?P[^\s:/]*)(?::(?P[^\s/]*))?@)?' -_path_regex = r'(?P/[^\s?#]*)?' -_query_regex = r'(?:\?(?P[^\s#]*))?' -_fragment_regex = r'(?:#(?P[^\s#]*))?' 
- - -def url_regex() -> Pattern[str]: - global _url_regex_cache - if _url_regex_cache is None: - _url_regex_cache = re.compile( - rf'{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}', - re.IGNORECASE, - ) - return _url_regex_cache - - -def multi_host_url_regex() -> Pattern[str]: - """ - Compiled multi host url regex. - - Additionally to `url_regex` it allows to match multiple hosts. - E.g. host1.db.net,host2.db.net - """ - global _multi_host_url_regex_cache - if _multi_host_url_regex_cache is None: - _multi_host_url_regex_cache = re.compile( - rf'{_scheme_regex}{_user_info_regex}' - r'(?P([^/]*))' # validation occurs later - rf'{_path_regex}{_query_regex}{_fragment_regex}', - re.IGNORECASE, - ) - return _multi_host_url_regex_cache - - -def ascii_domain_regex() -> Pattern[str]: - global _ascii_domain_regex_cache - if _ascii_domain_regex_cache is None: - ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?' - ascii_domain_ending = r'(?P\.[a-z]{2,63})?\.?' - _ascii_domain_regex_cache = re.compile( - fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE - ) - return _ascii_domain_regex_cache - - -def int_domain_regex() -> Pattern[str]: - global _int_domain_regex_cache - if _int_domain_regex_cache is None: - int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?' - int_domain_ending = r'(?P(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?' 
- _int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE) - return _int_domain_regex_cache - - -def host_regex() -> Pattern[str]: - global _host_regex_cache - if _host_regex_cache is None: - _host_regex_cache = re.compile( - _host_regex, - re.IGNORECASE, - ) - return _host_regex_cache - - -class AnyUrl(str): - strip_whitespace = True - min_length = 1 - max_length = 2**16 - allowed_schemes: Optional[Collection[str]] = None - tld_required: bool = False - user_required: bool = False - host_required: bool = True - hidden_parts: Set[str] = set() - - __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment') - - @no_type_check - def __new__(cls, url: Optional[str], **kwargs) -> object: - return str.__new__(cls, cls.build(**kwargs) if url is None else url) - - def __init__( - self, - url: str, - *, - scheme: str, - user: Optional[str] = None, - password: Optional[str] = None, - host: Optional[str] = None, - tld: Optional[str] = None, - host_type: str = 'domain', - port: Optional[str] = None, - path: Optional[str] = None, - query: Optional[str] = None, - fragment: Optional[str] = None, - ) -> None: - str.__init__(url) - self.scheme = scheme - self.user = user - self.password = password - self.host = host - self.tld = tld - self.host_type = host_type - self.port = port - self.path = path - self.query = query - self.fragment = fragment - - @classmethod - def build( - cls, - *, - scheme: str, - user: Optional[str] = None, - password: Optional[str] = None, - host: str, - port: Optional[str] = None, - path: Optional[str] = None, - query: Optional[str] = None, - fragment: Optional[str] = None, - **_kwargs: str, - ) -> str: - parts = Parts( - scheme=scheme, - user=user, - password=password, - host=host, - port=port, - path=path, - query=query, - fragment=fragment, - **_kwargs, # type: ignore[misc] - ) - - url = scheme + '://' - if user: - url += user - if password: - url += ':' + 
password - if user or password: - url += '@' - url += host - if port and ('port' not in cls.hidden_parts or cls.get_default_parts(parts).get('port') != port): - url += ':' + port - if path: - url += path - if query: - url += '?' + query - if fragment: - url += '#' + fragment - return url - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl': - if value.__class__ == cls: - return value - value = str_validator(value) - if cls.strip_whitespace: - value = value.strip() - url: str = cast(str, constr_length_validator(value, field, config)) - - m = cls._match_url(url) - # the regex should always match, if it doesn't please report with details of the URL tried - assert m, 'URL regex failed unexpectedly' - - original_parts = cast('Parts', m.groupdict()) - parts = cls.apply_default_parts(original_parts) - parts = cls.validate_parts(parts) - - if m.end() != len(url): - raise errors.UrlExtraError(extra=url[m.end() :]) - - return cls._build_url(m, url, parts) - - @classmethod - def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'AnyUrl': - """ - Validate hosts and build the AnyUrl object. Split from `validate` so this method - can be altered in `MultiHostDsn`. 
- """ - host, tld, host_type, rebuild = cls.validate_host(parts) - - return cls( - None if rebuild else url, - scheme=parts['scheme'], - user=parts['user'], - password=parts['password'], - host=host, - tld=tld, - host_type=host_type, - port=parts['port'], - path=parts['path'], - query=parts['query'], - fragment=parts['fragment'], - ) - - @staticmethod - def _match_url(url: str) -> Optional[Match[str]]: - return url_regex().match(url) - - @staticmethod - def _validate_port(port: Optional[str]) -> None: - if port is not None and int(port) > 65_535: - raise errors.UrlPortError() - - @classmethod - def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': - """ - A method used to validate parts of a URL. - Could be overridden to set default values for parts if missing - """ - scheme = parts['scheme'] - if scheme is None: - raise errors.UrlSchemeError() - - if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes: - raise errors.UrlSchemePermittedError(set(cls.allowed_schemes)) - - if validate_port: - cls._validate_port(parts['port']) - - user = parts['user'] - if cls.user_required and user is None: - raise errors.UrlUserInfoError() - - return parts - - @classmethod - def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]: - tld, host_type, rebuild = None, None, False - for f in ('domain', 'ipv4', 'ipv6'): - host = parts[f] # type: ignore[literal-required] - if host: - host_type = f - break - - if host is None: - if cls.host_required: - raise errors.UrlHostError() - elif host_type == 'domain': - is_international = False - d = ascii_domain_regex().fullmatch(host) - if d is None: - d = int_domain_regex().fullmatch(host) - if d is None: - raise errors.UrlHostError() - is_international = True - - tld = d.group('tld') - if tld is None and not is_international: - d = int_domain_regex().fullmatch(host) - assert d is not None - tld = d.group('tld') - is_international = True - - if tld is not None: - tld = tld[1:] - elif 
cls.tld_required: - raise errors.UrlHostTldError() - - if is_international: - host_type = 'int_domain' - rebuild = True - host = host.encode('idna').decode('ascii') - if tld is not None: - tld = tld.encode('idna').decode('ascii') - - return host, tld, host_type, rebuild # type: ignore - - @staticmethod - def get_default_parts(parts: 'Parts') -> 'Parts': - return {} - - @classmethod - def apply_default_parts(cls, parts: 'Parts') -> 'Parts': - for key, value in cls.get_default_parts(parts).items(): - if not parts[key]: # type: ignore[literal-required] - parts[key] = value # type: ignore[literal-required] - return parts - - def __repr__(self) -> str: - extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None) - return f'{self.__class__.__name__}({super().__repr__()}, {extra})' - - -class AnyHttpUrl(AnyUrl): - allowed_schemes = {'http', 'https'} - - __slots__ = () - - -class HttpUrl(AnyHttpUrl): - tld_required = True - # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers - max_length = 2083 - hidden_parts = {'port'} - - @staticmethod - def get_default_parts(parts: 'Parts') -> 'Parts': - return {'port': '80' if parts['scheme'] == 'http' else '443'} - - -class FileUrl(AnyUrl): - allowed_schemes = {'file'} - host_required = False - - __slots__ = () - - -class MultiHostDsn(AnyUrl): - __slots__ = AnyUrl.__slots__ + ('hosts',) - - def __init__(self, *args: Any, hosts: Optional[List['HostParts']] = None, **kwargs: Any): - super().__init__(*args, **kwargs) - self.hosts = hosts - - @staticmethod - def _match_url(url: str) -> Optional[Match[str]]: - return multi_host_url_regex().match(url) - - @classmethod - def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': - return super().validate_parts(parts, validate_port=False) - - @classmethod - def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'MultiHostDsn': - hosts_parts: List['HostParts'] = [] - 
host_re = host_regex() - for host in m.groupdict()['hosts'].split(','): - d: Parts = host_re.match(host).groupdict() # type: ignore - host, tld, host_type, rebuild = cls.validate_host(d) - port = d.get('port') - cls._validate_port(port) - hosts_parts.append( - { - 'host': host, - 'host_type': host_type, - 'tld': tld, - 'rebuild': rebuild, - 'port': port, - } - ) - - if len(hosts_parts) > 1: - return cls( - None if any([hp['rebuild'] for hp in hosts_parts]) else url, - scheme=parts['scheme'], - user=parts['user'], - password=parts['password'], - path=parts['path'], - query=parts['query'], - fragment=parts['fragment'], - host_type=None, - hosts=hosts_parts, - ) - else: - # backwards compatibility with single host - host_part = hosts_parts[0] - return cls( - None if host_part['rebuild'] else url, - scheme=parts['scheme'], - user=parts['user'], - password=parts['password'], - host=host_part['host'], - tld=host_part['tld'], - host_type=host_part['host_type'], - port=host_part.get('port'), - path=parts['path'], - query=parts['query'], - fragment=parts['fragment'], - ) - - -class PostgresDsn(MultiHostDsn): - allowed_schemes = { - 'postgres', - 'postgresql', - 'postgresql+asyncpg', - 'postgresql+pg8000', - 'postgresql+psycopg', - 'postgresql+psycopg2', - 'postgresql+psycopg2cffi', - 'postgresql+py-postgresql', - 'postgresql+pygresql', - } - user_required = True - - __slots__ = () - - -class CockroachDsn(AnyUrl): - allowed_schemes = { - 'cockroachdb', - 'cockroachdb+psycopg2', - 'cockroachdb+asyncpg', - } - user_required = True - - -class AmqpDsn(AnyUrl): - allowed_schemes = {'amqp', 'amqps'} - host_required = False - - -class RedisDsn(AnyUrl): - __slots__ = () - allowed_schemes = {'redis', 'rediss'} - host_required = False - - @staticmethod - def get_default_parts(parts: 'Parts') -> 'Parts': - return { - 'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '', - 'port': '6379', - 'path': '/0', - } - - -class MongoDsn(AnyUrl): - allowed_schemes = {'mongodb'} - 
- # TODO: Needed to generic "Parts" for "Replica Set", "Sharded Cluster", and other mongodb deployment modes - @staticmethod - def get_default_parts(parts: 'Parts') -> 'Parts': - return { - 'port': '27017', - } - - -class KafkaDsn(AnyUrl): - allowed_schemes = {'kafka'} - - @staticmethod - def get_default_parts(parts: 'Parts') -> 'Parts': - return { - 'domain': 'localhost', - 'port': '9092', - } - - -def stricturl( - *, - strip_whitespace: bool = True, - min_length: int = 1, - max_length: int = 2**16, - tld_required: bool = True, - host_required: bool = True, - allowed_schemes: Optional[Collection[str]] = None, -) -> Type[AnyUrl]: - # use kwargs then define conf in a dict to aid with IDE type hinting - namespace = dict( - strip_whitespace=strip_whitespace, - min_length=min_length, - max_length=max_length, - tld_required=tld_required, - host_required=host_required, - allowed_schemes=allowed_schemes, - ) - return type('UrlValue', (AnyUrl,), namespace) - - -def import_email_validator() -> None: - global email_validator - try: - import email_validator - except ImportError as e: - raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e - - -class EmailStr(str): - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format='email') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - # included here and below so the error happens straight away - import_email_validator() - - yield str_validator - yield cls.validate - - @classmethod - def validate(cls, value: Union[str]) -> str: - return validate_email(value)[1] - - -class NameEmail(Representation): - __slots__ = 'name', 'email' - - def __init__(self, name: str, email: str): - self.name = name - self.email = email - - def __eq__(self, other: Any) -> bool: - return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email) - - @classmethod - def __modify_schema__(cls, 
field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format='name-email') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - import_email_validator() - - yield cls.validate - - @classmethod - def validate(cls, value: Any) -> 'NameEmail': - if value.__class__ == cls: - return value - value = str_validator(value) - return cls(*validate_email(value)) - - def __str__(self) -> str: - return f'{self.name} <{self.email}>' - - -class IPvAnyAddress(_BaseAddress): - __slots__ = () - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format='ipvanyaddress') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]: - try: - return IPv4Address(value) - except ValueError: - pass - - try: - return IPv6Address(value) - except ValueError: - raise errors.IPvAnyAddressError() - - -class IPvAnyInterface(_BaseAddress): - __slots__ = () - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format='ipvanyinterface') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]: - try: - return IPv4Interface(value) - except ValueError: - pass - - try: - return IPv6Interface(value) - except ValueError: - raise errors.IPvAnyInterfaceError() - - -class IPvAnyNetwork(_BaseNetwork): # type: ignore - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format='ipvanynetwork') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]: - # Assume IP Network is defined 
with a default value for ``strict`` argument. - # Define your own class if you want to specify network address check strictness. - try: - return IPv4Network(value) - except ValueError: - pass - - try: - return IPv6Network(value) - except ValueError: - raise errors.IPvAnyNetworkError() - - -pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *') -MAX_EMAIL_LENGTH = 2048 -"""Maximum length for an email. -A somewhat arbitrary but very generous number compared to what is allowed by most implementations. -""" - - -def validate_email(value: Union[str]) -> Tuple[str, str]: - """ - Email address validation using https://pypi.org/project/email-validator/ - Notes: - * raw ip address (literal) domain parts are not allowed. - * "John Doe " style "pretty" email addresses are processed - * spaces are striped from the beginning and end of addresses but no error is raised - """ - if email_validator is None: - import_email_validator() - - if len(value) > MAX_EMAIL_LENGTH: - raise errors.EmailError() - - m = pretty_email_regex.fullmatch(value) - name: Union[str, None] = None - if m: - name, value = m.groups() - email = value.strip() - try: - parts = email_validator.validate_email(email, check_deliverability=False) - except email_validator.EmailNotValidError as e: - raise errors.EmailError from e - - if hasattr(parts, 'normalized'): - # email-validator >= 2 - email = parts.normalized - assert email is not None - name = name or parts.local_part - return name, email - else: - # email-validator >1, <2 - at_index = email.index('@') - local_part = email[:at_index] # RFC 5321, local part must be case-sensitive. 
- global_part = email[at_index:].lower() - - return name or local_part, local_part + global_part diff --git a/lib/pydantic/v1/parse.py b/lib/pydantic/v1/parse.py deleted file mode 100644 index 7ac330ca..00000000 --- a/lib/pydantic/v1/parse.py +++ /dev/null @@ -1,66 +0,0 @@ -import json -import pickle -from enum import Enum -from pathlib import Path -from typing import Any, Callable, Union - -from .types import StrBytes - - -class Protocol(str, Enum): - json = 'json' - pickle = 'pickle' - - -def load_str_bytes( - b: StrBytes, - *, - content_type: str = None, - encoding: str = 'utf8', - proto: Protocol = None, - allow_pickle: bool = False, - json_loads: Callable[[str], Any] = json.loads, -) -> Any: - if proto is None and content_type: - if content_type.endswith(('json', 'javascript')): - pass - elif allow_pickle and content_type.endswith('pickle'): - proto = Protocol.pickle - else: - raise TypeError(f'Unknown content-type: {content_type}') - - proto = proto or Protocol.json - - if proto == Protocol.json: - if isinstance(b, bytes): - b = b.decode(encoding) - return json_loads(b) - elif proto == Protocol.pickle: - if not allow_pickle: - raise RuntimeError('Trying to decode with pickle with allow_pickle=False') - bb = b if isinstance(b, bytes) else b.encode() - return pickle.loads(bb) - else: - raise TypeError(f'Unknown protocol: {proto}') - - -def load_file( - path: Union[str, Path], - *, - content_type: str = None, - encoding: str = 'utf8', - proto: Protocol = None, - allow_pickle: bool = False, - json_loads: Callable[[str], Any] = json.loads, -) -> Any: - path = Path(path) - b = path.read_bytes() - if content_type is None: - if path.suffix in ('.js', '.json'): - proto = Protocol.json - elif path.suffix == '.pkl': - proto = Protocol.pickle - - return load_str_bytes( - b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads - ) diff --git a/lib/pydantic/v1/py.typed b/lib/pydantic/v1/py.typed deleted file mode 
100644 index e69de29b..00000000 diff --git a/lib/pydantic/v1/schema.py b/lib/pydantic/v1/schema.py deleted file mode 100644 index ea16a72a..00000000 --- a/lib/pydantic/v1/schema.py +++ /dev/null @@ -1,1163 +0,0 @@ -import re -import warnings -from collections import defaultdict -from dataclasses import is_dataclass -from datetime import date, datetime, time, timedelta -from decimal import Decimal -from enum import Enum -from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - ForwardRef, - FrozenSet, - Generic, - Iterable, - List, - Optional, - Pattern, - Sequence, - Set, - Tuple, - Type, - TypeVar, - Union, - cast, -) -from uuid import UUID - -from typing_extensions import Annotated, Literal - -from .fields import ( - MAPPING_LIKE_SHAPES, - SHAPE_DEQUE, - SHAPE_FROZENSET, - SHAPE_GENERIC, - SHAPE_ITERABLE, - SHAPE_LIST, - SHAPE_SEQUENCE, - SHAPE_SET, - SHAPE_SINGLETON, - SHAPE_TUPLE, - SHAPE_TUPLE_ELLIPSIS, - FieldInfo, - ModelField, -) -from .json import pydantic_encoder -from .networks import AnyUrl, EmailStr -from .types import ( - ConstrainedDecimal, - ConstrainedFloat, - ConstrainedFrozenSet, - ConstrainedInt, - ConstrainedList, - ConstrainedSet, - ConstrainedStr, - SecretBytes, - SecretStr, - StrictBytes, - StrictStr, - conbytes, - condecimal, - confloat, - confrozenset, - conint, - conlist, - conset, - constr, -) -from .typing import ( - all_literal_values, - get_args, - get_origin, - get_sub_types, - is_callable_type, - is_literal_type, - is_namedtuple, - is_none_type, - is_union, -) -from .utils import ROOT_KEY, get_model, lenient_issubclass - -if TYPE_CHECKING: - from .dataclasses import Dataclass - from .main import BaseModel - -default_prefix = '#/definitions/' -default_ref_template = '#/definitions/{model}' - -TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]] -TypeModelSet = Set[TypeModelOrEnum] - - -def 
_apply_modify_schema( - modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any] -) -> None: - from inspect import signature - - sig = signature(modify_schema) - args = set(sig.parameters.keys()) - if 'field' in args or 'kwargs' in args: - modify_schema(field_schema, field=field) - else: - modify_schema(field_schema) - - -def schema( - models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]], - *, - by_alias: bool = True, - title: Optional[str] = None, - description: Optional[str] = None, - ref_prefix: Optional[str] = None, - ref_template: str = default_ref_template, -) -> Dict[str, Any]: - """ - Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions`` - top-level JSON key, including their sub-models. - - :param models: a list of models to include in the generated JSON Schema - :param by_alias: generate the schemas using the aliases defined, if any - :param title: title for the generated schema that includes the definitions - :param description: description for the generated schema - :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the - default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere - else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the - top-level key ``definitions``, so you can extract them from there. But all the references will have the set - prefix. - :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful - for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For - a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. 
- :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for - the models and sub-models passed in ``models``. - """ - clean_models = [get_model(model) for model in models] - flat_models = get_flat_models_from_models(clean_models) - model_name_map = get_model_name_map(flat_models) - definitions = {} - output_schema: Dict[str, Any] = {} - if title: - output_schema['title'] = title - if description: - output_schema['description'] = description - for model in clean_models: - m_schema, m_definitions, m_nested_models = model_process_schema( - model, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - ) - definitions.update(m_definitions) - model_name = model_name_map[model] - definitions[model_name] = m_schema - if definitions: - output_schema['definitions'] = definitions - return output_schema - - -def model_schema( - model: Union[Type['BaseModel'], Type['Dataclass']], - by_alias: bool = True, - ref_prefix: Optional[str] = None, - ref_template: str = default_ref_template, -) -> Dict[str, Any]: - """ - Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level - JSON key. - - :param model: a Pydantic model (a class that inherits from BaseModel) - :param by_alias: generate the schemas using the aliases defined, if any - :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the - default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere - else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the - top-level key ``definitions``, so you can extract them from there. But all the references will have the set - prefix. - :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. 
This can be useful for - references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a - sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. - :return: dict with the JSON Schema for the passed ``model`` - """ - model = get_model(model) - flat_models = get_flat_models_from_model(model) - model_name_map = get_model_name_map(flat_models) - model_name = model_name_map[model] - m_schema, m_definitions, nested_models = model_process_schema( - model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template - ) - if model_name in nested_models: - # model_name is in Nested models, it has circular references - m_definitions[model_name] = m_schema - m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False) - if m_definitions: - m_schema.update({'definitions': m_definitions}) - return m_schema - - -def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]: - # If no title is explicitly set, we don't set title in the schema for enums. - # The behaviour is the same as `BaseModel` reference, where the default title - # is in the definitions part of the schema. 
- schema_: Dict[str, Any] = {} - if field.field_info.title or not lenient_issubclass(field.type_, Enum): - schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ') - - if field.field_info.title: - schema_overrides = True - - if field.field_info.description: - schema_['description'] = field.field_info.description - schema_overrides = True - - if not field.required and field.default is not None and not is_callable_type(field.outer_type_): - schema_['default'] = encode_default(field.default) - schema_overrides = True - - return schema_, schema_overrides - - -def field_schema( - field: ModelField, - *, - by_alias: bool = True, - model_name_map: Dict[TypeModelOrEnum, str], - ref_prefix: Optional[str] = None, - ref_template: str = default_ref_template, - known_models: Optional[TypeModelSet] = None, -) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: - """ - Process a Pydantic field and return a tuple with a JSON Schema for it as the first item. - Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field - is a model and has sub-models, and those sub-models don't have overrides (as ``title``, ``default``, etc), they - will be included in the definitions and referenced in the schema instead of included recursively. - - :param field: a Pydantic ``ModelField`` - :param by_alias: use the defined alias (if any) in the returned schema - :param model_name_map: used to generate the JSON Schema references to other models included in the definitions - :param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of - #/definitions/ will be used - :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for - references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a - sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. 
- :param known_models: used to solve circular references - :return: tuple of the schema for this field and additional definitions - """ - s, schema_overrides = get_field_info_schema(field) - - validation_schema = get_field_schema_validations(field) - if validation_schema: - s.update(validation_schema) - schema_overrides = True - - f_schema, f_definitions, f_nested_models = field_type_schema( - field, - by_alias=by_alias, - model_name_map=model_name_map, - schema_overrides=schema_overrides, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models or set(), - ) - - # $ref will only be returned when there are no schema_overrides - if '$ref' in f_schema: - return f_schema, f_definitions, f_nested_models - else: - s.update(f_schema) - return s, f_definitions, f_nested_models - - -numeric_types = (int, float, Decimal) -_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( - ('max_length', numeric_types, 'maxLength'), - ('min_length', numeric_types, 'minLength'), - ('regex', str, 'pattern'), -) - -_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( - ('gt', numeric_types, 'exclusiveMinimum'), - ('lt', numeric_types, 'exclusiveMaximum'), - ('ge', numeric_types, 'minimum'), - ('le', numeric_types, 'maximum'), - ('multiple_of', numeric_types, 'multipleOf'), -) - - -def get_field_schema_validations(field: ModelField) -> Dict[str, Any]: - """ - Get the JSON Schema validation keywords for a ``field`` with an annotation of - a Pydantic ``FieldInfo`` with validation arguments. 
- """ - f_schema: Dict[str, Any] = {} - - if lenient_issubclass(field.type_, Enum): - # schema is already updated by `enum_process_schema`; just update with field extra - if field.field_info.extra: - f_schema.update(field.field_info.extra) - return f_schema - - if lenient_issubclass(field.type_, (str, bytes)): - for attr_name, t, keyword in _str_types_attrs: - attr = getattr(field.field_info, attr_name, None) - if isinstance(attr, t): - f_schema[keyword] = attr - if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool): - for attr_name, t, keyword in _numeric_types_attrs: - attr = getattr(field.field_info, attr_name, None) - if isinstance(attr, t): - f_schema[keyword] = attr - if field.field_info is not None and field.field_info.const: - f_schema['const'] = field.default - if field.field_info.extra: - f_schema.update(field.field_info.extra) - modify_schema = getattr(field.outer_type_, '__modify_schema__', None) - if modify_schema: - _apply_modify_schema(modify_schema, field, f_schema) - return f_schema - - -def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]: - """ - Process a set of models and generate unique names for them to be used as keys in the JSON Schema - definitions. By default the names are the same as the class name. But if two models in different Python - modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be - based on the Python module path for those conflicting models to prevent name collisions. 
- - :param unique_models: a Python set of models - :return: dict mapping models to names - """ - name_model_map = {} - conflicting_names: Set[str] = set() - for model in unique_models: - model_name = normalize_name(model.__name__) - if model_name in conflicting_names: - model_name = get_long_model_name(model) - name_model_map[model_name] = model - elif model_name in name_model_map: - conflicting_names.add(model_name) - conflicting_model = name_model_map.pop(model_name) - name_model_map[get_long_model_name(conflicting_model)] = conflicting_model - name_model_map[get_long_model_name(model)] = model - else: - name_model_map[model_name] = model - return {v: k for k, v in name_model_map.items()} - - -def get_flat_models_from_model(model: Type['BaseModel'], known_models: Optional[TypeModelSet] = None) -> TypeModelSet: - """ - Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass - model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also - subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), - the return value will be ``set([Foo, Bar, Baz])``. 
- - :param model: a Pydantic ``BaseModel`` subclass - :param known_models: used to solve circular references - :return: a set with the initial model and all its sub-models - """ - known_models = known_models or set() - flat_models: TypeModelSet = set() - flat_models.add(model) - known_models |= flat_models - fields = cast(Sequence[ModelField], model.__fields__.values()) - flat_models |= get_flat_models_from_fields(fields, known_models=known_models) - return flat_models - - -def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet: - """ - Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel - (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree. - I.e. if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that - model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of - type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. 
- - :param field: a Pydantic ``ModelField`` - :param known_models: used to solve circular references - :return: a set with the model used in the declaration for this field, if any, and all its sub-models - """ - from .main import BaseModel - - flat_models: TypeModelSet = set() - - field_type = field.type_ - if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): - field_type = field_type.__pydantic_model__ - - if field.sub_fields and not lenient_issubclass(field_type, BaseModel): - flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models) - elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models: - flat_models |= get_flat_models_from_model(field_type, known_models=known_models) - elif lenient_issubclass(field_type, Enum): - flat_models.add(field_type) - return flat_models - - -def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet: - """ - Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel`` - (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree. - I.e. if you pass a the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and on of them has a - field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also - subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. 
- - :param fields: a list of Pydantic ``ModelField``s - :param known_models: used to solve circular references - :return: a set with any model declared in the fields, and all their sub-models - """ - flat_models: TypeModelSet = set() - for field in fields: - flat_models |= get_flat_models_from_field(field, known_models=known_models) - return flat_models - - -def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet: - """ - Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass - a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has - a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. - """ - flat_models: TypeModelSet = set() - for model in models: - flat_models |= get_flat_models_from_model(model) - return flat_models - - -def get_long_model_name(model: TypeModelOrEnum) -> str: - return f'{model.__module__}__{model.__qualname__}'.replace('.', '__') - - -def field_type_schema( - field: ModelField, - *, - by_alias: bool, - model_name_map: Dict[TypeModelOrEnum, str], - ref_template: str, - schema_overrides: bool = False, - ref_prefix: Optional[str] = None, - known_models: TypeModelSet, -) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: - """ - Used by ``field_schema()``, you probably should be using that function. - - Take a single ``field`` and generate the schema for its type only, not including additional - information as title, etc. Also return additional schema definitions, from sub-models. 
- """ - from .main import BaseModel # noqa: F811 - - definitions = {} - nested_models: Set[str] = set() - f_schema: Dict[str, Any] - if field.shape in { - SHAPE_LIST, - SHAPE_TUPLE_ELLIPSIS, - SHAPE_SEQUENCE, - SHAPE_SET, - SHAPE_FROZENSET, - SHAPE_ITERABLE, - SHAPE_DEQUE, - }: - items_schema, f_definitions, f_nested_models = field_singleton_schema( - field, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - definitions.update(f_definitions) - nested_models.update(f_nested_models) - f_schema = {'type': 'array', 'items': items_schema} - if field.shape in {SHAPE_SET, SHAPE_FROZENSET}: - f_schema['uniqueItems'] = True - - elif field.shape in MAPPING_LIKE_SHAPES: - f_schema = {'type': 'object'} - key_field = cast(ModelField, field.key_field) - regex = getattr(key_field.type_, 'regex', None) - items_schema, f_definitions, f_nested_models = field_singleton_schema( - field, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - definitions.update(f_definitions) - nested_models.update(f_nested_models) - if regex: - # Dict keys have a regex pattern - # items_schema might be a schema or empty dict, add it either way - f_schema['patternProperties'] = {ConstrainedStr._get_pattern(regex): items_schema} - if items_schema: - # The dict values are not simply Any, so they need a schema - f_schema['additionalProperties'] = items_schema - elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)): - sub_schema = [] - sub_fields = cast(List[ModelField], field.sub_fields) - for sf in sub_fields: - sf_schema, sf_definitions, sf_nested_models = field_type_schema( - sf, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - definitions.update(sf_definitions) - 
nested_models.update(sf_nested_models) - sub_schema.append(sf_schema) - - sub_fields_len = len(sub_fields) - if field.shape == SHAPE_GENERIC: - all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema} - f_schema = {'allOf': [all_of_schemas]} - else: - f_schema = { - 'type': 'array', - 'minItems': sub_fields_len, - 'maxItems': sub_fields_len, - } - if sub_fields_len >= 1: - f_schema['items'] = sub_schema - else: - assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape - f_schema, f_definitions, f_nested_models = field_singleton_schema( - field, - by_alias=by_alias, - model_name_map=model_name_map, - schema_overrides=schema_overrides, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - definitions.update(f_definitions) - nested_models.update(f_nested_models) - - # check field type to avoid repeated calls to the same __modify_schema__ method - if field.type_ != field.outer_type_: - if field.shape == SHAPE_GENERIC: - field_type = field.type_ - else: - field_type = field.outer_type_ - modify_schema = getattr(field_type, '__modify_schema__', None) - if modify_schema: - _apply_modify_schema(modify_schema, field, f_schema) - return f_schema, definitions, nested_models - - -def model_process_schema( - model: TypeModelOrEnum, - *, - by_alias: bool = True, - model_name_map: Dict[TypeModelOrEnum, str], - ref_prefix: Optional[str] = None, - ref_template: str = default_ref_template, - known_models: Optional[TypeModelSet] = None, - field: Optional[ModelField] = None, -) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: - """ - Used by ``model_schema()``, you probably should be using that function. - - Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The - sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All - the definitions are returned as the second value. 
- """ - from inspect import getdoc, signature - - known_models = known_models or set() - if lenient_issubclass(model, Enum): - model = cast(Type[Enum], model) - s = enum_process_schema(model, field=field) - return s, {}, set() - model = cast(Type['BaseModel'], model) - s = {'title': model.__config__.title or model.__name__} - doc = getdoc(model) - if doc: - s['description'] = doc - known_models.add(model) - m_schema, m_definitions, nested_models = model_type_schema( - model, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - s.update(m_schema) - schema_extra = model.__config__.schema_extra - if callable(schema_extra): - if len(signature(schema_extra).parameters) == 1: - schema_extra(s) - else: - schema_extra(s, model) - else: - s.update(schema_extra) - return s, m_definitions, nested_models - - -def model_type_schema( - model: Type['BaseModel'], - *, - by_alias: bool, - model_name_map: Dict[TypeModelOrEnum, str], - ref_template: str, - ref_prefix: Optional[str] = None, - known_models: TypeModelSet, -) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: - """ - You probably should be using ``model_schema()``, this function is indirectly used by that function. - - Take a single ``model`` and generate the schema for its type only, not including additional - information as title, etc. Also return additional schema definitions, from sub-models. 
- """ - properties = {} - required = [] - definitions: Dict[str, Any] = {} - nested_models: Set[str] = set() - for k, f in model.__fields__.items(): - try: - f_schema, f_definitions, f_nested_models = field_schema( - f, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - except SkipField as skip: - warnings.warn(skip.message, UserWarning) - continue - definitions.update(f_definitions) - nested_models.update(f_nested_models) - if by_alias: - properties[f.alias] = f_schema - if f.required: - required.append(f.alias) - else: - properties[k] = f_schema - if f.required: - required.append(k) - if ROOT_KEY in properties: - out_schema = properties[ROOT_KEY] - out_schema['title'] = model.__config__.title or model.__name__ - else: - out_schema = {'type': 'object', 'properties': properties} - if required: - out_schema['required'] = required - if model.__config__.extra == 'forbid': - out_schema['additionalProperties'] = False - return out_schema, definitions, nested_models - - -def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]: - """ - Take a single `enum` and generate its schema. - - This is similar to the `model_process_schema` function, but applies to ``Enum`` objects. - """ - import inspect - - schema_: Dict[str, Any] = { - 'title': enum.__name__, - # Python assigns all enums a default docstring value of 'An enumeration', so - # all enums will have a description field even if not explicitly provided. - 'description': inspect.cleandoc(enum.__doc__ or 'An enumeration.'), - # Add enum values and the enum field type to the schema. 
- 'enum': [item.value for item in cast(Iterable[Enum], enum)], - } - - add_field_type_to_schema(enum, schema_) - - modify_schema = getattr(enum, '__modify_schema__', None) - if modify_schema: - _apply_modify_schema(modify_schema, field, schema_) - - return schema_ - - -def field_singleton_sub_fields_schema( - field: ModelField, - *, - by_alias: bool, - model_name_map: Dict[TypeModelOrEnum, str], - ref_template: str, - schema_overrides: bool = False, - ref_prefix: Optional[str] = None, - known_models: TypeModelSet, -) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: - """ - This function is indirectly used by ``field_schema()``, you probably should be using that function. - - Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their - schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``. - """ - sub_fields = cast(List[ModelField], field.sub_fields) - definitions = {} - nested_models: Set[str] = set() - if len(sub_fields) == 1: - return field_type_schema( - sub_fields[0], - by_alias=by_alias, - model_name_map=model_name_map, - schema_overrides=schema_overrides, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - else: - s: Dict[str, Any] = {} - # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object - field_has_discriminator: bool = field.discriminator_key is not None - if field_has_discriminator: - assert field.sub_fields_mapping is not None - - discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {} - - for discriminator_value, sub_field in field.sub_fields_mapping.items(): - if isinstance(discriminator_value, Enum): - discriminator_value = str(discriminator_value.value) - # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many - if is_union(get_origin(sub_field.type_)): - sub_models = get_sub_types(sub_field.type_) - 
discriminator_models_refs[discriminator_value] = { - model_name_map[sub_model]: get_schema_ref( - model_name_map[sub_model], ref_prefix, ref_template, False - ) - for sub_model in sub_models - } - else: - sub_field_type = sub_field.type_ - if hasattr(sub_field_type, '__pydantic_model__'): - sub_field_type = sub_field_type.__pydantic_model__ - - discriminator_model_name = model_name_map[sub_field_type] - discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False) - discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref'] - - s['discriminator'] = { - 'propertyName': field.discriminator_alias, - 'mapping': discriminator_models_refs, - } - - sub_field_schemas = [] - for sf in sub_fields: - sub_schema, sub_definitions, sub_nested_models = field_type_schema( - sf, - by_alias=by_alias, - model_name_map=model_name_map, - schema_overrides=schema_overrides, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - definitions.update(sub_definitions) - if schema_overrides and 'allOf' in sub_schema: - # if the sub_field is a referenced schema we only need the referenced - # object. Otherwise we will end up with several allOf inside anyOf/oneOf. - # See https://github.com/pydantic/pydantic/issues/1209 - sub_schema = sub_schema['allOf'][0] - - if sub_schema.keys() == {'discriminator', 'oneOf'}: - # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere - sub_schema.pop('discriminator') - sub_field_schemas.append(sub_schema) - nested_models.update(sub_nested_models) - s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas - return s, definitions, nested_models - - -# Order is important, e.g. subclasses of str must go before str -# this is used only for standard library types, custom types should use __modify_schema__ instead -field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] 
= ( - (Path, {'type': 'string', 'format': 'path'}), - (datetime, {'type': 'string', 'format': 'date-time'}), - (date, {'type': 'string', 'format': 'date'}), - (time, {'type': 'string', 'format': 'time'}), - (timedelta, {'type': 'number', 'format': 'time-delta'}), - (IPv4Network, {'type': 'string', 'format': 'ipv4network'}), - (IPv6Network, {'type': 'string', 'format': 'ipv6network'}), - (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}), - (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}), - (IPv4Address, {'type': 'string', 'format': 'ipv4'}), - (IPv6Address, {'type': 'string', 'format': 'ipv6'}), - (Pattern, {'type': 'string', 'format': 'regex'}), - (str, {'type': 'string'}), - (bytes, {'type': 'string', 'format': 'binary'}), - (bool, {'type': 'boolean'}), - (int, {'type': 'integer'}), - (float, {'type': 'number'}), - (Decimal, {'type': 'number'}), - (UUID, {'type': 'string', 'format': 'uuid'}), - (dict, {'type': 'object'}), - (list, {'type': 'array', 'items': {}}), - (tuple, {'type': 'array', 'items': {}}), - (set, {'type': 'array', 'items': {}, 'uniqueItems': True}), - (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}), -) - -json_scheme = {'type': 'string', 'format': 'json-string'} - - -def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None: - """ - Update the given `schema` with the type-specific metadata for the given `field_type`. - - This function looks through `field_class_to_schema` for a class that matches the given `field_type`, - and then modifies the given `schema` with the information from that type. 
- """ - for type_, t_schema in field_class_to_schema: - # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class - if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern: - schema_.update(t_schema) - break - - -def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]: - if ref_prefix: - schema_ref = {'$ref': ref_prefix + name} - else: - schema_ref = {'$ref': ref_template.format(model=name)} - return {'allOf': [schema_ref]} if schema_overrides else schema_ref - - -def field_singleton_schema( # noqa: C901 (ignore complexity) - field: ModelField, - *, - by_alias: bool, - model_name_map: Dict[TypeModelOrEnum, str], - ref_template: str, - schema_overrides: bool = False, - ref_prefix: Optional[str] = None, - known_models: TypeModelSet, -) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: - """ - This function is indirectly used by ``field_schema()``, you should probably be using that function. - - Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models. 
- """ - from .main import BaseModel - - definitions: Dict[str, Any] = {} - nested_models: Set[str] = set() - field_type = field.type_ - - # Recurse into this field if it contains sub_fields and is NOT a - # BaseModel OR that BaseModel is a const - if field.sub_fields and ( - (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel) - ): - return field_singleton_sub_fields_schema( - field, - by_alias=by_alias, - model_name_map=model_name_map, - schema_overrides=schema_overrides, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type: - return {}, definitions, nested_models # no restrictions - if is_none_type(field_type): - return {'type': 'null'}, definitions, nested_models - if is_callable_type(field_type): - raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.') - f_schema: Dict[str, Any] = {} - if field.field_info is not None and field.field_info.const: - f_schema['const'] = field.default - - if is_literal_type(field_type): - values = tuple(x.value if isinstance(x, Enum) else x for x in all_literal_values(field_type)) - - if len({v.__class__ for v in values}) > 1: - return field_schema( - multitypes_literal_field_for_schema(values, field), - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - ) - - # All values have the same type - field_type = values[0].__class__ - f_schema['enum'] = list(values) - add_field_type_to_schema(field_type, f_schema) - elif lenient_issubclass(field_type, Enum): - enum_name = model_name_map[field_type] - f_schema, schema_overrides = get_field_info_schema(field, schema_overrides) - f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides)) - definitions[enum_name] = 
enum_process_schema(field_type, field=field) - elif is_namedtuple(field_type): - sub_schema, *_ = model_process_schema( - field_type.__pydantic_model__, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - field=field, - ) - items_schemas = list(sub_schema['properties'].values()) - f_schema.update( - { - 'type': 'array', - 'items': items_schemas, - 'minItems': len(items_schemas), - 'maxItems': len(items_schemas), - } - ) - elif not hasattr(field_type, '__pydantic_model__'): - add_field_type_to_schema(field_type, f_schema) - - modify_schema = getattr(field_type, '__modify_schema__', None) - if modify_schema: - _apply_modify_schema(modify_schema, field, f_schema) - - if f_schema: - return f_schema, definitions, nested_models - - # Handle dataclass-based models - if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): - field_type = field_type.__pydantic_model__ - - if issubclass(field_type, BaseModel): - model_name = model_name_map[field_type] - if field_type not in known_models: - sub_schema, sub_definitions, sub_nested_models = model_process_schema( - field_type, - by_alias=by_alias, - model_name_map=model_name_map, - ref_prefix=ref_prefix, - ref_template=ref_template, - known_models=known_models, - field=field, - ) - definitions.update(sub_definitions) - definitions[model_name] = sub_schema - nested_models.update(sub_nested_models) - else: - nested_models.add(model_name) - schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides) - return schema_ref, definitions, nested_models - - # For generics with no args - args = get_args(field_type) - if args is not None and not args and Generic in field_type.__bases__: - return f_schema, definitions, nested_models - - raise ValueError(f'Value not declarable with JSON Schema, field: {field}') - - -def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> 
ModelField: - """ - To support `Literal` with values of different types, we split it into multiple `Literal` with same type - e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]` - """ - literal_distinct_types = defaultdict(list) - for v in values: - literal_distinct_types[v.__class__].append(v) - distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values()) - - return ModelField( - name=field.name, - type_=Union[tuple(distinct_literals)], # type: ignore - class_validators=field.class_validators, - model_config=field.model_config, - default=field.default, - required=field.required, - alias=field.alias, - field_info=field.field_info, - ) - - -def encode_default(dft: Any) -> Any: - from .main import BaseModel - - if isinstance(dft, BaseModel) or is_dataclass(dft): - dft = cast('dict[str, Any]', pydantic_encoder(dft)) - - if isinstance(dft, dict): - return {encode_default(k): encode_default(v) for k, v in dft.items()} - elif isinstance(dft, Enum): - return dft.value - elif isinstance(dft, (int, float, str)): - return dft - elif isinstance(dft, (list, tuple)): - t = dft.__class__ - seq_args = (encode_default(v) for v in dft) - return t(*seq_args) if is_namedtuple(t) else t(seq_args) - elif dft is None: - return None - else: - return pydantic_encoder(dft) - - -_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal} - - -def get_annotation_from_field_info( - annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False -) -> Type[Any]: - """ - Get an annotation with validation implemented for numbers and strings based on the field_info. 
- :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` - :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema - :param field_name: name of the field for use in error messages - :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment - :return: the same ``annotation`` if unmodified or a new annotation with validation in place - """ - constraints = field_info.get_constraints() - used_constraints: Set[str] = set() - if constraints: - annotation, used_constraints = get_annotation_with_constraints(annotation, field_info) - if validate_assignment: - used_constraints.add('allow_mutation') - - unused_constraints = constraints - used_constraints - if unused_constraints: - raise ValueError( - f'On field "{field_name}" the following field constraints are set but not enforced: ' - f'{", ".join(unused_constraints)}. ' - f'\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints' - ) - - return annotation - - -def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901 - """ - Get an annotation with used constraints implemented for numbers and strings based on the field_info. - - :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` - :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema - :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints. - """ - used_constraints: Set[str] = set() - - def go(type_: Any) -> Type[Any]: - if ( - is_literal_type(type_) - or isinstance(type_, ForwardRef) - or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet)) - ): - return type_ - origin = get_origin(type_) - if origin is not None: - args: Tuple[Any, ...] 
= get_args(type_) - if any(isinstance(a, ForwardRef) for a in args): - # forward refs cause infinite recursion below - return type_ - - if origin is Annotated: - return go(args[0]) - if is_union(origin): - return Union[tuple(go(a) for a in args)] # type: ignore - - if issubclass(origin, List) and ( - field_info.min_items is not None - or field_info.max_items is not None - or field_info.unique_items is not None - ): - used_constraints.update({'min_items', 'max_items', 'unique_items'}) - return conlist( - go(args[0]), - min_items=field_info.min_items, - max_items=field_info.max_items, - unique_items=field_info.unique_items, - ) - - if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None): - used_constraints.update({'min_items', 'max_items'}) - return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) - - if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None): - used_constraints.update({'min_items', 'max_items'}) - return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) - - for t in (Tuple, List, Set, FrozenSet, Sequence): - if issubclass(origin, t): # type: ignore - return t[tuple(go(a) for a in args)] # type: ignore - - if issubclass(origin, Dict): - return Dict[args[0], go(args[1])] # type: ignore - - attrs: Optional[Tuple[str, ...]] = None - constraint_func: Optional[Callable[..., type]] = None - if isinstance(type_, type): - if issubclass(type_, (SecretStr, SecretBytes)): - attrs = ('max_length', 'min_length') - - def constraint_func(**kw: Any) -> Type[Any]: - return type(type_.__name__, (type_,), kw) - - elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)): - attrs = ('max_length', 'min_length', 'regex') - if issubclass(type_, StrictStr): - - def constraint_func(**kw: Any) -> Type[Any]: - return type(type_.__name__, (type_,), kw) - - else: - constraint_func = constr - elif 
issubclass(type_, bytes): - attrs = ('max_length', 'min_length', 'regex') - if issubclass(type_, StrictBytes): - - def constraint_func(**kw: Any) -> Type[Any]: - return type(type_.__name__, (type_,), kw) - - else: - constraint_func = conbytes - elif issubclass(type_, numeric_types) and not issubclass( - type_, - ( - ConstrainedInt, - ConstrainedFloat, - ConstrainedDecimal, - ConstrainedList, - ConstrainedSet, - ConstrainedFrozenSet, - bool, - ), - ): - # Is numeric type - attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of') - if issubclass(type_, float): - attrs += ('allow_inf_nan',) - if issubclass(type_, Decimal): - attrs += ('max_digits', 'decimal_places') - numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch - constraint_func = _map_types_constraint[numeric_type] - - if attrs: - used_constraints.update(set(attrs)) - kwargs = { - attr_name: attr - for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs) - if attr is not None - } - if kwargs: - constraint_func = cast(Callable[..., type], constraint_func) - return constraint_func(**kwargs) - return type_ - - return go(annotation), used_constraints - - -def normalize_name(name: str) -> str: - """ - Normalizes the given name. This can be applied to either a model *or* enum. - """ - return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name) - - -class SkipField(Exception): - """ - Utility exception used to exclude fields from schema. 
- """ - - def __init__(self, message: str) -> None: - self.message = message diff --git a/lib/pydantic/v1/tools.py b/lib/pydantic/v1/tools.py deleted file mode 100644 index 45be2770..00000000 --- a/lib/pydantic/v1/tools.py +++ /dev/null @@ -1,92 +0,0 @@ -import json -from functools import lru_cache -from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union - -from .parse import Protocol, load_file, load_str_bytes -from .types import StrBytes -from .typing import display_as_type - -__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of') - -NameFactory = Union[str, Callable[[Type[Any]], str]] - -if TYPE_CHECKING: - from .typing import DictStrAny - - -def _generate_parsing_type_name(type_: Any) -> str: - return f'ParsingModel[{display_as_type(type_)}]' - - -@lru_cache(maxsize=2048) -def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any: - from .main import create_model - - if type_name is None: - type_name = _generate_parsing_type_name - if not isinstance(type_name, str): - type_name = type_name(type_) - return create_model(type_name, __root__=(type_, ...)) - - -T = TypeVar('T') - - -def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T: - model_type = _get_parsing_type(type_, type_name=type_name) # type: ignore[arg-type] - return model_type(__root__=obj).__root__ - - -def parse_file_as( - type_: Type[T], - path: Union[str, Path], - *, - content_type: str = None, - encoding: str = 'utf8', - proto: Protocol = None, - allow_pickle: bool = False, - json_loads: Callable[[str], Any] = json.loads, - type_name: Optional[NameFactory] = None, -) -> T: - obj = load_file( - path, - proto=proto, - content_type=content_type, - encoding=encoding, - allow_pickle=allow_pickle, - json_loads=json_loads, - ) - return parse_obj_as(type_, obj, type_name=type_name) - - -def parse_raw_as( - type_: Type[T], - b: StrBytes, - *, - content_type: 
str = None, - encoding: str = 'utf8', - proto: Protocol = None, - allow_pickle: bool = False, - json_loads: Callable[[str], Any] = json.loads, - type_name: Optional[NameFactory] = None, -) -> T: - obj = load_str_bytes( - b, - proto=proto, - content_type=content_type, - encoding=encoding, - allow_pickle=allow_pickle, - json_loads=json_loads, - ) - return parse_obj_as(type_, obj, type_name=type_name) - - -def schema_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_kwargs: Any) -> 'DictStrAny': - """Generate a JSON schema (as dict) for the passed model or dynamically generated one""" - return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs) - - -def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str: - """Generate a JSON schema (as JSON) for the passed model or dynamically generated one""" - return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs) diff --git a/lib/pydantic/v1/types.py b/lib/pydantic/v1/types.py deleted file mode 100644 index 754e58ff..00000000 --- a/lib/pydantic/v1/types.py +++ /dev/null @@ -1,1205 +0,0 @@ -import abc -import math -import re -import warnings -from datetime import date -from decimal import Decimal, InvalidOperation -from enum import Enum -from pathlib import Path -from types import new_class -from typing import ( - TYPE_CHECKING, - Any, - Callable, - ClassVar, - Dict, - FrozenSet, - List, - Optional, - Pattern, - Set, - Tuple, - Type, - TypeVar, - Union, - cast, - overload, -) -from uuid import UUID -from weakref import WeakSet - -from . 
import errors -from .datetime_parse import parse_date -from .utils import import_string, update_not_none -from .validators import ( - bytes_validator, - constr_length_validator, - constr_lower, - constr_strip_whitespace, - constr_upper, - decimal_validator, - float_finite_validator, - float_validator, - frozenset_validator, - int_validator, - list_validator, - number_multiple_validator, - number_size_validator, - path_exists_validator, - path_validator, - set_validator, - str_validator, - strict_bytes_validator, - strict_float_validator, - strict_int_validator, - strict_str_validator, -) - -__all__ = [ - 'NoneStr', - 'NoneBytes', - 'StrBytes', - 'NoneStrBytes', - 'StrictStr', - 'ConstrainedBytes', - 'conbytes', - 'ConstrainedList', - 'conlist', - 'ConstrainedSet', - 'conset', - 'ConstrainedFrozenSet', - 'confrozenset', - 'ConstrainedStr', - 'constr', - 'PyObject', - 'ConstrainedInt', - 'conint', - 'PositiveInt', - 'NegativeInt', - 'NonNegativeInt', - 'NonPositiveInt', - 'ConstrainedFloat', - 'confloat', - 'PositiveFloat', - 'NegativeFloat', - 'NonNegativeFloat', - 'NonPositiveFloat', - 'FiniteFloat', - 'ConstrainedDecimal', - 'condecimal', - 'UUID1', - 'UUID3', - 'UUID4', - 'UUID5', - 'FilePath', - 'DirectoryPath', - 'Json', - 'JsonWrapper', - 'SecretField', - 'SecretStr', - 'SecretBytes', - 'StrictBool', - 'StrictBytes', - 'StrictInt', - 'StrictFloat', - 'PaymentCardNumber', - 'ByteSize', - 'PastDate', - 'FutureDate', - 'ConstrainedDate', - 'condate', -] - -NoneStr = Optional[str] -NoneBytes = Optional[bytes] -StrBytes = Union[str, bytes] -NoneStrBytes = Optional[StrBytes] -OptionalInt = Optional[int] -OptionalIntFloat = Union[OptionalInt, float] -OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal] -OptionalDate = Optional[date] -StrIntFloat = Union[str, int, float] - -if TYPE_CHECKING: - from typing_extensions import Annotated - - from .dataclasses import Dataclass - from .main import BaseModel - from .typing import CallableGenerator - - ModelOrDc = 
Type[Union[BaseModel, Dataclass]] - -T = TypeVar('T') -_DEFINED_TYPES: 'WeakSet[type]' = WeakSet() - - -@overload -def _registered(typ: Type[T]) -> Type[T]: - pass - - -@overload -def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta': - pass - - -def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']: - # In order to generate valid examples of constrained types, Hypothesis needs - # to inspect the type object - so we keep a weakref to each contype object - # until it can be registered. When (or if) our Hypothesis plugin is loaded, - # it monkeypatches this function. - # If Hypothesis is never used, the total effect is to keep a weak reference - # which has minimal memory usage and doesn't even affect garbage collection. - _DEFINED_TYPES.add(typ) - return typ - - -class ConstrainedNumberMeta(type): - def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore - new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) - - if new_cls.gt is not None and new_cls.ge is not None: - raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') - if new_cls.lt is not None and new_cls.le is not None: - raise errors.ConfigError('bounds lt and le cannot be specified at the same time') - - return _registered(new_cls) # type: ignore - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -if TYPE_CHECKING: - StrictBool = bool -else: - - class StrictBool(int): - """ - StrictBool to allow for bools which are not type-coerced. - """ - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='boolean') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - def validate(cls, value: Any) -> bool: - """ - Ensure that we only allow bools. 
- """ - if isinstance(value, bool): - return value - - raise errors.StrictBoolError() - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class ConstrainedInt(int, metaclass=ConstrainedNumberMeta): - strict: bool = False - gt: OptionalInt = None - ge: OptionalInt = None - lt: OptionalInt = None - le: OptionalInt = None - multiple_of: OptionalInt = None - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none( - field_schema, - exclusiveMinimum=cls.gt, - exclusiveMaximum=cls.lt, - minimum=cls.ge, - maximum=cls.le, - multipleOf=cls.multiple_of, - ) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield strict_int_validator if cls.strict else int_validator - yield number_size_validator - yield number_multiple_validator - - -def conint( - *, - strict: bool = False, - gt: Optional[int] = None, - ge: Optional[int] = None, - lt: Optional[int] = None, - le: Optional[int] = None, - multiple_of: Optional[int] = None, -) -> Type[int]: - # use kwargs then define conf in a dict to aid with IDE type hinting - namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) - return type('ConstrainedIntValue', (ConstrainedInt,), namespace) - - -if TYPE_CHECKING: - PositiveInt = int - NegativeInt = int - NonPositiveInt = int - NonNegativeInt = int - StrictInt = int -else: - - class PositiveInt(ConstrainedInt): - gt = 0 - - class NegativeInt(ConstrainedInt): - lt = 0 - - class NonPositiveInt(ConstrainedInt): - le = 0 - - class NonNegativeInt(ConstrainedInt): - ge = 0 - - class StrictInt(ConstrainedInt): - strict = True - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta): - strict: bool = False - gt: OptionalIntFloat = None - ge: OptionalIntFloat = None - lt: OptionalIntFloat = None - le: OptionalIntFloat = None - multiple_of: OptionalIntFloat = None - 
allow_inf_nan: Optional[bool] = None - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none( - field_schema, - exclusiveMinimum=cls.gt, - exclusiveMaximum=cls.lt, - minimum=cls.ge, - maximum=cls.le, - multipleOf=cls.multiple_of, - ) - # Modify constraints to account for differences between IEEE floats and JSON - if field_schema.get('exclusiveMinimum') == -math.inf: - del field_schema['exclusiveMinimum'] - if field_schema.get('minimum') == -math.inf: - del field_schema['minimum'] - if field_schema.get('exclusiveMaximum') == math.inf: - del field_schema['exclusiveMaximum'] - if field_schema.get('maximum') == math.inf: - del field_schema['maximum'] - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield strict_float_validator if cls.strict else float_validator - yield number_size_validator - yield number_multiple_validator - yield float_finite_validator - - -def confloat( - *, - strict: bool = False, - gt: float = None, - ge: float = None, - lt: float = None, - le: float = None, - multiple_of: float = None, - allow_inf_nan: Optional[bool] = None, -) -> Type[float]: - # use kwargs then define conf in a dict to aid with IDE type hinting - namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan) - return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace) - - -if TYPE_CHECKING: - PositiveFloat = float - NegativeFloat = float - NonPositiveFloat = float - NonNegativeFloat = float - StrictFloat = float - FiniteFloat = float -else: - - class PositiveFloat(ConstrainedFloat): - gt = 0 - - class NegativeFloat(ConstrainedFloat): - lt = 0 - - class NonPositiveFloat(ConstrainedFloat): - le = 0 - - class NonNegativeFloat(ConstrainedFloat): - ge = 0 - - class StrictFloat(ConstrainedFloat): - strict = True - - class FiniteFloat(ConstrainedFloat): - allow_inf_nan = False - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class ConstrainedBytes(bytes): - strip_whitespace = False - to_upper = False - to_lower = False - min_length: OptionalInt = None - max_length: OptionalInt = None - strict: bool = False - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield strict_bytes_validator if cls.strict else bytes_validator - yield constr_strip_whitespace - yield constr_upper - yield constr_lower - yield constr_length_validator - - -def conbytes( - *, - strip_whitespace: bool = False, - to_upper: bool = False, - to_lower: bool = False, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - strict: bool = False, -) -> Type[bytes]: - # use kwargs then define conf in a dict to aid with IDE type hinting - namespace = dict( - strip_whitespace=strip_whitespace, - to_upper=to_upper, - to_lower=to_lower, - min_length=min_length, - max_length=max_length, - strict=strict, - ) - return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace)) - - -if TYPE_CHECKING: - StrictBytes = bytes -else: - - class StrictBytes(ConstrainedBytes): - strict = True - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class ConstrainedStr(str): - strip_whitespace = False - to_upper = False - to_lower = False - min_length: OptionalInt = None - max_length: OptionalInt = None - curtail_length: OptionalInt = None - regex: Optional[Union[str, Pattern[str]]] = None - strict = False - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none( - field_schema, - minLength=cls.min_length, - maxLength=cls.max_length, - pattern=cls.regex and cls._get_pattern(cls.regex), - ) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield strict_str_validator if cls.strict else 
str_validator - yield constr_strip_whitespace - yield constr_upper - yield constr_lower - yield constr_length_validator - yield cls.validate - - @classmethod - def validate(cls, value: Union[str]) -> Union[str]: - if cls.curtail_length and len(value) > cls.curtail_length: - value = value[: cls.curtail_length] - - if cls.regex: - if not re.match(cls.regex, value): - raise errors.StrRegexError(pattern=cls._get_pattern(cls.regex)) - - return value - - @staticmethod - def _get_pattern(regex: Union[str, Pattern[str]]) -> str: - return regex if isinstance(regex, str) else regex.pattern - - -def constr( - *, - strip_whitespace: bool = False, - to_upper: bool = False, - to_lower: bool = False, - strict: bool = False, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - curtail_length: Optional[int] = None, - regex: Optional[str] = None, -) -> Type[str]: - # use kwargs then define conf in a dict to aid with IDE type hinting - namespace = dict( - strip_whitespace=strip_whitespace, - to_upper=to_upper, - to_lower=to_lower, - strict=strict, - min_length=min_length, - max_length=max_length, - curtail_length=curtail_length, - regex=regex and re.compile(regex), - ) - return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace)) - - -if TYPE_CHECKING: - StrictStr = str -else: - - class StrictStr(ConstrainedStr): - strict = True - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -# This types superclass should be Set[T], but cython chokes on that... 
-class ConstrainedSet(set): # type: ignore - # Needed for pydantic to detect that this is a set - __origin__ = set - __args__: Set[Type[T]] # type: ignore - - min_items: Optional[int] = None - max_items: Optional[int] = None - item_type: Type[T] # type: ignore - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.set_length_validator - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) - - @classmethod - def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]': - if v is None: - return None - - v = set_validator(v) - v_len = len(v) - - if cls.min_items is not None and v_len < cls.min_items: - raise errors.SetMinLengthError(limit_value=cls.min_items) - - if cls.max_items is not None and v_len > cls.max_items: - raise errors.SetMaxLengthError(limit_value=cls.max_items) - - return v - - -def conset(item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None) -> Type[Set[T]]: - # __args__ is needed to conform to typing generics api - namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} - # We use new_class to be able to deal with Generic types - return new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace)) - - -# This types superclass should be FrozenSet[T], but cython chokes on that... 
-class ConstrainedFrozenSet(frozenset): # type: ignore - # Needed for pydantic to detect that this is a set - __origin__ = frozenset - __args__: FrozenSet[Type[T]] # type: ignore - - min_items: Optional[int] = None - max_items: Optional[int] = None - item_type: Type[T] # type: ignore - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.frozenset_length_validator - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) - - @classmethod - def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]': - if v is None: - return None - - v = frozenset_validator(v) - v_len = len(v) - - if cls.min_items is not None and v_len < cls.min_items: - raise errors.FrozenSetMinLengthError(limit_value=cls.min_items) - - if cls.max_items is not None and v_len > cls.max_items: - raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items) - - return v - - -def confrozenset( - item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None -) -> Type[FrozenSet[T]]: - # __args__ is needed to conform to typing generics api - namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} - # We use new_class to be able to deal with Generic types - return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace)) - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -# This types superclass should be List[T], but cython chokes on that... -class ConstrainedList(list): # type: ignore - # Needed for pydantic to detect that this is a list - __origin__ = list - __args__: Tuple[Type[T], ...] 
# type: ignore - - min_items: Optional[int] = None - max_items: Optional[int] = None - unique_items: Optional[bool] = None - item_type: Type[T] # type: ignore - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.list_length_validator - if cls.unique_items: - yield cls.unique_items_validator - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items) - - @classmethod - def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]': - if v is None: - return None - - v = list_validator(v) - v_len = len(v) - - if cls.min_items is not None and v_len < cls.min_items: - raise errors.ListMinLengthError(limit_value=cls.min_items) - - if cls.max_items is not None and v_len > cls.max_items: - raise errors.ListMaxLengthError(limit_value=cls.max_items) - - return v - - @classmethod - def unique_items_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]': - if v is None: - return None - - for i, value in enumerate(v, start=1): - if value in v[i:]: - raise errors.ListUniqueItemsError() - - return v - - -def conlist( - item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: bool = None -) -> Type[List[T]]: - # __args__ is needed to conform to typing generics api - namespace = dict( - min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,) - ) - # We use new_class to be able to deal with Generic types - return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace)) - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -if TYPE_CHECKING: - PyObject = Callable[..., Any] -else: - - class PyObject: - validate_always = True - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - 
def validate(cls, value: Any) -> Any: - if isinstance(value, Callable): - return value - - try: - value = str_validator(value) - except errors.StrError: - raise errors.PyObjectError(error_message='value is neither a valid import path not a valid callable') - - try: - return import_string(value) - except ImportError as e: - raise errors.PyObjectError(error_message=str(e)) - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta): - gt: OptionalIntFloatDecimal = None - ge: OptionalIntFloatDecimal = None - lt: OptionalIntFloatDecimal = None - le: OptionalIntFloatDecimal = None - max_digits: OptionalInt = None - decimal_places: OptionalInt = None - multiple_of: OptionalIntFloatDecimal = None - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none( - field_schema, - exclusiveMinimum=cls.gt, - exclusiveMaximum=cls.lt, - minimum=cls.ge, - maximum=cls.le, - multipleOf=cls.multiple_of, - ) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield decimal_validator - yield number_size_validator - yield number_multiple_validator - yield cls.validate - - @classmethod - def validate(cls, value: Decimal) -> Decimal: - try: - normalized_value = value.normalize() - except InvalidOperation: - normalized_value = value - digit_tuple, exponent = normalized_value.as_tuple()[1:] - if exponent in {'F', 'n', 'N'}: - raise errors.DecimalIsNotFiniteError() - - if exponent >= 0: - # A positive exponent adds that many trailing zeros. - digits = len(digit_tuple) + exponent - decimals = 0 - else: - # If the absolute value of the negative exponent is larger than the - # number of digits, then it's the same as the number of digits, - # because it'll consume all of the digits in digit_tuple and then - # add abs(exponent) - len(digit_tuple) leading zeros after the - # decimal point. 
- if abs(exponent) > len(digit_tuple): - digits = decimals = abs(exponent) - else: - digits = len(digit_tuple) - decimals = abs(exponent) - whole_digits = digits - decimals - - if cls.max_digits is not None and digits > cls.max_digits: - raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits) - - if cls.decimal_places is not None and decimals > cls.decimal_places: - raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places) - - if cls.max_digits is not None and cls.decimal_places is not None: - expected = cls.max_digits - cls.decimal_places - if whole_digits > expected: - raise errors.DecimalWholeDigitsError(whole_digits=expected) - - return value - - -def condecimal( - *, - gt: Decimal = None, - ge: Decimal = None, - lt: Decimal = None, - le: Decimal = None, - max_digits: Optional[int] = None, - decimal_places: Optional[int] = None, - multiple_of: Decimal = None, -) -> Type[Decimal]: - # use kwargs then define conf in a dict to aid with IDE type hinting - namespace = dict( - gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of - ) - return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace) - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -if TYPE_CHECKING: - UUID1 = UUID - UUID3 = UUID - UUID4 = UUID - UUID5 = UUID -else: - - class UUID1(UUID): - _required_version = 1 - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format=f'uuid{cls._required_version}') - - class UUID3(UUID1): - _required_version = 3 - - class UUID4(UUID1): - _required_version = 4 - - class UUID5(UUID1): - _required_version = 5 - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -if TYPE_CHECKING: - FilePath = Path - DirectoryPath = Path -else: - - class FilePath(Path): - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - 
field_schema.update(format='file-path') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield path_validator - yield path_exists_validator - yield cls.validate - - @classmethod - def validate(cls, value: Path) -> Path: - if not value.is_file(): - raise errors.PathNotAFileError(path=value) - - return value - - class DirectoryPath(Path): - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(format='directory-path') - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield path_validator - yield path_exists_validator - yield cls.validate - - @classmethod - def validate(cls, value: Path) -> Path: - if not value.is_dir(): - raise errors.PathNotADirectoryError(path=value) - - return value - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class JsonWrapper: - pass - - -class JsonMeta(type): - def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]: - if t is Any: - return Json # allow Json[Any] to replecate plain Json - return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t})) - - -if TYPE_CHECKING: - Json = Annotated[T, ...] # Json[list[str]] will be recognized by type checkers as list[str] - -else: - - class Json(metaclass=JsonMeta): - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update(type='string', format='json-string') - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class SecretField(abc.ABC): - """ - Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`, - the `__init__()` should be part of the abstract class and the - `get_secret_value()` method should use the generic `T` type. - - However Cython doesn't support very well generics at the moment and - the generated code fails to be imported (see - https://github.com/cython/cython/issues/2753). 
- """ - - def __eq__(self, other: Any) -> bool: - return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value() - - def __str__(self) -> str: - return '**********' if self.get_secret_value() else '' - - def __hash__(self) -> int: - return hash(self.get_secret_value()) - - @abc.abstractmethod - def get_secret_value(self) -> Any: # pragma: no cover - ... - - -class SecretStr(SecretField): - min_length: OptionalInt = None - max_length: OptionalInt = None - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none( - field_schema, - type='string', - writeOnly=True, - format='password', - minLength=cls.min_length, - maxLength=cls.max_length, - ) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - yield constr_length_validator - - @classmethod - def validate(cls, value: Any) -> 'SecretStr': - if isinstance(value, cls): - return value - value = str_validator(value) - return cls(value) - - def __init__(self, value: str): - self._secret_value = value - - def __repr__(self) -> str: - return f"SecretStr('{self}')" - - def __len__(self) -> int: - return len(self._secret_value) - - def display(self) -> str: - warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning) - return str(self) - - def get_secret_value(self) -> str: - return self._secret_value - - -class SecretBytes(SecretField): - min_length: OptionalInt = None - max_length: OptionalInt = None - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none( - field_schema, - type='string', - writeOnly=True, - format='password', - minLength=cls.min_length, - maxLength=cls.max_length, - ) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - yield constr_length_validator - - @classmethod - def validate(cls, value: Any) -> 'SecretBytes': - if isinstance(value, cls): - return value 
- value = bytes_validator(value) - return cls(value) - - def __init__(self, value: bytes): - self._secret_value = value - - def __repr__(self) -> str: - return f"SecretBytes(b'{self}')" - - def __len__(self) -> int: - return len(self._secret_value) - - def display(self) -> str: - warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning) - return str(self) - - def get_secret_value(self) -> bytes: - return self._secret_value - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -class PaymentCardBrand(str, Enum): - # If you add another card type, please also add it to the - # Hypothesis strategy in `pydantic._hypothesis_plugin`. - amex = 'American Express' - mastercard = 'Mastercard' - visa = 'Visa' - other = 'other' - - def __str__(self) -> str: - return self.value - - -class PaymentCardNumber(str): - """ - Based on: https://en.wikipedia.org/wiki/Payment_card_number - """ - - strip_whitespace: ClassVar[bool] = True - min_length: ClassVar[int] = 12 - max_length: ClassVar[int] = 19 - bin: str - last4: str - brand: PaymentCardBrand - - def __init__(self, card_number: str): - self.bin = card_number[:6] - self.last4 = card_number[-4:] - self.brand = self._get_brand(card_number) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield str_validator - yield constr_strip_whitespace - yield constr_length_validator - yield cls.validate_digits - yield cls.validate_luhn_check_digit - yield cls - yield cls.validate_length_for_brand - - @property - def masked(self) -> str: - num_masked = len(self) - 10 # len(bin) + len(last4) == 10 - return f'{self.bin}{"*" * num_masked}{self.last4}' - - @classmethod - def validate_digits(cls, card_number: str) -> str: - if not card_number.isdigit(): - raise errors.NotDigitError - return card_number - - @classmethod - def validate_luhn_check_digit(cls, card_number: str) -> str: - """ - Based on: https://en.wikipedia.org/wiki/Luhn_algorithm 
- """ - sum_ = int(card_number[-1]) - length = len(card_number) - parity = length % 2 - for i in range(length - 1): - digit = int(card_number[i]) - if i % 2 == parity: - digit *= 2 - if digit > 9: - digit -= 9 - sum_ += digit - valid = sum_ % 10 == 0 - if not valid: - raise errors.LuhnValidationError - return card_number - - @classmethod - def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber': - """ - Validate length based on BIN for major brands: - https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN) - """ - required_length: Union[None, int, str] = None - if card_number.brand in PaymentCardBrand.mastercard: - required_length = 16 - valid = len(card_number) == required_length - elif card_number.brand == PaymentCardBrand.visa: - required_length = '13, 16 or 19' - valid = len(card_number) in {13, 16, 19} - elif card_number.brand == PaymentCardBrand.amex: - required_length = 15 - valid = len(card_number) == required_length - else: - valid = True - if not valid: - raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length) - return card_number - - @staticmethod - def _get_brand(card_number: str) -> PaymentCardBrand: - if card_number[0] == '4': - brand = PaymentCardBrand.visa - elif 51 <= int(card_number[:2]) <= 55: - brand = PaymentCardBrand.mastercard - elif card_number[:2] in {'34', '37'}: - brand = PaymentCardBrand.amex - else: - brand = PaymentCardBrand.other - return brand - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -BYTE_SIZES = { - 'b': 1, - 'kb': 10**3, - 'mb': 10**6, - 'gb': 10**9, - 'tb': 10**12, - 'pb': 10**15, - 'eb': 10**18, - 'kib': 2**10, - 'mib': 2**20, - 'gib': 2**30, - 'tib': 2**40, - 'pib': 2**50, - 'eib': 2**60, -} -BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) -byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) - - -class ByteSize(int): - 
@classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield cls.validate - - @classmethod - def validate(cls, v: StrIntFloat) -> 'ByteSize': - try: - return cls(int(v)) - except ValueError: - pass - - str_match = byte_string_re.match(str(v)) - if str_match is None: - raise errors.InvalidByteSize() - - scalar, unit = str_match.groups() - if unit is None: - unit = 'b' - - try: - unit_mult = BYTE_SIZES[unit.lower()] - except KeyError: - raise errors.InvalidByteSizeUnit(unit=unit) - - return cls(int(float(scalar) * unit_mult)) - - def human_readable(self, decimal: bool = False) -> str: - if decimal: - divisor = 1000 - units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] - final_unit = 'EB' - else: - divisor = 1024 - units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] - final_unit = 'EiB' - - num = float(self) - for unit in units: - if abs(num) < divisor: - return f'{num:0.1f}{unit}' - num /= divisor - - return f'{num:0.1f}{final_unit}' - - def to(self, unit: str) -> float: - try: - unit_div = BYTE_SIZES[unit.lower()] - except KeyError: - raise errors.InvalidByteSizeUnit(unit=unit) - - return self / unit_div - - -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -if TYPE_CHECKING: - PastDate = date - FutureDate = date -else: - - class PastDate(date): - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield parse_date - yield cls.validate - - @classmethod - def validate(cls, value: date) -> date: - if value >= date.today(): - raise errors.DateNotInThePastError() - - return value - - class FutureDate(date): - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield parse_date - yield cls.validate - - @classmethod - def validate(cls, value: date) -> date: - if value <= date.today(): - raise errors.DateNotInTheFutureError() - - return value - - -class ConstrainedDate(date, metaclass=ConstrainedNumberMeta): - gt: OptionalDate = None - ge: OptionalDate = None - lt: OptionalDate = None - le: OptionalDate 
= None - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le) - - @classmethod - def __get_validators__(cls) -> 'CallableGenerator': - yield parse_date - yield number_size_validator - - -def condate( - *, - gt: date = None, - ge: date = None, - lt: date = None, - le: date = None, -) -> Type[date]: - # use kwargs then define conf in a dict to aid with IDE type hinting - namespace = dict(gt=gt, ge=ge, lt=lt, le=le) - return type('ConstrainedDateValue', (ConstrainedDate,), namespace) diff --git a/lib/pydantic/v1/typing.py b/lib/pydantic/v1/typing.py deleted file mode 100644 index a690a053..00000000 --- a/lib/pydantic/v1/typing.py +++ /dev/null @@ -1,603 +0,0 @@ -import sys -import typing -from collections.abc import Callable -from os import PathLike -from typing import ( # type: ignore - TYPE_CHECKING, - AbstractSet, - Any, - Callable as TypingCallable, - ClassVar, - Dict, - ForwardRef, - Generator, - Iterable, - List, - Mapping, - NewType, - Optional, - Sequence, - Set, - Tuple, - Type, - TypeVar, - Union, - _eval_type, - cast, - get_type_hints, -) - -from typing_extensions import ( - Annotated, - Final, - Literal, - NotRequired as TypedDictNotRequired, - Required as TypedDictRequired, -) - -try: - from typing import _TypingBase as typing_base # type: ignore -except ImportError: - from typing import _Final as typing_base # type: ignore - -try: - from typing import GenericAlias as TypingGenericAlias # type: ignore -except ImportError: - # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] 
and so on) - TypingGenericAlias = () - -try: - from types import UnionType as TypesUnionType # type: ignore -except ImportError: - # python < 3.10 does not have UnionType (str | int, byte | bool and so on) - TypesUnionType = () - - -if sys.version_info < (3, 9): - - def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: - return type_._evaluate(globalns, localns) - -else: - - def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: - # Even though it is the right signature for python 3.9, mypy complains with - # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast... - return cast(Any, type_)._evaluate(globalns, localns, set()) - - -if sys.version_info < (3, 9): - # Ensure we always get all the whole `Annotated` hint, not just the annotated type. - # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`, - # so it already returns the full annotation - get_all_type_hints = get_type_hints - -else: - - def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any: - return get_type_hints(obj, globalns, localns, include_extras=True) - - -_T = TypeVar('_T') - -AnyCallable = TypingCallable[..., Any] -NoArgAnyCallable = TypingCallable[[], Any] - -# workaround for https://github.com/python/mypy/issues/9496 -AnyArgTCallable = TypingCallable[..., _T] - - -# Annotated[...] is implemented by returning an instance of one of these classes, depending on -# python/typing_extensions version. 
-AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'} - - -LITERAL_TYPES: Set[Any] = {Literal} -if hasattr(typing, 'Literal'): - LITERAL_TYPES.add(typing.Literal) - - -if sys.version_info < (3, 8): - - def get_origin(t: Type[Any]) -> Optional[Type[Any]]: - if type(t).__name__ in AnnotatedTypeNames: - # weirdly this is a runtime requirement, as well as for mypy - return cast(Type[Any], Annotated) - return getattr(t, '__origin__', None) - -else: - from typing import get_origin as _typing_get_origin - - def get_origin(tp: Type[Any]) -> Optional[Type[Any]]: - """ - We can't directly use `typing.get_origin` since we need a fallback to support - custom generic classes like `ConstrainedList` - It should be useless once https://github.com/cython/cython/issues/3537 is - solved and https://github.com/pydantic/pydantic/pull/1753 is merged. - """ - if type(tp).__name__ in AnnotatedTypeNames: - return cast(Type[Any], Annotated) # mypy complains about _SpecialForm - return _typing_get_origin(tp) or getattr(tp, '__origin__', None) - - -if sys.version_info < (3, 8): - from typing import _GenericAlias - - def get_args(t: Type[Any]) -> Tuple[Any, ...]: - """Compatibility version of get_args for python 3.7. - - Mostly compatible with the python 3.8 `typing` module version - and able to handle almost all use cases. - """ - if type(t).__name__ in AnnotatedTypeNames: - return t.__args__ + t.__metadata__ - if isinstance(t, _GenericAlias): - res = t.__args__ - if t.__origin__ is Callable and res and res[0] is not Ellipsis: - res = (list(res[:-1]), res[-1]) - return res - return getattr(t, '__args__', ()) - -else: - from typing import get_args as _typing_get_args - - def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]: - """ - In python 3.9, `typing.Dict`, `typing.List`, ... - do have an empty `__args__` by default (instead of the generic ~T for example). 
- In order to still support `Dict` for example and consider it as `Dict[Any, Any]`, - we retrieve the `_nparams` value that tells us how many parameters it needs. - """ - if hasattr(tp, '_nparams'): - return (Any,) * tp._nparams - # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple` - # in python 3.10- but now returns () for `tuple` and `Tuple`. - # This will probably be clarified in pydantic v2 - try: - if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]: # type: ignore[misc] - return ((),) - # there is a TypeError when compiled with cython - except TypeError: # pragma: no cover - pass - return () - - def get_args(tp: Type[Any]) -> Tuple[Any, ...]: - """Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. - Examples:: - get_args(Dict[str, int]) == (str, int) - get_args(int) == () - get_args(Union[int, Union[T, int], str][int]) == (int, str) - get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - get_args(Callable[[], T][int]) == ([], int) - """ - if type(tp).__name__ in AnnotatedTypeNames: - return tp.__args__ + tp.__metadata__ - # the fallback is needed for the same reasons as `get_origin` (see above) - return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp) - - -if sys.version_info < (3, 9): - - def convert_generics(tp: Type[Any]) -> Type[Any]: - """Python 3.9 and older only supports generics from `typing` module. - They convert strings to ForwardRef automatically. - - Examples:: - typing.List['Hero'] == typing.List[ForwardRef('Hero')] - """ - return tp - -else: - from typing import _UnionGenericAlias # type: ignore - - from typing_extensions import _AnnotatedAlias - - def convert_generics(tp: Type[Any]) -> Type[Any]: - """ - Recursively searches for `str` type hints and replaces them with ForwardRef. 
- - Examples:: - convert_generics(list['Hero']) == list[ForwardRef('Hero')] - convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')] - convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')] - convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int - """ - origin = get_origin(tp) - if not origin or not hasattr(tp, '__args__'): - return tp - - args = get_args(tp) - - # typing.Annotated needs special treatment - if origin is Annotated: - return _AnnotatedAlias(convert_generics(args[0]), args[1:]) - - # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)` - converted = tuple( - ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg) - for arg in args - ) - - if converted == args: - return tp - elif isinstance(tp, TypingGenericAlias): - return TypingGenericAlias(origin, converted) - elif isinstance(tp, TypesUnionType): - # recreate types.UnionType (PEP604, Python >= 3.10) - return _UnionGenericAlias(origin, converted) - else: - try: - setattr(tp, '__args__', converted) - except AttributeError: - pass - return tp - - -if sys.version_info < (3, 10): - - def is_union(tp: Optional[Type[Any]]) -> bool: - return tp is Union - - WithArgsTypes = (TypingGenericAlias,) - -else: - import types - import typing - - def is_union(tp: Optional[Type[Any]]) -> bool: - return tp is Union or tp is types.UnionType # noqa: E721 - - WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType) - - -StrPath = Union[str, PathLike] - - -if TYPE_CHECKING: - from .fields import ModelField - - TupleGenerator = Generator[Tuple[str, Any], None, None] - DictStrAny = Dict[str, Any] - DictAny = Dict[Any, Any] - SetStr = Set[str] - ListStr = List[str] - IntStr = Union[int, str] - AbstractSetIntStr = AbstractSet[IntStr] - DictIntStrAny = Dict[IntStr, Any] - MappingIntStrAny = Mapping[IntStr, Any] - 
CallableGenerator = Generator[AnyCallable, None, None] - ReprArgs = Sequence[Tuple[Optional[str], Any]] - - MYPY = False - if MYPY: - AnyClassMethod = classmethod[Any] - else: - # classmethod[TargetType, CallableParamSpecType, CallableReturnType] - AnyClassMethod = classmethod[Any, Any, Any] - -__all__ = ( - 'AnyCallable', - 'NoArgAnyCallable', - 'NoneType', - 'is_none_type', - 'display_as_type', - 'resolve_annotations', - 'is_callable_type', - 'is_literal_type', - 'all_literal_values', - 'is_namedtuple', - 'is_typeddict', - 'is_typeddict_special', - 'is_new_type', - 'new_type_supertype', - 'is_classvar', - 'is_finalvar', - 'update_field_forward_refs', - 'update_model_forward_refs', - 'TupleGenerator', - 'DictStrAny', - 'DictAny', - 'SetStr', - 'ListStr', - 'IntStr', - 'AbstractSetIntStr', - 'DictIntStrAny', - 'CallableGenerator', - 'ReprArgs', - 'AnyClassMethod', - 'CallableGenerator', - 'WithArgsTypes', - 'get_args', - 'get_origin', - 'get_sub_types', - 'typing_base', - 'get_all_type_hints', - 'is_union', - 'StrPath', - 'MappingIntStrAny', -) - - -NoneType = None.__class__ - - -NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None]) - - -if sys.version_info < (3, 8): - # Even though this implementation is slower, we need it for python 3.7: - # In python 3.7 "Literal" is not a builtin type and uses a different - # mechanism. - # for this reason `Literal[None] is Literal[None]` evaluates to `False`, - # breaking the faster implementation used for the other python versions. - - def is_none_type(type_: Any) -> bool: - return type_ in NONE_TYPES - -elif sys.version_info[:2] == (3, 8): - - def is_none_type(type_: Any) -> bool: - for none_type in NONE_TYPES: - if type_ is none_type: - return True - # With python 3.8, specifically 3.8.10, Literal "is" check sare very flakey - # can change on very subtle changes like use of types in other modules, - # hopefully this check avoids that issue. 
- if is_literal_type(type_): # pragma: no cover - return all_literal_values(type_) == (None,) - return False - -else: - - def is_none_type(type_: Any) -> bool: - return type_ in NONE_TYPES - - -def display_as_type(v: Type[Any]) -> str: - if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type): - v = v.__class__ - - if is_union(get_origin(v)): - return f'Union[{", ".join(map(display_as_type, get_args(v)))}]' - - if isinstance(v, WithArgsTypes): - # Generic alias are constructs like `list[int]` - return str(v).replace('typing.', '') - - try: - return v.__name__ - except AttributeError: - # happens with typing objects - return str(v).replace('typing.', '') - - -def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]: - """ - Partially taken from typing.get_type_hints. - - Resolve string or ForwardRef annotations into type objects if possible. - """ - base_globals: Optional[Dict[str, Any]] = None - if module_name: - try: - module = sys.modules[module_name] - except KeyError: - # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 - pass - else: - base_globals = module.__dict__ - - annotations = {} - for name, value in raw_annotations.items(): - if isinstance(value, str): - if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1): - value = ForwardRef(value, is_argument=False, is_class=True) - else: - value = ForwardRef(value, is_argument=False) - try: - value = _eval_type(value, base_globals, None) - except NameError: - # this is ok, it can be fixed with update_forward_refs - pass - annotations[name] = value - return annotations - - -def is_callable_type(type_: Type[Any]) -> bool: - return type_ is Callable or get_origin(type_) is Callable - - -def is_literal_type(type_: Type[Any]) -> bool: - return Literal is not None and get_origin(type_) in LITERAL_TYPES - - -def literal_values(type_: Type[Any]) -> Tuple[Any, ...]: - return 
get_args(type_) - - -def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]: - """ - This method is used to retrieve all Literal values as - Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586) - e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]` - """ - if not is_literal_type(type_): - return (type_,) - - values = literal_values(type_) - return tuple(x for value in values for x in all_literal_values(value)) - - -def is_namedtuple(type_: Type[Any]) -> bool: - """ - Check if a given class is a named tuple. - It can be either a `typing.NamedTuple` or `collections.namedtuple` - """ - from .utils import lenient_issubclass - - return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields') - - -def is_typeddict(type_: Type[Any]) -> bool: - """ - Check if a given class is a typed dict (from `typing` or `typing_extensions`) - In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict) - """ - from .utils import lenient_issubclass - - return lenient_issubclass(type_, dict) and hasattr(type_, '__total__') - - -def _check_typeddict_special(type_: Any) -> bool: - return type_ is TypedDictRequired or type_ is TypedDictNotRequired - - -def is_typeddict_special(type_: Any) -> bool: - """ - Check if type is a TypedDict special form (Required or NotRequired). 
- """ - return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_)) - - -test_type = NewType('test_type', str) - - -def is_new_type(type_: Type[Any]) -> bool: - """ - Check whether type_ was created using typing.NewType - """ - return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__') # type: ignore - - -def new_type_supertype(type_: Type[Any]) -> Type[Any]: - while hasattr(type_, '__supertype__'): - type_ = type_.__supertype__ - return type_ - - -def _check_classvar(v: Optional[Type[Any]]) -> bool: - if v is None: - return False - - return v.__class__ == ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar' - - -def _check_finalvar(v: Optional[Type[Any]]) -> bool: - """ - Check if a given type is a `typing.Final` type. - """ - if v is None: - return False - - return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final') - - -def is_classvar(ann_type: Type[Any]) -> bool: - if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)): - return True - - # this is an ugly workaround for class vars that contain forward references and are therefore themselves - # forward references, see #3679 - if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): - return True - - return False - - -def is_finalvar(ann_type: Type[Any]) -> bool: - return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type)) - - -def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None: - """ - Try to update ForwardRefs on fields based on this ModelField, globalns and localns. 
- """ - prepare = False - if field.type_.__class__ == ForwardRef: - prepare = True - field.type_ = evaluate_forwardref(field.type_, globalns, localns or None) - if field.outer_type_.__class__ == ForwardRef: - prepare = True - field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None) - if prepare: - field.prepare() - - if field.sub_fields: - for sub_f in field.sub_fields: - update_field_forward_refs(sub_f, globalns=globalns, localns=localns) - - if field.discriminator_key is not None: - field.prepare_discriminated_union_sub_fields() - - -def update_model_forward_refs( - model: Type[Any], - fields: Iterable['ModelField'], - json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable], - localns: 'DictStrAny', - exc_to_suppress: Tuple[Type[BaseException], ...] = (), -) -> None: - """ - Try to update model fields ForwardRefs based on model and localns. - """ - if model.__module__ in sys.modules: - globalns = sys.modules[model.__module__].__dict__.copy() - else: - globalns = {} - - globalns.setdefault(model.__name__, model) - - for f in fields: - try: - update_field_forward_refs(f, globalns=globalns, localns=localns) - except exc_to_suppress: - pass - - for key in set(json_encoders.keys()): - if isinstance(key, str): - fr: ForwardRef = ForwardRef(key) - elif isinstance(key, ForwardRef): - fr = key - else: - continue - - try: - new_key = evaluate_forwardref(fr, globalns, localns or None) - except exc_to_suppress: # pragma: no cover - continue - - json_encoders[new_key] = json_encoders.pop(key) - - -def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]: - """ - Tries to get the class of a Type[T] annotation. Returns True if Type is used - without brackets. Otherwise returns None. 
- """ - if type_ is type: - return True - - if get_origin(type_) is None: - return None - - args = get_args(type_) - if not args or not isinstance(args[0], type): - return True - else: - return args[0] - - -def get_sub_types(tp: Any) -> List[Any]: - """ - Return all the types that are allowed by type `tp` - `tp` can be a `Union` of allowed types or an `Annotated` type - """ - origin = get_origin(tp) - if origin is Annotated: - return get_sub_types(get_args(tp)[0]) - elif is_union(origin): - return [x for t in get_args(tp) for x in get_sub_types(t)] - else: - return [tp] diff --git a/lib/pydantic/v1/utils.py b/lib/pydantic/v1/utils.py deleted file mode 100644 index 4d0f68ed..00000000 --- a/lib/pydantic/v1/utils.py +++ /dev/null @@ -1,803 +0,0 @@ -import keyword -import warnings -import weakref -from collections import OrderedDict, defaultdict, deque -from copy import deepcopy -from itertools import islice, zip_longest -from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType -from typing import ( - TYPE_CHECKING, - AbstractSet, - Any, - Callable, - Collection, - Dict, - Generator, - Iterable, - Iterator, - List, - Mapping, - NoReturn, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -from typing_extensions import Annotated - -from .errors import ConfigError -from .typing import ( - NoneType, - WithArgsTypes, - all_literal_values, - display_as_type, - get_args, - get_origin, - is_literal_type, - is_union, -) -from .version import version_info - -if TYPE_CHECKING: - from inspect import Signature - from pathlib import Path - - from .config import BaseConfig - from .dataclasses import Dataclass - from .fields import ModelField - from .main import BaseModel - from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs - - RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] - -__all__ = ( - 'import_string', - 'sequence_like', - 'validate_field_name', - 
'lenient_isinstance', - 'lenient_issubclass', - 'in_ipython', - 'is_valid_identifier', - 'deep_update', - 'update_not_none', - 'almost_equal_floats', - 'get_model', - 'to_camel', - 'is_valid_field', - 'smart_deepcopy', - 'PyObjectStr', - 'Representation', - 'GetterDict', - 'ValueItems', - 'version_info', # required here to match behaviour in v1.3 - 'ClassAttribute', - 'path_type', - 'ROOT_KEY', - 'get_unique_discriminator_alias', - 'get_discriminator_alias_and_values', - 'DUNDER_ATTRIBUTES', -) - -ROOT_KEY = '__root__' -# these are types that are returned unchanged by deepcopy -IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = { - int, - float, - complex, - str, - bool, - bytes, - type, - NoneType, - FunctionType, - BuiltinFunctionType, - LambdaType, - weakref.ref, - CodeType, - # note: including ModuleType will differ from behaviour of deepcopy by not producing error. - # It might be not a good idea in general, but considering that this function used only internally - # against default values of fields, this will allow to actually have a field with module as default value - ModuleType, - NotImplemented.__class__, - Ellipsis.__class__, -} - -# these are types that if empty, might be copied with simple copy() instead of deepcopy() -BUILTIN_COLLECTIONS: Set[Type[Any]] = { - list, - set, - tuple, - frozenset, - dict, - OrderedDict, - defaultdict, - deque, -} - - -def import_string(dotted_path: str) -> Any: - """ - Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the - last name in the path. Raise ImportError if the import fails. 
- """ - from importlib import import_module - - try: - module_path, class_name = dotted_path.strip(' ').rsplit('.', 1) - except ValueError as e: - raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e - - module = import_module(module_path) - try: - return getattr(module, class_name) - except AttributeError as e: - raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e - - -def truncate(v: Union[str], *, max_len: int = 80) -> str: - """ - Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long - """ - warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning) - if isinstance(v, str) and len(v) > (max_len - 2): - # -3 so quote + string + … + quote has correct length - return (v[: (max_len - 3)] + '…').__repr__() - try: - v = v.__repr__() - except TypeError: - v = v.__class__.__repr__(v) # in case v is a type - if len(v) > max_len: - v = v[: max_len - 1] + '…' - return v - - -def sequence_like(v: Any) -> bool: - return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) - - -def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None: - """ - Ensure that the field's name does not shadow an existing attribute of the model. - """ - for base in bases: - if getattr(base, field_name, None): - raise NameError( - f'Field name "{field_name}" shadows a BaseModel attribute; ' - f'use a different field name with "alias=\'{field_name}\'".' 
- ) - - -def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: - try: - return isinstance(o, class_or_tuple) # type: ignore[arg-type] - except TypeError: - return False - - -def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: - try: - return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type] - except TypeError: - if isinstance(cls, WithArgsTypes): - return False - raise # pragma: no cover - - -def in_ipython() -> bool: - """ - Check whether we're in an ipython environment, including jupyter notebooks. - """ - try: - eval('__IPYTHON__') - except NameError: - return False - else: # pragma: no cover - return True - - -def is_valid_identifier(identifier: str) -> bool: - """ - Checks that a string is a valid identifier and not a Python keyword. - :param identifier: The identifier to test. - :return: True if the identifier is valid. - """ - return identifier.isidentifier() and not keyword.iskeyword(identifier) - - -KeyType = TypeVar('KeyType') - - -def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]: - updated_mapping = mapping.copy() - for updating_mapping in updating_mappings: - for k, v in updating_mapping.items(): - if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): - updated_mapping[k] = deep_update(updated_mapping[k], v) - else: - updated_mapping[k] = v - return updated_mapping - - -def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None: - mapping.update({k: v for k, v in update.items() if v is not None}) - - -def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool: - """ - Return True if two floats are almost equal - """ - return abs(value_1 - value_2) <= delta - - -def generate_model_signature( - init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig'] -) -> 
'Signature': - """ - Generate signature for model based on its fields - """ - from inspect import Parameter, Signature, signature - - from .config import Extra - - present_params = signature(init).parameters.values() - merged_params: Dict[str, Parameter] = {} - var_kw = None - use_var_kw = False - - for param in islice(present_params, 1, None): # skip self arg - if param.kind is param.VAR_KEYWORD: - var_kw = param - continue - merged_params[param.name] = param - - if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through - allow_names = config.allow_population_by_field_name - for field_name, field in fields.items(): - param_name = field.alias - if field_name in merged_params or param_name in merged_params: - continue - elif not is_valid_identifier(param_name): - if allow_names and is_valid_identifier(field_name): - param_name = field_name - else: - use_var_kw = True - continue - - # TODO: replace annotation with actual expected types once #1055 solved - kwargs = {'default': field.default} if not field.required else {} - merged_params[param_name] = Parameter( - param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs - ) - - if config.extra is Extra.allow: - use_var_kw = True - - if var_kw and use_var_kw: - # Make sure the parameter for extra kwargs - # does not have the same name as a field - default_model_signature = [ - ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD), - ('data', Parameter.VAR_KEYWORD), - ] - if [(p.name, p.kind) for p in present_params] == default_model_signature: - # if this is the standard model signature, use extra_data as the extra args name - var_kw_name = 'extra_data' - else: - # else start from var_kw - var_kw_name = var_kw.name - - # generate a name that's definitely unique - while var_kw_name in fields: - var_kw_name += '_' - merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) - - return Signature(parameters=list(merged_params.values()), 
return_annotation=None) - - -def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']: - from .main import BaseModel - - try: - model_cls = obj.__pydantic_model__ # type: ignore - except AttributeError: - model_cls = obj - - if not issubclass(model_cls, BaseModel): - raise TypeError('Unsupported type, must be either BaseModel or dataclass') - return model_cls - - -def to_camel(string: str) -> str: - return ''.join(word.capitalize() for word in string.split('_')) - - -def to_lower_camel(string: str) -> str: - if len(string) >= 1: - pascal_string = to_camel(string) - return pascal_string[0].lower() + pascal_string[1:] - return string.lower() - - -T = TypeVar('T') - - -def unique_list( - input_list: Union[List[T], Tuple[T, ...]], - *, - name_factory: Callable[[T], str] = str, -) -> List[T]: - """ - Make a list unique while maintaining order. - We update the list if another one with the same name is set - (e.g. root validator overridden in subclass) - """ - result: List[T] = [] - result_names: List[str] = [] - for v in input_list: - v_name = name_factory(v) - if v_name not in result_names: - result_names.append(v_name) - result.append(v) - else: - result[result_names.index(v_name)] = v - - return result - - -class PyObjectStr(str): - """ - String class where repr doesn't include quotes. Useful with Representation when you want to return a string - representation of something that valid (or pseudo-valid) python. - """ - - def __repr__(self) -> str: - return str(self) - - -class Representation: - """ - Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. - - __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations - of objects. - """ - - __slots__: Tuple[str, ...] = tuple() - - def __repr_args__(self) -> 'ReprArgs': - """ - Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. 
- - Can either return: - * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` - * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` - """ - attrs = ((s, getattr(self, s)) for s in self.__slots__) - return [(a, v) for a, v in attrs if v is not None] - - def __repr_name__(self) -> str: - """ - Name of the instance's class, used in __repr__. - """ - return self.__class__.__name__ - - def __repr_str__(self, join_str: str) -> str: - return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) - - def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: - """ - Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects - """ - yield self.__repr_name__() + '(' - yield 1 - for name, value in self.__repr_args__(): - if name is not None: - yield name + '=' - yield fmt(value) - yield ',' - yield 0 - yield -1 - yield ')' - - def __str__(self) -> str: - return self.__repr_str__(' ') - - def __repr__(self) -> str: - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' - - def __rich_repr__(self) -> 'RichReprResult': - """Get fields for Rich library""" - for name, field_repr in self.__repr_args__(): - if name is None: - yield field_repr - else: - yield name, field_repr - - -class GetterDict(Representation): - """ - Hack to make object's smell just enough like dicts for validate_model. - - We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves. 
- """ - - __slots__ = ('_obj',) - - def __init__(self, obj: Any): - self._obj = obj - - def __getitem__(self, key: str) -> Any: - try: - return getattr(self._obj, key) - except AttributeError as e: - raise KeyError(key) from e - - def get(self, key: Any, default: Any = None) -> Any: - return getattr(self._obj, key, default) - - def extra_keys(self) -> Set[Any]: - """ - We don't want to get any other attributes of obj if the model didn't explicitly ask for them - """ - return set() - - def keys(self) -> List[Any]: - """ - Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python - dictionaries. - """ - return list(self) - - def values(self) -> List[Any]: - return [self[k] for k in self] - - def items(self) -> Iterator[Tuple[str, Any]]: - for k in self: - yield k, self.get(k) - - def __iter__(self) -> Iterator[str]: - for name in dir(self._obj): - if not name.startswith('_'): - yield name - - def __len__(self) -> int: - return sum(1 for _ in self) - - def __contains__(self, item: Any) -> bool: - return item in self.keys() - - def __eq__(self, other: Any) -> bool: - return dict(self) == dict(other.items()) - - def __repr_args__(self) -> 'ReprArgs': - return [(None, dict(self))] - - def __repr_name__(self) -> str: - return f'GetterDict[{display_as_type(self._obj)}]' - - -class ValueItems(Representation): - """ - Class for more convenient calculation of excluded or included fields on values. - """ - - __slots__ = ('_items', '_type') - - def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None: - items = self._coerce_items(items) - - if isinstance(value, (list, tuple)): - items = self._normalize_indexes(items, len(value)) - - self._items: 'MappingIntStrAny' = items - - def is_excluded(self, item: Any) -> bool: - """ - Check if item is fully excluded. 
- - :param item: key or index of a value - """ - return self.is_true(self._items.get(item)) - - def is_included(self, item: Any) -> bool: - """ - Check if value is contained in self._items - - :param item: key or index of value - """ - return item in self._items - - def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]: - """ - :param e: key or index of element on value - :return: raw values for element if self._items is dict and contain needed element - """ - - item = self._items.get(e) - return item if not self.is_true(item) else None - - def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny': - """ - :param items: dict or set of indexes which will be normalized - :param v_length: length of sequence indexes of which will be - - >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) - {0: True, 2: True, 3: True} - >>> self._normalize_indexes({'__all__': True}, 4) - {0: True, 1: True, 2: True, 3: True} - """ - - normalized_items: 'DictIntStrAny' = {} - all_items = None - for i, v in items.items(): - if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)): - raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') - if i == '__all__': - all_items = self._coerce_value(v) - continue - if not isinstance(i, int): - raise TypeError( - 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: ' - 'expected integer keys or keyword "__all__"' - ) - normalized_i = v_length + i if i < 0 else i - normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) - - if not all_items: - return normalized_items - if self.is_true(all_items): - for i in range(v_length): - normalized_items.setdefault(i, ...) 
- return normalized_items - for i in range(v_length): - normalized_item = normalized_items.setdefault(i, {}) - if not self.is_true(normalized_item): - normalized_items[i] = self.merge(all_items, normalized_item) - return normalized_items - - @classmethod - def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: - """ - Merge a ``base`` item with an ``override`` item. - - Both ``base`` and ``override`` are converted to dictionaries if possible. - Sets are converted to dictionaries with the sets entries as keys and - Ellipsis as values. - - Each key-value pair existing in ``base`` is merged with ``override``, - while the rest of the key-value pairs are updated recursively with this function. - - Merging takes place based on the "union" of keys if ``intersect`` is - set to ``False`` (default) and on the intersection of keys if - ``intersect`` is set to ``True``. - """ - override = cls._coerce_value(override) - base = cls._coerce_value(base) - if override is None: - return base - if cls.is_true(base) or base is None: - return override - if cls.is_true(override): - return base if intersect else override - - # intersection or union of keys while preserving ordering: - if intersect: - merge_keys = [k for k in base if k in override] + [k for k in override if k in base] - else: - merge_keys = list(base) + [k for k in override if k not in base] - - merged: 'DictIntStrAny' = {} - for k in merge_keys: - merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) - if merged_item is not None: - merged[k] = merged_item - - return merged - - @staticmethod - def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny': - if isinstance(items, Mapping): - pass - elif isinstance(items, AbstractSet): - items = dict.fromkeys(items, ...) 
- else: - class_name = getattr(items, '__class__', '???') - assert_never( - items, - f'Unexpected type of exclude value {class_name}', - ) - return items - - @classmethod - def _coerce_value(cls, value: Any) -> Any: - if value is None or cls.is_true(value): - return value - return cls._coerce_items(value) - - @staticmethod - def is_true(v: Any) -> bool: - return v is True or v is ... - - def __repr_args__(self) -> 'ReprArgs': - return [(None, self._items)] - - -class ClassAttribute: - """ - Hide class attribute from its instances - """ - - __slots__ = ( - 'name', - 'value', - ) - - def __init__(self, name: str, value: Any) -> None: - self.name = name - self.value = value - - def __get__(self, instance: Any, owner: Type[Any]) -> None: - if instance is None: - return self.value - raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') - - -path_types = { - 'is_dir': 'directory', - 'is_file': 'file', - 'is_mount': 'mount point', - 'is_symlink': 'symlink', - 'is_block_device': 'block device', - 'is_char_device': 'char device', - 'is_fifo': 'FIFO', - 'is_socket': 'socket', -} - - -def path_type(p: 'Path') -> str: - """ - Find out what sort of thing a path is. 
- """ - assert p.exists(), 'path does not exist' - for method, name in path_types.items(): - if getattr(p, method)(): - return name - - return 'unknown' - - -Obj = TypeVar('Obj') - - -def smart_deepcopy(obj: Obj) -> Obj: - """ - Return type as is for immutable built-in types - Use obj.copy() for built-in empty collections - Use copy.deepcopy() for non-empty collections and unknown objects - """ - - obj_type = obj.__class__ - if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: - return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway - try: - if not obj and obj_type in BUILTIN_COLLECTIONS: - # faster way for empty collections, no need to copy its members - return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method - except (TypeError, ValueError, RuntimeError): - # do we really dare to catch ALL errors? Seems a bit risky - pass - - return deepcopy(obj) # slowest way when we actually might need a deepcopy - - -def is_valid_field(name: str) -> bool: - if not name.startswith('_'): - return True - return ROOT_KEY == name - - -DUNDER_ATTRIBUTES = { - '__annotations__', - '__classcell__', - '__doc__', - '__module__', - '__orig_bases__', - '__orig_class__', - '__qualname__', -} - - -def is_valid_private_name(name: str) -> bool: - return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES - - -_EMPTY = object() - - -def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool: - """ - Check that the items of `left` are the same objects as those in `right`. 
- - >>> a, b = object(), object() - >>> all_identical([a, b, a], [a, b, a]) - True - >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" - False - """ - for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY): - if left_item is not right_item: - return False - return True - - -def assert_never(obj: NoReturn, msg: str) -> NoReturn: - """ - Helper to make sure that we have covered all possible types. - - This is mostly useful for ``mypy``, docs: - https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks - """ - raise TypeError(msg) - - -def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str: - """Validate that all aliases are the same and if that's the case return the alias""" - unique_aliases = set(all_aliases) - if len(unique_aliases) > 1: - raise ConfigError( - f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})' - ) - return unique_aliases.pop() - - -def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]: - """ - Get alias and all valid values in the `Literal` type of the discriminator field - `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many. 
- """ - is_root_model = getattr(tp, '__custom_root_type__', False) - - if get_origin(tp) is Annotated: - tp = get_args(tp)[0] - - if hasattr(tp, '__pydantic_model__'): - tp = tp.__pydantic_model__ - - if is_union(get_origin(tp)): - alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key) - return alias, tuple(v for values in all_values for v in values) - elif is_root_model: - union_type = tp.__fields__[ROOT_KEY].type_ - alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key) - - if len(set(all_values)) > 1: - raise ConfigError( - f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}' - ) - - return alias, all_values[0] - - else: - try: - t_discriminator_type = tp.__fields__[discriminator_key].type_ - except AttributeError as e: - raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e - except KeyError as e: - raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e - - if not is_literal_type(t_discriminator_type): - raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`') - - return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type) - - -def _get_union_alias_and_all_values( - union_type: Type[Any], discriminator_key: str -) -> Tuple[str, Tuple[Tuple[str, ...], ...]]: - zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)] - # unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))] - all_aliases, all_values = zip(*zipped_aliases_values) - return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values diff --git a/lib/pydantic/v1/validators.py b/lib/pydantic/v1/validators.py deleted file mode 100644 index 549a235e..00000000 --- a/lib/pydantic/v1/validators.py +++ /dev/null @@ -1,765 +0,0 @@ -import math 
-import re -from collections import OrderedDict, deque -from collections.abc import Hashable as CollectionsHashable -from datetime import date, datetime, time, timedelta -from decimal import Decimal, DecimalException -from enum import Enum, IntEnum -from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Deque, - Dict, - ForwardRef, - FrozenSet, - Generator, - Hashable, - List, - NamedTuple, - Pattern, - Set, - Tuple, - Type, - TypeVar, - Union, -) -from uuid import UUID - -from . import errors -from .datetime_parse import parse_date, parse_datetime, parse_duration, parse_time -from .typing import ( - AnyCallable, - all_literal_values, - display_as_type, - get_class, - is_callable_type, - is_literal_type, - is_namedtuple, - is_none_type, - is_typeddict, -) -from .utils import almost_equal_floats, lenient_issubclass, sequence_like - -if TYPE_CHECKING: - from typing_extensions import Literal, TypedDict - - from .config import BaseConfig - from .fields import ModelField - from .types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt - - ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt] - AnyOrderedDict = OrderedDict[Any, Any] - Number = Union[int, float, Decimal] - StrBytes = Union[str, bytes] - - -def str_validator(v: Any) -> Union[str]: - if isinstance(v, str): - if isinstance(v, Enum): - return v.value - else: - return v - elif isinstance(v, (float, int, Decimal)): - # is there anything else we want to add here? If you think so, create an issue. 
- return str(v) - elif isinstance(v, (bytes, bytearray)): - return v.decode() - else: - raise errors.StrError() - - -def strict_str_validator(v: Any) -> Union[str]: - if isinstance(v, str) and not isinstance(v, Enum): - return v - raise errors.StrError() - - -def bytes_validator(v: Any) -> Union[bytes]: - if isinstance(v, bytes): - return v - elif isinstance(v, bytearray): - return bytes(v) - elif isinstance(v, str): - return v.encode() - elif isinstance(v, (float, int, Decimal)): - return str(v).encode() - else: - raise errors.BytesError() - - -def strict_bytes_validator(v: Any) -> Union[bytes]: - if isinstance(v, bytes): - return v - elif isinstance(v, bytearray): - return bytes(v) - else: - raise errors.BytesError() - - -BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'} -BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'} - - -def bool_validator(v: Any) -> bool: - if v is True or v is False: - return v - if isinstance(v, bytes): - v = v.decode() - if isinstance(v, str): - v = v.lower() - try: - if v in BOOL_TRUE: - return True - if v in BOOL_FALSE: - return False - except TypeError: - raise errors.BoolError() - raise errors.BoolError() - - -# matches the default limit cpython, see https://github.com/python/cpython/pull/96500 -max_str_int = 4_300 - - -def int_validator(v: Any) -> int: - if isinstance(v, int) and not (v is True or v is False): - return v - - # see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778 - # this check should be unnecessary once patch releases are out for 3.7, 3.8, 3.9 and 3.10 - # but better to check here until then. - # NOTICE: this does not fully protect user from the DOS risk since the standard library JSON implementation - # (and other std lib modules like xml) use `int()` and are likely called before this, the best workaround is to - # 1. update to the latest patch release of python once released, 2. 
use a different JSON library like ujson - if isinstance(v, (str, bytes, bytearray)) and len(v) > max_str_int: - raise errors.IntegerError() - - try: - return int(v) - except (TypeError, ValueError, OverflowError): - raise errors.IntegerError() - - -def strict_int_validator(v: Any) -> int: - if isinstance(v, int) and not (v is True or v is False): - return v - raise errors.IntegerError() - - -def float_validator(v: Any) -> float: - if isinstance(v, float): - return v - - try: - return float(v) - except (TypeError, ValueError): - raise errors.FloatError() - - -def strict_float_validator(v: Any) -> float: - if isinstance(v, float): - return v - raise errors.FloatError() - - -def float_finite_validator(v: 'Number', field: 'ModelField', config: 'BaseConfig') -> 'Number': - allow_inf_nan = getattr(field.type_, 'allow_inf_nan', None) - if allow_inf_nan is None: - allow_inf_nan = config.allow_inf_nan - - if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)): - raise errors.NumberNotFiniteError() - return v - - -def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number': - field_type: ConstrainedNumber = field.type_ - if field_type.multiple_of is not None: - mod = float(v) / float(field_type.multiple_of) % 1 - if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0): - raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of) - return v - - -def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number': - field_type: ConstrainedNumber = field.type_ - if field_type.gt is not None and not v > field_type.gt: - raise errors.NumberNotGtError(limit_value=field_type.gt) - elif field_type.ge is not None and not v >= field_type.ge: - raise errors.NumberNotGeError(limit_value=field_type.ge) - - if field_type.lt is not None and not v < field_type.lt: - raise errors.NumberNotLtError(limit_value=field_type.lt) - if field_type.le is not None and not v <= field_type.le: - raise 
errors.NumberNotLeError(limit_value=field_type.le) - - return v - - -def constant_validator(v: 'Any', field: 'ModelField') -> 'Any': - """Validate ``const`` fields. - - The value provided for a ``const`` field must be equal to the default value - of the field. This is to support the keyword of the same name in JSON - Schema. - """ - if v != field.default: - raise errors.WrongConstantError(given=v, permitted=[field.default]) - - return v - - -def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes': - v_len = len(v) - - min_length = config.min_anystr_length - if v_len < min_length: - raise errors.AnyStrMinLengthError(limit_value=min_length) - - max_length = config.max_anystr_length - if max_length is not None and v_len > max_length: - raise errors.AnyStrMaxLengthError(limit_value=max_length) - - return v - - -def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes': - return v.strip() - - -def anystr_upper(v: 'StrBytes') -> 'StrBytes': - return v.upper() - - -def anystr_lower(v: 'StrBytes') -> 'StrBytes': - return v.lower() - - -def ordered_dict_validator(v: Any) -> 'AnyOrderedDict': - if isinstance(v, OrderedDict): - return v - - try: - return OrderedDict(v) - except (TypeError, ValueError): - raise errors.DictError() - - -def dict_validator(v: Any) -> Dict[Any, Any]: - if isinstance(v, dict): - return v - - try: - return dict(v) - except (TypeError, ValueError): - raise errors.DictError() - - -def list_validator(v: Any) -> List[Any]: - if isinstance(v, list): - return v - elif sequence_like(v): - return list(v) - else: - raise errors.ListError() - - -def tuple_validator(v: Any) -> Tuple[Any, ...]: - if isinstance(v, tuple): - return v - elif sequence_like(v): - return tuple(v) - else: - raise errors.TupleError() - - -def set_validator(v: Any) -> Set[Any]: - if isinstance(v, set): - return v - elif sequence_like(v): - return set(v) - else: - raise errors.SetError() - - -def frozenset_validator(v: Any) -> FrozenSet[Any]: - if isinstance(v, 
frozenset): - return v - elif sequence_like(v): - return frozenset(v) - else: - raise errors.FrozenSetError() - - -def deque_validator(v: Any) -> Deque[Any]: - if isinstance(v, deque): - return v - elif sequence_like(v): - return deque(v) - else: - raise errors.DequeError() - - -def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum: - try: - enum_v = field.type_(v) - except ValueError: - # field.type_ should be an enum, so will be iterable - raise errors.EnumMemberError(enum_values=list(field.type_)) - return enum_v.value if config.use_enum_values else enum_v - - -def uuid_validator(v: Any, field: 'ModelField') -> UUID: - try: - if isinstance(v, str): - v = UUID(v) - elif isinstance(v, (bytes, bytearray)): - try: - v = UUID(v.decode()) - except ValueError: - # 16 bytes in big-endian order as the bytes argument fail - # the above check - v = UUID(bytes=v) - except ValueError: - raise errors.UUIDError() - - if not isinstance(v, UUID): - raise errors.UUIDError() - - required_version = getattr(field.type_, '_required_version', None) - if required_version and v.version != required_version: - raise errors.UUIDVersionError(required_version=required_version) - - return v - - -def decimal_validator(v: Any) -> Decimal: - if isinstance(v, Decimal): - return v - elif isinstance(v, (bytes, bytearray)): - v = v.decode() - - v = str(v).strip() - - try: - v = Decimal(v) - except DecimalException: - raise errors.DecimalError() - - if not v.is_finite(): - raise errors.DecimalIsNotFiniteError() - - return v - - -def hashable_validator(v: Any) -> Hashable: - if isinstance(v, Hashable): - return v - - raise errors.HashableError() - - -def ip_v4_address_validator(v: Any) -> IPv4Address: - if isinstance(v, IPv4Address): - return v - - try: - return IPv4Address(v) - except ValueError: - raise errors.IPv4AddressError() - - -def ip_v6_address_validator(v: Any) -> IPv6Address: - if isinstance(v, IPv6Address): - return v - - try: - return IPv6Address(v) - except 
ValueError: - raise errors.IPv6AddressError() - - -def ip_v4_network_validator(v: Any) -> IPv4Network: - """ - Assume IPv4Network initialised with a default ``strict`` argument - - See more: - https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network - """ - if isinstance(v, IPv4Network): - return v - - try: - return IPv4Network(v) - except ValueError: - raise errors.IPv4NetworkError() - - -def ip_v6_network_validator(v: Any) -> IPv6Network: - """ - Assume IPv6Network initialised with a default ``strict`` argument - - See more: - https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network - """ - if isinstance(v, IPv6Network): - return v - - try: - return IPv6Network(v) - except ValueError: - raise errors.IPv6NetworkError() - - -def ip_v4_interface_validator(v: Any) -> IPv4Interface: - if isinstance(v, IPv4Interface): - return v - - try: - return IPv4Interface(v) - except ValueError: - raise errors.IPv4InterfaceError() - - -def ip_v6_interface_validator(v: Any) -> IPv6Interface: - if isinstance(v, IPv6Interface): - return v - - try: - return IPv6Interface(v) - except ValueError: - raise errors.IPv6InterfaceError() - - -def path_validator(v: Any) -> Path: - if isinstance(v, Path): - return v - - try: - return Path(v) - except TypeError: - raise errors.PathError() - - -def path_exists_validator(v: Any) -> Path: - if not v.exists(): - raise errors.PathNotExistsError(path=v) - - return v - - -def callable_validator(v: Any) -> AnyCallable: - """ - Perform a simple check if the value is callable. 
- - Note: complete matching of argument type hints and return types is not performed - """ - if callable(v): - return v - - raise errors.CallableError(value=v) - - -def enum_validator(v: Any) -> Enum: - if isinstance(v, Enum): - return v - - raise errors.EnumError(value=v) - - -def int_enum_validator(v: Any) -> IntEnum: - if isinstance(v, IntEnum): - return v - - raise errors.IntEnumError(value=v) - - -def make_literal_validator(type_: Any) -> Callable[[Any], Any]: - permitted_choices = all_literal_values(type_) - - # To have a O(1) complexity and still return one of the values set inside the `Literal`, - # we create a dict with the set values (a set causes some problems with the way intersection works). - # In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`) - allowed_choices = {v: v for v in permitted_choices} - - def literal_validator(v: Any) -> Any: - try: - return allowed_choices[v] - except (KeyError, TypeError): - raise errors.WrongConstantError(given=v, permitted=permitted_choices) - - return literal_validator - - -def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': - v_len = len(v) - - min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length - if v_len < min_length: - raise errors.AnyStrMinLengthError(limit_value=min_length) - - max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length - if max_length is not None and v_len > max_length: - raise errors.AnyStrMaxLengthError(limit_value=max_length) - - return v - - -def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': - strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace - if strip_whitespace: - v = v.strip() - - return v - - -def constr_upper(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': - upper = 
field.type_.to_upper or config.anystr_upper - if upper: - v = v.upper() - - return v - - -def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': - lower = field.type_.to_lower or config.anystr_lower - if lower: - v = v.lower() - return v - - -def validate_json(v: Any, config: 'BaseConfig') -> Any: - if v is None: - # pass None through to other validators - return v - try: - return config.json_loads(v) # type: ignore - except ValueError: - raise errors.JsonError() - except TypeError: - raise errors.JsonTypeError() - - -T = TypeVar('T') - - -def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]: - def arbitrary_type_validator(v: Any) -> T: - if isinstance(v, type_): - return v - raise errors.ArbitraryTypeError(expected_arbitrary_type=type_) - - return arbitrary_type_validator - - -def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]: - def class_validator(v: Any) -> Type[T]: - if lenient_issubclass(v, type_): - return v - raise errors.SubclassError(expected_class=type_) - - return class_validator - - -def any_class_validator(v: Any) -> Type[T]: - if isinstance(v, type): - return v - raise errors.ClassError() - - -def none_validator(v: Any) -> 'Literal[None]': - if v is None: - return v - raise errors.NotNoneError() - - -def pattern_validator(v: Any) -> Pattern[str]: - if isinstance(v, Pattern): - return v - - str_value = str_validator(v) - - try: - return re.compile(str_value) - except re.error: - raise errors.PatternError() - - -NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple) - - -def make_namedtuple_validator( - namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig'] -) -> Callable[[Tuple[Any, ...]], NamedTupleT]: - from .annotated_types import create_model_from_namedtuple - - NamedTupleModel = create_model_from_namedtuple( - namedtuple_cls, - __config__=config, - __module__=namedtuple_cls.__module__, - ) - namedtuple_cls.__pydantic_model__ = NamedTupleModel # type: ignore[attr-defined] 
- - def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT: - annotations = NamedTupleModel.__annotations__ - - if len(values) > len(annotations): - raise errors.ListMaxLengthError(limit_value=len(annotations)) - - dict_values: Dict[str, Any] = dict(zip(annotations, values)) - validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values)) - return namedtuple_cls(**validated_dict_values) - - return namedtuple_validator - - -def make_typeddict_validator( - typeddict_cls: Type['TypedDict'], config: Type['BaseConfig'] # type: ignore[valid-type] -) -> Callable[[Any], Dict[str, Any]]: - from .annotated_types import create_model_from_typeddict - - TypedDictModel = create_model_from_typeddict( - typeddict_cls, - __config__=config, - __module__=typeddict_cls.__module__, - ) - typeddict_cls.__pydantic_model__ = TypedDictModel # type: ignore[attr-defined] - - def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]: # type: ignore[valid-type] - return TypedDictModel.parse_obj(values).dict(exclude_unset=True) - - return typeddict_validator - - -class IfConfig: - def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None: - self.validator = validator - self.config_attr_names = config_attr_names - self.ignored_value = ignored_value - - def check(self, config: Type['BaseConfig']) -> bool: - return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names) - - -# order is important here, for example: bool is a subclass of int so has to come first, datetime before date same, -# IPv4Interface before IPv4Address, etc -_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [ - (IntEnum, [int_validator, enum_member_validator]), - (Enum, [enum_member_validator]), - ( - str, - [ - str_validator, - IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), - IfConfig(anystr_upper, 'anystr_upper'), - IfConfig(anystr_lower, 'anystr_lower'), - IfConfig(anystr_length_validator, 
'min_anystr_length', 'max_anystr_length'), - ], - ), - ( - bytes, - [ - bytes_validator, - IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), - IfConfig(anystr_upper, 'anystr_upper'), - IfConfig(anystr_lower, 'anystr_lower'), - IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), - ], - ), - (bool, [bool_validator]), - (int, [int_validator]), - (float, [float_validator, IfConfig(float_finite_validator, 'allow_inf_nan', ignored_value=True)]), - (Path, [path_validator]), - (datetime, [parse_datetime]), - (date, [parse_date]), - (time, [parse_time]), - (timedelta, [parse_duration]), - (OrderedDict, [ordered_dict_validator]), - (dict, [dict_validator]), - (list, [list_validator]), - (tuple, [tuple_validator]), - (set, [set_validator]), - (frozenset, [frozenset_validator]), - (deque, [deque_validator]), - (UUID, [uuid_validator]), - (Decimal, [decimal_validator]), - (IPv4Interface, [ip_v4_interface_validator]), - (IPv6Interface, [ip_v6_interface_validator]), - (IPv4Address, [ip_v4_address_validator]), - (IPv6Address, [ip_v6_address_validator]), - (IPv4Network, [ip_v4_network_validator]), - (IPv6Network, [ip_v6_network_validator]), -] - - -def find_validators( # noqa: C901 (ignore complexity) - type_: Type[Any], config: Type['BaseConfig'] -) -> Generator[AnyCallable, None, None]: - from .dataclasses import is_builtin_dataclass, make_dataclass_validator - - if type_ is Any or type_ is object: - return - type_type = type_.__class__ - if type_type == ForwardRef or type_type == TypeVar: - return - - if is_none_type(type_): - yield none_validator - return - if type_ is Pattern or type_ is re.Pattern: - yield pattern_validator - return - if type_ is Hashable or type_ is CollectionsHashable: - yield hashable_validator - return - if is_callable_type(type_): - yield callable_validator - return - if is_literal_type(type_): - yield make_literal_validator(type_) - return - if is_builtin_dataclass(type_): - yield from make_dataclass_validator(type_, 
config) - return - if type_ is Enum: - yield enum_validator - return - if type_ is IntEnum: - yield int_enum_validator - return - if is_namedtuple(type_): - yield tuple_validator - yield make_namedtuple_validator(type_, config) - return - if is_typeddict(type_): - yield make_typeddict_validator(type_, config) - return - - class_ = get_class(type_) - if class_ is not None: - if class_ is not Any and isinstance(class_, type): - yield make_class_validator(class_) - else: - yield any_class_validator - return - - for val_type, validators in _VALIDATORS: - try: - if issubclass(type_, val_type): - for v in validators: - if isinstance(v, IfConfig): - if v.check(config): - yield v.validator - else: - yield v - return - except TypeError: - raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})') - - if config.arbitrary_types_allowed: - yield make_arbitrary_type_validator(type_) - else: - raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config') diff --git a/lib/pydantic/v1/version.py b/lib/pydantic/v1/version.py deleted file mode 100644 index ec982ba7..00000000 --- a/lib/pydantic/v1/version.py +++ /dev/null @@ -1,38 +0,0 @@ -__all__ = 'compiled', 'VERSION', 'version_info' - -VERSION = '1.10.14' - -try: - import cython # type: ignore -except ImportError: - compiled: bool = False -else: # pragma: no cover - try: - compiled = cython.compiled - except AttributeError: - compiled = False - - -def version_info() -> str: - import platform - import sys - from importlib import import_module - from pathlib import Path - - optional_deps = [] - for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'): - try: - import_module(p.replace('-', '_')) - except ImportError: - continue - optional_deps.append(p) - - info = { - 'pydantic version': VERSION, - 'pydantic compiled': compiled, - 'install path': Path(__file__).resolve().parent, - 'python version': sys.version, - 'platform': platform.platform(), - 
'optional deps. installed': optional_deps, - } - return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items()) diff --git a/lib/pydantic/validate_call_decorator.py b/lib/pydantic/validate_call_decorator.py deleted file mode 100644 index b95fa3b6..00000000 --- a/lib/pydantic/validate_call_decorator.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Decorator for validating function calls.""" -from __future__ import annotations as _annotations - -import functools -from typing import TYPE_CHECKING, Any, Callable, TypeVar, overload - -from ._internal import _validate_call - -__all__ = ('validate_call',) - -if TYPE_CHECKING: - from .config import ConfigDict - - AnyCallableT = TypeVar('AnyCallableT', bound=Callable[..., Any]) - - -@overload -def validate_call( - *, config: ConfigDict | None = None, validate_return: bool = False -) -> Callable[[AnyCallableT], AnyCallableT]: - ... - - -@overload -def validate_call(__func: AnyCallableT) -> AnyCallableT: - ... - - -def validate_call( - __func: AnyCallableT | None = None, - *, - config: ConfigDict | None = None, - validate_return: bool = False, -) -> AnyCallableT | Callable[[AnyCallableT], AnyCallableT]: - """Usage docs: https://docs.pydantic.dev/2.6/concepts/validation_decorator/ - - Returns a decorated wrapper around the function that validates the arguments and, optionally, the return value. - - Usage may be either as a plain decorator `@validate_call` or with arguments `@validate_call(...)`. - - Args: - __func: The function to be decorated. - config: The configuration dictionary. - validate_return: Whether to validate the return value. - - Returns: - The decorated function. 
- """ - - def validate(function: AnyCallableT) -> AnyCallableT: - if isinstance(function, (classmethod, staticmethod)): - name = type(function).__name__ - raise TypeError(f'The `@{name}` decorator should be applied after `@validate_call` (put `@{name}` on top)') - validate_call_wrapper = _validate_call.ValidateCallWrapper(function, config, validate_return) - - @functools.wraps(function) - def wrapper_function(*args, **kwargs): - return validate_call_wrapper(*args, **kwargs) - - wrapper_function.raw_function = function # type: ignore - - return wrapper_function # type: ignore - - if __func: - return validate(__func) - else: - return validate diff --git a/lib/pydantic/validators.py b/lib/pydantic/validators.py index 55b0339e..fb6d0418 100644 --- a/lib/pydantic/validators.py +++ b/lib/pydantic/validators.py @@ -1,4 +1,765 @@ -"""The `validators` module is a backport module from V1.""" -from ._migration import getattr_migration +import math +import re +from collections import OrderedDict, deque +from collections.abc import Hashable as CollectionsHashable +from datetime import date, datetime, time, timedelta +from decimal import Decimal, DecimalException +from enum import Enum, IntEnum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Hashable, + List, + NamedTuple, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, +) +from uuid import UUID -__getattr__ = getattr_migration(__name__) +from . 
import errors +from .datetime_parse import parse_date, parse_datetime, parse_duration, parse_time +from .typing import ( + AnyCallable, + all_literal_values, + display_as_type, + get_class, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_typeddict, +) +from .utils import almost_equal_floats, lenient_issubclass, sequence_like + +if TYPE_CHECKING: + from typing_extensions import Literal, TypedDict + + from .config import BaseConfig + from .fields import ModelField + from .types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt + + ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt] + AnyOrderedDict = OrderedDict[Any, Any] + Number = Union[int, float, Decimal] + StrBytes = Union[str, bytes] + + +def str_validator(v: Any) -> Union[str]: + if isinstance(v, str): + if isinstance(v, Enum): + return v.value + else: + return v + elif isinstance(v, (float, int, Decimal)): + # is there anything else we want to add here? If you think so, create an issue. 
+ return str(v) + elif isinstance(v, (bytes, bytearray)): + return v.decode() + else: + raise errors.StrError() + + +def strict_str_validator(v: Any) -> Union[str]: + if isinstance(v, str) and not isinstance(v, Enum): + return v + raise errors.StrError() + + +def bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + elif isinstance(v, str): + return v.encode() + elif isinstance(v, (float, int, Decimal)): + return str(v).encode() + else: + raise errors.BytesError() + + +def strict_bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + else: + raise errors.BytesError() + + +BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'} +BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'} + + +def bool_validator(v: Any) -> bool: + if v is True or v is False: + return v + if isinstance(v, bytes): + v = v.decode() + if isinstance(v, str): + v = v.lower() + try: + if v in BOOL_TRUE: + return True + if v in BOOL_FALSE: + return False + except TypeError: + raise errors.BoolError() + raise errors.BoolError() + + +# matches the default limit cpython, see https://github.com/python/cpython/pull/96500 +max_str_int = 4_300 + + +def int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + + # see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778 + # this check should be unnecessary once patch releases are out for 3.7, 3.8, 3.9 and 3.10 + # but better to check here until then. + # NOTICE: this does not fully protect user from the DOS risk since the standard library JSON implementation + # (and other std lib modules like xml) use `int()` and are likely called before this, the best workaround is to + # 1. update to the latest patch release of python once released, 2. 
use a different JSON library like ujson + if isinstance(v, (str, bytes, bytearray)) and len(v) > max_str_int: + raise errors.IntegerError() + + try: + return int(v) + except (TypeError, ValueError, OverflowError): + raise errors.IntegerError() + + +def strict_int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + raise errors.IntegerError() + + +def float_validator(v: Any) -> float: + if isinstance(v, float): + return v + + try: + return float(v) + except (TypeError, ValueError): + raise errors.FloatError() + + +def strict_float_validator(v: Any) -> float: + if isinstance(v, float): + return v + raise errors.FloatError() + + +def float_finite_validator(v: 'Number', field: 'ModelField', config: 'BaseConfig') -> 'Number': + allow_inf_nan = getattr(field.type_, 'allow_inf_nan', None) + if allow_inf_nan is None: + allow_inf_nan = config.allow_inf_nan + + if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)): + raise errors.NumberNotFiniteError() + return v + + +def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number': + field_type: ConstrainedNumber = field.type_ + if field_type.multiple_of is not None: + mod = float(v) / float(field_type.multiple_of) % 1 + if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0): + raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of) + return v + + +def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number': + field_type: ConstrainedNumber = field.type_ + if field_type.gt is not None and not v > field_type.gt: + raise errors.NumberNotGtError(limit_value=field_type.gt) + elif field_type.ge is not None and not v >= field_type.ge: + raise errors.NumberNotGeError(limit_value=field_type.ge) + + if field_type.lt is not None and not v < field_type.lt: + raise errors.NumberNotLtError(limit_value=field_type.lt) + if field_type.le is not None and not v <= field_type.le: + raise 
errors.NumberNotLeError(limit_value=field_type.le) + + return v + + +def constant_validator(v: 'Any', field: 'ModelField') -> 'Any': + """Validate ``const`` fields. + + The value provided for a ``const`` field must be equal to the default value + of the field. This is to support the keyword of the same name in JSON + Schema. + """ + if v != field.default: + raise errors.WrongConstantError(given=v, permitted=[field.default]) + + return v + + +def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes': + v_len = len(v) + + min_length = config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes': + return v.strip() + + +def anystr_upper(v: 'StrBytes') -> 'StrBytes': + return v.upper() + + +def anystr_lower(v: 'StrBytes') -> 'StrBytes': + return v.lower() + + +def ordered_dict_validator(v: Any) -> 'AnyOrderedDict': + if isinstance(v, OrderedDict): + return v + + try: + return OrderedDict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def dict_validator(v: Any) -> Dict[Any, Any]: + if isinstance(v, dict): + return v + + try: + return dict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def list_validator(v: Any) -> List[Any]: + if isinstance(v, list): + return v + elif sequence_like(v): + return list(v) + else: + raise errors.ListError() + + +def tuple_validator(v: Any) -> Tuple[Any, ...]: + if isinstance(v, tuple): + return v + elif sequence_like(v): + return tuple(v) + else: + raise errors.TupleError() + + +def set_validator(v: Any) -> Set[Any]: + if isinstance(v, set): + return v + elif sequence_like(v): + return set(v) + else: + raise errors.SetError() + + +def frozenset_validator(v: Any) -> FrozenSet[Any]: + if isinstance(v, 
frozenset): + return v + elif sequence_like(v): + return frozenset(v) + else: + raise errors.FrozenSetError() + + +def deque_validator(v: Any) -> Deque[Any]: + if isinstance(v, deque): + return v + elif sequence_like(v): + return deque(v) + else: + raise errors.DequeError() + + +def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum: + try: + enum_v = field.type_(v) + except ValueError: + # field.type_ should be an enum, so will be iterable + raise errors.EnumMemberError(enum_values=list(field.type_)) + return enum_v.value if config.use_enum_values else enum_v + + +def uuid_validator(v: Any, field: 'ModelField') -> UUID: + try: + if isinstance(v, str): + v = UUID(v) + elif isinstance(v, (bytes, bytearray)): + try: + v = UUID(v.decode()) + except ValueError: + # 16 bytes in big-endian order as the bytes argument fail + # the above check + v = UUID(bytes=v) + except ValueError: + raise errors.UUIDError() + + if not isinstance(v, UUID): + raise errors.UUIDError() + + required_version = getattr(field.type_, '_required_version', None) + if required_version and v.version != required_version: + raise errors.UUIDVersionError(required_version=required_version) + + return v + + +def decimal_validator(v: Any) -> Decimal: + if isinstance(v, Decimal): + return v + elif isinstance(v, (bytes, bytearray)): + v = v.decode() + + v = str(v).strip() + + try: + v = Decimal(v) + except DecimalException: + raise errors.DecimalError() + + if not v.is_finite(): + raise errors.DecimalIsNotFiniteError() + + return v + + +def hashable_validator(v: Any) -> Hashable: + if isinstance(v, Hashable): + return v + + raise errors.HashableError() + + +def ip_v4_address_validator(v: Any) -> IPv4Address: + if isinstance(v, IPv4Address): + return v + + try: + return IPv4Address(v) + except ValueError: + raise errors.IPv4AddressError() + + +def ip_v6_address_validator(v: Any) -> IPv6Address: + if isinstance(v, IPv6Address): + return v + + try: + return IPv6Address(v) + except 
ValueError: + raise errors.IPv6AddressError() + + +def ip_v4_network_validator(v: Any) -> IPv4Network: + """ + Assume IPv4Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network + """ + if isinstance(v, IPv4Network): + return v + + try: + return IPv4Network(v) + except ValueError: + raise errors.IPv4NetworkError() + + +def ip_v6_network_validator(v: Any) -> IPv6Network: + """ + Assume IPv6Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network + """ + if isinstance(v, IPv6Network): + return v + + try: + return IPv6Network(v) + except ValueError: + raise errors.IPv6NetworkError() + + +def ip_v4_interface_validator(v: Any) -> IPv4Interface: + if isinstance(v, IPv4Interface): + return v + + try: + return IPv4Interface(v) + except ValueError: + raise errors.IPv4InterfaceError() + + +def ip_v6_interface_validator(v: Any) -> IPv6Interface: + if isinstance(v, IPv6Interface): + return v + + try: + return IPv6Interface(v) + except ValueError: + raise errors.IPv6InterfaceError() + + +def path_validator(v: Any) -> Path: + if isinstance(v, Path): + return v + + try: + return Path(v) + except TypeError: + raise errors.PathError() + + +def path_exists_validator(v: Any) -> Path: + if not v.exists(): + raise errors.PathNotExistsError(path=v) + + return v + + +def callable_validator(v: Any) -> AnyCallable: + """ + Perform a simple check if the value is callable. 
+ + Note: complete matching of argument type hints and return types is not performed + """ + if callable(v): + return v + + raise errors.CallableError(value=v) + + +def enum_validator(v: Any) -> Enum: + if isinstance(v, Enum): + return v + + raise errors.EnumError(value=v) + + +def int_enum_validator(v: Any) -> IntEnum: + if isinstance(v, IntEnum): + return v + + raise errors.IntEnumError(value=v) + + +def make_literal_validator(type_: Any) -> Callable[[Any], Any]: + permitted_choices = all_literal_values(type_) + + # To have a O(1) complexity and still return one of the values set inside the `Literal`, + # we create a dict with the set values (a set causes some problems with the way intersection works). + # In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`) + allowed_choices = {v: v for v in permitted_choices} + + def literal_validator(v: Any) -> Any: + try: + return allowed_choices[v] + except KeyError: + raise errors.WrongConstantError(given=v, permitted=permitted_choices) + + return literal_validator + + +def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + v_len = len(v) + + min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace + if strip_whitespace: + v = v.strip() + + return v + + +def constr_upper(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + upper = field.type_.to_upper or 
config.anystr_upper + if upper: + v = v.upper() + + return v + + +def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + lower = field.type_.to_lower or config.anystr_lower + if lower: + v = v.lower() + return v + + +def validate_json(v: Any, config: 'BaseConfig') -> Any: + if v is None: + # pass None through to other validators + return v + try: + return config.json_loads(v) # type: ignore + except ValueError: + raise errors.JsonError() + except TypeError: + raise errors.JsonTypeError() + + +T = TypeVar('T') + + +def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]: + def arbitrary_type_validator(v: Any) -> T: + if isinstance(v, type_): + return v + raise errors.ArbitraryTypeError(expected_arbitrary_type=type_) + + return arbitrary_type_validator + + +def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]: + def class_validator(v: Any) -> Type[T]: + if lenient_issubclass(v, type_): + return v + raise errors.SubclassError(expected_class=type_) + + return class_validator + + +def any_class_validator(v: Any) -> Type[T]: + if isinstance(v, type): + return v + raise errors.ClassError() + + +def none_validator(v: Any) -> 'Literal[None]': + if v is None: + return v + raise errors.NotNoneError() + + +def pattern_validator(v: Any) -> Pattern[str]: + if isinstance(v, Pattern): + return v + + str_value = str_validator(v) + + try: + return re.compile(str_value) + except re.error: + raise errors.PatternError() + + +NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple) + + +def make_namedtuple_validator( + namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig'] +) -> Callable[[Tuple[Any, ...]], NamedTupleT]: + from .annotated_types import create_model_from_namedtuple + + NamedTupleModel = create_model_from_namedtuple( + namedtuple_cls, + __config__=config, + __module__=namedtuple_cls.__module__, + ) + namedtuple_cls.__pydantic_model__ = NamedTupleModel # type: ignore[attr-defined] + + def 
namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT: + annotations = NamedTupleModel.__annotations__ + + if len(values) > len(annotations): + raise errors.ListMaxLengthError(limit_value=len(annotations)) + + dict_values: Dict[str, Any] = dict(zip(annotations, values)) + validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values)) + return namedtuple_cls(**validated_dict_values) + + return namedtuple_validator + + +def make_typeddict_validator( + typeddict_cls: Type['TypedDict'], config: Type['BaseConfig'] # type: ignore[valid-type] +) -> Callable[[Any], Dict[str, Any]]: + from .annotated_types import create_model_from_typeddict + + TypedDictModel = create_model_from_typeddict( + typeddict_cls, + __config__=config, + __module__=typeddict_cls.__module__, + ) + typeddict_cls.__pydantic_model__ = TypedDictModel # type: ignore[attr-defined] + + def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]: # type: ignore[valid-type] + return TypedDictModel.parse_obj(values).dict(exclude_unset=True) + + return typeddict_validator + + +class IfConfig: + def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None: + self.validator = validator + self.config_attr_names = config_attr_names + self.ignored_value = ignored_value + + def check(self, config: Type['BaseConfig']) -> bool: + return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names) + + +# order is important here, for example: bool is a subclass of int so has to come first, datetime before date same, +# IPv4Interface before IPv4Address, etc +_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [ + (IntEnum, [int_validator, enum_member_validator]), + (Enum, [enum_member_validator]), + ( + str, + [ + str_validator, + IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), + IfConfig(anystr_upper, 'anystr_upper'), + IfConfig(anystr_lower, 'anystr_lower'), + IfConfig(anystr_length_validator, 
'min_anystr_length', 'max_anystr_length'), + ], + ), + ( + bytes, + [ + bytes_validator, + IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), + IfConfig(anystr_upper, 'anystr_upper'), + IfConfig(anystr_lower, 'anystr_lower'), + IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), + ], + ), + (bool, [bool_validator]), + (int, [int_validator]), + (float, [float_validator, IfConfig(float_finite_validator, 'allow_inf_nan', ignored_value=True)]), + (Path, [path_validator]), + (datetime, [parse_datetime]), + (date, [parse_date]), + (time, [parse_time]), + (timedelta, [parse_duration]), + (OrderedDict, [ordered_dict_validator]), + (dict, [dict_validator]), + (list, [list_validator]), + (tuple, [tuple_validator]), + (set, [set_validator]), + (frozenset, [frozenset_validator]), + (deque, [deque_validator]), + (UUID, [uuid_validator]), + (Decimal, [decimal_validator]), + (IPv4Interface, [ip_v4_interface_validator]), + (IPv6Interface, [ip_v6_interface_validator]), + (IPv4Address, [ip_v4_address_validator]), + (IPv6Address, [ip_v6_address_validator]), + (IPv4Network, [ip_v4_network_validator]), + (IPv6Network, [ip_v6_network_validator]), +] + + +def find_validators( # noqa: C901 (ignore complexity) + type_: Type[Any], config: Type['BaseConfig'] +) -> Generator[AnyCallable, None, None]: + from .dataclasses import is_builtin_dataclass, make_dataclass_validator + + if type_ is Any or type_ is object: + return + type_type = type_.__class__ + if type_type == ForwardRef or type_type == TypeVar: + return + + if is_none_type(type_): + yield none_validator + return + if type_ is Pattern or type_ is re.Pattern: + yield pattern_validator + return + if type_ is Hashable or type_ is CollectionsHashable: + yield hashable_validator + return + if is_callable_type(type_): + yield callable_validator + return + if is_literal_type(type_): + yield make_literal_validator(type_) + return + if is_builtin_dataclass(type_): + yield from make_dataclass_validator(type_, 
config) + return + if type_ is Enum: + yield enum_validator + return + if type_ is IntEnum: + yield int_enum_validator + return + if is_namedtuple(type_): + yield tuple_validator + yield make_namedtuple_validator(type_, config) + return + if is_typeddict(type_): + yield make_typeddict_validator(type_, config) + return + + class_ = get_class(type_) + if class_ is not None: + if class_ is not Any and isinstance(class_, type): + yield make_class_validator(class_) + else: + yield any_class_validator + return + + for val_type, validators in _VALIDATORS: + try: + if issubclass(type_, val_type): + for v in validators: + if isinstance(v, IfConfig): + if v.check(config): + yield v.validator + else: + yield v + return + except TypeError: + raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})') + + if config.arbitrary_types_allowed: + yield make_arbitrary_type_validator(type_) + else: + raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config') diff --git a/lib/pydantic/version.py b/lib/pydantic/version.py index 3e233771..32c61633 100644 --- a/lib/pydantic/version.py +++ b/lib/pydantic/version.py @@ -1,80 +1,38 @@ -"""The `version` module holds the version information for Pydantic.""" -from __future__ import annotations as _annotations +__all__ = 'compiled', 'VERSION', 'version_info' -__all__ = 'VERSION', 'version_info' +VERSION = '1.10.2' -VERSION = '2.6.4' -"""The version of Pydantic.""" - - -def version_short() -> str: - """Return the `major.minor` part of Pydantic version. - - It returns '2.1' if Pydantic version is '2.1.1'. 
- """ - return '.'.join(VERSION.split('.')[:2]) +try: + import cython # type: ignore +except ImportError: + compiled: bool = False +else: # pragma: no cover + try: + compiled = cython.compiled + except AttributeError: + compiled = False def version_info() -> str: - """Return complete version information for Pydantic and its dependencies.""" - import importlib.metadata as importlib_metadata - import os import platform import sys + from importlib import import_module from pathlib import Path - import pydantic_core._pydantic_core as pdc - - from ._internal import _git as git - - # get data about packages that are closely related to pydantic, use pydantic or often conflict with pydantic - package_names = { - 'email-validator', - 'fastapi', - 'mypy', - 'pydantic-extra-types', - 'pydantic-settings', - 'pyright', - 'typing_extensions', - } - related_packages = [] - - for dist in importlib_metadata.distributions(): - name = dist.metadata['Name'] - if name in package_names: - related_packages.append(f'{name}-{dist.version}') - - pydantic_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) - most_recent_commit = ( - git.git_revision(pydantic_dir) if git.is_git_repo(pydantic_dir) and git.have_git() else 'unknown' - ) + optional_deps = [] + for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'): + try: + import_module(p.replace('-', '_')) + except ImportError: + continue + optional_deps.append(p) info = { 'pydantic version': VERSION, - 'pydantic-core version': pdc.__version__, - 'pydantic-core build': getattr(pdc, 'build_info', None) or pdc.build_profile, + 'pydantic compiled': compiled, 'install path': Path(__file__).resolve().parent, 'python version': sys.version, 'platform': platform.platform(), - 'related packages': ' '.join(related_packages), - 'commit': most_recent_commit, + 'optional deps. 
installed': optional_deps, } return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items()) - - -def parse_mypy_version(version: str) -> tuple[int, ...]: - """Parse mypy string version to tuple of ints. - - This function is included here rather than the mypy plugin file because the mypy plugin file cannot be imported - outside a mypy run. - - It parses normal version like `0.930` and dev version - like `0.940+dev.04cac4b5d911c4f9529e6ce86a27b44f28846f5d.dirty`. - - Args: - version: The mypy version string. - - Returns: - A tuple of ints. e.g. (0, 930). - """ - return tuple(map(int, version.partition('+')[0].split('.'))) diff --git a/lib/pydantic/warnings.py b/lib/pydantic/warnings.py deleted file mode 100644 index aedd4fba..00000000 --- a/lib/pydantic/warnings.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Pydantic-specific warnings.""" -from __future__ import annotations as _annotations - -from .version import version_short - -__all__ = 'PydanticDeprecatedSince20', 'PydanticDeprecationWarning' - - -class PydanticDeprecationWarning(DeprecationWarning): - """A Pydantic specific deprecation warning. - - This warning is raised when using deprecated functionality in Pydantic. It provides information on when the - deprecation was introduced and the expected version in which the corresponding functionality will be removed. - - Attributes: - message: Description of the warning. - since: Pydantic version in what the deprecation was introduced. - expected_removal: Pydantic version in what the corresponding functionality expected to be removed. 
- """ - - message: str - since: tuple[int, int] - expected_removal: tuple[int, int] - - def __init__( - self, message: str, *args: object, since: tuple[int, int], expected_removal: tuple[int, int] | None = None - ) -> None: - super().__init__(message, *args) - self.message = message.rstrip('.') - self.since = since - self.expected_removal = expected_removal if expected_removal is not None else (since[0] + 1, 0) - - def __str__(self) -> str: - message = ( - f'{self.message}. Deprecated in Pydantic V{self.since[0]}.{self.since[1]}' - f' to be removed in V{self.expected_removal[0]}.{self.expected_removal[1]}.' - ) - if self.since == (2, 0): - message += f' See Pydantic V2 Migration Guide at https://errors.pydantic.dev/{version_short()}/migration/' - return message - - -class PydanticDeprecatedSince20(PydanticDeprecationWarning): - """A specific `PydanticDeprecationWarning` subclass defining functionality deprecated since Pydantic 2.0.""" - - def __init__(self, message: str, *args: object) -> None: - super().__init__(message, *args, since=(2, 0), expected_removal=(3, 0)) - - -class PydanticDeprecatedSince26(PydanticDeprecationWarning): - """A specific `PydanticDeprecationWarning` subclass defining functionality deprecated since Pydantic 2.6.""" - - def __init__(self, message: str, *args: object) -> None: - super().__init__(message, *args, since=(2, 0), expected_removal=(3, 0)) - - -class GenericBeforeBaseModelWarning(Warning): - pass diff --git a/lib/pydantic_core/__init__.py b/lib/pydantic_core/__init__.py deleted file mode 100644 index 5b2655c9..00000000 --- a/lib/pydantic_core/__init__.py +++ /dev/null @@ -1,139 +0,0 @@ -from __future__ import annotations - -import sys as _sys -from typing import Any as _Any - -from ._pydantic_core import ( - ArgsKwargs, - MultiHostUrl, - PydanticCustomError, - PydanticKnownError, - PydanticOmit, - PydanticSerializationError, - PydanticSerializationUnexpectedValue, - PydanticUndefined, - PydanticUndefinedType, - PydanticUseDefault, 
- SchemaError, - SchemaSerializer, - SchemaValidator, - Some, - TzInfo, - Url, - ValidationError, - __version__, - from_json, - to_json, - to_jsonable_python, - validate_core_schema, -) -from .core_schema import CoreConfig, CoreSchema, CoreSchemaType, ErrorType - -if _sys.version_info < (3, 11): - from typing_extensions import NotRequired as _NotRequired -else: - from typing import NotRequired as _NotRequired - -if _sys.version_info < (3, 9): - from typing_extensions import TypedDict as _TypedDict -else: - from typing import TypedDict as _TypedDict - -__all__ = [ - '__version__', - 'CoreConfig', - 'CoreSchema', - 'CoreSchemaType', - 'SchemaValidator', - 'SchemaSerializer', - 'Some', - 'Url', - 'MultiHostUrl', - 'ArgsKwargs', - 'PydanticUndefined', - 'PydanticUndefinedType', - 'SchemaError', - 'ErrorDetails', - 'InitErrorDetails', - 'ValidationError', - 'PydanticCustomError', - 'PydanticKnownError', - 'PydanticOmit', - 'PydanticUseDefault', - 'PydanticSerializationError', - 'PydanticSerializationUnexpectedValue', - 'TzInfo', - 'to_json', - 'from_json', - 'to_jsonable_python', - 'validate_core_schema', -] - - -class ErrorDetails(_TypedDict): - type: str - """ - The type of error that occurred, this is an identifier designed for - programmatic use that will change rarely or never. - - `type` is unique for each error message, and can hence be used as an identifier to build custom error messages. - """ - loc: tuple[int | str, ...] - """Tuple of strings and ints identifying where in the schema the error occurred.""" - msg: str - """A human readable error message.""" - input: _Any - """The input data at this `loc` that caused the error.""" - ctx: _NotRequired[dict[str, _Any]] - """ - Values which are required to render the error message, and could hence be useful in rendering custom error messages. - Also useful for passing custom error data forward. 
- """ - - -class InitErrorDetails(_TypedDict): - type: str | PydanticCustomError - """The type of error that occurred, this should a "slug" identifier that changes rarely or never.""" - loc: _NotRequired[tuple[int | str, ...]] - """Tuple of strings and ints identifying where in the schema the error occurred.""" - input: _Any - """The input data at this `loc` that caused the error.""" - ctx: _NotRequired[dict[str, _Any]] - """ - Values which are required to render the error message, and could hence be useful in rendering custom error messages. - Also useful for passing custom error data forward. - """ - - -class ErrorTypeInfo(_TypedDict): - """ - Gives information about errors. - """ - - type: ErrorType - """The type of error that occurred, this should a "slug" identifier that changes rarely or never.""" - message_template_python: str - """String template to render a human readable error message from using context, when the input is Python.""" - example_message_python: str - """Example of a human readable error message, when the input is Python.""" - message_template_json: _NotRequired[str] - """String template to render a human readable error message from using context, when the input is JSON data.""" - example_message_json: _NotRequired[str] - """Example of a human readable error message, when the input is JSON data.""" - example_context: dict[str, _Any] | None - """Example of context values.""" - - -class MultiHostHost(_TypedDict): - """ - A host part of a multi-host URL. 
- """ - - username: str | None - """The username part of this host, or `None`.""" - password: str | None - """The password part of this host, or `None`.""" - host: str | None - """The host part of this host, or `None`.""" - port: int | None - """The port part of this host, or `None`.""" diff --git a/lib/pydantic_core/_pydantic_core.pyi b/lib/pydantic_core/_pydantic_core.pyi deleted file mode 100644 index a7b727f8..00000000 --- a/lib/pydantic_core/_pydantic_core.pyi +++ /dev/null @@ -1,882 +0,0 @@ -from __future__ import annotations - -import datetime -import sys -from typing import Any, Callable, Generic, Optional, Type, TypeVar - -from pydantic_core import ErrorDetails, ErrorTypeInfo, InitErrorDetails, MultiHostHost -from pydantic_core.core_schema import CoreConfig, CoreSchema, ErrorType - -if sys.version_info < (3, 8): - from typing_extensions import final -else: - from typing import final - -if sys.version_info < (3, 11): - from typing_extensions import Literal, LiteralString, Self, TypeAlias -else: - from typing import Literal, LiteralString, Self, TypeAlias - -from _typeshed import SupportsAllComparisons - -__all__ = [ - '__version__', - 'build_profile', - 'build_info', - '_recursion_limit', - 'ArgsKwargs', - 'SchemaValidator', - 'SchemaSerializer', - 'Url', - 'MultiHostUrl', - 'SchemaError', - 'ValidationError', - 'PydanticCustomError', - 'PydanticKnownError', - 'PydanticOmit', - 'PydanticUseDefault', - 'PydanticSerializationError', - 'PydanticSerializationUnexpectedValue', - 'PydanticUndefined', - 'PydanticUndefinedType', - 'Some', - 'to_json', - 'from_json', - 'to_jsonable_python', - 'list_all_errors', - 'TzInfo', - 'validate_core_schema', -] -__version__: str -build_profile: str -build_info: str -_recursion_limit: int - -_T = TypeVar('_T', default=Any, covariant=True) - -_StringInput: TypeAlias = 'dict[str, _StringInput]' - -@final -class Some(Generic[_T]): - """ - Similar to Rust's [`Option::Some`](https://doc.rust-lang.org/std/option/enum.Option.html) 
type, this - identifies a value as being present, and provides a way to access it. - - Generally used in a union with `None` to different between "some value which could be None" and no value. - """ - - __match_args__ = ('value',) - - @property - def value(self) -> _T: - """ - Returns the value wrapped by `Some`. - """ - @classmethod - def __class_getitem__(cls, __item: Any) -> Type[Self]: ... - -@final -class SchemaValidator: - """ - `SchemaValidator` is the Python wrapper for `pydantic-core`'s Rust validation logic, internally it owns one - `CombinedValidator` which may in turn own more `CombinedValidator`s which make up the full schema validator. - """ - - def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: - """ - Create a new SchemaValidator. - - Arguments: - schema: The [`CoreSchema`][pydantic_core.core_schema.CoreSchema] to use for validation. - config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to configure validation. - """ - @property - def title(self) -> str: - """ - The title of the schema, as used in the heading of [`ValidationError.__str__()`][pydantic_core.ValidationError]. - """ - def validate_python( - self, - input: Any, - *, - strict: bool | None = None, - from_attributes: bool | None = None, - context: 'dict[str, Any] | None' = None, - self_instance: Any | None = None, - ) -> Any: - """ - Validate a Python object against the schema and return the validated object. - - Arguments: - input: The Python object to validate. - strict: Whether to validate the object in strict mode. - If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. - from_attributes: Whether to validate objects as inputs to models by extracting attributes. - If `None`, the value of [`CoreConfig.from_attributes`][pydantic_core.core_schema.CoreConfig] is used. 
- context: The context to use for validation, this is passed to functional validators as - [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. - self_instance: An instance of a model set attributes on from validation, this is used when running - validation from the `__init__` method of a model. - - Raises: - ValidationError: If validation fails. - Exception: Other error types maybe raised if internal errors occur. - - Returns: - The validated object. - """ - def isinstance_python( - self, - input: Any, - *, - strict: bool | None = None, - from_attributes: bool | None = None, - context: 'dict[str, Any] | None' = None, - self_instance: Any | None = None, - ) -> bool: - """ - Similar to [`validate_python()`][pydantic_core.SchemaValidator.validate_python] but returns a boolean. - - Arguments match `validate_python()`. This method will not raise `ValidationError`s but will raise internal - errors. - - Returns: - `True` if validation succeeds, `False` if validation fails. - """ - def validate_json( - self, - input: str | bytes | bytearray, - *, - strict: bool | None = None, - context: 'dict[str, Any] | None' = None, - self_instance: Any | None = None, - ) -> Any: - """ - Validate JSON data directly against the schema and return the validated Python object. - - This method should be significantly faster than `validate_python(json.loads(json_data))` as it avoids the - need to create intermediate Python objects - - It also handles constructing the correct Python type even in strict mode, where - `validate_python(json.loads(json_data))` would fail validation. - - Arguments: - input: The JSON data to validate. - strict: Whether to validate the object in strict mode. - If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. - context: The context to use for validation, this is passed to functional validators as - [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. 
- self_instance: An instance of a model set attributes on from validation. - - Raises: - ValidationError: If validation fails or if the JSON data is invalid. - Exception: Other error types maybe raised if internal errors occur. - - Returns: - The validated Python object. - """ - def validate_strings( - self, input: _StringInput, *, strict: bool | None = None, context: 'dict[str, Any] | None' = None - ) -> Any: - """ - Validate a string against the schema and return the validated Python object. - - This is similar to `validate_json` but applies to scenarios where the input will be a string but not - JSON data, e.g. URL fragments, query parameters, etc. - - Arguments: - input: The input as a string, or bytes/bytearray if `strict=False`. - strict: Whether to validate the object in strict mode. - If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. - context: The context to use for validation, this is passed to functional validators as - [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. - - Raises: - ValidationError: If validation fails or if the JSON data is invalid. - Exception: Other error types maybe raised if internal errors occur. - - Returns: - The validated Python object. - """ - def validate_assignment( - self, - obj: Any, - field_name: str, - field_value: Any, - *, - strict: bool | None = None, - from_attributes: bool | None = None, - context: 'dict[str, Any] | None' = None, - ) -> dict[str, Any] | tuple[dict[str, Any], dict[str, Any] | None, set[str]]: - """ - Validate an assignment to a field on a model. - - Arguments: - obj: The model instance being assigned to. - field_name: The name of the field to validate assignment for. - field_value: The value to assign to the field. - strict: Whether to validate the object in strict mode. - If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. 
- from_attributes: Whether to validate objects as inputs to models by extracting attributes. - If `None`, the value of [`CoreConfig.from_attributes`][pydantic_core.core_schema.CoreConfig] is used. - context: The context to use for validation, this is passed to functional validators as - [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. - - Raises: - ValidationError: If validation fails. - Exception: Other error types maybe raised if internal errors occur. - - Returns: - Either the model dict or a tuple of `(model_data, model_extra, fields_set)` - """ - def get_default_value(self, *, strict: bool | None = None, context: Any = None) -> Some | None: - """ - Get the default value for the schema, including running default value validation. - - Arguments: - strict: Whether to validate the default value in strict mode. - If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. - context: The context to use for validation, this is passed to functional validators as - [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. - - Raises: - ValidationError: If validation fails. - Exception: Other error types maybe raised if internal errors occur. - - Returns: - `None` if the schema has no default value, otherwise a [`Some`][pydantic_core.Some] containing the default. - """ - -_IncEx: TypeAlias = set[int] | set[str] | dict[int, _IncEx] | dict[str, _IncEx] | None - -@final -class SchemaSerializer: - """ - `SchemaSerializer` is the Python wrapper for `pydantic-core`'s Rust serialization logic, internally it owns one - `CombinedSerializer` which may in turn own more `CombinedSerializer`s which make up the full schema serializer. - """ - - def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: - """ - Create a new SchemaSerializer. - - Arguments: - schema: The [`CoreSchema`][pydantic_core.core_schema.CoreSchema] to use for serialization. 
- config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to to configure serialization. - """ - def to_python( - self, - value: Any, - *, - mode: str | None = None, - include: _IncEx = None, - exclude: _IncEx = None, - by_alias: bool = True, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool = True, - fallback: Callable[[Any], Any] | None = None, - ) -> Any: - """ - Serialize/marshal a Python object to a Python object including transforming and filtering data. - - Arguments: - value: The Python object to serialize. - mode: The serialization mode to use, either `'python'` or `'json'`, defaults to `'python'`. In JSON mode, - all values are converted to JSON compatible types, e.g. `None`, `int`, `float`, `str`, `list`, `dict`. - include: A set of fields to include, if `None` all fields are included. - exclude: A set of fields to exclude, if `None` no fields are excluded. - by_alias: Whether to use the alias names of fields. - exclude_unset: Whether to exclude fields that are not set, - e.g. are not included in `__pydantic_fields_set__`. - exclude_defaults: Whether to exclude fields that are equal to their default value. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: Whether to enable serialization and validation round-trip support. - warnings: Whether to log warnings when invalid fields are encountered. - fallback: A function to call when an unknown value is encountered, - if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. - - Raises: - PydanticSerializationError: If serialization fails and no `fallback` function is provided. - - Returns: - The serialized Python object. 
- """ - def to_json( - self, - value: Any, - *, - indent: int | None = None, - include: _IncEx = None, - exclude: _IncEx = None, - by_alias: bool = True, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool = True, - fallback: Callable[[Any], Any] | None = None, - ) -> bytes: - """ - Serialize a Python object to JSON including transforming and filtering data. - - Arguments: - value: The Python object to serialize. - indent: If `None`, the JSON will be compact, otherwise it will be pretty-printed with the indent provided. - include: A set of fields to include, if `None` all fields are included. - exclude: A set of fields to exclude, if `None` no fields are excluded. - by_alias: Whether to use the alias names of fields. - exclude_unset: Whether to exclude fields that are not set, - e.g. are not included in `__pydantic_fields_set__`. - exclude_defaults: Whether to exclude fields that are equal to their default value. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: Whether to enable serialization and validation round-trip support. - warnings: Whether to log warnings when invalid fields are encountered. - fallback: A function to call when an unknown value is encountered, - if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. - - Raises: - PydanticSerializationError: If serialization fails and no `fallback` function is provided. - - Returns: - JSON bytes. 
- """ - -def to_json( - value: Any, - *, - indent: int | None = None, - include: _IncEx = None, - exclude: _IncEx = None, - by_alias: bool = True, - exclude_none: bool = False, - round_trip: bool = False, - timedelta_mode: Literal['iso8601', 'float'] = 'iso8601', - bytes_mode: Literal['utf8', 'base64'] = 'utf8', - inf_nan_mode: Literal['null', 'constants'] = 'constants', - serialize_unknown: bool = False, - fallback: Callable[[Any], Any] | None = None, -) -> bytes: - """ - Serialize a Python object to JSON including transforming and filtering data. - - This is effectively a standalone version of [`SchemaSerializer.to_json`][pydantic_core.SchemaSerializer.to_json]. - - Arguments: - value: The Python object to serialize. - indent: If `None`, the JSON will be compact, otherwise it will be pretty-printed with the indent provided. - include: A set of fields to include, if `None` all fields are included. - exclude: A set of fields to exclude, if `None` no fields are excluded. - by_alias: Whether to use the alias names of fields. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: Whether to enable serialization and validation round-trip support. - timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`. - bytes_mode: How to serialize `bytes` objects, either `'utf8'` or `'base64'`. - inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'` or `'constants'`. - serialize_unknown: Attempt to serialize unknown types, `str(value)` will be used, if that fails - `""` will be used. - fallback: A function to call when an unknown value is encountered, - if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. - - Raises: - PydanticSerializationError: If serialization fails and no `fallback` function is provided. - - Returns: - JSON bytes. 
- """ - -def from_json(data: str | bytes | bytearray, *, allow_inf_nan: bool = True, cache_strings: bool = True) -> Any: - """ - Deserialize JSON data to a Python object. - - This is effectively a faster version of `json.loads()`. - - Arguments: - data: The JSON data to deserialize. - allow_inf_nan: Whether to allow `Infinity`, `-Infinity` and `NaN` values as `json.loads()` does by default. - cache_strings: Whether to cache strings to avoid constructing new Python objects, - this should have a significant impact on performance while increasing memory usage slightly. - - Raises: - ValueError: If deserialization fails. - - Returns: - The deserialized Python object. - """ - -def to_jsonable_python( - value: Any, - *, - include: _IncEx = None, - exclude: _IncEx = None, - by_alias: bool = True, - exclude_none: bool = False, - round_trip: bool = False, - timedelta_mode: Literal['iso8601', 'float'] = 'iso8601', - bytes_mode: Literal['utf8', 'base64'] = 'utf8', - inf_nan_mode: Literal['null', 'constants'] = 'constants', - serialize_unknown: bool = False, - fallback: Callable[[Any], Any] | None = None, -) -> Any: - """ - Serialize/marshal a Python object to a JSON-serializable Python object including transforming and filtering data. - - This is effectively a standalone version of - [`SchemaSerializer.to_python(mode='json')`][pydantic_core.SchemaSerializer.to_python]. - - Args: - value: The Python object to serialize. - include: A set of fields to include, if `None` all fields are included. - exclude: A set of fields to exclude, if `None` no fields are excluded. - by_alias: Whether to use the alias names of fields. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: Whether to enable serialization and validation round-trip support. - timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`. - bytes_mode: How to serialize `bytes` objects, either `'utf8'` or `'base64'`. 
- inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'` or `'constants'`. - serialize_unknown: Attempt to serialize unknown types, `str(value)` will be used, if that fails - `""` will be used. - fallback: A function to call when an unknown value is encountered, - if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. - - Raises: - PydanticSerializationError: If serialization fails and no `fallback` function is provided. - - Returns: - The serialized Python object. - """ - -class Url(SupportsAllComparisons): - """ - A URL type, internal logic uses the [url rust crate](https://docs.rs/url/latest/url/) originally developed - by Mozilla. - """ - - def __new__(cls, url: str) -> Self: - """ - Create a new `Url` instance. - - Args: - url: String representation of a URL. - - Returns: - A new `Url` instance. - - Raises: - ValidationError: If the URL is invalid. - """ - @property - def scheme(self) -> str: - """ - The scheme part of the URL. - - e.g. `https` in `https://user:pass@host:port/path?query#fragment` - """ - @property - def username(self) -> str | None: - """ - The username part of the URL, or `None`. - - e.g. `user` in `https://user:pass@host:port/path?query#fragment` - """ - @property - def password(self) -> str | None: - """ - The password part of the URL, or `None`. - - e.g. `pass` in `https://user:pass@host:port/path?query#fragment` - """ - @property - def host(self) -> str | None: - """ - The host part of the URL, or `None`. - - If the URL must be punycode encoded, this is the encoded host, e.g if the input URL is `https://£££.com`, - `host` will be `xn--9aaa.com` - """ - def unicode_host(self) -> str | None: - """ - The host part of the URL as a unicode string, or `None`. - - e.g. 
`host` in `https://user:pass@host:port/path?query#fragment` - - If the URL must be punycode encoded, this is the decoded host, e.g if the input URL is `https://£££.com`, - `unicode_host()` will be `£££.com` - """ - @property - def port(self) -> int | None: - """ - The port part of the URL, or `None`. - - e.g. `port` in `https://user:pass@host:port/path?query#fragment` - """ - @property - def path(self) -> str | None: - """ - The path part of the URL, or `None`. - - e.g. `/path` in `https://user:pass@host:port/path?query#fragment` - """ - @property - def query(self) -> str | None: - """ - The query part of the URL, or `None`. - - e.g. `query` in `https://user:pass@host:port/path?query#fragment` - """ - def query_params(self) -> list[tuple[str, str]]: - """ - The query part of the URL as a list of key-value pairs. - - e.g. `[('foo', 'bar')]` in `https://user:pass@host:port/path?foo=bar#fragment` - """ - @property - def fragment(self) -> str | None: - """ - The fragment part of the URL, or `None`. - - e.g. `fragment` in `https://user:pass@host:port/path?query#fragment` - """ - def unicode_string(self) -> str: - """ - The URL as a unicode string, unlike `__str__()` this will not punycode encode the host. - - If the URL must be punycode encoded, this is the decoded string, e.g if the input URL is `https://£££.com`, - `unicode_string()` will be `https://£££.com` - """ - def __repr__(self) -> str: ... - def __str__(self) -> str: - """ - The URL as a string, this will punycode encode the host if required. - """ - def __deepcopy__(self, memo: dict) -> str: ... - @classmethod - def build( - cls, - *, - scheme: str, - username: Optional[str] = None, - password: Optional[str] = None, - host: str, - port: Optional[int] = None, - path: Optional[str] = None, - query: Optional[str] = None, - fragment: Optional[str] = None, - ) -> Self: - """ - Build a new `Url` instance from its component parts. - - Args: - scheme: The scheme part of the URL. 
- username: The username part of the URL, or omit for no username. - password: The password part of the URL, or omit for no password. - host: The host part of the URL. - port: The port part of the URL, or omit for no port. - path: The path part of the URL, or omit for no path. - query: The query part of the URL, or omit for no query. - fragment: The fragment part of the URL, or omit for no fragment. - - Returns: - An instance of URL - """ - -class MultiHostUrl(SupportsAllComparisons): - """ - A URL type with support for multiple hosts, as used by some databases for DSNs, e.g. `https://foo.com,bar.com/path`. - - Internal URL logic uses the [url rust crate](https://docs.rs/url/latest/url/) originally developed - by Mozilla. - """ - - def __new__(cls, url: str) -> Self: - """ - Create a new `MultiHostUrl` instance. - - Args: - url: String representation of a URL. - - Returns: - A new `MultiHostUrl` instance. - - Raises: - ValidationError: If the URL is invalid. - """ - @property - def scheme(self) -> str: - """ - The scheme part of the URL. - - e.g. `https` in `https://foo.com,bar.com/path?query#fragment` - """ - @property - def path(self) -> str | None: - """ - The path part of the URL, or `None`. - - e.g. `/path` in `https://foo.com,bar.com/path?query#fragment` - """ - @property - def query(self) -> str | None: - """ - The query part of the URL, or `None`. - - e.g. `query` in `https://foo.com,bar.com/path?query#fragment` - """ - def query_params(self) -> list[tuple[str, str]]: - """ - The query part of the URL as a list of key-value pairs. - - e.g. `[('foo', 'bar')]` in `https://foo.com,bar.com/path?query#fragment` - """ - @property - def fragment(self) -> str | None: - """ - The fragment part of the URL, or `None`. - - e.g. `fragment` in `https://foo.com,bar.com/path?query#fragment` - """ - def hosts(self) -> list[MultiHostHost]: - ''' - - The hosts of the `MultiHostUrl` as [`MultiHostHost`][pydantic_core.MultiHostHost] typed dicts. 
- - ```py - from pydantic_core import MultiHostUrl - - mhu = MultiHostUrl('https://foo.com:123,foo:bar@bar.com/path') - print(mhu.hosts()) - """ - [ - {'username': None, 'password': None, 'host': 'foo.com', 'port': 123}, - {'username': 'foo', 'password': 'bar', 'host': 'bar.com', 'port': 443} - ] - ``` - Returns: - A list of dicts, each representing a host. - ''' - def unicode_string(self) -> str: - """ - The URL as a unicode string, unlike `__str__()` this will not punycode encode the hosts. - """ - def __repr__(self) -> str: ... - def __str__(self) -> str: - """ - The URL as a string, this will punycode encode the hosts if required. - """ - def __deepcopy__(self, memo: dict) -> Self: ... - @classmethod - def build( - cls, - *, - scheme: str, - hosts: Optional[list[MultiHostHost]] = None, - username: Optional[str] = None, - password: Optional[str] = None, - host: Optional[str] = None, - port: Optional[int] = None, - path: Optional[str] = None, - query: Optional[str] = None, - fragment: Optional[str] = None, - ) -> Self: - """ - Build a new `MultiHostUrl` instance from its component parts. - - This method takes either `hosts` - a list of `MultiHostHost` typed dicts, or the individual components - `username`, `password`, `host` and `port`. - - Args: - scheme: The scheme part of the URL. - hosts: Multiple hosts to build the URL from. - username: The username part of the URL. - password: The password part of the URL. - host: The host part of the URL. - port: The port part of the URL. - path: The path part of the URL. - query: The query part of the URL, or omit for no query. - fragment: The fragment part of the URL, or omit for no fragment. - - Returns: - An instance of `MultiHostUrl` - """ - -@final -class SchemaError(Exception): - """ - Information about errors that occur while building a [`SchemaValidator`][pydantic_core.SchemaValidator] - or [`SchemaSerializer`][pydantic_core.SchemaSerializer]. 
- """ - - def error_count(self) -> int: - """ - Returns: - The number of errors in the schema. - """ - def errors(self) -> list[ErrorDetails]: - """ - Returns: - A list of [`ErrorDetails`][pydantic_core.ErrorDetails] for each error in the schema. - """ - -@final -class ValidationError(ValueError): - """ - `ValidationError` is the exception raised by `pydantic-core` when validation fails, it contains a list of errors - which detail why validation failed. - """ - - @staticmethod - def from_exception_data( - title: str, - line_errors: list[InitErrorDetails], - input_type: Literal['python', 'json'] = 'python', - hide_input: bool = False, - ) -> ValidationError: - """ - Python constructor for a Validation Error. - - The API for constructing validation errors will probably change in the future, - hence the static method rather than `__init__`. - - Arguments: - title: The title of the error, as used in the heading of `str(validation_error)` - line_errors: A list of [`InitErrorDetails`][pydantic_core.InitErrorDetails] which contain information - about errors that occurred during validation. - input_type: Whether the error is for a Python object or JSON. - hide_input: Whether to hide the input value in the error message. - """ - @property - def title(self) -> str: - """ - The title of the error, as used in the heading of `str(validation_error)`. - """ - def error_count(self) -> int: - """ - Returns: - The number of errors in the validation error. - """ - def errors( - self, *, include_url: bool = True, include_context: bool = True, include_input: bool = True - ) -> list[ErrorDetails]: - """ - Details about each error in the validation error. - - Args: - include_url: Whether to include a URL to documentation on the error each error. - include_context: Whether to include the context of each error. - include_input: Whether to include the input value of each error. - - Returns: - A list of [`ErrorDetails`][pydantic_core.ErrorDetails] for each error in the validation error. 
- """ - def json( - self, - *, - indent: int | None = None, - include_url: bool = True, - include_context: bool = True, - include_input: bool = True, - ) -> str: - """ - Same as [`errors()`][pydantic_core.ValidationError.errors] but returns a JSON string. - - Args: - indent: The number of spaces to indent the JSON by, or `None` for no indentation - compact JSON. - include_url: Whether to include a URL to documentation on the error each error. - include_context: Whether to include the context of each error. - include_input: Whether to include the input value of each error. - - Returns: - a JSON string. - """ - - def __repr__(self) -> str: - """ - A string representation of the validation error. - - Whether or not documentation URLs are included in the repr is controlled by the - environment variable `PYDANTIC_ERRORS_INCLUDE_URL` being set to `1` or - `true`; by default, URLs are shown. - - Due to implementation details, this environment variable can only be set once, - before the first validation error is created. - """ - -@final -class PydanticCustomError(ValueError): - def __new__( - cls, error_type: LiteralString, message_template: LiteralString, context: dict[str, Any] | None = None - ) -> Self: ... - @property - def context(self) -> dict[str, Any] | None: ... - @property - def type(self) -> str: ... - @property - def message_template(self) -> str: ... - def message(self) -> str: ... - -@final -class PydanticKnownError(ValueError): - def __new__(cls, error_type: ErrorType, context: dict[str, Any] | None = None) -> Self: ... - @property - def context(self) -> dict[str, Any] | None: ... - @property - def type(self) -> ErrorType: ... - @property - def message_template(self) -> str: ... - def message(self) -> str: ... - -@final -class PydanticOmit(Exception): - def __new__(cls) -> Self: ... - -@final -class PydanticUseDefault(Exception): - def __new__(cls) -> Self: ... 
- -@final -class PydanticSerializationError(ValueError): - def __new__(cls, message: str) -> Self: ... - -@final -class PydanticSerializationUnexpectedValue(ValueError): - def __new__(cls, message: str | None = None) -> Self: ... - -@final -class ArgsKwargs: - def __new__(cls, args: tuple[Any, ...], kwargs: dict[str, Any] | None = None) -> Self: ... - @property - def args(self) -> tuple[Any, ...]: ... - @property - def kwargs(self) -> dict[str, Any] | None: ... - -@final -class PydanticUndefinedType: - def __copy__(self) -> Self: ... - def __deepcopy__(self, memo: Any) -> Self: ... - -PydanticUndefined: PydanticUndefinedType - -def list_all_errors() -> list[ErrorTypeInfo]: - """ - Get information about all built-in errors. - - Returns: - A list of `ErrorTypeInfo` typed dicts. - """ -@final -class TzInfo(datetime.tzinfo): - def tzname(self, _dt: datetime.datetime | None) -> str | None: ... - def utcoffset(self, _dt: datetime.datetime | None) -> datetime.timedelta: ... - def dst(self, _dt: datetime.datetime | None) -> datetime.timedelta: ... - def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... - def __deepcopy__(self, _memo: dict[Any, Any]) -> 'TzInfo': ... - -def validate_core_schema(schema: CoreSchema, *, strict: bool | None = None) -> CoreSchema: - """Validate a CoreSchema - This currently uses lax mode for validation (i.e. will coerce strings to dates and such) - but may use strict mode in the future. - We may also remove this function altogether, do not rely on it being present if you are - using pydantic-core directly. - """ diff --git a/lib/pydantic_core/core_schema.py b/lib/pydantic_core/core_schema.py deleted file mode 100644 index 31bf4878..00000000 --- a/lib/pydantic_core/core_schema.py +++ /dev/null @@ -1,3980 +0,0 @@ -""" -This module contains definitions to build schemas which `pydantic_core` can -validate and serialize. 
-""" - -from __future__ import annotations as _annotations - -import sys -import warnings -from collections.abc import Mapping -from datetime import date, datetime, time, timedelta -from decimal import Decimal -from typing import TYPE_CHECKING, Any, Callable, Dict, Hashable, List, Set, Tuple, Type, Union - -from typing_extensions import deprecated - -if sys.version_info < (3, 12): - from typing_extensions import TypedDict -else: - from typing import TypedDict - -if sys.version_info < (3, 11): - from typing_extensions import Protocol, Required, TypeAlias -else: - from typing import Protocol, Required, TypeAlias - -if sys.version_info < (3, 9): - from typing_extensions import Literal -else: - from typing import Literal - -if TYPE_CHECKING: - from pydantic_core import PydanticUndefined -else: - # The initial build of pydantic_core requires PydanticUndefined to generate - # the core schema; so we need to conditionally skip it. mypy doesn't like - # this at all, hence the TYPE_CHECKING branch above. - try: - from pydantic_core import PydanticUndefined - except ImportError: - PydanticUndefined = object() - - -ExtraBehavior = Literal['allow', 'forbid', 'ignore'] - - -class CoreConfig(TypedDict, total=False): - """ - Base class for schema configuration options. - - Attributes: - title: The name of the configuration. - strict: Whether the configuration should strictly adhere to specified rules. - extra_fields_behavior: The behavior for handling extra fields. - typed_dict_total: Whether the TypedDict should be considered total. Default is `True`. - from_attributes: Whether to use attributes for models, dataclasses, and tagged union keys. - loc_by_alias: Whether to use the used alias (or first alias for "field required" errors) instead of - `field_names` to construct error `loc`s. Default is `True`. - revalidate_instances: Whether instances of models and dataclasses should re-validate. Default is 'never'. 
- validate_default: Whether to validate default values during validation. Default is `False`. - populate_by_name: Whether an aliased field may be populated by its name as given by the model attribute, - as well as the alias. (Replaces 'allow_population_by_field_name' in Pydantic v1.) Default is `False`. - str_max_length: The maximum length for string fields. - str_min_length: The minimum length for string fields. - str_strip_whitespace: Whether to strip whitespace from string fields. - str_to_lower: Whether to convert string fields to lowercase. - str_to_upper: Whether to convert string fields to uppercase. - allow_inf_nan: Whether to allow infinity and NaN values for float fields. Default is `True`. - ser_json_timedelta: The serialization option for `timedelta` values. Default is 'iso8601'. - ser_json_bytes: The serialization option for `bytes` values. Default is 'utf8'. - ser_json_inf_nan: The serialization option for infinity and NaN values - in float fields. Default is 'null'. - hide_input_in_errors: Whether to hide input data from `ValidationError` representation. - validation_error_cause: Whether to add user-python excs to the __cause__ of a ValidationError. - Requires exceptiongroup backport pre Python 3.11. - coerce_numbers_to_str: Whether to enable coercion of any `Number` type to `str` (not applicable in `strict` mode). - regex_engine: The regex engine to use for regex pattern validation. Default is 'rust-regex'. See `StringSchema`. 
- """ - - title: str - strict: bool - # settings related to typed dicts, model fields, dataclass fields - extra_fields_behavior: ExtraBehavior - typed_dict_total: bool # default: True - # used for models, dataclasses, and tagged union keys - from_attributes: bool - # whether to use the used alias (or first alias for "field required" errors) instead of field_names - # to construct error `loc`s, default True - loc_by_alias: bool - # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never' - revalidate_instances: Literal['always', 'never', 'subclass-instances'] - # whether to validate default values during validation, default False - validate_default: bool - # used on typed-dicts and arguments - populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1 - # fields related to string fields only - str_max_length: int - str_min_length: int - str_strip_whitespace: bool - str_to_lower: bool - str_to_upper: bool - # fields related to float fields only - allow_inf_nan: bool # default: True - # the config options are used to customise serialization to JSON - ser_json_timedelta: Literal['iso8601', 'float'] # default: 'iso8601' - ser_json_bytes: Literal['utf8', 'base64', 'hex'] # default: 'utf8' - ser_json_inf_nan: Literal['null', 'constants'] # default: 'null' - # used to hide input data from ValidationError repr - hide_input_in_errors: bool - validation_error_cause: bool # default: False - coerce_numbers_to_str: bool # default: False - regex_engine: Literal['rust-regex', 'python-re'] # default: 'rust-regex' - - -IncExCall: TypeAlias = 'set[int | str] | dict[int | str, IncExCall] | None' - - -class SerializationInfo(Protocol): - @property - def include(self) -> IncExCall: - ... - - @property - def exclude(self) -> IncExCall: - ... - - @property - def mode(self) -> str: - ... - - @property - def by_alias(self) -> bool: - ... - - @property - def exclude_unset(self) -> bool: - ... 
- - @property - def exclude_defaults(self) -> bool: - ... - - @property - def exclude_none(self) -> bool: - ... - - @property - def round_trip(self) -> bool: - ... - - def mode_is_json(self) -> bool: - ... - - def __str__(self) -> str: - ... - - def __repr__(self) -> str: - ... - - -class FieldSerializationInfo(SerializationInfo, Protocol): - @property - def field_name(self) -> str: - ... - - -class ValidationInfo(Protocol): - """ - Argument passed to validation functions. - """ - - @property - def context(self) -> Any | None: - """Current validation context.""" - ... - - @property - def config(self) -> CoreConfig | None: - """The CoreConfig that applies to this validation.""" - ... - - @property - def mode(self) -> Literal['python', 'json']: - """The type of input data we are currently validating""" - ... - - @property - def data(self) -> Dict[str, Any]: - """The data being validated for this model.""" - ... - - @property - def field_name(self) -> str | None: - """ - The name of the current field being validated if this validator is - attached to a model field. - """ - ... - - -ExpectedSerializationTypes = Literal[ - 'none', - 'int', - 'bool', - 'float', - 'str', - 'bytes', - 'bytearray', - 'list', - 'tuple', - 'set', - 'frozenset', - 'generator', - 'dict', - 'datetime', - 'date', - 'time', - 'timedelta', - 'url', - 'multi-host-url', - 'json', - 'uuid', -] - - -class SimpleSerSchema(TypedDict, total=False): - type: Required[ExpectedSerializationTypes] - - -def simple_ser_schema(type: ExpectedSerializationTypes) -> SimpleSerSchema: - """ - Returns a schema for serialization with a custom type. 
- - Args: - type: The type to use for serialization - """ - return SimpleSerSchema(type=type) - - -# (__input_value: Any) -> Any -GeneralPlainNoInfoSerializerFunction = Callable[[Any], Any] -# (__input_value: Any, __info: FieldSerializationInfo) -> Any -GeneralPlainInfoSerializerFunction = Callable[[Any, SerializationInfo], Any] -# (__model: Any, __input_value: Any) -> Any -FieldPlainNoInfoSerializerFunction = Callable[[Any, Any], Any] -# (__model: Any, __input_value: Any, __info: FieldSerializationInfo) -> Any -FieldPlainInfoSerializerFunction = Callable[[Any, Any, FieldSerializationInfo], Any] -SerializerFunction = Union[ - GeneralPlainNoInfoSerializerFunction, - GeneralPlainInfoSerializerFunction, - FieldPlainNoInfoSerializerFunction, - FieldPlainInfoSerializerFunction, -] - -WhenUsed = Literal['always', 'unless-none', 'json', 'json-unless-none'] -""" -Values have the following meanings: - -* `'always'` means always use -* `'unless-none'` means use unless the value is `None` -* `'json'` means use when serializing to JSON -* `'json-unless-none'` means use when serializing to JSON and the value is not `None` -""" - - -class PlainSerializerFunctionSerSchema(TypedDict, total=False): - type: Required[Literal['function-plain']] - function: Required[SerializerFunction] - is_field_serializer: bool # default False - info_arg: bool # default False - return_schema: CoreSchema # if omitted, AnySchema is used - when_used: WhenUsed # default: 'always' - - -def plain_serializer_function_ser_schema( - function: SerializerFunction, - *, - is_field_serializer: bool | None = None, - info_arg: bool | None = None, - return_schema: CoreSchema | None = None, - when_used: WhenUsed = 'always', -) -> PlainSerializerFunctionSerSchema: - """ - Returns a schema for serialization with a function, can be either a "general" or "field" function. - - Args: - function: The function to use for serialization - is_field_serializer: Whether the serializer is for a field, e.g. 
takes `model` as the first argument, - and `info` includes `field_name` - info_arg: Whether the function takes an `__info` argument - return_schema: Schema to use for serializing return value - when_used: When the function should be called - """ - if when_used == 'always': - # just to avoid extra elements in schema, and to use the actual default defined in rust - when_used = None # type: ignore - return _dict_not_none( - type='function-plain', - function=function, - is_field_serializer=is_field_serializer, - info_arg=info_arg, - return_schema=return_schema, - when_used=when_used, - ) - - -class SerializerFunctionWrapHandler(Protocol): # pragma: no cover - def __call__(self, __input_value: Any, __index_key: int | str | None = None) -> Any: - ... - - -# (__input_value: Any, __serializer: SerializerFunctionWrapHandler) -> Any -GeneralWrapNoInfoSerializerFunction = Callable[[Any, SerializerFunctionWrapHandler], Any] -# (__input_value: Any, __serializer: SerializerFunctionWrapHandler, __info: SerializationInfo) -> Any -GeneralWrapInfoSerializerFunction = Callable[[Any, SerializerFunctionWrapHandler, SerializationInfo], Any] -# (__model: Any, __input_value: Any, __serializer: SerializerFunctionWrapHandler) -> Any -FieldWrapNoInfoSerializerFunction = Callable[[Any, Any, SerializerFunctionWrapHandler], Any] -# (__model: Any, __input_value: Any, __serializer: SerializerFunctionWrapHandler, __info: FieldSerializationInfo) -> Any -FieldWrapInfoSerializerFunction = Callable[[Any, Any, SerializerFunctionWrapHandler, FieldSerializationInfo], Any] -WrapSerializerFunction = Union[ - GeneralWrapNoInfoSerializerFunction, - GeneralWrapInfoSerializerFunction, - FieldWrapNoInfoSerializerFunction, - FieldWrapInfoSerializerFunction, -] - - -class WrapSerializerFunctionSerSchema(TypedDict, total=False): - type: Required[Literal['function-wrap']] - function: Required[WrapSerializerFunction] - is_field_serializer: bool # default False - info_arg: bool # default False - schema: CoreSchema # 
if omitted, the schema on which this serializer is defined is used - return_schema: CoreSchema # if omitted, AnySchema is used - when_used: WhenUsed # default: 'always' - - -def wrap_serializer_function_ser_schema( - function: WrapSerializerFunction, - *, - is_field_serializer: bool | None = None, - info_arg: bool | None = None, - schema: CoreSchema | None = None, - return_schema: CoreSchema | None = None, - when_used: WhenUsed = 'always', -) -> WrapSerializerFunctionSerSchema: - """ - Returns a schema for serialization with a wrap function, can be either a "general" or "field" function. - - Args: - function: The function to use for serialization - is_field_serializer: Whether the serializer is for a field, e.g. takes `model` as the first argument, - and `info` includes `field_name` - info_arg: Whether the function takes an `__info` argument - schema: The schema to use for the inner serialization - return_schema: Schema to use for serializing return value - when_used: When the function should be called - """ - if when_used == 'always': - # just to avoid extra elements in schema, and to use the actual default defined in rust - when_used = None # type: ignore - return _dict_not_none( - type='function-wrap', - function=function, - is_field_serializer=is_field_serializer, - info_arg=info_arg, - schema=schema, - return_schema=return_schema, - when_used=when_used, - ) - - -class FormatSerSchema(TypedDict, total=False): - type: Required[Literal['format']] - formatting_string: Required[str] - when_used: WhenUsed # default: 'json-unless-none' - - -def format_ser_schema(formatting_string: str, *, when_used: WhenUsed = 'json-unless-none') -> FormatSerSchema: - """ - Returns a schema for serialization using python's `format` method. 
- - Args: - formatting_string: String defining the format to use - when_used: Same meaning as for [general_function_plain_ser_schema], but with a different default - """ - if when_used == 'json-unless-none': - # just to avoid extra elements in schema, and to use the actual default defined in rust - when_used = None # type: ignore - return _dict_not_none(type='format', formatting_string=formatting_string, when_used=when_used) - - -class ToStringSerSchema(TypedDict, total=False): - type: Required[Literal['to-string']] - when_used: WhenUsed # default: 'json-unless-none' - - -def to_string_ser_schema(*, when_used: WhenUsed = 'json-unless-none') -> ToStringSerSchema: - """ - Returns a schema for serialization using python's `str()` / `__str__` method. - - Args: - when_used: Same meaning as for [general_function_plain_ser_schema], but with a different default - """ - s = dict(type='to-string') - if when_used != 'json-unless-none': - # just to avoid extra elements in schema, and to use the actual default defined in rust - s['when_used'] = when_used - return s # type: ignore - - -class ModelSerSchema(TypedDict, total=False): - type: Required[Literal['model']] - cls: Required[Type[Any]] - schema: Required[CoreSchema] - - -def model_ser_schema(cls: Type[Any], schema: CoreSchema) -> ModelSerSchema: - """ - Returns a schema for serialization using a model. 
- - Args: - cls: The expected class type, used to generate warnings if the wrong type is passed - schema: Internal schema to use to serialize the model dict - """ - return ModelSerSchema(type='model', cls=cls, schema=schema) - - -SerSchema = Union[ - SimpleSerSchema, - PlainSerializerFunctionSerSchema, - WrapSerializerFunctionSerSchema, - FormatSerSchema, - ToStringSerSchema, - ModelSerSchema, -] - - -class ComputedField(TypedDict, total=False): - type: Required[Literal['computed-field']] - property_name: Required[str] - return_schema: Required[CoreSchema] - alias: str - metadata: Any - - -def computed_field( - property_name: str, return_schema: CoreSchema, *, alias: str | None = None, metadata: Any = None -) -> ComputedField: - """ - ComputedFields are properties of a model or dataclass that are included in serialization. - - Args: - property_name: The name of the property on the model or dataclass - return_schema: The schema used for the type returned by the computed field - alias: The name to use in the serialized output - metadata: Any other information you want to include with the schema, not used by pydantic-core - """ - return _dict_not_none( - type='computed-field', property_name=property_name, return_schema=return_schema, alias=alias, metadata=metadata - ) - - -class AnySchema(TypedDict, total=False): - type: Required[Literal['any']] - ref: str - metadata: Any - serialization: SerSchema - - -def any_schema(*, ref: str | None = None, metadata: Any = None, serialization: SerSchema | None = None) -> AnySchema: - """ - Returns a schema that matches any value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.any_schema() - v = SchemaValidator(schema) - assert v.validate_python(1) == 1 - ``` - - Args: - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom 
serialization schema - """ - return _dict_not_none(type='any', ref=ref, metadata=metadata, serialization=serialization) - - -class NoneSchema(TypedDict, total=False): - type: Required[Literal['none']] - ref: str - metadata: Any - serialization: SerSchema - - -def none_schema(*, ref: str | None = None, metadata: Any = None, serialization: SerSchema | None = None) -> NoneSchema: - """ - Returns a schema that matches a None value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.none_schema() - v = SchemaValidator(schema) - assert v.validate_python(None) is None - ``` - - Args: - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none(type='none', ref=ref, metadata=metadata, serialization=serialization) - - -class BoolSchema(TypedDict, total=False): - type: Required[Literal['bool']] - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def bool_schema( - strict: bool | None = None, ref: str | None = None, metadata: Any = None, serialization: SerSchema | None = None -) -> BoolSchema: - """ - Returns a schema that matches a bool value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.bool_schema() - v = SchemaValidator(schema) - assert v.validate_python('True') is True - ``` - - Args: - strict: Whether the value should be a bool or a value that can be converted to a bool - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none(type='bool', strict=strict, ref=ref, metadata=metadata, serialization=serialization) - - -class IntSchema(TypedDict, 
total=False): - type: Required[Literal['int']] - multiple_of: int - le: int - ge: int - lt: int - gt: int - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def int_schema( - *, - multiple_of: int | None = None, - le: int | None = None, - ge: int | None = None, - lt: int | None = None, - gt: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> IntSchema: - """ - Returns a schema that matches a int value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.int_schema(multiple_of=2, le=6, ge=2) - v = SchemaValidator(schema) - assert v.validate_python('4') == 4 - ``` - - Args: - multiple_of: The value must be a multiple of this number - le: The value must be less than or equal to this number - ge: The value must be greater than or equal to this number - lt: The value must be strictly less than this number - gt: The value must be strictly greater than this number - strict: Whether the value should be a int or a value that can be converted to a int - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='int', - multiple_of=multiple_of, - le=le, - ge=ge, - lt=lt, - gt=gt, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class FloatSchema(TypedDict, total=False): - type: Required[Literal['float']] - allow_inf_nan: bool # whether 'NaN', '+inf', '-inf' should be forbidden. 
default: True - multiple_of: float - le: float - ge: float - lt: float - gt: float - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def float_schema( - *, - allow_inf_nan: bool | None = None, - multiple_of: float | None = None, - le: float | None = None, - ge: float | None = None, - lt: float | None = None, - gt: float | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> FloatSchema: - """ - Returns a schema that matches a float value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.float_schema(le=0.8, ge=0.2) - v = SchemaValidator(schema) - assert v.validate_python('0.5') == 0.5 - ``` - - Args: - allow_inf_nan: Whether to allow inf and nan values - multiple_of: The value must be a multiple of this number - le: The value must be less than or equal to this number - ge: The value must be greater than or equal to this number - lt: The value must be strictly less than this number - gt: The value must be strictly greater than this number - strict: Whether the value should be a float or a value that can be converted to a float - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='float', - allow_inf_nan=allow_inf_nan, - multiple_of=multiple_of, - le=le, - ge=ge, - lt=lt, - gt=gt, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class DecimalSchema(TypedDict, total=False): - type: Required[Literal['decimal']] - allow_inf_nan: bool # whether 'NaN', '+inf', '-inf' should be forbidden. 
default: False - multiple_of: Decimal - le: Decimal - ge: Decimal - lt: Decimal - gt: Decimal - max_digits: int - decimal_places: int - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def decimal_schema( - *, - allow_inf_nan: bool = None, - multiple_of: Decimal | None = None, - le: Decimal | None = None, - ge: Decimal | None = None, - lt: Decimal | None = None, - gt: Decimal | None = None, - max_digits: int | None = None, - decimal_places: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> DecimalSchema: - """ - Returns a schema that matches a decimal value, e.g.: - - ```py - from decimal import Decimal - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.decimal_schema(le=0.8, ge=0.2) - v = SchemaValidator(schema) - assert v.validate_python('0.5') == Decimal('0.5') - ``` - - Args: - allow_inf_nan: Whether to allow inf and nan values - multiple_of: The value must be a multiple of this number - le: The value must be less than or equal to this number - ge: The value must be greater than or equal to this number - lt: The value must be strictly less than this number - gt: The value must be strictly greater than this number - max_digits: The maximum number of decimal digits allowed - decimal_places: The maximum number of decimal places allowed - strict: Whether the value should be a float or a value that can be converted to a float - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='decimal', - gt=gt, - ge=ge, - lt=lt, - le=le, - max_digits=max_digits, - decimal_places=decimal_places, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - strict=strict, - ref=ref, - metadata=metadata, - 
serialization=serialization, - ) - - -class StringSchema(TypedDict, total=False): - type: Required[Literal['str']] - pattern: str - max_length: int - min_length: int - strip_whitespace: bool - to_lower: bool - to_upper: bool - regex_engine: Literal['rust-regex', 'python-re'] # default: 'rust-regex' - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def str_schema( - *, - pattern: str | None = None, - max_length: int | None = None, - min_length: int | None = None, - strip_whitespace: bool | None = None, - to_lower: bool | None = None, - to_upper: bool | None = None, - regex_engine: Literal['rust-regex', 'python-re'] | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> StringSchema: - """ - Returns a schema that matches a string value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.str_schema(max_length=10, min_length=2) - v = SchemaValidator(schema) - assert v.validate_python('hello') == 'hello' - ``` - - Args: - pattern: A regex pattern that the value must match - max_length: The value must be at most this length - min_length: The value must be at least this length - strip_whitespace: Whether to strip whitespace from the value - to_lower: Whether to convert the value to lowercase - to_upper: Whether to convert the value to uppercase - regex_engine: The regex engine to use for pattern validation. Default is 'rust-regex'. - - `rust-regex` uses the [`regex`](https://docs.rs/regex) Rust - crate, which is non-backtracking and therefore more DDoS - resistant, but does not support all regex features. - - `python-re` use the [`re`](https://docs.python.org/3/library/re.html) module, - which supports all regex features, but may be slower. 
- strict: Whether the value should be a string or a value that can be converted to a string - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='str', - pattern=pattern, - max_length=max_length, - min_length=min_length, - strip_whitespace=strip_whitespace, - to_lower=to_lower, - to_upper=to_upper, - regex_engine=regex_engine, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class BytesSchema(TypedDict, total=False): - type: Required[Literal['bytes']] - max_length: int - min_length: int - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def bytes_schema( - *, - max_length: int | None = None, - min_length: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> BytesSchema: - """ - Returns a schema that matches a bytes value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.bytes_schema(max_length=10, min_length=2) - v = SchemaValidator(schema) - assert v.validate_python(b'hello') == b'hello' - ``` - - Args: - max_length: The value must be at most this length - min_length: The value must be at least this length - strict: Whether the value should be a bytes or a value that can be converted to a bytes - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='bytes', - max_length=max_length, - min_length=min_length, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class DateSchema(TypedDict, 
total=False): - type: Required[Literal['date']] - strict: bool - le: date - ge: date - lt: date - gt: date - now_op: Literal['past', 'future'] - # defaults to current local utc offset from `time.localtime().tm_gmtoff` - # value is restricted to -86_400 < offset < 86_400 by bounds in generate_self_schema.py - now_utc_offset: int - ref: str - metadata: Any - serialization: SerSchema - - -def date_schema( - *, - strict: bool | None = None, - le: date | None = None, - ge: date | None = None, - lt: date | None = None, - gt: date | None = None, - now_op: Literal['past', 'future'] | None = None, - now_utc_offset: int | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> DateSchema: - """ - Returns a schema that matches a date value, e.g.: - - ```py - from datetime import date - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.date_schema(le=date(2020, 1, 1), ge=date(2019, 1, 1)) - v = SchemaValidator(schema) - assert v.validate_python(date(2019, 6, 1)) == date(2019, 6, 1) - ``` - - Args: - strict: Whether the value should be a date or a value that can be converted to a date - le: The value must be less than or equal to this date - ge: The value must be greater than or equal to this date - lt: The value must be strictly less than this date - gt: The value must be strictly greater than this date - now_op: The value must be in the past or future relative to the current date - now_utc_offset: The value must be in the past or future relative to the current date with this utc offset - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='date', - strict=strict, - le=le, - ge=ge, - lt=lt, - gt=gt, - now_op=now_op, - now_utc_offset=now_utc_offset, - ref=ref, - 
metadata=metadata, - serialization=serialization, - ) - - -class TimeSchema(TypedDict, total=False): - type: Required[Literal['time']] - strict: bool - le: time - ge: time - lt: time - gt: time - tz_constraint: Union[Literal['aware', 'naive'], int] - microseconds_precision: Literal['truncate', 'error'] - ref: str - metadata: Any - serialization: SerSchema - - -def time_schema( - *, - strict: bool | None = None, - le: time | None = None, - ge: time | None = None, - lt: time | None = None, - gt: time | None = None, - tz_constraint: Literal['aware', 'naive'] | int | None = None, - microseconds_precision: Literal['truncate', 'error'] = 'truncate', - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> TimeSchema: - """ - Returns a schema that matches a time value, e.g.: - - ```py - from datetime import time - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.time_schema(le=time(12, 0, 0), ge=time(6, 0, 0)) - v = SchemaValidator(schema) - assert v.validate_python(time(9, 0, 0)) == time(9, 0, 0) - ``` - - Args: - strict: Whether the value should be a time or a value that can be converted to a time - le: The value must be less than or equal to this time - ge: The value must be greater than or equal to this time - lt: The value must be strictly less than this time - gt: The value must be strictly greater than this time - tz_constraint: The value must be timezone aware or naive, or an int to indicate required tz offset - microseconds_precision: The behavior when seconds have more than 6 digits or microseconds is too large - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='time', - strict=strict, - le=le, - ge=ge, - lt=lt, - gt=gt, - tz_constraint=tz_constraint, - 
microseconds_precision=microseconds_precision, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class DatetimeSchema(TypedDict, total=False): - type: Required[Literal['datetime']] - strict: bool - le: datetime - ge: datetime - lt: datetime - gt: datetime - now_op: Literal['past', 'future'] - tz_constraint: Union[Literal['aware', 'naive'], int] - # defaults to current local utc offset from `time.localtime().tm_gmtoff` - # value is restricted to -86_400 < offset < 86_400 by bounds in generate_self_schema.py - now_utc_offset: int - microseconds_precision: Literal['truncate', 'error'] # default: 'truncate' - ref: str - metadata: Any - serialization: SerSchema - - -def datetime_schema( - *, - strict: bool | None = None, - le: datetime | None = None, - ge: datetime | None = None, - lt: datetime | None = None, - gt: datetime | None = None, - now_op: Literal['past', 'future'] | None = None, - tz_constraint: Literal['aware', 'naive'] | int | None = None, - now_utc_offset: int | None = None, - microseconds_precision: Literal['truncate', 'error'] = 'truncate', - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> DatetimeSchema: - """ - Returns a schema that matches a datetime value, e.g.: - - ```py - from datetime import datetime - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.datetime_schema() - v = SchemaValidator(schema) - now = datetime.now() - assert v.validate_python(str(now)) == now - ``` - - Args: - strict: Whether the value should be a datetime or a value that can be converted to a datetime - le: The value must be less than or equal to this datetime - ge: The value must be greater than or equal to this datetime - lt: The value must be strictly less than this datetime - gt: The value must be strictly greater than this datetime - now_op: The value must be in the past or future relative to the current datetime - tz_constraint: The value must be timezone aware or naive, 
or an int to indicate required tz offset - TODO: use of a tzinfo where offset changes based on the datetime is not yet supported - now_utc_offset: The value must be in the past or future relative to the current datetime with this utc offset - microseconds_precision: The behavior when seconds have more than 6 digits or microseconds is too large - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='datetime', - strict=strict, - le=le, - ge=ge, - lt=lt, - gt=gt, - now_op=now_op, - tz_constraint=tz_constraint, - now_utc_offset=now_utc_offset, - microseconds_precision=microseconds_precision, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class TimedeltaSchema(TypedDict, total=False): - type: Required[Literal['timedelta']] - strict: bool - le: timedelta - ge: timedelta - lt: timedelta - gt: timedelta - microseconds_precision: Literal['truncate', 'error'] - ref: str - metadata: Any - serialization: SerSchema - - -def timedelta_schema( - *, - strict: bool | None = None, - le: timedelta | None = None, - ge: timedelta | None = None, - lt: timedelta | None = None, - gt: timedelta | None = None, - microseconds_precision: Literal['truncate', 'error'] = 'truncate', - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> TimedeltaSchema: - """ - Returns a schema that matches a timedelta value, e.g.: - - ```py - from datetime import timedelta - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.timedelta_schema(le=timedelta(days=1), ge=timedelta(days=0)) - v = SchemaValidator(schema) - assert v.validate_python(timedelta(hours=12)) == timedelta(hours=12) - ``` - - Args: - strict: Whether the value should be a timedelta or a value that can be converted to a timedelta - 
le: The value must be less than or equal to this timedelta - ge: The value must be greater than or equal to this timedelta - lt: The value must be strictly less than this timedelta - gt: The value must be strictly greater than this timedelta - microseconds_precision: The behavior when seconds have more than 6 digits or microseconds is too large - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='timedelta', - strict=strict, - le=le, - ge=ge, - lt=lt, - gt=gt, - microseconds_precision=microseconds_precision, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class LiteralSchema(TypedDict, total=False): - type: Required[Literal['literal']] - expected: Required[List[Any]] - ref: str - metadata: Any - serialization: SerSchema - - -def literal_schema( - expected: list[Any], *, ref: str | None = None, metadata: Any = None, serialization: SerSchema | None = None -) -> LiteralSchema: - """ - Returns a schema that matches a literal value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.literal_schema(['hello', 'world']) - v = SchemaValidator(schema) - assert v.validate_python('hello') == 'hello' - ``` - - Args: - expected: The value must be one of these values - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none(type='literal', expected=expected, ref=ref, metadata=metadata, serialization=serialization) - - -# must match input/parse_json.rs::JsonType::try_from -JsonType = Literal['null', 'bool', 'int', 'float', 'str', 'list', 'dict'] - - -class IsInstanceSchema(TypedDict, 
total=False): - type: Required[Literal['is-instance']] - cls: Required[Any] - cls_repr: str - ref: str - metadata: Any - serialization: SerSchema - - -def is_instance_schema( - cls: Any, - *, - cls_repr: str | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> IsInstanceSchema: - """ - Returns a schema that checks if a value is an instance of a class, equivalent to python's `isinstance` method, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - class A: - pass - - schema = core_schema.is_instance_schema(cls=A) - v = SchemaValidator(schema) - v.validate_python(A()) - ``` - - Args: - cls: The value must be an instance of this class - cls_repr: If provided this string is used in the validator name instead of `repr(cls)` - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='is-instance', cls=cls, cls_repr=cls_repr, ref=ref, metadata=metadata, serialization=serialization - ) - - -class IsSubclassSchema(TypedDict, total=False): - type: Required[Literal['is-subclass']] - cls: Required[Type[Any]] - cls_repr: str - ref: str - metadata: Any - serialization: SerSchema - - -def is_subclass_schema( - cls: Type[Any], - *, - cls_repr: str | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> IsInstanceSchema: - """ - Returns a schema that checks if a value is a subtype of a class, equivalent to python's `issubclass` method, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - class A: - pass - - class B(A): - pass - - schema = core_schema.is_subclass_schema(cls=A) - v = SchemaValidator(schema) - v.validate_python(B) - ``` - - Args: - cls: The value must be a subclass of this class - cls_repr: If 
provided this string is used in the validator name instead of `repr(cls)` - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='is-subclass', cls=cls, cls_repr=cls_repr, ref=ref, metadata=metadata, serialization=serialization - ) - - -class CallableSchema(TypedDict, total=False): - type: Required[Literal['callable']] - ref: str - metadata: Any - serialization: SerSchema - - -def callable_schema( - *, ref: str | None = None, metadata: Any = None, serialization: SerSchema | None = None -) -> CallableSchema: - """ - Returns a schema that checks if a value is callable, equivalent to python's `callable` method, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.callable_schema() - v = SchemaValidator(schema) - v.validate_python(min) - ``` - - Args: - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none(type='callable', ref=ref, metadata=metadata, serialization=serialization) - - -class UuidSchema(TypedDict, total=False): - type: Required[Literal['uuid']] - version: Literal[1, 3, 4, 5] - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def uuid_schema( - *, - version: Literal[1, 3, 4, 5] | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> UuidSchema: - return _dict_not_none( - type='uuid', version=version, strict=strict, ref=ref, metadata=metadata, serialization=serialization - ) - - -class IncExSeqSerSchema(TypedDict, total=False): - type: Required[Literal['include-exclude-sequence']] - 
include: Set[int] - exclude: Set[int] - - -def filter_seq_schema(*, include: Set[int] | None = None, exclude: Set[int] | None = None) -> IncExSeqSerSchema: - return _dict_not_none(type='include-exclude-sequence', include=include, exclude=exclude) - - -IncExSeqOrElseSerSchema = Union[IncExSeqSerSchema, SerSchema] - - -class ListSchema(TypedDict, total=False): - type: Required[Literal['list']] - items_schema: CoreSchema - min_length: int - max_length: int - strict: bool - ref: str - metadata: Any - serialization: IncExSeqOrElseSerSchema - - -def list_schema( - items_schema: CoreSchema | None = None, - *, - min_length: int | None = None, - max_length: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: IncExSeqOrElseSerSchema | None = None, -) -> ListSchema: - """ - Returns a schema that matches a list value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.list_schema(core_schema.int_schema(), min_length=0, max_length=10) - v = SchemaValidator(schema) - assert v.validate_python(['4']) == [4] - ``` - - Args: - items_schema: The value must be a list of items that match this schema - min_length: The value must be a list with at least this many items - max_length: The value must be a list with at most this many items - strict: The value must be a list with exactly this many items - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='list', - items_schema=items_schema, - min_length=min_length, - max_length=max_length, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -# @deprecated('tuple_positional_schema is deprecated. 
Use pydantic_core.core_schema.tuple_schema instead.') -def tuple_positional_schema( - items_schema: list[CoreSchema], - *, - extras_schema: CoreSchema | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: IncExSeqOrElseSerSchema | None = None, -) -> TupleSchema: - """ - Returns a schema that matches a tuple of schemas, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.tuple_positional_schema( - [core_schema.int_schema(), core_schema.str_schema()] - ) - v = SchemaValidator(schema) - assert v.validate_python((1, 'hello')) == (1, 'hello') - ``` - - Args: - items_schema: The value must be a tuple with items that match these schemas - extras_schema: The value must be a tuple with items that match this schema - This was inspired by JSON schema's `prefixItems` and `items` fields. - In python's `typing.Tuple`, you can't specify a type for "extra" items -- they must all be the same type - if the length is variable. So this field won't be set from a `typing.Tuple` annotation on a pydantic model. - strict: The value must be a tuple with exactly this many items - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - if extras_schema is not None: - variadic_item_index = len(items_schema) - items_schema = items_schema + [extras_schema] - else: - variadic_item_index = None - return tuple_schema( - items_schema=items_schema, - variadic_item_index=variadic_item_index, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -# @deprecated('tuple_variable_schema is deprecated. 
Use pydantic_core.core_schema.tuple_schema instead.') -def tuple_variable_schema( - items_schema: CoreSchema | None = None, - *, - min_length: int | None = None, - max_length: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: IncExSeqOrElseSerSchema | None = None, -) -> TupleSchema: - """ - Returns a schema that matches a tuple of a given schema, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.tuple_variable_schema( - items_schema=core_schema.int_schema(), min_length=0, max_length=10 - ) - v = SchemaValidator(schema) - assert v.validate_python(('1', 2, 3)) == (1, 2, 3) - ``` - - Args: - items_schema: The value must be a tuple with items that match this schema - min_length: The value must be a tuple with at least this many items - max_length: The value must be a tuple with at most this many items - strict: The value must be a tuple with exactly this many items - ref: Optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return tuple_schema( - items_schema=[items_schema or any_schema()], - variadic_item_index=0, - min_length=min_length, - max_length=max_length, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class TupleSchema(TypedDict, total=False): - type: Required[Literal['tuple']] - items_schema: Required[List[CoreSchema]] - variadic_item_index: int - min_length: int - max_length: int - strict: bool - ref: str - metadata: Any - serialization: IncExSeqOrElseSerSchema - - -def tuple_schema( - items_schema: list[CoreSchema], - *, - variadic_item_index: int | None = None, - min_length: int | None = None, - max_length: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: 
IncExSeqOrElseSerSchema | None = None, -) -> TupleSchema: - """ - Returns a schema that matches a tuple of schemas, with an optional variadic item, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.tuple_schema( - [core_schema.int_schema(), core_schema.str_schema(), core_schema.float_schema()], - variadic_item_index=1, - ) - v = SchemaValidator(schema) - assert v.validate_python((1, 'hello', 'world', 1.5)) == (1, 'hello', 'world', 1.5) - ``` - - Args: - items_schema: The value must be a tuple with items that match these schemas - variadic_item_index: The index of the schema in `items_schema` to be treated as variadic (following PEP 646) - min_length: The value must be a tuple with at least this many items - max_length: The value must be a tuple with at most this many items - strict: The value must be a tuple with exactly this many items - ref: Optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='tuple', - items_schema=items_schema, - variadic_item_index=variadic_item_index, - min_length=min_length, - max_length=max_length, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class SetSchema(TypedDict, total=False): - type: Required[Literal['set']] - items_schema: CoreSchema - min_length: int - max_length: int - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def set_schema( - items_schema: CoreSchema | None = None, - *, - min_length: int | None = None, - max_length: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> SetSchema: - """ - Returns a schema that matches a set of a given schema, e.g.: - - ```py - from pydantic_core import SchemaValidator, 
core_schema - - schema = core_schema.set_schema( - items_schema=core_schema.int_schema(), min_length=0, max_length=10 - ) - v = SchemaValidator(schema) - assert v.validate_python({1, '2', 3}) == {1, 2, 3} - ``` - - Args: - items_schema: The value must be a set with items that match this schema - min_length: The value must be a set with at least this many items - max_length: The value must be a set with at most this many items - strict: The value must be a set with exactly this many items - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='set', - items_schema=items_schema, - min_length=min_length, - max_length=max_length, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class FrozenSetSchema(TypedDict, total=False): - type: Required[Literal['frozenset']] - items_schema: CoreSchema - min_length: int - max_length: int - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def frozenset_schema( - items_schema: CoreSchema | None = None, - *, - min_length: int | None = None, - max_length: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> FrozenSetSchema: - """ - Returns a schema that matches a frozenset of a given schema, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.frozenset_schema( - items_schema=core_schema.int_schema(), min_length=0, max_length=10 - ) - v = SchemaValidator(schema) - assert v.validate_python(frozenset(range(3))) == frozenset({0, 1, 2}) - ``` - - Args: - items_schema: The value must be a frozenset with items that match this schema - min_length: The value must be a frozenset with at least this many items - max_length: The value 
must be a frozenset with at most this many items - strict: The value must be a frozenset with exactly this many items - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='frozenset', - items_schema=items_schema, - min_length=min_length, - max_length=max_length, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class GeneratorSchema(TypedDict, total=False): - type: Required[Literal['generator']] - items_schema: CoreSchema - min_length: int - max_length: int - ref: str - metadata: Any - serialization: IncExSeqOrElseSerSchema - - -def generator_schema( - items_schema: CoreSchema | None = None, - *, - min_length: int | None = None, - max_length: int | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: IncExSeqOrElseSerSchema | None = None, -) -> GeneratorSchema: - """ - Returns a schema that matches a generator value, e.g.: - - ```py - from typing import Iterator - from pydantic_core import SchemaValidator, core_schema - - def gen() -> Iterator[int]: - yield 1 - - schema = core_schema.generator_schema(items_schema=core_schema.int_schema()) - v = SchemaValidator(schema) - v.validate_python(gen()) - ``` - - Unlike other types, validated generators do not raise ValidationErrors eagerly, - but instead will raise a ValidationError when a violating value is actually read from the generator. - This is to ensure that "validated" generators retain the benefit of lazy evaluation. 
- - Args: - items_schema: The value must be a generator with items that match this schema - min_length: The value must be a generator that yields at least this many items - max_length: The value must be a generator that yields at most this many items - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='generator', - items_schema=items_schema, - min_length=min_length, - max_length=max_length, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -IncExDict = Set[Union[int, str]] - - -class IncExDictSerSchema(TypedDict, total=False): - type: Required[Literal['include-exclude-dict']] - include: IncExDict - exclude: IncExDict - - -def filter_dict_schema(*, include: IncExDict | None = None, exclude: IncExDict | None = None) -> IncExDictSerSchema: - return _dict_not_none(type='include-exclude-dict', include=include, exclude=exclude) - - -IncExDictOrElseSerSchema = Union[IncExDictSerSchema, SerSchema] - - -class DictSchema(TypedDict, total=False): - type: Required[Literal['dict']] - keys_schema: CoreSchema # default: AnySchema - values_schema: CoreSchema # default: AnySchema - min_length: int - max_length: int - strict: bool - ref: str - metadata: Any - serialization: IncExDictOrElseSerSchema - - -def dict_schema( - keys_schema: CoreSchema | None = None, - values_schema: CoreSchema | None = None, - *, - min_length: int | None = None, - max_length: int | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> DictSchema: - """ - Returns a schema that matches a dict value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.dict_schema( - keys_schema=core_schema.str_schema(), 
values_schema=core_schema.int_schema() - ) - v = SchemaValidator(schema) - assert v.validate_python({'a': '1', 'b': 2}) == {'a': 1, 'b': 2} - ``` - - Args: - keys_schema: The value must be a dict with keys that match this schema - values_schema: The value must be a dict with values that match this schema - min_length: The value must be a dict with at least this many items - max_length: The value must be a dict with at most this many items - strict: Whether the keys and values should be validated with strict mode - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='dict', - keys_schema=keys_schema, - values_schema=values_schema, - min_length=min_length, - max_length=max_length, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -# (__input_value: Any) -> Any -NoInfoValidatorFunction = Callable[[Any], Any] - - -class NoInfoValidatorFunctionSchema(TypedDict): - type: Literal['no-info'] - function: NoInfoValidatorFunction - - -# (__input_value: Any, __info: ValidationInfo) -> Any -WithInfoValidatorFunction = Callable[[Any, ValidationInfo], Any] - - -class WithInfoValidatorFunctionSchema(TypedDict, total=False): - type: Required[Literal['with-info']] - function: Required[WithInfoValidatorFunction] - field_name: str - - -ValidationFunction = Union[NoInfoValidatorFunctionSchema, WithInfoValidatorFunctionSchema] - - -class _ValidatorFunctionSchema(TypedDict, total=False): - function: Required[ValidationFunction] - schema: Required[CoreSchema] - ref: str - metadata: Any - serialization: SerSchema - - -class BeforeValidatorFunctionSchema(_ValidatorFunctionSchema, total=False): - type: Required[Literal['function-before']] - - -def no_info_before_validator_function( - function: NoInfoValidatorFunction, - schema: 
CoreSchema, - *, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> BeforeValidatorFunctionSchema: - """ - Returns a schema that calls a validator function before validating, no `info` argument is provided, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn(v: bytes) -> str: - return v.decode() + 'world' - - func_schema = core_schema.no_info_before_validator_function( - function=fn, schema=core_schema.str_schema() - ) - schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) - - v = SchemaValidator(schema) - assert v.validate_python({'a': b'hello '}) == {'a': 'hello world'} - ``` - - Args: - function: The validator function to call - schema: The schema to validate the output of the validator function - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-before', - function={'type': 'no-info', 'function': function}, - schema=schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -def with_info_before_validator_function( - function: WithInfoValidatorFunction, - schema: CoreSchema, - *, - field_name: str | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> BeforeValidatorFunctionSchema: - """ - Returns a schema that calls a validator function before validation, the function is called with - an `info` argument, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn(v: bytes, info: core_schema.ValidationInfo) -> str: - assert info.data is not None - assert info.field_name is not None - return v.decode() + 'world' - - func_schema = core_schema.with_info_before_validator_function( - function=fn, 
schema=core_schema.str_schema(), field_name='a' - ) - schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) - - v = SchemaValidator(schema) - assert v.validate_python({'a': b'hello '}) == {'a': 'hello world'} - ``` - - Args: - function: The validator function to call - field_name: The name of the field - schema: The schema to validate the output of the validator function - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-before', - function=_dict_not_none(type='with-info', function=function, field_name=field_name), - schema=schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class AfterValidatorFunctionSchema(_ValidatorFunctionSchema, total=False): - type: Required[Literal['function-after']] - - -def no_info_after_validator_function( - function: NoInfoValidatorFunction, - schema: CoreSchema, - *, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> AfterValidatorFunctionSchema: - """ - Returns a schema that calls a validator function after validating, no `info` argument is provided, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn(v: str) -> str: - return v + 'world' - - func_schema = core_schema.no_info_after_validator_function(fn, core_schema.str_schema()) - schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) - - v = SchemaValidator(schema) - assert v.validate_python({'a': b'hello '}) == {'a': 'hello world'} - ``` - - Args: - function: The validator function to call after the schema is validated - schema: The schema to validate before the validator function - ref: optional unique identifier of the schema, used to reference the schema in other places - 
metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-after', - function={'type': 'no-info', 'function': function}, - schema=schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -def with_info_after_validator_function( - function: WithInfoValidatorFunction, - schema: CoreSchema, - *, - field_name: str | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> AfterValidatorFunctionSchema: - """ - Returns a schema that calls a validator function after validation, the function is called with - an `info` argument, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn(v: str, info: core_schema.ValidationInfo) -> str: - assert info.data is not None - assert info.field_name is not None - return v + 'world' - - func_schema = core_schema.with_info_after_validator_function( - function=fn, schema=core_schema.str_schema(), field_name='a' - ) - schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) - - v = SchemaValidator(schema) - assert v.validate_python({'a': b'hello '}) == {'a': 'hello world'} - ``` - - Args: - function: The validator function to call after the schema is validated - schema: The schema to validate before the validator function - field_name: The name of the field this validators is applied to, if any - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-after', - function=_dict_not_none(type='with-info', function=function, field_name=field_name), - schema=schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class 
ValidatorFunctionWrapHandler(Protocol): - def __call__(self, input_value: Any, outer_location: str | int | None = None) -> Any: # pragma: no cover - ... - - -# (__input_value: Any, __validator: ValidatorFunctionWrapHandler) -> Any -NoInfoWrapValidatorFunction = Callable[[Any, ValidatorFunctionWrapHandler], Any] - - -class NoInfoWrapValidatorFunctionSchema(TypedDict): - type: Literal['no-info'] - function: NoInfoWrapValidatorFunction - - -# (__input_value: Any, __validator: ValidatorFunctionWrapHandler, __info: ValidationInfo) -> Any -WithInfoWrapValidatorFunction = Callable[[Any, ValidatorFunctionWrapHandler, ValidationInfo], Any] - - -class WithInfoWrapValidatorFunctionSchema(TypedDict, total=False): - type: Required[Literal['with-info']] - function: Required[WithInfoWrapValidatorFunction] - field_name: str - - -WrapValidatorFunction = Union[NoInfoWrapValidatorFunctionSchema, WithInfoWrapValidatorFunctionSchema] - - -class WrapValidatorFunctionSchema(TypedDict, total=False): - type: Required[Literal['function-wrap']] - function: Required[WrapValidatorFunction] - schema: Required[CoreSchema] - ref: str - metadata: Any - serialization: SerSchema - - -def no_info_wrap_validator_function( - function: NoInfoWrapValidatorFunction, - schema: CoreSchema, - *, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> WrapValidatorFunctionSchema: - """ - Returns a schema which calls a function with a `validator` callable argument which can - optionally be used to call inner validation with the function logic, this is much like the - "onion" implementation of middleware in many popular web frameworks, no `info` argument is passed, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn( - v: str, - validator: core_schema.ValidatorFunctionWrapHandler, - ) -> str: - return validator(input_value=v) + 'world' - - schema = core_schema.no_info_wrap_validator_function( - function=fn, schema=core_schema.str_schema() 
- ) - v = SchemaValidator(schema) - assert v.validate_python('hello ') == 'hello world' - ``` - - Args: - function: The validator function to call - schema: The schema to validate the output of the validator function - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-wrap', - function={'type': 'no-info', 'function': function}, - schema=schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -def with_info_wrap_validator_function( - function: WithInfoWrapValidatorFunction, - schema: CoreSchema, - *, - field_name: str | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> WrapValidatorFunctionSchema: - """ - Returns a schema which calls a function with a `validator` callable argument which can - optionally be used to call inner validation with the function logic, this is much like the - "onion" implementation of middleware in many popular web frameworks, an `info` argument is also passed, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn( - v: str, - validator: core_schema.ValidatorFunctionWrapHandler, - info: core_schema.ValidationInfo, - ) -> str: - return validator(input_value=v) + 'world' - - schema = core_schema.with_info_wrap_validator_function( - function=fn, schema=core_schema.str_schema() - ) - v = SchemaValidator(schema) - assert v.validate_python('hello ') == 'hello world' - ``` - - Args: - function: The validator function to call - schema: The schema to validate the output of the validator function - field_name: The name of the field this validators is applied to, if any - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to 
include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-wrap', - function=_dict_not_none(type='with-info', function=function, field_name=field_name), - schema=schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class PlainValidatorFunctionSchema(TypedDict, total=False): - type: Required[Literal['function-plain']] - function: Required[ValidationFunction] - ref: str - metadata: Any - serialization: SerSchema - - -def no_info_plain_validator_function( - function: NoInfoValidatorFunction, - *, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> PlainValidatorFunctionSchema: - """ - Returns a schema that uses the provided function for validation, no `info` argument is passed, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn(v: str) -> str: - assert 'hello' in v - return v + 'world' - - schema = core_schema.no_info_plain_validator_function(function=fn) - v = SchemaValidator(schema) - assert v.validate_python('hello ') == 'hello world' - ``` - - Args: - function: The validator function to call - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-plain', - function={'type': 'no-info', 'function': function}, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -def with_info_plain_validator_function( - function: WithInfoValidatorFunction, - *, - field_name: str | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> PlainValidatorFunctionSchema: - """ - Returns a schema that uses the provided function for validation, an `info` argument is passed, e.g.: - - ```py - from 
pydantic_core import SchemaValidator, core_schema - - def fn(v: str, info: core_schema.ValidationInfo) -> str: - assert 'hello' in v - return v + 'world' - - schema = core_schema.with_info_plain_validator_function(function=fn) - v = SchemaValidator(schema) - assert v.validate_python('hello ') == 'hello world' - ``` - - Args: - function: The validator function to call - field_name: The name of the field this validators is applied to, if any - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='function-plain', - function=_dict_not_none(type='with-info', function=function, field_name=field_name), - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class WithDefaultSchema(TypedDict, total=False): - type: Required[Literal['default']] - schema: Required[CoreSchema] - default: Any - default_factory: Callable[[], Any] - on_error: Literal['raise', 'omit', 'default'] # default: 'raise' - validate_default: bool # default: False - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def with_default_schema( - schema: CoreSchema, - *, - default: Any = PydanticUndefined, - default_factory: Callable[[], Any] | None = None, - on_error: Literal['raise', 'omit', 'default'] | None = None, - validate_default: bool | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> WithDefaultSchema: - """ - Returns a schema that adds a default value to the given schema, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.with_default_schema(core_schema.str_schema(), default='hello') - wrapper_schema = core_schema.typed_dict_schema( - {'a': core_schema.typed_dict_field(schema)} - ) - v = 
SchemaValidator(wrapper_schema) - assert v.validate_python({}) == v.validate_python({'a': 'hello'}) - ``` - - Args: - schema: The schema to add a default value to - default: The default value to use - default_factory: A function that returns the default value to use - on_error: What to do if the schema validation fails. One of 'raise', 'omit', 'default' - validate_default: Whether the default value should be validated - strict: Whether the underlying schema should be validated with strict mode - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - s = _dict_not_none( - type='default', - schema=schema, - default_factory=default_factory, - on_error=on_error, - validate_default=validate_default, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - if default is not PydanticUndefined: - s['default'] = default - return s - - -class NullableSchema(TypedDict, total=False): - type: Required[Literal['nullable']] - schema: Required[CoreSchema] - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def nullable_schema( - schema: CoreSchema, - *, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> NullableSchema: - """ - Returns a schema that matches a nullable value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.nullable_schema(core_schema.str_schema()) - v = SchemaValidator(schema) - assert v.validate_python(None) is None - ``` - - Args: - schema: The schema to wrap - strict: Whether the underlying schema should be validated with strict mode - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used 
by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='nullable', schema=schema, strict=strict, ref=ref, metadata=metadata, serialization=serialization - ) - - -class UnionSchema(TypedDict, total=False): - type: Required[Literal['union']] - choices: Required[List[Union[CoreSchema, Tuple[CoreSchema, str]]]] - # default true, whether to automatically collapse unions with one element to the inner validator - auto_collapse: bool - custom_error_type: str - custom_error_message: str - custom_error_context: Dict[str, Union[str, int, float]] - mode: Literal['smart', 'left_to_right'] # default: 'smart' - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def union_schema( - choices: list[CoreSchema | tuple[CoreSchema, str]], - *, - auto_collapse: bool | None = None, - custom_error_type: str | None = None, - custom_error_message: str | None = None, - custom_error_context: dict[str, str | int] | None = None, - mode: Literal['smart', 'left_to_right'] | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> UnionSchema: - """ - Returns a schema that matches a union value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.union_schema([core_schema.str_schema(), core_schema.int_schema()]) - v = SchemaValidator(schema) - assert v.validate_python('hello') == 'hello' - assert v.validate_python(1) == 1 - ``` - - Args: - choices: The schemas to match. If a tuple, the second item is used as the label for the case. 
- auto_collapse: whether to automatically collapse unions with one element to the inner validator, default true - custom_error_type: The custom error type to use if the validation fails - custom_error_message: The custom error message to use if the validation fails - custom_error_context: The custom error context to use if the validation fails - mode: How to select which choice to return - * `smart` (default) will try to return the choice which is the closest match to the input value - * `left_to_right` will return the first choice in `choices` which succeeds validation - strict: Whether the underlying schemas should be validated with strict mode - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='union', - choices=choices, - auto_collapse=auto_collapse, - custom_error_type=custom_error_type, - custom_error_message=custom_error_message, - custom_error_context=custom_error_context, - mode=mode, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class TaggedUnionSchema(TypedDict, total=False): - type: Required[Literal['tagged-union']] - choices: Required[Dict[Hashable, CoreSchema]] - discriminator: Required[Union[str, List[Union[str, int]], List[List[Union[str, int]]], Callable[[Any], Hashable]]] - custom_error_type: str - custom_error_message: str - custom_error_context: Dict[str, Union[str, int, float]] - strict: bool - from_attributes: bool # default: True - ref: str - metadata: Any - serialization: SerSchema - - -def tagged_union_schema( - choices: Dict[Hashable, CoreSchema], - discriminator: str | list[str | int] | list[list[str | int]] | Callable[[Any], Hashable], - *, - custom_error_type: str | None = None, - custom_error_message: str | None = None, - custom_error_context: dict[str, int | str | float] | 
None = None, - strict: bool | None = None, - from_attributes: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> TaggedUnionSchema: - """ - Returns a schema that matches a tagged union value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - apple_schema = core_schema.typed_dict_schema( - { - 'foo': core_schema.typed_dict_field(core_schema.str_schema()), - 'bar': core_schema.typed_dict_field(core_schema.int_schema()), - } - ) - banana_schema = core_schema.typed_dict_schema( - { - 'foo': core_schema.typed_dict_field(core_schema.str_schema()), - 'spam': core_schema.typed_dict_field( - core_schema.list_schema(items_schema=core_schema.int_schema()) - ), - } - ) - schema = core_schema.tagged_union_schema( - choices={ - 'apple': apple_schema, - 'banana': banana_schema, - }, - discriminator='foo', - ) - v = SchemaValidator(schema) - assert v.validate_python({'foo': 'apple', 'bar': '123'}) == {'foo': 'apple', 'bar': 123} - assert v.validate_python({'foo': 'banana', 'spam': [1, 2, 3]}) == { - 'foo': 'banana', - 'spam': [1, 2, 3], - } - ``` - - Args: - choices: The schemas to match - When retrieving a schema from `choices` using the discriminator value, if the value is a str, - it should be fed back into the `choices` map until a schema is obtained - (This approach is to prevent multiple ownership of a single schema in Rust) - discriminator: The discriminator to use to determine the schema to use - * If `discriminator` is a str, it is the name of the attribute to use as the discriminator - * If `discriminator` is a list of int/str, it should be used as a "path" to access the discriminator - * If `discriminator` is a list of lists, each inner list is a path, and the first path that exists is used - * If `discriminator` is a callable, it should return the discriminator when called on the value to validate; - the callable can return `None` to indicate that there is no matching 
discriminator present on the input - custom_error_type: The custom error type to use if the validation fails - custom_error_message: The custom error message to use if the validation fails - custom_error_context: The custom error context to use if the validation fails - strict: Whether the underlying schemas should be validated with strict mode - from_attributes: Whether to use the attributes of the object to retrieve the discriminator value - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='tagged-union', - choices=choices, - discriminator=discriminator, - custom_error_type=custom_error_type, - custom_error_message=custom_error_message, - custom_error_context=custom_error_context, - strict=strict, - from_attributes=from_attributes, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class ChainSchema(TypedDict, total=False): - type: Required[Literal['chain']] - steps: Required[List[CoreSchema]] - ref: str - metadata: Any - serialization: SerSchema - - -def chain_schema( - steps: list[CoreSchema], *, ref: str | None = None, metadata: Any = None, serialization: SerSchema | None = None -) -> ChainSchema: - """ - Returns a schema that chains the provided validation schemas, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn(v: str, info: core_schema.ValidationInfo) -> str: - assert 'hello' in v - return v + ' world' - - fn_schema = core_schema.with_info_plain_validator_function(function=fn) - schema = core_schema.chain_schema( - [fn_schema, fn_schema, fn_schema, core_schema.str_schema()] - ) - v = SchemaValidator(schema) - assert v.validate_python('hello') == 'hello world world world' - ``` - - Args: - steps: The schemas to chain - ref: optional unique identifier of the schema, used to reference 
the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none(type='chain', steps=steps, ref=ref, metadata=metadata, serialization=serialization) - - -class LaxOrStrictSchema(TypedDict, total=False): - type: Required[Literal['lax-or-strict']] - lax_schema: Required[CoreSchema] - strict_schema: Required[CoreSchema] - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def lax_or_strict_schema( - lax_schema: CoreSchema, - strict_schema: CoreSchema, - *, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> LaxOrStrictSchema: - """ - Returns a schema that uses the lax or strict schema, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - def fn(v: str, info: core_schema.ValidationInfo) -> str: - assert 'hello' in v - return v + ' world' - - lax_schema = core_schema.int_schema(strict=False) - strict_schema = core_schema.int_schema(strict=True) - - schema = core_schema.lax_or_strict_schema( - lax_schema=lax_schema, strict_schema=strict_schema, strict=True - ) - v = SchemaValidator(schema) - assert v.validate_python(123) == 123 - - schema = core_schema.lax_or_strict_schema( - lax_schema=lax_schema, strict_schema=strict_schema, strict=False - ) - v = SchemaValidator(schema) - assert v.validate_python('123') == 123 - ``` - - Args: - lax_schema: The lax schema to use - strict_schema: The strict schema to use - strict: Whether the strict schema should be used - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='lax-or-strict', - lax_schema=lax_schema, - strict_schema=strict_schema, - strict=strict, - 
ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class JsonOrPythonSchema(TypedDict, total=False): - type: Required[Literal['json-or-python']] - json_schema: Required[CoreSchema] - python_schema: Required[CoreSchema] - ref: str - metadata: Any - serialization: SerSchema - - -def json_or_python_schema( - json_schema: CoreSchema, - python_schema: CoreSchema, - *, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> JsonOrPythonSchema: - """ - Returns a schema that uses the Json or Python schema depending on the input: - - ```py - from pydantic_core import SchemaValidator, ValidationError, core_schema - - v = SchemaValidator( - core_schema.json_or_python_schema( - json_schema=core_schema.int_schema(), - python_schema=core_schema.int_schema(strict=True), - ) - ) - - assert v.validate_json('"123"') == 123 - - try: - v.validate_python('123') - except ValidationError: - pass - else: - raise AssertionError('Validation should have failed') - ``` - - Args: - json_schema: The schema to use for Json inputs - python_schema: The schema to use for Python inputs - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='json-or-python', - json_schema=json_schema, - python_schema=python_schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class TypedDictField(TypedDict, total=False): - type: Required[Literal['typed-dict-field']] - schema: Required[CoreSchema] - required: bool - validation_alias: Union[str, List[Union[str, int]], List[List[Union[str, int]]]] - serialization_alias: str - serialization_exclude: bool # default: False - metadata: Any - - -def typed_dict_field( - schema: CoreSchema, - *, - required: bool | None = None, - validation_alias: str | list[str | int] | 
list[list[str | int]] | None = None, - serialization_alias: str | None = None, - serialization_exclude: bool | None = None, - metadata: Any = None, -) -> TypedDictField: - """ - Returns a schema that matches a typed dict field, e.g.: - - ```py - from pydantic_core import core_schema - - field = core_schema.typed_dict_field(schema=core_schema.int_schema(), required=True) - ``` - - Args: - schema: The schema to use for the field - required: Whether the field is required - validation_alias: The alias(es) to use to find the field in the validation data - serialization_alias: The alias to use as a key when serializing - serialization_exclude: Whether to exclude the field when serializing - metadata: Any other information you want to include with the schema, not used by pydantic-core - """ - return _dict_not_none( - type='typed-dict-field', - schema=schema, - required=required, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - serialization_exclude=serialization_exclude, - metadata=metadata, - ) - - -class TypedDictSchema(TypedDict, total=False): - type: Required[Literal['typed-dict']] - fields: Required[Dict[str, TypedDictField]] - computed_fields: List[ComputedField] - strict: bool - extras_schema: CoreSchema - # all these values can be set via config, equivalent fields have `typed_dict_` prefix - extra_behavior: ExtraBehavior - total: bool # default: True - populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1 - ref: str - metadata: Any - serialization: SerSchema - config: CoreConfig - - -def typed_dict_schema( - fields: Dict[str, TypedDictField], - *, - computed_fields: list[ComputedField] | None = None, - strict: bool | None = None, - extras_schema: CoreSchema | None = None, - extra_behavior: ExtraBehavior | None = None, - total: bool | None = None, - populate_by_name: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, - config: CoreConfig | None 
= None, -) -> TypedDictSchema: - """ - Returns a schema that matches a typed dict, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - wrapper_schema = core_schema.typed_dict_schema( - {'a': core_schema.typed_dict_field(core_schema.str_schema())} - ) - v = SchemaValidator(wrapper_schema) - assert v.validate_python({'a': 'hello'}) == {'a': 'hello'} - ``` - - Args: - fields: The fields to use for the typed dict - computed_fields: Computed fields to use when serializing the model, only applies when directly inside a model - strict: Whether the typed dict is strict - extras_schema: The extra validator to use for the typed dict - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - extra_behavior: The extra behavior to use for the typed dict - total: Whether the typed dict is total - populate_by_name: Whether the typed dict should populate by name - serialization: Custom serialization schema - """ - return _dict_not_none( - type='typed-dict', - fields=fields, - computed_fields=computed_fields, - strict=strict, - extras_schema=extras_schema, - extra_behavior=extra_behavior, - total=total, - populate_by_name=populate_by_name, - ref=ref, - metadata=metadata, - serialization=serialization, - config=config, - ) - - -class ModelField(TypedDict, total=False): - type: Required[Literal['model-field']] - schema: Required[CoreSchema] - validation_alias: Union[str, List[Union[str, int]], List[List[Union[str, int]]]] - serialization_alias: str - serialization_exclude: bool # default: False - frozen: bool - metadata: Any - - -def model_field( - schema: CoreSchema, - *, - validation_alias: str | list[str | int] | list[list[str | int]] | None = None, - serialization_alias: str | None = None, - serialization_exclude: bool | None = None, - frozen: bool | None = None, - metadata: Any = None, -) -> ModelField: - """ - Returns a 
schema for a model field, e.g.: - - ```py - from pydantic_core import core_schema - - field = core_schema.model_field(schema=core_schema.int_schema()) - ``` - - Args: - schema: The schema to use for the field - validation_alias: The alias(es) to use to find the field in the validation data - serialization_alias: The alias to use as a key when serializing - serialization_exclude: Whether to exclude the field when serializing - frozen: Whether the field is frozen - metadata: Any other information you want to include with the schema, not used by pydantic-core - """ - return _dict_not_none( - type='model-field', - schema=schema, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - serialization_exclude=serialization_exclude, - frozen=frozen, - metadata=metadata, - ) - - -class ModelFieldsSchema(TypedDict, total=False): - type: Required[Literal['model-fields']] - fields: Required[Dict[str, ModelField]] - model_name: str - computed_fields: List[ComputedField] - strict: bool - extras_schema: CoreSchema - # all these values can be set via config, equivalent fields have `typed_dict_` prefix - extra_behavior: ExtraBehavior - populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1 - from_attributes: bool - ref: str - metadata: Any - serialization: SerSchema - - -def model_fields_schema( - fields: Dict[str, ModelField], - *, - model_name: str | None = None, - computed_fields: list[ComputedField] | None = None, - strict: bool | None = None, - extras_schema: CoreSchema | None = None, - extra_behavior: ExtraBehavior | None = None, - populate_by_name: bool | None = None, - from_attributes: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> ModelFieldsSchema: - """ - Returns a schema that matches a typed dict, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - wrapper_schema = core_schema.model_fields_schema( - {'a': 
core_schema.model_field(core_schema.str_schema())} - ) - v = SchemaValidator(wrapper_schema) - print(v.validate_python({'a': 'hello'})) - #> ({'a': 'hello'}, None, {'a'}) - ``` - - Args: - fields: The fields to use for the typed dict - model_name: The name of the model, used for error messages, defaults to "Model" - computed_fields: Computed fields to use when serializing the model, only applies when directly inside a model - strict: Whether the typed dict is strict - extras_schema: The extra validator to use for the typed dict - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - extra_behavior: The extra behavior to use for the typed dict - populate_by_name: Whether the typed dict should populate by name - from_attributes: Whether the typed dict should be populated from attributes - serialization: Custom serialization schema - """ - return _dict_not_none( - type='model-fields', - fields=fields, - model_name=model_name, - computed_fields=computed_fields, - strict=strict, - extras_schema=extras_schema, - extra_behavior=extra_behavior, - populate_by_name=populate_by_name, - from_attributes=from_attributes, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class ModelSchema(TypedDict, total=False): - type: Required[Literal['model']] - cls: Required[Type[Any]] - schema: Required[CoreSchema] - custom_init: bool - root_model: bool - post_init: str - revalidate_instances: Literal['always', 'never', 'subclass-instances'] # default: 'never' - strict: bool - frozen: bool - extra_behavior: ExtraBehavior - config: CoreConfig - ref: str - metadata: Any - serialization: SerSchema - - -def model_schema( - cls: Type[Any], - schema: CoreSchema, - *, - custom_init: bool | None = None, - root_model: bool | None = None, - post_init: str | None = None, - revalidate_instances: Literal['always', 'never', 'subclass-instances'] | 
None = None, - strict: bool | None = None, - frozen: bool | None = None, - extra_behavior: ExtraBehavior | None = None, - config: CoreConfig | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> ModelSchema: - """ - A model schema generally contains a typed-dict schema. - It will run the typed dict validator, then create a new class - and set the dict and fields set returned from the typed dict validator - to `__dict__` and `__pydantic_fields_set__` respectively. - - Example: - - ```py - from pydantic_core import CoreConfig, SchemaValidator, core_schema - - class MyModel: - __slots__ = ( - '__dict__', - '__pydantic_fields_set__', - '__pydantic_extra__', - '__pydantic_private__', - ) - - schema = core_schema.model_schema( - cls=MyModel, - config=CoreConfig(str_max_length=5), - schema=core_schema.model_fields_schema( - fields={'a': core_schema.model_field(core_schema.str_schema())}, - ), - ) - v = SchemaValidator(schema) - assert v.isinstance_python({'a': 'hello'}) is True - assert v.isinstance_python({'a': 'too long'}) is False - ``` - - Args: - cls: The class to use for the model - schema: The schema to use for the model - custom_init: Whether the model has a custom init method - root_model: Whether the model is a `RootModel` - post_init: The call after init to use for the model - revalidate_instances: whether instances of models and dataclasses (including subclass instances) - should re-validate defaults to config.revalidate_instances, else 'never' - strict: Whether the model is strict - frozen: Whether the model is frozen - extra_behavior: The extra behavior to use for the model, used in serialization - config: The config to use for the model - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( 
- type='model', - cls=cls, - schema=schema, - custom_init=custom_init, - root_model=root_model, - post_init=post_init, - revalidate_instances=revalidate_instances, - strict=strict, - frozen=frozen, - extra_behavior=extra_behavior, - config=config, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class DataclassField(TypedDict, total=False): - type: Required[Literal['dataclass-field']] - name: Required[str] - schema: Required[CoreSchema] - kw_only: bool # default: True - init: bool # default: True - init_only: bool # default: False - frozen: bool # default: False - validation_alias: Union[str, List[Union[str, int]], List[List[Union[str, int]]]] - serialization_alias: str - serialization_exclude: bool # default: False - metadata: Any - - -def dataclass_field( - name: str, - schema: CoreSchema, - *, - kw_only: bool | None = None, - init: bool | None = None, - init_only: bool | None = None, - validation_alias: str | list[str | int] | list[list[str | int]] | None = None, - serialization_alias: str | None = None, - serialization_exclude: bool | None = None, - metadata: Any = None, - frozen: bool | None = None, -) -> DataclassField: - """ - Returns a schema for a dataclass field, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - field = core_schema.dataclass_field( - name='a', schema=core_schema.str_schema(), kw_only=False - ) - schema = core_schema.dataclass_args_schema('Foobar', [field]) - v = SchemaValidator(schema) - assert v.validate_python({'a': 'hello'}) == ({'a': 'hello'}, None) - ``` - - Args: - name: The name to use for the argument parameter - schema: The schema to use for the argument parameter - kw_only: Whether the field can be set with a positional argument as well as a keyword argument - init: Whether the field should be validated during initialization - init_only: Whether the field should be omitted from `__dict__` and passed to `__post_init__` - validation_alias: The alias(es) to use to find the field in 
the validation data - serialization_alias: The alias to use as a key when serializing - serialization_exclude: Whether to exclude the field when serializing - metadata: Any other information you want to include with the schema, not used by pydantic-core - frozen: Whether the field is frozen - """ - return _dict_not_none( - type='dataclass-field', - name=name, - schema=schema, - kw_only=kw_only, - init=init, - init_only=init_only, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - serialization_exclude=serialization_exclude, - metadata=metadata, - frozen=frozen, - ) - - -class DataclassArgsSchema(TypedDict, total=False): - type: Required[Literal['dataclass-args']] - dataclass_name: Required[str] - fields: Required[List[DataclassField]] - computed_fields: List[ComputedField] - populate_by_name: bool # default: False - collect_init_only: bool # default: False - ref: str - metadata: Any - serialization: SerSchema - extra_behavior: ExtraBehavior - - -def dataclass_args_schema( - dataclass_name: str, - fields: list[DataclassField], - *, - computed_fields: List[ComputedField] | None = None, - populate_by_name: bool | None = None, - collect_init_only: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, - extra_behavior: ExtraBehavior | None = None, -) -> DataclassArgsSchema: - """ - Returns a schema for validating dataclass arguments, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - field_a = core_schema.dataclass_field( - name='a', schema=core_schema.str_schema(), kw_only=False - ) - field_b = core_schema.dataclass_field( - name='b', schema=core_schema.bool_schema(), kw_only=False - ) - schema = core_schema.dataclass_args_schema('Foobar', [field_a, field_b]) - v = SchemaValidator(schema) - assert v.validate_python({'a': 'hello', 'b': True}) == ({'a': 'hello', 'b': True}, None) - ``` - - Args: - dataclass_name: The name of the dataclass being validated - 
fields: The fields to use for the dataclass - computed_fields: Computed fields to use when serializing the dataclass - populate_by_name: Whether to populate by name - collect_init_only: Whether to collect init only fields into a dict to pass to `__post_init__` - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - extra_behavior: How to handle extra fields - """ - return _dict_not_none( - type='dataclass-args', - dataclass_name=dataclass_name, - fields=fields, - computed_fields=computed_fields, - populate_by_name=populate_by_name, - collect_init_only=collect_init_only, - ref=ref, - metadata=metadata, - serialization=serialization, - extra_behavior=extra_behavior, - ) - - -class DataclassSchema(TypedDict, total=False): - type: Required[Literal['dataclass']] - cls: Required[Type[Any]] - schema: Required[CoreSchema] - fields: Required[List[str]] - cls_name: str - post_init: bool # default: False - revalidate_instances: Literal['always', 'never', 'subclass-instances'] # default: 'never' - strict: bool # default: False - frozen: bool # default False - ref: str - metadata: Any - serialization: SerSchema - slots: bool - config: CoreConfig - - -def dataclass_schema( - cls: Type[Any], - schema: CoreSchema, - fields: List[str], - *, - cls_name: str | None = None, - post_init: bool | None = None, - revalidate_instances: Literal['always', 'never', 'subclass-instances'] | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, - frozen: bool | None = None, - slots: bool | None = None, - config: CoreConfig | None = None, -) -> DataclassSchema: - """ - Returns a schema for a dataclass. As with `ModelSchema`, this schema can only be used as a field within - another schema, not as the root type. 
- - Args: - cls: The dataclass type, used to perform subclass checks - schema: The schema to use for the dataclass fields - fields: Fields of the dataclass, this is used in serialization and in validation during re-validation - and while validating assignment - cls_name: The name to use in error locs, etc; this is useful for generics (default: `cls.__name__`) - post_init: Whether to call `__post_init__` after validation - revalidate_instances: whether instances of models and dataclasses (including subclass instances) - should re-validate defaults to config.revalidate_instances, else 'never' - strict: Whether to require an exact instance of `cls` - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - frozen: Whether the dataclass is frozen - slots: Whether `slots=True` on the dataclass, means each field is assigned independently, rather than - simply setting `__dict__`, default false - """ - return _dict_not_none( - type='dataclass', - cls=cls, - fields=fields, - cls_name=cls_name, - schema=schema, - post_init=post_init, - revalidate_instances=revalidate_instances, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - frozen=frozen, - slots=slots, - config=config, - ) - - -class ArgumentsParameter(TypedDict, total=False): - name: Required[str] - schema: Required[CoreSchema] - mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] # default positional_or_keyword - alias: Union[str, List[Union[str, int]], List[List[Union[str, int]]]] - - -def arguments_parameter( - name: str, - schema: CoreSchema, - *, - mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] | None = None, - alias: str | list[str | int] | list[list[str | int]] | None = None, -) -> ArgumentsParameter: - """ - Returns a schema that matches an argument 
parameter, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - param = core_schema.arguments_parameter( - name='a', schema=core_schema.str_schema(), mode='positional_only' - ) - schema = core_schema.arguments_schema([param]) - v = SchemaValidator(schema) - assert v.validate_python(('hello',)) == (('hello',), {}) - ``` - - Args: - name: The name to use for the argument parameter - schema: The schema to use for the argument parameter - mode: The mode to use for the argument parameter - alias: The alias to use for the argument parameter - """ - return _dict_not_none(name=name, schema=schema, mode=mode, alias=alias) - - -class ArgumentsSchema(TypedDict, total=False): - type: Required[Literal['arguments']] - arguments_schema: Required[List[ArgumentsParameter]] - populate_by_name: bool - var_args_schema: CoreSchema - var_kwargs_schema: CoreSchema - ref: str - metadata: Any - serialization: SerSchema - - -def arguments_schema( - arguments: list[ArgumentsParameter], - *, - populate_by_name: bool | None = None, - var_args_schema: CoreSchema | None = None, - var_kwargs_schema: CoreSchema | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> ArgumentsSchema: - """ - Returns a schema that matches an arguments schema, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - param_a = core_schema.arguments_parameter( - name='a', schema=core_schema.str_schema(), mode='positional_only' - ) - param_b = core_schema.arguments_parameter( - name='b', schema=core_schema.bool_schema(), mode='positional_only' - ) - schema = core_schema.arguments_schema([param_a, param_b]) - v = SchemaValidator(schema) - assert v.validate_python(('hello', True)) == (('hello', True), {}) - ``` - - Args: - arguments: The arguments to use for the arguments schema - populate_by_name: Whether to populate by name - var_args_schema: The variable args schema to use for the arguments schema - var_kwargs_schema: 
The variable kwargs schema to use for the arguments schema - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='arguments', - arguments_schema=arguments, - populate_by_name=populate_by_name, - var_args_schema=var_args_schema, - var_kwargs_schema=var_kwargs_schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class CallSchema(TypedDict, total=False): - type: Required[Literal['call']] - arguments_schema: Required[CoreSchema] - function: Required[Callable[..., Any]] - function_name: str # default function.__name__ - return_schema: CoreSchema - ref: str - metadata: Any - serialization: SerSchema - - -def call_schema( - arguments: CoreSchema, - function: Callable[..., Any], - *, - function_name: str | None = None, - return_schema: CoreSchema | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> CallSchema: - """ - Returns a schema that matches an arguments schema, then calls a function, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - param_a = core_schema.arguments_parameter( - name='a', schema=core_schema.str_schema(), mode='positional_only' - ) - param_b = core_schema.arguments_parameter( - name='b', schema=core_schema.bool_schema(), mode='positional_only' - ) - args_schema = core_schema.arguments_schema([param_a, param_b]) - - schema = core_schema.call_schema( - arguments=args_schema, - function=lambda a, b: a + str(not b), - return_schema=core_schema.str_schema(), - ) - v = SchemaValidator(schema) - assert v.validate_python((('hello', True))) == 'helloFalse' - ``` - - Args: - arguments: The arguments to use for the arguments schema - function: The function to use for the call schema - function_name: The function name to use for the 
call schema, if not provided `function.__name__` is used - return_schema: The return schema to use for the call schema - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='call', - arguments_schema=arguments, - function=function, - function_name=function_name, - return_schema=return_schema, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class CustomErrorSchema(TypedDict, total=False): - type: Required[Literal['custom-error']] - schema: Required[CoreSchema] - custom_error_type: Required[str] - custom_error_message: str - custom_error_context: Dict[str, Union[str, int, float]] - ref: str - metadata: Any - serialization: SerSchema - - -def custom_error_schema( - schema: CoreSchema, - custom_error_type: str, - *, - custom_error_message: str | None = None, - custom_error_context: dict[str, Any] | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> CustomErrorSchema: - """ - Returns a schema that matches a custom error value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.custom_error_schema( - schema=core_schema.int_schema(), - custom_error_type='MyError', - custom_error_message='Error msg', - ) - v = SchemaValidator(schema) - v.validate_python(1) - ``` - - Args: - schema: The schema to use for the custom error schema - custom_error_type: The custom error type to use for the custom error schema - custom_error_message: The custom error message to use for the custom error schema - custom_error_context: The custom error context to use for the custom error schema - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the 
schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='custom-error', - schema=schema, - custom_error_type=custom_error_type, - custom_error_message=custom_error_message, - custom_error_context=custom_error_context, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class JsonSchema(TypedDict, total=False): - type: Required[Literal['json']] - schema: CoreSchema - ref: str - metadata: Any - serialization: SerSchema - - -def json_schema( - schema: CoreSchema | None = None, - *, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> JsonSchema: - """ - Returns a schema that matches a JSON value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - dict_schema = core_schema.model_fields_schema( - { - 'field_a': core_schema.model_field(core_schema.str_schema()), - 'field_b': core_schema.model_field(core_schema.bool_schema()), - }, - ) - - class MyModel: - __slots__ = ( - '__dict__', - '__pydantic_fields_set__', - '__pydantic_extra__', - '__pydantic_private__', - ) - field_a: str - field_b: bool - - json_schema = core_schema.json_schema(schema=dict_schema) - schema = core_schema.model_schema(cls=MyModel, schema=json_schema) - v = SchemaValidator(schema) - m = v.validate_python('{"field_a": "hello", "field_b": true}') - assert isinstance(m, MyModel) - ``` - - Args: - schema: The schema to use for the JSON schema - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none(type='json', schema=schema, ref=ref, metadata=metadata, serialization=serialization) - - -class UrlSchema(TypedDict, total=False): - type: Required[Literal['url']] - max_length: int - allowed_schemes: List[str] - host_required: bool # default False - 
default_host: str - default_port: int - default_path: str - strict: bool - ref: str - metadata: Any - serialization: SerSchema - - -def url_schema( - *, - max_length: int | None = None, - allowed_schemes: list[str] | None = None, - host_required: bool | None = None, - default_host: str | None = None, - default_port: int | None = None, - default_path: str | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> UrlSchema: - """ - Returns a schema that matches a URL value, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.url_schema() - v = SchemaValidator(schema) - print(v.validate_python('https://example.com')) - #> https://example.com/ - ``` - - Args: - max_length: The maximum length of the URL - allowed_schemes: The allowed URL schemes - host_required: Whether the URL must have a host - default_host: The default host to use if the URL does not have a host - default_port: The default port to use if the URL does not have a port - default_path: The default path to use if the URL does not have a path - strict: Whether to use strict URL parsing - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='url', - max_length=max_length, - allowed_schemes=allowed_schemes, - host_required=host_required, - default_host=default_host, - default_port=default_port, - default_path=default_path, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class MultiHostUrlSchema(TypedDict, total=False): - type: Required[Literal['multi-host-url']] - max_length: int - allowed_schemes: List[str] - host_required: bool # default False - default_host: str - default_port: int - default_path: str - strict: bool - 
ref: str - metadata: Any - serialization: SerSchema - - -def multi_host_url_schema( - *, - max_length: int | None = None, - allowed_schemes: list[str] | None = None, - host_required: bool | None = None, - default_host: str | None = None, - default_port: int | None = None, - default_path: str | None = None, - strict: bool | None = None, - ref: str | None = None, - metadata: Any = None, - serialization: SerSchema | None = None, -) -> MultiHostUrlSchema: - """ - Returns a schema that matches a URL value with possibly multiple hosts, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.multi_host_url_schema() - v = SchemaValidator(schema) - print(v.validate_python('redis://localhost,0.0.0.0,127.0.0.1')) - #> redis://localhost,0.0.0.0,127.0.0.1 - ``` - - Args: - max_length: The maximum length of the URL - allowed_schemes: The allowed URL schemes - host_required: Whether the URL must have a host - default_host: The default host to use if the URL does not have a host - default_port: The default port to use if the URL does not have a port - default_path: The default path to use if the URL does not have a path - strict: Whether to use strict URL parsing - ref: optional unique identifier of the schema, used to reference the schema in other places - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='multi-host-url', - max_length=max_length, - allowed_schemes=allowed_schemes, - host_required=host_required, - default_host=default_host, - default_port=default_port, - default_path=default_path, - strict=strict, - ref=ref, - metadata=metadata, - serialization=serialization, - ) - - -class DefinitionsSchema(TypedDict, total=False): - type: Required[Literal['definitions']] - schema: Required[CoreSchema] - definitions: Required[List[CoreSchema]] - metadata: Any - serialization: SerSchema - - -def 
definitions_schema(schema: CoreSchema, definitions: list[CoreSchema]) -> DefinitionsSchema: - """ - Build a schema that contains both an inner schema and a list of definitions which can be used - within the inner schema. - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema = core_schema.definitions_schema( - core_schema.list_schema(core_schema.definition_reference_schema('foobar')), - [core_schema.int_schema(ref='foobar')], - ) - v = SchemaValidator(schema) - assert v.validate_python([1, 2, '3']) == [1, 2, 3] - ``` - - Args: - schema: The inner schema - definitions: List of definitions which can be referenced within inner schema - """ - return DefinitionsSchema(type='definitions', schema=schema, definitions=definitions) - - -class DefinitionReferenceSchema(TypedDict, total=False): - type: Required[Literal['definition-ref']] - schema_ref: Required[str] - ref: str - metadata: Any - serialization: SerSchema - - -def definition_reference_schema( - schema_ref: str, ref: str | None = None, metadata: Any = None, serialization: SerSchema | None = None -) -> DefinitionReferenceSchema: - """ - Returns a schema that points to a schema stored in "definitions", this is useful for nested recursive - models and also when you want to define validators separately from the main schema, e.g.: - - ```py - from pydantic_core import SchemaValidator, core_schema - - schema_definition = core_schema.definition_reference_schema('list-schema') - schema = core_schema.definitions_schema( - schema=schema_definition, - definitions=[ - core_schema.list_schema(items_schema=schema_definition, ref='list-schema'), - ], - ) - v = SchemaValidator(schema) - assert v.validate_python([()]) == [[]] - ``` - - Args: - schema_ref: The schema ref to use for the definition reference schema - metadata: Any other information you want to include with the schema, not used by pydantic-core - serialization: Custom serialization schema - """ - return _dict_not_none( - type='definition-ref', 
schema_ref=schema_ref, ref=ref, metadata=metadata, serialization=serialization - ) - - -MYPY = False -# See https://github.com/python/mypy/issues/14034 for details, in summary mypy is extremely slow to process this -# union which kills performance not just for pydantic, but even for code using pydantic -if not MYPY: - CoreSchema = Union[ - AnySchema, - NoneSchema, - BoolSchema, - IntSchema, - FloatSchema, - DecimalSchema, - StringSchema, - BytesSchema, - DateSchema, - TimeSchema, - DatetimeSchema, - TimedeltaSchema, - LiteralSchema, - IsInstanceSchema, - IsSubclassSchema, - CallableSchema, - ListSchema, - TupleSchema, - SetSchema, - FrozenSetSchema, - GeneratorSchema, - DictSchema, - AfterValidatorFunctionSchema, - BeforeValidatorFunctionSchema, - WrapValidatorFunctionSchema, - PlainValidatorFunctionSchema, - WithDefaultSchema, - NullableSchema, - UnionSchema, - TaggedUnionSchema, - ChainSchema, - LaxOrStrictSchema, - JsonOrPythonSchema, - TypedDictSchema, - ModelFieldsSchema, - ModelSchema, - DataclassArgsSchema, - DataclassSchema, - ArgumentsSchema, - CallSchema, - CustomErrorSchema, - JsonSchema, - UrlSchema, - MultiHostUrlSchema, - DefinitionsSchema, - DefinitionReferenceSchema, - UuidSchema, - ] -elif False: - CoreSchema: TypeAlias = Mapping[str, Any] - - -# to update this, call `pytest -k test_core_schema_type_literal` and copy the output -CoreSchemaType = Literal[ - 'any', - 'none', - 'bool', - 'int', - 'float', - 'decimal', - 'str', - 'bytes', - 'date', - 'time', - 'datetime', - 'timedelta', - 'literal', - 'is-instance', - 'is-subclass', - 'callable', - 'list', - 'tuple', - 'set', - 'frozenset', - 'generator', - 'dict', - 'function-after', - 'function-before', - 'function-wrap', - 'function-plain', - 'default', - 'nullable', - 'union', - 'tagged-union', - 'chain', - 'lax-or-strict', - 'json-or-python', - 'typed-dict', - 'model-fields', - 'model', - 'dataclass-args', - 'dataclass', - 'arguments', - 'call', - 'custom-error', - 'json', - 'url', - 
'multi-host-url', - 'definitions', - 'definition-ref', - 'uuid', -] - -CoreSchemaFieldType = Literal['model-field', 'dataclass-field', 'typed-dict-field', 'computed-field'] - - -# used in _pydantic_core.pyi::PydanticKnownError -# to update this, call `pytest -k test_all_errors` and copy the output -ErrorType = Literal[ - 'no_such_attribute', - 'json_invalid', - 'json_type', - 'recursion_loop', - 'missing', - 'frozen_field', - 'frozen_instance', - 'extra_forbidden', - 'invalid_key', - 'get_attribute_error', - 'model_type', - 'model_attributes_type', - 'dataclass_type', - 'dataclass_exact_type', - 'none_required', - 'greater_than', - 'greater_than_equal', - 'less_than', - 'less_than_equal', - 'multiple_of', - 'finite_number', - 'too_short', - 'too_long', - 'iterable_type', - 'iteration_error', - 'string_type', - 'string_sub_type', - 'string_unicode', - 'string_too_short', - 'string_too_long', - 'string_pattern_mismatch', - 'enum', - 'dict_type', - 'mapping_type', - 'list_type', - 'tuple_type', - 'set_type', - 'bool_type', - 'bool_parsing', - 'int_type', - 'int_parsing', - 'int_parsing_size', - 'int_from_float', - 'float_type', - 'float_parsing', - 'bytes_type', - 'bytes_too_short', - 'bytes_too_long', - 'value_error', - 'assertion_error', - 'literal_error', - 'date_type', - 'date_parsing', - 'date_from_datetime_parsing', - 'date_from_datetime_inexact', - 'date_past', - 'date_future', - 'time_type', - 'time_parsing', - 'datetime_type', - 'datetime_parsing', - 'datetime_object_invalid', - 'datetime_from_date_parsing', - 'datetime_past', - 'datetime_future', - 'timezone_naive', - 'timezone_aware', - 'timezone_offset', - 'time_delta_type', - 'time_delta_parsing', - 'frozen_set_type', - 'is_instance_of', - 'is_subclass_of', - 'callable_type', - 'union_tag_invalid', - 'union_tag_not_found', - 'arguments_type', - 'missing_argument', - 'unexpected_keyword_argument', - 'missing_keyword_only_argument', - 'unexpected_positional_argument', - 'missing_positional_only_argument', - 
'multiple_argument_values', - 'url_type', - 'url_parsing', - 'url_syntax_violation', - 'url_too_long', - 'url_scheme', - 'uuid_type', - 'uuid_parsing', - 'uuid_version', - 'decimal_type', - 'decimal_parsing', - 'decimal_max_digits', - 'decimal_max_places', - 'decimal_whole_digits', -] - - -def _dict_not_none(**kwargs: Any) -> Any: - return {k: v for k, v in kwargs.items() if v is not None} - - -############################################################################### -# All this stuff is deprecated by #980 and will be removed eventually -# They're kept because some code external code will be using them - - -@deprecated('`field_before_validator_function` is deprecated, use `with_info_before_validator_function` instead.') -def field_before_validator_function(function: WithInfoValidatorFunction, field_name: str, schema: CoreSchema, **kwargs): - warnings.warn( - '`field_before_validator_function` is deprecated, use `with_info_before_validator_function` instead.', - DeprecationWarning, - ) - return with_info_before_validator_function(function, schema, field_name=field_name, **kwargs) - - -@deprecated('`general_before_validator_function` is deprecated, use `with_info_before_validator_function` instead.') -def general_before_validator_function(*args, **kwargs): - warnings.warn( - '`general_before_validator_function` is deprecated, use `with_info_before_validator_function` instead.', - DeprecationWarning, - ) - return with_info_before_validator_function(*args, **kwargs) - - -@deprecated('`field_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.') -def field_after_validator_function(function: WithInfoValidatorFunction, field_name: str, schema: CoreSchema, **kwargs): - warnings.warn( - '`field_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.', - DeprecationWarning, - ) - return with_info_after_validator_function(function, schema, field_name=field_name, **kwargs) - - 
-@deprecated('`general_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.') -def general_after_validator_function(*args, **kwargs): - warnings.warn( - '`general_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.', - DeprecationWarning, - ) - return with_info_after_validator_function(*args, **kwargs) - - -@deprecated('`field_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.') -def field_wrap_validator_function( - function: WithInfoWrapValidatorFunction, field_name: str, schema: CoreSchema, **kwargs -): - warnings.warn( - '`field_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.', - DeprecationWarning, - ) - return with_info_wrap_validator_function(function, schema, field_name=field_name, **kwargs) - - -@deprecated('`general_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.') -def general_wrap_validator_function(*args, **kwargs): - warnings.warn( - '`general_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.', - DeprecationWarning, - ) - return with_info_wrap_validator_function(*args, **kwargs) - - -@deprecated('`field_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.') -def field_plain_validator_function(function: WithInfoValidatorFunction, field_name: str, **kwargs): - warnings.warn( - '`field_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.', - DeprecationWarning, - ) - return with_info_plain_validator_function(function, field_name=field_name, **kwargs) - - -@deprecated('`general_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.') -def general_plain_validator_function(*args, **kwargs): - warnings.warn( - '`general_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.', - 
DeprecationWarning, - ) - return with_info_plain_validator_function(*args, **kwargs) - - -_deprecated_import_lookup = { - 'FieldValidationInfo': ValidationInfo, - 'FieldValidatorFunction': WithInfoValidatorFunction, - 'GeneralValidatorFunction': WithInfoValidatorFunction, - 'FieldWrapValidatorFunction': WithInfoWrapValidatorFunction, -} - -if TYPE_CHECKING: - FieldValidationInfo = ValidationInfo - - -def __getattr__(attr_name: str) -> object: - new_attr = _deprecated_import_lookup.get(attr_name) - if new_attr is None: - raise AttributeError(f"module 'pydantic_core' has no attribute '{attr_name}'") - else: - import warnings - - msg = f'`{attr_name}` is deprecated, use `{new_attr.__name__}` instead.' - warnings.warn(msg, DeprecationWarning, stacklevel=1) - return new_attr diff --git a/lib/pydantic_core/py.typed b/lib/pydantic_core/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pythoncom.py b/lib/pythoncom.py deleted file mode 100644 index 2180ecc3..00000000 --- a/lib/pythoncom.py +++ /dev/null @@ -1,4 +0,0 @@ -# Magic utility that "redirects" to pythoncomxx.dll -import pywintypes - -pywintypes.__import_pywin32_system_module__("pythoncom", globals()) diff --git a/lib/pythonwin/license.txt b/lib/pythonwin/license.txt deleted file mode 100644 index fa340d74..00000000 --- a/lib/pythonwin/license.txt +++ /dev/null @@ -1,30 +0,0 @@ -Unless stated in the specfic source file, this work is -Copyright (c) 1994-2008, Mark Hammond -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -Redistributions of source code must retain the above copyright notice, -this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in -the documentation and/or other materials provided with the distribution. 
- -Neither name of Mark Hammond nor the name of contributors may be used -to endorse or promote products derived from this software without -specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS -IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lib/pythonwin/pywin/Demos/app/basictimerapp.py b/lib/pythonwin/pywin/Demos/app/basictimerapp.py deleted file mode 100644 index da771da1..00000000 --- a/lib/pythonwin/pywin/Demos/app/basictimerapp.py +++ /dev/null @@ -1,258 +0,0 @@ -# basictimerapp - a really simple timer application. 
-# This should be run using the command line: -# pythonwin /app demos\basictimerapp.py -import sys -import time - -import timer -import win32api -import win32con -import win32ui -from pywin.framework import app, cmdline, dlgappcore - - -class TimerAppDialog(dlgappcore.AppDialog): - softspace = 1 - - def __init__(self, appName=""): - dlgappcore.AppDialog.__init__(self, win32ui.IDD_GENERAL_STATUS) - self.timerAppName = appName - self.argOff = 0 - if len(self.timerAppName) == 0: - if len(sys.argv) > 1 and sys.argv[1][0] != "/": - self.timerAppName = sys.argv[1] - self.argOff = 1 - - def PreDoModal(self): - # sys.stderr = sys.stdout - pass - - def ProcessArgs(self, args): - for arg in args: - if arg == "/now": - self.OnOK() - - def OnInitDialog(self): - win32ui.SetProfileFileName("pytimer.ini") - self.title = win32ui.GetProfileVal( - self.timerAppName, "Title", "Remote System Timer" - ) - self.buildTimer = win32ui.GetProfileVal( - self.timerAppName, "Timer", "EachMinuteIntervaler()" - ) - self.doWork = win32ui.GetProfileVal(self.timerAppName, "Work", "DoDemoWork()") - # replace "\n" with real \n. 
- self.doWork = self.doWork.replace("\\n", "\n") - dlgappcore.AppDialog.OnInitDialog(self) - - self.SetWindowText(self.title) - self.prompt1 = self.GetDlgItem(win32ui.IDC_PROMPT1) - self.prompt2 = self.GetDlgItem(win32ui.IDC_PROMPT2) - self.prompt3 = self.GetDlgItem(win32ui.IDC_PROMPT3) - self.butOK = self.GetDlgItem(win32con.IDOK) - self.butCancel = self.GetDlgItem(win32con.IDCANCEL) - self.prompt1.SetWindowText("Python Timer App") - self.prompt2.SetWindowText("") - self.prompt3.SetWindowText("") - self.butOK.SetWindowText("Do it now") - self.butCancel.SetWindowText("Close") - - self.timerManager = TimerManager(self) - self.ProcessArgs(sys.argv[self.argOff :]) - self.timerManager.go() - return 1 - - def OnDestroy(self, msg): - dlgappcore.AppDialog.OnDestroy(self, msg) - self.timerManager.stop() - - def OnOK(self): - # stop the timer, then restart after setting special boolean - self.timerManager.stop() - self.timerManager.bConnectNow = 1 - self.timerManager.go() - return - - -# def OnCancel(self): default behaviour - cancel == close. -# return - - -class TimerManager: - def __init__(self, dlg): - self.dlg = dlg - self.timerId = None - self.intervaler = eval(self.dlg.buildTimer) - self.bConnectNow = 0 - self.bHaveSetPrompt1 = 0 - - def CaptureOutput(self): - self.oldOut = sys.stdout - self.oldErr = sys.stderr - sys.stdout = sys.stderr = self - self.bHaveSetPrompt1 = 0 - - def ReleaseOutput(self): - sys.stdout = self.oldOut - sys.stderr = self.oldErr - - def write(self, str): - s = str.strip() - if len(s): - if self.bHaveSetPrompt1: - dest = self.dlg.prompt3 - else: - dest = self.dlg.prompt1 - self.bHaveSetPrompt1 = 1 - dest.SetWindowText(s) - - def go(self): - self.OnTimer(None, None) - - def stop(self): - if self.timerId: - timer.kill_timer(self.timerId) - self.timerId = None - - def OnTimer(self, id, timeVal): - if id: - timer.kill_timer(id) - if self.intervaler.IsTime() or self.bConnectNow: - # do the work. 
- try: - self.dlg.SetWindowText(self.dlg.title + " - Working...") - self.dlg.butOK.EnableWindow(0) - self.dlg.butCancel.EnableWindow(0) - self.CaptureOutput() - try: - exec(self.dlg.doWork) - print("The last operation completed successfully.") - except: - t, v, tb = sys.exc_info() - str = "Failed: %s: %s" % (t, repr(v)) - print(str) - self.oldErr.write(str) - tb = None # Prevent cycle - finally: - self.ReleaseOutput() - self.dlg.butOK.EnableWindow() - self.dlg.butCancel.EnableWindow() - self.dlg.SetWindowText(self.dlg.title) - else: - now = time.time() - nextTime = self.intervaler.GetNextTime() - if nextTime: - timeDiffSeconds = nextTime - now - timeDiffMinutes = int(timeDiffSeconds / 60) - timeDiffSeconds = timeDiffSeconds % 60 - timeDiffHours = int(timeDiffMinutes / 60) - timeDiffMinutes = timeDiffMinutes % 60 - self.dlg.prompt1.SetWindowText( - "Next connection due in %02d:%02d:%02d" - % (timeDiffHours, timeDiffMinutes, timeDiffSeconds) - ) - self.timerId = timer.set_timer( - self.intervaler.GetWakeupInterval(), self.OnTimer - ) - self.bConnectNow = 0 - - -class TimerIntervaler: - def __init__(self): - self.nextTime = None - self.wakeUpInterval = 2000 - - def GetWakeupInterval(self): - return self.wakeUpInterval - - def GetNextTime(self): - return self.nextTime - - def IsTime(self): - now = time.time() - if self.nextTime is None: - self.nextTime = self.SetFirstTime(now) - ret = 0 - if now >= self.nextTime: - ret = 1 - self.nextTime = self.SetNextTime(self.nextTime, now) - # do the work. 
- return ret - - -class EachAnyIntervaler(TimerIntervaler): - def __init__(self, timeAt, timePos, timeAdd, wakeUpInterval=None): - TimerIntervaler.__init__(self) - self.timeAt = timeAt - self.timePos = timePos - self.timeAdd = timeAdd - if wakeUpInterval: - self.wakeUpInterval = wakeUpInterval - - def SetFirstTime(self, now): - timeTup = time.localtime(now) - lst = [] - for item in timeTup: - lst.append(item) - bAdd = timeTup[self.timePos] > self.timeAt - lst[self.timePos] = self.timeAt - for pos in range(self.timePos + 1, 6): - lst[pos] = 0 - ret = time.mktime(tuple(lst)) - if bAdd: - ret = ret + self.timeAdd - return ret - - def SetNextTime(self, lastTime, now): - return lastTime + self.timeAdd - - -class EachMinuteIntervaler(EachAnyIntervaler): - def __init__(self, at=0): - EachAnyIntervaler.__init__(self, at, 5, 60, 2000) - - -class EachHourIntervaler(EachAnyIntervaler): - def __init__(self, at=0): - EachAnyIntervaler.__init__(self, at, 4, 3600, 10000) - - -class EachDayIntervaler(EachAnyIntervaler): - def __init__(self, at=0): - EachAnyIntervaler.__init__(self, at, 3, 86400, 10000) - - -class TimerDialogApp(dlgappcore.DialogApp): - def CreateDialog(self): - return TimerAppDialog() - - -def DoDemoWork(): - print("Doing the work...") - print("About to connect") - win32api.MessageBeep(win32con.MB_ICONASTERISK) - win32api.Sleep(2000) - print("Doing something else...") - win32api.MessageBeep(win32con.MB_ICONEXCLAMATION) - win32api.Sleep(2000) - print("More work.") - win32api.MessageBeep(win32con.MB_ICONHAND) - win32api.Sleep(2000) - print("The last bit.") - win32api.MessageBeep(win32con.MB_OK) - win32api.Sleep(2000) - - -app = TimerDialogApp() - - -def t(): - t = TimerAppDialog("Test Dialog") - t.DoModal() - return t - - -if __name__ == "__main__": - import demoutils - - demoutils.NeedApp() diff --git a/lib/pythonwin/pywin/Demos/app/customprint.py b/lib/pythonwin/pywin/Demos/app/customprint.py deleted file mode 100644 index 356073e5..00000000 --- 
a/lib/pythonwin/pywin/Demos/app/customprint.py +++ /dev/null @@ -1,186 +0,0 @@ -# A demo of an Application object that has some custom print functionality. - -# If you desire, you can also run this from inside Pythonwin, in which -# case it will do the demo inside the Pythonwin environment. - -# This sample was contributed by Roger Burnham. - -import win32api -import win32con -import win32ui -from pywin.framework import app -from pywin.mfc import afxres, dialog, docview - -PRINTDLGORD = 1538 -IDC_PRINT_MAG_EDIT = 1010 - - -class PrintDemoTemplate(docview.DocTemplate): - def _SetupSharedMenu_(self): - pass - - -class PrintDemoView(docview.ScrollView): - def OnInitialUpdate(self): - ret = self._obj_.OnInitialUpdate() - self.colors = { - "Black": (0x00 << 0) + (0x00 << 8) + (0x00 << 16), - "Red": (0xFF << 0) + (0x00 << 8) + (0x00 << 16), - "Green": (0x00 << 0) + (0xFF << 8) + (0x00 << 16), - "Blue": (0x00 << 0) + (0x00 << 8) + (0xFF << 16), - "Cyan": (0x00 << 0) + (0xFF << 8) + (0xFF << 16), - "Magenta": (0xFF << 0) + (0x00 << 8) + (0xFF << 16), - "Yellow": (0xFF << 0) + (0xFF << 8) + (0x00 << 16), - } - self.pens = {} - for name, color in self.colors.items(): - self.pens[name] = win32ui.CreatePen(win32con.PS_SOLID, 5, color) - self.pen = None - self.size = (128, 128) - self.SetScaleToFitSize(self.size) - self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT) - self.HookCommand(self.OnFilePrintPreview, win32ui.ID_FILE_PRINT_PREVIEW) - return ret - - def OnDraw(self, dc): - oldPen = None - x, y = self.size - delta = 2 - colors = list(self.colors.keys()) - colors.sort() - colors = colors * 2 - for color in colors: - if oldPen is None: - oldPen = dc.SelectObject(self.pens[color]) - else: - dc.SelectObject(self.pens[color]) - dc.MoveTo((delta, delta)) - dc.LineTo((x - delta, delta)) - dc.LineTo((x - delta, y - delta)) - dc.LineTo((delta, y - delta)) - dc.LineTo((delta, delta)) - delta = delta + 4 - if x - delta <= 0 or y - delta <= 0: - break - dc.SelectObject(oldPen) - 
- def OnPrepareDC(self, dc, pInfo): - if dc.IsPrinting(): - mag = self.prtDlg["mag"] - dc.SetMapMode(win32con.MM_ANISOTROPIC) - dc.SetWindowOrg((0, 0)) - dc.SetWindowExt((1, 1)) - dc.SetViewportOrg((0, 0)) - dc.SetViewportExt((mag, mag)) - - def OnPreparePrinting(self, pInfo): - flags = ( - win32ui.PD_USEDEVMODECOPIES - | win32ui.PD_PAGENUMS - | win32ui.PD_NOPAGENUMS - | win32ui.PD_NOSELECTION - ) - self.prtDlg = ImagePrintDialog(pInfo, PRINTDLGORD, flags) - pInfo.SetPrintDialog(self.prtDlg) - pInfo.SetMinPage(1) - pInfo.SetMaxPage(1) - pInfo.SetFromPage(1) - pInfo.SetToPage(1) - ret = self.DoPreparePrinting(pInfo) - return ret - - def OnBeginPrinting(self, dc, pInfo): - return self._obj_.OnBeginPrinting(dc, pInfo) - - def OnEndPrinting(self, dc, pInfo): - del self.prtDlg - return self._obj_.OnEndPrinting(dc, pInfo) - - def OnFilePrintPreview(self, *arg): - self._obj_.OnFilePrintPreview() - - def OnFilePrint(self, *arg): - self._obj_.OnFilePrint() - - def OnPrint(self, dc, pInfo): - doc = self.GetDocument() - metrics = dc.GetTextMetrics() - cxChar = metrics["tmAveCharWidth"] - cyChar = metrics["tmHeight"] - left, top, right, bottom = pInfo.GetDraw() - dc.TextOut(0, 2 * cyChar, doc.GetTitle()) - top = top + (7 * cyChar) / 2 - dc.MoveTo(left, top) - dc.LineTo(right, top) - top = top + cyChar - # this seems to have not effect... 
- # get what I want with the dc.SetWindowOrg calls - pInfo.SetDraw((left, top, right, bottom)) - dc.SetWindowOrg((0, -top)) - - self.OnDraw(dc) - dc.SetTextAlign(win32con.TA_LEFT | win32con.TA_BOTTOM) - - rect = self.GetWindowRect() - rect = self.ScreenToClient(rect) - height = rect[3] - rect[1] - dc.SetWindowOrg((0, -(top + height + cyChar))) - dc.MoveTo(left, 0) - dc.LineTo(right, 0) - - x = 0 - y = (3 * cyChar) / 2 - - dc.TextOut(x, y, doc.GetTitle()) - y = y + cyChar - - -class PrintDemoApp(app.CApp): - def __init__(self): - app.CApp.__init__(self) - - def InitInstance(self): - template = PrintDemoTemplate(None, None, None, PrintDemoView) - self.AddDocTemplate(template) - self._obj_.InitMDIInstance() - self.LoadMainFrame() - doc = template.OpenDocumentFile(None) - doc.SetTitle("Custom Print Document") - - -class ImagePrintDialog(dialog.PrintDialog): - sectionPos = "Image Print Demo" - - def __init__(self, pInfo, dlgID, flags=win32ui.PD_USEDEVMODECOPIES): - dialog.PrintDialog.__init__(self, pInfo, dlgID, flags=flags) - mag = win32ui.GetProfileVal(self.sectionPos, "Document Magnification", 0) - if mag <= 0: - mag = 2 - win32ui.WriteProfileVal(self.sectionPos, "Document Magnification", mag) - - self["mag"] = mag - - def OnInitDialog(self): - self.magCtl = self.GetDlgItem(IDC_PRINT_MAG_EDIT) - self.magCtl.SetWindowText(repr(self["mag"])) - return dialog.PrintDialog.OnInitDialog(self) - - def OnOK(self): - dialog.PrintDialog.OnOK(self) - strMag = self.magCtl.GetWindowText() - try: - self["mag"] = int(strMag) - except: - pass - win32ui.WriteProfileVal(self.sectionPos, "Document Magnification", self["mag"]) - - -if __name__ == "__main__": - # Running under Pythonwin - def test(): - template = PrintDemoTemplate(None, None, None, PrintDemoView) - template.OpenDocumentFile(None) - - test() -else: - app = PrintDemoApp() diff --git a/lib/pythonwin/pywin/Demos/app/demoutils.py b/lib/pythonwin/pywin/Demos/app/demoutils.py deleted file mode 100644 index ee1fefcf..00000000 --- 
a/lib/pythonwin/pywin/Demos/app/demoutils.py +++ /dev/null @@ -1,65 +0,0 @@ -# Utilities for the demos - -import sys - -import win32api -import win32con -import win32ui - -NotScriptMsg = """\ -This demo program is not designed to be run as a Script, but is -probably used by some other test program. Please try another demo. -""" - -NeedGUIMsg = """\ -This demo program can only be run from inside of Pythonwin - -You must start Pythonwin, and select 'Run' from the toolbar or File menu -""" - - -NeedAppMsg = """\ -This demo program is a 'Pythonwin Application'. - -It is more demo code than an example of Pythonwin's capabilities. - -To run it, you must execute the command: -pythonwin.exe /app "%s" - -Would you like to execute it now? -""" - - -def NotAScript(): - import win32ui - - win32ui.MessageBox(NotScriptMsg, "Demos") - - -def NeedGoodGUI(): - from pywin.framework.app import HaveGoodGUI - - rc = HaveGoodGUI() - if not rc: - win32ui.MessageBox(NeedGUIMsg, "Demos") - return rc - - -def NeedApp(): - import win32ui - - rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO) - if rc == win32con.IDYES: - try: - parent = win32ui.GetMainFrame().GetSafeHwnd() - win32api.ShellExecute( - parent, None, "pythonwin.exe", '/app "%s"' % sys.argv[0], None, 1 - ) - except win32api.error as details: - win32ui.MessageBox("Error executing command - %s" % (details), "Demos") - - -if __name__ == "__main__": - import demoutils - - demoutils.NotAScript() diff --git a/lib/pythonwin/pywin/Demos/app/dlgappdemo.py b/lib/pythonwin/pywin/Demos/app/dlgappdemo.py deleted file mode 100644 index 38659a62..00000000 --- a/lib/pythonwin/pywin/Demos/app/dlgappdemo.py +++ /dev/null @@ -1,51 +0,0 @@ -# dlgappdemo - a demo of a dialog application. -# This is a demonstration of both a custom "application" module, -# and a Python program in a dialog box. -# -# NOTE: You CAN NOT import this module from either PythonWin or Python. 
-# This module must be specified on the commandline to PythonWin only. -# eg, PythonWin /app dlgappdemo.py - -import sys - -import win32ui -from pywin.framework import app, dlgappcore - - -class TestDialogApp(dlgappcore.DialogApp): - def CreateDialog(self): - return TestAppDialog() - - -class TestAppDialog(dlgappcore.AppDialog): - def __init__(self): - self.edit = None - dlgappcore.AppDialog.__init__(self, win32ui.IDD_LARGE_EDIT) - - def OnInitDialog(self): - self.SetWindowText("Test dialog application") - self.edit = self.GetDlgItem(win32ui.IDC_EDIT1) - print("Hello from Python") - print("args are:", end=" ") - for arg in sys.argv: - print(arg) - return 1 - - def PreDoModal(self): - sys.stdout = sys.stderr = self - - def write(self, str): - if self.edit: - self.edit.SetSel(-2) - # translate \n to \n\r - self.edit.ReplaceSel(str.replace("\n", "\r\n")) - else: - win32ui.OutputDebug("dlgapp - no edit control! >>\n%s\n<<\n" % str) - - -app.AppBuilder = TestDialogApp - -if __name__ == "__main__": - import demoutils - - demoutils.NeedApp() diff --git a/lib/pythonwin/pywin/Demos/app/dojobapp.py b/lib/pythonwin/pywin/Demos/app/dojobapp.py deleted file mode 100644 index d97ce5e2..00000000 --- a/lib/pythonwin/pywin/Demos/app/dojobapp.py +++ /dev/null @@ -1,72 +0,0 @@ -# dojobapp - do a job, show the result in a dialog, and exit. -# -# Very simple - faily minimal dialog based app. 
-# -# This should be run using the command line: -# pythonwin /app demos\dojobapp.py - - -import win32api -import win32con -import win32ui -from pywin.framework import app, dlgappcore - - -class DoJobAppDialog(dlgappcore.AppDialog): - softspace = 1 - - def __init__(self, appName=""): - self.appName = appName - dlgappcore.AppDialog.__init__(self, win32ui.IDD_GENERAL_STATUS) - - def PreDoModal(self): - pass - - def ProcessArgs(self, args): - pass - - def OnInitDialog(self): - self.SetWindowText(self.appName) - butCancel = self.GetDlgItem(win32con.IDCANCEL) - butCancel.ShowWindow(win32con.SW_HIDE) - p1 = self.GetDlgItem(win32ui.IDC_PROMPT1) - p2 = self.GetDlgItem(win32ui.IDC_PROMPT2) - - # Do something here! - - p1.SetWindowText("Hello there") - p2.SetWindowText("from the demo") - - def OnDestroy(self, msg): - pass - - -# def OnOK(self): -# pass -# def OnCancel(self): default behaviour - cancel == close. -# return - - -class DoJobDialogApp(dlgappcore.DialogApp): - def CreateDialog(self): - return DoJobAppDialog("Do Something") - - -class CopyToDialogApp(DoJobDialogApp): - def __init__(self): - DoJobDialogApp.__init__(self) - - -app.AppBuilder = DoJobDialogApp - - -def t(): - t = DoJobAppDialog("Copy To") - t.DoModal() - return t - - -if __name__ == "__main__": - import demoutils - - demoutils.NeedApp() diff --git a/lib/pythonwin/pywin/Demos/app/helloapp.py b/lib/pythonwin/pywin/Demos/app/helloapp.py deleted file mode 100644 index 876f16f7..00000000 --- a/lib/pythonwin/pywin/Demos/app/helloapp.py +++ /dev/null @@ -1,53 +0,0 @@ -## -## helloapp.py -## -## -## A nice, small 'hello world' Pythonwin application. -## NOT an MDI application - just a single, normal, top-level window. 
-## -## MUST be run with the command line "pythonwin.exe /app helloapp.py" -## (or if you are really keen, rename "pythonwin.exe" to something else, then -## using MSVC or similar, edit the string section in the .EXE to name this file) -## -## Originally by Willy Heineman - - -import win32con -import win32ui -from pywin.mfc import afxres, dialog, window -from pywin.mfc.thread import WinApp - - -# The main frame. -# Does almost nothing at all - doesnt even create a child window! -class HelloWindow(window.Wnd): - def __init__(self): - # The window.Wnd ctor creates a Window object, and places it in - # self._obj_. Note the window object exists, but the window itself - # does not! - window.Wnd.__init__(self, win32ui.CreateWnd()) - - # Now we ask the window object to create the window itself. - self._obj_.CreateWindowEx( - win32con.WS_EX_CLIENTEDGE, - win32ui.RegisterWndClass(0, 0, win32con.COLOR_WINDOW + 1), - "Hello World!", - win32con.WS_OVERLAPPEDWINDOW, - (100, 100, 400, 300), - None, - 0, - None, - ) - - -# The application object itself. -class HelloApp(WinApp): - def InitInstance(self): - self.frame = HelloWindow() - self.frame.ShowWindow(win32con.SW_SHOWNORMAL) - # We need to tell MFC what our main frame is. - self.SetMainFrame(self.frame) - - -# Now create the application object itself! -app = HelloApp() diff --git a/lib/pythonwin/pywin/Demos/cmdserver.py b/lib/pythonwin/pywin/Demos/cmdserver.py deleted file mode 100644 index 7b1c257a..00000000 --- a/lib/pythonwin/pywin/Demos/cmdserver.py +++ /dev/null @@ -1,116 +0,0 @@ -# cmdserver.py - -# Demo code that is not Pythonwin related, but too good to throw away... - -import _thread -import sys -import traceback - -import win32api -from pywin.framework import winout - - -class ThreadWriter: - "Assign an instance to sys.stdout for per-thread printing objects - Courtesy Guido!" 
- - def __init__(self): - "Constructor -- initialize the table of writers" - self.writers = {} - self.origStdOut = None - - def register(self, writer): - "Register the writer for the current thread" - self.writers[_thread.get_ident()] = writer - if self.origStdOut is None: - self.origStdOut = sys.stdout - sys.stdout = self - - def unregister(self): - "Remove the writer for the current thread, if any" - try: - del self.writers[_thread.get_ident()] - except KeyError: - pass - if len(self.writers) == 0: - sys.stdout = self.origStdOut - self.origStdOut = None - - def getwriter(self): - "Return the current thread's writer, default sys.stdout" - try: - return self.writers[_thread.get_ident()] - except KeyError: - return self.origStdOut - - def write(self, str): - "Write to the current thread's writer, default sys.stdout" - self.getwriter().write(str) - - -def Test(): - num = 1 - while num < 1000: - print("Hello there no " + str(num)) - win32api.Sleep(50) - num = num + 1 - - -class flags: - SERVER_BEST = 0 - SERVER_IMMEDIATE = 1 - SERVER_THREAD = 2 - SERVER_PROCESS = 3 - - -def StartServer(cmd, title=None, bCloseOnEnd=0, serverFlags=flags.SERVER_BEST): - out = winout.WindowOutput(title, None, winout.flags.WQ_IDLE) - if not title: - title = cmd - out.Create(title) - # ServerThread((out, cmd, title, bCloseOnEnd)) - # out = sys.stdout - _thread.start_new_thread(ServerThread, (out, cmd, title, bCloseOnEnd)) - - -def ServerThread(myout, cmd, title, bCloseOnEnd): - try: - writer.register(myout) - print('Executing "%s"\n' % cmd) - bOK = 1 - try: - import __main__ - - exec(cmd + "\n", __main__.__dict__) - except: - bOK = 0 - if bOK: - print("Command terminated without errors.") - else: - t, v, tb = sys.exc_info() - print(t, ": ", v) - traceback.print_tb(tb) - tb = None # prevent a cycle - print("Command terminated with an unhandled exception") - writer.unregister() - if bOK and bCloseOnEnd: - myout.frame.DestroyWindow() - - # Unhandled exception of any kind in a thread kills the 
gui! - except: - t, v, tb = sys.exc_info() - print(t, ": ", v) - traceback.print_tb(tb) - tb = None - print("Thread failed") - - -# assist for reloading (when debugging) - use only 1 tracer object, -# else a large chain of tracer objects will exist. -# try: -# writer -# except NameError: -# writer=ThreadWriter() -if __name__ == "__main__": - import demoutils - - demoutils.NotAScript() diff --git a/lib/pythonwin/pywin/Demos/createwin.py b/lib/pythonwin/pywin/Demos/createwin.py deleted file mode 100644 index 38f6947e..00000000 --- a/lib/pythonwin/pywin/Demos/createwin.py +++ /dev/null @@ -1,114 +0,0 @@ -# -# Window creation example -# -# This example creates a minimal "control" that just fills in its -# window with red. To make your own control, subclass Control and -# write your own OnPaint() method. See PyCWnd.HookMessage for what -# the parameters to OnPaint are. -# - -import win32api -import win32con -import win32ui -from pywin.mfc import dialog, window - - -class Control(window.Wnd): - """Generic control class""" - - def __init__(self): - window.Wnd.__init__(self, win32ui.CreateWnd()) - - def OnPaint(self): - dc, paintStruct = self.BeginPaint() - self.DoPaint(dc) - self.EndPaint(paintStruct) - - def DoPaint(self, dc): # Override this! 
- pass - - -class RedBox(Control): - def DoPaint(self, dc): - dc.FillSolidRect(self.GetClientRect(), win32api.RGB(255, 0, 0)) - - -class RedBoxWithPie(RedBox): - def DoPaint(self, dc): - RedBox.DoPaint(self, dc) - r = self.GetClientRect() - dc.Pie(r[0], r[1], r[2], r[3], 0, 0, r[2], r[3] // 2) - - -def MakeDlgTemplate(): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - - w = 64 - h = 64 - - dlg = [ - ["Red box", (0, 0, w, h), style, None, (8, "MS Sans Serif")], - ] - - s = win32con.WS_TABSTOP | cs - - dlg.append( - [ - 128, - "Cancel", - win32con.IDCANCEL, - (7, h - 18, 50, 14), - s | win32con.BS_PUSHBUTTON, - ] - ) - - return dlg - - -class TestDialog(dialog.Dialog): - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - self.redbox = RedBox() - self.redbox.CreateWindow( - None, - "RedBox", - win32con.WS_CHILD | win32con.WS_VISIBLE, - (5, 5, 90, 68), - self, - 1003, - ) - return rc - - -class TestPieDialog(dialog.Dialog): - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - self.control = RedBoxWithPie() - self.control.CreateWindow( - None, - "RedBox with Pie", - win32con.WS_CHILD | win32con.WS_VISIBLE, - (5, 5, 90, 68), - self, - 1003, - ) - - -def demo(modal=0): - d = TestPieDialog(MakeDlgTemplate()) - if modal: - d.DoModal() - else: - d.CreateWindow() - - -if __name__ == "__main__": - demo(1) diff --git a/lib/pythonwin/pywin/Demos/demoutils.py b/lib/pythonwin/pywin/Demos/demoutils.py deleted file mode 100644 index f6080c46..00000000 --- a/lib/pythonwin/pywin/Demos/demoutils.py +++ /dev/null @@ -1,67 +0,0 @@ -# Utilities for the demos - -import sys - -import win32api -import win32con -import win32ui - -NotScriptMsg = """\ -This demo program is not designed to be run as a Script, but is -probably used by some other test program. Please try another demo. 
-""" - -NeedGUIMsg = """\ -This demo program can only be run from inside of Pythonwin - -You must start Pythonwin, and select 'Run' from the toolbar or File menu -""" - - -NeedAppMsg = """\ -This demo program is a 'Pythonwin Application'. - -It is more demo code than an example of Pythonwin's capabilities. - -To run it, you must execute the command: -pythonwin.exe /app "%s" - -Would you like to execute it now? -""" - - -def NotAScript(): - import win32ui - - win32ui.MessageBox(NotScriptMsg, "Demos") - - -def NeedGoodGUI(): - from pywin.framework.app import HaveGoodGUI - - rc = HaveGoodGUI() - if not rc: - win32ui.MessageBox(NeedGUIMsg, "Demos") - return rc - - -def NeedApp(): - import win32ui - - rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO) - if rc == win32con.IDYES: - try: - parent = win32ui.GetMainFrame().GetSafeHwnd() - win32api.ShellExecute( - parent, None, "pythonwin.exe", '/app "%s"' % sys.argv[0], None, 1 - ) - except win32api.error as details: - win32ui.MessageBox("Error executing command - %s" % (details), "Demos") - - -from pywin.framework.app import HaveGoodGUI - -if __name__ == "__main__": - import demoutils - - demoutils.NotAScript() diff --git a/lib/pythonwin/pywin/Demos/dibdemo.py b/lib/pythonwin/pywin/Demos/dibdemo.py deleted file mode 100644 index 615227ab..00000000 --- a/lib/pythonwin/pywin/Demos/dibdemo.py +++ /dev/null @@ -1,73 +0,0 @@ -# A demo which creates a view and a frame which displays a PPM format bitmap -# -# This hasnnt been run in a while, as I dont have many of that format around! 
- -import win32api -import win32con -import win32ui - - -class DIBView: - def __init__(self, doc, dib): - self.dib = dib - self.view = win32ui.CreateView(doc) - self.width = self.height = 0 - # set up message handlers - # self.view.OnPrepareDC = self.OnPrepareDC - self.view.HookMessage(self.OnSize, win32con.WM_SIZE) - - def OnSize(self, params): - lParam = params[3] - self.width = win32api.LOWORD(lParam) - self.height = win32api.HIWORD(lParam) - - def OnDraw(self, ob, dc): - # set sizes used for "non strecth" mode. - self.view.SetScrollSizes(win32con.MM_TEXT, self.dib.GetSize()) - dibSize = self.dib.GetSize() - dibRect = (0, 0, dibSize[0], dibSize[1]) - # stretch BMP. - # self.dib.Paint(dc, (0,0,self.width, self.height),dibRect) - # non stretch. - self.dib.Paint(dc) - - -class DIBDemo: - def __init__(self, filename, *bPBM): - # init data members - f = open(filename, "rb") - dib = win32ui.CreateDIBitmap() - if len(bPBM) > 0: - magic = f.readline() - if magic != "P6\n": - print("The file is not a PBM format file") - raise ValueError("Failed - The file is not a PBM format file") - # check magic? - rowcollist = f.readline().split() - cols = int(rowcollist[0]) - rows = int(rowcollist[1]) - f.readline() # whats this one? 
- dib.LoadPBMData(f, (cols, rows)) - else: - dib.LoadWindowsFormatFile(f) - f.close() - # create doc/view - self.doc = win32ui.CreateDoc() - self.dibView = DIBView(self.doc, dib) - self.frame = win32ui.CreateMDIFrame() - self.frame.LoadFrame() # this will force OnCreateClient - self.doc.SetTitle("DIB Demo") - self.frame.ShowWindow() - - # display the sucka - self.frame.ActivateFrame() - - def OnCreateClient(self, createparams, context): - self.dibView.view.CreateWindow(self.frame) - return 1 - - -if __name__ == "__main__": - import demoutils - - demoutils.NotAScript() diff --git a/lib/pythonwin/pywin/Demos/dlgtest.py b/lib/pythonwin/pywin/Demos/dlgtest.py deleted file mode 100644 index 8dcb0e5e..00000000 --- a/lib/pythonwin/pywin/Demos/dlgtest.py +++ /dev/null @@ -1,145 +0,0 @@ -# A Demo of Pythonwin's Dialog and Property Page support. - -################### -# -# First demo - use the built-in to Pythonwin "Tab Stop" dialog, but -# customise it heavily. -# -# ID's for the tabstop dialog - out test. -# -import win32con -import win32ui -from pywin.mfc import dialog -from win32con import IDCANCEL -from win32ui import IDC_EDIT_TABS, IDC_PROMPT_TABS, IDD_SET_TABSTOPS - - -class TestDialog(dialog.Dialog): - def __init__(self, modal=1): - dialog.Dialog.__init__(self, IDD_SET_TABSTOPS) - self.counter = 0 - if modal: - self.DoModal() - else: - self.CreateWindow() - - def OnInitDialog(self): - # Set the caption of the dialog itself. - self.SetWindowText("Used to be Tab Stops!") - # Get a child control, remember it, and change its text. - self.edit = self.GetDlgItem(IDC_EDIT_TABS) # the text box. - self.edit.SetWindowText("Test") - # Hook a Windows message for the dialog. - self.edit.HookMessage(self.KillFocus, win32con.WM_KILLFOCUS) - # Get the prompt control, and change its next. - prompt = self.GetDlgItem(IDC_PROMPT_TABS) # the prompt box. - prompt.SetWindowText("Prompt") - # And the same for the button.. 
- cancel = self.GetDlgItem(IDCANCEL) # the cancel button - cancel.SetWindowText("&Kill me") - - # And just for demonstration purposes, we hook the notify message for the dialog. - # This allows us to be notified when the Edit Control text changes. - self.HookCommand(self.OnNotify, IDC_EDIT_TABS) - - def OnNotify(self, controlid, code): - if code == win32con.EN_CHANGE: - print("Edit text changed!") - return 1 # I handled this, so no need to call defaults! - - # kill focus for the edit box. - # Simply increment the value in the text box. - def KillFocus(self, msg): - self.counter = self.counter + 1 - if self.edit != None: - self.edit.SetWindowText(str(self.counter)) - - # Called when the dialog box is terminating... - def OnDestroy(self, msg): - del self.edit - del self.counter - - -# A very simply Property Sheet. -# We only make a new class for demonstration purposes. -class TestSheet(dialog.PropertySheet): - def __init__(self, title): - dialog.PropertySheet.__init__(self, title) - self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE) - - def OnActivate(self, msg): - pass - - -# A very simply Property Page, which will be "owned" by the above -# Property Sheet. -# We create a new class, just so we can hook a control notification. -class TestPage(dialog.PropertyPage): - def OnInitDialog(self): - # We use the HookNotify function to allow Python to respond to - # Windows WM_NOTIFY messages. - # In this case, we are interested in BN_CLICKED messages. - self.HookNotify(self.OnNotify, win32con.BN_CLICKED) - - def OnNotify(self, std, extra): - print("OnNotify", std, extra) - - -# Some code that actually uses these objects. -def demo(modal=0): - TestDialog(modal) - - # property sheet/page demo - ps = win32ui.CreatePropertySheet("Property Sheet/Page Demo") - # Create a completely standard PropertyPage. - page1 = win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO1) - # Create our custom property page. 
- page2 = TestPage(win32ui.IDD_PROPDEMO2) - ps.AddPage(page1) - ps.AddPage(page2) - if modal: - ps.DoModal() - else: - style = ( - win32con.WS_SYSMENU - | win32con.WS_POPUP - | win32con.WS_CAPTION - | win32con.DS_MODALFRAME - | win32con.WS_VISIBLE - ) - styleex = win32con.WS_EX_DLGMODALFRAME | win32con.WS_EX_PALETTEWINDOW - ps.CreateWindow(win32ui.GetMainFrame(), style, styleex) - - -def test(modal=1): - # dlg=dialog.Dialog(1010) - # dlg.CreateWindow() - # dlg.EndDialog(0) - # del dlg - # return - # property sheet/page demo - ps = TestSheet("Property Sheet/Page Demo") - page1 = win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO1) - page2 = win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO2) - ps.AddPage(page1) - ps.AddPage(page2) - del page1 - del page2 - if modal: - ps.DoModal() - else: - ps.CreateWindow(win32ui.GetMainFrame()) - return ps - - -def d(): - dlg = win32ui.CreateDialog(win32ui.IDD_DEBUGGER) - dlg.datalist.append((win32ui.IDC_DBG_RADIOSTACK, "radio")) - print("data list is ", dlg.datalist) - dlg.data["radio"] = 1 - dlg.DoModal() - print(dlg.data["radio"]) - - -if __name__ == "__main__": - demo(1) diff --git a/lib/pythonwin/pywin/Demos/dyndlg.py b/lib/pythonwin/pywin/Demos/dyndlg.py deleted file mode 100644 index 46a4e7fc..00000000 --- a/lib/pythonwin/pywin/Demos/dyndlg.py +++ /dev/null @@ -1,104 +0,0 @@ -# dyndlg.py -# contributed by Curt Hagenlocher - -# Dialog Template params: -# Parameter 0 - Window caption -# Parameter 1 - Bounds (rect tuple) -# Parameter 2 - Window style -# Parameter 3 - Extended style -# Parameter 4 - Font tuple -# Parameter 5 - Menu name -# Parameter 6 - Window class -# Dialog item params: -# Parameter 0 - Window class -# Parameter 1 - Text -# Parameter 2 - ID -# Parameter 3 - Bounds -# Parameter 4 - Style -# Parameter 5 - Extended style -# Parameter 6 - Extra data - - -import win32con -import win32ui -from pywin.mfc import dialog, window - - -def MakeDlgTemplate(): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | 
win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - dlg = [ - ["Select Warehouse", (0, 0, 177, 93), style, None, (8, "MS Sans Serif")], - ] - dlg.append([130, "Current Warehouse:", -1, (7, 7, 69, 9), cs | win32con.SS_LEFT]) - dlg.append([130, "ASTORIA", 128, (16, 17, 99, 7), cs | win32con.SS_LEFT]) - dlg.append([130, "New &Warehouse:", -1, (7, 29, 69, 9), cs | win32con.SS_LEFT]) - s = win32con.WS_TABSTOP | cs - # dlg.append([131, None, 130, (5, 40, 110, 48), - # s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER]) - dlg.append( - [ - "{8E27C92B-1264-101C-8A2F-040224009C02}", - None, - 131, - (5, 40, 110, 48), - win32con.WS_TABSTOP, - ] - ) - - dlg.append( - [128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON] - ) - s = win32con.BS_PUSHBUTTON | s - dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14), s]) - dlg.append([128, "&Help", 100, (124, 74, 50, 14), s]) - - return dlg - - -def test1(): - win32ui.CreateDialogIndirect(MakeDlgTemplate()).DoModal() - - -def test2(): - dialog.Dialog(MakeDlgTemplate()).DoModal() - - -def test3(): - dlg = win32ui.LoadDialogResource(win32ui.IDD_SET_TABSTOPS) - dlg[0][0] = "New Dialog Title" - dlg[0][1] = (80, 20, 161, 60) - dlg[1][1] = "&Confusion:" - cs = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | win32con.WS_TABSTOP - | win32con.BS_PUSHBUTTON - ) - dlg.append([128, "&Help", 100, (111, 41, 40, 14), cs]) - dialog.Dialog(dlg).DoModal() - - -def test4(): - page1 = dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO1)) - page2 = dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO2)) - ps = dialog.PropertySheet("Property Sheet/Page Demo", None, [page1, page2]) - ps.DoModal() - - -def testall(): - test1() - test2() - test3() - test4() - - -if __name__ == "__main__": - testall() diff --git 
a/lib/pythonwin/pywin/Demos/fontdemo.py b/lib/pythonwin/pywin/Demos/fontdemo.py deleted file mode 100644 index fc35f4ca..00000000 --- a/lib/pythonwin/pywin/Demos/fontdemo.py +++ /dev/null @@ -1,86 +0,0 @@ -# Demo of Generic document windows, DC, and Font usage -# by Dave Brennan (brennan@hal.com) - -# usage examples: - -# >>> from fontdemo import * -# >>> d = FontDemo('Hello, Python') -# >>> f1 = { 'name':'Arial', 'height':36, 'weight':win32con.FW_BOLD} -# >>> d.SetFont(f1) -# >>> f2 = {'name':'Courier New', 'height':24, 'italic':1} -# >>> d.SetFont (f2) - -import win32api -import win32con -import win32ui -from pywin.mfc import docview - -# font is a dictionary in which the following elements matter: -# (the best matching font to supplied parameters is returned) -# name string name of the font as known by Windows -# size point size of font in logical units -# weight weight of font (win32con.FW_NORMAL, win32con.FW_BOLD) -# italic boolean; true if set to anything but None -# underline boolean; true if set to anything but None - - -class FontView(docview.ScrollView): - def __init__( - self, doc, text="Python Rules!", font_spec={"name": "Arial", "height": 42} - ): - docview.ScrollView.__init__(self, doc) - self.font = win32ui.CreateFont(font_spec) - self.text = text - self.width = self.height = 0 - # set up message handlers - self.HookMessage(self.OnSize, win32con.WM_SIZE) - - def OnAttachedObjectDeath(self): - docview.ScrollView.OnAttachedObjectDeath(self) - del self.font - - def SetFont(self, new_font): - # Change font on the fly - self.font = win32ui.CreateFont(new_font) - # redraw the entire client window - selfInvalidateRect(None) - - def OnSize(self, params): - lParam = params[3] - self.width = win32api.LOWORD(lParam) - self.height = win32api.HIWORD(lParam) - - def OnPrepareDC(self, dc, printinfo): - # Set up the DC for forthcoming OnDraw call - self.SetScrollSizes(win32con.MM_TEXT, (100, 100)) - dc.SetTextColor(win32api.RGB(0, 0, 255)) - 
dc.SetBkColor(win32api.GetSysColor(win32con.COLOR_WINDOW)) - dc.SelectObject(self.font) - dc.SetTextAlign(win32con.TA_CENTER | win32con.TA_BASELINE) - - def OnDraw(self, dc): - if self.width == 0 and self.height == 0: - left, top, right, bottom = self.GetClientRect() - self.width = right - left - self.height = bottom - top - x, y = self.width // 2, self.height // 2 - dc.TextOut(x, y, self.text) - - -def FontDemo(): - # create doc/view - template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, None, None, FontView) - doc = template.OpenDocumentFile(None) - doc.SetTitle("Font Demo") - # print "template is ", template, "obj is", template._obj_ - template.close() - - -# print "closed" -# del template - -if __name__ == "__main__": - import demoutils - - if demoutils.NeedGoodGUI(): - FontDemo() diff --git a/lib/pythonwin/pywin/Demos/guidemo.py b/lib/pythonwin/pywin/Demos/guidemo.py deleted file mode 100644 index b4d88159..00000000 --- a/lib/pythonwin/pywin/Demos/guidemo.py +++ /dev/null @@ -1,86 +0,0 @@ -# GUI Demo - just a worker script to invoke all the other demo/test scripts. 
-import sys - -import __main__ -import regutil -import win32api -import win32ui - -demos = [ # ('Font', 'import fontdemo;fontdemo.FontDemo()'), - ("Open GL Demo", "import openGLDemo;openGLDemo.test()"), - ("Threaded GUI", "import threadedgui;threadedgui.ThreadedDemo()"), - ("Tree View Demo", "import hiertest;hiertest.demoboth()"), - ("3-Way Splitter Window", "import splittst;splittst.demo()"), - ("Custom Toolbars and Tooltips", "import toolbar;toolbar.test()"), - ("Progress Bar", "import progressbar;progressbar.demo()"), - ("Slider Control", "import sliderdemo;sliderdemo.demo()"), - ("Dynamic window creation", "import createwin;createwin.demo()"), - ("Various Dialog demos", "import dlgtest;dlgtest.demo()"), - ("OCX Control Demo", "from ocx import ocxtest;ocxtest.demo()"), - ("OCX Serial Port Demo", "from ocx import ocxserialtest; ocxserialtest.test()"), - ( - "IE4 Control Demo", - 'from ocx import webbrowser; webbrowser.Demo("http://www.python.org")', - ), -] - - -def demo(): - try: - # seeif I can locate the demo files. - import fontdemo - except ImportError: - # else put the demos direectory on the path (if not already) - try: - instPath = regutil.GetRegistryDefaultValue( - regutil.BuildDefaultPythonKey() + "\\InstallPath" - ) - except win32api.error: - print( - "The InstallPath can not be located, and the Demos directory is not on the path" - ) - instPath = "." 
- - demosDir = win32ui.FullPath(instPath + "\\Demos") - for path in sys.path: - if win32ui.FullPath(path) == demosDir: - break - else: - sys.path.append(demosDir) - import fontdemo - - import sys - - if "/go" in sys.argv: - for name, cmd in demos: - try: - exec(cmd) - except: - print( - "Demo of %s failed - %s:%s" - % (cmd, sys.exc_info()[0], sys.exc_info()[1]) - ) - return - # Otherwise allow the user to select the demo to run - - import pywin.dialogs.list - - while 1: - rc = pywin.dialogs.list.SelectFromLists("Select a Demo", demos, ["Demo Title"]) - if rc is None: - break - title, cmd = demos[rc] - try: - exec(cmd) - except: - print( - "Demo of %s failed - %s:%s" - % (title, sys.exc_info()[0], sys.exc_info()[1]) - ) - - -if __name__ == __main__.__name__: - import demoutils - - if demoutils.NeedGoodGUI(): - demo() diff --git a/lib/pythonwin/pywin/Demos/hiertest.py b/lib/pythonwin/pywin/Demos/hiertest.py deleted file mode 100644 index 287b71da..00000000 --- a/lib/pythonwin/pywin/Demos/hiertest.py +++ /dev/null @@ -1,138 +0,0 @@ -import os - -import commctrl -import win32ui -from pywin.mfc import docview, window -from pywin.tools import hierlist - - -# directory listbox -# This has obvious limitations - doesnt track subdirs, etc. Demonstrates -# simple use of Python code for querying the tree as needed. -# Only use strings, and lists of strings (from curdir()) -class DirHierList(hierlist.HierList): - def __init__(self, root, listBoxID=win32ui.IDC_LIST1): - hierlist.HierList.__init__(self, root, win32ui.IDB_HIERFOLDERS, listBoxID) - - def GetText(self, item): - return os.path.basename(item) - - def GetSubList(self, item): - if os.path.isdir(item): - ret = [os.path.join(item, fname) for fname in os.listdir(item)] - else: - ret = None - return ret - - # if the item is a dir, it is expandable. 
- def IsExpandable(self, item): - return os.path.isdir(item) - - def GetSelectedBitmapColumn(self, item): - return self.GetBitmapColumn(item) + 6 # Use different color for selection - - -class TestDocument(docview.Document): - def __init__(self, template): - docview.Document.__init__(self, template) - self.hierlist = hierlist.HierListWithItems( - HLIFileDir("\\"), win32ui.IDB_HIERFOLDERS, win32ui.AFX_IDW_PANE_FIRST - ) - - -class HierListView(docview.TreeView): - def OnInitialUpdate(self): - rc = self._obj_.OnInitialUpdate() - self.hierList = self.GetDocument().hierlist - self.hierList.HierInit(self.GetParent()) - self.hierList.SetStyle( - commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS - ) - return rc - - -class HierListFrame(window.MDIChildWnd): - pass - - -def GetTestRoot(): - tree1 = ("Tree 1", [("Item 1", "Item 1 data"), "Item 2", 3]) - tree2 = ("Tree 2", [("Item 2.1", "Item 2 data"), "Item 2.2", 2.3]) - return ("Root", [tree1, tree2, "Item 3"]) - - -def demoboth(): - template = docview.DocTemplate( - win32ui.IDR_PYTHONTYPE, TestDocument, HierListFrame, HierListView - ) - template.OpenDocumentFile(None).SetTitle("Hierlist demo") - - demomodeless() - - -def demomodeless(): - testList2 = DirHierList("\\") - dlg = hierlist.HierDialog("hier list test", testList2) - dlg.CreateWindow() - - -def demodlg(): - testList2 = DirHierList("\\") - dlg = hierlist.HierDialog("hier list test", testList2) - dlg.DoModal() - - -def demo(): - template = docview.DocTemplate( - win32ui.IDR_PYTHONTYPE, TestDocument, HierListFrame, HierListView - ) - template.OpenDocumentFile(None).SetTitle("Hierlist demo") - - -# -# Demo/Test for HierList items. -# -# Easy to make a better directory program. 
-# -class HLIFileDir(hierlist.HierListItem): - def __init__(self, filename): - self.filename = filename - hierlist.HierListItem.__init__(self) - - def GetText(self): - try: - return "%-20s %d bytes" % ( - os.path.basename(self.filename), - os.stat(self.filename)[6], - ) - except os.error as details: - return "%-20s - %s" % (self.filename, details[1]) - - def IsExpandable(self): - return os.path.isdir(self.filename) - - def GetSubList(self): - ret = [] - for newname in os.listdir(self.filename): - if newname not in (".", ".."): - ret.append(HLIFileDir(os.path.join(self.filename, newname))) - return ret - - -def demohli(): - template = docview.DocTemplate( - win32ui.IDR_PYTHONTYPE, - TestDocument, - hierlist.HierListFrame, - hierlist.HierListView, - ) - template.OpenDocumentFile(None).SetTitle("Hierlist demo") - - -if __name__ == "__main__": - import demoutils - - if demoutils.HaveGoodGUI(): - demoboth() - else: - demodlg() diff --git a/lib/pythonwin/pywin/Demos/menutest.py b/lib/pythonwin/pywin/Demos/menutest.py deleted file mode 100644 index d2ba65b6..00000000 --- a/lib/pythonwin/pywin/Demos/menutest.py +++ /dev/null @@ -1,13 +0,0 @@ -# Run this as a python script, to gray "close" off the edit window system menu. -import win32con -from pywin.framework import interact - -if __name__ == "__main__": - import demoutils - - if demoutils.NeedGoodGUI(): - win = interact.edit.currentView.GetParent() - menu = win.GetSystemMenu() - id = menu.GetMenuItemID(6) - menu.EnableMenuItem(id, win32con.MF_BYCOMMAND | win32con.MF_GRAYED) - print("The interactive window's 'Close' menu item is now disabled.") diff --git a/lib/pythonwin/pywin/Demos/objdoc.py b/lib/pythonwin/pywin/Demos/objdoc.py deleted file mode 100644 index fff79253..00000000 --- a/lib/pythonwin/pywin/Demos/objdoc.py +++ /dev/null @@ -1,57 +0,0 @@ -# This is a sample file, and shows the basic framework for using an "Object" based -# document, rather than a "filename" based document. 
-# This is referenced by the Pythonwin .html documentation. - -# In the example below, the OpenObject() method is used instead of OpenDocumentFile, -# and all the core MFC document open functionality is retained. - -import win32ui -from pywin.mfc import docview - - -class object_template(docview.DocTemplate): - def __init__(self): - docview.DocTemplate.__init__(self, None, None, None, object_view) - - def OpenObject(self, object): # Use this instead of OpenDocumentFile. - # Look for existing open document - for doc in self.GetDocumentList(): - print("document is ", doc) - if doc.object is object: - doc.GetFirstView().ActivateFrame() - return doc - # not found - new one. - doc = object_document(self, object) - frame = self.CreateNewFrame(doc) - doc.OnNewDocument() - doc.SetTitle(str(object)) - self.InitialUpdateFrame(frame, doc) - return doc - - -class object_document(docview.Document): - def __init__(self, template, object): - docview.Document.__init__(self, template) - self.object = object - - def OnOpenDocument(self, name): - raise RuntimeError("Should not be called if template strings set up correctly") - return 0 - - -class object_view(docview.EditView): - def OnInitialUpdate(self): - self.ReplaceSel("Object is %s" % repr(self.GetDocument().object)) - - -def demo(): - t = object_template() - d = t.OpenObject(win32ui) - return (t, d) - - -if __name__ == "__main__": - import demoutils - - if demoutils.NeedGoodGUI(): - demo() diff --git a/lib/pythonwin/pywin/Demos/ocx/__init__.py b/lib/pythonwin/pywin/Demos/ocx/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pythonwin/pywin/Demos/ocx/demoutils.py b/lib/pythonwin/pywin/Demos/ocx/demoutils.py deleted file mode 100644 index 27fa091c..00000000 --- a/lib/pythonwin/pywin/Demos/ocx/demoutils.py +++ /dev/null @@ -1,67 +0,0 @@ -# Utilities for the demos - -import sys - -import win32api -import win32con -import win32ui - -NotScriptMsg = """\ -This demo program is not designed to be run as a 
Script, but is -probably used by some other test program. Please try another demo. -""" - -NeedGUIMsg = """\ -This demo program can only be run from inside of Pythonwin - -You must start Pythonwin, and select 'Run' from the toolbar or File menu -""" - - -NeedAppMsg = """\ -This demo program is a 'Pythonwin Application'. - -It is more demo code than an example of Pythonwin's capabilities. - -To run it, you must execute the command: -pythonwin.exe /app "%s" - -Would you like to execute it now? -""" - - -def NotAScript(): - import win32ui - - win32ui.MessageBox(NotScriptMsg, "Demos") - - -def NeedGoodGUI(): - from pywin.framework.app import HaveGoodGUI - - rc = HaveGoodGUI() - if not rc: - win32ui.MessageBox(NeedGUIMsg, "Demos") - return rc - - -def NeedApp(): - import win32ui - - rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO) - if rc == win32con.IDYES: - try: - parent = win32ui.GetMainFrame().GetSafeHwnd() - win32api.ShellExecute( - parent, None, "pythonwin.exe", '/app "%s"' % sys.argv[0], None, 1 - ) - except win32api.error as details: - win32ui.MessageBox("Error executing command - %s" % (details), "Demos") - - -from pywin.framework.app import HaveGoodGUI - -if __name__ == "__main__": - from . import demoutils - - demoutils.NotAScript() diff --git a/lib/pythonwin/pywin/Demos/ocx/flash.py b/lib/pythonwin/pywin/Demos/ocx/flash.py deleted file mode 100644 index 239240ee..00000000 --- a/lib/pythonwin/pywin/Demos/ocx/flash.py +++ /dev/null @@ -1,95 +0,0 @@ -# By Bradley Schatz -# simple flash/python application demonstrating bidirectional -# communicaion between flash and python. Click the sphere to see -# behavior. 
Uses Bounce.swf from FlashBounce.zip, available from -# http://pages.cpsc.ucalgary.ca/~saul/vb_examples/tutorial12/ - -# Update to the path of the .swf file (note it could be a true URL) -flash_url = "c:\\bounce.swf" - -import sys - -import regutil -import win32api -import win32con -import win32ui -from pywin.mfc import activex, window -from win32com.client import gencache - -FlashModule = gencache.EnsureModule("{D27CDB6B-AE6D-11CF-96B8-444553540000}", 0, 1, 0) - -if FlashModule is None: - raise ImportError("Flash does not appear to be installed.") - - -class MyFlashComponent(activex.Control, FlashModule.ShockwaveFlash): - def __init__(self): - activex.Control.__init__(self) - FlashModule.ShockwaveFlash.__init__(self) - self.x = 50 - self.y = 50 - self.angle = 30 - self.started = 0 - - def OnFSCommand(self, command, args): - print("FSCommend", command, args) - self.x = self.x + 20 - self.y = self.y + 20 - self.angle = self.angle + 20 - if self.x > 200 or self.y > 200: - self.x = 0 - self.y = 0 - if self.angle > 360: - self.angle = 0 - self.SetVariable("xVal", self.x) - self.SetVariable("yVal", self.y) - self.SetVariable("angle", self.angle) - self.TPlay("_root.mikeBall") - - def OnProgress(self, percentDone): - print("PercentDone", percentDone) - - def OnReadyStateChange(self, newState): - # 0=Loading, 1=Uninitialized, 2=Loaded, 3=Interactive, 4=Complete - print("State", newState) - - -class BrowserFrame(window.MDIChildWnd): - def __init__(self, url=None): - if url is None: - self.url = regutil.GetRegisteredHelpFile("Main Python Documentation") - else: - self.url = url - pass # Dont call base class doc/view version... 
- - def Create(self, title, rect=None, parent=None): - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW - self._obj_ = win32ui.CreateMDIChild() - self._obj_.AttachObject(self) - self._obj_.CreateWindow(None, title, style, rect, parent) - rect = self.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - self.ocx = MyFlashComponent() - self.ocx.CreateControl( - "Flash Player", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000 - ) - self.ocx.LoadMovie(0, flash_url) - self.ocx.Play() - self.HookMessage(self.OnSize, win32con.WM_SIZE) - - def OnSize(self, params): - rect = self.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - self.ocx.SetWindowPos(0, rect, 0) - - -def Demo(): - url = None - if len(sys.argv) > 1: - url = win32api.GetFullPathName(sys.argv[1]) - f = BrowserFrame(url) - f.Create("Flash Player") - - -if __name__ == "__main__": - Demo() diff --git a/lib/pythonwin/pywin/Demos/ocx/msoffice.py b/lib/pythonwin/pywin/Demos/ocx/msoffice.py deleted file mode 100644 index be24ead8..00000000 --- a/lib/pythonwin/pywin/Demos/ocx/msoffice.py +++ /dev/null @@ -1,152 +0,0 @@ -# This demo uses some of the Microsoft Office components. -# -# It was taken from an MSDN article showing how to embed excel. -# It is not comlpete yet, but it _does_ show an Excel spreadsheet in a frame! -# - -import regutil -import win32con -import win32ui -import win32uiole -from pywin.mfc import activex, docview, object, window -from win32com.client import gencache - -# WordModule = gencache.EnsureModule('{00020905-0000-0000-C000-000000000046}', 1033, 8, 0) -# if WordModule is None: -# raise ImportError, "Microsoft Word version 8 does not appear to be installed." - - -class OleClientItem(object.CmdTarget): - def __init__(self, doc): - object.CmdTarget.__init__(self, win32uiole.CreateOleClientItem(doc)) - - def OnGetItemPosition(self): - # For now return a hard-coded rect. 
- return (10, 10, 210, 210) - - def OnActivate(self): - # Allow only one inplace activate item per frame - view = self.GetActiveView() - item = self.GetDocument().GetInPlaceActiveItem(view) - if item is not None and item._obj_ != self._obj_: - item.Close() - self._obj_.OnActivate() - - def OnChange(self, oleNotification, dwParam): - self._obj_.OnChange(oleNotification, dwParam) - self.GetDocument().UpdateAllViews(None) - - def OnChangeItemPosition(self, rect): - # During in-place activation CEmbed_ExcelCntrItem::OnChangeItemPosition - # is called by the server to change the position of the in-place - # window. Usually, this is a result of the data in the server - # document changing such that the extent has changed or as a result - # of in-place resizing. - # - # The default here is to call the base class, which will call - # COleClientItem::SetItemRects to move the item - # to the new position. - if not self._obj_.OnChangeItemPosition(self, rect): - return 0 - - # TODO: update any cache you may have of the item's rectangle/extent - return 1 - - -class OleDocument(object.CmdTarget): - def __init__(self, template): - object.CmdTarget.__init__(self, win32uiole.CreateOleDocument(template)) - self.EnableCompoundFile() - - -class ExcelView(docview.ScrollView): - def OnInitialUpdate(self): - self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) - self.HookMessage(self.OnSize, win32con.WM_SIZE) - - self.SetScrollSizes(win32con.MM_TEXT, (100, 100)) - rc = self._obj_.OnInitialUpdate() - self.EmbedExcel() - return rc - - def EmbedExcel(self): - doc = self.GetDocument() - self.clientItem = OleClientItem(doc) - self.clientItem.CreateNewItem("Excel.Sheet") - self.clientItem.DoVerb(-1, self) - doc.UpdateAllViews(None) - - def OnDraw(self, dc): - doc = self.GetDocument() - pos = doc.GetStartPosition() - clientItem, pos = doc.GetNextItem(pos) - clientItem.Draw(dc, (10, 10, 210, 210)) - - # Special handling of OnSetFocus and OnSize are required for a container - # when an object 
is being edited in-place. - def OnSetFocus(self, msg): - item = self.GetDocument().GetInPlaceActiveItem(self) - if ( - item is not None - and item.GetItemState() == win32uiole.COleClientItem_activeUIState - ): - wnd = item.GetInPlaceWindow() - if wnd is not None: - wnd.SetFocus() - return 0 # Dont get the base version called. - return 1 # Call the base version. - - def OnSize(self, params): - item = self.GetDocument().GetInPlaceActiveItem(self) - if item is not None: - item.SetItemRects() - return 1 # do call the base! - - -class OleTemplate(docview.DocTemplate): - def __init__( - self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None - ): - if MakeDocument is None: - MakeDocument = OleDocument - if MakeView is None: - MakeView = ExcelView - docview.DocTemplate.__init__( - self, resourceId, MakeDocument, MakeFrame, MakeView - ) - - -class WordFrame(window.MDIChildWnd): - def __init__(self, doc=None): - self._obj_ = win32ui.CreateMDIChild() - self._obj_.AttachObject(self) - # Dont call base class doc/view version... 
- - def Create(self, title, rect=None, parent=None): - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW - self._obj_.CreateWindow(None, title, style, rect, parent) - - rect = self.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - self.ocx = MyWordControl() - self.ocx.CreateControl( - "Microsoft Word", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 20000 - ) - - -def Demo(): - import sys - - import win32api - - docName = None - if len(sys.argv) > 1: - docName = win32api.GetFullPathName(sys.argv[1]) - OleTemplate().OpenDocumentFile(None) - - -# f = WordFrame(docName) -# f.Create("Microsoft Office") - -if __name__ == "__main__": - Demo() diff --git a/lib/pythonwin/pywin/Demos/ocx/ocxserialtest.py b/lib/pythonwin/pywin/Demos/ocx/ocxserialtest.py deleted file mode 100644 index 326d312c..00000000 --- a/lib/pythonwin/pywin/Demos/ocx/ocxserialtest.py +++ /dev/null @@ -1,133 +0,0 @@ -# ocxserialtest.py -# -# Sample that uses the mscomm OCX to talk to a serial -# device. 
- -# Very simple - queries a modem for ATI responses - -import pythoncom -import win32con -import win32ui -import win32uiole -from pywin.mfc import activex, dialog -from win32com.client import gencache - -SERIAL_SETTINGS = "19200,n,8,1" -SERIAL_PORT = 2 - -win32ui.DoWaitCursor(1) -serialModule = gencache.EnsureModule("{648A5603-2C6E-101B-82B6-000000000014}", 0, 1, 1) -win32ui.DoWaitCursor(0) -if serialModule is None: - raise ImportError("MS COMM Control does not appear to be installed on the PC") - - -def MakeDlgTemplate(): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - dlg = [ - ["Very Basic Terminal", (0, 0, 350, 180), style, None, (8, "MS Sans Serif")], - ] - s = win32con.WS_TABSTOP | cs - dlg.append( - [ - "RICHEDIT", - None, - 132, - (5, 5, 340, 170), - s - | win32con.ES_WANTRETURN - | win32con.ES_MULTILINE - | win32con.ES_AUTOVSCROLL - | win32con.WS_VSCROLL, - ] - ) - return dlg - - -#################################### -# -# Serial Control -# -class MySerialControl(activex.Control, serialModule.MSComm): - def __init__(self, parent): - activex.Control.__init__(self) - serialModule.MSComm.__init__(self) - self.parent = parent - - def OnComm(self): - self.parent.OnComm() - - -class TestSerDialog(dialog.Dialog): - def __init__(self, *args): - dialog.Dialog.__init__(*(self,) + args) - self.olectl = None - - def OnComm(self): - event = self.olectl.CommEvent - if event == serialModule.OnCommConstants.comEvReceive: - self.editwindow.ReplaceSel(self.olectl.Input) - - def OnKey(self, key): - if self.olectl: - self.olectl.Output = chr(key) - - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - self.editwindow = self.GetDlgItem(132) - self.editwindow.HookAllKeyStrokes(self.OnKey) - - self.olectl = MySerialControl(self) - try: - self.olectl.CreateControl( - "OCX", - win32con.WS_TABSTOP | 
win32con.WS_VISIBLE, - (7, 43, 500, 300), - self._obj_, - 131, - ) - except win32ui.error: - self.MessageBox("The Serial Control could not be created") - self.olectl = None - self.EndDialog(win32con.IDCANCEL) - if self.olectl: - self.olectl.Settings = SERIAL_SETTINGS - self.olectl.CommPort = SERIAL_PORT - self.olectl.RThreshold = 1 - try: - self.olectl.PortOpen = 1 - except pythoncom.com_error as details: - print( - "Could not open the specified serial port - %s" - % (details.excepinfo[2]) - ) - self.EndDialog(win32con.IDCANCEL) - return rc - - def OnDestroy(self, msg): - if self.olectl: - try: - self.olectl.PortOpen = 0 - except pythoncom.com_error as details: - print("Error closing port - %s" % (details.excepinfo[2])) - return dialog.Dialog.OnDestroy(self, msg) - - -def test(): - d = TestSerDialog(MakeDlgTemplate()) - d.DoModal() - - -if __name__ == "__main__": - from . import demoutils - - if demoutils.NeedGoodGUI(): - test() diff --git a/lib/pythonwin/pywin/Demos/ocx/ocxtest.py b/lib/pythonwin/pywin/Demos/ocx/ocxtest.py deleted file mode 100644 index 4a3d7336..00000000 --- a/lib/pythonwin/pywin/Demos/ocx/ocxtest.py +++ /dev/null @@ -1,250 +0,0 @@ -# OCX Tester for Pythonwin -# -# This file _is_ ready to run. All that is required is that the OCXs being tested -# are installed on your machine. -# -# The .py files behind the OCXs will be automatically generated and imported. 
- -import glob -import os - -import win32api -import win32con -import win32ui -import win32uiole -from pywin.mfc import activex, dialog, window -from win32com.client import gencache - - -def MakeDlgTemplate(): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - dlg = [ - ["OCX Demos", (0, 0, 350, 350), style, None, (8, "MS Sans Serif")], - ] - s = win32con.WS_TABSTOP | cs - # dlg.append([131, None, 130, (5, 40, 110, 48), - # s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER]) - # dlg.append(["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),win32con.WS_TABSTOP]) - - dlg.append( - [128, "About", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON] - ) - s = win32con.BS_PUSHBUTTON | s - dlg.append([128, "Close", win32con.IDCANCEL, (124, 22, 50, 14), s]) - - return dlg - - -#################################### -# -# Calendar test code -# - - -def GetTestCalendarClass(): - global calendarParentModule - win32ui.DoWaitCursor(1) - calendarParentModule = gencache.EnsureModule( - "{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0 - ) - win32ui.DoWaitCursor(0) - if calendarParentModule is None: - return None - - class TestCalDialog(dialog.Dialog): - def OnInitDialog(self): - class MyCal(activex.Control, calendarParentModule.Calendar): - def OnAfterUpdate(self): - print("OnAfterUpdate") - - def OnClick(self): - print("OnClick") - - def OnDblClick(self): - print("OnDblClick") - - def OnKeyDown(self, KeyCode, Shift): - print("OnKeyDown", KeyCode, Shift) - - def OnKeyPress(self, KeyAscii): - print("OnKeyPress", KeyAscii) - - def OnKeyUp(self, KeyCode, Shift): - print("OnKeyUp", KeyCode, Shift) - - def OnBeforeUpdate(self, Cancel): - print("OnBeforeUpdate", Cancel) - - def OnNewMonth(self): - print("OnNewMonth") - - def 
OnNewYear(self): - print("OnNewYear") - - rc = dialog.Dialog.OnInitDialog(self) - self.olectl = MyCal() - try: - self.olectl.CreateControl( - "OCX", - win32con.WS_TABSTOP | win32con.WS_VISIBLE, - (7, 43, 500, 300), - self._obj_, - 131, - ) - except win32ui.error: - self.MessageBox("The Calendar Control could not be created") - self.olectl = None - self.EndDialog(win32con.IDCANCEL) - - return rc - - def OnOK(self): - self.olectl.AboutBox() - - return TestCalDialog - - -#################################### -# -# Video Control -# -def GetTestVideoModule(): - global videoControlModule, videoControlFileName - win32ui.DoWaitCursor(1) - videoControlModule = gencache.EnsureModule( - "{05589FA0-C356-11CE-BF01-00AA0055595A}", 0, 2, 0 - ) - win32ui.DoWaitCursor(0) - if videoControlModule is None: - return None - fnames = glob.glob(os.path.join(win32api.GetWindowsDirectory(), "*.avi")) - if not fnames: - print("No AVI files available in system directory") - return None - videoControlFileName = fnames[0] - return videoControlModule - - -def GetTestVideoDialogClass(): - if GetTestVideoModule() is None: - return None - - class TestVideoDialog(dialog.Dialog): - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - try: - self.olectl = activex.MakeControlInstance( - videoControlModule.ActiveMovie - ) - self.olectl.CreateControl( - "", - win32con.WS_TABSTOP | win32con.WS_VISIBLE, - (7, 43, 500, 300), - self._obj_, - 131, - ) - except win32ui.error: - self.MessageBox("The Video Control could not be created") - self.olectl = None - self.EndDialog(win32con.IDCANCEL) - return - - self.olectl.FileName = videoControlFileName - # self.olectl.Run() - return rc - - def OnOK(self): - self.olectl.AboutBox() - - return TestVideoDialog - - -############### -# -# An OCX in an MDI Frame -# -class OCXFrame(window.MDIChildWnd): - def __init__(self): - pass # Dont call base class doc/view version... 
- - def Create(self, controlClass, title, rect=None, parent=None): - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW - self._obj_ = win32ui.CreateMDIChild() - self._obj_.AttachObject(self) - self._obj_.CreateWindow(None, title, style, rect, parent) - - rect = self.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - self.ocx = controlClass() - self.ocx.CreateControl( - "", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000 - ) - - -def MDITest(): - calendarParentModule = gencache.EnsureModule( - "{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0 - ) - - class MyCal(activex.Control, calendarParentModule.Calendar): - def OnAfterUpdate(self): - print("OnAfterUpdate") - - def OnClick(self): - print("OnClick") - - f = OCXFrame() - f.Create(MyCal, "Calendar Test") - - -def test1(): - klass = GetTestCalendarClass() - if klass is None: - print( - "Can not test the MSAccess Calendar control - it does not appear to be installed" - ) - return - - d = klass(MakeDlgTemplate()) - d.DoModal() - - -def test2(): - klass = GetTestVideoDialogClass() - if klass is None: - print("Can not test the Video OCX - it does not appear to be installed,") - print("or no AVI files can be found.") - return - d = klass(MakeDlgTemplate()) - d.DoModal() - d = None - - -def test3(): - d = TestCOMMDialog(MakeDlgTemplate()) - d.DoModal() - d = None - - -def testall(): - test1() - test2() - - -def demo(): - testall() - - -if __name__ == "__main__": - from . import demoutils - - if demoutils.NeedGoodGUI(): - testall() diff --git a/lib/pythonwin/pywin/Demos/ocx/webbrowser.py b/lib/pythonwin/pywin/Demos/ocx/webbrowser.py deleted file mode 100644 index cc17445e..00000000 --- a/lib/pythonwin/pywin/Demos/ocx/webbrowser.py +++ /dev/null @@ -1,72 +0,0 @@ -# This demo uses the IE4 Web Browser control. - -# It catches an "OnNavigate" event, and updates the frame title. 
-# (event stuff by Neil Hodgson) - -import sys - -import regutil -import win32api -import win32con -import win32ui -from pywin.mfc import activex, window -from win32com.client import gencache - -WebBrowserModule = gencache.EnsureModule( - "{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}", 0, 1, 1 -) -if WebBrowserModule is None: - raise ImportError("IE4 does not appear to be installed.") - - -class MyWebBrowser(activex.Control, WebBrowserModule.WebBrowser): - def OnBeforeNavigate2( - self, pDisp, URL, Flags, TargetFrameName, PostData, Headers, Cancel - ): - self.GetParent().OnNavigate(URL) - # print "BeforeNavigate2", pDisp, URL, Flags, TargetFrameName, PostData, Headers, Cancel - - -class BrowserFrame(window.MDIChildWnd): - def __init__(self, url=None): - if url is None: - self.url = regutil.GetRegisteredHelpFile("Main Python Documentation") - if self.url is None: - self.url = "http://www.python.org" - else: - self.url = url - pass # Dont call base class doc/view version... - - def Create(self, title, rect=None, parent=None): - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW - self._obj_ = win32ui.CreateMDIChild() - self._obj_.AttachObject(self) - self._obj_.CreateWindow(None, title, style, rect, parent) - rect = self.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - self.ocx = MyWebBrowser() - self.ocx.CreateControl( - "Web Browser", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000 - ) - self.ocx.Navigate(self.url) - self.HookMessage(self.OnSize, win32con.WM_SIZE) - - def OnSize(self, params): - rect = self.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - self.ocx.SetWindowPos(0, rect, 0) - - def OnNavigate(self, url): - title = "Web Browser - %s" % (url,) - self.SetWindowText(title) - - -def Demo(url=None): - if url is None and len(sys.argv) > 1: - url = win32api.GetFullPathName(sys.argv[1]) - f = BrowserFrame(url) - f.Create("Web Browser") - - -if __name__ == "__main__": - Demo() diff 
--git a/lib/pythonwin/pywin/Demos/openGLDemo.py b/lib/pythonwin/pywin/Demos/openGLDemo.py deleted file mode 100644 index 9274949d..00000000 --- a/lib/pythonwin/pywin/Demos/openGLDemo.py +++ /dev/null @@ -1,370 +0,0 @@ -# Ported from the win32 and MFC OpenGL Samples. - -import sys - -from pywin.mfc import docview - -try: - from OpenGL.GL import * # nopycln: import - from OpenGL.GLU import * # nopycln: import -except ImportError: - print("The OpenGL extensions do not appear to be installed.") - print("This Pythonwin demo can not run") - sys.exit(1) - -import timer -import win32api -import win32con -import win32ui - -PFD_TYPE_RGBA = 0 -PFD_TYPE_COLORINDEX = 1 -PFD_MAIN_PLANE = 0 -PFD_OVERLAY_PLANE = 1 -PFD_UNDERLAY_PLANE = -1 -PFD_DOUBLEBUFFER = 0x00000001 -PFD_STEREO = 0x00000002 -PFD_DRAW_TO_WINDOW = 0x00000004 -PFD_DRAW_TO_BITMAP = 0x00000008 -PFD_SUPPORT_GDI = 0x00000010 -PFD_SUPPORT_OPENGL = 0x00000020 -PFD_GENERIC_FORMAT = 0x00000040 -PFD_NEED_PALETTE = 0x00000080 -PFD_NEED_SYSTEM_PALETTE = 0x00000100 -PFD_SWAP_EXCHANGE = 0x00000200 -PFD_SWAP_COPY = 0x00000400 -PFD_SWAP_LAYER_BUFFERS = 0x00000800 -PFD_GENERIC_ACCELERATED = 0x00001000 -PFD_DEPTH_DONTCARE = 0x20000000 -PFD_DOUBLEBUFFER_DONTCARE = 0x40000000 -PFD_STEREO_DONTCARE = 0x80000000 - - -# threeto8 = [0, 0o111>>1, 0o222>>1, 0o333>>1, 0o444>>1, 0o555>>1, 0o666>>1, 0o377] -threeto8 = [0, 73 >> 1, 146 >> 1, 219 >> 1, 292 >> 1, 365 >> 1, 438 >> 1, 255] -twoto8 = [0, 0x55, 0xAA, 0xFF] -oneto8 = [0, 255] - - -def ComponentFromIndex(i, nbits, shift): - # val = (unsigned char) (i >> shift); - val = (i >> shift) & 0xF - if nbits == 1: - val = val & 0x1 - return oneto8[val] - elif nbits == 2: - val = val & 0x3 - return twoto8[val] - elif nbits == 3: - val = val & 0x7 - return threeto8[val] - else: - return 0 - - -OpenGLViewParent = docview.ScrollView - - -class OpenGLView(OpenGLViewParent): - def PreCreateWindow(self, cc): - self.HookMessage(self.OnSize, win32con.WM_SIZE) - # An OpenGL window must be created with 
the following flags and must not - # include CS_PARENTDC for the class style. Refer to SetPixelFormat - # documentation in the "Comments" section for further information. - style = cc[5] - style = style | win32con.WS_CLIPSIBLINGS | win32con.WS_CLIPCHILDREN - cc = cc[0], cc[1], cc[2], cc[3], cc[4], style, cc[6], cc[7], cc[8] - cc = self._obj_.PreCreateWindow(cc) - return cc - - def OnSize(self, params): - lParam = params[3] - cx = win32api.LOWORD(lParam) - cy = win32api.HIWORD(lParam) - glViewport(0, 0, cx, cy) - - if self.oldrect[2] > cx or self.oldrect[3] > cy: - self.RedrawWindow() - - self.OnSizeChange(cx, cy) - - self.oldrect = self.oldrect[0], self.oldrect[1], cx, cy - - def OnInitialUpdate(self): - self.SetScaleToFitSize( - (100, 100) - ) # or SetScrollSizes() - A Pythonwin requirement - return self._obj_.OnInitialUpdate() - - # return rc - - def OnCreate(self, cs): - self.oldrect = self.GetClientRect() - self._InitContexts() - self.Init() - - def OnDestroy(self, msg): - self.Term() - self._DestroyContexts() - return OpenGLViewParent.OnDestroy(self, msg) - - def OnDraw(self, dc): - self.DrawScene() - - def OnEraseBkgnd(self, dc): - return 1 - - # The OpenGL helpers - def _SetupPixelFormat(self): - dc = self.dc.GetSafeHdc() - pfd = CreatePIXELFORMATDESCRIPTOR() - pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER - pfd.iPixelType = PFD_TYPE_RGBA - pfd.cColorBits = 24 - pfd.cDepthBits = 32 - pfd.iLayerType = PFD_MAIN_PLANE - pixelformat = ChoosePixelFormat(dc, pfd) - SetPixelFormat(dc, pixelformat, pfd) - self._CreateRGBPalette() - - def _CreateRGBPalette(self): - dc = self.dc.GetSafeHdc() - n = GetPixelFormat(dc) - pfd = DescribePixelFormat(dc, n) - if pfd.dwFlags & PFD_NEED_PALETTE: - n = 1 << pfd.cColorBits - pal = [] - for i in range(n): - this = ( - ComponentFromIndex(i, pfd.cRedBits, pfd.cRedShift), - ComponentFromIndex(i, pfd.cGreenBits, pfd.cGreenShift), - ComponentFromIndex(i, pfd.cBlueBits, pfd.cBlueShift), - 0, - ) - 
pal.append(this) - hpal = win32ui.CreatePalette(pal) - self.dc.SelectPalette(hpal, 0) - self.dc.RealizePalette() - - def _InitContexts(self): - self.dc = self.GetDC() - self._SetupPixelFormat() - hrc = wglCreateContext(self.dc.GetSafeHdc()) - wglMakeCurrent(self.dc.GetSafeHdc(), hrc) - - def _DestroyContexts(self): - hrc = wglGetCurrentContext() - wglMakeCurrent(0, 0) - if hrc: - wglDeleteContext(hrc) - - # The methods to support OpenGL - def DrawScene(self): - assert 0, "You must override this method" - - def Init(self): - assert 0, "You must override this method" - - def OnSizeChange(self, cx, cy): - pass - - def Term(self): - pass - - -class TestView(OpenGLView): - def OnSizeChange(self, right, bottom): - glClearColor(0.0, 0.0, 0.0, 1.0) - glClearDepth(1.0) - glEnable(GL_DEPTH_TEST) - - glMatrixMode(GL_PROJECTION) - if bottom: - aspect = right / bottom - else: - aspect = 0 # When window created! - glLoadIdentity() - gluPerspective(45.0, aspect, 3.0, 7.0) - glMatrixMode(GL_MODELVIEW) - - near_plane = 3.0 - far_plane = 7.0 - maxObjectSize = 3.0 - self.radius = near_plane + maxObjectSize / 2.0 - - def Init(self): - pass - - def DrawScene(self): - glClearColor(0.0, 0.0, 0.0, 1.0) - glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) - - glPushMatrix() - glTranslatef(0.0, 0.0, -self.radius) - - self._DrawCone() - - self._DrawPyramid() - - glPopMatrix() - glFinish() - - SwapBuffers(wglGetCurrentDC()) - - def _DrawCone(self): - glColor3f(0.0, 1.0, 0.0) - - glPushMatrix() - glTranslatef(-1.0, 0.0, 0.0) - quadObj = gluNewQuadric() - gluQuadricDrawStyle(quadObj, GLU_FILL) - gluQuadricNormals(quadObj, GLU_SMOOTH) - gluCylinder(quadObj, 1.0, 0.0, 1.0, 20, 10) - # gluDeleteQuadric(quadObj); - glPopMatrix() - - def _DrawPyramid(self): - glPushMatrix() - glTranslatef(1.0, 0.0, 0.0) - glBegin(GL_TRIANGLE_FAN) - glColor3f(1.0, 0.0, 0.0) - glVertex3f(0.0, 1.0, 0.0) - glColor3f(0.0, 1.0, 0.0) - glVertex3f(-1.0, 0.0, 0.0) - glColor3f(0.0, 0.0, 1.0) - glVertex3f(0.0, 0.0, 1.0) - 
glColor3f(0.0, 1.0, 0.0) - glVertex3f(1.0, 0.0, 0.0) - glEnd() - glPopMatrix() - - -class CubeView(OpenGLView): - def OnSizeChange(self, right, bottom): - glClearColor(0.0, 0.0, 0.0, 1.0) - glClearDepth(1.0) - glEnable(GL_DEPTH_TEST) - - glMatrixMode(GL_PROJECTION) - if bottom: - aspect = right / bottom - else: - aspect = 0 # When window created! - glLoadIdentity() - gluPerspective(45.0, aspect, 3.0, 7.0) - glMatrixMode(GL_MODELVIEW) - - near_plane = 3.0 - far_plane = 7.0 - maxObjectSize = 3.0 - self.radius = near_plane + maxObjectSize / 2.0 - - def Init(self): - self.busy = 0 - self.wAngleY = 10.0 - self.wAngleX = 1.0 - self.wAngleZ = 5.0 - self.timerid = timer.set_timer(150, self.OnTimer) - - def OnTimer(self, id, timeVal): - self.DrawScene() - - def Term(self): - timer.kill_timer(self.timerid) - - def DrawScene(self): - if self.busy: - return - self.busy = 1 - - glClearColor(0.0, 0.0, 0.0, 1.0) - glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) - - glPushMatrix() - - glTranslatef(0.0, 0.0, -self.radius) - glRotatef(self.wAngleX, 1.0, 0.0, 0.0) - glRotatef(self.wAngleY, 0.0, 1.0, 0.0) - glRotatef(self.wAngleZ, 0.0, 0.0, 1.0) - - self.wAngleX = self.wAngleX + 1.0 - self.wAngleY = self.wAngleY + 10.0 - self.wAngleZ = self.wAngleZ + 5.0 - - glBegin(GL_QUAD_STRIP) - glColor3f(1.0, 0.0, 1.0) - glVertex3f(-0.5, 0.5, 0.5) - - glColor3f(1.0, 0.0, 0.0) - glVertex3f(-0.5, -0.5, 0.5) - - glColor3f(1.0, 1.0, 1.0) - glVertex3f(0.5, 0.5, 0.5) - - glColor3f(1.0, 1.0, 0.0) - glVertex3f(0.5, -0.5, 0.5) - - glColor3f(0.0, 1.0, 1.0) - glVertex3f(0.5, 0.5, -0.5) - - glColor3f(0.0, 1.0, 0.0) - glVertex3f(0.5, -0.5, -0.5) - - glColor3f(0.0, 0.0, 1.0) - glVertex3f(-0.5, 0.5, -0.5) - - glColor3f(0.0, 0.0, 0.0) - glVertex3f(-0.5, -0.5, -0.5) - - glColor3f(1.0, 0.0, 1.0) - glVertex3f(-0.5, 0.5, 0.5) - - glColor3f(1.0, 0.0, 0.0) - glVertex3f(-0.5, -0.5, 0.5) - - glEnd() - - glBegin(GL_QUADS) - glColor3f(1.0, 0.0, 1.0) - glVertex3f(-0.5, 0.5, 0.5) - - glColor3f(1.0, 1.0, 1.0) - 
glVertex3f(0.5, 0.5, 0.5) - - glColor3f(0.0, 1.0, 1.0) - glVertex3f(0.5, 0.5, -0.5) - - glColor3f(0.0, 0.0, 1.0) - glVertex3f(-0.5, 0.5, -0.5) - glEnd() - - glBegin(GL_QUADS) - glColor3f(1.0, 0.0, 0.0) - glVertex3f(-0.5, -0.5, 0.5) - - glColor3f(1.0, 1.0, 0.0) - glVertex3f(0.5, -0.5, 0.5) - - glColor3f(0.0, 1.0, 0.0) - glVertex3f(0.5, -0.5, -0.5) - - glColor3f(0.0, 0.0, 0.0) - glVertex3f(-0.5, -0.5, -0.5) - glEnd() - - glPopMatrix() - - glFinish() - SwapBuffers(wglGetCurrentDC()) - - self.busy = 0 - - -def test(): - template = docview.DocTemplate(None, None, None, CubeView) - # template = docview.DocTemplate(None, None, None, TestView ) - template.OpenDocumentFile(None) - - -if __name__ == "__main__": - test() diff --git a/lib/pythonwin/pywin/Demos/progressbar.py b/lib/pythonwin/pywin/Demos/progressbar.py deleted file mode 100644 index 81cd7e38..00000000 --- a/lib/pythonwin/pywin/Demos/progressbar.py +++ /dev/null @@ -1,105 +0,0 @@ -# -# Progress bar control example -# -# PyCProgressCtrl encapsulates the MFC CProgressCtrl class. To use it, -# you: -# -# - Create the control with win32ui.CreateProgressCtrl() -# - Create the control window with PyCProgressCtrl.CreateWindow() -# - Initialize the range if you want it to be other than (0, 100) using -# PyCProgressCtrl.SetRange() -# - Either: -# - Set the step size with PyCProgressCtrl.SetStep(), and -# - Increment using PyCProgressCtrl.StepIt() -# or: -# - Set the amount completed using PyCProgressCtrl.SetPos() -# -# Example and progress bar code courtesy of KDL Technologies, Ltd., Hong Kong SAR, China. 
-# - -import win32con -import win32ui -from pywin.mfc import dialog - - -def MakeDlgTemplate(): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - - w = 215 - h = 36 - - dlg = [ - [ - "Progress bar control example", - (0, 0, w, h), - style, - None, - (8, "MS Sans Serif"), - ], - ] - - s = win32con.WS_TABSTOP | cs - - dlg.append( - [ - 128, - "Tick", - win32con.IDOK, - (10, h - 18, 50, 14), - s | win32con.BS_DEFPUSHBUTTON, - ] - ) - - dlg.append( - [ - 128, - "Cancel", - win32con.IDCANCEL, - (w - 60, h - 18, 50, 14), - s | win32con.BS_PUSHBUTTON, - ] - ) - - return dlg - - -class TestDialog(dialog.Dialog): - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - self.pbar = win32ui.CreateProgressCtrl() - self.pbar.CreateWindow( - win32con.WS_CHILD | win32con.WS_VISIBLE, (10, 10, 310, 24), self, 1001 - ) - # self.pbar.SetStep (5) - self.progress = 0 - self.pincr = 5 - return rc - - def OnOK(self): - # NB: StepIt wraps at the end if you increment past the upper limit! - # self.pbar.StepIt() - self.progress = self.progress + self.pincr - if self.progress > 100: - self.progress = 100 - if self.progress <= 100: - self.pbar.SetPos(self.progress) - - -def demo(modal=0): - d = TestDialog(MakeDlgTemplate()) - if modal: - d.DoModal() - else: - d.CreateWindow() - - -if __name__ == "__main__": - demo(1) diff --git a/lib/pythonwin/pywin/Demos/sliderdemo.py b/lib/pythonwin/pywin/Demos/sliderdemo.py deleted file mode 100644 index 9fc7b570..00000000 --- a/lib/pythonwin/pywin/Demos/sliderdemo.py +++ /dev/null @@ -1,76 +0,0 @@ -# sliderdemo.py -# Demo of the slider control courtesy of Mike Fletcher. 
- -import win32con -import win32ui -from pywin.mfc import dialog - - -class MyDialog(dialog.Dialog): - """ - Example using simple controls - """ - - _dialogstyle = ( - win32con.WS_MINIMIZEBOX - | win32con.WS_DLGFRAME - | win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - _buttonstyle = ( - win32con.BS_PUSHBUTTON - | win32con.WS_TABSTOP - | win32con.WS_CHILD - | win32con.WS_VISIBLE - ) - ### The static template, contains all "normal" dialog items - DIALOGTEMPLATE = [ - # the dialog itself is the first element in the template - ["Example slider", (0, 0, 50, 43), _dialogstyle, None, (8, "MS SansSerif")], - # rest of elements are the controls within the dialog - # standard "Close" button - [128, "Close", win32con.IDCANCEL, (0, 30, 50, 13), _buttonstyle], - ] - ### ID of the control to be created during dialog initialisation - IDC_SLIDER = 9500 - - def __init__(self): - dialog.Dialog.__init__(self, self.DIALOGTEMPLATE) - - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - # now initialise your controls that you want to create - # programmatically, including those which are OLE controls - # those created directly by win32ui.Create* - # and your "custom controls" which are subclasses/whatever - win32ui.EnableControlContainer() - self.slider = win32ui.CreateSliderCtrl() - self.slider.CreateWindow( - win32con.WS_TABSTOP | win32con.WS_VISIBLE, - (0, 0, 100, 30), - self._obj_, - self.IDC_SLIDER, - ) - self.HookMessage(self.OnSliderMove, win32con.WM_HSCROLL) - return rc - - def OnSliderMove(self, params): - print("Slider moved") - - def OnCancel(self): - print("The slider control is at position", self.slider.GetPos()) - self._obj_.OnCancel() - - -### -def demo(): - dia = MyDialog() - dia.DoModal() - - -if __name__ == "__main__": - demo() diff --git a/lib/pythonwin/pywin/Demos/splittst.py b/lib/pythonwin/pywin/Demos/splittst.py deleted file mode 100644 index 
0114bf09..00000000 --- a/lib/pythonwin/pywin/Demos/splittst.py +++ /dev/null @@ -1,79 +0,0 @@ -import commctrl -import fontdemo -import win32ui -from pywin.mfc import docview, window - -# derive from CMDIChild. This does much work for us. - - -class SplitterFrame(window.MDIChildWnd): - def __init__(self): - # call base CreateFrame - self.images = None - window.MDIChildWnd.__init__(self) - - def OnCreateClient(self, cp, context): - splitter = win32ui.CreateSplitter() - doc = context.doc - frame_rect = self.GetWindowRect() - size = ((frame_rect[2] - frame_rect[0]), (frame_rect[3] - frame_rect[1]) // 2) - sub_size = (size[0] // 2, size[1]) - splitter.CreateStatic(self, 2, 1) - self.v1 = win32ui.CreateEditView(doc) - self.v2 = fontdemo.FontView(doc) - # CListControl view - self.v3 = win32ui.CreateListView(doc) - sub_splitter = win32ui.CreateSplitter() - # pass "splitter" so each view knows how to get to the others - sub_splitter.CreateStatic(splitter, 1, 2) - sub_splitter.CreateView(self.v1, 0, 0, (sub_size)) - sub_splitter.CreateView(self.v2, 0, 1, (0, 0)) # size ignored. - splitter.SetRowInfo(0, size[1], 0) - splitter.CreateView(self.v3, 1, 0, (0, 0)) # size ignored. - # Setup items in the imagelist - self.images = win32ui.CreateImageList(32, 32, 1, 5, 5) - self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_MAINFRAME)) - self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_PYTHONCONTYPE)) - self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_TEXTTYPE)) - self.v3.SetImageList(self.images, commctrl.LVSIL_NORMAL) - self.v3.InsertItem(0, "Icon 1", 0) - self.v3.InsertItem(0, "Icon 2", 1) - self.v3.InsertItem(0, "Icon 3", 2) - # self.v3.Arrange(commctrl.LVA_DEFAULT) Hmmm - win95 aligns left always??? 
- return 1 - - def OnDestroy(self, msg): - window.MDIChildWnd.OnDestroy(self, msg) - if self.images: - self.images.DeleteImageList() - self.images = None - - def InitialUpdateFrame(self, doc, makeVisible): - self.v1.ReplaceSel("Hello from Edit Window 1") - self.v1.SetModifiedFlag(0) - - -class SampleTemplate(docview.DocTemplate): - def __init__(self): - docview.DocTemplate.__init__( - self, win32ui.IDR_PYTHONTYPE, None, SplitterFrame, None - ) - - def InitialUpdateFrame(self, frame, doc, makeVisible): - # print "frame is ", frame, frame._obj_ - # print "doc is ", doc, doc._obj_ - self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler. - frame.InitialUpdateFrame(doc, makeVisible) - - -def demo(): - template = SampleTemplate() - doc = template.OpenDocumentFile(None) - doc.SetTitle("Splitter Demo") - - -if __name__ == "__main__": - import demoutils - - if demoutils.NeedGoodGUI(): - demo() diff --git a/lib/pythonwin/pywin/Demos/threadedgui.py b/lib/pythonwin/pywin/Demos/threadedgui.py deleted file mode 100644 index bbe1369e..00000000 --- a/lib/pythonwin/pywin/Demos/threadedgui.py +++ /dev/null @@ -1,189 +0,0 @@ -# Demo of using just windows, without documents and views. - -# Also demo of a GUI thread, pretty much direct from the MFC C++ sample MTMDI. - -import timer -import win32api -import win32con -import win32ui -from pywin.mfc import docview, thread, window -from pywin.mfc.thread import WinThread - -WM_USER_PREPARE_TO_CLOSE = win32con.WM_USER + 32 - -# font is a dictionary in which the following elements matter: -# (the best matching font to supplied parameters is returned) -# name string name of the font as known by Windows -# size point size of font in logical units -# weight weight of font (win32con.FW_NORMAL, win32con.FW_BOLD) -# italic boolean; true if set to anything but None -# underline boolean; true if set to anything but None - - -# This window is a child window of a frame. It is not the frame window itself. 
-class FontWindow(window.Wnd): - def __init__(self, text="Python Rules!"): - window.Wnd.__init__(self) - self.text = text - self.index = 0 - self.incr = 1 - self.width = self.height = 0 - self.ChangeAttributes() - # set up message handlers - - def Create(self, title, style, rect, parent): - classStyle = win32con.CS_HREDRAW | win32con.CS_VREDRAW - className = win32ui.RegisterWndClass( - classStyle, 0, win32con.COLOR_WINDOW + 1, 0 - ) - self._obj_ = win32ui.CreateWnd() - self._obj_.AttachObject(self) - self._obj_.CreateWindow( - className, title, style, rect, parent, win32ui.AFX_IDW_PANE_FIRST - ) - self.HookMessage(self.OnSize, win32con.WM_SIZE) - self.HookMessage(self.OnPrepareToClose, WM_USER_PREPARE_TO_CLOSE) - self.HookMessage(self.OnDestroy, win32con.WM_DESTROY) - self.timerid = timer.set_timer(100, self.OnTimer) - self.InvalidateRect() - - def OnDestroy(self, msg): - timer.kill_timer(self.timerid) - - def OnTimer(self, id, timeVal): - self.index = self.index + self.incr - if self.index > len(self.text): - self.incr = -1 - self.index = len(self.text) - elif self.index < 0: - self.incr = 1 - self.index = 0 - self.InvalidateRect() - - def OnPaint(self): - # print "Paint message from thread", win32api.GetCurrentThreadId() - dc, paintStruct = self.BeginPaint() - self.OnPrepareDC(dc, None) - - if self.width == 0 and self.height == 0: - left, top, right, bottom = self.GetClientRect() - self.width = right - left - self.height = bottom - top - x, y = self.width // 2, self.height // 2 - dc.TextOut(x, y, self.text[: self.index]) - self.EndPaint(paintStruct) - - def ChangeAttributes(self): - font_spec = {"name": "Arial", "height": 42} - self.font = win32ui.CreateFont(font_spec) - - def OnPrepareToClose(self, params): - self.DestroyWindow() - - def OnSize(self, params): - lParam = params[3] - self.width = win32api.LOWORD(lParam) - self.height = win32api.HIWORD(lParam) - - def OnPrepareDC(self, dc, printinfo): - # Set up the DC for forthcoming OnDraw call - 
dc.SetTextColor(win32api.RGB(0, 0, 255)) - dc.SetBkColor(win32api.GetSysColor(win32con.COLOR_WINDOW)) - dc.SelectObject(self.font) - dc.SetTextAlign(win32con.TA_CENTER | win32con.TA_BASELINE) - - -class FontFrame(window.MDIChildWnd): - def __init__(self): - pass # Dont call base class doc/view version... - - def Create(self, title, rect=None, parent=None): - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW - self._obj_ = win32ui.CreateMDIChild() - self._obj_.AttachObject(self) - - self._obj_.CreateWindow(None, title, style, rect, parent) - rect = self.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - self.child = FontWindow("Not threaded") - self.child.Create( - "FontDemo", win32con.WS_CHILD | win32con.WS_VISIBLE, rect, self - ) - - -class TestThread(WinThread): - def __init__(self, parentWindow): - self.parentWindow = parentWindow - self.child = None - WinThread.__init__(self) - - def InitInstance(self): - rect = self.parentWindow.GetClientRect() - rect = (0, 0, rect[2] - rect[0], rect[3] - rect[1]) - - self.child = FontWindow() - self.child.Create( - "FontDemo", win32con.WS_CHILD | win32con.WS_VISIBLE, rect, self.parentWindow - ) - self.SetMainFrame(self.child) - return WinThread.InitInstance(self) - - def ExitInstance(self): - return 0 - - -class ThreadedFontFrame(window.MDIChildWnd): - def __init__(self): - pass # Dont call base class doc/view version... 
- self.thread = None - - def Create(self, title, rect=None, parent=None): - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW - self._obj_ = win32ui.CreateMDIChild() - self._obj_.CreateWindow(None, title, style, rect, parent) - self._obj_.HookMessage(self.OnDestroy, win32con.WM_DESTROY) - self._obj_.HookMessage(self.OnSize, win32con.WM_SIZE) - - self.thread = TestThread(self) - self.thread.CreateThread() - - def OnSize(self, msg): - pass - - def OnDestroy(self, msg): - win32ui.OutputDebugString("OnDestroy\n") - if self.thread and self.thread.child: - child = self.thread.child - child.SendMessage(WM_USER_PREPARE_TO_CLOSE, 0, 0) - win32ui.OutputDebugString("Destroyed\n") - - -def Demo(): - f = FontFrame() - f.Create("Font Demo") - - -def ThreadedDemo(): - rect = win32ui.GetMainFrame().GetMDIClient().GetClientRect() - rect = rect[0], int(rect[3] * 3 / 4), int(rect[2] / 4), rect[3] - incr = rect[2] - for i in range(4): - if i == 0: - f = FontFrame() - title = "Not threaded" - else: - f = ThreadedFontFrame() - title = "Threaded GUI Demo" - f.Create(title, rect) - rect = rect[0] + incr, rect[1], rect[2] + incr, rect[3] - # Givem a chance to start - win32api.Sleep(100) - win32ui.PumpWaitingMessages() - - -if __name__ == "__main__": - import demoutils - - if demoutils.NeedGoodGUI(): - ThreadedDemo() -# Demo() diff --git a/lib/pythonwin/pywin/Demos/toolbar.py b/lib/pythonwin/pywin/Demos/toolbar.py deleted file mode 100644 index e56aefef..00000000 --- a/lib/pythonwin/pywin/Demos/toolbar.py +++ /dev/null @@ -1,106 +0,0 @@ -# Demo of ToolBars - -# Shows the toolbar control. -# Demos how to make custom tooltips, etc. 
- -import commctrl -import win32api -import win32con -import win32ui -from pywin.mfc import afxres, docview, window - - -class GenericFrame(window.MDIChildWnd): - def OnCreateClient(self, cp, context): - # handlers for toolbar buttons - self.HookCommand(self.OnPrevious, 401) - self.HookCommand(self.OnNext, 402) - # Its not necessary for us to hook both of these - the - # common controls should fall-back all by themselves. - # Indeed, given we hook TTN_NEEDTEXTW, commctrl.TTN_NEEDTEXTA - # will not be called. - self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXT) - self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXTW) - - # parent = win32ui.GetMainFrame() - parent = self - style = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | afxres.CBRS_SIZE_DYNAMIC - | afxres.CBRS_TOP - | afxres.CBRS_TOOLTIPS - | afxres.CBRS_FLYBY - ) - - buttons = (win32ui.ID_APP_ABOUT, win32ui.ID_VIEW_INTERACTIVE) - bitmap = win32ui.IDB_BROWSER_HIER - tbid = 0xE840 - self.toolbar = tb = win32ui.CreateToolBar(parent, style, tbid) - tb.LoadBitmap(bitmap) - tb.SetButtons(buttons) - - tb.EnableDocking(afxres.CBRS_ALIGN_ANY) - tb.SetWindowText("Test") - parent.EnableDocking(afxres.CBRS_ALIGN_ANY) - parent.DockControlBar(tb) - parent.LoadBarState("ToolbarTest") - window.MDIChildWnd.OnCreateClient(self, cp, context) - return 1 - - def OnDestroy(self, msg): - self.SaveBarState("ToolbarTest") - - def GetTTText(self, std, extra): - (hwndFrom, idFrom, code) = std - text, hinst, flags = extra - if flags & commctrl.TTF_IDISHWND: - return # Not handled - if idFrom == win32ui.ID_APP_ABOUT: - # our 'extra' return value needs to be the following - # entries from a NMTTDISPINFO[W] struct: - # (szText, hinst, uFlags). None means 'don't change - # the value' - return 0, ("It works!", None, None) - return None # not handled. 
- - def GetMessageString(self, id): - if id == win32ui.ID_APP_ABOUT: - return "Dialog Test\nTest" - else: - return self._obj_.GetMessageString(id) - - def OnSize(self, params): - print("OnSize called with ", params) - - def OnNext(self, id, cmd): - print("OnNext called") - - def OnPrevious(self, id, cmd): - print("OnPrevious called") - - -msg = """\ -This toolbar was dynamically created.\r -\r -The first item's tooltips is provided by Python code.\r -\r -(Dont close the window with the toolbar in a floating state - it may not re-appear!)\r -""" - - -def test(): - template = docview.DocTemplate( - win32ui.IDR_PYTHONTYPE, None, GenericFrame, docview.EditView - ) - doc = template.OpenDocumentFile(None) - doc.SetTitle("Toolbar Test") - view = doc.GetFirstView() - view.SetWindowText(msg) - - -if __name__ == "__main__": - import demoutils - - if demoutils.NeedGoodGUI(): - test() diff --git a/lib/pythonwin/pywin/IDLE.cfg b/lib/pythonwin/pywin/IDLE.cfg deleted file mode 100644 index b1987b14..00000000 --- a/lib/pythonwin/pywin/IDLE.cfg +++ /dev/null @@ -1,29 +0,0 @@ -[General] -# We base this configuration on the default config. -# You can list "Based On" as many times as you like -Based On = default - -[Keys] -# Only list keys different to default. -# Note you may wish to rebind some of the default -# Pythonwin keys to "Beep" or "DoNothing" - -Alt+L = LocateSelectedFile -Ctrl+Q = AppExit - -# Other non-default Pythonwin keys -Alt+A = EditSelectAll -Alt+M = LocateModule - -# Movement -Ctrl+D = GotoEndOfFile - -# Tabs and other indent features -Alt+T = <> -Ctrl+[ = <> -Ctrl+] = <> - -[Keys:Interactive] -Alt+P = <> -Alt+N = <> - diff --git a/lib/pythonwin/pywin/__init__.py b/lib/pythonwin/pywin/__init__.py deleted file mode 100644 index 2e44fba5..00000000 --- a/lib/pythonwin/pywin/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# is_platform_unicode is an old variable that was never correctly used and -# is no longer referenced in pywin32. 
It is staying for a few releases incase -# others are looking at it, but it will go away soon! -is_platform_unicode = 0 - -# Ditto default_platform_encoding - not referenced and will die. -default_platform_encoding = "mbcs" - -# This one *is* real and used - but in practice can't be changed. -default_scintilla_encoding = "utf-8" # Scintilla _only_ supports this ATM diff --git a/lib/pythonwin/pywin/default.cfg b/lib/pythonwin/pywin/default.cfg deleted file mode 100644 index 55371f6b..00000000 --- a/lib/pythonwin/pywin/default.cfg +++ /dev/null @@ -1,215 +0,0 @@ -# The default keyboard etc configuration file for Pythonwin. -# -# The format of this file is very similar to a Windows INI file. -# Sections are identified with [Section] lines, but comments -# use the standatd Python # character. Depending on the section, -# lines may not be in the standard "key=value" format. - -# NOTE: You should not need to modify this file. -# Simply create a new .CFG file, and add an entry: -# [General] -# BasedOn = Default -# -# and add your customisations. Then select your new configuration -# from the Pythonwin View/Options/Editor dialog. -# This way you get to add your own customisations, -# but still take advantage of changes to the default -# configuration in new releases. - -# See IDLE.cfg for an example extension configuration. -# -########################################################################## - -[IDLE Extensions] - -# The list of IDLE extensions to load. The extensions -# AutoIndent, AutoFormat and possibly others are -# "built-in", so do not need specifying. - -FormatParagraph -CallTips - - -[Keys] - -# The list of _default_ key definitions. -# See [Keys:Interactive] and [Keys:Editor] below for further defs. - -#Events of the format <> -# are events defined in IDLE extensions. - -Alt+Q = <> - -Ctrl+W = ViewWhitespace -Ctrl+Shift+8 = ViewWhitespace # The MSVC default key def. - -Ctrl+Shift+F = ViewFixedFont - -# Auto-complete, call-tips, etc. 
-Alt+/ = <> -Ctrl+Space = <> -( = <> -) = <> -Up = <> -Down = <> -Left = <> -Right = <> -. = KeyDot - -# Debugger - These are the MSVC default keys, for want of a better choice. -F9 = DbgBreakpointToggle -F5 = DbgGo -Shift+F5 = DbgClose -F11 = DbgStep -F10 = DbgStepOver -Shift+F11 = DbgStepOut - -Ctrl+F3 = AutoFindNext - - -[Keys:Editor] -# Key bindings specific to the editor -F2 = GotoNextBookmark -Ctrl+F2 = ToggleBookmark -Ctrl+G = GotoLine - -Alt+I = ShowInteractiveWindow -Alt-B = AddBanner # A sample Event defined in this file. - -# Block operations -Alt+3 = <> -Shift+Alt+3 = <> -Alt+4 = <> # IDLE default. -Alt+5 = <> -Alt+6 = <> - -# Tabs and other indent features -Back = <> -Ctrl+T = <> -Alt+U = <> -Enter = EnterKey -Tab = TabKey -Shift-Tab = <> - -# Folding -Add = FoldExpand -Alt+Add = FoldExpandAll -Shift+Add = FoldExpandSecondLevel -Subtract = FoldCollapse -Alt+Subtract = FoldCollapseAll -Shift+Subtract = FoldCollapseSecondLevel -Multiply = FoldTopLevel - -[Keys:Interactive] -# Key bindings specific to the interactive window. -# History for the interactive window -Ctrl+Up = <> -Ctrl+Down = <> -Enter = ProcessEnter -Ctrl+Enter = ProcessEnter -Shift+Enter = ProcessEnter -Esc = ProcessEsc -Alt+I = WindowBack # Toggle back to previous window. -Home = InteractiveHome # A sample Event defined in this file. -Shift+Home = InteractiveHomeExtend # A sample Event defined in this file. - -# When docked, the Ctrl+Tab and Shift+Ctrl+Tab keys dont work as expected. -Ctrl+Tab = MDINext -Ctrl+Shift+Tab = MDIPrev - -[Extensions] -# Python event handlers specific to this config file. -# All functions not starting with an "_" are assumed -# to be events, and take 2 params: -# * editor_window is the same object passed to IDLE -# extensions. editor_window.text is a text widget -# that conforms to the Tk text widget interface. -# * event is the event being fired. Will always be None -# in the current implementation. 
- -# Simply by defining these functions, they are available as -# events. -# Note that we bind keystrokes to these events in the various -# [Keys] sections. - -# Add a simple file/class/function simple banner -def AddBanner(editor_window, event): - - text = editor_window.text - big_line = "#" * 70 - banner = "%s\n## \n## \n## \n%s\n" % (big_line, big_line) - - # Insert at the start of the current line. - pos = text.index("insert linestart") - - text.undo_block_start() # Allow action to be undone as a single unit. - text.insert(pos, banner) - text.undo_block_stop() - - # Now set the insert point to the middle of the banner. - line, col = [int(s) for s in pos.split(".")] - text.mark_set("insert", "%d.1 lineend" % (line+2, ) ) - - -# Here is a sample event bound to the "Home" key in the -# interactive window -def InteractiveHome(editor_window, event): - return _DoInteractiveHome(editor_window.text, 0) - -def InteractiveHomeExtend(editor_window, event): - return _DoInteractiveHome(editor_window.text, 1) - -def _DoInteractiveHome(text, extend): - import sys - # If Scintilla has an autocomplete window open, then let Scintilla handle it. - if text.edit.SCIAutoCActive(): - return 1 - of_interest = "insert linestart + %d c" % len(sys.ps1) - if not text.compare("insert", "==", of_interest) and \ - text.get("insert linestart", of_interest) in [sys.ps1, sys.ps2]: # Not sys.ps? 
line - end = of_interest - else: - end = "insert linestart" - - if extend: start = "insert" - else: start = end - text.tag_add("sel", start, end) - -# From Niki Spahie -def AutoFindNext(editor_window, event): - "find selected text or word under cursor" - - from pywin.scintilla import find - from pywin.scintilla import scintillacon - - try: - sci = editor_window.edit - word = sci.GetSelText() - if word: - find.lastSearch.findText = word - find.lastSearch.sel = sci.GetSel() - else: - pos = sci.SendScintilla( scintillacon.SCI_GETCURRENTPOS ) - start = sci.SendScintilla( scintillacon.SCI_WORDSTARTPOSITION, pos, 1 ) - end = sci.SendScintilla( scintillacon.SCI_WORDENDPOSITION, pos, 1 ) - word = sci.GetTextRange( start, end ) - if word: - find.lastSearch.findText = word - find.lastSearch.sel = (start,end) - except Exception: - import traceback - traceback.print_exc() - find.FindNext() - - -# A couple of generic events. -def Beep(editor_window, event): - editor_window.text.beep() - -def DoNothing(editor_window, event): - pass - -def ContinueEvent(editor_window, event): - # Almost an "unbind" - allows Pythonwin/MFC to handle the keystroke - return 1 - diff --git a/lib/pythonwin/pywin/dialogs/__init__.py b/lib/pythonwin/pywin/dialogs/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pythonwin/pywin/dialogs/ideoptions.py b/lib/pythonwin/pywin/dialogs/ideoptions.py deleted file mode 100644 index f1bae90d..00000000 --- a/lib/pythonwin/pywin/dialogs/ideoptions.py +++ /dev/null @@ -1,139 +0,0 @@ -# The property page to define generic IDE options for Pythonwin - -import win32con -import win32ui -from pywin.framework import interact -from pywin.mfc import dialog - -buttonControlMap = { - win32ui.IDC_BUTTON1: win32ui.IDC_EDIT1, - win32ui.IDC_BUTTON2: win32ui.IDC_EDIT2, - win32ui.IDC_BUTTON3: win32ui.IDC_EDIT3, -} - - -class OptionsPropPage(dialog.PropertyPage): - def __init__(self): - dialog.PropertyPage.__init__(self, win32ui.IDD_PP_IDE) - 
self.AddDDX(win32ui.IDC_CHECK1, "bShowAtStartup") - self.AddDDX(win32ui.IDC_CHECK2, "bDocking") - self.AddDDX(win32ui.IDC_EDIT4, "MRUSize", "i") - - def OnInitDialog(self): - edit = self.GetDlgItem(win32ui.IDC_EDIT1) - format = eval( - win32ui.GetProfileVal( - interact.sectionProfile, - interact.STYLE_INTERACTIVE_PROMPT, - str(interact.formatInput), - ) - ) - edit.SetDefaultCharFormat(format) - edit.SetWindowText("Input Text") - - edit = self.GetDlgItem(win32ui.IDC_EDIT2) - format = eval( - win32ui.GetProfileVal( - interact.sectionProfile, - interact.STYLE_INTERACTIVE_OUTPUT, - str(interact.formatOutput), - ) - ) - edit.SetDefaultCharFormat(format) - edit.SetWindowText("Output Text") - - edit = self.GetDlgItem(win32ui.IDC_EDIT3) - format = eval( - win32ui.GetProfileVal( - interact.sectionProfile, - interact.STYLE_INTERACTIVE_ERROR, - str(interact.formatOutputError), - ) - ) - edit.SetDefaultCharFormat(format) - edit.SetWindowText("Error Text") - - self["bShowAtStartup"] = interact.LoadPreference("Show at startup", 1) - self["bDocking"] = interact.LoadPreference("Docking", 0) - self["MRUSize"] = win32ui.GetProfileVal("Settings", "Recent File List Size", 10) - - # Hook the button clicks. - self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON1) - self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON2) - self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON3) - - # Ensure the spin control remains in range. - spinner = self.GetDlgItem(win32ui.IDC_SPIN1) - spinner.SetRange(1, 16) - - return dialog.PropertyPage.OnInitDialog(self) - - # Called to save away the new format tuple for the specified item. 
- def HandleCharFormatChange(self, id, code): - if code == win32con.BN_CLICKED: - editId = buttonControlMap.get(id) - assert editId is not None, "Format button has no associated edit control" - editControl = self.GetDlgItem(editId) - existingFormat = editControl.GetDefaultCharFormat() - flags = win32con.CF_SCREENFONTS - d = win32ui.CreateFontDialog(existingFormat, flags, None, self) - if d.DoModal() == win32con.IDOK: - cf = d.GetCharFormat() - editControl.SetDefaultCharFormat(cf) - self.SetModified(1) - return 0 # We handled this fully! - - def OnOK(self): - # Handle the edit controls - get all the fonts, put them back into interact, then - # get interact to save its stuff! - controlAttrs = [ - (win32ui.IDC_EDIT1, interact.STYLE_INTERACTIVE_PROMPT), - (win32ui.IDC_EDIT2, interact.STYLE_INTERACTIVE_OUTPUT), - (win32ui.IDC_EDIT3, interact.STYLE_INTERACTIVE_ERROR), - ] - for id, key in controlAttrs: - control = self.GetDlgItem(id) - fmt = control.GetDefaultCharFormat() - win32ui.WriteProfileVal(interact.sectionProfile, key, str(fmt)) - - # Save the other interactive window options. - interact.SavePreference("Show at startup", self["bShowAtStartup"]) - interact.SavePreference("Docking", self["bDocking"]) - - # And the other options. 
- win32ui.WriteProfileVal("Settings", "Recent File List Size", self["MRUSize"]) - - return 1 - - def ChangeFormat(self, fmtAttribute, fmt): - dlg = win32ui.CreateFontDialog(fmt) - if dlg.DoModal() != win32con.IDOK: - return None - return dlg.GetCharFormat() - - def OnFormatTitle(self, command, code): - fmt = self.GetFormat(interact.formatTitle) - if fmt: - formatTitle = fmt - SaveFontPreferences() - - def OnFormatInput(self, command, code): - global formatInput - fmt = self.GetFormat(formatInput) - if fmt: - formatInput = fmt - SaveFontPreferences() - - def OnFormatOutput(self, command, code): - global formatOutput - fmt = self.GetFormat(formatOutput) - if fmt: - formatOutput = fmt - SaveFontPreferences() - - def OnFormatError(self, command, code): - global formatOutputError - fmt = self.GetFormat(formatOutputError) - if fmt: - formatOutputError = fmt - SaveFontPreferences() diff --git a/lib/pythonwin/pywin/dialogs/list.py b/lib/pythonwin/pywin/dialogs/list.py deleted file mode 100644 index b9934ce7..00000000 --- a/lib/pythonwin/pywin/dialogs/list.py +++ /dev/null @@ -1,143 +0,0 @@ -import commctrl -import win32api -import win32con -import win32ui -from pywin.mfc import dialog - - -class ListDialog(dialog.Dialog): - def __init__(self, title, list): - dialog.Dialog.__init__(self, self._maketemplate(title)) - self.HookMessage(self.on_size, win32con.WM_SIZE) - self.HookNotify(self.OnListItemChange, commctrl.LVN_ITEMCHANGED) - self.HookCommand(self.OnListClick, win32ui.IDC_LIST1) - self.items = list - - def _maketemplate(self, title): - style = win32con.WS_DLGFRAME | win32con.WS_SYSMENU | win32con.WS_VISIBLE - ls = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | commctrl.LVS_ALIGNLEFT - | commctrl.LVS_REPORT - ) - bs = win32con.WS_CHILD | win32con.WS_VISIBLE - return [ - [title, (0, 0, 200, 200), style, None, (8, "MS Sans Serif")], - ["SysListView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), ls], - [128, "OK", win32con.IDOK, (10, 0, 50, 14), bs | 
win32con.BS_DEFPUSHBUTTON], - [128, "Cancel", win32con.IDCANCEL, (0, 0, 50, 14), bs], - ] - - def FillList(self): - size = self.GetWindowRect() - width = size[2] - size[0] - (10) - itemDetails = (commctrl.LVCFMT_LEFT, width, "Item", 0) - self.itemsControl.InsertColumn(0, itemDetails) - index = 0 - for item in self.items: - index = self.itemsControl.InsertItem(index + 1, str(item), 0) - - def OnListClick(self, id, code): - if code == commctrl.NM_DBLCLK: - self.EndDialog(win32con.IDOK) - return 1 - - def OnListItemChange(self, std, extra): - (hwndFrom, idFrom, code), ( - itemNotify, - sub, - newState, - oldState, - change, - point, - lparam, - ) = (std, extra) - oldSel = (oldState & commctrl.LVIS_SELECTED) != 0 - newSel = (newState & commctrl.LVIS_SELECTED) != 0 - if oldSel != newSel: - try: - self.selecteditem = itemNotify - self.butOK.EnableWindow(1) - except win32ui.error: - self.selecteditem = None - - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - self.itemsControl = self.GetDlgItem(win32ui.IDC_LIST1) - self.butOK = self.GetDlgItem(win32con.IDOK) - self.butCancel = self.GetDlgItem(win32con.IDCANCEL) - - self.FillList() - - size = self.GetWindowRect() - self.LayoutControls(size[2] - size[0], size[3] - size[1]) - self.butOK.EnableWindow(0) # wait for first selection - return rc - - def LayoutControls(self, w, h): - self.itemsControl.MoveWindow((0, 0, w, h - 30)) - self.butCancel.MoveWindow((10, h - 24, 60, h - 4)) - self.butOK.MoveWindow((w - 60, h - 24, w - 10, h - 4)) - - def on_size(self, params): - lparam = params[3] - w = win32api.LOWORD(lparam) - h = win32api.HIWORD(lparam) - self.LayoutControls(w, h) - - -class ListsDialog(ListDialog): - def __init__(self, title, list, colHeadings=["Item"]): - ListDialog.__init__(self, title, list) - self.colHeadings = colHeadings - - def FillList(self): - index = 0 - size = self.GetWindowRect() - width = ( - size[2] - size[0] - (10) - win32api.GetSystemMetrics(win32con.SM_CXVSCROLL) - ) - numCols = 
len(self.colHeadings) - - for col in self.colHeadings: - itemDetails = (commctrl.LVCFMT_LEFT, int(width / numCols), col, 0) - self.itemsControl.InsertColumn(index, itemDetails) - index = index + 1 - index = 0 - for items in self.items: - index = self.itemsControl.InsertItem(index + 1, str(items[0]), 0) - for itemno in range(1, numCols): - item = items[itemno] - self.itemsControl.SetItemText(index, itemno, str(item)) - - -def SelectFromList(title, lst): - dlg = ListDialog(title, lst) - if dlg.DoModal() == win32con.IDOK: - return dlg.selecteditem - else: - return None - - -def SelectFromLists(title, lists, headings): - dlg = ListsDialog(title, lists, headings) - if dlg.DoModal() == win32con.IDOK: - return dlg.selecteditem - else: - return None - - -def test(): - # print SelectFromList('Single list', [1,2,3]) - print( - SelectFromLists( - "Multi-List", - [("1", 1, "a"), ("2", 2, "b"), ("3", 3, "c")], - ["Col 1", "Col 2"], - ) - ) - - -if __name__ == "__main__": - test() diff --git a/lib/pythonwin/pywin/dialogs/login.py b/lib/pythonwin/pywin/dialogs/login.py deleted file mode 100644 index 2a329569..00000000 --- a/lib/pythonwin/pywin/dialogs/login.py +++ /dev/null @@ -1,156 +0,0 @@ -"""login -- PythonWin user ID and password dialog box - -(Adapted from originally distributed with Mark Hammond's PythonWin - -this now replaces it!) - -login.GetLogin() displays a modal "OK/Cancel" dialog box with input -fields for a user ID and password. The password field input is masked -with *'s. GetLogin takes two optional parameters, a window title, and a -default user ID. If these parameters are omitted, the title defaults to -"Login", and the user ID is left blank. GetLogin returns a (userid, password) -tuple. GetLogin can be called from scripts running on the console - i.e. you -don't need to write a full-blown GUI app to use it. - -login.GetPassword() is similar, except there is no username field. 
- -Example: -import pywin.dialogs.login -title = "FTP Login" -def_user = "fred" -userid, password = pywin.dialogs.login.GetLogin(title, def_user) - -Jim Eggleston, 28 August 1996 -Merged with dlgpass and moved to pywin.dialogs by Mark Hammond Jan 1998. -""" - -import win32api -import win32con -import win32ui -from pywin.mfc import dialog - - -def MakeLoginDlgTemplate(title): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - - # Window frame and title - dlg = [ - [title, (0, 0, 184, 40), style, None, (8, "MS Sans Serif")], - ] - - # ID label and text box - dlg.append([130, "User ID:", -1, (7, 9, 69, 9), cs | win32con.SS_LEFT]) - s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER - dlg.append(["EDIT", None, win32ui.IDC_EDIT1, (50, 7, 60, 12), s]) - - # Password label and text box - dlg.append([130, "Password:", -1, (7, 22, 69, 9), cs | win32con.SS_LEFT]) - s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER - dlg.append( - ["EDIT", None, win32ui.IDC_EDIT2, (50, 20, 60, 12), s | win32con.ES_PASSWORD] - ) - - # OK/Cancel Buttons - s = cs | win32con.WS_TABSTOP - dlg.append( - [128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON] - ) - s = win32con.BS_PUSHBUTTON | s - dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 20, 50, 14), s]) - return dlg - - -def MakePasswordDlgTemplate(title): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - # Window frame and title - dlg = [ - [title, (0, 0, 177, 45), style, None, (8, "MS Sans Serif")], - ] - - # Password label and text box - dlg.append([130, "Password:", -1, (7, 7, 69, 9), cs | win32con.SS_LEFT]) - s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER - dlg.append( - ["EDIT", None, 
win32ui.IDC_EDIT1, (50, 7, 60, 12), s | win32con.ES_PASSWORD] - ) - - # OK/Cancel Buttons - s = cs | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON - dlg.append( - [128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON] - ) - dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14), s]) - return dlg - - -class LoginDlg(dialog.Dialog): - Cancel = 0 - - def __init__(self, title): - dialog.Dialog.__init__(self, MakeLoginDlgTemplate(title)) - self.AddDDX(win32ui.IDC_EDIT1, "userid") - self.AddDDX(win32ui.IDC_EDIT2, "password") - - -def GetLogin(title="Login", userid="", password=""): - d = LoginDlg(title) - d["userid"] = userid - d["password"] = password - if d.DoModal() != win32con.IDOK: - return (None, None) - else: - return (d["userid"], d["password"]) - - -class PasswordDlg(dialog.Dialog): - def __init__(self, title): - dialog.Dialog.__init__(self, MakePasswordDlgTemplate(title)) - self.AddDDX(win32ui.IDC_EDIT1, "password") - - -def GetPassword(title="Password", password=""): - d = PasswordDlg(title) - d["password"] = password - if d.DoModal() != win32con.IDOK: - return None - return d["password"] - - -if __name__ == "__main__": - import sys - - title = "Login" - def_user = "" - if len(sys.argv) > 1: - title = sys.argv[1] - if len(sys.argv) > 2: - def_userid = sys.argv[2] - userid, password = GetLogin(title, def_user) - if userid == password == None: - print("User pressed Cancel") - else: - print("User ID: ", userid) - print("Password:", password) - newpassword = GetPassword("Reenter just for fun", password) - if newpassword is None: - print("User cancelled") - else: - what = "" - if newpassword != password: - what = "not " - print("The passwords did %smatch" % (what)) diff --git a/lib/pythonwin/pywin/dialogs/status.py b/lib/pythonwin/pywin/dialogs/status.py deleted file mode 100644 index aef33914..00000000 --- a/lib/pythonwin/pywin/dialogs/status.py +++ /dev/null @@ -1,242 +0,0 @@ -# No cancel button. 
- -import threading -import time - -import win32api -import win32con -import win32ui -from pywin.mfc import dialog -from pywin.mfc.thread import WinThread - - -def MakeProgressDlgTemplate(caption, staticText=""): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - - w = 215 - h = 36 # With button - h = 40 - - dlg = [ - [caption, (0, 0, w, h), style, None, (8, "MS Sans Serif")], - ] - - s = win32con.WS_TABSTOP | cs - - dlg.append([130, staticText, 1000, (7, 7, w - 7, h - 32), cs | win32con.SS_LEFT]) - - # dlg.append([128, - # "Cancel", - # win32con.IDCANCEL, - # (w - 60, h - 18, 50, 14), s | win32con.BS_PUSHBUTTON]) - - return dlg - - -class CStatusProgressDialog(dialog.Dialog): - def __init__(self, title, msg="", maxticks=100, tickincr=1): - self.initMsg = msg - templ = MakeProgressDlgTemplate(title, msg) - dialog.Dialog.__init__(self, templ) - self.maxticks = maxticks - self.tickincr = tickincr - self.pbar = None - - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - self.static = self.GetDlgItem(1000) - self.pbar = win32ui.CreateProgressCtrl() - self.pbar.CreateWindow( - win32con.WS_CHILD | win32con.WS_VISIBLE, (10, 30, 310, 44), self, 1001 - ) - self.pbar.SetRange(0, self.maxticks) - self.pbar.SetStep(self.tickincr) - self.progress = 0 - self.pincr = 5 - return rc - - def Close(self): - self.EndDialog(0) - - def SetMaxTicks(self, maxticks): - if self.pbar is not None: - self.pbar.SetRange(0, maxticks) - - def Tick(self): - if self.pbar is not None: - self.pbar.StepIt() - - def SetTitle(self, text): - self.SetWindowText(text) - - def SetText(self, text): - self.SetDlgItemText(1000, text) - - def Set(self, pos, max=None): - if self.pbar is not None: - self.pbar.SetPos(pos) - if max is not None: - self.pbar.SetRange(0, max) - - -# a progress dialog created in a new thread - especially suitable for 
-# console apps with no message loop. -MYWM_SETTITLE = win32con.WM_USER + 10 -MYWM_SETMSG = win32con.WM_USER + 11 -MYWM_TICK = win32con.WM_USER + 12 -MYWM_SETMAXTICKS = win32con.WM_USER + 13 -MYWM_SET = win32con.WM_USER + 14 - - -class CThreadedStatusProcessDialog(CStatusProgressDialog): - def __init__(self, title, msg="", maxticks=100, tickincr=1): - self.title = title - self.msg = msg - self.threadid = win32api.GetCurrentThreadId() - CStatusProgressDialog.__init__(self, title, msg, maxticks, tickincr) - - def OnInitDialog(self): - rc = CStatusProgressDialog.OnInitDialog(self) - self.HookMessage(self.OnTitle, MYWM_SETTITLE) - self.HookMessage(self.OnMsg, MYWM_SETMSG) - self.HookMessage(self.OnTick, MYWM_TICK) - self.HookMessage(self.OnMaxTicks, MYWM_SETMAXTICKS) - self.HookMessage(self.OnSet, MYWM_SET) - return rc - - def _Send(self, msg): - try: - self.PostMessage(msg) - except win32ui.error: - # the user closed the window - but this does not cancel the - # process - so just ignore it. - pass - - def OnTitle(self, msg): - CStatusProgressDialog.SetTitle(self, self.title) - - def OnMsg(self, msg): - CStatusProgressDialog.SetText(self, self.msg) - - def OnTick(self, msg): - CStatusProgressDialog.Tick(self) - - def OnMaxTicks(self, msg): - CStatusProgressDialog.SetMaxTicks(self, self.maxticks) - - def OnSet(self, msg): - CStatusProgressDialog.Set(self, self.pos, self.max) - - def Close(self): - assert self.threadid, "No thread!" 
- win32api.PostThreadMessage(self.threadid, win32con.WM_QUIT, 0, 0) - - def SetMaxTicks(self, maxticks): - self.maxticks = maxticks - self._Send(MYWM_SETMAXTICKS) - - def SetTitle(self, title): - self.title = title - self._Send(MYWM_SETTITLE) - - def SetText(self, text): - self.msg = text - self._Send(MYWM_SETMSG) - - def Tick(self): - self._Send(MYWM_TICK) - - def Set(self, pos, max=None): - self.pos = pos - self.max = max - self._Send(MYWM_SET) - - -class ProgressThread(WinThread): - def __init__(self, title, msg="", maxticks=100, tickincr=1): - self.title = title - self.msg = msg - self.maxticks = maxticks - self.tickincr = tickincr - self.dialog = None - WinThread.__init__(self) - self.createdEvent = threading.Event() - - def InitInstance(self): - self.dialog = CThreadedStatusProcessDialog( - self.title, self.msg, self.maxticks, self.tickincr - ) - self.dialog.CreateWindow() - try: - self.dialog.SetForegroundWindow() - except win32ui.error: - pass - self.createdEvent.set() - return WinThread.InitInstance(self) - - def ExitInstance(self): - return 0 - - -def StatusProgressDialog(title, msg="", maxticks=100, parent=None): - d = CStatusProgressDialog(title, msg, maxticks) - d.CreateWindow(parent) - return d - - -def ThreadedStatusProgressDialog(title, msg="", maxticks=100): - t = ProgressThread(title, msg, maxticks) - t.CreateThread() - # Need to run a basic "PumpWaitingMessages" loop just incase we are - # running inside Pythonwin. - # Basic timeout incase things go terribly wrong. 
Ideally we should use - # win32event.MsgWaitForMultipleObjects(), but we use a threading module - # event - so use a dumb strategy - end_time = time.time() + 10 - while time.time() < end_time: - if t.createdEvent.isSet(): - break - win32ui.PumpWaitingMessages() - time.sleep(0.1) - return t.dialog - - -def demo(): - d = StatusProgressDialog("A Demo", "Doing something...") - import win32api - - for i in range(100): - if i == 50: - d.SetText("Getting there...") - if i == 90: - d.SetText("Nearly done...") - win32api.Sleep(20) - d.Tick() - d.Close() - - -def thread_demo(): - d = ThreadedStatusProgressDialog("A threaded demo", "Doing something") - import win32api - - for i in range(100): - if i == 50: - d.SetText("Getting there...") - if i == 90: - d.SetText("Nearly done...") - win32api.Sleep(20) - d.Tick() - d.Close() - - -if __name__ == "__main__": - thread_demo() - # demo() diff --git a/lib/pythonwin/pywin/docking/DockingBar.py b/lib/pythonwin/pywin/docking/DockingBar.py deleted file mode 100644 index 3e51eb70..00000000 --- a/lib/pythonwin/pywin/docking/DockingBar.py +++ /dev/null @@ -1,679 +0,0 @@ -# DockingBar.py - -# Ported directly (comments and all) from the samples at www.codeguru.com - -# WARNING: Use at your own risk, as this interface is highly likely to change. -# Currently we support only one child per DockingBar. Later we need to add -# support for multiple children. 
- -import struct - -import win32api -import win32con -import win32ui -from pywin.mfc import afxres, window - -clrBtnHilight = win32api.GetSysColor(win32con.COLOR_BTNHILIGHT) -clrBtnShadow = win32api.GetSysColor(win32con.COLOR_BTNSHADOW) - - -def CenterPoint(rect): - width = rect[2] - rect[0] - height = rect[3] - rect[1] - return rect[0] + width // 2, rect[1] + height // 2 - - -def OffsetRect(rect, point): - (x, y) = point - return rect[0] + x, rect[1] + y, rect[2] + x, rect[3] + y - - -def DeflateRect(rect, point): - (x, y) = point - return rect[0] + x, rect[1] + y, rect[2] - x, rect[3] - y - - -def PtInRect(rect, pt): - return rect[0] <= pt[0] < rect[2] and rect[1] <= pt[1] < rect[3] - - -class DockingBar(window.Wnd): - def __init__(self, obj=None): - if obj is None: - obj = win32ui.CreateControlBar() - window.Wnd.__init__(self, obj) - self.dialog = None - self.nDockBarID = 0 - self.sizeMin = 32, 32 - self.sizeHorz = 200, 200 - self.sizeVert = 200, 200 - self.sizeFloat = 200, 200 - self.bTracking = 0 - self.bInRecalcNC = 0 - self.cxEdge = 6 - self.cxBorder = 3 - self.cxGripper = 20 - self.brushBkgd = win32ui.CreateBrush() - self.brushBkgd.CreateSolidBrush(win32api.GetSysColor(win32con.COLOR_BTNFACE)) - - # Support for diagonal resizing - self.cyBorder = 3 - self.cCaptionSize = win32api.GetSystemMetrics(win32con.SM_CYSMCAPTION) - self.cMinWidth = win32api.GetSystemMetrics(win32con.SM_CXMIN) - self.cMinHeight = win32api.GetSystemMetrics(win32con.SM_CYMIN) - self.rectUndock = (0, 0, 0, 0) - - def OnUpdateCmdUI(self, target, bDisableIfNoHndler): - return self.UpdateDialogControls(target, bDisableIfNoHndler) - - def CreateWindow( - self, - parent, - childCreator, - title, - id, - style=win32con.WS_CHILD | win32con.WS_VISIBLE | afxres.CBRS_LEFT, - childCreatorArgs=(), - ): - assert not ( - (style & afxres.CBRS_SIZE_FIXED) and (style & afxres.CBRS_SIZE_DYNAMIC) - ), "Invalid style" - self.rectClose = self.rectBorder = self.rectGripper = self.rectTracker = ( - 0, - 0, - 
0, - 0, - ) - - # save the style - self._obj_.dwStyle = style & afxres.CBRS_ALL - - cursor = win32api.LoadCursor(0, win32con.IDC_ARROW) - wndClass = win32ui.RegisterWndClass( - win32con.CS_DBLCLKS, cursor, self.brushBkgd.GetSafeHandle(), 0 - ) - - self._obj_.CreateWindow(wndClass, title, style, (0, 0, 0, 0), parent, id) - - # Create the child dialog - self.dialog = childCreator(*(self,) + childCreatorArgs) - - # use the dialog dimensions as default base dimensions - assert self.dialog.IsWindow(), ( - "The childCreator function %s did not create a window!" % childCreator - ) - rect = self.dialog.GetWindowRect() - self.sizeHorz = self.sizeVert = self.sizeFloat = ( - rect[2] - rect[0], - rect[3] - rect[1], - ) - - self.sizeHorz = self.sizeHorz[0], self.sizeHorz[1] + self.cxEdge + self.cxBorder - self.sizeVert = self.sizeVert[0] + self.cxEdge + self.cxBorder, self.sizeVert[1] - self.HookMessages() - - def CalcFixedLayout(self, bStretch, bHorz): - rectTop = self.dockSite.GetControlBar( - afxres.AFX_IDW_DOCKBAR_TOP - ).GetWindowRect() - rectLeft = self.dockSite.GetControlBar( - afxres.AFX_IDW_DOCKBAR_LEFT - ).GetWindowRect() - if bStretch: - nHorzDockBarWidth = 32767 - nVertDockBarHeight = 32767 - else: - nHorzDockBarWidth = rectTop[2] - rectTop[0] + 4 - nVertDockBarHeight = rectLeft[3] - rectLeft[1] + 4 - - if self.IsFloating(): - return self.sizeFloat - if bHorz: - return nHorzDockBarWidth, self.sizeHorz[1] - return self.sizeVert[0], nVertDockBarHeight - - def CalcDynamicLayout(self, length, mode): - # Support for diagonal sizing. 
- if self.IsFloating(): - self.GetParent().GetParent().ModifyStyle(win32ui.MFS_4THICKFRAME, 0) - if mode & (win32ui.LM_HORZDOCK | win32ui.LM_VERTDOCK): - flags = ( - win32con.SWP_NOSIZE - | win32con.SWP_NOMOVE - | win32con.SWP_NOZORDER - | win32con.SWP_NOACTIVATE - | win32con.SWP_FRAMECHANGED - ) - self.SetWindowPos( - 0, - ( - 0, - 0, - 0, - 0, - ), - flags, - ) - self.dockSite.RecalcLayout() - return self._obj_.CalcDynamicLayout(length, mode) - - if mode & win32ui.LM_MRUWIDTH: - return self.sizeFloat - if mode & win32ui.LM_COMMIT: - self.sizeFloat = length, self.sizeFloat[1] - return self.sizeFloat - # More diagonal sizing. - if self.IsFloating(): - dc = self.dockContext - pt = win32api.GetCursorPos() - windowRect = self.GetParent().GetParent().GetWindowRect() - - hittest = dc.nHitTest - if hittest == win32con.HTTOPLEFT: - cx = max(windowRect[2] - pt[0], self.cMinWidth) - self.cxBorder - cy = max(windowRect[3] - self.cCaptionSize - pt[1], self.cMinHeight) - 1 - self.sizeFloat = cx, cy - - top = ( - min(pt[1], windowRect[3] - self.cCaptionSize - self.cMinHeight) - - self.cyBorder - ) - left = min(pt[0], windowRect[2] - self.cMinWidth) - 1 - dc.rectFrameDragHorz = ( - left, - top, - dc.rectFrameDragHorz[2], - dc.rectFrameDragHorz[3], - ) - return self.sizeFloat - if hittest == win32con.HTTOPRIGHT: - cx = max(pt[0] - windowRect[0], self.cMinWidth) - cy = max(windowRect[3] - self.cCaptionSize - pt[1], self.cMinHeight) - 1 - self.sizeFloat = cx, cy - - top = ( - min(pt[1], windowRect[3] - self.cCaptionSize - self.cMinHeight) - - self.cyBorder - ) - dc.rectFrameDragHorz = ( - dc.rectFrameDragHorz[0], - top, - dc.rectFrameDragHorz[2], - dc.rectFrameDragHorz[3], - ) - return self.sizeFloat - - if hittest == win32con.HTBOTTOMLEFT: - cx = max(windowRect[2] - pt[0], self.cMinWidth) - self.cxBorder - cy = max(pt[1] - windowRect[1] - self.cCaptionSize, self.cMinHeight) - self.sizeFloat = cx, cy - - left = min(pt[0], windowRect[2] - self.cMinWidth) - 1 - dc.rectFrameDragHorz = 
( - left, - dc.rectFrameDragHorz[1], - dc.rectFrameDragHorz[2], - dc.rectFrameDragHorz[3], - ) - return self.sizeFloat - - if hittest == win32con.HTBOTTOMRIGHT: - cx = max(pt[0] - windowRect[0], self.cMinWidth) - cy = max(pt[1] - windowRect[1] - self.cCaptionSize, self.cMinHeight) - self.sizeFloat = cx, cy - return self.sizeFloat - - if mode & win32ui.LM_LENGTHY: - self.sizeFloat = self.sizeFloat[0], max(self.sizeMin[1], length) - return self.sizeFloat - else: - return max(self.sizeMin[0], length), self.sizeFloat[1] - - def OnWindowPosChanged(self, msg): - if self.GetSafeHwnd() == 0 or self.dialog is None: - return 0 - lparam = msg[3] - """ LPARAM used with WM_WINDOWPOSCHANGED: - typedef struct { - HWND hwnd; - HWND hwndInsertAfter; - int x; - int y; - int cx; - int cy; - UINT flags;} WINDOWPOS; - """ - format = "PPiiiii" - bytes = win32ui.GetBytes(lparam, struct.calcsize(format)) - hwnd, hwndAfter, x, y, cx, cy, flags = struct.unpack(format, bytes) - - if self.bInRecalcNC: - rc = self.GetClientRect() - self.dialog.MoveWindow(rc) - return 0 - # Find on which side are we docked - nDockBarID = self.GetParent().GetDlgCtrlID() - # Return if dropped at same location - # no docking side change and no size change - if ( - (nDockBarID == self.nDockBarID) - and (flags & win32con.SWP_NOSIZE) - and ( - (self._obj_.dwStyle & afxres.CBRS_BORDER_ANY) != afxres.CBRS_BORDER_ANY - ) - ): - return - self.nDockBarID = nDockBarID - - # Force recalc the non-client area - self.bInRecalcNC = 1 - try: - swpflags = ( - win32con.SWP_NOSIZE - | win32con.SWP_NOMOVE - | win32con.SWP_NOZORDER - | win32con.SWP_FRAMECHANGED - ) - self.SetWindowPos(0, (0, 0, 0, 0), swpflags) - finally: - self.bInRecalcNC = 0 - return 0 - - # This is a virtual and not a message hook. 
- def OnSetCursor(self, window, nHitTest, wMouseMsg): - if nHitTest != win32con.HTSIZE or self.bTracking: - return self._obj_.OnSetCursor(window, nHitTest, wMouseMsg) - - if self.IsHorz(): - win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_SIZENS)) - else: - win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_SIZEWE)) - return 1 - - # Mouse Handling - def OnLButtonUp(self, msg): - if not self.bTracking: - return 1 # pass it on. - self.StopTracking(1) - return 0 # Dont pass on - - def OnLButtonDown(self, msg): - # UINT nFlags, CPoint point) - # only start dragging if clicked in "void" space - if self.dockBar is not None: - # start the drag - pt = msg[5] - pt = self.ClientToScreen(pt) - self.dockContext.StartDrag(pt) - return 0 - return 1 - - def OnNcLButtonDown(self, msg): - if self.bTracking: - return 0 - nHitTest = wparam = msg[2] - pt = msg[5] - - if nHitTest == win32con.HTSYSMENU and not self.IsFloating(): - self.GetDockingFrame().ShowControlBar(self, 0, 0) - elif nHitTest == win32con.HTMINBUTTON and not self.IsFloating(): - self.dockContext.ToggleDocking() - elif ( - nHitTest == win32con.HTCAPTION - and not self.IsFloating() - and self.dockBar is not None - ): - self.dockContext.StartDrag(pt) - elif nHitTest == win32con.HTSIZE and not self.IsFloating(): - self.StartTracking() - else: - return 1 - return 0 - - def OnLButtonDblClk(self, msg): - # only toggle docking if clicked in "void" space - if self.dockBar is not None: - # toggle docking - self.dockContext.ToggleDocking() - return 0 - return 1 - - def OnNcLButtonDblClk(self, msg): - nHitTest = wparam = msg[2] - # UINT nHitTest, CPoint point) - if self.dockBar is not None and nHitTest == win32con.HTCAPTION: - # toggle docking - self.dockContext.ToggleDocking() - return 0 - return 1 - - def OnMouseMove(self, msg): - flags = wparam = msg[2] - lparam = msg[3] - if self.IsFloating() or not self.bTracking: - return 1 - - # Convert unsigned 16 bit to signed 32 bit. 
- x = win32api.LOWORD(lparam) - if x & 32768: - x = x | -65536 - y = win32api.HIWORD(lparam) - if y & 32768: - y = y | -65536 - pt = x, y - cpt = CenterPoint(self.rectTracker) - pt = self.ClientToWnd(pt) - if self.IsHorz(): - if cpt[1] != pt[1]: - self.OnInvertTracker(self.rectTracker) - self.rectTracker = OffsetRect(self.rectTracker, (0, pt[1] - cpt[1])) - self.OnInvertTracker(self.rectTracker) - else: - if cpt[0] != pt[0]: - self.OnInvertTracker(self.rectTracker) - self.rectTracker = OffsetRect(self.rectTracker, (pt[0] - cpt[0], 0)) - self.OnInvertTracker(self.rectTracker) - - return 0 # Dont pass it on. - - # def OnBarStyleChange(self, old, new): - - def OnNcCalcSize(self, bCalcValid, size_info): - (rc0, rc1, rc2, pos) = size_info - self.rectBorder = self.GetWindowRect() - self.rectBorder = OffsetRect( - self.rectBorder, (-self.rectBorder[0], -self.rectBorder[1]) - ) - - dwBorderStyle = self._obj_.dwStyle | afxres.CBRS_BORDER_ANY - - if self.nDockBarID == afxres.AFX_IDW_DOCKBAR_TOP: - dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_BOTTOM - rc0.left = rc0.left + self.cxGripper - rc0.bottom = rc0.bottom - self.cxEdge - rc0.top = rc0.top + self.cxBorder - rc0.right = rc0.right - self.cxBorder - self.rectBorder = ( - self.rectBorder[0], - self.rectBorder[3] - self.cxEdge, - self.rectBorder[2], - self.rectBorder[3], - ) - elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM: - dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_TOP - rc0.left = rc0.left + self.cxGripper - rc0.top = rc0.top + self.cxEdge - rc0.bottom = rc0.bottom - self.cxBorder - rc0.right = rc0.right - self.cxBorder - self.rectBorder = ( - self.rectBorder[0], - self.rectBorder[1], - self.rectBorder[2], - self.rectBorder[1] + self.cxEdge, - ) - elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_LEFT: - dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_RIGHT - rc0.right = rc0.right - self.cxEdge - rc0.left = rc0.left + self.cxBorder - rc0.bottom = rc0.bottom - self.cxBorder - rc0.top = rc0.top + 
self.cxGripper - self.rectBorder = ( - self.rectBorder[2] - self.cxEdge, - self.rectBorder[1], - self.rectBorder[2], - self.rectBorder[3], - ) - elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_RIGHT: - dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_LEFT - rc0.left = rc0.left + self.cxEdge - rc0.right = rc0.right - self.cxBorder - rc0.bottom = rc0.bottom - self.cxBorder - rc0.top = rc0.top + self.cxGripper - self.rectBorder = ( - self.rectBorder[0], - self.rectBorder[1], - self.rectBorder[0] + self.cxEdge, - self.rectBorder[3], - ) - else: - self.rectBorder = 0, 0, 0, 0 - - self.SetBarStyle(dwBorderStyle) - return 0 - - def OnNcPaint(self, msg): - self.EraseNonClient() - dc = self.GetWindowDC() - ctl = win32api.GetSysColor(win32con.COLOR_BTNHIGHLIGHT) - cbr = win32api.GetSysColor(win32con.COLOR_BTNSHADOW) - dc.Draw3dRect(self.rectBorder, ctl, cbr) - - self.DrawGripper(dc) - - rect = self.GetClientRect() - self.InvalidateRect(rect, 1) - return 0 - - def OnNcHitTest(self, pt): # A virtual, not a hooked message. 
- if self.IsFloating(): - return 1 - - ptOrig = pt - rect = self.GetWindowRect() - pt = pt[0] - rect[0], pt[1] - rect[1] - - if PtInRect(self.rectClose, pt): - return win32con.HTSYSMENU - elif PtInRect(self.rectUndock, pt): - return win32con.HTMINBUTTON - elif PtInRect(self.rectGripper, pt): - return win32con.HTCAPTION - elif PtInRect(self.rectBorder, pt): - return win32con.HTSIZE - else: - return self._obj_.OnNcHitTest(ptOrig) - - def StartTracking(self): - self.SetCapture() - - # make sure no updates are pending - self.RedrawWindow(None, None, win32con.RDW_ALLCHILDREN | win32con.RDW_UPDATENOW) - self.dockSite.LockWindowUpdate() - - self.ptOld = CenterPoint(self.rectBorder) - self.bTracking = 1 - - self.rectTracker = self.rectBorder - if not self.IsHorz(): - l, t, r, b = self.rectTracker - b = b - 4 - self.rectTracker = l, t, r, b - - self.OnInvertTracker(self.rectTracker) - - def OnCaptureChanged(self, msg): - hwnd = lparam = msg[3] - if self.bTracking and hwnd != self.GetSafeHwnd(): - self.StopTracking(0) # cancel tracking - return 1 - - def StopTracking(self, bAccept): - self.OnInvertTracker(self.rectTracker) - self.dockSite.UnlockWindowUpdate() - self.bTracking = 0 - self.ReleaseCapture() - if not bAccept: - return - - rcc = self.dockSite.GetWindowRect() - if self.IsHorz(): - newsize = self.sizeHorz[1] - maxsize = newsize + (rcc[3] - rcc[1]) - minsize = self.sizeMin[1] - else: - newsize = self.sizeVert[0] - maxsize = newsize + (rcc[2] - rcc[0]) - minsize = self.sizeMin[0] - - pt = CenterPoint(self.rectTracker) - if self.nDockBarID == afxres.AFX_IDW_DOCKBAR_TOP: - newsize = newsize + (pt[1] - self.ptOld[1]) - elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM: - newsize = newsize + (-pt[1] + self.ptOld[1]) - elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_LEFT: - newsize = newsize + (pt[0] - self.ptOld[0]) - elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_RIGHT: - newsize = newsize + (-pt[0] + self.ptOld[0]) - newsize = max(minsize, min(maxsize, newsize)) - if 
self.IsHorz(): - self.sizeHorz = self.sizeHorz[0], newsize - else: - self.sizeVert = newsize, self.sizeVert[1] - self.dockSite.RecalcLayout() - return 0 - - def OnInvertTracker(self, rect): - assert rect[2] - rect[0] > 0 and rect[3] - rect[1] > 0, "rect is empty" - assert self.bTracking - rcc = self.GetWindowRect() - rcf = self.dockSite.GetWindowRect() - - rect = OffsetRect(rect, (rcc[0] - rcf[0], rcc[1] - rcf[1])) - rect = DeflateRect(rect, (1, 1)) - - flags = win32con.DCX_WINDOW | win32con.DCX_CACHE | win32con.DCX_LOCKWINDOWUPDATE - dc = self.dockSite.GetDCEx(None, flags) - try: - brush = win32ui.GetHalftoneBrush() - oldBrush = dc.SelectObject(brush) - - dc.PatBlt( - (rect[0], rect[1]), - (rect[2] - rect[0], rect[3] - rect[1]), - win32con.PATINVERT, - ) - dc.SelectObject(oldBrush) - finally: - self.dockSite.ReleaseDC(dc) - - def IsHorz(self): - return ( - self.nDockBarID == afxres.AFX_IDW_DOCKBAR_TOP - or self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM - ) - - def ClientToWnd(self, pt): - x, y = pt - if self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM: - y = y + self.cxEdge - elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_RIGHT: - x = x + self.cxEdge - return x, y - - def DrawGripper(self, dc): - # no gripper if floating - if self._obj_.dwStyle & afxres.CBRS_FLOATING: - return - - # -==HACK==- - # in order to calculate the client area properly after docking, - # the client area must be recalculated twice (I have no idea why) - self.dockSite.RecalcLayout() - # -==END HACK==- - - gripper = self.GetWindowRect() - gripper = self.ScreenToClient(gripper) - gripper = OffsetRect(gripper, (-gripper[0], -gripper[1])) - gl, gt, gr, gb = gripper - - if self._obj_.dwStyle & afxres.CBRS_ORIENT_HORZ: - # gripper at left - self.rectGripper = gl, gt + 40, gl + 20, gb - # draw close box - self.rectClose = gl + 7, gt + 10, gl + 19, gt + 22 - dc.DrawFrameControl( - self.rectClose, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONCLOSE - ) - # draw docking toggle box - self.rectUndock = 
OffsetRect(self.rectClose, (0, 13)) - dc.DrawFrameControl( - self.rectUndock, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONMAX - ) - - gt = gt + 38 - gb = gb - 10 - gl = gl + 10 - gr = gl + 3 - gripper = gl, gt, gr, gb - dc.Draw3dRect(gripper, clrBtnHilight, clrBtnShadow) - dc.Draw3dRect(OffsetRect(gripper, (4, 0)), clrBtnHilight, clrBtnShadow) - else: - # gripper at top - self.rectGripper = gl, gt, gr - 40, gt + 20 - # draw close box - self.rectClose = gr - 21, gt + 7, gr - 10, gt + 18 - dc.DrawFrameControl( - self.rectClose, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONCLOSE - ) - # draw docking toggle box - self.rectUndock = OffsetRect(self.rectClose, (-13, 0)) - dc.DrawFrameControl( - self.rectUndock, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONMAX - ) - gr = gr - 38 - gl = gl + 10 - gt = gt + 10 - gb = gt + 3 - - gripper = gl, gt, gr, gb - dc.Draw3dRect(gripper, clrBtnHilight, clrBtnShadow) - dc.Draw3dRect(OffsetRect(gripper, (0, 4)), clrBtnHilight, clrBtnShadow) - - def HookMessages(self): - self.HookMessage(self.OnLButtonUp, win32con.WM_LBUTTONUP) - self.HookMessage(self.OnLButtonDown, win32con.WM_LBUTTONDOWN) - self.HookMessage(self.OnLButtonDblClk, win32con.WM_LBUTTONDBLCLK) - self.HookMessage(self.OnNcLButtonDown, win32con.WM_NCLBUTTONDOWN) - self.HookMessage(self.OnNcLButtonDblClk, win32con.WM_NCLBUTTONDBLCLK) - self.HookMessage(self.OnMouseMove, win32con.WM_MOUSEMOVE) - self.HookMessage(self.OnNcPaint, win32con.WM_NCPAINT) - self.HookMessage(self.OnCaptureChanged, win32con.WM_CAPTURECHANGED) - self.HookMessage(self.OnWindowPosChanged, win32con.WM_WINDOWPOSCHANGED) - - -# self.HookMessage(self.OnSize, win32con.WM_SIZE) - - -def EditCreator(parent): - d = win32ui.CreateEdit() - es = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | win32con.WS_BORDER - | win32con.ES_MULTILINE - | win32con.ES_WANTRETURN - ) - d.CreateWindow(es, (0, 0, 150, 150), parent, 1000) - return d - - -def test(): - import pywin.mfc.dialog - - global bar - bar = DockingBar() - creator = 
EditCreator - bar.CreateWindow(win32ui.GetMainFrame(), creator, "Coolbar Demo", 0xFFFFF) - # win32ui.GetMainFrame().ShowControlBar(bar, 1, 0) - bar.SetBarStyle( - bar.GetBarStyle() - | afxres.CBRS_TOOLTIPS - | afxres.CBRS_FLYBY - | afxres.CBRS_SIZE_DYNAMIC - ) - bar.EnableDocking(afxres.CBRS_ALIGN_ANY) - win32ui.GetMainFrame().DockControlBar(bar, afxres.AFX_IDW_DOCKBAR_BOTTOM) - - -if __name__ == "__main__": - test() diff --git a/lib/pythonwin/pywin/docking/__init__.py b/lib/pythonwin/pywin/docking/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pythonwin/pywin/framework/__init__.py b/lib/pythonwin/pywin/framework/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pythonwin/pywin/framework/app.py b/lib/pythonwin/pywin/framework/app.py deleted file mode 100644 index f994f99f..00000000 --- a/lib/pythonwin/pywin/framework/app.py +++ /dev/null @@ -1,457 +0,0 @@ -# App.py -# Application stuff. -# The application is responsible for managing the main frame window. -# -# We also grab the FileOpen command, to invoke our Python editor -" The PythonWin application code. Manages most aspects of MDI, etc " -import os -import sys -import traceback - -import regutil -import win32api -import win32con -import win32ui -from pywin.mfc import afxres, dialog, window -from pywin.mfc.thread import WinApp - -from . import scriptutils - -## NOTE: App and AppBuild should NOT be used - instead, you should contruct your -## APP class manually whenever you like (just ensure you leave these 2 params None!) -## Whoever wants the generic "Application" should get it via win32iu.GetApp() - -# These are "legacy" -AppBuilder = None -App = None # default - if used, must end up a CApp derived class. - - -# Helpers that should one day be removed! -def AddIdleHandler(handler): - print( - "app.AddIdleHandler is deprecated - please use win32ui.GetApp().AddIdleHandler() instead." 
- ) - return win32ui.GetApp().AddIdleHandler(handler) - - -def DeleteIdleHandler(handler): - print( - "app.DeleteIdleHandler is deprecated - please use win32ui.GetApp().DeleteIdleHandler() instead." - ) - return win32ui.GetApp().DeleteIdleHandler(handler) - - -# Helper for writing a Window position by name, and later loading it. -def SaveWindowSize(section, rect, state=""): - """Writes a rectangle to an INI file - Args: section = section name in the applications INI file - rect = a rectangle in a (cy, cx, y, x) tuple - (same format as CREATESTRUCT position tuples).""" - left, top, right, bottom = rect - if state: - state = state + " " - win32ui.WriteProfileVal(section, state + "left", left) - win32ui.WriteProfileVal(section, state + "top", top) - win32ui.WriteProfileVal(section, state + "right", right) - win32ui.WriteProfileVal(section, state + "bottom", bottom) - - -def LoadWindowSize(section, state=""): - """Loads a section from an INI file, and returns a rect in a tuple (see SaveWindowSize)""" - if state: - state = state + " " - left = win32ui.GetProfileVal(section, state + "left", 0) - top = win32ui.GetProfileVal(section, state + "top", 0) - right = win32ui.GetProfileVal(section, state + "right", 0) - bottom = win32ui.GetProfileVal(section, state + "bottom", 0) - return (left, top, right, bottom) - - -def RectToCreateStructRect(rect): - return (rect[3] - rect[1], rect[2] - rect[0], rect[1], rect[0]) - - -# Define FrameWindow and Application objects -# -# The Main Frame of the application. 
-class MainFrame(window.MDIFrameWnd): - sectionPos = "Main Window" - statusBarIndicators = ( - afxres.ID_SEPARATOR, # // status line indicator - afxres.ID_INDICATOR_CAPS, - afxres.ID_INDICATOR_NUM, - afxres.ID_INDICATOR_SCRL, - win32ui.ID_INDICATOR_LINENUM, - win32ui.ID_INDICATOR_COLNUM, - ) - - def OnCreate(self, cs): - self._CreateStatusBar() - return 0 - - def _CreateStatusBar(self): - self.statusBar = win32ui.CreateStatusBar(self) - self.statusBar.SetIndicators(self.statusBarIndicators) - self.HookCommandUpdate(self.OnUpdatePosIndicator, win32ui.ID_INDICATOR_LINENUM) - self.HookCommandUpdate(self.OnUpdatePosIndicator, win32ui.ID_INDICATOR_COLNUM) - - def OnUpdatePosIndicator(self, cmdui): - editControl = scriptutils.GetActiveEditControl() - value = " " * 5 - if editControl is not None: - try: - startChar, endChar = editControl.GetSel() - lineNo = editControl.LineFromChar(startChar) - colNo = endChar - editControl.LineIndex(lineNo) - - if cmdui.m_nID == win32ui.ID_INDICATOR_LINENUM: - value = "%0*d" % (5, lineNo + 1) - else: - value = "%0*d" % (3, colNo + 1) - except win32ui.error: - pass - cmdui.SetText(value) - cmdui.Enable() - - def PreCreateWindow(self, cc): - cc = self._obj_.PreCreateWindow(cc) - pos = LoadWindowSize(self.sectionPos) - self.startRect = pos - if pos[2] - pos[0]: - rect = RectToCreateStructRect(pos) - cc = cc[0], cc[1], cc[2], cc[3], rect, cc[5], cc[6], cc[7], cc[8] - return cc - - def OnDestroy(self, msg): - # use GetWindowPlacement(), as it works even when min'd or max'd - rectNow = self.GetWindowPlacement()[4] - if rectNow != self.startRect: - SaveWindowSize(self.sectionPos, rectNow) - return 0 - - -class CApp(WinApp): - "A class for the application" - - def __init__(self): - self.oldCallbackCaller = None - WinApp.__init__(self, win32ui.GetApp()) - self.idleHandlers = [] - - def InitInstance(self): - "Called to crank up the app" - HookInput() - numMRU = win32ui.GetProfileVal("Settings", "Recent File List Size", 10) - 
win32ui.LoadStdProfileSettings(numMRU) - # self._obj_.InitMDIInstance() - if win32api.GetVersionEx()[0] < 4: - win32ui.SetDialogBkColor() - win32ui.Enable3dControls() - - # install a "callback caller" - a manager for the callbacks - # self.oldCallbackCaller = win32ui.InstallCallbackCaller(self.CallbackManager) - self.LoadMainFrame() - self.SetApplicationPaths() - - def ExitInstance(self): - "Called as the app dies - too late to prevent it here!" - win32ui.OutputDebug("Application shutdown\n") - # Restore the callback manager, if any. - try: - win32ui.InstallCallbackCaller(self.oldCallbackCaller) - except AttributeError: - pass - if self.oldCallbackCaller: - del self.oldCallbackCaller - self.frame = None # clean Python references to the now destroyed window object. - self.idleHandlers = [] - # Attempt cleanup if not already done! - if self._obj_: - self._obj_.AttachObject(None) - self._obj_ = None - global App - global AppBuilder - App = None - AppBuilder = None - return 0 - - def HaveIdleHandler(self, handler): - return handler in self.idleHandlers - - def AddIdleHandler(self, handler): - self.idleHandlers.append(handler) - - def DeleteIdleHandler(self, handler): - self.idleHandlers.remove(handler) - - def OnIdle(self, count): - try: - ret = 0 - handlers = self.idleHandlers[:] # copy list, as may be modified during loop - for handler in handlers: - try: - thisRet = handler(handler, count) - except: - print("Idle handler %s failed" % (repr(handler))) - traceback.print_exc() - print("Idle handler removed from list") - try: - self.DeleteIdleHandler(handler) - except ValueError: # Item not in list. 
- pass - thisRet = 0 - ret = ret or thisRet - return ret - except KeyboardInterrupt: - pass - - def CreateMainFrame(self): - return MainFrame() - - def LoadMainFrame(self): - "Create the main applications frame" - self.frame = self.CreateMainFrame() - self.SetMainFrame(self.frame) - self.frame.LoadFrame(win32ui.IDR_MAINFRAME, win32con.WS_OVERLAPPEDWINDOW) - self.frame.DragAcceptFiles() # we can accept these. - self.frame.ShowWindow(win32ui.GetInitialStateRequest()) - self.frame.UpdateWindow() - self.HookCommands() - - def OnHelp(self, id, code): - try: - if id == win32ui.ID_HELP_GUI_REF: - helpFile = regutil.GetRegisteredHelpFile("Pythonwin Reference") - helpCmd = win32con.HELP_CONTENTS - else: - helpFile = regutil.GetRegisteredHelpFile("Main Python Documentation") - helpCmd = win32con.HELP_FINDER - if helpFile is None: - win32ui.MessageBox("The help file is not registered!") - else: - from . import help - - help.OpenHelpFile(helpFile, helpCmd) - except: - t, v, tb = sys.exc_info() - win32ui.MessageBox( - "Internal error in help file processing\r\n%s: %s" % (t, v) - ) - tb = None # Prevent a cycle - - def DoLoadModules(self, modules): - # XXX - this should go, but the debugger uses it :-( - # dont do much checking! - for module in modules: - __import__(module) - - def HookCommands(self): - self.frame.HookMessage(self.OnDropFiles, win32con.WM_DROPFILES) - self.HookCommand(self.HandleOnFileOpen, win32ui.ID_FILE_OPEN) - self.HookCommand(self.HandleOnFileNew, win32ui.ID_FILE_NEW) - self.HookCommand(self.OnFileMRU, win32ui.ID_FILE_MRU_FILE1) - self.HookCommand(self.OnHelpAbout, win32ui.ID_APP_ABOUT) - self.HookCommand(self.OnHelp, win32ui.ID_HELP_PYTHON) - self.HookCommand(self.OnHelp, win32ui.ID_HELP_GUI_REF) - # Hook for the right-click menu. 
- self.frame.GetWindow(win32con.GW_CHILD).HookMessage( - self.OnRClick, win32con.WM_RBUTTONDOWN - ) - - def SetApplicationPaths(self): - # Load the users/application paths - new_path = [] - apppath = win32ui.GetProfileVal("Python", "Application Path", "").split(";") - for path in apppath: - if len(path) > 0: - new_path.append(win32ui.FullPath(path)) - for extra_num in range(1, 11): - apppath = win32ui.GetProfileVal( - "Python", "Application Path %d" % extra_num, "" - ).split(";") - if len(apppath) == 0: - break - for path in apppath: - if len(path) > 0: - new_path.append(win32ui.FullPath(path)) - sys.path = new_path + sys.path - - def OnRClick(self, params): - "Handle right click message" - # put up the entire FILE menu! - menu = win32ui.LoadMenu(win32ui.IDR_TEXTTYPE).GetSubMenu(0) - menu.TrackPopupMenu(params[5]) # track at mouse position. - return 0 - - def OnDropFiles(self, msg): - "Handle a file being dropped from file manager" - hDropInfo = msg[2] - self.frame.SetActiveWindow() # active us - nFiles = win32api.DragQueryFile(hDropInfo) - try: - for iFile in range(0, nFiles): - fileName = win32api.DragQueryFile(hDropInfo, iFile) - win32ui.GetApp().OpenDocumentFile(fileName) - finally: - win32api.DragFinish(hDropInfo) - - return 0 - - # No longer used by Pythonwin, as the C++ code has this same basic functionality - # but handles errors slightly better. - # It all still works, tho, so if you need similar functionality, you can use it. - # Therefore I havent deleted this code completely! - # def CallbackManager( self, ob, args = () ): - # """Manage win32 callbacks. Trap exceptions, report on them, then return 'All OK' - # to the frame-work. """ - # import traceback - # try: - # ret = apply(ob, args) - # return ret - # except: - # # take copies of the exception values, else other (handled) exceptions may get - # # copied over by the other fns called. 
- # win32ui.SetStatusText('An exception occured in a windows command handler.') - # t, v, tb = sys.exc_info() - # traceback.print_exception(t, v, tb.tb_next) - # try: - # sys.stdout.flush() - # except (NameError, AttributeError): - # pass - - # Command handlers. - def OnFileMRU(self, id, code): - "Called when a File 1-n message is recieved" - fileName = win32ui.GetRecentFileList()[id - win32ui.ID_FILE_MRU_FILE1] - win32ui.GetApp().OpenDocumentFile(fileName) - - def HandleOnFileOpen(self, id, code): - "Called when FileOpen message is received" - win32ui.GetApp().OnFileOpen() - - def HandleOnFileNew(self, id, code): - "Called when FileNew message is received" - win32ui.GetApp().OnFileNew() - - def OnHelpAbout(self, id, code): - "Called when HelpAbout message is received. Displays the About dialog." - win32ui.InitRichEdit() - dlg = AboutBox() - dlg.DoModal() - - -def _GetRegistryValue(key, val, default=None): - # val is registry value - None for default val. - try: - hkey = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, key) - return win32api.RegQueryValueEx(hkey, val)[0] - except win32api.error: - try: - hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, key) - return win32api.RegQueryValueEx(hkey, val)[0] - except win32api.error: - return default - - -scintilla = "Scintilla is Copyright 1998-2008 Neil Hodgson (http://www.scintilla.org)" -idle = "This program uses IDLE extensions by Guido van Rossum, Tim Peters and others." -contributors = "Thanks to the following people for making significant contributions: Roger Upole, Sidnei da Silva, Sam Rushing, Curt Hagenlocher, Dave Brennan, Roger Burnham, Gordon McMillan, Neil Hodgson, Laramie Leavitt. 
(let me know if I have forgotten you!)" - - -# The About Box -class AboutBox(dialog.Dialog): - def __init__(self, idd=win32ui.IDD_ABOUTBOX): - dialog.Dialog.__init__(self, idd) - - def OnInitDialog(self): - text = ( - "Pythonwin - Python IDE and GUI Framework for Windows.\n\n%s\n\nPython is %s\n\n%s\n\n%s\n\n%s" - % (win32ui.copyright, sys.copyright, scintilla, idle, contributors) - ) - self.SetDlgItemText(win32ui.IDC_EDIT1, text) - # Get the build number - written by installers. - # For distutils build, read pywin32.version.txt - import sysconfig - - site_packages = sysconfig.get_paths()["platlib"] - try: - build_no = ( - open(os.path.join(site_packages, "pywin32.version.txt")).read().strip() - ) - ver = "pywin32 build %s" % build_no - except EnvironmentError: - ver = None - if ver is None: - # See if we are Part of Active Python - ver = _GetRegistryValue( - "SOFTWARE\\ActiveState\\ActivePython", "CurrentVersion" - ) - if ver is not None: - ver = "ActivePython build %s" % (ver,) - if ver is None: - ver = "" - self.SetDlgItemText(win32ui.IDC_ABOUT_VERSION, ver) - self.HookCommand(self.OnButHomePage, win32ui.IDC_BUTTON1) - - def OnButHomePage(self, id, code): - if code == win32con.BN_CLICKED: - win32api.ShellExecute( - 0, "open", "https://github.com/mhammond/pywin32", None, "", 1 - ) - - -def Win32RawInput(prompt=None): - "Provide raw_input() for gui apps" - # flush stderr/out first. - try: - sys.stdout.flush() - sys.stderr.flush() - except: - pass - if prompt is None: - prompt = "" - ret = dialog.GetSimpleInput(prompt) - if ret == None: - raise KeyboardInterrupt("operation cancelled") - return ret - - -def Win32Input(prompt=None): - "Provide input() for gui apps" - return eval(input(prompt)) - - -def HookInput(): - try: - raw_input - # must be py2x... 
- sys.modules["__builtin__"].raw_input = Win32RawInput - sys.modules["__builtin__"].input = Win32Input - except NameError: - # must be py3k - import code - - sys.modules["builtins"].input = Win32RawInput - - -def HaveGoodGUI(): - """Returns true if we currently have a good gui available.""" - return "pywin.framework.startup" in sys.modules - - -def CreateDefaultGUI(appClass=None): - """Creates a default GUI environment""" - if appClass is None: - from . import intpyapp # Bring in the default app - could be param'd later. - - appClass = intpyapp.InteractivePythonApp - # Create and init the app. - appClass().InitInstance() - - -def CheckCreateDefaultGUI(): - """Checks and creates if necessary a default GUI environment.""" - rc = HaveGoodGUI() - if not rc: - CreateDefaultGUI() - return rc diff --git a/lib/pythonwin/pywin/framework/bitmap.py b/lib/pythonwin/pywin/framework/bitmap.py deleted file mode 100644 index 1501f72d..00000000 --- a/lib/pythonwin/pywin/framework/bitmap.py +++ /dev/null @@ -1,164 +0,0 @@ -import os - -import win32api -import win32con -import win32ui -from pywin.mfc import docview, window - -from . import app - -bStretch = 1 - - -class BitmapDocument(docview.Document): - "A bitmap document. Holds the bitmap data itself." - - def __init__(self, template): - docview.Document.__init__(self, template) - self.bitmap = None - - def OnNewDocument(self): - # I can not create new bitmaps. - win32ui.MessageBox("Bitmaps can not be created.") - - def OnOpenDocument(self, filename): - self.bitmap = win32ui.CreateBitmap() - # init data members - f = open(filename, "rb") - try: - try: - self.bitmap.LoadBitmapFile(f) - except IOError: - win32ui.MessageBox("Could not load the bitmap from %s" % filename) - return 0 - finally: - f.close() - self.size = self.bitmap.GetSize() - return 1 - - def DeleteContents(self): - self.bitmap = None - - -class BitmapView(docview.ScrollView): - "A view of a bitmap. Obtains data from document." 
- - def __init__(self, doc): - docview.ScrollView.__init__(self, doc) - self.width = self.height = 0 - # set up message handlers - self.HookMessage(self.OnSize, win32con.WM_SIZE) - - def OnInitialUpdate(self): - doc = self.GetDocument() - if doc.bitmap: - bitmapSize = doc.bitmap.GetSize() - self.SetScrollSizes(win32con.MM_TEXT, bitmapSize) - - def OnSize(self, params): - lParam = params[3] - self.width = win32api.LOWORD(lParam) - self.height = win32api.HIWORD(lParam) - - def OnDraw(self, dc): - # set sizes used for "non stretch" mode. - doc = self.GetDocument() - if doc.bitmap is None: - return - bitmapSize = doc.bitmap.GetSize() - if bStretch: - # stretch BMP. - viewRect = (0, 0, self.width, self.height) - bitmapRect = (0, 0, bitmapSize[0], bitmapSize[1]) - doc.bitmap.Paint(dc, viewRect, bitmapRect) - else: - # non stretch. - doc.bitmap.Paint(dc) - - -class BitmapFrame(window.MDIChildWnd): - def OnCreateClient(self, createparams, context): - borderX = win32api.GetSystemMetrics(win32con.SM_CXFRAME) - borderY = win32api.GetSystemMetrics(win32con.SM_CYFRAME) - titleY = win32api.GetSystemMetrics(win32con.SM_CYCAPTION) # includes border - # try and maintain default window pos, else adjust if cant fit - # get the main client window dimensions. 
- mdiClient = win32ui.GetMainFrame().GetWindow(win32con.GW_CHILD) - clientWindowRect = mdiClient.ScreenToClient(mdiClient.GetWindowRect()) - clientWindowSize = ( - clientWindowRect[2] - clientWindowRect[0], - clientWindowRect[3] - clientWindowRect[1], - ) - left, top, right, bottom = mdiClient.ScreenToClient(self.GetWindowRect()) - # width, height=context.doc.size[0], context.doc.size[1] - # width = width+borderX*2 - # height= height+titleY+borderY*2-1 - # if (left+width)>clientWindowSize[0]: - # left = clientWindowSize[0] - width - # if left<0: - # left = 0 - # width = clientWindowSize[0] - # if (top+height)>clientWindowSize[1]: - # top = clientWindowSize[1] - height - # if top<0: - # top = 0 - # height = clientWindowSize[1] - # self.frame.MoveWindow((left, top, left+width, top+height),0) - window.MDIChildWnd.OnCreateClient(self, createparams, context) - return 1 - - -class BitmapTemplate(docview.DocTemplate): - def __init__(self): - docview.DocTemplate.__init__( - self, win32ui.IDR_PYTHONTYPE, BitmapDocument, BitmapFrame, BitmapView - ) - - def MatchDocType(self, fileName, fileType): - doc = self.FindOpenDocument(fileName) - if doc: - return doc - ext = os.path.splitext(fileName)[1].lower() - if ext == ".bmp": # removed due to PIL! or ext=='.ppm': - return win32ui.CDocTemplate_Confidence_yesAttemptNative - return win32ui.CDocTemplate_Confidence_maybeAttemptForeign - - -# return win32ui.CDocTemplate_Confidence_noAttempt - -# For debugging purposes, when this module may be reloaded many times. -try: - win32ui.GetApp().RemoveDocTemplate(bitmapTemplate) -except NameError: - pass - -bitmapTemplate = BitmapTemplate() -bitmapTemplate.SetDocStrings( - "\nBitmap\nBitmap\nBitmap (*.bmp)\n.bmp\nPythonBitmapFileType\nPython Bitmap File" -) -win32ui.GetApp().AddDocTemplate(bitmapTemplate) - -# This works, but just didnt make it through the code reorg. 
-# class PPMBitmap(Bitmap): -# def LoadBitmapFile(self, file ): -# magic=file.readline() -# if magic <> "P6\n": -# raise TypeError, "The file is not a PPM format file" -# rowcollist=string.split(file.readline()) -# cols=string.atoi(rowcollist[0]) -# rows=string.atoi(rowcollist[1]) -# file.readline() # whats this one? -# self.bitmap.LoadPPMFile(file,(cols,rows)) - - -def t(): - bitmapTemplate.OpenDocumentFile("d:\\winnt\\arcade.bmp") - # OpenBMPFile( 'd:\\winnt\\arcade.bmp') - - -def demo(): - import glob - - winDir = win32api.GetWindowsDirectory() - for fileName in glob.glob1(winDir, "*.bmp")[:2]: - bitmapTemplate.OpenDocumentFile(os.path.join(winDir, fileName)) diff --git a/lib/pythonwin/pywin/framework/cmdline.py b/lib/pythonwin/pywin/framework/cmdline.py deleted file mode 100644 index dba31d1a..00000000 --- a/lib/pythonwin/pywin/framework/cmdline.py +++ /dev/null @@ -1,56 +0,0 @@ -# cmdline - command line utilities. -import string -import sys - -import win32ui - - -def ParseArgs(str): - import string - - ret = [] - pos = 0 - length = len(str) - while pos < length: - try: - while str[pos] in string.whitespace: - pos = pos + 1 - except IndexError: - break - if pos >= length: - break - if str[pos] == '"': - pos = pos + 1 - try: - endPos = str.index('"', pos) - 1 - nextPos = endPos + 2 - except ValueError: - endPos = length - nextPos = endPos + 1 - else: - endPos = pos - while endPos < length and not str[endPos] in string.whitespace: - endPos = endPos + 1 - nextPos = endPos + 1 - ret.append(str[pos : endPos + 1].strip()) - pos = nextPos - return ret - - -def FixArgFileName(fileName): - """Convert a filename on the commandline to something useful. 
- Given an automatic filename on the commandline, turn it a python module name, - with the path added to sys.path.""" - import os - - path, fname = os.path.split(fileName) - if len(path) == 0: - path = os.curdir - path = os.path.abspath(path) - # must check that the command line arg's path is in sys.path - for syspath in sys.path: - if os.path.abspath(syspath) == path: - break - else: - sys.path.append(path) - return os.path.splitext(fname)[0] diff --git a/lib/pythonwin/pywin/framework/dbgcommands.py b/lib/pythonwin/pywin/framework/dbgcommands.py deleted file mode 100644 index e295926c..00000000 --- a/lib/pythonwin/pywin/framework/dbgcommands.py +++ /dev/null @@ -1,189 +0,0 @@ -# Command Handlers for the debugger. - -# Not in the debugger package, as I always want these interfaces to be -# available, even if the debugger has not yet been (or can not be) -# imported -import warnings - -import win32ui -from pywin.scintilla.control import CScintillaEditInterface - -from . import scriptutils - -IdToBarNames = { - win32ui.IDC_DBG_STACK: ("Stack", 0), - win32ui.IDC_DBG_BREAKPOINTS: ("Breakpoints", 0), - win32ui.IDC_DBG_WATCH: ("Watch", 1), -} - - -class DebuggerCommandHandler: - def HookCommands(self): - commands = ( - (self.OnStep, None, win32ui.IDC_DBG_STEP), - (self.OnStepOut, self.OnUpdateOnlyBreak, win32ui.IDC_DBG_STEPOUT), - (self.OnStepOver, None, win32ui.IDC_DBG_STEPOVER), - (self.OnGo, None, win32ui.IDC_DBG_GO), - (self.OnClose, self.OnUpdateClose, win32ui.IDC_DBG_CLOSE), - (self.OnAdd, self.OnUpdateAddBreakpoints, win32ui.IDC_DBG_ADD), - (self.OnClearAll, self.OnUpdateClearAllBreakpoints, win32ui.IDC_DBG_CLEAR), - # (self.OnDebuggerToolbar, self.OnUpdateDebuggerToolbar, win32ui.ID_DEBUGGER_TOOLBAR), - ) - - frame = win32ui.GetMainFrame() - - for methHandler, methUpdate, id in commands: - frame.HookCommand(methHandler, id) - if not methUpdate is None: - frame.HookCommandUpdate(methUpdate, id) - - for id in list(IdToBarNames.keys()): - 
frame.HookCommand(self.OnDebuggerBar, id) - frame.HookCommandUpdate(self.OnUpdateDebuggerBar, id) - - def OnDebuggerToolbar(self, id, code): - if code == 0: - return not win32ui.GetMainFrame().OnBarCheck(id) - - def OnUpdateDebuggerToolbar(self, cmdui): - win32ui.GetMainFrame().OnUpdateControlBarMenu(cmdui) - cmdui.Enable(1) - - def _GetDebugger(self): - try: - import pywin.debugger - - return pywin.debugger.currentDebugger - except ImportError: - return None - - def _DoOrStart(self, doMethod, startFlag): - d = self._GetDebugger() - if d is not None and d.IsDebugging(): - method = getattr(d, doMethod) - method() - else: - scriptutils.RunScript( - defName=None, defArgs=None, bShowDialog=0, debuggingType=startFlag - ) - - def OnStep(self, msg, code): - self._DoOrStart("do_set_step", scriptutils.RS_DEBUGGER_STEP) - - def OnStepOver(self, msg, code): - self._DoOrStart("do_set_next", scriptutils.RS_DEBUGGER_STEP) - - def OnStepOut(self, msg, code): - d = self._GetDebugger() - if d is not None and d.IsDebugging(): - d.do_set_return() - - def OnGo(self, msg, code): - self._DoOrStart("do_set_continue", scriptutils.RS_DEBUGGER_GO) - - def OnClose(self, msg, code): - d = self._GetDebugger() - if d is not None: - if d.IsDebugging(): - d.set_quit() - else: - d.close() - - def OnUpdateClose(self, cmdui): - d = self._GetDebugger() - if d is not None and d.inited: - cmdui.Enable(1) - else: - cmdui.Enable(0) - - def OnAdd(self, msg, code): - doc, view = scriptutils.GetActiveEditorDocument() - if doc is None: - ## Don't do a messagebox, as this could be triggered from the app's - ## idle loop whenever the debug toolbar is visible, giving a never-ending - ## series of dialogs. This can happen when the OnUpdate handler - ## for the toolbar button IDC_DBG_ADD fails, since MFC falls back to - ## sending a normal command if the UI update command fails. 
- ## win32ui.MessageBox('There is no active window - no breakpoint can be added') - warnings.warn("There is no active window - no breakpoint can be added") - return None - pathName = doc.GetPathName() - lineNo = view.LineFromChar(view.GetSel()[0]) + 1 - # If I have a debugger, then tell it, otherwise just add a marker - d = self._GetDebugger() - if d is None: - import pywin.framework.editor.color.coloreditor - - doc.MarkerToggle( - lineNo, pywin.framework.editor.color.coloreditor.MARKER_BREAKPOINT - ) - else: - if d.get_break(pathName, lineNo): - win32ui.SetStatusText("Clearing breakpoint", 1) - rc = d.clear_break(pathName, lineNo) - else: - win32ui.SetStatusText("Setting breakpoint", 1) - rc = d.set_break(pathName, lineNo) - if rc: - win32ui.MessageBox(rc) - d.GUIRespondDebuggerData() - - def OnClearAll(self, msg, code): - win32ui.SetStatusText("Clearing all breakpoints") - d = self._GetDebugger() - if d is None: - import pywin.framework.editor - import pywin.framework.editor.color.coloreditor - - for doc in pywin.framework.editor.editorTemplate.GetDocumentList(): - doc.MarkerDeleteAll( - pywin.framework.editor.color.coloreditor.MARKER_BREAKPOINT - ) - else: - d.clear_all_breaks() - d.UpdateAllLineStates() - d.GUIRespondDebuggerData() - - def OnUpdateOnlyBreak(self, cmdui): - d = self._GetDebugger() - ok = d is not None and d.IsBreak() - cmdui.Enable(ok) - - def OnUpdateAddBreakpoints(self, cmdui): - doc, view = scriptutils.GetActiveEditorDocument() - if doc is None or not isinstance(view, CScintillaEditInterface): - enabled = 0 - else: - enabled = 1 - lineNo = view.LineFromChar(view.GetSel()[0]) + 1 - import pywin.framework.editor.color.coloreditor - - cmdui.SetCheck( - doc.MarkerAtLine( - lineNo, pywin.framework.editor.color.coloreditor.MARKER_BREAKPOINT - ) - != 0 - ) - cmdui.Enable(enabled) - - def OnUpdateClearAllBreakpoints(self, cmdui): - d = self._GetDebugger() - cmdui.Enable(d is None or len(d.breaks) != 0) - - def OnUpdateDebuggerBar(self, cmdui): - 
name, always = IdToBarNames.get(cmdui.m_nID) - enabled = always - d = self._GetDebugger() - if d is not None and d.IsDebugging() and name is not None: - enabled = 1 - bar = d.GetDebuggerBar(name) - cmdui.SetCheck(bar.IsWindowVisible()) - cmdui.Enable(enabled) - - def OnDebuggerBar(self, id, code): - name = IdToBarNames.get(id)[0] - d = self._GetDebugger() - if d is not None and name is not None: - bar = d.GetDebuggerBar(name) - newState = not bar.IsWindowVisible() - win32ui.GetMainFrame().ShowControlBar(bar, newState, 1) diff --git a/lib/pythonwin/pywin/framework/dlgappcore.py b/lib/pythonwin/pywin/framework/dlgappcore.py deleted file mode 100644 index 4b60cc1c..00000000 --- a/lib/pythonwin/pywin/framework/dlgappcore.py +++ /dev/null @@ -1,75 +0,0 @@ -# dlgappcore. -# -# base classes for dialog based apps. - - -import win32api -import win32con -import win32ui -from pywin.mfc import dialog - -from . import app - -error = "Dialog Application Error" - - -class AppDialog(dialog.Dialog): - "The dialog box for the application" - - def __init__(self, id, dll=None): - self.iconId = win32ui.IDR_MAINFRAME - dialog.Dialog.__init__(self, id, dll) - - def OnInitDialog(self): - return dialog.Dialog.OnInitDialog(self) - - # Provide support for a dlg app using an icon - def OnPaint(self): - if not self.IsIconic(): - return self._obj_.OnPaint() - self.DefWindowProc(win32con.WM_ICONERASEBKGND, dc.GetHandleOutput(), 0) - left, top, right, bottom = self.GetClientRect() - left = (right - win32api.GetSystemMetrics(win32con.SM_CXICON)) >> 1 - top = (bottom - win32api.GetSystemMetrics(win32con.SM_CYICON)) >> 1 - hIcon = win32ui.GetApp().LoadIcon(self.iconId) - self.GetDC().DrawIcon((left, top), hIcon) - - # Only needed to provide a minimized icon (and this seems - # less important under win95/NT4 - def OnEraseBkgnd(self, dc): - if self.IsIconic(): - return 1 - else: - return self._obj_.OnEraseBkgnd(dc) - - def OnQueryDragIcon(self): - return win32ui.GetApp().LoadIcon(self.iconId) - - def 
PreDoModal(self): - pass - - -class DialogApp(app.CApp): - "An application class, for an app with main dialog box" - - def InitInstance(self): - # win32ui.SetProfileFileName('dlgapp.ini') - win32ui.LoadStdProfileSettings() - win32ui.EnableControlContainer() - win32ui.Enable3dControls() - self.dlg = self.frame = self.CreateDialog() - - if self.frame is None: - raise error("No dialog was created by CreateDialog()") - return - - self._obj_.InitDlgInstance(self.dlg) - self.PreDoModal() - self.dlg.PreDoModal() - self.dlg.DoModal() - - def CreateDialog(self): - pass - - def PreDoModal(self): - pass diff --git a/lib/pythonwin/pywin/framework/editor/ModuleBrowser.py b/lib/pythonwin/pywin/framework/editor/ModuleBrowser.py deleted file mode 100644 index 7fd96532..00000000 --- a/lib/pythonwin/pywin/framework/editor/ModuleBrowser.py +++ /dev/null @@ -1,235 +0,0 @@ -# ModuleBrowser.py - A view that provides a module browser for an editor document. -import pyclbr - -import afxres -import commctrl -import pywin.framework.scriptutils -import pywin.mfc.docview -import win32api -import win32con -import win32ui -from pywin.tools import browser, hierlist - - -class HierListCLBRModule(hierlist.HierListItem): - def __init__(self, modName, clbrdata): - self.modName = modName - self.clbrdata = clbrdata - - def GetText(self): - return self.modName - - def GetSubList(self): - ret = [] - for item in self.clbrdata.values(): - if ( - item.__class__ != pyclbr.Class - ): # ie, it is a pyclbr Function instance (only introduced post 1.5.2) - ret.append(HierListCLBRFunction(item)) - else: - ret.append(HierListCLBRClass(item)) - ret.sort() - return ret - - def IsExpandable(self): - return 1 - - -class HierListCLBRItem(hierlist.HierListItem): - def __init__(self, name, file, lineno, suffix=""): - self.name = str(name) - self.file = file - self.lineno = lineno - self.suffix = suffix - - def __lt__(self, other): - return self.name < other.name - - def __eq__(self, other): - return self.name == 
other.name - - def GetText(self): - return self.name + self.suffix - - def TakeDefaultAction(self): - if self.file: - pywin.framework.scriptutils.JumpToDocument( - self.file, self.lineno, bScrollToTop=1 - ) - else: - win32ui.SetStatusText("Can not locate the source code for this object.") - - def PerformItemSelected(self): - if self.file is None: - msg = "%s - source can not be located." % (self.name,) - else: - msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file) - win32ui.SetStatusText(msg) - - -class HierListCLBRClass(HierListCLBRItem): - def __init__(self, clbrclass, suffix=""): - try: - name = clbrclass.name - file = clbrclass.file - lineno = clbrclass.lineno - self.super = clbrclass.super - self.methods = clbrclass.methods - except AttributeError: - name = clbrclass - file = lineno = None - self.super = [] - self.methods = {} - HierListCLBRItem.__init__(self, name, file, lineno, suffix) - - def GetSubList(self): - r1 = [] - for c in self.super: - r1.append(HierListCLBRClass(c, " (Parent class)")) - r1.sort() - r2 = [] - for meth, lineno in self.methods.items(): - r2.append(HierListCLBRMethod(meth, self.file, lineno)) - r2.sort() - return r1 + r2 - - def IsExpandable(self): - return len(self.methods) + len(self.super) - - def GetBitmapColumn(self): - return 21 - - -class HierListCLBRFunction(HierListCLBRItem): - def __init__(self, clbrfunc, suffix=""): - name = clbrfunc.name - file = clbrfunc.file - lineno = clbrfunc.lineno - HierListCLBRItem.__init__(self, name, file, lineno, suffix) - - def GetBitmapColumn(self): - return 22 - - -class HierListCLBRMethod(HierListCLBRItem): - def GetBitmapColumn(self): - return 22 - - -class HierListCLBRErrorItem(hierlist.HierListItem): - def __init__(self, text): - self.text = text - - def GetText(self): - return self.text - - def GetSubList(self): - return [HierListCLBRErrorItem(self.text)] - - def IsExpandable(self): - return 0 - - -class HierListCLBRErrorRoot(HierListCLBRErrorItem): - def 
IsExpandable(self): - return 1 - - -class BrowserView(pywin.mfc.docview.TreeView): - def OnInitialUpdate(self): - self.list = None - rc = self._obj_.OnInitialUpdate() - self.HookMessage(self.OnSize, win32con.WM_SIZE) - self.bDirty = 0 - self.destroying = 0 - return rc - - def DestroyBrowser(self): - self.DestroyList() - - def OnActivateView(self, activate, av, dv): - # print "AV", self.bDirty, activate - if activate: - self.CheckRefreshList() - return self._obj_.OnActivateView(activate, av, dv) - - def _MakeRoot(self): - path = self.GetDocument().GetPathName() - if not path: - return HierListCLBRErrorRoot( - "Error: Can not browse a file until it is saved" - ) - else: - mod, path = pywin.framework.scriptutils.GetPackageModuleName(path) - if self.bDirty: - what = "Refreshing" - # Hack for pyclbr being too smart - try: - del pyclbr._modules[mod] - except (KeyError, AttributeError): - pass - else: - what = "Building" - win32ui.SetStatusText("%s class list - please wait..." % (what,), 1) - win32ui.DoWaitCursor(1) - try: - reader = pyclbr.readmodule_ex # new version post 1.5.2 - except AttributeError: - reader = pyclbr.readmodule - try: - data = reader(mod, [path]) - if data: - return HierListCLBRModule(mod, data) - else: - return HierListCLBRErrorRoot("No Python classes in module.") - - finally: - win32ui.DoWaitCursor(0) - win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) - - def DestroyList(self): - self.destroying = 1 - list = getattr( - self, "list", None - ) # If the document was not successfully opened, we may not have a list. 
- self.list = None - if list is not None: - list.HierTerm() - self.destroying = 0 - - def CheckMadeList(self): - if self.list is not None or self.destroying: - return - self.rootitem = root = self._MakeRoot() - self.list = list = hierlist.HierListWithItems(root, win32ui.IDB_BROWSER_HIER) - list.HierInit(self.GetParentFrame(), self) - list.SetStyle( - commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS - ) - - def CheckRefreshList(self): - if self.bDirty: - if self.list is None: - self.CheckMadeList() - else: - new_root = self._MakeRoot() - if self.rootitem.__class__ == new_root.__class__ == HierListCLBRModule: - self.rootitem.modName = new_root.modName - self.rootitem.clbrdata = new_root.clbrdata - self.list.Refresh() - else: - self.list.AcceptRoot(self._MakeRoot()) - self.bDirty = 0 - - def OnSize(self, params): - lparam = params[3] - w = win32api.LOWORD(lparam) - h = win32api.HIWORD(lparam) - if w != 0: - self.CheckMadeList() - elif w == 0: - self.DestroyList() - return 1 - - def _UpdateUIForState(self): - self.bDirty = 1 diff --git a/lib/pythonwin/pywin/framework/editor/__init__.py b/lib/pythonwin/pywin/framework/editor/__init__.py deleted file mode 100644 index ce13ece6..00000000 --- a/lib/pythonwin/pywin/framework/editor/__init__.py +++ /dev/null @@ -1,106 +0,0 @@ -# __init__ for the Pythonwin editor package. -# -# We used to support optional editors - eg, color or non-color. -# -# This really isnt necessary with Scintilla, and scintilla -# is getting so deeply embedded that it was too much work. 
- -import sys - -import win32con -import win32ui - -defaultCharacterFormat = (-402653169, 0, 200, 0, 0, 0, 49, "Courier New") - -##def GetDefaultEditorModuleName(): -## import pywin -## # If someone has set pywin.editormodulename, then this is what we use -## try: -## prefModule = pywin.editormodulename -## except AttributeError: -## prefModule = win32ui.GetProfileVal("Editor","Module", "") -## return prefModule -## -##def WriteDefaultEditorModule(module): -## try: -## module = module.__name__ -## except: -## pass -## win32ui.WriteProfileVal("Editor", "Module", module) - - -def LoadDefaultEditor(): - pass - - -## prefModule = GetDefaultEditorModuleName() -## restorePrefModule = None -## mod = None -## if prefModule: -## try: -## mod = __import__(prefModule) -## except 'xx': -## msg = "Importing your preferred editor ('%s') failed.\n\nError %s: %s\n\nAn attempt will be made to load the default editor.\n\nWould you like this editor disabled in the future?" % (prefModule, sys.exc_info()[0], sys.exc_info()[1]) -## rc = win32ui.MessageBox(msg, "Error importing editor", win32con.MB_YESNO) -## if rc == win32con.IDNO: -## restorePrefModule = prefModule -## WriteDefaultEditorModule("") -## del rc -## -## try: -## # Try and load the default one - dont catch errors here. -## if mod is None: -## prefModule = "pywin.framework.editor.color.coloreditor" -## mod = __import__(prefModule) -## -## # Get at the real module. -## mod = sys.modules[prefModule] -## -## # Do a "from mod import *" -## globals().update(mod.__dict__) -## -## finally: -## # Restore the users default editor if it failed and they requested not to disable it. 
-## if restorePrefModule: -## WriteDefaultEditorModule(restorePrefModule) - - -def GetEditorOption(option, defaultValue, min=None, max=None): - rc = win32ui.GetProfileVal("Editor", option, defaultValue) - if min is not None and rc < min: - rc = defaultValue - if max is not None and rc > max: - rc = defaultValue - return rc - - -def SetEditorOption(option, newValue): - win32ui.WriteProfileVal("Editor", option, newValue) - - -def DeleteEditorOption(option): - try: - win32ui.WriteProfileVal("Editor", option, None) - except win32ui.error: - pass - - -# Load and save font tuples -def GetEditorFontOption(option, default=None): - if default is None: - default = defaultCharacterFormat - fmt = GetEditorOption(option, "") - if fmt == "": - return default - try: - return eval(fmt) - except: - print("WARNING: Invalid font setting in registry - setting ignored") - return default - - -def SetEditorFontOption(option, newValue): - SetEditorOption(option, str(newValue)) - - -from pywin.framework.editor.color.coloreditor import editorTemplate diff --git a/lib/pythonwin/pywin/framework/editor/color/__init__.py b/lib/pythonwin/pywin/framework/editor/color/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pythonwin/pywin/framework/editor/color/coloreditor.py b/lib/pythonwin/pywin/framework/editor/color/coloreditor.py deleted file mode 100644 index 4aacc0ea..00000000 --- a/lib/pythonwin/pywin/framework/editor/color/coloreditor.py +++ /dev/null @@ -1,664 +0,0 @@ -# Color Editor originally by Neil Hodgson, but restructured by mh to integrate -# even tighter into Pythonwin. 
- -import pywin.scintilla.keycodes -import win32api -import win32con -import win32ui -from pywin.framework.editor import ( - GetEditorFontOption, - GetEditorOption, - SetEditorFontOption, - SetEditorOption, - defaultCharacterFormat, -) -from pywin.scintilla import bindings - -# from pywin.framework.editor import EditorPropertyPage - -MSG_CHECK_EXTERNAL_FILE = ( - win32con.WM_USER + 1999 -) ## WARNING: Duplicated in document.py and editor.py - -# Define a few common markers -MARKER_BOOKMARK = 0 -MARKER_BREAKPOINT = 1 -MARKER_CURRENT = 2 - -import pywin.scintilla.view -from pywin.debugger import dbgcon -from pywin.framework.editor.document import EditorDocumentBase -from pywin.scintilla import scintillacon # For the marker definitions -from pywin.scintilla.document import CScintillaDocument - - -class SyntEditDocument(EditorDocumentBase): - "A SyntEdit document." - - def OnDebuggerStateChange(self, state): - self._ApplyOptionalToViews("OnDebuggerStateChange", state) - - def HookViewNotifications(self, view): - EditorDocumentBase.HookViewNotifications(self, view) - view.SCISetUndoCollection(1) - - def FinalizeViewCreation(self, view): - EditorDocumentBase.FinalizeViewCreation(self, view) - if view == self.GetFirstView(): - self.GetDocTemplate().CheckIDLEMenus(view.idle) - - -SyntEditViewParent = pywin.scintilla.view.CScintillaView - - -class SyntEditView(SyntEditViewParent): - "A view of a SyntEdit. Obtains data from document." 
- - def __init__(self, doc): - SyntEditViewParent.__init__(self, doc) - self.bCheckingFile = 0 - - def OnInitialUpdate(self): - SyntEditViewParent.OnInitialUpdate(self) - - self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN) - - for id in ( - win32ui.ID_VIEW_FOLD_COLLAPSE, - win32ui.ID_VIEW_FOLD_COLLAPSE_ALL, - win32ui.ID_VIEW_FOLD_EXPAND, - win32ui.ID_VIEW_FOLD_EXPAND_ALL, - ): - self.HookCommand(self.OnCmdViewFold, id) - self.HookCommandUpdate(self.OnUpdateViewFold, id) - self.HookCommand(self.OnCmdViewFoldTopLevel, win32ui.ID_VIEW_FOLD_TOPLEVEL) - - # Define the markers - # self.SCIMarkerDeleteAll() - self.SCIMarkerDefineAll( - MARKER_BOOKMARK, - scintillacon.SC_MARK_ROUNDRECT, - win32api.RGB(0x0, 0x0, 0x0), - win32api.RGB(0, 0xFF, 0xFF), - ) - - self.SCIMarkerDefine(MARKER_CURRENT, scintillacon.SC_MARK_ARROW) - self.SCIMarkerSetBack(MARKER_CURRENT, win32api.RGB(0xFF, 0xFF, 0x00)) - - # Define the folding markers - if 1: # traditional markers - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDEROPEN, - scintillacon.SC_MARK_MINUS, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDER, - scintillacon.SC_MARK_PLUS, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDERSUB, - scintillacon.SC_MARK_EMPTY, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDERTAIL, - scintillacon.SC_MARK_EMPTY, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDEREND, - scintillacon.SC_MARK_EMPTY, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDEROPENMID, - scintillacon.SC_MARK_EMPTY, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDERMIDTAIL, - 
scintillacon.SC_MARK_EMPTY, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - else: # curved markers - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDEROPEN, - scintillacon.SC_MARK_CIRCLEMINUS, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDER, - scintillacon.SC_MARK_CIRCLEPLUS, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDERSUB, - scintillacon.SC_MARK_VLINE, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDERTAIL, - scintillacon.SC_MARK_LCORNERCURVE, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDEREND, - scintillacon.SC_MARK_CIRCLEPLUSCONNECTED, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDEROPENMID, - scintillacon.SC_MARK_CIRCLEMINUSCONNECTED, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - self.SCIMarkerDefineAll( - scintillacon.SC_MARKNUM_FOLDERMIDTAIL, - scintillacon.SC_MARK_TCORNERCURVE, - win32api.RGB(0xFF, 0xFF, 0xFF), - win32api.RGB(0, 0, 0), - ) - - self.SCIMarkerDefine(MARKER_BREAKPOINT, scintillacon.SC_MARK_CIRCLE) - # Marker background depends on debugger state - self.SCIMarkerSetFore(MARKER_BREAKPOINT, win32api.RGB(0x0, 0, 0)) - # Get the current debugger state. 
- try: - import pywin.debugger - - if pywin.debugger.currentDebugger is None: - state = dbgcon.DBGSTATE_NOT_DEBUGGING - else: - state = pywin.debugger.currentDebugger.debuggerState - except ImportError: - state = dbgcon.DBGSTATE_NOT_DEBUGGING - self.OnDebuggerStateChange(state) - - def _GetSubConfigNames(self): - return ["editor"] # Allow [Keys:Editor] sections to be specific to us - - def DoConfigChange(self): - SyntEditViewParent.DoConfigChange(self) - tabSize = GetEditorOption("Tab Size", 4, 2) - indentSize = GetEditorOption("Indent Size", 4, 2) - bUseTabs = GetEditorOption("Use Tabs", 0) - bSmartTabs = GetEditorOption("Smart Tabs", 1) - ext = self.idle.IDLEExtension("AutoIndent") # Required extension. - - self.SCISetViewWS(GetEditorOption("View Whitespace", 0)) - self.SCISetViewEOL(GetEditorOption("View EOL", 0)) - self.SCISetIndentationGuides(GetEditorOption("View Indentation Guides", 0)) - - if GetEditorOption("Right Edge Enabled", 0): - mode = scintillacon.EDGE_BACKGROUND - else: - mode = scintillacon.EDGE_NONE - self.SCISetEdgeMode(mode) - self.SCISetEdgeColumn(GetEditorOption("Right Edge Column", 75)) - self.SCISetEdgeColor( - GetEditorOption("Right Edge Color", win32api.RGB(0xEF, 0xEF, 0xEF)) - ) - - width = GetEditorOption("Marker Margin Width", 16) - self.SCISetMarginWidthN(1, width) - width = GetEditorOption("Fold Margin Width", 12) - self.SCISetMarginWidthN(2, width) - width = GetEditorOption("Line Number Margin Width", 0) - self.SCISetMarginWidthN(0, width) - self.bFolding = GetEditorOption("Enable Folding", 1) - fold_flags = 0 - self.SendScintilla( - scintillacon.SCI_SETMODEVENTMASK, scintillacon.SC_MOD_CHANGEFOLD - ) - if self.bFolding: - if GetEditorOption("Fold Lines", 1): - fold_flags = 16 - - self.SCISetProperty("fold", self.bFolding) - self.SCISetFoldFlags(fold_flags) - - tt_color = GetEditorOption("Tab Timmy Color", win32api.RGB(0xFF, 0, 0)) - self.SendScintilla(scintillacon.SCI_INDICSETFORE, 1, tt_color) - - tt_use = GetEditorOption("Use Tab 
Timmy", 1) - if tt_use: - self.SCISetProperty("tab.timmy.whinge.level", "1") - - # Auto-indent has very complicated behaviour. In a nutshell, the only - # way to get sensible behaviour from it is to ensure tabwidth != indentsize. - # Further, usetabs will only ever go from 1->0, never 0->1. - # This is _not_ the behaviour Pythonwin wants: - # * Tab width is arbitary, so should have no impact on smarts. - # * bUseTabs setting should reflect how new files are created, and - # if Smart Tabs disabled, existing files are edited - # * If "Smart Tabs" is enabled, bUseTabs should have no bearing - # for existing files (unless of course no context can be determined) - # - # So for smart tabs we configure the widget with completely dummy - # values (ensuring tabwidth != indentwidth), ask it to guess, then - # look at the values it has guessed, and re-configure - if bSmartTabs: - ext.config(usetabs=1, tabwidth=5, indentwidth=4) - ext.set_indentation_params(1) - if ext.indentwidth == 5: - # Either 5 literal spaces, or a single tab character. Assume a tab - usetabs = 1 - indentwidth = tabSize - else: - # Either Indented with spaces, and indent size has been guessed or - # an empty file (or no context found - tough!) - if self.GetTextLength() == 0: # emtpy - usetabs = bUseTabs - indentwidth = indentSize - else: # guessed. - indentwidth = ext.indentwidth - usetabs = 0 - # Tab size can never be guessed - set at user preference. - ext.config(usetabs=usetabs, indentwidth=indentwidth, tabwidth=tabSize) - else: - # Dont want smart-tabs - just set the options! - ext.config(usetabs=bUseTabs, tabwidth=tabSize, indentwidth=indentSize) - self.SCISetIndent(indentSize) - self.SCISetTabWidth(tabSize) - - def OnDebuggerStateChange(self, state): - if state == dbgcon.DBGSTATE_NOT_DEBUGGING: - # Indicate breakpoints arent really usable. - # Not quite white - useful when no marker margin, so set as background color. 
- self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xEF, 0xEF, 0xEF)) - else: - # A light-red, so still readable when no marker margin. - self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xFF, 0x80, 0x80)) - - def HookDocumentHandlers(self): - SyntEditViewParent.HookDocumentHandlers(self) - self.HookMessage(self.OnCheckExternalDocumentUpdated, MSG_CHECK_EXTERNAL_FILE) - - def HookHandlers(self): - SyntEditViewParent.HookHandlers(self) - self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) - - def _PrepareUserStateChange(self): - return self.GetSel(), self.GetFirstVisibleLine() - - def _EndUserStateChange(self, info): - scrollOff = info[1] - self.GetFirstVisibleLine() - if scrollOff: - self.LineScroll(scrollOff) - # Make sure we dont reset the cursor beyond the buffer. - max = self.GetTextLength() - newPos = min(info[0][0], max), min(info[0][1], max) - self.SetSel(newPos) - - ####################################### - # The Windows Message or Notify handlers. - ####################################### - def OnMarginClick(self, std, extra): - notify = self.SCIUnpackNotifyMessage(extra) - if notify.margin == 2: # Our fold margin - line_click = self.LineFromChar(notify.position) - # max_line = self.GetLineCount() - if self.SCIGetFoldLevel(line_click) & scintillacon.SC_FOLDLEVELHEADERFLAG: - # If a fold point. - self.SCIToggleFold(line_click) - return 1 - - def OnSetFocus(self, msg): - # Even though we use file change notifications, we should be very sure about it here. 
- self.OnCheckExternalDocumentUpdated(msg) - return 1 - - def OnCheckExternalDocumentUpdated(self, msg): - if self.bCheckingFile: - return - self.bCheckingFile = 1 - self.GetDocument().CheckExternalDocumentUpdated() - self.bCheckingFile = 0 - - def OnRClick(self, params): - menu = win32ui.CreatePopupMenu() - self.AppendMenu(menu, "&Locate module", "LocateModule") - self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) - self.AppendMenu(menu, "&Undo", "EditUndo") - self.AppendMenu(menu, "&Redo", "EditRedo") - self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) - self.AppendMenu(menu, "Cu&t", "EditCut") - self.AppendMenu(menu, "&Copy", "EditCopy") - self.AppendMenu(menu, "&Paste", "EditPaste") - self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) - self.AppendMenu(menu, "&Select all", "EditSelectAll") - self.AppendMenu( - menu, "View &Whitespace", "ViewWhitespace", checked=self.SCIGetViewWS() - ) - self.AppendMenu( - menu, "&Fixed Font", "ViewFixedFont", checked=self._GetColorizer().bUseFixed - ) - self.AppendMenu(menu, flags=win32con.MF_SEPARATOR) - self.AppendMenu(menu, "&Goto line...", "GotoLine") - - submenu = win32ui.CreatePopupMenu() - newitems = self.idle.GetMenuItems("edit") - for text, event in newitems: - self.AppendMenu(submenu, text, event) - - flags = win32con.MF_STRING | win32con.MF_ENABLED | win32con.MF_POPUP - menu.AppendMenu(flags, submenu.GetHandle(), "&Source code") - - flags = ( - win32con.TPM_LEFTALIGN | win32con.TPM_LEFTBUTTON | win32con.TPM_RIGHTBUTTON - ) - menu.TrackPopupMenu(params[5], flags, self) - return 0 - - def OnCmdViewFold(self, cid, code): # Handle the menu command - if cid == win32ui.ID_VIEW_FOLD_EXPAND_ALL: - self.FoldExpandAllEvent(None) - elif cid == win32ui.ID_VIEW_FOLD_EXPAND: - self.FoldExpandEvent(None) - elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE_ALL: - self.FoldCollapseAllEvent(None) - elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE: - self.FoldCollapseEvent(None) - else: - print("Unknown collapse/expand ID") - - def 
OnUpdateViewFold(self, cmdui): # Update the tick on the UI. - if not self.bFolding: - cmdui.Enable(0) - return - id = cmdui.m_nID - if id in (win32ui.ID_VIEW_FOLD_EXPAND_ALL, win32ui.ID_VIEW_FOLD_COLLAPSE_ALL): - cmdui.Enable() - else: - enable = 0 - lineno = self.LineFromChar(self.GetSel()[0]) - foldable = ( - self.SCIGetFoldLevel(lineno) & scintillacon.SC_FOLDLEVELHEADERFLAG - ) - is_expanded = self.SCIGetFoldExpanded(lineno) - if id == win32ui.ID_VIEW_FOLD_EXPAND: - if foldable and not is_expanded: - enable = 1 - elif id == win32ui.ID_VIEW_FOLD_COLLAPSE: - if foldable and is_expanded: - enable = 1 - cmdui.Enable(enable) - - def OnCmdViewFoldTopLevel(self, cid, code): # Handle the menu command - self.FoldTopLevelEvent(None) - - ####################################### - # The Events - ####################################### - def ToggleBookmarkEvent(self, event, pos=-1): - """Toggle a bookmark at the specified or current position""" - if pos == -1: - pos, end = self.GetSel() - startLine = self.LineFromChar(pos) - self.GetDocument().MarkerToggle(startLine + 1, MARKER_BOOKMARK) - return 0 - - def GotoNextBookmarkEvent(self, event, fromPos=-1): - """Move to the next bookmark""" - if fromPos == -1: - fromPos, end = self.GetSel() - startLine = self.LineFromChar(fromPos) + 1 # Zero based line to start - nextLine = self.GetDocument().MarkerGetNext(startLine + 1, MARKER_BOOKMARK) - 1 - if nextLine < 0: - nextLine = self.GetDocument().MarkerGetNext(0, MARKER_BOOKMARK) - 1 - if nextLine < 0 or nextLine == startLine - 1: - win32api.MessageBeep() - else: - self.SCIEnsureVisible(nextLine) - self.SCIGotoLine(nextLine) - return 0 - - def TabKeyEvent(self, event): - """Insert an indent. If no selection, a single indent, otherwise a block indent""" - # Handle auto-complete first. - if self.SCIAutoCActive(): - self.SCIAutoCComplete() - return 0 - # Call the IDLE event. 
- return self.bindings.fire("<>", event) - - def EnterKeyEvent(self, event): - """Handle the enter key with special handling for auto-complete""" - # Handle auto-complete first. - if self.SCIAutoCActive(): - self.SCIAutoCComplete() - self.SCIAutoCCancel() - # Call the IDLE event. - return self.bindings.fire("<>", event) - - def ShowInteractiveWindowEvent(self, event): - import pywin.framework.interact - - pywin.framework.interact.ShowInteractiveWindow() - - def FoldTopLevelEvent(self, event=None): - if not self.bFolding: - return 1 - - win32ui.DoWaitCursor(1) - try: - self.Colorize() - maxLine = self.GetLineCount() - # Find the first line, and check out its state. - for lineSeek in range(maxLine): - if self.SCIGetFoldLevel(lineSeek) & scintillacon.SC_FOLDLEVELHEADERFLAG: - expanding = not self.SCIGetFoldExpanded(lineSeek) - break - else: - # no folds here! - return - for lineSeek in range(lineSeek, maxLine): - level = self.SCIGetFoldLevel(lineSeek) - level_no = ( - level - & scintillacon.SC_FOLDLEVELNUMBERMASK - - scintillacon.SC_FOLDLEVELBASE - ) - is_header = level & scintillacon.SC_FOLDLEVELHEADERFLAG - # print lineSeek, level_no, is_header - if level_no == 0 and is_header: - if (expanding and not self.SCIGetFoldExpanded(lineSeek)) or ( - not expanding and self.SCIGetFoldExpanded(lineSeek) - ): - self.SCIToggleFold(lineSeek) - finally: - win32ui.DoWaitCursor(-1) - - def FoldExpandSecondLevelEvent(self, event): - if not self.bFolding: - return 1 - win32ui.DoWaitCursor(1) - ## I think this is needed since Scintilla may not have - ## already formatted parts of file outside visible window. 
- self.Colorize() - levels = [scintillacon.SC_FOLDLEVELBASE] - ## Scintilla's level number is based on amount of whitespace indentation - for lineno in range(self.GetLineCount()): - level = self.SCIGetFoldLevel(lineno) - if not level & scintillacon.SC_FOLDLEVELHEADERFLAG: - continue - curr_level = level & scintillacon.SC_FOLDLEVELNUMBERMASK - if curr_level > levels[-1]: - levels.append(curr_level) - try: - level_ind = levels.index(curr_level) - except ValueError: - ## probably syntax error in source file, bail - break - levels = levels[: level_ind + 1] - if level_ind == 1 and not self.SCIGetFoldExpanded(lineno): - self.SCIToggleFold(lineno) - win32ui.DoWaitCursor(-1) - - def FoldCollapseSecondLevelEvent(self, event): - if not self.bFolding: - return 1 - win32ui.DoWaitCursor(1) - ## I think this is needed since Scintilla may not have - ## already formatted parts of file outside visible window. - self.Colorize() - levels = [scintillacon.SC_FOLDLEVELBASE] - ## Scintilla's level number is based on amount of whitespace indentation - for lineno in range(self.GetLineCount()): - level = self.SCIGetFoldLevel(lineno) - if not level & scintillacon.SC_FOLDLEVELHEADERFLAG: - continue - curr_level = level & scintillacon.SC_FOLDLEVELNUMBERMASK - if curr_level > levels[-1]: - levels.append(curr_level) - try: - level_ind = levels.index(curr_level) - except ValueError: - ## probably syntax error in source file, bail - break - levels = levels[: level_ind + 1] - if level_ind == 1 and self.SCIGetFoldExpanded(lineno): - self.SCIToggleFold(lineno) - win32ui.DoWaitCursor(-1) - - def FoldExpandEvent(self, event): - if not self.bFolding: - return 1 - win32ui.DoWaitCursor(1) - lineno = self.LineFromChar(self.GetSel()[0]) - if self.SCIGetFoldLevel( - lineno - ) & scintillacon.SC_FOLDLEVELHEADERFLAG and not self.SCIGetFoldExpanded(lineno): - self.SCIToggleFold(lineno) - win32ui.DoWaitCursor(-1) - - def FoldExpandAllEvent(self, event): - if not self.bFolding: - return 1 - 
win32ui.DoWaitCursor(1) - for lineno in range(0, self.GetLineCount()): - if self.SCIGetFoldLevel( - lineno - ) & scintillacon.SC_FOLDLEVELHEADERFLAG and not self.SCIGetFoldExpanded( - lineno - ): - self.SCIToggleFold(lineno) - win32ui.DoWaitCursor(-1) - - def FoldCollapseEvent(self, event): - if not self.bFolding: - return 1 - win32ui.DoWaitCursor(1) - lineno = self.LineFromChar(self.GetSel()[0]) - if self.SCIGetFoldLevel( - lineno - ) & scintillacon.SC_FOLDLEVELHEADERFLAG and self.SCIGetFoldExpanded(lineno): - self.SCIToggleFold(lineno) - win32ui.DoWaitCursor(-1) - - def FoldCollapseAllEvent(self, event): - if not self.bFolding: - return 1 - win32ui.DoWaitCursor(1) - self.Colorize() - for lineno in range(0, self.GetLineCount()): - if self.SCIGetFoldLevel( - lineno - ) & scintillacon.SC_FOLDLEVELHEADERFLAG and self.SCIGetFoldExpanded(lineno): - self.SCIToggleFold(lineno) - win32ui.DoWaitCursor(-1) - - -from pywin.framework.editor.frame import EditorFrame - - -class SplitterFrame(EditorFrame): - def OnCreate(self, cs): - self.HookCommand(self.OnWindowSplit, win32ui.ID_WINDOW_SPLIT) - return 1 - - def OnWindowSplit(self, id, code): - self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST).DoKeyboardSplit() - return 1 - - -from pywin.framework.editor.template import EditorTemplateBase - - -class SyntEditTemplate(EditorTemplateBase): - def __init__( - self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None - ): - if makeDoc is None: - makeDoc = SyntEditDocument - if makeView is None: - makeView = SyntEditView - if makeFrame is None: - makeFrame = SplitterFrame - self.bSetMenus = 0 - EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView) - - def CheckIDLEMenus(self, idle): - if self.bSetMenus: - return - self.bSetMenus = 1 - - submenu = win32ui.CreatePopupMenu() - newitems = idle.GetMenuItems("edit") - flags = win32con.MF_STRING | win32con.MF_ENABLED - for text, event in newitems: - id = bindings.event_to_commands.get(event) - if id is not None: - 
keyname = pywin.scintilla.view.configManager.get_key_binding( - event, ["editor"] - ) - if keyname is not None: - text = text + "\t" + keyname - submenu.AppendMenu(flags, id, text) - - mainMenu = self.GetSharedMenu() - editMenu = mainMenu.GetSubMenu(1) - editMenu.AppendMenu(win32con.MF_SEPARATOR, 0, "") - editMenu.AppendMenu( - win32con.MF_STRING | win32con.MF_POPUP | win32con.MF_ENABLED, - submenu.GetHandle(), - "&Source Code", - ) - - def _CreateDocTemplate(self, resourceId): - return win32ui.CreateDocTemplate(resourceId) - - def CreateWin32uiDocument(self): - return self.DoCreateDoc() - - def GetPythonPropertyPages(self): - """Returns a list of property pages""" - from pywin.scintilla import configui - - return EditorTemplateBase.GetPythonPropertyPages(self) + [ - configui.ScintillaFormatPropertyPage() - ] - - -# For debugging purposes, when this module may be reloaded many times. -try: - win32ui.GetApp().RemoveDocTemplate(editorTemplate) -except NameError: - pass - -editorTemplate = SyntEditTemplate() -win32ui.GetApp().AddDocTemplate(editorTemplate) diff --git a/lib/pythonwin/pywin/framework/editor/configui.py b/lib/pythonwin/pywin/framework/editor/configui.py deleted file mode 100644 index 903ad66c..00000000 --- a/lib/pythonwin/pywin/framework/editor/configui.py +++ /dev/null @@ -1,308 +0,0 @@ -import pywin.scintilla.config -import win32api -import win32con -import win32ui -from pywin.framework.editor import ( - DeleteEditorOption, - GetEditorFontOption, - GetEditorOption, - SetEditorFontOption, - SetEditorOption, - defaultCharacterFormat, - editorTemplate, -) -from pywin.mfc import dialog - -from . 
import document - -# The standard 16 color VGA palette should always be possible -paletteVGA = ( - ("Black", 0, 0, 0), - ("Navy", 0, 0, 128), - ("Green", 0, 128, 0), - ("Cyan", 0, 128, 128), - ("Maroon", 128, 0, 0), - ("Purple", 128, 0, 128), - ("Olive", 128, 128, 0), - ("Gray", 128, 128, 128), - ("Silver", 192, 192, 192), - ("Blue", 0, 0, 255), - ("Lime", 0, 255, 0), - ("Aqua", 0, 255, 255), - ("Red", 255, 0, 0), - ("Fuchsia", 255, 0, 255), - ("Yellow", 255, 255, 0), - ("White", 255, 255, 255), -) - - -###################################################### -# -# Property Page for editor options -# -class EditorPropertyPage(dialog.PropertyPage): - def __init__(self): - dialog.PropertyPage.__init__(self, win32ui.IDD_PP_EDITOR) - self.autooptions = [] - self._AddEditorOption(win32ui.IDC_AUTO_RELOAD, "i", "Auto Reload", 1) - self._AddEditorOption( - win32ui.IDC_COMBO1, "i", "Backup Type", document.BAK_DOT_BAK_BAK_DIR - ) - self._AddEditorOption( - win32ui.IDC_AUTOCOMPLETE, "i", "Autocomplete Attributes", 1 - ) - self._AddEditorOption(win32ui.IDC_CALLTIPS, "i", "Show Call Tips", 1) - self._AddEditorOption( - win32ui.IDC_MARGIN_LINENUMBER, "i", "Line Number Margin Width", 0 - ) - self._AddEditorOption(win32ui.IDC_RADIO1, "i", "MarkersInMargin", None) - self._AddEditorOption( - win32ui.IDC_MARGIN_MARKER, "i", "Marker Margin Width", None - ) - self["Marker Margin Width"] = GetEditorOption("Marker Margin Width", 16) - - # Folding - self._AddEditorOption(win32ui.IDC_MARGIN_FOLD, "i", "Fold Margin Width", 12) - self._AddEditorOption(win32ui.IDC_FOLD_ENABLE, "i", "Enable Folding", 1) - self._AddEditorOption(win32ui.IDC_FOLD_ON_OPEN, "i", "Fold On Open", 0) - self._AddEditorOption(win32ui.IDC_FOLD_SHOW_LINES, "i", "Fold Lines", 1) - - # Right edge. 
- self._AddEditorOption( - win32ui.IDC_RIGHTEDGE_ENABLE, "i", "Right Edge Enabled", 0 - ) - self._AddEditorOption( - win32ui.IDC_RIGHTEDGE_COLUMN, "i", "Right Edge Column", 75 - ) - - # Source control, etc - self.AddDDX(win32ui.IDC_VSS_INTEGRATE, "bVSS") - self.AddDDX(win32ui.IDC_KEYBOARD_CONFIG, "Configs", "l") - self["Configs"] = pywin.scintilla.config.find_config_files() - - def _AddEditorOption(self, idd, typ, optionName, defaultVal): - self.AddDDX(idd, optionName, typ) - # some options are "derived" - ie, can be implied from others - # (eg, "view markers in background" is implied from "markerMarginWidth==0" - # So we don't actually store these values, but they do still get DDX support. - if defaultVal is not None: - self[optionName] = GetEditorOption(optionName, defaultVal) - self.autooptions.append((optionName, defaultVal)) - - def OnInitDialog(self): - for name, val in self.autooptions: - self[name] = GetEditorOption(name, val) - - # Note that these MUST be in the same order as the BAK constants. 
- cbo = self.GetDlgItem(win32ui.IDC_COMBO1) - cbo.AddString("None") - cbo.AddString(".BAK File") - cbo.AddString("TEMP dir") - cbo.AddString("Own dir") - - # Source Safe - bVSS = ( - GetEditorOption("Source Control Module", "") == "pywin.framework.editor.vss" - ) - self["bVSS"] = bVSS - - edit = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE) - edit.SetWindowText("Sample Color") - - rc = dialog.PropertyPage.OnInitDialog(self) - - try: - self.GetDlgItem(win32ui.IDC_KEYBOARD_CONFIG).SelectString( - -1, GetEditorOption("Keyboard Config", "default") - ) - except win32ui.error: - import traceback - - traceback.print_exc() - pass - - self.HookCommand(self.OnButSimple, win32ui.IDC_FOLD_ENABLE) - self.HookCommand(self.OnButSimple, win32ui.IDC_RADIO1) - self.HookCommand(self.OnButSimple, win32ui.IDC_RADIO2) - self.HookCommand(self.OnButSimple, win32ui.IDC_RIGHTEDGE_ENABLE) - self.HookCommand(self.OnButEdgeColor, win32ui.IDC_RIGHTEDGE_DEFINE) - - butMarginEnabled = self["Marker Margin Width"] > 0 - self.GetDlgItem(win32ui.IDC_RADIO1).SetCheck(butMarginEnabled) - self.GetDlgItem(win32ui.IDC_RADIO2).SetCheck(not butMarginEnabled) - - self.edgeColor = self.initialEdgeColor = GetEditorOption( - "Right Edge Color", win32api.RGB(0xEF, 0xEF, 0xEF) - ) - for spinner_id in (win32ui.IDC_SPIN1, win32ui.IDC_SPIN2, win32ui.IDC_SPIN3): - spinner = self.GetDlgItem(spinner_id) - spinner.SetRange(0, 100) - self.UpdateUIForState() - - return rc - - def OnButSimple(self, id, code): - if code == win32con.BN_CLICKED: - self.UpdateUIForState() - - def OnButEdgeColor(self, id, code): - if code == win32con.BN_CLICKED: - d = win32ui.CreateColorDialog(self.edgeColor, 0, self) - # Ensure the current color is a custom color (as it may not be in the swatch) - # plus some other nice gray scales. 
- ccs = [self.edgeColor] - for c in range(0xEF, 0x4F, -0x10): - ccs.append(win32api.RGB(c, c, c)) - d.SetCustomColors(ccs) - if d.DoModal() == win32con.IDOK: - self.edgeColor = d.GetColor() - self.UpdateUIForState() - - def UpdateUIForState(self): - folding = self.GetDlgItem(win32ui.IDC_FOLD_ENABLE).GetCheck() - self.GetDlgItem(win32ui.IDC_FOLD_ON_OPEN).EnableWindow(folding) - self.GetDlgItem(win32ui.IDC_FOLD_SHOW_LINES).EnableWindow(folding) - - widthEnabled = self.GetDlgItem(win32ui.IDC_RADIO1).GetCheck() - self.GetDlgItem(win32ui.IDC_MARGIN_MARKER).EnableWindow(widthEnabled) - self.UpdateData() # Ensure self[] is up to date with the control data. - if widthEnabled and self["Marker Margin Width"] == 0: - self["Marker Margin Width"] = 16 - self.UpdateData(0) # Ensure control up to date with self[] - - # Right edge - edgeEnabled = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_ENABLE).GetCheck() - self.GetDlgItem(win32ui.IDC_RIGHTEDGE_COLUMN).EnableWindow(edgeEnabled) - self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE).EnableWindow(edgeEnabled) - self.GetDlgItem(win32ui.IDC_RIGHTEDGE_DEFINE).EnableWindow(edgeEnabled) - - edit = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE) - edit.SetBackgroundColor(0, self.edgeColor) - - def OnOK(self): - for name, defVal in self.autooptions: - SetEditorOption(name, self[name]) - # Margin width gets handled differently. 
- if self["MarkersInMargin"] == 0: - SetEditorOption("Marker Margin Width", self["Marker Margin Width"]) - else: - SetEditorOption("Marker Margin Width", 0) - if self.edgeColor != self.initialEdgeColor: - SetEditorOption("Right Edge Color", self.edgeColor) - if self["bVSS"]: - SetEditorOption("Source Control Module", "pywin.framework.editor.vss") - else: - if ( - GetEditorOption("Source Control Module", "") - == "pywin.framework.editor.vss" - ): - SetEditorOption("Source Control Module", "") - # Keyboard config - configname = self.GetDlgItem(win32ui.IDC_KEYBOARD_CONFIG).GetWindowText() - if configname: - if configname == "default": - DeleteEditorOption("Keyboard Config") - else: - SetEditorOption("Keyboard Config", configname) - - import pywin.scintilla.view - - pywin.scintilla.view.LoadConfiguration() - - # Now tell all views we have changed. - ## for doc in editorTemplate.GetDocumentList(): - ## for view in doc.GetAllViews(): - ## try: - ## fn = view.OnConfigChange - ## except AttributeError: - ## continue - ## fn() - return 1 - - -class EditorWhitespacePropertyPage(dialog.PropertyPage): - def __init__(self): - dialog.PropertyPage.__init__(self, win32ui.IDD_PP_TABS) - self.autooptions = [] - self._AddEditorOption(win32ui.IDC_TAB_SIZE, "i", "Tab Size", 4) - self._AddEditorOption(win32ui.IDC_INDENT_SIZE, "i", "Indent Size", 4) - self._AddEditorOption(win32ui.IDC_USE_SMART_TABS, "i", "Smart Tabs", 1) - self._AddEditorOption(win32ui.IDC_VIEW_WHITESPACE, "i", "View Whitespace", 0) - self._AddEditorOption(win32ui.IDC_VIEW_EOL, "i", "View EOL", 0) - self._AddEditorOption( - win32ui.IDC_VIEW_INDENTATIONGUIDES, "i", "View Indentation Guides", 0 - ) - - def _AddEditorOption(self, idd, typ, optionName, defaultVal): - self.AddDDX(idd, optionName, typ) - self[optionName] = GetEditorOption(optionName, defaultVal) - self.autooptions.append((optionName, defaultVal)) - - def OnInitDialog(self): - for name, val in self.autooptions: - self[name] = GetEditorOption(name, val) - - rc 
= dialog.PropertyPage.OnInitDialog(self) - - idc = win32ui.IDC_TABTIMMY_NONE - if GetEditorOption("Use Tab Timmy", 1): - idc = win32ui.IDC_TABTIMMY_IND - self.GetDlgItem(idc).SetCheck(1) - - idc = win32ui.IDC_RADIO1 - if GetEditorOption("Use Tabs", 0): - idc = win32ui.IDC_USE_TABS - self.GetDlgItem(idc).SetCheck(1) - - tt_color = GetEditorOption("Tab Timmy Color", win32api.RGB(0xFF, 0, 0)) - self.cbo = self.GetDlgItem(win32ui.IDC_COMBO1) - for c in paletteVGA: - self.cbo.AddString(c[0]) - sel = 0 - for c in paletteVGA: - if tt_color == win32api.RGB(c[1], c[2], c[3]): - break - sel = sel + 1 - else: - sel = -1 - self.cbo.SetCurSel(sel) - self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_NONE) - self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_IND) - self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_BG) - # Set ranges for the spinners. - for spinner_id in [win32ui.IDC_SPIN1, win32ui.IDC_SPIN2]: - spinner = self.GetDlgItem(spinner_id) - spinner.SetRange(1, 16) - return rc - - def OnButSimple(self, id, code): - if code == win32con.BN_CLICKED: - self.UpdateUIForState() - - def UpdateUIForState(self): - timmy = self.GetDlgItem(win32ui.IDC_TABTIMMY_NONE).GetCheck() - self.GetDlgItem(win32ui.IDC_COMBO1).EnableWindow(not timmy) - - def OnOK(self): - for name, defVal in self.autooptions: - SetEditorOption(name, self[name]) - - SetEditorOption("Use Tabs", self.GetDlgItem(win32ui.IDC_USE_TABS).GetCheck()) - - SetEditorOption( - "Use Tab Timmy", self.GetDlgItem(win32ui.IDC_TABTIMMY_IND).GetCheck() - ) - c = paletteVGA[self.cbo.GetCurSel()] - SetEditorOption("Tab Timmy Color", win32api.RGB(c[1], c[2], c[3])) - - return 1 - - -def testpp(): - ps = dialog.PropertySheet("Editor Options") - ps.AddPage(EditorWhitespacePropertyPage()) - ps.DoModal() - - -if __name__ == "__main__": - testpp() diff --git a/lib/pythonwin/pywin/framework/editor/document.py b/lib/pythonwin/pywin/framework/editor/document.py deleted file mode 100644 index e66947b8..00000000 --- 
a/lib/pythonwin/pywin/framework/editor/document.py +++ /dev/null @@ -1,378 +0,0 @@ -# We no longer support the old, non-colour editor! - -import os -import shutil -import traceback - -import win32api -import win32con -import win32ui -from pywin.framework.editor import GetEditorOption -from pywin.mfc import docview, object - -BAK_NONE = 0 -BAK_DOT_BAK = 1 -BAK_DOT_BAK_TEMP_DIR = 2 -BAK_DOT_BAK_BAK_DIR = 3 - -MSG_CHECK_EXTERNAL_FILE = ( - win32con.WM_USER + 1999 -) ## WARNING: Duplicated in editor.py and coloreditor.py - -import pywin.scintilla.document - -ParentEditorDocument = pywin.scintilla.document.CScintillaDocument - - -class EditorDocumentBase(ParentEditorDocument): - def __init__(self, template): - self.bAutoReload = GetEditorOption("Auto Reload", 1) - self.bDeclinedReload = 0 # Has the user declined to reload. - self.fileStat = None - self.bReportedFileNotFound = 0 - - # what sort of bak file should I create. - # default to write to %temp%/bak/filename.ext - self.bakFileType = GetEditorOption("Backup Type", BAK_DOT_BAK_BAK_DIR) - - self.watcherThread = FileWatchingThread(self) - self.watcherThread.CreateThread() - # Should I try and use VSS integration? - self.scModuleName = GetEditorOption("Source Control Module", "") - self.scModule = None # Loaded when first used. - ParentEditorDocument.__init__(self, template, template.CreateWin32uiDocument()) - - def OnCloseDocument(self): - self.watcherThread.SignalStop() - return self._obj_.OnCloseDocument() - - # def OnOpenDocument(self, name): - # rc = ParentEditorDocument.OnOpenDocument(self, name) - # self.GetFirstView()._SetLoadedText(self.text) - # self._DocumentStateChanged() - # return rc - - def OnSaveDocument(self, fileName): - win32ui.SetStatusText("Saving file...", 1) - # rename to bak if required. 
- dir, basename = os.path.split(fileName) - if self.bakFileType == BAK_DOT_BAK: - bakFileName = dir + "\\" + os.path.splitext(basename)[0] + ".bak" - elif self.bakFileType == BAK_DOT_BAK_TEMP_DIR: - bakFileName = ( - win32api.GetTempPath() + "\\" + os.path.splitext(basename)[0] + ".bak" - ) - elif self.bakFileType == BAK_DOT_BAK_BAK_DIR: - tempPath = os.path.join(win32api.GetTempPath(), "bak") - try: - os.mkdir(tempPath, 0) - except os.error: - pass - bakFileName = os.path.join(tempPath, basename) - try: - os.unlink(bakFileName) # raise NameError if no bakups wanted. - except (os.error, NameError): - pass - try: - # Do a copy as it might be on different volumes, - # and the file may be a hard-link, causing the link - # to follow the backup. - shutil.copy2(fileName, bakFileName) - except (os.error, NameError, IOError): - pass - try: - self.SaveFile(fileName) - except IOError as details: - win32ui.MessageBox("Error - could not save file\r\n\r\n%s" % details) - return 0 - except (UnicodeEncodeError, LookupError) as details: - rc = win32ui.MessageBox( - "Encoding failed: \r\n%s" % details - + "\r\nPlease add desired source encoding as first line of file, eg \r\n" - + "# -*- coding: mbcs -*-\r\n\r\n" - + "If you continue, the file will be saved as binary and will\r\n" - + "not be valid in the declared encoding.\r\n\r\n" - + "Save the file as binary with an invalid encoding?", - "File save failed", - win32con.MB_YESNO | win32con.MB_DEFBUTTON2, - ) - if rc == win32con.IDYES: - try: - self.SaveFile(fileName, encoding="latin-1") - except IOError as details: - win32ui.MessageBox( - "Error - could not save file\r\n\r\n%s" % details - ) - return 0 - else: - return 0 - self.SetModifiedFlag(0) # No longer dirty - self.bDeclinedReload = 0 # They probably want to know if it changes again! 
- win32ui.AddToRecentFileList(fileName) - self.SetPathName(fileName) - win32ui.SetStatusText("Ready") - self._DocumentStateChanged() - return 1 - - def FinalizeViewCreation(self, view): - ParentEditorDocument.FinalizeViewCreation(self, view) - if view == self.GetFirstView(): - self._DocumentStateChanged() - if view.bFolding and GetEditorOption("Fold On Open", 0): - view.FoldTopLevelEvent() - - def HookViewNotifications(self, view): - ParentEditorDocument.HookViewNotifications(self, view) - - # Support for reloading the document from disk - presumably after some - # external application has modified it (or possibly source control has - # checked it out. - def ReloadDocument(self): - """Reloads the document from disk. Assumes the file has - been saved and user has been asked if necessary - it just does it! - """ - win32ui.SetStatusText("Reloading document. Please wait...", 1) - self.SetModifiedFlag(0) - # Loop over all views, saving their state, then reload the document - views = self.GetAllViews() - states = [] - for view in views: - try: - info = view._PrepareUserStateChange() - except AttributeError: # Not our editor view? - info = None - states.append(info) - self.OnOpenDocument(self.GetPathName()) - for view, info in zip(views, states): - if info is not None: - view._EndUserStateChange(info) - self._DocumentStateChanged() - win32ui.SetStatusText("Document reloaded.") - - # Reloading the file - def CheckExternalDocumentUpdated(self): - if self.bDeclinedReload or not self.GetPathName(): - return - try: - newstat = os.stat(self.GetPathName()) - except os.error as exc: - if not self.bReportedFileNotFound: - print( - "The file '%s' is open for editing, but\nchecking it for changes caused the error: %s" - % (self.GetPathName(), exc.strerror) - ) - self.bReportedFileNotFound = 1 - return - if self.bReportedFileNotFound: - print( - "The file '%s' has re-appeared - continuing to watch for changes..." 
- % (self.GetPathName(),) - ) - self.bReportedFileNotFound = ( - 0 # Once found again we want to start complaining. - ) - changed = ( - (self.fileStat is None) - or self.fileStat[0] != newstat[0] - or self.fileStat[6] != newstat[6] - or self.fileStat[8] != newstat[8] - or self.fileStat[9] != newstat[9] - ) - if changed: - question = None - if self.IsModified(): - question = ( - "%s\r\n\r\nThis file has been modified outside of the source editor.\r\nDo you want to reload it and LOSE THE CHANGES in the source editor?" - % self.GetPathName() - ) - mbStyle = win32con.MB_YESNO | win32con.MB_DEFBUTTON2 # Default to "No" - else: - if not self.bAutoReload: - question = ( - "%s\r\n\r\nThis file has been modified outside of the source editor.\r\nDo you want to reload it?" - % self.GetPathName() - ) - mbStyle = win32con.MB_YESNO # Default to "Yes" - if question: - rc = win32ui.MessageBox(question, None, mbStyle) - if rc != win32con.IDYES: - self.bDeclinedReload = 1 - return - self.ReloadDocument() - - def _DocumentStateChanged(self): - """Called whenever the documents state (on disk etc) has been changed - by the editor (eg, as the result of a save operation) - """ - if self.GetPathName(): - try: - self.fileStat = os.stat(self.GetPathName()) - except os.error: - self.fileStat = None - else: - self.fileStat = None - self.watcherThread._DocumentStateChanged() - self._UpdateUIForState() - self._ApplyOptionalToViews("_UpdateUIForState") - self._ApplyOptionalToViews("SetReadOnly", self._IsReadOnly()) - self._ApplyOptionalToViews("SCISetSavePoint") - # Allow the debugger to reset us too. - import pywin.debugger - - if pywin.debugger.currentDebugger is not None: - pywin.debugger.currentDebugger.UpdateDocumentLineStates(self) - - # Read-only document support - make it obvious to the user - # that the file is read-only. 
- def _IsReadOnly(self): - return self.fileStat is not None and (self.fileStat[0] & 128) == 0 - - def _UpdateUIForState(self): - """Change the title to reflect the state of the document - - eg ReadOnly, Dirty, etc - """ - filename = self.GetPathName() - if not filename: - return # New file - nothing to do - try: - # This seems necessary so the internal state of the window becomes - # "visible". without it, it is still shown, but certain functions - # (such as updating the title) dont immediately work? - self.GetFirstView().ShowWindow(win32con.SW_SHOW) - title = win32ui.GetFileTitle(filename) - except win32ui.error: - title = filename - if self._IsReadOnly(): - title = title + " (read-only)" - self.SetTitle(title) - - def MakeDocumentWritable(self): - pretend_ss = 0 # Set to 1 to test this without source safe :-) - if not self.scModuleName and not pretend_ss: # No Source Control support. - win32ui.SetStatusText( - "Document is read-only, and no source-control system is configured" - ) - win32api.MessageBeep() - return 0 - - # We have source control support - check if the user wants to use it. - msg = "Would you like to check this file out?" - defButton = win32con.MB_YESNO - if self.IsModified(): - msg = msg + "\r\n\r\nALL CHANGES IN THE EDITOR WILL BE LOST" - defButton = win32con.MB_YESNO - if win32ui.MessageBox(msg, None, defButton) != win32con.IDYES: - return 0 - - if pretend_ss: - print("We are only pretending to check it out!") - win32api.SetFileAttributes( - self.GetPathName(), win32con.FILE_ATTRIBUTE_NORMAL - ) - self.ReloadDocument() - return 1 - - # Now call on the module to do it. 
- if self.scModule is None: - try: - self.scModule = __import__(self.scModuleName) - for part in self.scModuleName.split(".")[1:]: - self.scModule = getattr(self.scModule, part) - except: - traceback.print_exc() - print("Error loading source control module.") - return 0 - - if self.scModule.CheckoutFile(self.GetPathName()): - self.ReloadDocument() - return 1 - return 0 - - def CheckMakeDocumentWritable(self): - if self._IsReadOnly(): - return self.MakeDocumentWritable() - return 1 - - def SaveModified(self): - # Called as the document is closed. If we are about - # to prompt for a save, bring the document to the foreground. - if self.IsModified(): - frame = self.GetFirstView().GetParentFrame() - try: - frame.MDIActivate() - frame.AutoRestore() - except: - print("Could not bring document to foreground") - return self._obj_.SaveModified() - - -# NOTE - I DONT use the standard threading module, -# as this waits for all threads to terminate at shutdown. -# When using the debugger, it is possible shutdown will -# occur without Pythonwin getting a complete shutdown, -# so we deadlock at the end - threading is waiting for -import pywin.mfc.thread -import win32event - - -class FileWatchingThread(pywin.mfc.thread.WinThread): - def __init__(self, doc): - self.doc = doc - self.adminEvent = win32event.CreateEvent(None, 0, 0, None) - self.stopEvent = win32event.CreateEvent(None, 0, 0, None) - self.watchEvent = None - pywin.mfc.thread.WinThread.__init__(self) - - def _DocumentStateChanged(self): - win32event.SetEvent(self.adminEvent) - - def RefreshEvent(self): - self.hwnd = self.doc.GetFirstView().GetSafeHwnd() - if self.watchEvent is not None: - win32api.FindCloseChangeNotification(self.watchEvent) - self.watchEvent = None - path = self.doc.GetPathName() - if path: - path = os.path.dirname(path) - if path: - filter = ( - win32con.FILE_NOTIFY_CHANGE_FILE_NAME - | win32con.FILE_NOTIFY_CHANGE_ATTRIBUTES - | win32con.FILE_NOTIFY_CHANGE_LAST_WRITE - ) - try: - self.watchEvent = 
win32api.FindFirstChangeNotification(path, 0, filter) - except win32api.error as exc: - print("Can not watch file", path, "for changes -", exc.strerror) - - def SignalStop(self): - win32event.SetEvent(self.stopEvent) - - def Run(self): - while 1: - handles = [self.stopEvent, self.adminEvent] - if self.watchEvent is not None: - handles.append(self.watchEvent) - rc = win32event.WaitForMultipleObjects(handles, 0, win32event.INFINITE) - if rc == win32event.WAIT_OBJECT_0: - break - elif rc == win32event.WAIT_OBJECT_0 + 1: - self.RefreshEvent() - else: - win32api.PostMessage(self.hwnd, MSG_CHECK_EXTERNAL_FILE, 0, 0) - try: - # If the directory has been removed underneath us, we get this error. - win32api.FindNextChangeNotification(self.watchEvent) - except win32api.error as exc: - print( - "Can not watch file", - self.doc.GetPathName(), - "for changes -", - exc.strerror, - ) - break - - # close a circular reference - self.doc = None - if self.watchEvent: - win32api.FindCloseChangeNotification(self.watchEvent) diff --git a/lib/pythonwin/pywin/framework/editor/editor.py b/lib/pythonwin/pywin/framework/editor/editor.py deleted file mode 100644 index aa1f5385..00000000 --- a/lib/pythonwin/pywin/framework/editor/editor.py +++ /dev/null @@ -1,516 +0,0 @@ -##################################################################### -# -# editor.py -# -# A general purpose text editor, built on top of the win32ui edit -# type, which is built on an MFC CEditView -# -# -# We now support reloading of externally modified documented -# (eg, presumably by some other process, such as source control or -# another editor. -# We also suport auto-loading of externally modified files. -# - if the current document has not been modified in this -# editor, but has been modified on disk, then the file -# can be automatically reloaded. -# -# Note that it will _always_ prompt you if the file in the editor has been modified. 
- - -import re - -import regex -import win32api -import win32con -import win32ui -from pywin.framework.editor import ( - GetEditorFontOption, - GetEditorOption, - SetEditorFontOption, - SetEditorOption, - defaultCharacterFormat, -) -from pywin.mfc import afxres, dialog, docview - -patImport = regex.symcomp("import \(.*\)") -patIndent = regex.compile("^\\([ \t]*[~ \t]\\)") - -ID_LOCATE_FILE = 0xE200 -ID_GOTO_LINE = 0xE2001 -MSG_CHECK_EXTERNAL_FILE = ( - win32con.WM_USER + 1999 -) ## WARNING: Duplicated in document.py and coloreditor.py - -# Key Codes that modify the bufffer when Ctrl or Alt are NOT pressed. -MODIFYING_VK_KEYS = [ - win32con.VK_BACK, - win32con.VK_TAB, - win32con.VK_RETURN, - win32con.VK_SPACE, - win32con.VK_DELETE, -] -for k in range(48, 91): - MODIFYING_VK_KEYS.append(k) - -# Key Codes that modify the bufffer when Ctrl is pressed. -MODIFYING_VK_KEYS_CTRL = [ - win32con.VK_BACK, - win32con.VK_RETURN, - win32con.VK_SPACE, - win32con.VK_DELETE, -] - -# Key Codes that modify the bufffer when Alt is pressed. -MODIFYING_VK_KEYS_ALT = [ - win32con.VK_BACK, - win32con.VK_RETURN, - win32con.VK_SPACE, - win32con.VK_DELETE, -] - - -# The editor itself starts here. -# Using the MFC Document/View model, we have an EditorDocument, which is responsible for -# managing the contents of the file, and a view which is responsible for rendering it. -# -# Due to a limitation in the Windows edit controls, we are limited to one view -# per document, although nothing in this code assumes this (I hope!) - -isRichText = 1 # We are using the Rich Text control. This has not been tested with value "0" for quite some time! - -# ParentEditorDocument=docview.Document -from .document import EditorDocumentBase - -ParentEditorDocument = EditorDocumentBase - - -class EditorDocument(ParentEditorDocument): - # - # File loading and saving operations - # - def OnOpenDocument(self, filename): - # - # handle Unix and PC text file format. 
- # - - # Get the "long name" of the file name, as it may have been translated - # to short names by the shell. - self.SetPathName(filename) # Must set this early! - # Now do the work! - self.BeginWaitCursor() - win32ui.SetStatusText("Loading file...", 1) - try: - f = open(filename, "rb") - except IOError: - win32ui.MessageBox( - filename - + "\nCan not find this file\nPlease verify that the correct path and file name are given" - ) - self.EndWaitCursor() - return 0 - raw = f.read() - f.close() - contents = self.TranslateLoadedData(raw) - rc = 0 - try: - self.GetFirstView().SetWindowText(contents) - rc = 1 - except TypeError: # Null byte in file. - win32ui.MessageBox("This file contains NULL bytes, and can not be edited") - rc = 0 - - self.EndWaitCursor() - self.SetModifiedFlag(0) # No longer dirty - self._DocumentStateChanged() - return rc - - def TranslateLoadedData(self, data): - """Given raw data read from a file, massage it suitable for the edit window""" - # if a CR in the first 250 chars, then perform the expensive translate - if data[:250].find("\r") == -1: - win32ui.SetStatusText( - "Translating from Unix file format - please wait...", 1 - ) - return re.sub("\r*\n", "\r\n", data) - else: - return data - - def SaveFile(self, fileName, encoding=None): - if isRichText: - view = self.GetFirstView() - view.SaveTextFile(fileName, encoding=encoding) - else: # Old style edit view window. - self.GetFirstView().SaveFile(fileName) - try: - # Make sure line cache has updated info about me! - import linecache - - linecache.checkcache() - except: - pass - - # - # Color state stuff - # - def SetAllLineColors(self, color=None): - for view in self.GetAllViews(): - view.SetAllLineColors(color) - - def SetLineColor(self, lineNo, color): - "Color a line of all views" - for view in self.GetAllViews(): - view.SetLineColor(lineNo, color) - - -# def StreamTextOut(self, data): ### This seems unreliable??? -# self.saveFileHandle.write(data) -# return 1 # keep em coming! 
- -# ParentEditorView=docview.EditView -ParentEditorView = docview.RichEditView - - -class EditorView(ParentEditorView): - def __init__(self, doc): - ParentEditorView.__init__(self, doc) - if isRichText: - self.SetWordWrap(win32ui.CRichEditView_WrapNone) - - self.addToMRU = 1 - self.HookHandlers() - self.bCheckingFile = 0 - - self.defCharFormat = GetEditorFontOption("Default Font", defaultCharacterFormat) - - # Smart tabs override everything else if context can be worked out. - self.bSmartTabs = GetEditorOption("Smart Tabs", 1) - - self.tabSize = GetEditorOption("Tab Size", 8) - self.indentSize = GetEditorOption("Indent Size", 8) - # If next indent is at a tab position, and useTabs is set, a tab will be inserted. - self.bUseTabs = GetEditorOption("Use Tabs", 1) - - def OnInitialUpdate(self): - rc = self._obj_.OnInitialUpdate() - self.SetDefaultCharFormat(self.defCharFormat) - return rc - - def CutCurLine(self): - curLine = self._obj_.LineFromChar() - nextLine = curLine + 1 - start = self._obj_.LineIndex(curLine) - end = self._obj_.LineIndex(nextLine) - if end == 0: # must be last line. 
- end = start + self.end.GetLineLength(curLine) - self._obj_.SetSel(start, end) - self._obj_.Cut() - - def _PrepareUserStateChange(self): - "Return selection, lineindex, etc info, so it can be restored" - self.SetRedraw(0) - return self.GetModify(), self.GetSel(), self.GetFirstVisibleLine() - - def _EndUserStateChange(self, info): - scrollOff = info[2] - self.GetFirstVisibleLine() - if scrollOff: - self.LineScroll(scrollOff) - self.SetSel(info[1]) - self.SetModify(info[0]) - self.SetRedraw(1) - self.InvalidateRect() - self.UpdateWindow() - - def _UpdateUIForState(self): - self.SetReadOnly(self.GetDocument()._IsReadOnly()) - - def SetAllLineColors(self, color=None): - if isRichText: - info = self._PrepareUserStateChange() - try: - if color is None: - color = self.defCharFormat[4] - self.SetSel(0, -1) - self.SetSelectionCharFormat((win32con.CFM_COLOR, 0, 0, 0, color)) - finally: - self._EndUserStateChange(info) - - def SetLineColor(self, lineNo, color): - "lineNo is the 1 based line number to set. If color is None, default color is used." - if isRichText: - info = self._PrepareUserStateChange() - try: - if color is None: - color = self.defCharFormat[4] - lineNo = lineNo - 1 - startIndex = self.LineIndex(lineNo) - if startIndex != -1: - self.SetSel(startIndex, self.LineIndex(lineNo + 1)) - self.SetSelectionCharFormat((win32con.CFM_COLOR, 0, 0, 0, color)) - finally: - self._EndUserStateChange(info) - - def Indent(self): - """Insert an indent to move the cursor to the next tab position. - - Honors the tab size and 'use tabs' settings. Assumes the cursor is already at the - position to be indented, and the selection is a single character (ie, not a block) - """ - start, end = self._obj_.GetSel() - startLine = self._obj_.LineFromChar(start) - line = self._obj_.GetLine(startLine) - realCol = start - self._obj_.LineIndex(startLine) - # Calulate the next tab stop. - # Expand existing tabs. 
- curCol = 0 - for ch in line[:realCol]: - if ch == "\t": - curCol = ((curCol / self.tabSize) + 1) * self.tabSize - else: - curCol = curCol + 1 - nextColumn = ((curCol / self.indentSize) + 1) * self.indentSize - # print "curCol is", curCol, "nextColumn is", nextColumn - ins = None - if self.bSmartTabs: - # Look for some context. - if realCol == 0: # Start of the line - see if the line above can tell us - lookLine = startLine - 1 - while lookLine >= 0: - check = self._obj_.GetLine(lookLine)[0:1] - if check in ("\t", " "): - ins = check - break - lookLine = lookLine - 1 - else: # See if the previous char can tell us - check = line[realCol - 1] - if check in ("\t", " "): - ins = check - - # Either smart tabs off, or not smart enough! - # Use the "old style" settings. - if ins is None: - if self.bUseTabs and nextColumn % self.tabSize == 0: - ins = "\t" - else: - ins = " " - - if ins == " ": - # Calc the number of spaces to take us to the next stop - ins = ins * (nextColumn - curCol) - - self._obj_.ReplaceSel(ins) - - def BlockDent(self, isIndent, startLine, endLine): - "Indent/Undent all lines specified" - if not self.GetDocument().CheckMakeDocumentWritable(): - return 0 - tabSize = self.tabSize # hard-code for now! 
- info = self._PrepareUserStateChange() - try: - for lineNo in range(startLine, endLine): - pos = self._obj_.LineIndex(lineNo) - self._obj_.SetSel(pos, pos) - if isIndent: - self.Indent() - else: - line = self._obj_.GetLine(lineNo) - try: - noToDel = 0 - if line[0] == "\t": - noToDel = 1 - elif line[0] == " ": - for noToDel in range(0, tabSize): - if line[noToDel] != " ": - break - else: - noToDel = tabSize - if noToDel: - self._obj_.SetSel(pos, pos + noToDel) - self._obj_.Clear() - except IndexError: - pass - finally: - self._EndUserStateChange(info) - self.GetDocument().SetModifiedFlag(1) # Now dirty - self._obj_.SetSel(self.LineIndex(startLine), self.LineIndex(endLine)) - - def GotoLine(self, lineNo=None): - try: - if lineNo is None: - lineNo = int(input("Enter Line Number")) - except (ValueError, KeyboardInterrupt): - return 0 - self.GetLineCount() # Seems to be needed when file first opened??? - charNo = self.LineIndex(lineNo - 1) - self.SetSel(charNo) - - def HookHandlers(self): # children can override, but should still call me! - # self.HookAllKeyStrokes(self.OnKey) - self.HookMessage(self.OnCheckExternalDocumentUpdated, MSG_CHECK_EXTERNAL_FILE) - self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN) - self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) - self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN) - self.HookKeyStroke(self.OnKeyCtrlY, 25) # ^Y - self.HookKeyStroke(self.OnKeyCtrlG, 7) # ^G - self.HookKeyStroke(self.OnKeyTab, 9) # TAB - self.HookKeyStroke(self.OnKeyEnter, 13) # Enter - self.HookCommand(self.OnCmdLocateFile, ID_LOCATE_FILE) - self.HookCommand(self.OnCmdGotoLine, ID_GOTO_LINE) - self.HookCommand(self.OnEditPaste, afxres.ID_EDIT_PASTE) - self.HookCommand(self.OnEditCut, afxres.ID_EDIT_CUT) - - # Hook Handlers - def OnSetFocus(self, msg): - # Even though we use file change notifications, we should be very sure about it here. 
- self.OnCheckExternalDocumentUpdated(msg) - - def OnRClick(self, params): - menu = win32ui.CreatePopupMenu() - - # look for a module name - line = self._obj_.GetLine().strip() - flags = win32con.MF_STRING | win32con.MF_ENABLED - if patImport.match(line) == len(line): - menu.AppendMenu( - flags, ID_LOCATE_FILE, "&Locate %s.py" % patImport.group("name") - ) - menu.AppendMenu(win32con.MF_SEPARATOR) - menu.AppendMenu(flags, win32ui.ID_EDIT_UNDO, "&Undo") - menu.AppendMenu(win32con.MF_SEPARATOR) - menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, "Cu&t") - menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, "&Copy") - menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, "&Paste") - menu.AppendMenu(flags, win32con.MF_SEPARATOR) - menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, "&Select all") - menu.AppendMenu(flags, win32con.MF_SEPARATOR) - menu.AppendMenu(flags, ID_GOTO_LINE, "&Goto line...") - menu.TrackPopupMenu(params[5]) - return 0 - - def OnCmdGotoLine(self, cmd, code): - self.GotoLine() - return 0 - - def OnCmdLocateFile(self, cmd, code): - modName = patImport.group("name") - if not modName: - return 0 - import pywin.framework.scriptutils - - fileName = pywin.framework.scriptutils.LocatePythonFile(modName) - if fileName is None: - win32ui.SetStatusText("Can't locate module %s" % modName) - else: - win32ui.GetApp().OpenDocumentFile(fileName) - return 0 - - # Key handlers - def OnKeyEnter(self, key): - if not self.GetDocument().CheckMakeDocumentWritable(): - return 0 - curLine = self._obj_.GetLine() - self._obj_.ReplaceSel("\r\n") # insert the newline - # If the current line indicates the next should be indented, - # then copy the current indentation to this line. - res = patIndent.match(curLine, 0) - if res > 0 and curLine.strip(): - curIndent = patIndent.group(1) - self._obj_.ReplaceSel(curIndent) - return 0 # dont pass on - - def OnKeyCtrlY(self, key): - if not self.GetDocument().CheckMakeDocumentWritable(): - return 0 - self.CutCurLine() - return 0 # dont let him have it! 
- - def OnKeyCtrlG(self, key): - self.GotoLine() - return 0 # dont let him have it! - - def OnKeyTab(self, key): - if not self.GetDocument().CheckMakeDocumentWritable(): - return 0 - start, end = self._obj_.GetSel() - if start == end: # normal TAB key - self.Indent() - return 0 # we handled this. - - # Otherwise it is a block indent/dedent. - if start > end: - start, end = end, start # swap them. - startLine = self._obj_.LineFromChar(start) - endLine = self._obj_.LineFromChar(end) - - self.BlockDent(win32api.GetKeyState(win32con.VK_SHIFT) >= 0, startLine, endLine) - return 0 - - def OnEditPaste(self, id, code): - # Return 1 if we can make the file editable.(or it already is!) - return self.GetDocument().CheckMakeDocumentWritable() - - def OnEditCut(self, id, code): - # Return 1 if we can make the file editable.(or it already is!) - return self.GetDocument().CheckMakeDocumentWritable() - - def OnKeyDown(self, msg): - key = msg[2] - if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000: - modList = MODIFYING_VK_KEYS_CTRL - elif win32api.GetKeyState(win32con.VK_MENU) & 0x8000: - modList = MODIFYING_VK_KEYS_ALT - else: - modList = MODIFYING_VK_KEYS - - if key in modList: - # Return 1 if we can make the file editable.(or it already is!) 
- return self.GetDocument().CheckMakeDocumentWritable() - return 1 # Pass it on OK - - # def OnKey(self, key): - # return self.GetDocument().CheckMakeDocumentWritable() - - def OnCheckExternalDocumentUpdated(self, msg): - if self._obj_ is None or self.bCheckingFile: - return - self.bCheckingFile = 1 - self.GetDocument().CheckExternalDocumentUpdated() - self.bCheckingFile = 0 - - -from .template import EditorTemplateBase - - -class EditorTemplate(EditorTemplateBase): - def __init__( - self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None - ): - if makeDoc is None: - makeDoc = EditorDocument - if makeView is None: - makeView = EditorView - EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView) - - def _CreateDocTemplate(self, resourceId): - return win32ui.CreateRichEditDocTemplate(resourceId) - - def CreateWin32uiDocument(self): - return self.DoCreateRichEditDoc() - - -def Create(fileName=None, title=None, template=None): - return editorTemplate.OpenDocumentFile(fileName) - - -from pywin.framework.editor import GetDefaultEditorModuleName - -prefModule = GetDefaultEditorModuleName() -# Initialize only if this is the "default" editor. -if __name__ == prefModule: - # For debugging purposes, when this module may be reloaded many times. - try: - win32ui.GetApp().RemoveDocTemplate(editorTemplate) - except (NameError, win32ui.error): - pass - - editorTemplate = EditorTemplate() - win32ui.GetApp().AddDocTemplate(editorTemplate) diff --git a/lib/pythonwin/pywin/framework/editor/frame.py b/lib/pythonwin/pywin/framework/editor/frame.py deleted file mode 100644 index e927d16e..00000000 --- a/lib/pythonwin/pywin/framework/editor/frame.py +++ /dev/null @@ -1,74 +0,0 @@ -# frame.py - The MDI frame window for an editor. -import pywin.framework.window -import win32con -import win32ui - -from . 
import ModuleBrowser - - -class EditorFrame(pywin.framework.window.MDIChildWnd): - def OnCreateClient(self, cp, context): - # Create the default view as specified by the template (ie, the editor view) - view = context.template.MakeView(context.doc) - # Create the browser view. - browserView = ModuleBrowser.BrowserView(context.doc) - view2 = context.template.MakeView(context.doc) - - splitter = win32ui.CreateSplitter() - style = win32con.WS_CHILD | win32con.WS_VISIBLE - splitter.CreateStatic(self, 1, 2, style, win32ui.AFX_IDW_PANE_FIRST) - sub_splitter = self.sub_splitter = win32ui.CreateSplitter() - sub_splitter.CreateStatic(splitter, 2, 1, style, win32ui.AFX_IDW_PANE_FIRST + 1) - - # Note we must add the default view first, so that doc.GetFirstView() returns the editor view. - sub_splitter.CreateView(view, 1, 0, (0, 0)) - splitter.CreateView(browserView, 0, 0, (0, 0)) - sub_splitter.CreateView(view2, 0, 0, (0, 0)) - - ## print "First view is", context.doc.GetFirstView() - ## print "Views are", view, view2, browserView - ## print "Parents are", view.GetParent(), view2.GetParent(), browserView.GetParent() - ## print "Splitter is", splitter - ## print "sub splitter is", sub_splitter - ## Old - ## splitter.CreateStatic (self, 1, 2) - ## splitter.CreateView(view, 0, 1, (0,0)) # size ignored. - ## splitter.CreateView (browserView, 0, 0, (0, 0)) - - # Restrict the size of the browser splitter (and we can avoid filling - # it until it is shown) - splitter.SetColumnInfo(0, 10, 20) - # And the active view is our default view (so it gets initial focus) - self.SetActiveView(view) - - def GetEditorView(self): - # In a multi-view (eg, splitter) environment, get - # an editor (ie, scintilla) view - # Look for the splitter opened the most! 
- if self.sub_splitter is None: - return self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST) - v1 = self.sub_splitter.GetPane(0, 0) - v2 = self.sub_splitter.GetPane(1, 0) - r1 = v1.GetWindowRect() - r2 = v2.GetWindowRect() - if r1[3] - r1[1] > r2[3] - r2[1]: - return v1 - return v2 - - def GetBrowserView(self): - # XXX - should fix this :-) - return self.GetActiveDocument().GetAllViews()[1] - - def OnClose(self): - doc = self.GetActiveDocument() - if not doc.SaveModified(): - ## Cancel button selected from Save dialog, do not actually close - ## print 'close cancelled' - return 0 - ## So the 'Save' dialog doesn't come up twice - doc._obj_.SetModifiedFlag(False) - - # Must force the module browser to close itself here (OnDestroy for the view itself is too late!) - self.sub_splitter = None # ensure no circles! - self.GetBrowserView().DestroyBrowser() - return self._obj_.OnClose() diff --git a/lib/pythonwin/pywin/framework/editor/template.py b/lib/pythonwin/pywin/framework/editor/template.py deleted file mode 100644 index 362a74a6..00000000 --- a/lib/pythonwin/pywin/framework/editor/template.py +++ /dev/null @@ -1,60 +0,0 @@ -import os - -import pywin.framework.window -import win32api -import win32ui -from pywin.mfc import docview - -from . 
import frame - -ParentEditorTemplate = docview.DocTemplate - - -class EditorTemplateBase(ParentEditorTemplate): - def __init__( - self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None - ): - if makeFrame is None: - makeFrame = frame.EditorFrame - ParentEditorTemplate.__init__(self, res, makeDoc, makeFrame, makeView) - - def _CreateDocTemplate(self, resourceId): - assert 0, "You must override this" - - def CreateWin32uiDocument(self): - assert 0, "You must override this" - - def GetFileExtensions(self): - return ".txt", ".py" - - def MatchDocType(self, fileName, fileType): - doc = self.FindOpenDocument(fileName) - if doc: - return doc - ext = os.path.splitext(fileName)[1].lower() - if ext in self.GetFileExtensions(): - return win32ui.CDocTemplate_Confidence_yesAttemptNative - return win32ui.CDocTemplate_Confidence_maybeAttemptForeign - - def InitialUpdateFrame(self, frame, doc, makeVisible=1): - self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler. - doc._UpdateUIForState() - - def GetPythonPropertyPages(self): - """Returns a list of property pages""" - from . import configui - - return [configui.EditorPropertyPage(), configui.EditorWhitespacePropertyPage()] - - def OpenDocumentFile(self, filename, bMakeVisible=1): - if filename is not None: - try: - path = os.path.split(filename)[0] - # print "The editor is translating", `filename`,"to", - filename = win32api.FindFiles(filename)[0][8] - filename = os.path.join(path, filename) - # print `filename` - except (win32api.error, IndexError) as details: - pass - # print "Couldnt get the full filename!", details - return self._obj_.OpenDocumentFile(filename, bMakeVisible) diff --git a/lib/pythonwin/pywin/framework/editor/vss.py b/lib/pythonwin/pywin/framework/editor/vss.py deleted file mode 100644 index 718f83ee..00000000 --- a/lib/pythonwin/pywin/framework/editor/vss.py +++ /dev/null @@ -1,104 +0,0 @@ -# vss.py -- Source Control using Microsoft VSS. 
- -# Provides routines for checking files out of VSS. -# -# Uses an INI file very similar to how VB integrates with VSS - even -# as far as using the same name. - -# The file must be named "Mssccprj.scc", and be in the format of -# an INI file. This file may be in a parent directory, in which -# case the project name will be built from what is specified in the -# ini file, plus the path from the INI file to the file itself. -# -# The INI file should have a [Python] section, and a -# Project=Project Name -# and optionally -# Database=?? - - -import os -import sys -import traceback - -import win32api -import win32ui - -g_iniName = "Mssccprj.scc" # Use the same INI name as VB! - -g_sourceSafe = None - - -def FindVssProjectInfo(fullfname): - """Looks up the file system for an INI file describing the project. - - Looking up the tree is for ni style packages. - - Returns (projectName, pathToFileName) where pathToFileName contains - the path from the ini file to the actual file. - """ - path, fnameonly = os.path.split(fullfname) - origPath = path - project = "" - retPaths = [fnameonly] - while not project: - iniName = os.path.join(path, g_iniName) - database = win32api.GetProfileVal("Python", "Database", "", iniName) - project = win32api.GetProfileVal("Python", "Project", "", iniName) - if project: - break - # No valid INI file in this directory - look up a level. - path, addpath = os.path.split(path) - if not addpath: # Root? 
- break - retPaths.insert(0, addpath) - if not project: - win32ui.MessageBox( - "%s\r\n\r\nThis directory is not configured for Python/VSS" % origPath - ) - return - return project, "/".join(retPaths), database - - -def CheckoutFile(fileName): - global g_sourceSafe - import pythoncom - - ok = 0 - # Assumes the fileName has a complete path, - # and that the INI file can be found in that path - # (or a parent path if a ni style package) - try: - import win32com.client - import win32com.client.gencache - - mod = win32com.client.gencache.EnsureModule( - "{783CD4E0-9D54-11CF-B8EE-00608CC9A71F}", 0, 5, 0 - ) - if mod is None: - win32ui.MessageBox( - "VSS does not appear to be installed. The TypeInfo can not be created" - ) - return ok - - rc = FindVssProjectInfo(fileName) - if rc is None: - return - project, vssFname, database = rc - if g_sourceSafe is None: - g_sourceSafe = win32com.client.Dispatch("SourceSafe") - # SS seems a bit wierd. It defaults the arguments as empty strings, but - # then complains when they are used - so we pass "Missing" - if not database: - database = pythoncom.Missing - g_sourceSafe.Open(database, pythoncom.Missing, pythoncom.Missing) - item = g_sourceSafe.VSSItem("$/%s/%s" % (project, vssFname)) - item.Checkout(None, fileName) - ok = 1 - except pythoncom.com_error as exc: - win32ui.MessageBox(exc.strerror, "Error checking out file") - except: - typ, val, tb = sys.exc_info() - traceback.print_exc() - win32ui.MessageBox("%s - %s" % (str(typ), str(val)), "Error checking out file") - tb = None # Cleanup a cycle - return ok diff --git a/lib/pythonwin/pywin/framework/help.py b/lib/pythonwin/pywin/framework/help.py deleted file mode 100644 index ab664b4d..00000000 --- a/lib/pythonwin/pywin/framework/help.py +++ /dev/null @@ -1,173 +0,0 @@ -# help.py - help utilities for PythonWin. 
-import os - -import regutil -import win32api -import win32con -import win32ui - -htmlhelp_handle = None - -html_help_command_translators = { - win32con.HELP_CONTENTS: 1, # HH_DISPLAY_TOC - win32con.HELP_CONTEXT: 15, # HH_HELP_CONTEXT - win32con.HELP_FINDER: 1, # HH_DISPLAY_TOC -} - - -def FinalizeHelp(): - global htmlhelp_handle - if htmlhelp_handle is not None: - import win32help - - try: - # frame = win32ui.GetMainFrame().GetSafeHwnd() - frame = 0 - win32help.HtmlHelp(frame, None, win32help.HH_UNINITIALIZE, htmlhelp_handle) - except win32help.error: - print("Failed to finalize htmlhelp!") - htmlhelp_handle = None - - -def OpenHelpFile(fileName, helpCmd=None, helpArg=None): - "Open a help file, given a full path" - # default help arg. - win32ui.DoWaitCursor(1) - try: - if helpCmd is None: - helpCmd = win32con.HELP_CONTENTS - ext = os.path.splitext(fileName)[1].lower() - if ext == ".hlp": - win32api.WinHelp( - win32ui.GetMainFrame().GetSafeHwnd(), fileName, helpCmd, helpArg - ) - # XXX - using the htmlhelp API wreaks havoc with keyboard shortcuts - # so we disable it, forcing ShellExecute, which works fine (but - # doesn't close the help file when Pythonwin is closed. - # Tom Heller also points out http://www.microsoft.com/mind/0499/faq/faq0499.asp, - # which may or may not be related. - elif 0 and ext == ".chm": - import win32help - - global htmlhelp_handle - helpCmd = html_help_command_translators.get(helpCmd, helpCmd) - # frame = win32ui.GetMainFrame().GetSafeHwnd() - frame = 0 # Dont want it overlapping ours! - if htmlhelp_handle is None: - htmlhelp_hwnd, htmlhelp_handle = win32help.HtmlHelp( - frame, None, win32help.HH_INITIALIZE - ) - win32help.HtmlHelp(frame, fileName, helpCmd, helpArg) - else: - # Hope that the extension is registered, and we know what to do! 
- win32api.ShellExecute(0, "open", fileName, None, "", win32con.SW_SHOW) - return fileName - finally: - win32ui.DoWaitCursor(-1) - - -def ListAllHelpFiles(): - ret = [] - ret = _ListAllHelpFilesInRoot(win32con.HKEY_LOCAL_MACHINE) - # Ensure we don't get dups. - for item in _ListAllHelpFilesInRoot(win32con.HKEY_CURRENT_USER): - if item not in ret: - ret.append(item) - return ret - - -def _ListAllHelpFilesInRoot(root): - """Returns a list of (helpDesc, helpFname) for all registered help files""" - import regutil - - retList = [] - try: - key = win32api.RegOpenKey( - root, regutil.BuildDefaultPythonKey() + "\\Help", 0, win32con.KEY_READ - ) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - return retList - try: - keyNo = 0 - while 1: - try: - helpDesc = win32api.RegEnumKey(key, keyNo) - helpFile = win32api.RegQueryValue(key, helpDesc) - retList.append((helpDesc, helpFile)) - keyNo = keyNo + 1 - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_NO_MORE_ITEMS: - raise - break - finally: - win32api.RegCloseKey(key) - return retList - - -def SelectAndRunHelpFile(): - from pywin.dialogs import list - - helpFiles = ListAllHelpFiles() - if len(helpFiles) == 1: - # only 1 help file registered - probably ours - no point asking - index = 0 - else: - index = list.SelectFromLists("Select Help file", helpFiles, ["Title"]) - if index is not None: - OpenHelpFile(helpFiles[index][1]) - - -helpIDMap = None - - -def SetHelpMenuOtherHelp(mainMenu): - """Modifies the main Help Menu to handle all registered help files. - mainMenu -- The main menu to modify - usually from docTemplate.GetSharedMenu() - """ - - # Load all help files from the registry. 
- global helpIDMap - if helpIDMap is None: - helpIDMap = {} - cmdID = win32ui.ID_HELP_OTHER - excludeList = ["Main Python Documentation", "Pythonwin Reference"] - firstList = ListAllHelpFiles() - # We actually want to not only exclude these entries, but - # their help file names (as many entries may share the same name) - excludeFnames = [] - for desc, fname in firstList: - if desc in excludeList: - excludeFnames.append(fname) - - helpDescs = [] - for desc, fname in firstList: - if fname not in excludeFnames: - helpIDMap[cmdID] = (desc, fname) - win32ui.GetMainFrame().HookCommand(HandleHelpOtherCommand, cmdID) - cmdID = cmdID + 1 - - helpMenu = mainMenu.GetSubMenu( - mainMenu.GetMenuItemCount() - 1 - ) # Help menu always last. - otherHelpMenuPos = 2 # cant search for ID, as sub-menu has no ID. - otherMenu = helpMenu.GetSubMenu(otherHelpMenuPos) - while otherMenu.GetMenuItemCount(): - otherMenu.DeleteMenu(0, win32con.MF_BYPOSITION) - - if helpIDMap: - for id, (desc, fname) in helpIDMap.items(): - otherMenu.AppendMenu(win32con.MF_ENABLED | win32con.MF_STRING, id, desc) - else: - helpMenu.EnableMenuItem( - otherHelpMenuPos, win32con.MF_BYPOSITION | win32con.MF_GRAYED - ) - - -def HandleHelpOtherCommand(cmd, code): - OpenHelpFile(helpIDMap[cmd][1]) diff --git a/lib/pythonwin/pywin/framework/interact.py b/lib/pythonwin/pywin/framework/interact.py deleted file mode 100644 index 5f0b87f2..00000000 --- a/lib/pythonwin/pywin/framework/interact.py +++ /dev/null @@ -1,985 +0,0 @@ -################################################################## -## -## Interactive Shell Window -## - -import array -import code -import os -import string -import sys -import traceback - -import __main__ -import afxres -import pywin.framework.app -import pywin.scintilla.control -import pywin.scintilla.formatter -import pywin.scintilla.IDLEenvironment -import win32api -import win32clipboard -import win32con -import win32ui - -## sequential after ID_GOTO_LINE defined in editor.py 
-ID_EDIT_COPY_CODE = 0xE2002 -ID_EDIT_EXEC_CLIPBOARD = 0x2003 - -trace = pywin.scintilla.formatter.trace - -import re - -from . import winout - -# from IDLE. -_is_block_opener = re.compile(r":\s*(#.*)?$").search -_is_block_closer = re.compile( - r""" - \s* - ( return - | break - | continue - | raise - | pass - ) - \b -""", - re.VERBOSE, -).match - -tracebackHeader = "Traceback (".encode("ascii") - -sectionProfile = "Interactive Window" -valueFormatTitle = "FormatTitle" -valueFormatInput = "FormatInput" -valueFormatOutput = "FormatOutput" -valueFormatOutputError = "FormatOutputError" - -# These are defaults only. Values are read from the registry. -formatTitle = (-536870897, 0, 220, 0, 16711680, 184, 34, "Arial") -formatInput = (-402653169, 0, 200, 0, 0, 0, 49, "Courier New") -formatOutput = (-402653169, 0, 200, 0, 8421376, 0, 49, "Courier New") -formatOutputError = (-402653169, 0, 200, 0, 255, 0, 49, "Courier New") - -try: - sys.ps1 -except AttributeError: - sys.ps1 = ">>> " - sys.ps2 = "... " - - -def LoadPreference(preference, default=""): - return win32ui.GetProfileVal(sectionProfile, preference, default) - - -def SavePreference(prefName, prefValue): - win32ui.WriteProfileVal(sectionProfile, prefName, prefValue) - - -def GetPromptPrefix(line): - ps1 = sys.ps1 - if line[: len(ps1)] == ps1: - return ps1 - ps2 = sys.ps2 - if line[: len(ps2)] == ps2: - return ps2 - - -############################################################# -# -# Colorizer related code. 
-# -############################################################# -STYLE_INTERACTIVE_EOL = "Interactive EOL" -STYLE_INTERACTIVE_OUTPUT = "Interactive Output" -STYLE_INTERACTIVE_PROMPT = "Interactive Prompt" -STYLE_INTERACTIVE_BANNER = "Interactive Banner" -STYLE_INTERACTIVE_ERROR = "Interactive Error" -STYLE_INTERACTIVE_ERROR_FINALLINE = "Interactive Error (final line)" - -INTERACTIVE_STYLES = [ - STYLE_INTERACTIVE_EOL, - STYLE_INTERACTIVE_OUTPUT, - STYLE_INTERACTIVE_PROMPT, - STYLE_INTERACTIVE_BANNER, - STYLE_INTERACTIVE_ERROR, - STYLE_INTERACTIVE_ERROR_FINALLINE, -] - -FormatterParent = pywin.scintilla.formatter.PythonSourceFormatter - - -class InteractiveFormatter(FormatterParent): - def __init__(self, scintilla): - FormatterParent.__init__(self, scintilla) - self.bannerDisplayed = False - - def SetStyles(self): - FormatterParent.SetStyles(self) - Style = pywin.scintilla.formatter.Style - self.RegisterStyle(Style(STYLE_INTERACTIVE_EOL, STYLE_INTERACTIVE_PROMPT)) - self.RegisterStyle(Style(STYLE_INTERACTIVE_PROMPT, formatInput)) - self.RegisterStyle(Style(STYLE_INTERACTIVE_OUTPUT, formatOutput)) - self.RegisterStyle(Style(STYLE_INTERACTIVE_BANNER, formatTitle)) - self.RegisterStyle(Style(STYLE_INTERACTIVE_ERROR, formatOutputError)) - self.RegisterStyle( - Style(STYLE_INTERACTIVE_ERROR_FINALLINE, STYLE_INTERACTIVE_ERROR) - ) - - def LoadPreference(self, name, default): - rc = win32ui.GetProfileVal("Format", name, default) - if rc == default: - rc = win32ui.GetProfileVal(sectionProfile, name, default) - return rc - - def ColorizeInteractiveCode(self, cdoc, styleStart, stylePyStart): - lengthDoc = len(cdoc) - if lengthDoc == 0: - return - state = styleStart - # As per comments in Colorize(), we work with the raw utf8 - # bytes. To avoid too muych py3k pain, we treat each utf8 byte - # as a latin-1 unicode character - we only use it to compare - # against ascii chars anyway... 
- chNext = cdoc[0:1].decode("latin-1") - startSeg = 0 - i = 0 - lastState = state # debug only - while i < lengthDoc: - ch = chNext - chNext = cdoc[i + 1 : i + 2].decode("latin-1") - - # trace("ch=%r, i=%d, next=%r, state=%s" % (ch, i, chNext, state)) - if state == STYLE_INTERACTIVE_EOL: - if ch not in "\r\n": - self.ColorSeg(startSeg, i - 1, state) - startSeg = i - if ch in (sys.ps1[0], sys.ps2[0]): - state = STYLE_INTERACTIVE_PROMPT - elif cdoc[i : i + len(tracebackHeader)] == tracebackHeader: - state = STYLE_INTERACTIVE_ERROR - else: - state = STYLE_INTERACTIVE_OUTPUT - elif state == STYLE_INTERACTIVE_PROMPT: - if ch not in sys.ps1 + sys.ps2 + " ": - self.ColorSeg(startSeg, i - 1, state) - startSeg = i - if ch in "\r\n": - state = STYLE_INTERACTIVE_EOL - else: - state = stylePyStart # Start coloring Python code. - elif state in (STYLE_INTERACTIVE_OUTPUT,): - if ch in "\r\n": - self.ColorSeg(startSeg, i - 1, state) - startSeg = i - state = STYLE_INTERACTIVE_EOL - elif state == STYLE_INTERACTIVE_ERROR: - if ch in "\r\n" and chNext and chNext not in string.whitespace: - # Everything including me - self.ColorSeg(startSeg, i, state) - startSeg = i + 1 - state = STYLE_INTERACTIVE_ERROR_FINALLINE - elif i == 0 and ch not in string.whitespace: - # If we are coloring from the start of a line, - # we need this better check for the last line - # Color up to not including me - self.ColorSeg(startSeg, i - 1, state) - startSeg = i - state = STYLE_INTERACTIVE_ERROR_FINALLINE - elif state == STYLE_INTERACTIVE_ERROR_FINALLINE: - if ch in "\r\n": - self.ColorSeg(startSeg, i - 1, state) - startSeg = i - state = STYLE_INTERACTIVE_EOL - elif state == STYLE_INTERACTIVE_BANNER: - if ch in "\r\n" and (chNext == "" or chNext in ">["): - # Everything including me - self.ColorSeg(startSeg, i - 1, state) - startSeg = i - state = STYLE_INTERACTIVE_EOL - else: - # It is a PythonColorizer state - seek past the end of the line - # and ask the Python colorizer to color that. 
- end = startSeg - while end < lengthDoc and cdoc[end] not in "\r\n".encode("ascii"): - end = end + 1 - self.ColorizePythonCode(cdoc[:end], startSeg, state) - stylePyStart = self.GetStringStyle(end - 1) - if stylePyStart is None: - stylePyStart = pywin.scintilla.formatter.STYLE_DEFAULT - else: - stylePyStart = stylePyStart.name - startSeg = end - i = end - 1 # ready for increment. - chNext = cdoc[end : end + 1].decode("latin-1") - state = STYLE_INTERACTIVE_EOL - if lastState != state: - lastState = state - i = i + 1 - # and the rest - if startSeg < i: - self.ColorSeg(startSeg, i - 1, state) - - def Colorize(self, start=0, end=-1): - # scintilla's formatting is all done in terms of utf, so - # we work with utf8 bytes instead of unicode. This magically - # works as any extended chars found in the utf8 don't change - # the semantics. - stringVal = self.scintilla.GetTextRange(start, end, decode=False) - styleStart = None - stylePyStart = None - if start > 1: - # Likely we are being asked to color from the start of the line. - # We find the last formatted character on the previous line. - # If TQString, we continue it. Otherwise, we reset. - look = start - 1 - while look and self.scintilla.SCIGetCharAt(look) in "\n\r": - look = look - 1 - if look and look < start - 1: # Did we find a char before the \n\r sets? - strstyle = self.GetStringStyle(look) - quote_char = None - if strstyle is not None: - if strstyle.name == pywin.scintilla.formatter.STYLE_TQSSTRING: - quote_char = "'" - elif strstyle.name == pywin.scintilla.formatter.STYLE_TQDSTRING: - quote_char = '"' - if quote_char is not None: - # It is a TQS. If the TQS is not terminated, we - # carry the style through. 
- if look > 2: - look_str = ( - self.scintilla.SCIGetCharAt(look - 2) - + self.scintilla.SCIGetCharAt(look - 1) - + self.scintilla.SCIGetCharAt(look) - ) - if look_str != quote_char * 3: - stylePyStart = strstyle.name - if stylePyStart is None: - stylePyStart = pywin.scintilla.formatter.STYLE_DEFAULT - - if start > 0: - stylenum = self.scintilla.SCIGetStyleAt(start - 1) - styleStart = self.GetStyleByNum(stylenum).name - elif self.bannerDisplayed: - styleStart = STYLE_INTERACTIVE_EOL - else: - styleStart = STYLE_INTERACTIVE_BANNER - self.bannerDisplayed = True - self.scintilla.SCIStartStyling(start, 31) - self.style_buffer = array.array("b", (0,) * len(stringVal)) - self.ColorizeInteractiveCode(stringVal, styleStart, stylePyStart) - self.scintilla.SCISetStylingEx(self.style_buffer) - self.style_buffer = None - - -############################################################### -# -# This class handles the Python interactive interpreter. -# -# It uses a basic EditWindow, and does all the magic. -# This is triggered by the enter key hander attached by the -# start-up code. It determines if a command is to be executed -# or continued (ie, emit "... ") by snooping around the current -# line, looking for the prompts -# -class PythonwinInteractiveInterpreter(code.InteractiveInterpreter): - def __init__(self, locals=None, globals=None): - if locals is None: - locals = __main__.__dict__ - if globals is None: - globals = locals - self.globals = globals - code.InteractiveInterpreter.__init__(self, locals) - - def showsyntaxerror(self, filename=None): - sys.stderr.write( - tracebackHeader.decode("ascii") - ) # So the color syntaxer recognises it. 
- code.InteractiveInterpreter.showsyntaxerror(self, filename) - - def runcode(self, code): - try: - exec(code, self.globals, self.locals) - except SystemExit: - raise - except: - self.showtraceback() - - -class InteractiveCore: - def __init__(self, banner=None): - self.banner = banner - - # LoadFontPreferences() - def Init(self): - self.oldStdOut = self.oldStdErr = None - - # self.SetWordWrap(win32ui.CRichEditView_WrapNone) - self.interp = PythonwinInteractiveInterpreter() - - self.OutputGrab() # Release at cleanup. - - if self.GetTextLength() == 0: - if self.banner is None: - suffix = "" - if win32ui.debug: - suffix = ", debug build" - sys.stderr.write( - "PythonWin %s on %s%s.\n" % (sys.version, sys.platform, suffix) - ) - sys.stderr.write( - "Portions %s - see 'Help/About PythonWin' for further copyright information.\n" - % (win32ui.copyright,) - ) - else: - sys.stderr.write(banner) - rcfile = os.environ.get("PYTHONSTARTUP") - if rcfile: - import __main__ - - try: - exec( - compile( - open(rcfile, "rb").read(), rcfile, "exec", dont_inherit=True - ), - __main__.__dict__, - __main__.__dict__, - ) - except: - sys.stderr.write( - ">>> \nError executing PYTHONSTARTUP script %r\n" % (rcfile) - ) - traceback.print_exc(file=sys.stderr) - self.AppendToPrompt([]) - - def SetContext(self, globals, locals, name="Dbg"): - oldPrompt = sys.ps1 - if globals is None: - # Reset - sys.ps1 = ">>> " - sys.ps2 = "... " - locals = globals = __main__.__dict__ - else: - sys.ps1 = "[%s]>>> " % name - sys.ps2 = "[%s]... 
" % name - self.interp.locals = locals - self.interp.globals = globals - self.AppendToPrompt([], oldPrompt) - - def GetContext(self): - return self.interp.globals, self.interp.locals - - def DoGetLine(self, line=-1): - if line == -1: - line = self.LineFromChar() - line = self.GetLine(line) - while line and line[-1] in ("\r", "\n"): - line = line[:-1] - return line - - def AppendToPrompt(self, bufLines, oldPrompt=None): - "Take a command and stick it at the end of the buffer (with python prompts inserted if required)." - self.flush() - lastLineNo = self.GetLineCount() - 1 - line = self.DoGetLine(lastLineNo) - if oldPrompt and line == oldPrompt: - self.SetSel(self.GetTextLength() - len(oldPrompt), self.GetTextLength()) - self.ReplaceSel(sys.ps1) - elif line != str(sys.ps1): - if len(line) != 0: - self.write("\n") - self.write(sys.ps1) - self.flush() - self.idle.text.mark_set("iomark", "end-1c") - if not bufLines: - return - terms = (["\n" + sys.ps2] * (len(bufLines) - 1)) + [""] - for bufLine, term in zip(bufLines, terms): - if bufLine.strip(): - self.write(bufLine + term) - self.flush() - - def EnsureNoPrompt(self): - # Get ready to write some text NOT at a Python prompt. - self.flush() - lastLineNo = self.GetLineCount() - 1 - line = self.DoGetLine(lastLineNo) - if not line or line in (sys.ps1, sys.ps2): - self.SetSel(self.GetTextLength() - len(line), self.GetTextLength()) - self.ReplaceSel("") - else: - # Just add a new line. 
- self.write("\n") - - def _GetSubConfigNames(self): - return ["interactive"] # Allow [Keys:Interactive] sections to be specific - - def HookHandlers(self): - # Hook menu command (executed when a menu item with that ID is selected from a menu/toolbar - self.HookCommand(self.OnSelectBlock, win32ui.ID_EDIT_SELECT_BLOCK) - self.HookCommand(self.OnEditCopyCode, ID_EDIT_COPY_CODE) - self.HookCommand(self.OnEditExecClipboard, ID_EDIT_EXEC_CLIPBOARD) - mod = pywin.scintilla.IDLEenvironment.GetIDLEModule("IdleHistory") - if mod is not None: - self.history = mod.History(self.idle.text, "\n" + sys.ps2) - else: - self.history = None - # hack for now for event handling. - - # GetBlockBoundary takes a line number, and will return the - # start and and line numbers of the block, and a flag indicating if the - # block is a Python code block. - # If the line specified has a Python prompt, then the lines are parsed - # backwards and forwards, and the flag is true. - # If the line does not start with a prompt, the block is searched forward - # and backward until a prompt _is_ found, and all lines in between without - # prompts are returned, and the flag is false. - def GetBlockBoundary(self, lineNo): - line = self.DoGetLine(lineNo) - maxLineNo = self.GetLineCount() - 1 - prefix = GetPromptPrefix(line) - if prefix is None: # Non code block - flag = 0 - startLineNo = lineNo - while startLineNo > 0: - if GetPromptPrefix(self.DoGetLine(startLineNo - 1)) is not None: - break # there _is_ a prompt - startLineNo = startLineNo - 1 - endLineNo = lineNo - while endLineNo < maxLineNo: - if GetPromptPrefix(self.DoGetLine(endLineNo + 1)) is not None: - break # there _is_ a prompt - endLineNo = endLineNo + 1 - else: # Code block - flag = 1 - startLineNo = lineNo - while startLineNo > 0 and prefix != str(sys.ps1): - prefix = GetPromptPrefix(self.DoGetLine(startLineNo - 1)) - if prefix is None: - break - # there is no prompt. 
- startLineNo = startLineNo - 1 - endLineNo = lineNo - while endLineNo < maxLineNo: - prefix = GetPromptPrefix(self.DoGetLine(endLineNo + 1)) - if prefix is None: - break # there is no prompt - if prefix == str(sys.ps1): - break # this is another command - endLineNo = endLineNo + 1 - # continue until end of buffer, or no prompt - return (startLineNo, endLineNo, flag) - - def ExtractCommand(self, lines): - start, end = lines - retList = [] - while end >= start: - thisLine = self.DoGetLine(end) - promptLen = len(GetPromptPrefix(thisLine)) - retList = [thisLine[promptLen:]] + retList - end = end - 1 - return retList - - def OutputGrab(self): - # import win32traceutil; return - self.oldStdOut = sys.stdout - self.oldStdErr = sys.stderr - sys.stdout = self - sys.stderr = self - self.flush() - - def OutputRelease(self): - # a command may have overwritten these - only restore if not. - if self.oldStdOut is not None: - if sys.stdout == self: - sys.stdout = self.oldStdOut - if self.oldStdErr is not None: - if sys.stderr == self: - sys.stderr = self.oldStdErr - self.oldStdOut = None - self.oldStdErr = None - self.flush() - - ################################### - # - # Message/Command/Key Hooks. 
- # - # Enter key handler - # - def ProcessEnterEvent(self, event): - # If autocompletion has been triggered, complete and do not process event - if self.SCIAutoCActive(): - self.SCIAutoCComplete() - self.SCICancel() - return - - self.SCICancel() - # First, check for an error message - haveGrabbedOutput = 0 - if self.HandleSpecialLine(): - return 0 - - lineNo = self.LineFromChar() - start, end, isCode = self.GetBlockBoundary(lineNo) - # If we are not in a code block just go to the prompt (or create a new one) - if not isCode: - self.AppendToPrompt([]) - win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) - return - - lines = self.ExtractCommand((start, end)) - - # If we are in a code-block, but it isnt at the end of the buffer - # then copy it to the end ready for editing and subsequent execution - if end != self.GetLineCount() - 1: - win32ui.SetStatusText("Press ENTER to execute command") - self.AppendToPrompt(lines) - self.SetSel(-2) - return - - # If SHIFT held down, we want new code here and now! - bNeedIndent = ( - win32api.GetKeyState(win32con.VK_SHIFT) < 0 - or win32api.GetKeyState(win32con.VK_CONTROL) < 0 - ) - if bNeedIndent: - self.ReplaceSel("\n") - else: - self.SetSel(-2) - self.ReplaceSel("\n") - source = "\n".join(lines) - while source and source[-1] in "\t ": - source = source[:-1] - self.OutputGrab() # grab the output for the command exec. - try: - if self.interp.runsource( - source, "" - ): # Need more input! - bNeedIndent = 1 - else: - # If the last line isnt empty, append a newline - if self.history is not None: - self.history.history_store(source) - self.AppendToPrompt([]) - win32ui.SetStatusText( - win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE) - ) - # win32ui.SetStatusText('Successfully executed statement') - finally: - self.OutputRelease() - if bNeedIndent: - win32ui.SetStatusText("Ready to continue the command") - # Now attempt correct indentation (should use IDLE?) 
- curLine = self.DoGetLine(lineNo)[len(sys.ps2) :] - pos = 0 - indent = "" - while len(curLine) > pos and curLine[pos] in string.whitespace: - indent = indent + curLine[pos] - pos = pos + 1 - if _is_block_opener(curLine): - indent = indent + "\t" - elif _is_block_closer(curLine): - indent = indent[:-1] - # use ReplaceSel to ensure it goes at the cursor rather than end of buffer. - self.ReplaceSel(sys.ps2 + indent) - return 0 - - # ESC key handler - def ProcessEscEvent(self, event): - # Implement a cancel. - if self.SCIAutoCActive() or self.SCICallTipActive(): - self.SCICancel() - else: - win32ui.SetStatusText("Cancelled.") - self.AppendToPrompt(("",)) - return 0 - - def OnSelectBlock(self, command, code): - lineNo = self.LineFromChar() - start, end, isCode = self.GetBlockBoundary(lineNo) - startIndex = self.LineIndex(start) - endIndex = self.LineIndex(end + 1) - 2 # skip \r + \n - if endIndex < 0: # must be beyond end of buffer - endIndex = -2 # self.Length() - self.SetSel(startIndex, endIndex) - - def OnEditCopyCode(self, command, code): - """Sanitizes code from interactive window, removing prompts and output, - and inserts it in the clipboard.""" - code = self.GetSelText() - lines = code.splitlines() - out_lines = [] - for line in lines: - if line.startswith(sys.ps1): - line = line[len(sys.ps1) :] - out_lines.append(line) - elif line.startswith(sys.ps2): - line = line[len(sys.ps2) :] - out_lines.append(line) - out_code = os.linesep.join(out_lines) - win32clipboard.OpenClipboard() - try: - win32clipboard.SetClipboardData( - win32clipboard.CF_UNICODETEXT, str(out_code) - ) - finally: - win32clipboard.CloseClipboard() - - def OnEditExecClipboard(self, command, code): - """Executes python code directly from the clipboard.""" - win32clipboard.OpenClipboard() - try: - code = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT) - finally: - win32clipboard.CloseClipboard() - - code = code.replace("\r\n", "\n") + "\n" - try: - o = compile(code, "", "exec") - 
exec(o, __main__.__dict__) - except: - traceback.print_exc() - - def GetRightMenuItems(self): - # Just override parents - ret = [] - flags = 0 - ret.append((flags, win32ui.ID_EDIT_UNDO, "&Undo")) - ret.append(win32con.MF_SEPARATOR) - ret.append((flags, win32ui.ID_EDIT_CUT, "Cu&t")) - ret.append((flags, win32ui.ID_EDIT_COPY, "&Copy")) - - start, end = self.GetSel() - if start != end: - ret.append((flags, ID_EDIT_COPY_CODE, "Copy code without prompts")) - if win32clipboard.IsClipboardFormatAvailable(win32clipboard.CF_UNICODETEXT): - ret.append( - (flags, ID_EDIT_EXEC_CLIPBOARD, "Execute python code from clipboard") - ) - - ret.append((flags, win32ui.ID_EDIT_PASTE, "&Paste")) - ret.append(win32con.MF_SEPARATOR) - ret.append((flags, win32ui.ID_EDIT_SELECT_ALL, "&Select all")) - ret.append((flags, win32ui.ID_EDIT_SELECT_BLOCK, "Select &block")) - ret.append((flags, win32ui.ID_VIEW_WHITESPACE, "View &Whitespace")) - return ret - - def MDINextEvent(self, event): - win32ui.GetMainFrame().MDINext(0) - - def MDIPrevEvent(self, event): - win32ui.GetMainFrame().MDINext(0) - - def WindowBackEvent(self, event): - parent = self.GetParentFrame() - if parent == win32ui.GetMainFrame(): - # It is docked. - try: - wnd, isactive = parent.MDIGetActive() - wnd.SetFocus() - except win32ui.error: - # No MDI window active! - pass - else: - # Normal Window - try: - lastActive = self.GetParentFrame().lastActive - # If the window is invalid, reset it. 
- if lastActive is not None and ( - lastActive._obj_ is None or lastActive.GetSafeHwnd() == 0 - ): - lastActive = self.GetParentFrame().lastActive = None - win32ui.SetStatusText("The last active Window has been closed.") - except AttributeError: - print("Can't find the last active window!") - lastActive = None - if lastActive is not None: - lastActive.MDIActivate() - - -class InteractiveView(InteractiveCore, winout.WindowOutputView): - def __init__(self, doc): - InteractiveCore.__init__(self) - winout.WindowOutputView.__init__(self, doc) - self.encoding = pywin.default_scintilla_encoding - - def _MakeColorizer(self): - return InteractiveFormatter(self) - - def OnInitialUpdate(self): - winout.WindowOutputView.OnInitialUpdate(self) - self.SetWordWrap() - self.Init() - - def HookHandlers(self): - winout.WindowOutputView.HookHandlers(self) - InteractiveCore.HookHandlers(self) - - -class CInteractivePython(winout.WindowOutput): - def __init__(self, makeDoc=None, makeFrame=None): - self.IsFinalDestroy = 0 - winout.WindowOutput.__init__( - self, - sectionProfile, - sectionProfile, - winout.flags.WQ_LINE, - 1, - None, - makeDoc, - makeFrame, - InteractiveView, - ) - self.Create() - - def OnViewDestroy(self, view): - if self.IsFinalDestroy: - view.OutputRelease() - winout.WindowOutput.OnViewDestroy(self, view) - - def Close(self): - self.IsFinalDestroy = 1 - winout.WindowOutput.Close(self) - - -class InteractiveFrame(winout.WindowOutputFrame): - def __init__(self): - self.lastActive = None - winout.WindowOutputFrame.__init__(self) - - def OnMDIActivate(self, bActive, wndActive, wndDeactive): - if bActive: - self.lastActive = wndDeactive - - -###################################################################### -## -## Dockable Window Support -## -###################################################################### -ID_DOCKED_INTERACTIVE_CONTROLBAR = 0xE802 - -DockedInteractiveViewParent = InteractiveView - - -class DockedInteractiveView(DockedInteractiveViewParent): - 
def HookHandlers(self): - DockedInteractiveViewParent.HookHandlers(self) - self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS) - self.HookMessage(self.OnKillFocus, win32con.WM_KILLFOCUS) - - def OnSetFocus(self, msg): - self.GetParentFrame().SetActiveView(self) - return 1 - - def OnKillFocus(self, msg): - # If we are losing focus to another in this app, reset the main frame's active view. - hwnd = wparam = msg[2] - try: - wnd = win32ui.CreateWindowFromHandle(hwnd) - reset = wnd.GetTopLevelFrame() == self.GetTopLevelFrame() - except win32ui.error: - reset = 0 # Not my window - if reset: - self.GetParentFrame().SetActiveView(None) - return 1 - - def OnDestroy(self, msg): - newSize = self.GetWindowPlacement()[4] - pywin.framework.app.SaveWindowSize("Interactive Window", newSize, "docked") - try: - if self.GetParentFrame().GetActiveView == self: - self.GetParentFrame().SetActiveView(None) - except win32ui.error: - pass - try: - if win32ui.GetMainFrame().GetActiveView() == self: - win32ui.GetMainFrame().SetActiveView(None) - except win32ui.error: - pass - return DockedInteractiveViewParent.OnDestroy(self, msg) - - -class CDockedInteractivePython(CInteractivePython): - def __init__(self, dockbar): - self.bFirstCreated = 0 - self.dockbar = dockbar - CInteractivePython.__init__(self) - - def NeedRecreateWindow(self): - if self.bCreating: - return 0 - try: - frame = win32ui.GetMainFrame() - if frame.closing: - return 0 # Dieing! - except (win32ui.error, AttributeError): - return 0 # The app is dieing! - try: - cb = frame.GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR) - return not cb.IsWindowVisible() - except win32ui.error: - return 1 # Control bar does not exist! 
- - def RecreateWindow(self): - try: - dockbar = win32ui.GetMainFrame().GetControlBar( - ID_DOCKED_INTERACTIVE_CONTROLBAR - ) - win32ui.GetMainFrame().ShowControlBar(dockbar, 1, 1) - except win32ui.error: - CreateDockedInteractiveWindow() - - def Create(self): - self.bCreating = 1 - doc = InteractiveDocument(None, self.DoCreateDoc()) - view = DockedInteractiveView(doc) - defRect = pywin.framework.app.LoadWindowSize("Interactive Window", "docked") - if defRect[2] - defRect[0] == 0: - defRect = 0, 0, 500, 200 - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER - id = 1050 # win32ui.AFX_IDW_PANE_FIRST - view.CreateWindow(self.dockbar, id, style, defRect) - view.OnInitialUpdate() - self.bFirstCreated = 1 - - self.currentView = doc.GetFirstView() - self.bCreating = 0 - if self.title: - doc.SetTitle(self.title) - - -# The factory we pass to the dockable window support. -def InteractiveViewCreator(parent): - global edit - edit = CDockedInteractivePython(parent) - return edit.currentView - - -def CreateDockedInteractiveWindow(): - # Later, the DockingBar should be capable of hosting multiple - # children. - from pywin.docking.DockingBar import DockingBar - - bar = DockingBar() - creator = InteractiveViewCreator - bar.CreateWindow( - win32ui.GetMainFrame(), - creator, - "Interactive Window", - ID_DOCKED_INTERACTIVE_CONTROLBAR, - ) - bar.SetBarStyle( - bar.GetBarStyle() - | afxres.CBRS_TOOLTIPS - | afxres.CBRS_FLYBY - | afxres.CBRS_SIZE_DYNAMIC - ) - bar.EnableDocking(afxres.CBRS_ALIGN_ANY) - win32ui.GetMainFrame().DockControlBar(bar, afxres.AFX_IDW_DOCKBAR_BOTTOM) - - -###################################################################### -# -# The public interface to this module. -# -###################################################################### -# No extra functionality now, but maybe later, so -# publicize these names. -InteractiveDocument = winout.WindowOutputDocument - -# We remember our one and only interactive window in the "edit" variable. 
-edit = None - - -def CreateInteractiveWindowUserPreference(makeDoc=None, makeFrame=None): - """Create some sort of interactive window if the user's preference say we should.""" - bCreate = LoadPreference("Show at startup", 1) - if bCreate: - CreateInteractiveWindow(makeDoc, makeFrame) - - -def CreateInteractiveWindow(makeDoc=None, makeFrame=None): - """Create a standard or docked interactive window unconditionally""" - assert edit is None, "Creating second interactive window!" - bDocking = LoadPreference("Docking", 0) - if bDocking: - CreateDockedInteractiveWindow() - else: - CreateMDIInteractiveWindow(makeDoc, makeFrame) - assert edit is not None, "Created interactive window, but did not set the global!" - edit.currentView.SetFocus() - - -def CreateMDIInteractiveWindow(makeDoc=None, makeFrame=None): - """Create a standard (non-docked) interactive window unconditionally""" - global edit - if makeDoc is None: - makeDoc = InteractiveDocument - if makeFrame is None: - makeFrame = InteractiveFrame - edit = CInteractivePython(makeDoc=makeDoc, makeFrame=makeFrame) - - -def DestroyInteractiveWindow(): - """Destroy the interactive window. - This is different to Closing the window, - which may automatically re-appear. Once destroyed, it can never be recreated, - and a complete new instance must be created (which the various other helper - functions will then do after making this call - """ - global edit - if edit is not None and edit.currentView is not None: - if edit.currentView.GetParentFrame() == win32ui.GetMainFrame(): - # It is docked - do nothing now (this is only called at shutdown!) - pass - else: - # It is a standard window - call Close on the container. 
- edit.Close() - edit = None - - -def CloseInteractiveWindow(): - """Close the interactive window, allowing it to be re-created on demand.""" - global edit - if edit is not None and edit.currentView is not None: - if edit.currentView.GetParentFrame() == win32ui.GetMainFrame(): - # It is docked, just hide the dock bar. - frame = win32ui.GetMainFrame() - cb = frame.GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR) - frame.ShowControlBar(cb, 0, 1) - else: - # It is a standard window - destroy the frame/view, allowing the object itself to remain. - edit.currentView.GetParentFrame().DestroyWindow() - - -def ToggleInteractiveWindow(): - """If the interactive window is visible, hide it, otherwise show it.""" - if edit is None: - CreateInteractiveWindow() - else: - if edit.NeedRecreateWindow(): - edit.RecreateWindow() - else: - # Close it, allowing a reopen. - CloseInteractiveWindow() - - -def ShowInteractiveWindow(): - """Shows (or creates if necessary) an interactive window""" - if edit is None: - CreateInteractiveWindow() - else: - if edit.NeedRecreateWindow(): - edit.RecreateWindow() - else: - parent = edit.currentView.GetParentFrame() - if parent == win32ui.GetMainFrame(): # It is docked. - edit.currentView.SetFocus() - else: # It is a "normal" window - edit.currentView.GetParentFrame().AutoRestore() - win32ui.GetMainFrame().MDIActivate(edit.currentView.GetParentFrame()) - - -def IsInteractiveWindowVisible(): - return edit is not None and not edit.NeedRecreateWindow() diff --git a/lib/pythonwin/pywin/framework/intpyapp.py b/lib/pythonwin/pywin/framework/intpyapp.py deleted file mode 100644 index 48461dba..00000000 --- a/lib/pythonwin/pywin/framework/intpyapp.py +++ /dev/null @@ -1,555 +0,0 @@ -# intpyapp.py - Interactive Python application class -# -import os -import sys -import traceback - -import __main__ -import commctrl -import win32api -import win32con -import win32ui -from pywin.mfc import afxres, dialog - -from . 
import app, dbgcommands - -lastLocateFileName = ".py" # used in the "File/Locate" dialog... - - -# todo - _SetupSharedMenu should be moved to a framework class. -def _SetupSharedMenu_(self): - sharedMenu = self.GetSharedMenu() - from pywin.framework import toolmenu - - toolmenu.SetToolsMenu(sharedMenu) - from pywin.framework import help - - help.SetHelpMenuOtherHelp(sharedMenu) - - -from pywin.mfc import docview - -docview.DocTemplate._SetupSharedMenu_ = _SetupSharedMenu_ - - -class MainFrame(app.MainFrame): - def OnCreate(self, createStruct): - self.closing = 0 - if app.MainFrame.OnCreate(self, createStruct) == -1: - return -1 - style = ( - win32con.WS_CHILD - | afxres.CBRS_SIZE_DYNAMIC - | afxres.CBRS_TOP - | afxres.CBRS_TOOLTIPS - | afxres.CBRS_FLYBY - ) - - self.EnableDocking(afxres.CBRS_ALIGN_ANY) - - tb = win32ui.CreateToolBar(self, style | win32con.WS_VISIBLE) - tb.ModifyStyle(0, commctrl.TBSTYLE_FLAT) - tb.LoadToolBar(win32ui.IDR_MAINFRAME) - tb.EnableDocking(afxres.CBRS_ALIGN_ANY) - tb.SetWindowText("Standard") - self.DockControlBar(tb) - # Any other packages which use toolbars - from pywin.debugger.debugger import PrepareControlBars - - PrepareControlBars(self) - # Note "interact" also uses dockable windows, but they already happen - - # And a "Tools" menu on the main frame. - menu = self.GetMenu() - from . import toolmenu - - toolmenu.SetToolsMenu(menu, 2) - # And fix the "Help" menu on the main frame - from pywin.framework import help - - help.SetHelpMenuOtherHelp(menu) - - def OnClose(self): - try: - import pywin.debugger - - if ( - pywin.debugger.currentDebugger is not None - and pywin.debugger.currentDebugger.pumping - ): - try: - pywin.debugger.currentDebugger.close(1) - except: - traceback.print_exc() - return - except win32ui.error: - pass - self.closing = 1 - self.SaveBarState("ToolbarDefault") - self.SetActiveView(None) # Otherwise MFC's OnClose may _not_ prompt for save. 
- - from pywin.framework import help - - help.FinalizeHelp() - - self.DestroyControlBar(afxres.AFX_IDW_TOOLBAR) - self.DestroyControlBar(win32ui.ID_VIEW_TOOLBAR_DBG) - - return self._obj_.OnClose() - - def DestroyControlBar(self, id): - try: - bar = self.GetControlBar(id) - except win32ui.error: - return - bar.DestroyWindow() - - def OnCommand(self, wparam, lparam): - # By default, the current MDI child frame will process WM_COMMAND - # messages before any docked control bars - even if the control bar - # has focus. This is a problem for the interactive window when docked. - # Therefore, we detect the situation of a view having the main frame - # as its parent, and assume it must be a docked view (which it will in an MDI app) - try: - v = ( - self.GetActiveView() - ) # Raise an exception if none - good - then we want default handling - # Main frame _does_ have a current view (ie, a docking view) - see if it wants it. - if v.OnCommand(wparam, lparam): - return 1 - except (win32ui.error, AttributeError): - pass - return self._obj_.OnCommand(wparam, lparam) - - -class InteractivePythonApp(app.CApp): - # This works if necessary - just we dont need to override the Run method. 
- # def Run(self): - # return self._obj_.Run() - - def HookCommands(self): - app.CApp.HookCommands(self) - dbgcommands.DebuggerCommandHandler().HookCommands() - self.HookCommand(self.OnViewBrowse, win32ui.ID_VIEW_BROWSE) - self.HookCommand(self.OnFileImport, win32ui.ID_FILE_IMPORT) - self.HookCommand(self.OnFileCheck, win32ui.ID_FILE_CHECK) - self.HookCommandUpdate(self.OnUpdateFileCheck, win32ui.ID_FILE_CHECK) - self.HookCommand(self.OnFileRun, win32ui.ID_FILE_RUN) - self.HookCommand(self.OnFileLocate, win32ui.ID_FILE_LOCATE) - self.HookCommand(self.OnInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE) - self.HookCommandUpdate( - self.OnUpdateInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE - ) - self.HookCommand(self.OnViewOptions, win32ui.ID_VIEW_OPTIONS) - self.HookCommand(self.OnHelpIndex, afxres.ID_HELP_INDEX) - self.HookCommand(self.OnFileSaveAll, win32ui.ID_FILE_SAVE_ALL) - self.HookCommand(self.OnViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG) - self.HookCommandUpdate(self.OnUpdateViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG) - - def CreateMainFrame(self): - return MainFrame() - - def MakeExistingDDEConnection(self): - # Use DDE to connect to an existing instance - # Return None if no existing instance - try: - from . import intpydde - except ImportError: - # No dde support! - return None - conv = intpydde.CreateConversation(self.ddeServer) - try: - conv.ConnectTo("Pythonwin", "System") - return conv - except intpydde.error: - return None - - def InitDDE(self): - # Do all the magic DDE handling. - # Returns TRUE if we have pumped the arguments to our - # remote DDE app, and we should terminate. - try: - from . import intpydde - except ImportError: - self.ddeServer = None - intpydde = None - if intpydde is not None: - self.ddeServer = intpydde.DDEServer(self) - self.ddeServer.Create("Pythonwin", intpydde.CBF_FAIL_SELFCONNECTIONS) - try: - # If there is an existing instance, pump the arguments to it. 
- connection = self.MakeExistingDDEConnection() - if connection is not None: - connection.Exec("self.Activate()") - if self.ProcessArgs(sys.argv, connection) is None: - return 1 - except: - # It is too early to 'print' an exception - we - # don't have stdout setup yet! - win32ui.DisplayTraceback( - sys.exc_info(), " - error in DDE conversation with Pythonwin" - ) - return 1 - - def InitInstance(self): - # Allow "/nodde" and "/new" to optimize this! - if ( - "/nodde" not in sys.argv - and "/new" not in sys.argv - and "-nodde" not in sys.argv - and "-new" not in sys.argv - ): - if self.InitDDE(): - return 1 # A remote DDE client is doing it for us! - else: - self.ddeServer = None - - win32ui.SetRegistryKey( - "Python %s" % (sys.winver,) - ) # MFC automatically puts the main frame caption on! - app.CApp.InitInstance(self) - - # Create the taskbar icon - win32ui.CreateDebuggerThread() - - # Allow Pythonwin to host OCX controls. - win32ui.EnableControlContainer() - - # Display the interactive window if the user wants it. - from . import interact - - interact.CreateInteractiveWindowUserPreference() - - # Load the modules we use internally. - self.LoadSystemModules() - - # Load additional module the user may want. - self.LoadUserModules() - - # Load the ToolBar state near the end of the init process, as - # there may be Toolbar IDs created by the user or other modules. - # By now all these modules should be loaded, so all the toolbar IDs loaded. - try: - self.frame.LoadBarState("ToolbarDefault") - except win32ui.error: - # MFC sucks. It does essentially "GetDlgItem(x)->Something", so if the - # toolbar with ID x does not exist, MFC crashes! Pythonwin has a trap for this - # but I need to investigate more how to prevent it (AFAIK, ensuring all the - # toolbars are created by now _should_ stop it!) - pass - - # Finally process the command line arguments. - try: - self.ProcessArgs(sys.argv) - except: - # too early for printing anything. 
- win32ui.DisplayTraceback( - sys.exc_info(), " - error processing command line args" - ) - - def ExitInstance(self): - win32ui.DestroyDebuggerThread() - try: - from . import interact - - interact.DestroyInteractiveWindow() - except: - pass - if self.ddeServer is not None: - self.ddeServer.Shutdown() - self.ddeServer = None - return app.CApp.ExitInstance(self) - - def Activate(self): - # Bring to the foreground. Mainly used when another app starts up, it asks - # this one to activate itself, then it terminates. - frame = win32ui.GetMainFrame() - frame.SetForegroundWindow() - if frame.GetWindowPlacement()[1] == win32con.SW_SHOWMINIMIZED: - frame.ShowWindow(win32con.SW_RESTORE) - - def ProcessArgs(self, args, dde=None): - # If we are going to talk to a remote app via DDE, then - # activate it! - if ( - len(args) < 1 or not args[0] - ): # argv[0]=='' when started without args, just like Python.exe! - return - - i = 0 - while i < len(args): - argType = args[i] - i += 1 - if argType.startswith("-"): - # Support dash options. Slash options are misinterpreted by python init - # as path and not finding usually 'C:\\' ends up in sys.path[0] - argType = "/" + argType[1:] - if not argType.startswith("/"): - argType = win32ui.GetProfileVal( - "Python", "Default Arg Type", "/edit" - ).lower() - i -= 1 # arg is /edit's parameter - par = i < len(args) and args[i] or "MISSING" - if argType in ("/nodde", "/new", "-nodde", "-new"): - # Already handled - pass - elif argType.startswith("/goto:"): - gotoline = int(argType[len("/goto:") :]) - if dde: - dde.Exec( - "from pywin.framework import scriptutils\n" - "ed = scriptutils.GetActiveEditControl()\n" - "if ed: ed.SetSel(ed.LineIndex(%s - 1))" % gotoline - ) - else: - from . import scriptutils - - ed = scriptutils.GetActiveEditControl() - if ed: - ed.SetSel(ed.LineIndex(gotoline - 1)) - elif argType == "/edit": - # Load up the default application. 
- i += 1 - fname = win32api.GetFullPathName(par) - if not os.path.isfile(fname): - # if we don't catch this, OpenDocumentFile() (actually - # PyCDocument.SetPathName() in - # pywin.scintilla.document.CScintillaDocument.OnOpenDocument) - # segfaults Pythonwin on recent PY3 builds (b228) - win32ui.MessageBox( - "No such file: %s\n\nCommand Line: %s" - % (fname, win32api.GetCommandLine()), - "Open file for edit", - win32con.MB_ICONERROR, - ) - continue - if dde: - dde.Exec("win32ui.GetApp().OpenDocumentFile(%s)" % (repr(fname))) - else: - win32ui.GetApp().OpenDocumentFile(par) - elif argType == "/rundlg": - if dde: - dde.Exec( - "from pywin.framework import scriptutils;scriptutils.RunScript(%r, %r, 1)" - % (par, " ".join(args[i + 1 :])) - ) - else: - from . import scriptutils - - scriptutils.RunScript(par, " ".join(args[i + 1 :])) - return - elif argType == "/run": - if dde: - dde.Exec( - "from pywin.framework import scriptutils;scriptutils.RunScript(%r, %r, 0)" - % (par, " ".join(args[i + 1 :])) - ) - else: - from . import scriptutils - - scriptutils.RunScript(par, " ".join(args[i + 1 :]), 0) - return - elif argType == "/app": - raise RuntimeError( - "/app only supported for new instances of Pythonwin.exe" - ) - elif argType == "/dde": # Send arbitary command - if dde is not None: - dde.Exec(par) - else: - win32ui.MessageBox( - "The /dde command can only be used\r\nwhen Pythonwin is already running" - ) - i += 1 - else: - raise ValueError("Command line argument not recognised: %s" % argType) - - def LoadSystemModules(self): - self.DoLoadModules("pywin.framework.editor,pywin.framework.stdin") - - def LoadUserModules(self, moduleNames=None): - # Load the users modules. - if moduleNames is None: - default = "pywin.framework.sgrepmdi,pywin.framework.mdi_pychecker" - moduleNames = win32ui.GetProfileVal("Python", "Startup Modules", default) - self.DoLoadModules(moduleNames) - - def DoLoadModules(self, moduleNames): # ", sep string of module names. 
- if not moduleNames: - return - modules = moduleNames.split(",") - for module in modules: - try: - __import__(module) - except: # Catch em all, else the app itself dies! 'ImportError: - traceback.print_exc() - msg = 'Startup import of user module "%s" failed' % module - print(msg) - win32ui.MessageBox(msg) - - # - # DDE Callback - # - def OnDDECommand(self, command): - try: - exec(command + "\n") - except: - print("ERROR executing DDE command: ", command) - traceback.print_exc() - raise - - # - # General handlers - # - def OnViewBrowse(self, id, code): - "Called when ViewBrowse message is received" - from pywin.tools import browser - - obName = dialog.GetSimpleInput("Object", "__builtins__", "Browse Python Object") - if obName is None: - return - try: - browser.Browse(eval(obName, __main__.__dict__, __main__.__dict__)) - except NameError: - win32ui.MessageBox("This is no object with this name") - except AttributeError: - win32ui.MessageBox("The object has no attribute of that name") - except: - traceback.print_exc() - win32ui.MessageBox("This object can not be browsed") - - def OnFileImport(self, id, code): - "Called when a FileImport message is received. Import the current or specified file" - from . import scriptutils - - scriptutils.ImportFile() - - def OnFileCheck(self, id, code): - "Called when a FileCheck message is received. Check the current file." - from . import scriptutils - - scriptutils.CheckFile() - - def OnUpdateFileCheck(self, cmdui): - from . import scriptutils - - cmdui.Enable(scriptutils.GetActiveFileName(0) is not None) - - def OnFileRun(self, id, code): - "Called when a FileRun message is received." - from . import scriptutils - - showDlg = win32api.GetKeyState(win32con.VK_SHIFT) >= 0 - scriptutils.RunScript(None, None, showDlg) - - def OnFileLocate(self, id, code): - from . import scriptutils - - global lastLocateFileName # save the new version away for next time... 
- - name = dialog.GetSimpleInput( - "File name", lastLocateFileName, "Locate Python File" - ) - if name is None: # Cancelled. - return - lastLocateFileName = name - # if ".py" supplied, rip it off! - # should also check for .pys and .pyw - if lastLocateFileName[-3:].lower() == ".py": - lastLocateFileName = lastLocateFileName[:-3] - lastLocateFileName = lastLocateFileName.replace(".", "\\") - newName = scriptutils.LocatePythonFile(lastLocateFileName) - if newName is None: - win32ui.MessageBox("The file '%s' can not be located" % lastLocateFileName) - else: - win32ui.GetApp().OpenDocumentFile(newName) - - # Display all the "options" proprety pages we can find - def OnViewOptions(self, id, code): - win32ui.InitRichEdit() - sheet = dialog.PropertySheet("Pythonwin Options") - # Add property pages we know about that need manual work. - from pywin.dialogs import ideoptions - - sheet.AddPage(ideoptions.OptionsPropPage()) - - from . import toolmenu - - sheet.AddPage(toolmenu.ToolMenuPropPage()) - - # Get other dynamic pages from templates. - pages = [] - for template in self.GetDocTemplateList(): - try: - # Dont actually call the function with the exception handler. - getter = template.GetPythonPropertyPages - except AttributeError: - # Template does not provide property pages! - continue - pages = pages + getter() - - # Debugger template goes at the end - try: - from pywin.debugger import configui - except ImportError: - configui = None - if configui is not None: - pages.append(configui.DebuggerOptionsPropPage()) - # Now simply add the pages, and display the dialog. - for page in pages: - sheet.AddPage(page) - - if sheet.DoModal() == win32con.IDOK: - win32ui.SetStatusText("Applying configuration changes...", 1) - win32ui.DoWaitCursor(1) - # Tell every Window in our app that win.ini has changed! 
- win32ui.GetMainFrame().SendMessageToDescendants( - win32con.WM_WININICHANGE, 0, 0 - ) - win32ui.DoWaitCursor(0) - - def OnInteractiveWindow(self, id, code): - # toggle the existing state. - from . import interact - - interact.ToggleInteractiveWindow() - - def OnUpdateInteractiveWindow(self, cmdui): - try: - interact = sys.modules["pywin.framework.interact"] - state = interact.IsInteractiveWindowVisible() - except KeyError: # Interactive module hasnt ever been imported. - state = 0 - cmdui.Enable() - cmdui.SetCheck(state) - - def OnFileSaveAll(self, id, code): - # Only attempt to save editor documents. - from pywin.framework.editor import editorTemplate - - num = 0 - for doc in editorTemplate.GetDocumentList(): - if doc.IsModified() and doc.GetPathName(): - num = num = 1 - doc.OnSaveDocument(doc.GetPathName()) - win32ui.SetStatusText("%d documents saved" % num, 1) - - def OnViewToolbarDbg(self, id, code): - if code == 0: - return not win32ui.GetMainFrame().OnBarCheck(id) - - def OnUpdateViewToolbarDbg(self, cmdui): - win32ui.GetMainFrame().OnUpdateControlBarMenu(cmdui) - cmdui.Enable(1) - - def OnHelpIndex(self, id, code): - from . import help - - help.SelectAndRunHelpFile() - - -# As per the comments in app.py, this use is depreciated. -# app.AppBuilder = InteractivePythonApp - -# Now all we do is create the application -thisApp = InteractivePythonApp() diff --git a/lib/pythonwin/pywin/framework/intpydde.py b/lib/pythonwin/pywin/framework/intpydde.py deleted file mode 100644 index 1f869b0f..00000000 --- a/lib/pythonwin/pywin/framework/intpydde.py +++ /dev/null @@ -1,60 +0,0 @@ -# DDE support for Pythonwin -# -# Seems to work fine (in the context that IE4 seems to have broken -# DDE on _all_ NT4 machines I have tried, but only when a "Command Prompt" window -# is open. Strange, but true. If you have problems with this, close all Command Prompts! 
- - -import sys -import traceback - -import win32api -import win32ui -from dde import * -from pywin.mfc import object - - -class DDESystemTopic(object.Object): - def __init__(self, app): - self.app = app - object.Object.__init__(self, CreateServerSystemTopic()) - - def Exec(self, data): - try: - # print "Executing", cmd - self.app.OnDDECommand(data) - except: - t, v, tb = sys.exc_info() - # The DDE Execution failed. - print("Error executing DDE command.") - traceback.print_exception(t, v, tb) - return 0 - - -class DDEServer(object.Object): - def __init__(self, app): - self.app = app - object.Object.__init__(self, CreateServer()) - self.topic = self.item = None - - def CreateSystemTopic(self): - return DDESystemTopic(self.app) - - def Shutdown(self): - self._obj_.Shutdown() - self._obj_.Destroy() - if self.topic is not None: - self.topic.Destroy() - self.topic = None - if self.item is not None: - self.item.Destroy() - self.item = None - - def OnCreate(self): - return 1 - - def Status(self, msg): - try: - win32ui.SetStatusText(msg) - except win32ui.error: - pass diff --git a/lib/pythonwin/pywin/framework/mdi_pychecker.py b/lib/pythonwin/pywin/framework/mdi_pychecker.py deleted file mode 100644 index c89f33b9..00000000 --- a/lib/pythonwin/pywin/framework/mdi_pychecker.py +++ /dev/null @@ -1,849 +0,0 @@ -###################################################################### -## -## The Pychecker MDI Plug-In UserModule for Pythonwin -## -## contributed by Robert Kiendl -## -## Style is similar to (and inherited) from the SGrepMDI UserModule -## -## Usage: -## -## Start Pychecker on current file: Menu/File/New../Pychecker. -## Use it: Jump to Pychecker warning source lines by double-click. -## Auto-add "#$pycheck_no" / "#$pycheck_no=specific-re-pattern" tags -## to source lines by context/right-mouse-click on warning lines. -## -## It requires pychecker installed and the pychecker.bat to be on -## the PATH. 
Example pychecker.bat: -## -## REM pychecker.bat -## C:\bin\python.exe C:\PYTHON23\Lib\site-packages\pychecker\checker.py %1 %2 %3 %4 %5 %6 %7 %8 %9 -## -## Adding it as default module in PythonWin: -## -## +++ ./intpyapp.py 2006-10-02 17:59:32.974161600 +0200 -## @@ -272,7 +282,7 @@ -## def LoadUserModules(self, moduleNames = None): -## # Load the users modules. -## if moduleNames is None: -## - default = "sgrepmdi" -## + default = "sgrepmdi,mdi_pychecker" -## moduleNames=win32ui.GetProfileVal('Python','Startup Modules',default) -## self.DoLoadModules(moduleNames) -## -###################################################################### - -import glob -import os -import re -import sys -import time - -import win32api -import win32con -import win32ui -from pywin.mfc import dialog, docview, window - -from . import scriptutils - - -def getsubdirs(d): - dlist = [] - flist = glob.glob(d + "\\*") - for f in flist: - if os.path.isdir(f): - dlist.append(f) - dlist = dlist + getsubdirs(f) - return dlist - - -class dirpath: - def __init__(self, str, recurse=0): - dp = str.split(";") - dirs = {} - for d in dp: - if os.path.isdir(d): - d = d.lower() - if d not in dirs: - dirs[d] = None - if recurse: - subdirs = getsubdirs(d) - for sd in subdirs: - sd = sd.lower() - if sd not in dirs: - dirs[sd] = None - elif os.path.isfile(d): - pass - else: - x = None - if d in os.environ: - x = dirpath(os.environ[d]) - elif d[:5] == "HKEY_": - keystr = d.split("\\") - try: - root = eval("win32con." 
+ keystr[0]) - except: - win32ui.MessageBox( - "Can't interpret registry key name '%s'" % keystr[0] - ) - try: - subkey = "\\".join(keystr[1:]) - val = win32api.RegQueryValue(root, subkey) - if val: - x = dirpath(val) - else: - win32ui.MessageBox( - "Registry path '%s' did not return a path entry" % d - ) - except: - win32ui.MessageBox( - "Can't interpret registry key value: %s" % keystr[1:] - ) - else: - win32ui.MessageBox("Directory '%s' not found" % d) - if x: - for xd in x: - if xd not in dirs: - dirs[xd] = None - if recurse: - subdirs = getsubdirs(xd) - for sd in subdirs: - sd = sd.lower() - if sd not in dirs: - dirs[sd] = None - self.dirs = [] - for d in dirs.keys(): - self.dirs.append(d) - - def __getitem__(self, key): - return self.dirs[key] - - def __len__(self): - return len(self.dirs) - - def __setitem__(self, key, value): - self.dirs[key] = value - - def __delitem__(self, key): - del self.dirs[key] - - def __getslice__(self, lo, hi): - return self.dirs[lo:hi] - - def __setslice__(self, lo, hi, seq): - self.dirs[lo:hi] = seq - - def __delslice__(self, lo, hi): - del self.dirs[lo:hi] - - def __add__(self, other): - if type(other) == type(self) or type(other) == type([]): - return self.dirs + other.dirs - - def __radd__(self, other): - if type(other) == type(self) or type(other) == type([]): - return other.dirs + self.dirs - - -# Group(1) is the filename, group(2) is the lineno. 
-# regexGrepResult=regex.compile("^\\([a-zA-Z]:.*\\)(\\([0-9]+\\))") -# regexGrep=re.compile(r"^([a-zA-Z]:[^(]*)\((\d+)\)") -regexGrep = re.compile(r"^(..[^\(:]+)?[\(:](\d+)[\):]:?\s*(.*)") - -# these are the atom numbers defined by Windows for basic dialog controls - -BUTTON = 0x80 -EDIT = 0x81 -STATIC = 0x82 -LISTBOX = 0x83 -SCROLLBAR = 0x84 -COMBOBOX = 0x85 - - -class TheTemplate(docview.RichEditDocTemplate): - def __init__(self): - docview.RichEditDocTemplate.__init__( - self, win32ui.IDR_TEXTTYPE, TheDocument, TheFrame, TheView - ) - self.SetDocStrings( - "\nPychecker\nPychecker\nPychecker params (*.pychecker)\n.pychecker\n\n\n" - ) - win32ui.GetApp().AddDocTemplate(self) - self.docparams = None - - def MatchDocType(self, fileName, fileType): - doc = self.FindOpenDocument(fileName) - if doc: - return doc - ext = os.path.splitext(fileName)[1].lower() - if ext == ".pychecker": - return win32ui.CDocTemplate_Confidence_yesAttemptNative - return win32ui.CDocTemplate_Confidence_noAttempt - - def setParams(self, params): - self.docparams = params - - def readParams(self): - tmp = self.docparams - self.docparams = None - return tmp - - -class TheFrame(window.MDIChildWnd): - # The template and doc params will one day be removed. - def __init__(self, wnd=None): - window.MDIChildWnd.__init__(self, wnd) - - -class TheDocument(docview.RichEditDoc): - def __init__(self, template): - docview.RichEditDoc.__init__(self, template) - self.dirpattern = "" - self.filpattern = "" - self.greppattern = "" - self.casesensitive = 1 - self.recurse = 1 - self.verbose = 0 - - def OnOpenDocument(self, fnm): - # this bizarre stuff with params is so right clicking in a result window - # and starting a new grep can communicate the default parameters to the - # new grep. 
- try: - params = open(fnm, "r").read() - except: - params = None - self.setInitParams(params) - return self.OnNewDocument() - - def OnCloseDocument(self): - try: - win32ui.GetApp().DeleteIdleHandler(self.idleHandler) - except: - pass - return self._obj_.OnCloseDocument() - - def saveInitParams(self): - # Only save the flags, not the text boxes. - paramstr = "\t\t\t%d\t%d" % (self.casesensitive, self.recurse) - win32ui.WriteProfileVal("Pychecker", "Params", paramstr) - - def setInitParams(self, paramstr): - if paramstr is None: - paramstr = win32ui.GetProfileVal("Pychecker", "Params", "\t\t\t1\t0\t0") - params = paramstr.split("\t") - if len(params) < 3: - params = params + [""] * (3 - len(params)) - if len(params) < 6: - params = params + [0] * (6 - len(params)) - self.dirpattern = params[0] - self.filpattern = params[1] - self.greppattern = params[2] or "-#1000 --only" - self.casesensitive = int(params[3]) - self.recurse = int(params[4]) - self.verbose = int(params[5]) - # setup some reasonable defaults. 
- if not self.dirpattern: - try: - editor = win32ui.GetMainFrame().MDIGetActive()[0].GetEditorView() - self.dirpattern = os.path.abspath( - os.path.dirname(editor.GetDocument().GetPathName()) - ) - except (AttributeError, win32ui.error): - self.dirpattern = os.getcwd() - if not self.filpattern: - try: - editor = win32ui.GetMainFrame().MDIGetActive()[0].GetEditorView() - self.filpattern = editor.GetDocument().GetPathName() - except AttributeError: - self.filpattern = "*.py" - - def OnNewDocument(self): - if self.dirpattern == "": - self.setInitParams(greptemplate.readParams()) - d = TheDialog( - self.dirpattern, - self.filpattern, - self.greppattern, - self.casesensitive, - self.recurse, - self.verbose, - ) - if d.DoModal() == win32con.IDOK: - self.dirpattern = d["dirpattern"] - self.filpattern = d["filpattern"] - self.greppattern = d["greppattern"] - # self.casesensitive = d['casesensitive'] - # self.recurse = d['recursive'] - # self.verbose = d['verbose'] - self.doSearch() - self.saveInitParams() - return 1 - return 0 # cancelled - return zero to stop frame creation. - - def doSearch(self): - self.dp = dirpath(self.dirpattern, self.recurse) - self.SetTitle( - "Pychecker Run '%s' (options: %s)" % (self.filpattern, self.greppattern) - ) - # self.text = [] - self.GetFirstView().Append( - "#Pychecker Run in " + self.dirpattern + " %s\n" % time.asctime() - ) - if self.verbose: - self.GetFirstView().Append("# =" + repr(self.dp.dirs) + "\n") - self.GetFirstView().Append("# Files " + self.filpattern + "\n") - self.GetFirstView().Append("# Options " + self.greppattern + "\n") - self.fplist = self.filpattern.split(";") - self.GetFirstView().Append( - "# Running... ( double click on result lines in order to jump to the source code ) \n" - ) - win32ui.SetStatusText("Pychecker running. 
Please wait...", 0) - self.dpndx = self.fpndx = 0 - self.fndx = -1 - if not self.dp: - self.GetFirstView().Append( - "# ERROR: '%s' does not resolve to any search locations" - % self.dirpattern - ) - self.SetModifiedFlag(0) - else: - ##self.flist = glob.glob(self.dp[0]+'\\'+self.fplist[0]) - import operator - - self.flist = reduce(operator.add, list(map(glob.glob, self.fplist))) - # import pywin.debugger;pywin.debugger.set_trace() - self.startPycheckerRun() - - def idleHandler(self, handler, count): - import time - - time.sleep(0.001) - if self.result != None: - win32ui.GetApp().DeleteIdleHandler(self.idleHandler) - return 0 - return 1 # more - - def startPycheckerRun(self): - self.result = None - old = win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_APPSTARTING)) - win32ui.GetApp().AddIdleHandler(self.idleHandler) - import _thread - - _thread.start_new(self.threadPycheckerRun, ()) - ##win32api.SetCursor(old) - - def threadPycheckerRun(self): - result = "" - rc = -1 - try: - options = self.greppattern - files = " ".join(self.flist) - # Recently MarkH has failed to run pychecker without it having - # been explicitly installed - so we assume it is and locate it - # from its default location. 
- # Step1 - get python.exe - py = os.path.join(sys.prefix, "python.exe") - if not os.path.isfile(py): - if "64 bit" in sys.version: - py = os.path.join(sys.prefix, "PCBuild", "amd64", "python.exe") - else: - py = os.path.join(sys.prefix, "PCBuild", "python.exe") - try: - py = win32api.GetShortPathName(py) - except win32api.error: - py = "" - # Find checker.py - import sysconfig - - pychecker = os.path.join( - sysconfig.get_paths()["purelib"], "pychecker", "checker.py" - ) - if not os.path.isfile(py): - result = "Can't find python.exe!\n" - elif not os.path.isfile(pychecker): - result = ( - "Can't find checker.py - please install pychecker " - "(or run 'setup.py install' if you have the source version)\n" - ) - else: - cmd = '%s "%s" %s %s 2>&1' % (py, pychecker, options, files) - ##fin,fout,ferr=os.popen3(cmd) - ##result=ferr.read()+fout.read() - result = os.popen(cmd).read() - ##rc=f.close() - self.GetFirstView().Append(result) - finally: - self.result = result - print("== Pychecker run finished ==") - self.GetFirstView().Append("\n" + "== Pychecker run finished ==") - self.SetModifiedFlag(0) - - def _inactive_idleHandler(self, handler, count): - self.fndx = self.fndx + 1 - if self.fndx < len(self.flist): - f = self.flist[self.fndx] - if self.verbose: - self.GetFirstView().Append("# .." 
+ f + "\n") - win32ui.SetStatusText("Searching " + f, 0) - lines = open(f, "r").readlines() - for i in range(len(lines)): - line = lines[i] - if self.pat.search(line) != None: - self.GetFirstView().Append(f + "(" + repr(i + 1) + ") " + line) - else: - self.fndx = -1 - self.fpndx = self.fpndx + 1 - if self.fpndx < len(self.fplist): - self.flist = glob.glob( - self.dp[self.dpndx] + "\\" + self.fplist[self.fpndx] - ) - else: - self.fpndx = 0 - self.dpndx = self.dpndx + 1 - if self.dpndx < len(self.dp): - self.flist = glob.glob( - self.dp[self.dpndx] + "\\" + self.fplist[self.fpndx] - ) - else: - win32ui.SetStatusText("Search complete.", 0) - self.SetModifiedFlag(0) # default to not modified. - try: - win32ui.GetApp().DeleteIdleHandler(self.idleHandler) - except: - pass - return 0 - return 1 - - def GetParams(self): - return ( - self.dirpattern - + "\t" - + self.filpattern - + "\t" - + self.greppattern - + "\t" - + repr(self.casesensitive) - + "\t" - + repr(self.recurse) - + "\t" - + repr(self.verbose) - ) - - def OnSaveDocument(self, filename): - # print 'OnSaveDocument() filename=',filename - savefile = open(filename, "wb") - txt = self.GetParams() + "\n" - # print 'writing',txt - savefile.write(txt) - savefile.close() - self.SetModifiedFlag(0) - return 1 - - -ID_OPEN_FILE = 0xE500 -ID_PYCHECKER = 0xE501 -ID_SAVERESULTS = 0x502 -ID_TRYAGAIN = 0x503 -ID_ADDCOMMENT = 0x504 -ID_ADDPYCHECKNO2 = 0x505 - - -class TheView(docview.RichEditView): - def __init__(self, doc): - docview.RichEditView.__init__(self, doc) - self.SetWordWrap(win32ui.CRichEditView_WrapNone) - self.HookHandlers() - - def OnInitialUpdate(self): - rc = self._obj_.OnInitialUpdate() - format = (-402653169, 0, 200, 0, 0, 0, 49, "Courier New") - self.SetDefaultCharFormat(format) - return rc - - def HookHandlers(self): - self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN) - self.HookCommand(self.OnCmdOpenFile, ID_OPEN_FILE) - self.HookCommand(self.OnCmdThe, ID_PYCHECKER) - 
self.HookCommand(self.OnCmdSave, ID_SAVERESULTS) - self.HookCommand(self.OnTryAgain, ID_TRYAGAIN) - self.HookCommand(self.OnAddComment, ID_ADDCOMMENT) - self.HookCommand(self.OnAddComment, ID_ADDPYCHECKNO2) - self.HookMessage(self.OnLDblClick, win32con.WM_LBUTTONDBLCLK) - - def OnLDblClick(self, params): - line = self.GetLine() - regexGrepResult = regexGrep.match(line) - if regexGrepResult: - fname = regexGrepResult.group(1) - line = int(regexGrepResult.group(2)) - scriptutils.JumpToDocument(fname, line) - return 0 # dont pass on - return 1 # pass it on by default. - - def OnRClick(self, params): - menu = win32ui.CreatePopupMenu() - flags = win32con.MF_STRING | win32con.MF_ENABLED - lineno = self._obj_.LineFromChar(-1) # selection or current line - line = self._obj_.GetLine(lineno) - regexGrepResult = regexGrep.match(line) - charstart, charend = self._obj_.GetSel() - if regexGrepResult: - self.fnm = regexGrepResult.group(1) - self.lnnum = int(regexGrepResult.group(2)) - menu.AppendMenu(flags, ID_OPEN_FILE, "&Open " + self.fnm) - menu.AppendMenu( - flags, ID_ADDCOMMENT, "&Add to source: Comment Tag/#$pycheck_no .." 
- ) - menu.AppendMenu( - flags, - ID_ADDPYCHECKNO2, - "&Add to source: Specific #$pycheck_no=%(errtext)s ..", - ) - menu.AppendMenu(win32con.MF_SEPARATOR) - menu.AppendMenu(flags, ID_TRYAGAIN, "&Try Again") - menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, "Cu&t") - menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, "&Copy") - menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, "&Paste") - menu.AppendMenu(flags, win32con.MF_SEPARATOR) - menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, "&Select all") - menu.AppendMenu(flags, win32con.MF_SEPARATOR) - menu.AppendMenu(flags, ID_SAVERESULTS, "Sa&ve results") - menu.TrackPopupMenu(params[5]) - return 0 - - def OnAddComment(self, cmd, code): - addspecific = cmd == ID_ADDPYCHECKNO2 - _ = list(self.GetSel()) - _.sort() - start, end = _ - line_start, line_end = self.LineFromChar(start), self.LineFromChar(end) - first = 1 - for i in range(line_start, line_end + 1): - line = self.GetLine(i) - m = regexGrep.match(line) - if m: - if first: - first = 0 - cmnt = dialog.GetSimpleInput( - "Add to %s lines" % (line_end - line_start + 1), - addspecific - and " #$pycheck_no=%(errtext)s" - or " #$pycheck_no", - ) - if not cmnt: - return 0 - ##import pywin.debugger;pywin.debugger.set_trace() - fname = m.group(1) - line = int(m.group(2)) - view = scriptutils.JumpToDocument(fname, line) - pos = view.LineIndex(line) - 1 - if view.GetTextRange(pos - 1, pos) in ("\r", "\n"): - pos -= 1 - view.SetSel(pos, pos) - errtext = m.group(3) - if start != end and line_start == line_end: - errtext = self.GetSelText() - errtext = repr(re.escape(errtext).replace("\ ", " ")) - view.ReplaceSel(addspecific and cmnt % locals() or cmnt) - return 0 - - def OnCmdOpenFile(self, cmd, code): - doc = win32ui.GetApp().OpenDocumentFile(self.fnm) - if doc: - vw = doc.GetFirstView() - # hope you have an editor that implements GotoLine()! 
- try: - vw.GotoLine(int(self.lnnum)) - except: - pass - return 0 - - def OnCmdThe(self, cmd, code): - curparamsstr = self.GetDocument().GetParams() - params = curparamsstr.split("\t") - params[2] = self.sel - greptemplate.setParams("\t".join(params)) - greptemplate.OpenDocumentFile() - return 0 - - def OnTryAgain(self, cmd, code): - greptemplate.setParams(self.GetDocument().GetParams()) - greptemplate.OpenDocumentFile() - return 0 - - def OnCmdSave(self, cmd, code): - flags = win32con.OFN_OVERWRITEPROMPT - dlg = win32ui.CreateFileDialog( - 0, None, None, flags, "Text Files (*.txt)|*.txt||", self - ) - dlg.SetOFNTitle("Save Results As") - if dlg.DoModal() == win32con.IDOK: - pn = dlg.GetPathName() - self._obj_.SaveFile(pn) - return 0 - - def Append(self, strng): - numlines = self.GetLineCount() - endpos = self.LineIndex(numlines - 1) + len(self.GetLine(numlines - 1)) - self.SetSel(endpos, endpos) - self.ReplaceSel(strng) - - -class TheDialog(dialog.Dialog): - def __init__(self, dp, fp, gp, cs, r, v): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - CS = win32con.WS_CHILD | win32con.WS_VISIBLE - tmp = [ - ["Pychecker Run", (0, 0, 210, 90), style, None, (8, "MS Sans Serif")], - ] - tmp.append([STATIC, "Files:", -1, (7, 7, 50, 9), CS]) - tmp.append( - [ - EDIT, - gp, - 103, - (52, 7, 144, 11), - CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append([STATIC, "Directories:", -1, (7, 20, 50, 9), CS]) - tmp.append( - [ - EDIT, - dp, - 102, - (52, 20, 128, 11), - CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append( - [ - BUTTON, - "...", - 110, - (182, 20, 16, 11), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append([STATIC, "Options:", -1, (7, 33, 50, 9), CS]) - tmp.append( - [ - EDIT, - fp, - 101, - (52, 33, 128, 11), - CS | win32con.WS_TABSTOP | 
win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append( - [ - BUTTON, - "...", - 111, - (182, 33, 16, 11), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - # tmp.append([BUTTON,'Case sensitive', 104, (7, 45, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) - # tmp.append([BUTTON,'Subdirectories', 105, (7, 56, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) - # tmp.append([BUTTON,'Verbose', 106, (7, 67, 72, 9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP]) - tmp.append( - [ - BUTTON, - "OK", - win32con.IDOK, - (166, 53, 32, 12), - CS | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "Cancel", - win32con.IDCANCEL, - (166, 67, 32, 12), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - dialog.Dialog.__init__(self, tmp) - self.AddDDX(101, "greppattern") - self.AddDDX(102, "dirpattern") - self.AddDDX(103, "filpattern") - # self.AddDDX(104,'casesensitive') - # self.AddDDX(105,'recursive') - # self.AddDDX(106,'verbose') - self._obj_.data["greppattern"] = gp - self._obj_.data["dirpattern"] = dp - self._obj_.data["filpattern"] = fp - # self._obj_.data['casesensitive'] = cs - # self._obj_.data['recursive'] = r - # self._obj_.data['verbose'] = v - self.HookCommand(self.OnMoreDirectories, 110) - self.HookCommand(self.OnMoreFiles, 111) - - def OnMoreDirectories(self, cmd, code): - self.getMore("Pychecker\\Directories", "dirpattern") - - def OnMoreFiles(self, cmd, code): - self.getMore("Pychecker\\File Types", "filpattern") - - def getMore(self, section, key): - self.UpdateData(1) - # get the items out of the ini file - ini = win32ui.GetProfileFileName() - secitems = win32api.GetProfileSection(section, ini) - items = [] - for secitem in secitems: - items.append(secitem.split("=")[1]) - dlg = TheParamsDialog(items) - if dlg.DoModal() == win32con.IDOK: - itemstr = ";".join(dlg.getItems()) - 
self._obj_.data[key] = itemstr - # update the ini file with dlg.getNew() - i = 0 - newitems = dlg.getNew() - if newitems: - items = items + newitems - for item in items: - win32api.WriteProfileVal(section, repr(i), item, ini) - i = i + 1 - self.UpdateData(0) - - def OnOK(self): - self.UpdateData(1) - for id, name in ( - (101, "greppattern"), - (102, "dirpattern"), - (103, "filpattern"), - ): - if not self[name]: - self.GetDlgItem(id).SetFocus() - win32api.MessageBeep() - win32ui.SetStatusText("Please enter a value") - return - self._obj_.OnOK() - - -class TheParamsDialog(dialog.Dialog): - def __init__(self, items): - self.items = items - self.newitems = [] - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - CS = win32con.WS_CHILD | win32con.WS_VISIBLE - tmp = [ - [ - "Pychecker Parameters", - (0, 0, 205, 100), - style, - None, - (8, "MS Sans Serif"), - ], - ] - tmp.append( - [ - LISTBOX, - "", - 107, - (7, 7, 150, 72), - CS - | win32con.LBS_MULTIPLESEL - | win32con.LBS_STANDARD - | win32con.LBS_HASSTRINGS - | win32con.WS_TABSTOP - | win32con.LBS_NOTIFY, - ] - ) - tmp.append( - [ - BUTTON, - "OK", - win32con.IDOK, - (167, 7, 32, 12), - CS | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "Cancel", - win32con.IDCANCEL, - (167, 23, 32, 12), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append([STATIC, "New:", -1, (2, 83, 15, 12), CS]) - tmp.append( - [ - EDIT, - "", - 108, - (18, 83, 139, 12), - CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append( - [ - BUTTON, - "Add", - 109, - (167, 83, 32, 12), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - dialog.Dialog.__init__(self, tmp) - self.HookCommand(self.OnAddItem, 109) - self.HookCommand(self.OnListDoubleClick, 107) - - def OnInitDialog(self): - lb = self.GetDlgItem(107) - for item in self.items: - 
lb.AddString(item) - return self._obj_.OnInitDialog() - - def OnAddItem(self, cmd, code): - eb = self.GetDlgItem(108) - item = eb.GetLine(0) - self.newitems.append(item) - lb = self.GetDlgItem(107) - i = lb.AddString(item) - lb.SetSel(i, 1) - return 1 - - def OnListDoubleClick(self, cmd, code): - if code == win32con.LBN_DBLCLK: - self.OnOK() - return 1 - - def OnOK(self): - lb = self.GetDlgItem(107) - self.selections = lb.GetSelTextItems() - self._obj_.OnOK() - - def getItems(self): - return self.selections - - def getNew(self): - return self.newitems - - -try: - win32ui.GetApp().RemoveDocTemplate(greptemplate) -except NameError: - pass - -greptemplate = TheTemplate() diff --git a/lib/pythonwin/pywin/framework/scriptutils.py b/lib/pythonwin/pywin/framework/scriptutils.py deleted file mode 100644 index e5bc57c8..00000000 --- a/lib/pythonwin/pywin/framework/scriptutils.py +++ /dev/null @@ -1,688 +0,0 @@ -""" -Various utilities for running/importing a script -""" -import bdb -import linecache -import os -import sys -import traceback - -import __main__ -import win32api -import win32con -import win32ui -from pywin.mfc import dialog -from pywin.mfc.docview import TreeView - -from .cmdline import ParseArgs - -RS_DEBUGGER_NONE = 0 # Dont run under the debugger. -RS_DEBUGGER_STEP = 1 # Start stepping under the debugger -RS_DEBUGGER_GO = 2 # Just run under the debugger, stopping only at break-points. -RS_DEBUGGER_PM = 3 # Dont run under debugger, but do post-mortem analysis on exception. - -debugging_options = """No debugging -Step-through in the debugger -Run in the debugger -Post-Mortem of unhandled exceptions""".split( - "\n" -) - -byte_cr = "\r".encode("ascii") -byte_lf = "\n".encode("ascii") -byte_crlf = "\r\n".encode("ascii") - - -# A dialog box for the "Run Script" command. 
-class DlgRunScript(dialog.Dialog): - "A class for the 'run script' dialog" - - def __init__(self, bHaveDebugger): - dialog.Dialog.__init__(self, win32ui.IDD_RUN_SCRIPT) - self.AddDDX(win32ui.IDC_EDIT1, "script") - self.AddDDX(win32ui.IDC_EDIT2, "args") - self.AddDDX(win32ui.IDC_COMBO1, "debuggingType", "i") - self.HookCommand(self.OnBrowse, win32ui.IDC_BUTTON2) - self.bHaveDebugger = bHaveDebugger - - def OnInitDialog(self): - rc = dialog.Dialog.OnInitDialog(self) - cbo = self.GetDlgItem(win32ui.IDC_COMBO1) - for o in debugging_options: - cbo.AddString(o) - cbo.SetCurSel(self["debuggingType"]) - if not self.bHaveDebugger: - cbo.EnableWindow(0) - - def OnBrowse(self, id, code): - if code != 0: # BN_CLICKED - return 1 - openFlags = win32con.OFN_OVERWRITEPROMPT | win32con.OFN_FILEMUSTEXIST - dlg = win32ui.CreateFileDialog( - 1, None, None, openFlags, "Python Scripts (*.py)|*.py||", self - ) - dlg.SetOFNTitle("Run Script") - if dlg.DoModal() != win32con.IDOK: - return 0 - self["script"] = dlg.GetPathName() - self.UpdateData(0) - return 0 - - -def GetDebugger(): - """Get the default Python debugger. Returns the debugger, or None. - - It is assumed the debugger has a standard "pdb" defined interface. - Currently always returns the 'pywin.debugger' debugger, or None - (pdb is _not_ returned as it is not effective in this GUI environment) - """ - try: - import pywin.debugger - - return pywin.debugger - except ImportError: - return None - - -def IsOnPythonPath(path): - "Given a path only, see if it is on the Pythonpath. Assumes path is a full path spec." - # must check that the command line arg's path is in sys.path - for syspath in sys.path: - try: - # Python 1.5 and later allows an empty sys.path entry. 
- if syspath and win32ui.FullPath(syspath) == path: - return 1 - except win32ui.error as details: - print( - "Warning: The sys.path entry '%s' is invalid\n%s" % (syspath, details) - ) - return 0 - - -def GetPackageModuleName(fileName): - """Given a filename, return (module name, new path). - eg - given "c:\a\b\c\my.py", return ("b.c.my",None) if "c:\a" is on sys.path. - If no package found, will return ("my", "c:\a\b\c") - """ - path, fname = os.path.split(fileName) - path = origPath = win32ui.FullPath(path) - fname = os.path.splitext(fname)[0] - modBits = [] - newPathReturn = None - if not IsOnPythonPath(path): - # Module not directly on the search path - see if under a package. - while len(path) > 3: # ie 'C:\' - path, modBit = os.path.split(path) - modBits.append(modBit) - # If on path, _and_ existing package of that name loaded. - if ( - IsOnPythonPath(path) - and modBit in sys.modules - and ( - os.path.exists(os.path.join(path, modBit, "__init__.py")) - or os.path.exists(os.path.join(path, modBit, "__init__.pyc")) - or os.path.exists(os.path.join(path, modBit, "__init__.pyo")) - ) - ): - modBits.reverse() - return ".".join(modBits) + "." + fname, newPathReturn - # Not found - look a level higher - else: - newPathReturn = origPath - - return fname, newPathReturn - - -def GetActiveView(): - """Gets the edit control (eg, EditView) with the focus, or None""" - try: - childFrame, bIsMaximised = win32ui.GetMainFrame().MDIGetActive() - return childFrame.GetActiveView() - except win32ui.error: - return None - - -def GetActiveEditControl(): - view = GetActiveView() - if view is None: - return None - if hasattr(view, "SCIAddText"): # Is it a scintilla control? - return view - try: - return view.GetRichEditCtrl() - except AttributeError: - pass - try: - return view.GetEditCtrl() - except AttributeError: - pass - - -def GetActiveEditorDocument(): - """Returns the active editor document and view, or (None,None) if no - active document or its not an editor document. 
- """ - view = GetActiveView() - if view is None or isinstance(view, TreeView): - return (None, None) - doc = view.GetDocument() - if hasattr(doc, "MarkerAdd"): # Is it an Editor document? - return doc, view - return (None, None) - - -def GetActiveFileName(bAutoSave=1): - """Gets the file name for the active frame, saving it if necessary. - - Returns None if it cant be found, or raises KeyboardInterrupt. - """ - pathName = None - active = GetActiveView() - if active is None: - return None - try: - doc = active.GetDocument() - pathName = doc.GetPathName() - - if bAutoSave and ( - len(pathName) > 0 - or doc.GetTitle()[:8] == "Untitled" - or doc.GetTitle()[:6] == "Script" - ): # if not a special purpose window - if doc.IsModified(): - try: - doc.OnSaveDocument(pathName) - pathName = doc.GetPathName() - - # clear the linecache buffer - linecache.clearcache() - - except win32ui.error: - raise KeyboardInterrupt - - except (win32ui.error, AttributeError): - pass - if not pathName: - return None - return pathName - - -lastScript = "" -lastArgs = "" -lastDebuggingType = RS_DEBUGGER_NONE - - -def RunScript(defName=None, defArgs=None, bShowDialog=1, debuggingType=None): - global lastScript, lastArgs, lastDebuggingType - _debugger_stop_frame_ = 1 # Magic variable so the debugger will hide me! - - # Get the debugger - may be None! - debugger = GetDebugger() - - if defName is None: - try: - pathName = GetActiveFileName() - except KeyboardInterrupt: - return # User cancelled save. 
- else: - pathName = defName - if not pathName: - pathName = lastScript - if defArgs is None: - args = "" - if pathName == lastScript: - args = lastArgs - else: - args = defArgs - if debuggingType is None: - debuggingType = lastDebuggingType - - if not pathName or bShowDialog: - dlg = DlgRunScript(debugger is not None) - dlg["script"] = pathName - dlg["args"] = args - dlg["debuggingType"] = debuggingType - if dlg.DoModal() != win32con.IDOK: - return - script = dlg["script"] - args = dlg["args"] - debuggingType = dlg["debuggingType"] - if not script: - return - if debuggingType == RS_DEBUGGER_GO and debugger is not None: - # This may surprise users - they select "Run under debugger", but - # it appears not to! Only warn when they pick from the dialog! - # First - ensure the debugger is activated to pickup any break-points - # set in the editor. - try: - # Create the debugger, but _dont_ init the debugger GUI. - rd = debugger._GetCurrentDebugger() - except AttributeError: - rd = None - if rd is not None and len(rd.breaks) == 0: - msg = "There are no active break-points.\r\n\r\nSelecting this debug option without any\r\nbreak-points is unlikely to have the desired effect\r\nas the debugger is unlikely to be invoked..\r\n\r\nWould you like to step-through in the debugger instead?" - rc = win32ui.MessageBox( - msg, - win32ui.LoadString(win32ui.IDR_DEBUGGER), - win32con.MB_YESNOCANCEL | win32con.MB_ICONINFORMATION, - ) - if rc == win32con.IDCANCEL: - return - if rc == win32con.IDYES: - debuggingType = RS_DEBUGGER_STEP - - lastDebuggingType = debuggingType - lastScript = script - lastArgs = args - else: - script = pathName - - # try and open the script. - if ( - len(os.path.splitext(script)[1]) == 0 - ): # check if no extension supplied, and give one. - script = script + ".py" - # If no path specified, try and locate the file - path, fnameonly = os.path.split(script) - if len(path) == 0: - try: - os.stat(fnameonly) # See if it is OK as is... 
- script = fnameonly - except os.error: - fullScript = LocatePythonFile(script) - if fullScript is None: - win32ui.MessageBox("The file '%s' can not be located" % script) - return - script = fullScript - else: - path = win32ui.FullPath(path) - if not IsOnPythonPath(path): - sys.path.append(path) - - # py3k fun: If we use text mode to open the file, we get \r\n - # translated so Python allows the syntax (good!), but we get back - # text already decoded from the default encoding (bad!) and Python - # ignores any encoding decls (bad!). If we use binary mode we get - # the raw bytes and Python looks at the encoding (good!) but \r\n - # chars stay in place so Python throws a syntax error (bad!). - # So: so the binary thing and manually normalize \r\n. - try: - f = open(script, "rb") - except IOError as exc: - win32ui.MessageBox( - "The file could not be opened - %s (%d)" % (exc.strerror, exc.errno) - ) - return - - # Get the source-code - as above, normalize \r\n - code = f.read().replace(byte_crlf, byte_lf).replace(byte_cr, byte_lf) + byte_lf - - # Remember and hack sys.argv for the script. - oldArgv = sys.argv - sys.argv = ParseArgs(args) - sys.argv.insert(0, script) - # sys.path[0] is the path of the script - oldPath0 = sys.path[0] - newPath0 = os.path.split(script)[0] - if not oldPath0: # if sys.path[0] is empty - sys.path[0] = newPath0 - insertedPath0 = 0 - else: - sys.path.insert(0, newPath0) - insertedPath0 = 1 - bWorked = 0 - win32ui.DoWaitCursor(1) - base = os.path.split(script)[1] - # Allow windows to repaint before starting. - win32ui.PumpWaitingMessages() - win32ui.SetStatusText("Running script %s..." % base, 1) - exitCode = 0 - from pywin.framework import interact - - # Check the debugger flags - if debugger is None and (debuggingType != RS_DEBUGGER_NONE): - win32ui.MessageBox( - "No debugger is installed. Debugging options have been ignored!" 
- ) - debuggingType = RS_DEBUGGER_NONE - - # Get a code object - ignore the debugger for this, as it is probably a syntax error - # at this point - try: - codeObject = compile(code, script, "exec") - except: - # Almost certainly a syntax error! - _HandlePythonFailure("run script", script) - # No code object which to run/debug. - return - __main__.__file__ = script - try: - if debuggingType == RS_DEBUGGER_STEP: - debugger.run(codeObject, __main__.__dict__, start_stepping=1) - elif debuggingType == RS_DEBUGGER_GO: - debugger.run(codeObject, __main__.__dict__, start_stepping=0) - else: - # Post mortem or no debugging - exec(codeObject, __main__.__dict__) - bWorked = 1 - except bdb.BdbQuit: - # Dont print tracebacks when the debugger quit, but do print a message. - print("Debugging session cancelled.") - exitCode = 1 - bWorked = 1 - except SystemExit as code: - exitCode = code - bWorked = 1 - except KeyboardInterrupt: - # Consider this successful, as we dont want the debugger. - # (but we do want a traceback!) 
- if interact.edit and interact.edit.currentView: - interact.edit.currentView.EnsureNoPrompt() - traceback.print_exc() - if interact.edit and interact.edit.currentView: - interact.edit.currentView.AppendToPrompt([]) - bWorked = 1 - except: - if interact.edit and interact.edit.currentView: - interact.edit.currentView.EnsureNoPrompt() - traceback.print_exc() - if interact.edit and interact.edit.currentView: - interact.edit.currentView.AppendToPrompt([]) - if debuggingType == RS_DEBUGGER_PM: - debugger.pm() - del __main__.__file__ - sys.argv = oldArgv - if insertedPath0: - del sys.path[0] - else: - sys.path[0] = oldPath0 - f.close() - if bWorked: - win32ui.SetStatusText("Script '%s' returned exit code %s" % (script, exitCode)) - else: - win32ui.SetStatusText("Exception raised while running script %s" % base) - try: - sys.stdout.flush() - except AttributeError: - pass - - win32ui.DoWaitCursor(0) - - -def ImportFile(): - """This code looks for the current window, and determines if it can be imported. 
If not, - it will prompt for a file name, and allow it to be imported.""" - try: - pathName = GetActiveFileName() - except KeyboardInterrupt: - pathName = None - - if pathName is not None: - if os.path.splitext(pathName)[1].lower() not in (".py", ".pyw", ".pyx"): - pathName = None - - if pathName is None: - openFlags = win32con.OFN_OVERWRITEPROMPT | win32con.OFN_FILEMUSTEXIST - dlg = win32ui.CreateFileDialog( - 1, None, None, openFlags, "Python Scripts (*.py;*.pyw)|*.py;*.pyw;*.pyx||" - ) - dlg.SetOFNTitle("Import Script") - if dlg.DoModal() != win32con.IDOK: - return 0 - - pathName = dlg.GetPathName() - - # If already imported, dont look for package - path, modName = os.path.split(pathName) - modName, modExt = os.path.splitext(modName) - newPath = None - # note that some packages (*cough* email *cough*) use "lazy importers" - # meaning sys.modules can change as a side-effect of looking at - # module.__file__ - so we must take a copy (ie, items() in py2k, - # list(items()) in py3k) - for key, mod in list(sys.modules.items()): - if getattr(mod, "__file__", None): - fname = mod.__file__ - base, ext = os.path.splitext(fname) - if ext.lower() in (".pyo", ".pyc"): - ext = ".py" - fname = base + ext - if win32ui.ComparePath(fname, pathName): - modName = key - break - else: # for not broken - modName, newPath = GetPackageModuleName(pathName) - if newPath: - sys.path.append(newPath) - - if modName in sys.modules: - bNeedReload = 1 - what = "reload" - else: - what = "import" - bNeedReload = 0 - - win32ui.SetStatusText(what.capitalize() + "ing module...", 1) - win32ui.DoWaitCursor(1) - # win32ui.GetMainFrame().BeginWaitCursor() - - try: - # always do an import, as it is cheap if it's already loaded. This ensures - # it is in our name space. 
- codeObj = compile("import " + modName, "", "exec") - except SyntaxError: - win32ui.SetStatusText('Invalid filename for import: "' + modName + '"') - return - try: - exec(codeObj, __main__.__dict__) - mod = sys.modules.get(modName) - if bNeedReload: - from importlib import reload - - mod = reload(sys.modules[modName]) - win32ui.SetStatusText( - "Successfully " - + what - + "ed module '" - + modName - + "': %s" % getattr(mod, "__file__", "") - ) - except: - _HandlePythonFailure(what) - win32ui.DoWaitCursor(0) - - -def CheckFile(): - """This code looks for the current window, and gets Python to check it - without actually executing any code (ie, by compiling only) - """ - try: - pathName = GetActiveFileName() - except KeyboardInterrupt: - return - - what = "check" - win32ui.SetStatusText(what.capitalize() + "ing module...", 1) - win32ui.DoWaitCursor(1) - try: - f = open(pathName) - except IOError as details: - print("Cant open file '%s' - %s" % (pathName, details)) - return - try: - code = f.read() + "\n" - finally: - f.close() - try: - codeObj = compile(code, pathName, "exec") - if RunTabNanny(pathName): - win32ui.SetStatusText( - "Python and the TabNanny successfully checked the file '" - + os.path.basename(pathName) - + "'" - ) - except SyntaxError: - _HandlePythonFailure(what, pathName) - except: - traceback.print_exc() - _HandlePythonFailure(what) - win32ui.DoWaitCursor(0) - - -def RunTabNanny(filename): - import io as io - - tabnanny = FindTabNanny() - if tabnanny is None: - win32ui.MessageBox("The TabNanny is not around, so the children can run amok!") - return - - # Capture the tab-nanny output - newout = io.StringIO() - old_out = sys.stderr, sys.stdout - sys.stderr = sys.stdout = newout - try: - tabnanny.check(filename) - finally: - # Restore output - sys.stderr, sys.stdout = old_out - data = newout.getvalue() - if data: - try: - lineno = data.split()[1] - lineno = int(lineno) - _JumpToPosition(filename, lineno) - try: # Try and display whitespace - 
GetActiveEditControl().SCISetViewWS(1) - except: - pass - win32ui.SetStatusText("The TabNanny found trouble at line %d" % lineno) - except (IndexError, TypeError, ValueError): - print("The tab nanny complained, but I cant see where!") - print(data) - return 0 - return 1 - - -def _JumpToPosition(fileName, lineno, col=1): - JumpToDocument(fileName, lineno, col) - - -def JumpToDocument(fileName, lineno=0, col=1, nChars=0, bScrollToTop=0): - # Jump to the position in a file. - # If lineno is <= 0, dont move the position - just open/restore. - # if nChars > 0, select that many characters. - # if bScrollToTop, the specified line will be moved to the top of the window - # (eg, bScrollToTop should be false when jumping to an error line to retain the - # context, but true when jumping to a method defn, where we want the full body. - # Return the view which is editing the file, or None on error. - doc = win32ui.GetApp().OpenDocumentFile(fileName) - if doc is None: - return None - frame = doc.GetFirstView().GetParentFrame() - try: - view = frame.GetEditorView() - if frame.GetActiveView() != view: - frame.SetActiveView(view) - frame.AutoRestore() - except AttributeError: # Not an editor frame?? 
- view = doc.GetFirstView() - if lineno > 0: - charNo = view.LineIndex(lineno - 1) - start = charNo + col - 1 - size = view.GetTextLength() - try: - view.EnsureCharsVisible(charNo) - except AttributeError: - print("Doesnt appear to be one of our views?") - view.SetSel(min(start, size), min(start + nChars, size)) - if bScrollToTop: - curTop = view.GetFirstVisibleLine() - nScroll = (lineno - 1) - curTop - view.LineScroll(nScroll, 0) - view.SetFocus() - return view - - -def _HandlePythonFailure(what, syntaxErrorPathName=None): - typ, details, tb = sys.exc_info() - if isinstance(details, SyntaxError): - try: - msg, (fileName, line, col, text) = details - if (not fileName or fileName == "") and syntaxErrorPathName: - fileName = syntaxErrorPathName - _JumpToPosition(fileName, line, col) - except (TypeError, ValueError): - msg = str(details) - win32ui.SetStatusText("Failed to " + what + " - syntax error - %s" % msg) - else: - traceback.print_exc() - win32ui.SetStatusText("Failed to " + what + " - " + str(details)) - tb = None # Clean up a cycle. - - -# Find the Python TabNanny in either the standard library or the Python Tools/Scripts directory. -def FindTabNanny(): - try: - return __import__("tabnanny") - except ImportError: - pass - # OK - not in the standard library - go looking. - filename = "tabnanny.py" - try: - path = win32api.RegQueryValue( - win32con.HKEY_LOCAL_MACHINE, - "SOFTWARE\\Python\\PythonCore\\%s\\InstallPath" % (sys.winver), - ) - except win32api.error: - print("WARNING - The Python registry does not have an 'InstallPath' setting") - print(" The file '%s' can not be located" % (filename)) - return None - fname = os.path.join(path, "Tools\\Scripts\\%s" % filename) - try: - os.stat(fname) - except os.error: - print( - "WARNING - The file '%s' can not be located in path '%s'" % (filename, path) - ) - return None - - tabnannyhome, tabnannybase = os.path.split(fname) - tabnannybase = os.path.splitext(tabnannybase)[0] - # Put tab nanny at the top of the path. 
- sys.path.insert(0, tabnannyhome) - try: - return __import__(tabnannybase) - finally: - # remove the tab-nanny from the path - del sys.path[0] - - -def LocatePythonFile(fileName, bBrowseIfDir=1): - "Given a file name, return a fully qualified file name, or None" - # first look for the exact file as specified - if not os.path.isfile(fileName): - # Go looking! - baseName = fileName - for path in sys.path: - fileName = os.path.abspath(os.path.join(path, baseName)) - if os.path.isdir(fileName): - if bBrowseIfDir: - d = win32ui.CreateFileDialog( - 1, "*.py", None, 0, "Python Files (*.py)|*.py|All files|*.*" - ) - d.SetOFNInitialDir(fileName) - rc = d.DoModal() - if rc == win32con.IDOK: - fileName = d.GetPathName() - break - else: - return None - else: - fileName = fileName + ".py" - if os.path.isfile(fileName): - break # Found it! - - else: # for not broken out of - return None - return win32ui.FullPath(fileName) diff --git a/lib/pythonwin/pywin/framework/sgrepmdi.py b/lib/pythonwin/pywin/framework/sgrepmdi.py deleted file mode 100644 index b7d938bb..00000000 --- a/lib/pythonwin/pywin/framework/sgrepmdi.py +++ /dev/null @@ -1,758 +0,0 @@ -# SGrepMDI is by Gordon McMillan (gmcm@hypernet.com) -# It does basically what Find In Files does in MSVC with a couple enhancements. -# - It saves any directories in the app's ini file (if you want to get rid -# of them you'll have to edit the file) -# - "Directories" can be directories, -# - semicolon separated lists of "directories", -# - environment variables that evaluate to "directories", -# - registry path names that evaluate to "directories", -# - all of which is recursive, so you can mix them all up. -# - It is MDI, so you can 'nest' greps and return to earlier ones, -# (ie, have multiple results open at the same time) -# - Like FIF, double clicking a line opens an editor and takes you to the line. 
-# - You can highlight text, right click and start a new grep with the selected -# text as search pattern and same directories etc as before. -# - You can save grep parameters (so you don't lose your hardearned pattern) -# from File|Save -# - You can save grep results by right clicking in the result window. -# Hats off to Mark Hammond for providing an environment where I could cobble -# something like this together in a couple evenings! - -import glob -import os -import re - -import win32api -import win32con -import win32ui -from pywin.mfc import dialog, docview, window - -from . import scriptutils - - -def getsubdirs(d): - dlist = [] - flist = glob.glob(d + "\\*") - for f in flist: - if os.path.isdir(f): - dlist.append(f) - dlist = dlist + getsubdirs(f) - return dlist - - -class dirpath: - def __init__(self, str, recurse=0): - dp = str.split(";") - dirs = {} - for d in dp: - if os.path.isdir(d): - d = d.lower() - if d not in dirs: - dirs[d] = None - if recurse: - subdirs = getsubdirs(d) - for sd in subdirs: - sd = sd.lower() - if sd not in dirs: - dirs[sd] = None - elif os.path.isfile(d): - pass - else: - x = None - if d in os.environ: - x = dirpath(os.environ[d]) - elif d[:5] == "HKEY_": - keystr = d.split("\\") - try: - root = eval("win32con." 
+ keystr[0]) - except: - win32ui.MessageBox( - "Can't interpret registry key name '%s'" % keystr[0] - ) - try: - subkey = "\\".join(keystr[1:]) - val = win32api.RegQueryValue(root, subkey) - if val: - x = dirpath(val) - else: - win32ui.MessageBox( - "Registry path '%s' did not return a path entry" % d - ) - except: - win32ui.MessageBox( - "Can't interpret registry key value: %s" % keystr[1:] - ) - else: - win32ui.MessageBox("Directory '%s' not found" % d) - if x: - for xd in x: - if xd not in dirs: - dirs[xd] = None - if recurse: - subdirs = getsubdirs(xd) - for sd in subdirs: - sd = sd.lower() - if sd not in dirs: - dirs[sd] = None - self.dirs = [] - for d in list(dirs.keys()): - self.dirs.append(d) - - def __getitem__(self, key): - return self.dirs[key] - - def __len__(self): - return len(self.dirs) - - def __setitem__(self, key, value): - self.dirs[key] = value - - def __delitem__(self, key): - del self.dirs[key] - - def __getslice__(self, lo, hi): - return self.dirs[lo:hi] - - def __setslice__(self, lo, hi, seq): - self.dirs[lo:hi] = seq - - def __delslice__(self, lo, hi): - del self.dirs[lo:hi] - - def __add__(self, other): - if type(other) == type(self) or type(other) == type([]): - return self.dirs + other.dirs - - def __radd__(self, other): - if type(other) == type(self) or type(other) == type([]): - return other.dirs + self.dirs - - -# Group(1) is the filename, group(2) is the lineno. 
-# regexGrepResult=regex.compile("^\\([a-zA-Z]:.*\\)(\\([0-9]+\\))") - -regexGrep = re.compile(r"^([a-zA-Z]:[^(]*)\(([0-9]+)\)") - -# these are the atom numbers defined by Windows for basic dialog controls - -BUTTON = 0x80 -EDIT = 0x81 -STATIC = 0x82 -LISTBOX = 0x83 -SCROLLBAR = 0x84 -COMBOBOX = 0x85 - - -class GrepTemplate(docview.RichEditDocTemplate): - def __init__(self): - docview.RichEditDocTemplate.__init__( - self, win32ui.IDR_TEXTTYPE, GrepDocument, GrepFrame, GrepView - ) - self.SetDocStrings("\nGrep\nGrep\nGrep params (*.grep)\n.grep\n\n\n") - win32ui.GetApp().AddDocTemplate(self) - self.docparams = None - - def MatchDocType(self, fileName, fileType): - doc = self.FindOpenDocument(fileName) - if doc: - return doc - ext = os.path.splitext(fileName)[1].lower() - if ext == ".grep": - return win32ui.CDocTemplate_Confidence_yesAttemptNative - return win32ui.CDocTemplate_Confidence_noAttempt - - def setParams(self, params): - self.docparams = params - - def readParams(self): - tmp = self.docparams - self.docparams = None - return tmp - - -class GrepFrame(window.MDIChildWnd): - # The template and doc params will one day be removed. - def __init__(self, wnd=None): - window.MDIChildWnd.__init__(self, wnd) - - -class GrepDocument(docview.RichEditDoc): - def __init__(self, template): - docview.RichEditDoc.__init__(self, template) - self.dirpattern = "" - self.filpattern = "" - self.greppattern = "" - self.casesensitive = 1 - self.recurse = 1 - self.verbose = 0 - - def OnOpenDocument(self, fnm): - # this bizarre stuff with params is so right clicking in a result window - # and starting a new grep can communicate the default parameters to the - # new grep. 
- try: - params = open(fnm, "r").read() - except: - params = None - self.setInitParams(params) - return self.OnNewDocument() - - def OnCloseDocument(self): - try: - win32ui.GetApp().DeleteIdleHandler(self.SearchFile) - except: - pass - return self._obj_.OnCloseDocument() - - def saveInitParams(self): - # Only save the flags, not the text boxes. - paramstr = "\t%s\t\t%d\t%d" % ( - self.filpattern, - self.casesensitive, - self.recurse, - ) - win32ui.WriteProfileVal("Grep", "Params", paramstr) - - def setInitParams(self, paramstr): - if paramstr is None: - paramstr = win32ui.GetProfileVal("Grep", "Params", "\t\t\t1\t0\t0") - params = paramstr.split("\t") - if len(params) < 3: - params = params + [""] * (3 - len(params)) - if len(params) < 6: - params = params + [0] * (6 - len(params)) - self.dirpattern = params[0] - self.filpattern = params[1] - self.greppattern = params[2] - self.casesensitive = int(params[3]) - self.recurse = int(params[4]) - self.verbose = int(params[5]) - # setup some reasonable defaults. - if not self.dirpattern: - try: - editor = win32ui.GetMainFrame().MDIGetActive()[0].GetEditorView() - self.dirpattern = os.path.abspath( - os.path.dirname(editor.GetDocument().GetPathName()) - ) - except (AttributeError, win32ui.error): - self.dirpattern = os.getcwd() - if not self.filpattern: - self.filpattern = "*.py" - - def OnNewDocument(self): - if self.dirpattern == "": - self.setInitParams(greptemplate.readParams()) - d = GrepDialog( - self.dirpattern, - self.filpattern, - self.greppattern, - self.casesensitive, - self.recurse, - self.verbose, - ) - if d.DoModal() == win32con.IDOK: - self.dirpattern = d["dirpattern"] - self.filpattern = d["filpattern"] - self.greppattern = d["greppattern"] - self.casesensitive = d["casesensitive"] - self.recurse = d["recursive"] - self.verbose = d["verbose"] - self.doSearch() - self.saveInitParams() - return 1 - return 0 # cancelled - return zero to stop frame creation. 
- - def doSearch(self): - self.dp = dirpath(self.dirpattern, self.recurse) - self.SetTitle("Grep for %s in %s" % (self.greppattern, self.filpattern)) - # self.text = [] - self.GetFirstView().Append("#Search " + self.dirpattern + "\n") - if self.verbose: - self.GetFirstView().Append("# =" + repr(self.dp.dirs) + "\n") - self.GetFirstView().Append("# Files " + self.filpattern + "\n") - self.GetFirstView().Append("# For " + self.greppattern + "\n") - self.fplist = self.filpattern.split(";") - if self.casesensitive: - self.pat = re.compile(self.greppattern) - else: - self.pat = re.compile(self.greppattern, re.IGNORECASE) - win32ui.SetStatusText("Searching. Please wait...", 0) - self.dpndx = self.fpndx = 0 - self.fndx = -1 - if not self.dp: - self.GetFirstView().Append( - "# ERROR: '%s' does not resolve to any search locations" - % self.dirpattern - ) - self.SetModifiedFlag(0) - else: - self.flist = glob.glob(self.dp[0] + "\\" + self.fplist[0]) - win32ui.GetApp().AddIdleHandler(self.SearchFile) - - def SearchFile(self, handler, count): - self.fndx = self.fndx + 1 - if self.fndx < len(self.flist): - f = self.flist[self.fndx] - if self.verbose: - self.GetFirstView().Append("# .." 
+ f + "\n") - # Directories may match the file type pattern, and files may be removed - # while grep is running - if os.path.isfile(f): - win32ui.SetStatusText("Searching " + f, 0) - lines = open(f, "r").readlines() - for i in range(len(lines)): - line = lines[i] - if self.pat.search(line) != None: - self.GetFirstView().Append(f + "(" + repr(i + 1) + ") " + line) - else: - self.fndx = -1 - self.fpndx = self.fpndx + 1 - if self.fpndx < len(self.fplist): - self.flist = glob.glob( - self.dp[self.dpndx] + "\\" + self.fplist[self.fpndx] - ) - else: - self.fpndx = 0 - self.dpndx = self.dpndx + 1 - if self.dpndx < len(self.dp): - self.flist = glob.glob( - self.dp[self.dpndx] + "\\" + self.fplist[self.fpndx] - ) - else: - win32ui.SetStatusText("Search complete.", 0) - self.SetModifiedFlag(0) # default to not modified. - try: - win32ui.GetApp().DeleteIdleHandler(self.SearchFile) - except: - pass - return 0 - return 1 - - def GetParams(self): - return ( - self.dirpattern - + "\t" - + self.filpattern - + "\t" - + self.greppattern - + "\t" - + repr(self.casesensitive) - + "\t" - + repr(self.recurse) - + "\t" - + repr(self.verbose) - ) - - def OnSaveDocument(self, filename): - # print 'OnSaveDocument() filename=',filename - savefile = open(filename, "wb") - txt = self.GetParams() + "\n" - # print 'writing',txt - savefile.write(txt) - savefile.close() - self.SetModifiedFlag(0) - return 1 - - -ID_OPEN_FILE = 0xE400 -ID_GREP = 0xE401 -ID_SAVERESULTS = 0x402 -ID_TRYAGAIN = 0x403 - - -class GrepView(docview.RichEditView): - def __init__(self, doc): - docview.RichEditView.__init__(self, doc) - self.SetWordWrap(win32ui.CRichEditView_WrapNone) - self.HookHandlers() - - def OnInitialUpdate(self): - rc = self._obj_.OnInitialUpdate() - format = (-402653169, 0, 200, 0, 0, 0, 49, "Courier New") - self.SetDefaultCharFormat(format) - return rc - - def HookHandlers(self): - self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN) - self.HookCommand(self.OnCmdOpenFile, ID_OPEN_FILE) - 
self.HookCommand(self.OnCmdGrep, ID_GREP) - self.HookCommand(self.OnCmdSave, ID_SAVERESULTS) - self.HookCommand(self.OnTryAgain, ID_TRYAGAIN) - self.HookMessage(self.OnLDblClick, win32con.WM_LBUTTONDBLCLK) - - def OnLDblClick(self, params): - line = self.GetLine() - regexGrepResult = regexGrep.match(line) - if regexGrepResult: - fname = regexGrepResult.group(1) - line = int(regexGrepResult.group(2)) - scriptutils.JumpToDocument(fname, line) - return 0 # dont pass on - return 1 # pass it on by default. - - def OnRClick(self, params): - menu = win32ui.CreatePopupMenu() - flags = win32con.MF_STRING | win32con.MF_ENABLED - lineno = self._obj_.LineFromChar(-1) # selection or current line - line = self._obj_.GetLine(lineno) - regexGrepResult = regexGrep.match(line) - if regexGrepResult: - self.fnm = regexGrepResult.group(1) - self.lnnum = int(regexGrepResult.group(2)) - menu.AppendMenu(flags, ID_OPEN_FILE, "&Open " + self.fnm) - menu.AppendMenu(win32con.MF_SEPARATOR) - menu.AppendMenu(flags, ID_TRYAGAIN, "&Try Again") - charstart, charend = self._obj_.GetSel() - if charstart != charend: - linestart = self._obj_.LineIndex(lineno) - self.sel = line[charstart - linestart : charend - linestart] - menu.AppendMenu(flags, ID_GREP, "&Grep for " + self.sel) - menu.AppendMenu(win32con.MF_SEPARATOR) - menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, "Cu&t") - menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, "&Copy") - menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, "&Paste") - menu.AppendMenu(flags, win32con.MF_SEPARATOR) - menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, "&Select all") - menu.AppendMenu(flags, win32con.MF_SEPARATOR) - menu.AppendMenu(flags, ID_SAVERESULTS, "Sa&ve results") - menu.TrackPopupMenu(params[5]) - return 0 - - def OnCmdOpenFile(self, cmd, code): - doc = win32ui.GetApp().OpenDocumentFile(self.fnm) - if doc: - vw = doc.GetFirstView() - # hope you have an editor that implements GotoLine()! 
- try: - vw.GotoLine(int(self.lnnum)) - except: - pass - return 0 - - def OnCmdGrep(self, cmd, code): - if code != 0: - return 1 - curparamsstr = self.GetDocument().GetParams() - params = curparamsstr.split("\t") - params[2] = self.sel - greptemplate.setParams("\t".join(params)) - greptemplate.OpenDocumentFile() - return 0 - - def OnTryAgain(self, cmd, code): - if code != 0: - return 1 - greptemplate.setParams(self.GetDocument().GetParams()) - greptemplate.OpenDocumentFile() - return 0 - - def OnCmdSave(self, cmd, code): - if code != 0: - return 1 - flags = win32con.OFN_OVERWRITEPROMPT - dlg = win32ui.CreateFileDialog( - 0, None, None, flags, "Text Files (*.txt)|*.txt||", self - ) - dlg.SetOFNTitle("Save Results As") - if dlg.DoModal() == win32con.IDOK: - pn = dlg.GetPathName() - self._obj_.SaveTextFile(pn) - return 0 - - def Append(self, strng): - numlines = self.GetLineCount() - endpos = self.LineIndex(numlines - 1) + len(self.GetLine(numlines - 1)) - self.SetSel(endpos, endpos) - self.ReplaceSel(strng) - - -class GrepDialog(dialog.Dialog): - def __init__(self, dp, fp, gp, cs, r, v): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - CS = win32con.WS_CHILD | win32con.WS_VISIBLE - tmp = [ - ["Grep", (0, 0, 210, 90), style, None, (8, "MS Sans Serif")], - ] - tmp.append([STATIC, "Grep For:", -1, (7, 7, 50, 9), CS]) - tmp.append( - [ - EDIT, - gp, - 101, - (52, 7, 144, 11), - CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append([STATIC, "Directories:", -1, (7, 20, 50, 9), CS]) - tmp.append( - [ - EDIT, - dp, - 102, - (52, 20, 128, 11), - CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append( - [ - BUTTON, - "...", - 110, - (182, 20, 16, 11), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append([STATIC, "File types:", -1, (7, 33, 50, 9), CS]) - tmp.append( - [ 
- EDIT, - fp, - 103, - (52, 33, 128, 11), - CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append( - [ - BUTTON, - "...", - 111, - (182, 33, 16, 11), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "Case sensitive", - 104, - (7, 45, 72, 9), - CS - | win32con.BS_AUTOCHECKBOX - | win32con.BS_LEFTTEXT - | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "Subdirectories", - 105, - (7, 56, 72, 9), - CS - | win32con.BS_AUTOCHECKBOX - | win32con.BS_LEFTTEXT - | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "Verbose", - 106, - (7, 67, 72, 9), - CS - | win32con.BS_AUTOCHECKBOX - | win32con.BS_LEFTTEXT - | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "OK", - win32con.IDOK, - (166, 53, 32, 12), - CS | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "Cancel", - win32con.IDCANCEL, - (166, 67, 32, 12), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - dialog.Dialog.__init__(self, tmp) - self.AddDDX(101, "greppattern") - self.AddDDX(102, "dirpattern") - self.AddDDX(103, "filpattern") - self.AddDDX(104, "casesensitive") - self.AddDDX(105, "recursive") - self.AddDDX(106, "verbose") - self._obj_.data["greppattern"] = gp - self._obj_.data["dirpattern"] = dp - self._obj_.data["filpattern"] = fp - self._obj_.data["casesensitive"] = cs - self._obj_.data["recursive"] = r - self._obj_.data["verbose"] = v - self.HookCommand(self.OnMoreDirectories, 110) - self.HookCommand(self.OnMoreFiles, 111) - - def OnMoreDirectories(self, cmd, code): - if code != 0: - return 1 - self.getMore("Grep\\Directories", "dirpattern") - - def OnMoreFiles(self, cmd, code): - if code != 0: - return 1 - self.getMore("Grep\\File Types", "filpattern") - - def getMore(self, section, key): - self.UpdateData(1) - # get the items out of the ini file - ini = win32ui.GetProfileFileName() - secitems = win32api.GetProfileSection(section, ini) - items = [] 
- for secitem in secitems: - items.append(secitem.split("=")[1]) - dlg = GrepParamsDialog(items) - if dlg.DoModal() == win32con.IDOK: - itemstr = ";".join(dlg.getItems()) - self._obj_.data[key] = itemstr - # update the ini file with dlg.getNew() - i = 0 - newitems = dlg.getNew() - if newitems: - items = items + newitems - for item in items: - win32api.WriteProfileVal(section, repr(i), item, ini) - i = i + 1 - self.UpdateData(0) - - def OnOK(self): - self.UpdateData(1) - for id, name in ( - (101, "greppattern"), - (102, "dirpattern"), - (103, "filpattern"), - ): - if not self[name]: - self.GetDlgItem(id).SetFocus() - win32api.MessageBeep() - win32ui.SetStatusText("Please enter a value") - return - self._obj_.OnOK() - - -class GrepParamsDialog(dialog.Dialog): - def __init__(self, items): - self.items = items - self.newitems = [] - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - CS = win32con.WS_CHILD | win32con.WS_VISIBLE - tmp = [ - ["Grep Parameters", (0, 0, 205, 100), style, None, (8, "MS Sans Serif")], - ] - tmp.append( - [ - LISTBOX, - "", - 107, - (7, 7, 150, 72), - CS - | win32con.LBS_MULTIPLESEL - | win32con.LBS_STANDARD - | win32con.LBS_HASSTRINGS - | win32con.WS_TABSTOP - | win32con.LBS_NOTIFY, - ] - ) - tmp.append( - [ - BUTTON, - "OK", - win32con.IDOK, - (167, 7, 32, 12), - CS | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append( - [ - BUTTON, - "Cancel", - win32con.IDCANCEL, - (167, 23, 32, 12), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - tmp.append([STATIC, "New:", -1, (2, 83, 15, 12), CS]) - tmp.append( - [ - EDIT, - "", - 108, - (18, 83, 139, 12), - CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER, - ] - ) - tmp.append( - [ - BUTTON, - "Add", - 109, - (167, 83, 32, 12), - CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP, - ] - ) - dialog.Dialog.__init__(self, tmp) - 
self.HookCommand(self.OnAddItem, 109) - self.HookCommand(self.OnListDoubleClick, 107) - - def OnInitDialog(self): - lb = self.GetDlgItem(107) - for item in self.items: - lb.AddString(item) - return self._obj_.OnInitDialog() - - def OnAddItem(self, cmd, code): - if code != 0: - return 1 - eb = self.GetDlgItem(108) - item = eb.GetLine(0) - self.newitems.append(item) - lb = self.GetDlgItem(107) - i = lb.AddString(item) - lb.SetSel(i, 1) - return 1 - - def OnListDoubleClick(self, cmd, code): - if code == win32con.LBN_DBLCLK: - self.OnOK() - return 1 - - def OnOK(self): - lb = self.GetDlgItem(107) - self.selections = lb.GetSelTextItems() - self._obj_.OnOK() - - def getItems(self): - return self.selections - - def getNew(self): - return self.newitems - - -try: - win32ui.GetApp().RemoveDocTemplate(greptemplate) -except NameError: - pass - -greptemplate = GrepTemplate() diff --git a/lib/pythonwin/pywin/framework/startup.py b/lib/pythonwin/pywin/framework/startup.py deleted file mode 100644 index 4a732322..00000000 --- a/lib/pythonwin/pywin/framework/startup.py +++ /dev/null @@ -1,80 +0,0 @@ -# startup.py -# -"The main application startup code for PythonWin." - -# -# This does the basic command line handling. - -# Keep this as short as possible, cos error output is only redirected if -# this runs OK. Errors in imported modules are much better - the messages go somewhere (not any more :-) - -import os -import sys - -import win32api -import win32ui - -if not sys.argv: - # Initialize sys.argv from commandline. When sys.argv is empty list ( - # different from [''] meaning "no cmd line arguments" ), then C - # bootstrapping or another method of invocation failed to initialize - # sys.argv and it will be done here. ( This was a workaround for a bug in - # win32ui but is retained for other situations. ) - argv = win32api.CommandLineToArgv(win32api.GetCommandLine()) - sys.argv = argv[1:] - if os.getcwd() not in sys.path and "." 
not in sys.path: - sys.path.insert(0, os.getcwd()) - -# You may wish to redirect error output somewhere useful if you have startup errors. -# eg, 'import win32traceutil' will do this for you. -# import win32traceutil # Just uncomment this line to see error output! - -# An old class I used to use - generally only useful if Pythonwin is running under MSVC -# class DebugOutput: -# softspace=1 -# def write(self,message): -# win32ui.OutputDebug(message) -# sys.stderr=sys.stdout=DebugOutput() - -# To fix a problem with Pythonwin when started from the Pythonwin directory, -# we update the pywin path to ensure it is absolute. -# If it is indeed relative, it will be relative to our current directory. -# If its already absolute, then this will have no affect. -import pywin -import pywin.framework - -pywin.__path__[0] = win32ui.FullPath(pywin.__path__[0]) -pywin.framework.__path__[0] = win32ui.FullPath(pywin.framework.__path__[0]) - -# make a few wierd sys values. This is so later we can clobber sys.argv to trick -# scripts when running under a GUI environment. - -moduleName = "pywin.framework.intpyapp" -sys.appargvoffset = 0 -sys.appargv = sys.argv[:] -# Must check for /app param here. -if len(sys.argv) >= 2 and sys.argv[0].lower() in ("/app", "-app"): - from . import cmdline - - moduleName = cmdline.FixArgFileName(sys.argv[1]) - sys.appargvoffset = 2 - newargv = sys.argv[sys.appargvoffset :] - # newargv.insert(0, sys.argv[0]) - sys.argv = newargv - -# Import the application module. -__import__(moduleName) - -try: - win32ui.GetApp()._obj_ - # This worked - an app already exists - do nothing more -except (AttributeError, win32ui.error): - # This means either no app object exists at all, or the one - # that does exist does not have a Python class (ie, was created - # by the host .EXE). In this case, we do the "old style" init... - from . 
import app - - if app.AppBuilder is None: - raise TypeError("No application object has been registered") - - app.App = app.AppBuilder() diff --git a/lib/pythonwin/pywin/framework/stdin.py b/lib/pythonwin/pywin/framework/stdin.py deleted file mode 100644 index 2f3adcb6..00000000 --- a/lib/pythonwin/pywin/framework/stdin.py +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright (c) 2000 David Abrahams. Permission to copy, use, modify, sell -# and distribute this software is granted provided this copyright -# notice appears in all copies. This software is provided "as is" without -# express or implied warranty, and with no claim as to its suitability for -# any purpose. -"""Provides a class Stdin which can be used to emulate the regular old -sys.stdin for the PythonWin interactive window. Right now it just pops -up a raw_input() dialog. With luck, someone will integrate it into the -actual PythonWin interactive window someday. - -WARNING: Importing this file automatically replaces sys.stdin with an -instance of Stdin (below). This is useful because you can just open -Stdin.py in PythonWin and hit the import button to get it set up right -if you don't feel like changing PythonWin's source. 
To put things back -the way they were, simply use this magic incantation: - import sys - sys.stdin = sys.stdin.real_file -""" -import sys - -try: - get_input_line = raw_input # py2x -except NameError: - get_input_line = input # py3k - - -class Stdin: - def __init__(self): - self.real_file = sys.stdin # NOTE: Likely to be None in py3k - self.buffer = "" - self.closed = False - - def __getattr__(self, name): - """Forward most functions to the real sys.stdin for absolute realism.""" - if self.real_file is None: - raise AttributeError(name) - return getattr(self.real_file, name) - - def isatty(self): - """Return 1 if the file is connected to a tty(-like) device, else 0.""" - return 1 - - def read(self, size=-1): - """Read at most size bytes from the file (less if the read - hits EOF or no more data is immediately available on a pipe, - tty or similar device). If the size argument is negative or - omitted, read all data until EOF is reached. The bytes are - returned as a string object. An empty string is returned when - EOF is encountered immediately. (For certain files, like ttys, - it makes sense to continue reading after an EOF is hit.)""" - result_size = self.__get_lines(size) - return self.__extract_from_buffer(result_size) - - def readline(self, size=-1): - """Read one entire line from the file. A trailing newline - character is kept in the string2.6 (but may be absent when a file ends - with an incomplete line). If the size argument is present and - non-negative, it is a maximum byte count (including the trailing - newline) and an incomplete line may be returned. An empty string is - returned when EOF is hit immediately. Note: unlike stdio's fgets(), - the returned string contains null characters ('\0') if they occurred - in the input. 
- """ - maximum_result_size = self.__get_lines(size, lambda buffer: "\n" in buffer) - - if "\n" in self.buffer[:maximum_result_size]: - result_size = self.buffer.find("\n", 0, maximum_result_size) + 1 - assert result_size > 0 - else: - result_size = maximum_result_size - - return self.__extract_from_buffer(result_size) - - def __extract_from_buffer(self, character_count): - """Remove the first character_count characters from the internal buffer and - return them. - """ - result = self.buffer[:character_count] - self.buffer = self.buffer[character_count:] - return result - - def __get_lines(self, desired_size, done_reading=lambda buffer: False): - """Keep adding lines to our internal buffer until done_reading(self.buffer) - is true or EOF has been reached or we have desired_size bytes in the buffer. - If desired_size < 0, we are never satisfied until we reach EOF. If done_reading - is not supplied, it is not consulted. - - If desired_size < 0, returns the length of the internal buffer. Otherwise, - returns desired_size. - """ - while not done_reading(self.buffer) and ( - desired_size < 0 or len(self.buffer) < desired_size - ): - try: - self.__get_line() - except ( - EOFError, - KeyboardInterrupt, - ): # deal with cancellation of get_input_line dialog - desired_size = len(self.buffer) # Be satisfied! - - if desired_size < 0: - return len(self.buffer) - else: - return desired_size - - def __get_line(self): - """Grab one line from get_input_line() and append it to the buffer.""" - line = get_input_line() - print(">>>", line) # echo input to console - self.buffer = self.buffer + line + "\n" - - def readlines(self, *sizehint): - """Read until EOF using readline() and return a list containing the lines - thus read. If the optional sizehint argument is present, instead of - reading up to EOF, whole lines totalling approximately sizehint bytes - (possibly after rounding up to an internal buffer size) are read. 
- """ - result = [] - total_read = 0 - while sizehint == () or total_read < sizehint[0]: - line = self.readline() - if line == "": - break - total_read = total_read + len(line) - result.append(line) - return result - - -if __name__ == "__main__": - test_input = r"""this is some test -input that I am hoping -~ -will be very instructive -and when I am done -I will have tested everything. -Twelve and twenty blackbirds -baked in a pie. Patty cake -patty cake so am I. -~ -Thirty-five niggling idiots! -Sell you soul to the devil, baby -""" - - def fake_raw_input(prompt=None): - """Replacement for raw_input() which pulls lines out of global test_input. - For testing only! - """ - global test_input - if "\n" not in test_input: - end_of_line_pos = len(test_input) - else: - end_of_line_pos = test_input.find("\n") - result = test_input[:end_of_line_pos] - test_input = test_input[end_of_line_pos + 1 :] - if len(result) == 0 or result[0] == "~": - raise EOFError() - return result - - get_input_line = fake_raw_input - - # Some completely inadequate tests, just to make sure the code's not totally broken - try: - x = Stdin() - print(x.read()) - print(x.readline()) - print(x.read(12)) - print(x.readline(47)) - print(x.readline(3)) - print(x.readlines()) - finally: - get_input_line = raw_input -else: - import sys - - sys.stdin = Stdin() diff --git a/lib/pythonwin/pywin/framework/toolmenu.py b/lib/pythonwin/pywin/framework/toolmenu.py deleted file mode 100644 index 3f739e09..00000000 --- a/lib/pythonwin/pywin/framework/toolmenu.py +++ /dev/null @@ -1,284 +0,0 @@ -# toolmenu.py - -import sys - -import win32api -import win32con -import win32ui - -from . import app - -tools = {} -idPos = 100 - -# The default items should no tools menu exist in the INI file. 
-defaultToolMenuItems = [ - ("Browser", "win32ui.GetApp().OnViewBrowse(0,0)"), - ( - "Browse PythonPath", - "from pywin.tools import browseProjects;browseProjects.Browse()", - ), - ("Edit Python Path", "from pywin.tools import regedit;regedit.EditRegistry()"), - ("COM Makepy utility", "from win32com.client import makepy;makepy.main()"), - ( - "COM Browser", - "from win32com.client import combrowse;combrowse.main(modal=False)", - ), - ( - "Trace Collector Debugging tool", - "from pywin.tools import TraceCollector;TraceCollector.MakeOutputWindow()", - ), -] - - -def LoadToolMenuItems(): - # Load from the registry. - items = [] - lookNo = 1 - while 1: - menu = win32ui.GetProfileVal("Tools Menu\\%s" % lookNo, "", "") - if menu == "": - break - cmd = win32ui.GetProfileVal("Tools Menu\\%s" % lookNo, "Command", "") - items.append((menu, cmd)) - lookNo = lookNo + 1 - - if len(items) == 0: - items = defaultToolMenuItems - return items - - -def WriteToolMenuItems(items): - # Items is a list of (menu, command) - # Delete the entire registry tree. - try: - mainKey = win32ui.GetAppRegistryKey() - toolKey = win32api.RegOpenKey(mainKey, "Tools Menu") - except win32ui.error: - toolKey = None - if toolKey is not None: - while 1: - try: - subkey = win32api.RegEnumKey(toolKey, 0) - except win32api.error: - break - win32api.RegDeleteKey(toolKey, subkey) - # Keys are now removed - write the new ones. - # But first check if we have the defaults - and if so, dont write anything! - if items == defaultToolMenuItems: - return - itemNo = 1 - for menu, cmd in items: - win32ui.WriteProfileVal("Tools Menu\\%s" % itemNo, "", menu) - win32ui.WriteProfileVal("Tools Menu\\%s" % itemNo, "Command", cmd) - itemNo = itemNo + 1 - - -def SetToolsMenu(menu, menuPos=None): - global tools - global idPos - - # todo - check the menu does not already exist. - # Create the new menu - toolsMenu = win32ui.CreatePopupMenu() - - # Load from the ini file. 
- items = LoadToolMenuItems() - for menuString, cmd in items: - tools[idPos] = (menuString, cmd, menuString) - toolsMenu.AppendMenu( - win32con.MF_ENABLED | win32con.MF_STRING, idPos, menuString - ) - win32ui.GetMainFrame().HookCommand(HandleToolCommand, idPos) - idPos = idPos + 1 - - # Find the correct spot to insert the new tools menu. - if menuPos is None: - menuPos = menu.GetMenuItemCount() - 2 - if menuPos < 0: - menuPos = 0 - - menu.InsertMenu( - menuPos, - win32con.MF_BYPOSITION - | win32con.MF_ENABLED - | win32con.MF_STRING - | win32con.MF_POPUP, - toolsMenu.GetHandle(), - "&Tools", - ) - - -def HandleToolCommand(cmd, code): - import re - import traceback - - global tools - (menuString, pyCmd, desc) = tools[cmd] - win32ui.SetStatusText("Executing tool %s" % desc, 1) - pyCmd = re.sub("\\\\n", "\n", pyCmd) - win32ui.DoWaitCursor(1) - oldFlag = None - try: - oldFlag = sys.stdout.template.writeQueueing - sys.stdout.template.writeQueueing = 0 - except (NameError, AttributeError): - pass - - try: - exec("%s\n" % pyCmd) - worked = 1 - except SystemExit: - # The program raised a SystemExit - ignore it. - worked = 1 - except: - print("Failed to execute command:\n%s" % pyCmd) - traceback.print_exc() - worked = 0 - if oldFlag is not None: - sys.stdout.template.writeQueueing = oldFlag - win32ui.DoWaitCursor(0) - if worked: - text = "Completed successfully." - else: - text = "Error executing %s." % desc - win32ui.SetStatusText(text, 1) - - -# The property page for maintaing the items on the Tools menu. -import commctrl -from pywin.mfc import dialog - -if win32ui.UNICODE: - LVN_ENDLABELEDIT = commctrl.LVN_ENDLABELEDITW -else: - LVN_ENDLABELEDIT = commctrl.LVN_ENDLABELEDITA - - -class ToolMenuPropPage(dialog.PropertyPage): - def __init__(self): - self.bImChangingEditControls = 0 # Am I programatically changing the controls? 
- dialog.PropertyPage.__init__(self, win32ui.IDD_PP_TOOLMENU) - - def OnInitDialog(self): - self.editMenuCommand = self.GetDlgItem(win32ui.IDC_EDIT2) - self.butNew = self.GetDlgItem(win32ui.IDC_BUTTON3) - - # Now hook the change notification messages for the edit controls. - self.HookCommand(self.OnCommandEditControls, win32ui.IDC_EDIT1) - self.HookCommand(self.OnCommandEditControls, win32ui.IDC_EDIT2) - - self.HookNotify(self.OnNotifyListControl, commctrl.LVN_ITEMCHANGED) - self.HookNotify(self.OnNotifyListControlEndLabelEdit, commctrl.LVN_ENDLABELEDIT) - - # Hook the button clicks. - self.HookCommand(self.OnButtonNew, win32ui.IDC_BUTTON3) # New Item - self.HookCommand(self.OnButtonDelete, win32ui.IDC_BUTTON4) # Delete item - self.HookCommand(self.OnButtonMove, win32ui.IDC_BUTTON1) # Move up - self.HookCommand(self.OnButtonMove, win32ui.IDC_BUTTON2) # Move down - - # Setup the columns in the list control - lc = self.GetDlgItem(win32ui.IDC_LIST1) - rect = lc.GetWindowRect() - cx = rect[2] - rect[0] - colSize = cx / 2 - win32api.GetSystemMetrics(win32con.SM_CXBORDER) - 1 - - item = commctrl.LVCFMT_LEFT, colSize, "Menu Text" - lc.InsertColumn(0, item) - - item = commctrl.LVCFMT_LEFT, colSize, "Python Command" - lc.InsertColumn(1, item) - - # Insert the existing tools menu - itemNo = 0 - for desc, cmd in LoadToolMenuItems(): - lc.InsertItem(itemNo, desc) - lc.SetItemText(itemNo, 1, cmd) - itemNo = itemNo + 1 - - self.listControl = lc - return dialog.PropertyPage.OnInitDialog(self) - - def OnOK(self): - # Write the menu back to the registry. - items = [] - itemLook = 0 - while 1: - try: - text = self.listControl.GetItemText(itemLook, 0) - if not text: - break - items.append((text, self.listControl.GetItemText(itemLook, 1))) - except win32ui.error: - # no more items! 
- break - itemLook = itemLook + 1 - WriteToolMenuItems(items) - return self._obj_.OnOK() - - def OnCommandEditControls(self, id, cmd): - # print "OnEditControls", id, cmd - if cmd == win32con.EN_CHANGE and not self.bImChangingEditControls: - itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) - newText = self.editMenuCommand.GetWindowText() - self.listControl.SetItemText(itemNo, 1, newText) - - return 0 - - def OnNotifyListControlEndLabelEdit(self, id, cmd): - newText = self.listControl.GetEditControl().GetWindowText() - itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) - self.listControl.SetItemText(itemNo, 0, newText) - - def OnNotifyListControl(self, id, cmd): - # print id, cmd - try: - itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) - except win32ui.error: # No selection! - return - - self.bImChangingEditControls = 1 - try: - item = self.listControl.GetItem(itemNo, 1) - self.editMenuCommand.SetWindowText(item[4]) - finally: - self.bImChangingEditControls = 0 - - return 0 # we have handled this! - - def OnButtonNew(self, id, cmd): - if cmd == win32con.BN_CLICKED: - newIndex = self.listControl.GetItemCount() - self.listControl.InsertItem(newIndex, "Click to edit the text") - self.listControl.EnsureVisible(newIndex, 0) - - def OnButtonMove(self, id, cmd): - if cmd == win32con.BN_CLICKED: - try: - itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) - except win32ui.error: - return - menu = self.listControl.GetItemText(itemNo, 0) - cmd = self.listControl.GetItemText(itemNo, 1) - if id == win32ui.IDC_BUTTON1: - # Move up - if itemNo > 0: - self.listControl.DeleteItem(itemNo) - # reinsert it. - self.listControl.InsertItem(itemNo - 1, menu) - self.listControl.SetItemText(itemNo - 1, 1, cmd) - else: - # Move down. - if itemNo < self.listControl.GetItemCount() - 1: - self.listControl.DeleteItem(itemNo) - # reinsert it. 
- self.listControl.InsertItem(itemNo + 1, menu) - self.listControl.SetItemText(itemNo + 1, 1, cmd) - - def OnButtonDelete(self, id, cmd): - if cmd == win32con.BN_CLICKED: - try: - itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED) - except win32ui.error: # No selection! - return - self.listControl.DeleteItem(itemNo) diff --git a/lib/pythonwin/pywin/framework/window.py b/lib/pythonwin/pywin/framework/window.py deleted file mode 100644 index 58fd4d95..00000000 --- a/lib/pythonwin/pywin/framework/window.py +++ /dev/null @@ -1,14 +0,0 @@ -# Framework Window classes. - -# Most Pythonwin windows should use these classes rather than -# the raw MFC ones if they want Pythonwin specific functionality. -import pywin.mfc.window -import win32con - - -class MDIChildWnd(pywin.mfc.window.MDIChildWnd): - def AutoRestore(self): - "If the window is minimised or maximised, restore it." - p = self.GetWindowPlacement() - if p[1] == win32con.SW_MINIMIZE or p[1] == win32con.SW_SHOWMINIMIZED: - self.SetWindowPlacement(p[0], win32con.SW_RESTORE, p[2], p[3], p[4]) diff --git a/lib/pythonwin/pywin/framework/winout.py b/lib/pythonwin/pywin/framework/winout.py deleted file mode 100644 index e78e959a..00000000 --- a/lib/pythonwin/pywin/framework/winout.py +++ /dev/null @@ -1,594 +0,0 @@ -# winout.py -# -# generic "output window" -# -# This Window will detect itself closing, and recreate next time output is -# written to it. - -# This has the option of writing output at idle time (by hooking the -# idle message, and queueing output) or writing as each -# write is executed. -# Updating the window directly gives a jerky appearance as many writes -# take place between commands, and the windows scrolls, and updates etc -# Updating at idle-time may defer all output of a long process, giving the -# appearence nothing is happening. -# There is a compromise "line" mode, which will output whenever -# a complete line is available. 
- -# behaviour depends on self.writeQueueing - -# This module is thread safe - output can originate from any thread. If any thread -# other than the main thread attempts to print, it is always queued until next idle time - -import queue -import re - -import win32api -import win32con -import win32ui -from pywin.framework import app, window -from pywin.mfc import docview - -debug = lambda msg: None - -##debug=win32ui.OutputDebugString -##import win32trace;win32trace.InitWrite() # for debugging - delete me! -##debug = win32trace.write - - -class flags: - # queueing of output. - WQ_NONE = 0 - WQ_LINE = 1 - WQ_IDLE = 2 - - -# WindowOutputDocumentParent=docview.RichEditDoc -# WindowOutputDocumentParent=docview.Document -import pywin.scintilla.document -from pywin import default_scintilla_encoding -from pywin.scintilla import scintillacon - -WindowOutputDocumentParent = pywin.scintilla.document.CScintillaDocument - - -class WindowOutputDocument(WindowOutputDocumentParent): - def SaveModified(self): - return 1 # say it is OK to destroy my document - - def OnSaveDocument(self, fileName): - win32ui.SetStatusText("Saving file...", 1) - try: - self.SaveFile(fileName) - except IOError as details: - win32ui.MessageBox("Error - could not save file\r\n\r\n%s" % details) - return 0 - win32ui.SetStatusText("Ready") - return 1 - - -class WindowOutputFrame(window.MDIChildWnd): - def __init__(self, wnd=None): - window.MDIChildWnd.__init__(self, wnd) - self.HookMessage(self.OnSizeMove, win32con.WM_SIZE) - self.HookMessage(self.OnSizeMove, win32con.WM_MOVE) - - def LoadFrame(self, idResource, style, wndParent, context): - self.template = context.template - return self._obj_.LoadFrame(idResource, style, wndParent, context) - - def PreCreateWindow(self, cc): - cc = self._obj_.PreCreateWindow(cc) - if ( - self.template.defSize - and self.template.defSize[0] != self.template.defSize[1] - ): - rect = app.RectToCreateStructRect(self.template.defSize) - cc = cc[0], cc[1], cc[2], cc[3], rect, 
cc[5], cc[6], cc[7], cc[8] - return cc - - def OnSizeMove(self, msg): - # so recreate maintains position. - # Need to map coordinates from the - # frame windows first child. - mdiClient = self.GetParent() - self.template.defSize = mdiClient.ScreenToClient(self.GetWindowRect()) - - def OnDestroy(self, message): - self.template.OnFrameDestroy(self) - return 1 - - -class WindowOutputViewImpl: - def __init__(self): - self.patErrorMessage = re.compile('\W*File "(.*)", line ([0-9]+)') - self.template = self.GetDocument().GetDocTemplate() - - def HookHandlers(self): - # Hook for the right-click menu. - self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN) - - def OnDestroy(self, msg): - self.template.OnViewDestroy(self) - - def OnInitialUpdate(self): - self.RestoreKillBuffer() - self.SetSel(-2) # end of buffer - - def GetRightMenuItems(self): - ret = [] - flags = win32con.MF_STRING | win32con.MF_ENABLED - ret.append((flags, win32ui.ID_EDIT_COPY, "&Copy")) - ret.append((flags, win32ui.ID_EDIT_SELECT_ALL, "&Select all")) - return ret - - # - # Windows command handlers, virtuals, etc. - # - def OnRClick(self, params): - paramsList = self.GetRightMenuItems() - menu = win32ui.CreatePopupMenu() - for appendParams in paramsList: - if type(appendParams) != type(()): - appendParams = (appendParams,) - menu.AppendMenu(*appendParams) - menu.TrackPopupMenu(params[5]) # track at mouse position. - return 0 - - # as this is often used as an output window, exeptions will often - # be printed. Therefore, we support this functionality at this level. - # Returns TRUE if the current line is an error message line, and will - # jump to it. FALSE if no error (and no action taken) - def HandleSpecialLine(self): - from . import scriptutils - - line = self.GetLine() - if line[:11] == "com_error: ": - # An OLE Exception - pull apart the exception - # and try and locate a help file. 
- try: - import win32api - import win32con - - det = eval(line[line.find(":") + 1 :].strip()) - win32ui.SetStatusText("Opening help file on OLE error...") - from . import help - - help.OpenHelpFile(det[2][3], win32con.HELP_CONTEXT, det[2][4]) - return 1 - except win32api.error as details: - win32ui.SetStatusText( - "The help file could not be opened - %s" % details.strerror - ) - return 1 - except: - win32ui.SetStatusText( - "Line is a COM error, but no WinHelp details can be parsed" - ) - # Look for a Python traceback. - matchResult = self.patErrorMessage.match(line) - if matchResult is None: - # No match - try the previous line - lineNo = self.LineFromChar() - if lineNo > 0: - line = self.GetLine(lineNo - 1) - matchResult = self.patErrorMessage.match(line) - if matchResult is not None: - # we have an error line. - fileName = matchResult.group(1) - if fileName[0] == "<": - win32ui.SetStatusText("Can not load this file") - return 1 # still was an error message. - else: - lineNoString = matchResult.group(2) - # Attempt to locate the file (in case it is a relative spec) - fileNameSpec = fileName - fileName = scriptutils.LocatePythonFile(fileName) - if fileName is None: - # Dont force update, so it replaces the idle prompt. - win32ui.SetStatusText( - "Cant locate the file '%s'" % (fileNameSpec), 0 - ) - return 1 - - win32ui.SetStatusText( - "Jumping to line " + lineNoString + " of file " + fileName, 1 - ) - if not scriptutils.JumpToDocument(fileName, int(lineNoString)): - win32ui.SetStatusText("Could not open %s" % fileName) - return 1 # still was an error message. 
- return 1 - return 0 # not an error line - - def write(self, msg): - return self.template.write(msg) - - def writelines(self, lines): - for line in lines: - self.write(line) - - def flush(self): - self.template.flush() - - -class WindowOutputViewRTF(docview.RichEditView, WindowOutputViewImpl): - def __init__(self, doc): - docview.RichEditView.__init__(self, doc) - WindowOutputViewImpl.__init__(self) - - def OnInitialUpdate(self): - WindowOutputViewImpl.OnInitialUpdate(self) - return docview.RichEditView.OnInitialUpdate(self) - - def OnDestroy(self, msg): - WindowOutputViewImpl.OnDestroy(self, msg) - docview.RichEditView.OnDestroy(self, msg) - - def HookHandlers(self): - WindowOutputViewImpl.HookHandlers(self) - # Hook for finding and locating error messages - self.HookMessage(self.OnLDoubleClick, win32con.WM_LBUTTONDBLCLK) - - # docview.RichEditView.HookHandlers(self) - - def OnLDoubleClick(self, params): - if self.HandleSpecialLine(): - return 0 # dont pass on - return 1 # pass it on by default. - - def RestoreKillBuffer(self): - if len(self.template.killBuffer): - self.StreamIn(win32con.SF_RTF, self._StreamRTFIn) - self.template.killBuffer = [] - - def SaveKillBuffer(self): - self.StreamOut(win32con.SF_RTFNOOBJS, self._StreamRTFOut) - - def _StreamRTFOut(self, data): - self.template.killBuffer.append(data) - return 1 # keep em coming! 
- - def _StreamRTFIn(self, bytes): - try: - item = self.template.killBuffer[0] - self.template.killBuffer.remove(item) - if bytes < len(item): - print("Warning - output buffer not big enough!") - return item - except IndexError: - return None - - def dowrite(self, str): - self.SetSel(-2) - self.ReplaceSel(str) - - -import pywin.scintilla.view - - -class WindowOutputViewScintilla( - pywin.scintilla.view.CScintillaView, WindowOutputViewImpl -): - def __init__(self, doc): - pywin.scintilla.view.CScintillaView.__init__(self, doc) - WindowOutputViewImpl.__init__(self) - - def OnInitialUpdate(self): - pywin.scintilla.view.CScintillaView.OnInitialUpdate(self) - self.SCISetMarginWidth(3) - WindowOutputViewImpl.OnInitialUpdate(self) - - def OnDestroy(self, msg): - WindowOutputViewImpl.OnDestroy(self, msg) - pywin.scintilla.view.CScintillaView.OnDestroy(self, msg) - - def HookHandlers(self): - WindowOutputViewImpl.HookHandlers(self) - pywin.scintilla.view.CScintillaView.HookHandlers(self) - self.GetParent().HookNotify( - self.OnScintillaDoubleClick, scintillacon.SCN_DOUBLECLICK - ) - - ## self.HookMessage(self.OnLDoubleClick,win32con.WM_LBUTTONDBLCLK) - - def OnScintillaDoubleClick(self, std, extra): - self.HandleSpecialLine() - - ## def OnLDoubleClick(self,params): - ## return 0 # never dont pass on - - def RestoreKillBuffer(self): - assert len(self.template.killBuffer) in (0, 1), "Unexpected killbuffer contents" - if self.template.killBuffer: - self.SCIAddText(self.template.killBuffer[0]) - self.template.killBuffer = [] - - def SaveKillBuffer(self): - self.template.killBuffer = [self.GetTextRange(0, -1)] - - def dowrite(self, str): - end = self.GetTextLength() - atEnd = end == self.GetSel()[0] - self.SCIInsertText(str, end) - if atEnd: - self.SetSel(self.GetTextLength()) - - def SetWordWrap(self, bWrapOn=1): - if bWrapOn: - wrap_mode = scintillacon.SC_WRAP_WORD - else: - wrap_mode = scintillacon.SC_WRAP_NONE - self.SCISetWrapMode(wrap_mode) - - def _MakeColorizer(self): - 
return None # No colorizer for me! - - -WindowOutputView = WindowOutputViewScintilla - - -# The WindowOutput class is actually an MFC template. This is a conventient way of -# making sure that my state can exist beyond the life of the windows themselves. -# This is primarily to support the functionality of a WindowOutput window automatically -# being recreated if necessary when written to. -class WindowOutput(docview.DocTemplate): - """Looks like a general Output Window - text can be written by the 'write' method. - Will auto-create itself on first write, and also on next write after being closed""" - - softspace = 1 - - def __init__( - self, - title=None, - defSize=None, - queueing=flags.WQ_LINE, - bAutoRestore=1, - style=None, - makeDoc=None, - makeFrame=None, - makeView=None, - ): - """init the output window - - Params - title=None -- What is the title of the window - defSize=None -- What is the default size for the window - if this - is a string, the size will be loaded from the ini file. - queueing = flags.WQ_LINE -- When should output be written - bAutoRestore=1 -- Should a minimized window be restored. - style -- Style for Window, or None for default. - makeDoc, makeFrame, makeView -- Classes for frame, view and window respectively. - """ - if makeDoc is None: - makeDoc = WindowOutputDocument - if makeFrame is None: - makeFrame = WindowOutputFrame - if makeView is None: - makeView = WindowOutputViewScintilla - docview.DocTemplate.__init__( - self, win32ui.IDR_PYTHONTYPE, makeDoc, makeFrame, makeView - ) - self.SetDocStrings("\nOutput\n\nText Documents (*.txt)\n.txt\n\n\n") - win32ui.GetApp().AddDocTemplate(self) - self.writeQueueing = queueing - self.errorCantRecreate = 0 - self.killBuffer = [] - self.style = style - self.bAutoRestore = bAutoRestore - self.title = title - self.bCreating = 0 - self.interruptCount = 0 - if type(defSize) == type(""): # is a string - maintain size pos from ini file. 
- self.iniSizeSection = defSize - self.defSize = app.LoadWindowSize(defSize) - self.loadedSize = self.defSize - else: - self.iniSizeSection = None - self.defSize = defSize - self.currentView = None - self.outputQueue = queue.Queue(-1) - self.mainThreadId = win32api.GetCurrentThreadId() - self.idleHandlerSet = 0 - self.SetIdleHandler() - - def __del__(self): - self.Close() - - def Create(self, title=None, style=None): - self.bCreating = 1 - if title: - self.title = title - if style: - self.style = style - doc = self.OpenDocumentFile() - if doc is None: - return - self.currentView = doc.GetFirstView() - self.bCreating = 0 - if self.title: - doc.SetTitle(self.title) - - def Close(self): - self.RemoveIdleHandler() - try: - parent = self.currentView.GetParent() - except (AttributeError, win32ui.error): # Already closed - return - parent.DestroyWindow() - - def SetTitle(self, title): - self.title = title - if self.currentView: - self.currentView.GetDocument().SetTitle(self.title) - - def OnViewDestroy(self, view): - self.currentView.SaveKillBuffer() - self.currentView = None - - def OnFrameDestroy(self, frame): - if self.iniSizeSection: - # use GetWindowPlacement(), as it works even when min'd or max'd - newSize = frame.GetWindowPlacement()[4] - if self.loadedSize != newSize: - app.SaveWindowSize(self.iniSizeSection, newSize) - - def SetIdleHandler(self): - if not self.idleHandlerSet: - debug("Idle handler set\n") - win32ui.GetApp().AddIdleHandler(self.QueueIdleHandler) - self.idleHandlerSet = 1 - - def RemoveIdleHandler(self): - if self.idleHandlerSet: - debug("Idle handler reset\n") - if win32ui.GetApp().DeleteIdleHandler(self.QueueIdleHandler) == 0: - debug("Error deleting idle handler\n") - self.idleHandlerSet = 0 - - def RecreateWindow(self): - if self.errorCantRecreate: - debug("Error = not trying again") - return 0 - try: - # This will fail if app shutting down - win32ui.GetMainFrame().GetSafeHwnd() - self.Create() - return 1 - except (win32ui.error, 
AttributeError): - self.errorCantRecreate = 1 - debug("Winout can not recreate the Window!\n") - return 0 - - # this handles the idle message, and does the printing. - def QueueIdleHandler(self, handler, count): - try: - bEmpty = self.QueueFlush(20) - # If the queue is empty, then we are back to idle and restart interrupt logic. - if bEmpty: - self.interruptCount = 0 - except KeyboardInterrupt: - # First interrupt since idle we just pass on. - # later ones we dump the queue and give up. - self.interruptCount = self.interruptCount + 1 - if self.interruptCount > 1: - # Drop the queue quickly as the user is already annoyed :-) - self.outputQueue = queue.Queue(-1) - print("Interrupted.") - bEmpty = 1 - else: - raise # re-raise the error so the users exception filters up. - return not bEmpty # More to do if not empty. - - # Returns true if the Window needs to be recreated. - def NeedRecreateWindow(self): - try: - if self.currentView is not None and self.currentView.IsWindow(): - return 0 - except ( - win32ui.error, - AttributeError, - ): # Attribute error if the win32ui object has died. - pass - return 1 - - # Returns true if the Window is OK (either cos it was, or because it was recreated - def CheckRecreateWindow(self): - if self.bCreating: - return 1 - if not self.NeedRecreateWindow(): - return 1 - if self.bAutoRestore: - if self.RecreateWindow(): - return 1 - return 0 - - def QueueFlush(self, max=None): - # Returns true if the queue is empty after the flush - # debug("Queueflush - %d, %d\n" % (max, self.outputQueue.qsize())) - if self.bCreating: - return 1 - items = [] - rc = 0 - while max is None or max > 0: - try: - item = self.outputQueue.get_nowait() - items.append(item) - except queue.Empty: - rc = 1 - break - if max is not None: - max = max - 1 - if len(items) != 0: - if not self.CheckRecreateWindow(): - debug(":Recreate failed!\n") - return 1 # In trouble - so say we have nothing to do. 
- win32ui.PumpWaitingMessages() # Pump paint messages - self.currentView.dowrite("".join(items)) - return rc - - def HandleOutput(self, message): - # debug("QueueOutput on thread %d, flags %d with '%s'...\n" % (win32api.GetCurrentThreadId(), self.writeQueueing, message )) - self.outputQueue.put(message) - if win32api.GetCurrentThreadId() != self.mainThreadId: - pass - # debug("not my thread - ignoring queue options!\n") - elif self.writeQueueing == flags.WQ_LINE: - pos = message.rfind("\n") - if pos >= 0: - # debug("Line queueing - forcing flush\n") - self.QueueFlush() - return - elif self.writeQueueing == flags.WQ_NONE: - # debug("WQ_NONE - flushing!\n") - self.QueueFlush() - return - # Let our idle handler get it - wake it up - try: - win32ui.GetMainFrame().PostMessage( - win32con.WM_USER - ) # Kick main thread off. - except win32ui.error: - # This can happen as the app is shutting down, so we send it to the C++ debugger - win32api.OutputDebugString(message) - - # delegate certain fns to my view. 
- def writelines(self, lines): - for line in lines: - self.write(line) - - def write(self, message): - self.HandleOutput(message) - - def flush(self): - self.QueueFlush() - - def HandleSpecialLine(self): - self.currentView.HandleSpecialLine() - - -def RTFWindowOutput(*args, **kw): - kw["makeView"] = WindowOutputViewRTF - return WindowOutput(*args, **kw) - - -def thread_test(o): - for i in range(5): - o.write("Hi from thread %d\n" % (win32api.GetCurrentThreadId())) - win32api.Sleep(100) - - -def test(): - w = WindowOutput(queueing=flags.WQ_IDLE) - w.write("First bit of text\n") - import _thread - - for i in range(5): - w.write("Hello from the main thread\n") - _thread.start_new(thread_test, (w,)) - for i in range(2): - w.write("Hello from the main thread\n") - win32api.Sleep(50) - return w - - -if __name__ == "__main__": - test() diff --git a/lib/pythonwin/pywin/idle/AutoExpand.py b/lib/pythonwin/pywin/idle/AutoExpand.py deleted file mode 100644 index 3b302ee9..00000000 --- a/lib/pythonwin/pywin/idle/AutoExpand.py +++ /dev/null @@ -1,95 +0,0 @@ -import re -import string - -###$ event <> -###$ win -###$ unix - - -class AutoExpand: - keydefs = { - "<>": [""], - } - - unix_keydefs = { - "<>": [""], - } - - menudefs = [ - ( - "edit", - [ - ("E_xpand word", "<>"), - ], - ), - ] - - wordchars = string.ascii_letters + string.digits + "_" - - def __init__(self, editwin): - self.text = editwin.text - self.text.wordlist = None # XXX what is this? 
- self.state = None - - def expand_word_event(self, event): - curinsert = self.text.index("insert") - curline = self.text.get("insert linestart", "insert lineend") - if not self.state: - words = self.getwords() - index = 0 - else: - words, index, insert, line = self.state - if insert != curinsert or line != curline: - words = self.getwords() - index = 0 - if not words: - self.text.bell() - return "break" - word = self.getprevword() - self.text.delete("insert - %d chars" % len(word), "insert") - newword = words[index] - index = (index + 1) % len(words) - if index == 0: - self.text.bell() # Warn we cycled around - self.text.insert("insert", newword) - curinsert = self.text.index("insert") - curline = self.text.get("insert linestart", "insert lineend") - self.state = words, index, curinsert, curline - return "break" - - def getwords(self): - word = self.getprevword() - if not word: - return [] - before = self.text.get("1.0", "insert wordstart") - wbefore = re.findall(r"\b" + word + r"\w+\b", before) - del before - after = self.text.get("insert wordend", "end") - wafter = re.findall(r"\b" + word + r"\w+\b", after) - del after - if not wbefore and not wafter: - return [] - words = [] - dict = {} - # search backwards through words before - wbefore.reverse() - for w in wbefore: - if dict.get(w): - continue - words.append(w) - dict[w] = w - # search onwards through words after - for w in wafter: - if dict.get(w): - continue - words.append(w) - dict[w] = w - words.append(word) - return words - - def getprevword(self): - line = self.text.get("insert linestart", "insert") - i = len(line) - while i > 0 and line[i - 1] in self.wordchars: - i = i - 1 - return line[i:] diff --git a/lib/pythonwin/pywin/idle/AutoIndent.py b/lib/pythonwin/pywin/idle/AutoIndent.py deleted file mode 100644 index c5adc5f8..00000000 --- a/lib/pythonwin/pywin/idle/AutoIndent.py +++ /dev/null @@ -1,547 +0,0 @@ -import sys -import tokenize - -from pywin import default_scintilla_encoding - -from . 
import PyParse - -if sys.version_info < (3,): - # in py2k, tokenize() takes a 'token eater' callback, while - # generate_tokens is a generator that works with str objects. - token_generator = tokenize.generate_tokens -else: - # in py3k tokenize() is the generator working with 'byte' objects, and - # token_generator is the 'undocumented b/w compat' function that - # theoretically works with str objects - but actually seems to fail) - token_generator = tokenize.tokenize - - -class AutoIndent: - menudefs = [ - ( - "edit", - [ - None, - ("_Indent region", "<>"), - ("_Dedent region", "<>"), - ("Comment _out region", "<>"), - ("U_ncomment region", "<>"), - ("Tabify region", "<>"), - ("Untabify region", "<>"), - ("Toggle tabs", "<>"), - ("New indent width", "<>"), - ], - ), - ] - - keydefs = { - "<>": [""], - "<>": ["", ""], - "<>": [""], - } - - windows_keydefs = { - "<>": [""], - "<>": [""], - "<>": [""], - "<>": [""], - "<>": [""], - "<>": [""], - "<>": [""], - "<>": [""], - } - - unix_keydefs = { - "<>": [ - "", - "", - "", - ], - "<>": [ - "", - "", - "", - ], - "<>": ["", ""], - "<>": ["", ""], - "<>": ["", ""], - "<>": ["", ""], - "<>": [""], - "<>": [""], - } - - # usetabs true -> literal tab characters are used by indent and - # dedent cmds, possibly mixed with spaces if - # indentwidth is not a multiple of tabwidth - # false -> tab characters are converted to spaces by indent - # and dedent cmds, and ditto TAB keystrokes - # indentwidth is the number of characters per logical indent level. - # tabwidth is the display width of a literal tab character. - # CAUTION: telling Tk to use anything other than its default - # tab setting causes it to use an entirely different tabbing algorithm, - # treating tab stops as fixed distances from the left margin. - # Nobody expects this, so for now tabwidth should never be changed. 
- usetabs = 1 - indentwidth = 4 - tabwidth = 8 # for IDLE use, must remain 8 until Tk is fixed - - # If context_use_ps1 is true, parsing searches back for a ps1 line; - # else searches for a popular (if, def, ...) Python stmt. - context_use_ps1 = 0 - - # When searching backwards for a reliable place to begin parsing, - # first start num_context_lines[0] lines back, then - # num_context_lines[1] lines back if that didn't work, and so on. - # The last value should be huge (larger than the # of lines in a - # conceivable file). - # Making the initial values larger slows things down more often. - num_context_lines = 50, 500, 5000000 - - def __init__(self, editwin): - self.editwin = editwin - self.text = editwin.text - - def config(self, **options): - for key, value in options.items(): - if key == "usetabs": - self.usetabs = value - elif key == "indentwidth": - self.indentwidth = value - elif key == "tabwidth": - self.tabwidth = value - elif key == "context_use_ps1": - self.context_use_ps1 = value - else: - raise KeyError("bad option name: %s" % repr(key)) - - # If ispythonsource and guess are true, guess a good value for - # indentwidth based on file content (if possible), and if - # indentwidth != tabwidth set usetabs false. - # In any case, adjust the Text widget's view of what a tab - # character means. - - def set_indentation_params(self, ispythonsource, guess=1): - if guess and ispythonsource: - i = self.guess_indent() - if 2 <= i <= 8: - self.indentwidth = i - if self.indentwidth != self.tabwidth: - self.usetabs = 0 - - self.editwin.set_tabwidth(self.tabwidth) - - def smart_backspace_event(self, event): - text = self.text - first, last = self.editwin.get_selection_indices() - if first and last: - text.delete(first, last) - text.mark_set("insert", first) - return "break" - # Delete whitespace left, until hitting a real char or closest - # preceding virtual tab stop. 
- chars = text.get("insert linestart", "insert") - if chars == "": - if text.compare("insert", ">", "1.0"): - # easy: delete preceding newline - text.delete("insert-1c") - else: - text.bell() # at start of buffer - return "break" - if chars[-1] not in " \t": - # easy: delete preceding real char - text.delete("insert-1c") - return "break" - # Ick. It may require *inserting* spaces if we back up over a - # tab character! This is written to be clear, not fast. - have = len(chars.expandtabs(self.tabwidth)) - assert have > 0 - want = int((have - 1) / self.indentwidth) * self.indentwidth - ncharsdeleted = 0 - while 1: - chars = chars[:-1] - ncharsdeleted = ncharsdeleted + 1 - have = len(chars.expandtabs(self.tabwidth)) - if have <= want or chars[-1] not in " \t": - break - text.undo_block_start() - text.delete("insert-%dc" % ncharsdeleted, "insert") - if have < want: - text.insert("insert", " " * (want - have)) - text.undo_block_stop() - return "break" - - def smart_indent_event(self, event): - # if intraline selection: - # delete it - # elif multiline selection: - # do indent-region & return - # indent one level - text = self.text - first, last = self.editwin.get_selection_indices() - text.undo_block_start() - try: - if first and last: - if index2line(first) != index2line(last): - return self.indent_region_event(event) - text.delete(first, last) - text.mark_set("insert", first) - prefix = text.get("insert linestart", "insert") - raw, effective = classifyws(prefix, self.tabwidth) - if raw == len(prefix): - # only whitespace to the left - self.reindent_to(effective + self.indentwidth) - else: - if self.usetabs: - pad = "\t" - else: - effective = len(prefix.expandtabs(self.tabwidth)) - n = self.indentwidth - pad = " " * (n - effective % n) - text.insert("insert", pad) - text.see("insert") - return "break" - finally: - text.undo_block_stop() - - def newline_and_indent_event(self, event): - text = self.text - first, last = self.editwin.get_selection_indices() - 
text.undo_block_start() - try: - if first and last: - text.delete(first, last) - text.mark_set("insert", first) - line = text.get("insert linestart", "insert") - i, n = 0, len(line) - while i < n and line[i] in " \t": - i = i + 1 - if i == n: - # the cursor is in or at leading indentation; just inject - # an empty line at the start and strip space from current line - text.delete("insert - %d chars" % i, "insert") - text.insert("insert linestart", "\n") - return "break" - indent = line[:i] - # strip whitespace before insert point - i = 0 - while line and line[-1] in " \t": - line = line[:-1] - i = i + 1 - if i: - text.delete("insert - %d chars" % i, "insert") - # strip whitespace after insert point - while text.get("insert") in " \t": - text.delete("insert") - # start new line - text.insert("insert", "\n") - - # adjust indentation for continuations and block - # open/close first need to find the last stmt - lno = index2line(text.index("insert")) - y = PyParse.Parser(self.indentwidth, self.tabwidth) - for context in self.num_context_lines: - startat = max(lno - context, 1) - startatindex = repr(startat) + ".0" - rawtext = text.get(startatindex, "insert") - y.set_str(rawtext) - bod = y.find_good_parse_start( - self.context_use_ps1, self._build_char_in_string_func(startatindex) - ) - if bod is not None or startat == 1: - break - y.set_lo(bod or 0) - c = y.get_continuation_type() - if c != PyParse.C_NONE: - # The current stmt hasn't ended yet. 
- if c == PyParse.C_STRING: - # inside a string; just mimic the current indent - text.insert("insert", indent) - elif c == PyParse.C_BRACKET: - # line up with the first (if any) element of the - # last open bracket structure; else indent one - # level beyond the indent of the line with the - # last open bracket - self.reindent_to(y.compute_bracket_indent()) - elif c == PyParse.C_BACKSLASH: - # if more than one line in this stmt already, just - # mimic the current indent; else if initial line - # has a start on an assignment stmt, indent to - # beyond leftmost =; else to beyond first chunk of - # non-whitespace on initial line - if y.get_num_lines_in_stmt() > 1: - text.insert("insert", indent) - else: - self.reindent_to(y.compute_backslash_indent()) - else: - assert 0, "bogus continuation type " + repr(c) - return "break" - - # This line starts a brand new stmt; indent relative to - # indentation of initial line of closest preceding - # interesting stmt. - indent = y.get_base_indent_string() - text.insert("insert", indent) - if y.is_block_opener(): - self.smart_indent_event(event) - elif indent and y.is_block_closer(): - self.smart_backspace_event(event) - return "break" - finally: - text.see("insert") - text.undo_block_stop() - - auto_indent = newline_and_indent_event - - # Our editwin provides a is_char_in_string function that works - # with a Tk text index, but PyParse only knows about offsets into - # a string. This builds a function for PyParse that accepts an - # offset. 
- - def _build_char_in_string_func(self, startindex): - def inner(offset, _startindex=startindex, _icis=self.editwin.is_char_in_string): - return _icis(_startindex + "+%dc" % offset) - - return inner - - def indent_region_event(self, event): - head, tail, chars, lines = self.get_region() - for pos in range(len(lines)): - line = lines[pos] - if line: - raw, effective = classifyws(line, self.tabwidth) - effective = effective + self.indentwidth - lines[pos] = self._make_blanks(effective) + line[raw:] - self.set_region(head, tail, chars, lines) - return "break" - - def dedent_region_event(self, event): - head, tail, chars, lines = self.get_region() - for pos in range(len(lines)): - line = lines[pos] - if line: - raw, effective = classifyws(line, self.tabwidth) - effective = max(effective - self.indentwidth, 0) - lines[pos] = self._make_blanks(effective) + line[raw:] - self.set_region(head, tail, chars, lines) - return "break" - - def comment_region_event(self, event): - head, tail, chars, lines = self.get_region() - for pos in range(len(lines) - 1): - line = lines[pos] - lines[pos] = "##" + line - self.set_region(head, tail, chars, lines) - - def uncomment_region_event(self, event): - head, tail, chars, lines = self.get_region() - for pos in range(len(lines)): - line = lines[pos] - if not line: - continue - if line[:2] == "##": - line = line[2:] - elif line[:1] == "#": - line = line[1:] - lines[pos] = line - self.set_region(head, tail, chars, lines) - - def tabify_region_event(self, event): - head, tail, chars, lines = self.get_region() - tabwidth = self._asktabwidth() - for pos in range(len(lines)): - line = lines[pos] - if line: - raw, effective = classifyws(line, tabwidth) - ntabs, nspaces = divmod(effective, tabwidth) - lines[pos] = "\t" * ntabs + " " * nspaces + line[raw:] - self.set_region(head, tail, chars, lines) - - def untabify_region_event(self, event): - head, tail, chars, lines = self.get_region() - tabwidth = self._asktabwidth() - for pos in 
range(len(lines)): - lines[pos] = lines[pos].expandtabs(tabwidth) - self.set_region(head, tail, chars, lines) - - def toggle_tabs_event(self, event): - if self.editwin.askyesno( - "Toggle tabs", - "Turn tabs " + ("on", "off")[self.usetabs] + "?", - parent=self.text, - ): - self.usetabs = not self.usetabs - return "break" - - # XXX this isn't bound to anything -- see class tabwidth comments - def change_tabwidth_event(self, event): - new = self._asktabwidth() - if new != self.tabwidth: - self.tabwidth = new - self.set_indentation_params(0, guess=0) - return "break" - - def change_indentwidth_event(self, event): - new = self.editwin.askinteger( - "Indent width", - "New indent width (1-16)", - parent=self.text, - initialvalue=self.indentwidth, - minvalue=1, - maxvalue=16, - ) - if new and new != self.indentwidth: - self.indentwidth = new - return "break" - - def get_region(self): - text = self.text - first, last = self.editwin.get_selection_indices() - if first and last: - head = text.index(first + " linestart") - tail = text.index(last + "-1c lineend +1c") - else: - head = text.index("insert linestart") - tail = text.index("insert lineend +1c") - chars = text.get(head, tail) - lines = chars.split("\n") - return head, tail, chars, lines - - def set_region(self, head, tail, chars, lines): - text = self.text - newchars = "\n".join(lines) - if newchars == chars: - text.bell() - return - text.tag_remove("sel", "1.0", "end") - text.mark_set("insert", head) - text.undo_block_start() - text.delete(head, tail) - text.insert(head, newchars) - text.undo_block_stop() - text.tag_add("sel", head, "insert") - - # Make string that displays as n leading blanks. - - def _make_blanks(self, n): - if self.usetabs: - ntabs, nspaces = divmod(n, self.tabwidth) - return "\t" * ntabs + " " * nspaces - else: - return " " * n - - # Delete from beginning of line to insert point, then reinsert - # column logical (meaning use tabs if appropriate) spaces. 
- - def reindent_to(self, column): - text = self.text - text.undo_block_start() - if text.compare("insert linestart", "!=", "insert"): - text.delete("insert linestart", "insert") - if column: - text.insert("insert", self._make_blanks(column)) - text.undo_block_stop() - - def _asktabwidth(self): - return ( - self.editwin.askinteger( - "Tab width", - "Spaces per tab?", - parent=self.text, - initialvalue=self.tabwidth, - minvalue=1, - maxvalue=16, - ) - or self.tabwidth - ) - - # Guess indentwidth from text content. - # Return guessed indentwidth. This should not be believed unless - # it's in a reasonable range (e.g., it will be 0 if no indented - # blocks are found). - - def guess_indent(self): - opener, indented = IndentSearcher(self.text, self.tabwidth).run() - if opener and indented: - raw, indentsmall = classifyws(opener, self.tabwidth) - raw, indentlarge = classifyws(indented, self.tabwidth) - else: - indentsmall = indentlarge = 0 - return indentlarge - indentsmall - - -# "line.col" -> line, as an int -def index2line(index): - return int(float(index)) - - -# Look at the leading whitespace in s. -# Return pair (# of leading ws characters, -# effective # of leading blanks after expanding -# tabs to width tabwidth) - - -def classifyws(s, tabwidth): - raw = effective = 0 - for ch in s: - if ch == " ": - raw = raw + 1 - effective = effective + 1 - elif ch == "\t": - raw = raw + 1 - effective = (effective // tabwidth + 1) * tabwidth - else: - break - return raw, effective - - -class IndentSearcher: - # .run() chews over the Text widget, looking for a block opener - # and the stmt following it. Returns a pair, - # (line containing block opener, line containing stmt) - # Either or both may be None. 
- - def __init__(self, text, tabwidth): - self.text = text - self.tabwidth = tabwidth - self.i = self.finished = 0 - self.blkopenline = self.indentedline = None - - def readline(self): - if self.finished: - val = "" - else: - i = self.i = self.i + 1 - mark = repr(i) + ".0" - if self.text.compare(mark, ">=", "end"): - val = "" - else: - val = self.text.get(mark, mark + " lineend+1c") - # hrm - not sure this is correct in py3k - the source code may have - # an encoding declared, but the data will *always* be in - # default_scintilla_encoding - so if anyone looks at the encoding decl - # in the source they will be wrong. I think. Maybe. Or something... - return val.encode(default_scintilla_encoding) - - def run(self): - OPENERS = ("class", "def", "for", "if", "try", "while") - INDENT = tokenize.INDENT - NAME = tokenize.NAME - - save_tabsize = tokenize.tabsize - tokenize.tabsize = self.tabwidth - try: - try: - for typ, token, start, end, line in token_generator(self.readline): - if typ == NAME and token in OPENERS: - self.blkopenline = line - elif typ == INDENT and self.blkopenline: - self.indentedline = line - break - - except (tokenize.TokenError, IndentationError): - # since we cut off the tokenizer early, we can trigger - # spurious errors - pass - finally: - tokenize.tabsize = save_tabsize - return self.blkopenline, self.indentedline diff --git a/lib/pythonwin/pywin/idle/CallTips.py b/lib/pythonwin/pywin/idle/CallTips.py deleted file mode 100644 index cecc760a..00000000 --- a/lib/pythonwin/pywin/idle/CallTips.py +++ /dev/null @@ -1,219 +0,0 @@ -# CallTips.py - An IDLE extension that provides "Call Tips" - ie, a floating window that -# displays parameter information as you open parens. 
- -import inspect -import string -import sys -import traceback - - -class CallTips: - menudefs = [] - - keydefs = { - "<>": [""], - "<>": [""], - "<>": [""], - "<>": ["", ""], - } - - windows_keydefs = {} - - unix_keydefs = {} - - def __init__(self, editwin): - self.editwin = editwin - self.text = editwin.text - self.calltip = None - if hasattr(self.text, "make_calltip_window"): - self._make_calltip_window = self.text.make_calltip_window - else: - self._make_calltip_window = self._make_tk_calltip_window - - def close(self): - self._make_calltip_window = None - - # Makes a Tk based calltip window. Used by IDLE, but not Pythonwin. - # See __init__ above for how this is used. - def _make_tk_calltip_window(self): - import CallTipWindow - - return CallTipWindow.CallTip(self.text) - - def _remove_calltip_window(self): - if self.calltip: - self.calltip.hidetip() - self.calltip = None - - def paren_open_event(self, event): - self._remove_calltip_window() - arg_text = get_arg_text(self.get_object_at_cursor()) - if arg_text: - self.calltip_start = self.text.index("insert") - self.calltip = self._make_calltip_window() - self.calltip.showtip(arg_text) - return "" # so the event is handled normally. - - def paren_close_event(self, event): - # Now just hides, but later we should check if other - # paren'd expressions remain open. - self._remove_calltip_window() - return "" # so the event is handled normally. - - def check_calltip_cancel_event(self, event): - if self.calltip: - # If we have moved before the start of the calltip, - # or off the calltip line, then cancel the tip. - # (Later need to be smarter about multi-line, etc) - if self.text.compare( - "insert", "<=", self.calltip_start - ) or self.text.compare("insert", ">", self.calltip_start + " lineend"): - self._remove_calltip_window() - return "" # so the event is handled normally. - - def calltip_cancel_event(self, event): - self._remove_calltip_window() - return "" # so the event is handled normally. 
- - def get_object_at_cursor( - self, - wordchars="._" - + string.ascii_uppercase - + string.ascii_lowercase - + string.digits, - ): - # XXX - This needs to be moved to a better place - # so the "." attribute lookup code can also use it. - text = self.text - chars = text.get("insert linestart", "insert") - i = len(chars) - while i and chars[i - 1] in wordchars: - i = i - 1 - word = chars[i:] - if word: - # How is this for a hack! - import __main__ - - namespace = sys.modules.copy() - namespace.update(__main__.__dict__) - try: - return eval(word, namespace) - except: - pass - return None # Can't find an object. - - -def _find_constructor(class_ob): - # Given a class object, return a function object used for the - # constructor (ie, __init__() ) or None if we can't find one. - try: - return class_ob.__init__ - except AttributeError: - for base in class_ob.__bases__: - rc = _find_constructor(base) - if rc is not None: - return rc - return None - - -def get_arg_text(ob): - # Get a string describing the arguments for the given object. - argText = "" - if ob is not None: - if inspect.isclass(ob): - # Look for the highest __init__ in the class chain. 
- fob = _find_constructor(ob) - if fob is None: - fob = lambda: None - else: - fob = ob - if inspect.isfunction(fob) or inspect.ismethod(fob): - try: - argText = str(inspect.signature(fob)) - except: - print("Failed to format the args") - traceback.print_exc() - # See if we can use the docstring - if hasattr(ob, "__doc__"): - doc = ob.__doc__ - try: - doc = doc.strip() - pos = doc.find("\n") - except AttributeError: - ## New style classes may have __doc__ slot without actually - ## having a string assigned to it - pass - else: - if pos < 0 or pos > 70: - pos = 70 - if argText: - argText = argText + "\n" - argText = argText + doc[:pos] - - return argText - - -################################################# -# -# Test code -# -if __name__ == "__main__": - - def t1(): - "()" - - def t2(a, b=None): - "(a, b=None)" - - def t3(a, *args): - "(a, *args)" - - def t4(*args): - "(*args)" - - def t5(a, *args): - "(a, *args)" - - def t6(a, b=None, *args, **kw): - "(a, b=None, *args, **kw)" - - class TC: - "(self, a=None, *b)" - - def __init__(self, a=None, *b): - "(self, a=None, *b)" - - def t1(self): - "(self)" - - def t2(self, a, b=None): - "(self, a, b=None)" - - def t3(self, a, *args): - "(self, a, *args)" - - def t4(self, *args): - "(self, *args)" - - def t5(self, a, *args): - "(self, a, *args)" - - def t6(self, a, b=None, *args, **kw): - "(self, a, b=None, *args, **kw)" - - def test(tests): - failed = [] - for t in tests: - expected = t.__doc__ + "\n" + t.__doc__ - if get_arg_text(t) != expected: - failed.append(t) - print( - "%s - expected %s, but got %s" - % (t, repr(expected), repr(get_arg_text(t))) - ) - print("%d of %d tests failed" % (len(failed), len(tests))) - - tc = TC() - tests = t1, t2, t3, t4, t5, t6, TC, tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6 - - test(tests) diff --git a/lib/pythonwin/pywin/idle/FormatParagraph.py b/lib/pythonwin/pywin/idle/FormatParagraph.py deleted file mode 100644 index 143c18ee..00000000 --- 
a/lib/pythonwin/pywin/idle/FormatParagraph.py +++ /dev/null @@ -1,166 +0,0 @@ -# Extension to format a paragraph - -# Does basic, standard text formatting, and also understands Python -# comment blocks. Thus, for editing Python source code, this -# extension is really only suitable for reformatting these comment -# blocks or triple-quoted strings. - -# Known problems with comment reformatting: -# * If there is a selection marked, and the first line of the -# selection is not complete, the block will probably not be detected -# as comments, and will have the normal "text formatting" rules -# applied. -# * If a comment block has leading whitespace that mixes tabs and -# spaces, they will not be considered part of the same block. -# * Fancy comments, like this bulleted list, arent handled :-) - -import re - - -class FormatParagraph: - menudefs = [ - ( - "edit", - [ - ("Format Paragraph", "<>"), - ], - ) - ] - - keydefs = { - "<>": [""], - } - - unix_keydefs = { - "<>": [""], - } - - def __init__(self, editwin): - self.editwin = editwin - - def close(self): - self.editwin = None - - def format_paragraph_event(self, event): - text = self.editwin.text - first, last = self.editwin.get_selection_indices() - if first and last: - data = text.get(first, last) - comment_header = "" - else: - first, last, comment_header, data = find_paragraph( - text, text.index("insert") - ) - if comment_header: - # Reformat the comment lines - convert to text sans header. - lines = data.split("\n") - lines = map(lambda st, l=len(comment_header): st[l:], lines) - data = "\n".join(lines) - # Reformat to 70 chars or a 20 char width, whichever is greater. - format_width = max(70 - len(comment_header), 20) - newdata = reformat_paragraph(data, format_width) - # re-split and re-insert the comment header. - newdata = newdata.split("\n") - # If the block ends in a \n, we dont want the comment - # prefix inserted after it. 
(Im not sure it makes sense to - # reformat a comment block that isnt made of complete - # lines, but whatever!) Can't think of a clean soltution, - # so we hack away - block_suffix = "" - if not newdata[-1]: - block_suffix = "\n" - newdata = newdata[:-1] - builder = lambda item, prefix=comment_header: prefix + item - newdata = "\n".join([builder(d) for d in newdata]) + block_suffix - else: - # Just a normal text format - newdata = reformat_paragraph(data) - text.tag_remove("sel", "1.0", "end") - if newdata != data: - text.mark_set("insert", first) - text.undo_block_start() - text.delete(first, last) - text.insert(first, newdata) - text.undo_block_stop() - else: - text.mark_set("insert", last) - text.see("insert") - - -def find_paragraph(text, mark): - lineno, col = list(map(int, mark.split("."))) - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) - while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line): - lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) - first_lineno = lineno - comment_header = get_comment_header(line) - comment_header_len = len(comment_header) - while get_comment_header(line) == comment_header and not is_all_white( - line[comment_header_len:] - ): - lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) - last = "%d.0" % lineno - # Search back to beginning of paragraph - lineno = first_lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) - while ( - lineno > 0 - and get_comment_header(line) == comment_header - and not is_all_white(line[comment_header_len:]) - ): - lineno = lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) - first = "%d.0" % (lineno + 1) - return first, last, comment_header, text.get(first, last) - - -def reformat_paragraph(data, limit=70): - lines = data.split("\n") - i = 0 - n = len(lines) - while i < n and is_all_white(lines[i]): - i = i + 1 - if i >= n: - return data - indent1 = get_indent(lines[i]) - if i + 
1 < n and not is_all_white(lines[i + 1]): - indent2 = get_indent(lines[i + 1]) - else: - indent2 = indent1 - new = lines[:i] - partial = indent1 - while i < n and not is_all_white(lines[i]): - # XXX Should take double space after period (etc.) into account - words = re.split("(\s+)", lines[i]) - for j in range(0, len(words), 2): - word = words[j] - if not word: - continue # Can happen when line ends in whitespace - if len((partial + word).expandtabs()) > limit and partial != indent1: - new.append(partial.rstrip()) - partial = indent2 - partial = partial + word + " " - if j + 1 < len(words) and words[j + 1] != " ": - partial = partial + " " - i = i + 1 - new.append(partial.rstrip()) - # XXX Should reformat remaining paragraphs as well - new.extend(lines[i:]) - return "\n".join(new) - - -def is_all_white(line): - return re.match(r"^\s*$", line) is not None - - -def get_indent(line): - return re.match(r"^(\s*)", line).group() - - -def get_comment_header(line): - m = re.match(r"^(\s*#*)", line) - if m is None: - return "" - return m.group(1) diff --git a/lib/pythonwin/pywin/idle/IdleHistory.py b/lib/pythonwin/pywin/idle/IdleHistory.py deleted file mode 100644 index 24e69240..00000000 --- a/lib/pythonwin/pywin/idle/IdleHistory.py +++ /dev/null @@ -1,87 +0,0 @@ -class History: - def __init__(self, text, output_sep="\n"): - self.text = text - self.history = [] - self.history_prefix = None - self.history_pointer = None - self.output_sep = output_sep - text.bind("<>", self.history_prev) - text.bind("<>", self.history_next) - - def history_next(self, event): - self.history_do(0) - return "break" - - def history_prev(self, event): - self.history_do(1) - return "break" - - def _get_source(self, start, end): - # Get source code from start index to end index. Lines in the - # text control may be separated by sys.ps2 . 
- lines = self.text.get(start, end).split(self.output_sep) - return "\n".join(lines) - - def _put_source(self, where, source): - output = self.output_sep.join(source.split("\n")) - self.text.insert(where, output) - - def history_do(self, reverse): - nhist = len(self.history) - pointer = self.history_pointer - prefix = self.history_prefix - if pointer is not None and prefix is not None: - if ( - self.text.compare("insert", "!=", "end-1c") - or self._get_source("iomark", "end-1c") != self.history[pointer] - ): - pointer = prefix = None - if pointer is None or prefix is None: - prefix = self._get_source("iomark", "end-1c") - if reverse: - pointer = nhist - else: - pointer = -1 - nprefix = len(prefix) - while 1: - if reverse: - pointer = pointer - 1 - else: - pointer = pointer + 1 - if pointer < 0 or pointer >= nhist: - self.text.bell() - if self._get_source("iomark", "end-1c") != prefix: - self.text.delete("iomark", "end-1c") - self._put_source("iomark", prefix) - pointer = prefix = None - break - item = self.history[pointer] - if item[:nprefix] == prefix and len(item) > nprefix: - self.text.delete("iomark", "end-1c") - self._put_source("iomark", item) - break - self.text.mark_set("insert", "end-1c") - self.text.see("insert") - self.text.tag_remove("sel", "1.0", "end") - self.history_pointer = pointer - self.history_prefix = prefix - - def history_store(self, source): - source = source.strip() - if len(source) > 2: - # avoid duplicates - try: - self.history.remove(source) - except ValueError: - pass - self.history.append(source) - self.history_pointer = None - self.history_prefix = None - - def recall(self, s): - s = s.strip() - self.text.tag_remove("sel", "1.0", "end") - self.text.delete("iomark", "end-1c") - self.text.mark_set("insert", "end-1c") - self.text.insert("insert", s) - self.text.see("insert") diff --git a/lib/pythonwin/pywin/idle/PyParse.py b/lib/pythonwin/pywin/idle/PyParse.py deleted file mode 100644 index 173336ca..00000000 --- 
a/lib/pythonwin/pywin/idle/PyParse.py +++ /dev/null @@ -1,591 +0,0 @@ -import re -import string -import sys - -# Reason last stmt is continued (or C_NONE if it's not). -C_NONE, C_BACKSLASH, C_STRING, C_BRACKET = list(range(4)) - -if 0: # for throwaway debugging output - - def dump(*stuff): - sys.__stdout__.write(" ".join(map(str, stuff)) + "\n") - - -# Find what looks like the start of a popular stmt. - -_synchre = re.compile( - r""" - ^ - [ \t]* - (?: if - | for - | while - | else - | def - | return - | assert - | break - | class - | continue - | elif - | try - | except - | raise - | import - ) - \b -""", - re.VERBOSE | re.MULTILINE, -).search - -# Match blank line or non-indenting comment line. - -_junkre = re.compile( - r""" - [ \t]* - (?: \# \S .* )? - \n -""", - re.VERBOSE, -).match - -# Match any flavor of string; the terminating quote is optional -# so that we're robust in the face of incomplete program text. - -_match_stringre = re.compile( - r""" - \""" [^"\\]* (?: - (?: \\. | "(?!"") ) - [^"\\]* - )* - (?: \""" )? - -| " [^"\\\n]* (?: \\. [^"\\\n]* )* "? - -| ''' [^'\\]* (?: - (?: \\. | '(?!'') ) - [^'\\]* - )* - (?: ''' )? - -| ' [^'\\\n]* (?: \\. [^'\\\n]* )* '? -""", - re.VERBOSE | re.DOTALL, -).match - -# Match a line that starts with something interesting; -# used to find the first item of a bracket structure. - -_itemre = re.compile( - r""" - [ \t]* - [^\s#\\] # if we match, m.end()-1 is the interesting char -""", - re.VERBOSE, -).match - -# Match start of stmts that should be followed by a dedent. - -_closere = re.compile( - r""" - \s* - (?: return - | break - | continue - | raise - | pass - ) - \b -""", - re.VERBOSE, -).match - -# Chew up non-special chars as quickly as possible. If match is -# successful, m.end() less 1 is the index of the last boring char -# matched. If match is unsuccessful, the string starts with an -# interesting char. 
- -_chew_ordinaryre = re.compile( - r""" - [^[\](){}#'"\\]+ -""", - re.VERBOSE, -).match - -# Build translation table to map uninteresting chars to "x", open -# brackets to "(", and close brackets to ")". - -_tran = ["x"] * 256 -for ch in "({[": - _tran[ord(ch)] = "(" -for ch in ")}]": - _tran[ord(ch)] = ")" -for ch in "\"'\\\n#": - _tran[ord(ch)] = ch -# We are called with unicode strings, and str.translate is one of the few -# py2k functions which can't 'do the right thing' - so take care to ensure -# _tran is full of unicode... -_tran = "".join(_tran) -del ch - - -class Parser: - def __init__(self, indentwidth, tabwidth): - self.indentwidth = indentwidth - self.tabwidth = tabwidth - - def set_str(self, str): - assert len(str) == 0 or str[-1] == "\n", "Oops - have str %r" % (str,) - self.str = str - self.study_level = 0 - - # Return index of a good place to begin parsing, as close to the - # end of the string as possible. This will be the start of some - # popular stmt like "if" or "def". Return None if none found: - # the caller should pass more prior context then, if possible, or - # if not (the entire program text up until the point of interest - # has already been tried) pass 0 to set_lo. - # - # This will be reliable iff given a reliable is_char_in_string - # function, meaning that when it says "no", it's absolutely - # guaranteed that the char is not in a string. - # - # Ack, hack: in the shell window this kills us, because there's - # no way to tell the differences between output, >>> etc and - # user input. Indeed, IDLE's first output line makes the rest - # look like it's in an unclosed paren!: - # Python 1.5.2 (#0, Apr 13 1999, ... 
- - def find_good_parse_start(self, use_ps1, is_char_in_string=None): - str, pos = self.str, None - if use_ps1: - # shell window - ps1 = "\n" + sys.ps1 - i = str.rfind(ps1) - if i >= 0: - pos = i + len(ps1) - # make it look like there's a newline instead - # of ps1 at the start -- hacking here once avoids - # repeated hackery later - self.str = str[: pos - 1] + "\n" + str[pos:] - return pos - - # File window -- real work. - if not is_char_in_string: - # no clue -- make the caller pass everything - return None - - # Peek back from the end for a good place to start, - # but don't try too often; pos will be left None, or - # bumped to a legitimate synch point. - limit = len(str) - for tries in range(5): - i = str.rfind(":\n", 0, limit) - if i < 0: - break - i = str.rfind("\n", 0, i) + 1 # start of colon line - m = _synchre(str, i, limit) - if m and not is_char_in_string(m.start()): - pos = m.start() - break - limit = i - if pos is None: - # Nothing looks like a block-opener, or stuff does - # but is_char_in_string keeps returning true; most likely - # we're in or near a giant string, the colorizer hasn't - # caught up enough to be helpful, or there simply *aren't* - # any interesting stmts. In any of these cases we're - # going to have to parse the whole thing to be sure, so - # give it one last try from the start, but stop wasting - # time here regardless of the outcome. - m = _synchre(str) - if m and not is_char_in_string(m.start()): - pos = m.start() - return pos - - # Peeking back worked; look forward until _synchre no longer - # matches. - i = pos + 1 - while 1: - m = _synchre(str, i) - if m: - s, i = m.span() - if not is_char_in_string(s): - pos = s - else: - break - return pos - - # Throw away the start of the string. Intended to be called with - # find_good_parse_start's result. 
- - def set_lo(self, lo): - assert lo == 0 or self.str[lo - 1] == "\n" - if lo > 0: - self.str = self.str[lo:] - - # As quickly as humanly possible , find the line numbers (0- - # based) of the non-continuation lines. - # Creates self.{goodlines, continuation}. - - def _study1(self): - if self.study_level >= 1: - return - self.study_level = 1 - - # Map all uninteresting characters to "x", all open brackets - # to "(", all close brackets to ")", then collapse runs of - # uninteresting characters. This can cut the number of chars - # by a factor of 10-40, and so greatly speed the following loop. - str = self.str - str = str.translate(_tran) - str = str.replace("xxxxxxxx", "x") - str = str.replace("xxxx", "x") - str = str.replace("xx", "x") - str = str.replace("xx", "x") - str = str.replace("\nx", "\n") - # note that replacing x\n with \n would be incorrect, because - # x may be preceded by a backslash - - # March over the squashed version of the program, accumulating - # the line numbers of non-continued stmts, and determining - # whether & why the last stmt is a continuation. 
- continuation = C_NONE - level = lno = 0 # level is nesting level; lno is line number - self.goodlines = goodlines = [0] - push_good = goodlines.append - i, n = 0, len(str) - while i < n: - ch = str[i] - i = i + 1 - - # cases are checked in decreasing order of frequency - if ch == "x": - continue - - if ch == "\n": - lno = lno + 1 - if level == 0: - push_good(lno) - # else we're in an unclosed bracket structure - continue - - if ch == "(": - level = level + 1 - continue - - if ch == ")": - if level: - level = level - 1 - # else the program is invalid, but we can't complain - continue - - if ch == '"' or ch == "'": - # consume the string - quote = ch - if str[i - 1 : i + 2] == quote * 3: - quote = quote * 3 - w = len(quote) - 1 - i = i + w - while i < n: - ch = str[i] - i = i + 1 - - if ch == "x": - continue - - if str[i - 1 : i + w] == quote: - i = i + w - break - - if ch == "\n": - lno = lno + 1 - if w == 0: - # unterminated single-quoted string - if level == 0: - push_good(lno) - break - continue - - if ch == "\\": - assert i < n - if str[i] == "\n": - lno = lno + 1 - i = i + 1 - continue - - # else comment char or paren inside string - - else: - # didn't break out of the loop, so we're still - # inside a string - continuation = C_STRING - continue # with outer loop - - if ch == "#": - # consume the comment - i = str.find("\n", i) - assert i >= 0 - continue - - assert ch == "\\" - assert i < n - if str[i] == "\n": - lno = lno + 1 - if i + 1 == n: - continuation = C_BACKSLASH - i = i + 1 - - # The last stmt may be continued for all 3 reasons. - # String continuation takes precedence over bracket - # continuation, which beats backslash continuation. - if continuation != C_STRING and level > 0: - continuation = C_BRACKET - self.continuation = continuation - - # Push the final line number as a sentinel value, regardless of - # whether it's continued. 
- assert (continuation == C_NONE) == (goodlines[-1] == lno) - if goodlines[-1] != lno: - push_good(lno) - - def get_continuation_type(self): - self._study1() - return self.continuation - - # study1 was sufficient to determine the continuation status, - # but doing more requires looking at every character. study2 - # does this for the last interesting statement in the block. - # Creates: - # self.stmt_start, stmt_end - # slice indices of last interesting stmt - # self.lastch - # last non-whitespace character before optional trailing - # comment - # self.lastopenbracketpos - # if continuation is C_BRACKET, index of last open bracket - - def _study2(self): - _ws = string.whitespace - if self.study_level >= 2: - return - self._study1() - self.study_level = 2 - - # Set p and q to slice indices of last interesting stmt. - str, goodlines = self.str, self.goodlines - i = len(goodlines) - 1 - p = len(str) # index of newest line - while i: - assert p - # p is the index of the stmt at line number goodlines[i]. - # Move p back to the stmt at line number goodlines[i-1]. - q = p - for nothing in range(goodlines[i - 1], goodlines[i]): - # tricky: sets p to 0 if no preceding newline - p = str.rfind("\n", 0, p - 1) + 1 - # The stmt str[p:q] isn't a continuation, but may be blank - # or a non-indenting comment line. - if _junkre(str, p): - i = i - 1 - else: - break - if i == 0: - # nothing but junk! - assert p == 0 - q = p - self.stmt_start, self.stmt_end = p, q - - # Analyze this stmt, to find the last open bracket (if any) - # and last interesting character (if any). 
- lastch = "" - stack = [] # stack of open bracket indices - push_stack = stack.append - while p < q: - # suck up all except ()[]{}'"#\\ - m = _chew_ordinaryre(str, p, q) - if m: - # we skipped at least one boring char - newp = m.end() - # back up over totally boring whitespace - i = newp - 1 # index of last boring char - while i >= p and str[i] in " \t\n": - i = i - 1 - if i >= p: - lastch = str[i] - p = newp - if p >= q: - break - - ch = str[p] - - if ch in "([{": - push_stack(p) - lastch = ch - p = p + 1 - continue - - if ch in ")]}": - if stack: - del stack[-1] - lastch = ch - p = p + 1 - continue - - if ch == '"' or ch == "'": - # consume string - # Note that study1 did this with a Python loop, but - # we use a regexp here; the reason is speed in both - # cases; the string may be huge, but study1 pre-squashed - # strings to a couple of characters per line. study1 - # also needed to keep track of newlines, and we don't - # have to. - lastch = ch - p = _match_stringre(str, p, q).end() - continue - - if ch == "#": - # consume comment and trailing newline - p = str.find("\n", p, q) + 1 - assert p > 0 - continue - - assert ch == "\\" - p = p + 1 # beyond backslash - assert p < q - if str[p] != "\n": - # the program is invalid, but can't complain - lastch = ch + str[p] - p = p + 1 # beyond escaped char - - # end while p < q: - - self.lastch = lastch - if stack: - self.lastopenbracketpos = stack[-1] - - # Assuming continuation is C_BRACKET, return the number - # of spaces the next line should be indented. 
- - def compute_bracket_indent(self): - self._study2() - assert self.continuation == C_BRACKET - j = self.lastopenbracketpos - str = self.str - n = len(str) - origi = i = str.rfind("\n", 0, j) + 1 - j = j + 1 # one beyond open bracket - # find first list item; set i to start of its line - while j < n: - m = _itemre(str, j) - if m: - j = m.end() - 1 # index of first interesting char - extra = 0 - break - else: - # this line is junk; advance to next line - i = j = str.find("\n", j) + 1 - else: - # nothing interesting follows the bracket; - # reproduce the bracket line's indentation + a level - j = i = origi - while str[j] in " \t": - j = j + 1 - extra = self.indentwidth - return len(str[i:j].expandtabs(self.tabwidth)) + extra - - # Return number of physical lines in last stmt (whether or not - # it's an interesting stmt! this is intended to be called when - # continuation is C_BACKSLASH). - - def get_num_lines_in_stmt(self): - self._study1() - goodlines = self.goodlines - return goodlines[-1] - goodlines[-2] - - # Assuming continuation is C_BACKSLASH, return the number of spaces - # the next line should be indented. Also assuming the new line is - # the first one following the initial line of the stmt. 
- - def compute_backslash_indent(self): - self._study2() - assert self.continuation == C_BACKSLASH - str = self.str - i = self.stmt_start - while str[i] in " \t": - i = i + 1 - startpos = i - - # See whether the initial line starts an assignment stmt; i.e., - # look for an = operator - endpos = str.find("\n", startpos) + 1 - found = level = 0 - while i < endpos: - ch = str[i] - if ch in "([{": - level = level + 1 - i = i + 1 - elif ch in ")]}": - if level: - level = level - 1 - i = i + 1 - elif ch == '"' or ch == "'": - i = _match_stringre(str, i, endpos).end() - elif ch == "#": - break - elif ( - level == 0 - and ch == "=" - and (i == 0 or str[i - 1] not in "=<>!") - and str[i + 1] != "=" - ): - found = 1 - break - else: - i = i + 1 - - if found: - # found a legit =, but it may be the last interesting - # thing on the line - i = i + 1 # move beyond the = - found = re.match(r"\s*\\", str[i:endpos]) is None - - if not found: - # oh well ... settle for moving beyond the first chunk - # of non-whitespace chars - i = startpos - while str[i] not in " \t\n": - i = i + 1 - - return len(str[self.stmt_start : i].expandtabs(self.tabwidth)) + 1 - - # Return the leading whitespace on the initial line of the last - # interesting stmt. - - def get_base_indent_string(self): - self._study2() - i, n = self.stmt_start, self.stmt_end - j = i - str = self.str - while j < n and str[j] in " \t": - j = j + 1 - return str[i:j] - - # Did the last interesting stmt open a block? - - def is_block_opener(self): - self._study2() - return self.lastch == ":" - - # Did the last interesting stmt close a block? 
- - def is_block_closer(self): - self._study2() - return _closere(self.str, self.stmt_start) is not None - - # index of last open bracket ({[, or None if none - lastopenbracketpos = None - - def get_last_open_bracket_pos(self): - self._study2() - return self.lastopenbracketpos diff --git a/lib/pythonwin/pywin/idle/__init__.py b/lib/pythonwin/pywin/idle/__init__.py deleted file mode 100644 index b175d755..00000000 --- a/lib/pythonwin/pywin/idle/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# This file denotes the directory as a Python package. diff --git a/lib/pythonwin/pywin/mfc/__init__.py b/lib/pythonwin/pywin/mfc/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/pythonwin/pywin/mfc/activex.py b/lib/pythonwin/pywin/mfc/activex.py deleted file mode 100644 index 596bbef5..00000000 --- a/lib/pythonwin/pywin/mfc/activex.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Support for ActiveX control hosting in Pythonwin. -""" -import win32ui -import win32uiole - -from . import window - -# XXX - we are still "classic style" classes in py2x, so we need can't yet -# use 'type()' everywhere - revisit soon, as py2x will move to new-style too... -try: - from types import ClassType as new_type -except ImportError: - new_type = type # py3k - - -class Control(window.Wnd): - """An ActiveX control base class. A new class must be derived from both - this class and the Events class. See the demos for more details. 
- """ - - def __init__(self): - self.__dict__["_dispobj_"] = None - window.Wnd.__init__(self) - - def _GetControlCLSID(self): - return self.CLSID - - def _GetDispatchClass(self): - return self.default_interface - - def _GetEventMap(self): - return self.default_source._dispid_to_func_ - - def CreateControl(self, windowTitle, style, rect, parent, id, lic_string=None): - clsid = str(self._GetControlCLSID()) - self.__dict__["_obj_"] = win32ui.CreateControl( - clsid, windowTitle, style, rect, parent, id, None, False, lic_string - ) - klass = self._GetDispatchClass() - dispobj = klass(win32uiole.GetIDispatchForWindow(self._obj_)) - self.HookOleEvents() - self.__dict__["_dispobj_"] = dispobj - - def HookOleEvents(self): - dict = self._GetEventMap() - for dispid, methodName in dict.items(): - if hasattr(self, methodName): - self._obj_.HookOleEvent(getattr(self, methodName), dispid) - - def __getattr__(self, attr): - # Delegate attributes to the windows and the Dispatch object for this class - try: - return window.Wnd.__getattr__(self, attr) - except AttributeError: - pass - return getattr(self._dispobj_, attr) - - def __setattr__(self, attr, value): - if hasattr(self.__dict__, attr): - self.__dict__[attr] = value - return - try: - if self._dispobj_: - self._dispobj_.__setattr__(attr, value) - return - except AttributeError: - pass - self.__dict__[attr] = value - - -def MakeControlClass(controlClass, name=None): - """Given a CoClass in a generated .py file, this function will return a Class - object which can be used as an OCX control. - - This function is used when you do not want to handle any events from the OCX - control. 
If you need events, then you should derive a class from both the - activex.Control class and the CoClass - """ - if name is None: - name = controlClass.__name__ - return new_type("OCX" + name, (Control, controlClass), {}) - - -def MakeControlInstance(controlClass, name=None): - """As for MakeControlClass(), but returns an instance of the class.""" - return MakeControlClass(controlClass, name)() diff --git a/lib/pythonwin/pywin/mfc/afxres.py b/lib/pythonwin/pywin/mfc/afxres.py deleted file mode 100644 index 249211ff..00000000 --- a/lib/pythonwin/pywin/mfc/afxres.py +++ /dev/null @@ -1,501 +0,0 @@ -# Generated by h2py from stdin -TCS_MULTILINE = 0x0200 -CBRS_ALIGN_LEFT = 0x1000 -CBRS_ALIGN_TOP = 0x2000 -CBRS_ALIGN_RIGHT = 0x4000 -CBRS_ALIGN_BOTTOM = 0x8000 -CBRS_ALIGN_ANY = 0xF000 -CBRS_BORDER_LEFT = 0x0100 -CBRS_BORDER_TOP = 0x0200 -CBRS_BORDER_RIGHT = 0x0400 -CBRS_BORDER_BOTTOM = 0x0800 -CBRS_BORDER_ANY = 0x0F00 -CBRS_TOOLTIPS = 0x0010 -CBRS_FLYBY = 0x0020 -CBRS_FLOAT_MULTI = 0x0040 -CBRS_BORDER_3D = 0x0080 -CBRS_HIDE_INPLACE = 0x0008 -CBRS_SIZE_DYNAMIC = 0x0004 -CBRS_SIZE_FIXED = 0x0002 -CBRS_FLOATING = 0x0001 -CBRS_GRIPPER = 0x00400000 -CBRS_ORIENT_HORZ = CBRS_ALIGN_TOP | CBRS_ALIGN_BOTTOM -CBRS_ORIENT_VERT = CBRS_ALIGN_LEFT | CBRS_ALIGN_RIGHT -CBRS_ORIENT_ANY = CBRS_ORIENT_HORZ | CBRS_ORIENT_VERT -CBRS_ALL = 0xFFFF -CBRS_NOALIGN = 0x00000000 -CBRS_LEFT = CBRS_ALIGN_LEFT | CBRS_BORDER_RIGHT -CBRS_TOP = CBRS_ALIGN_TOP | CBRS_BORDER_BOTTOM -CBRS_RIGHT = CBRS_ALIGN_RIGHT | CBRS_BORDER_LEFT -CBRS_BOTTOM = CBRS_ALIGN_BOTTOM | CBRS_BORDER_TOP -SBPS_NORMAL = 0x0000 -SBPS_NOBORDERS = 0x0100 -SBPS_POPOUT = 0x0200 -SBPS_OWNERDRAW = 0x1000 -SBPS_DISABLED = 0x04000000 -SBPS_STRETCH = 0x08000000 -ID_INDICATOR_EXT = 0xE700 -ID_INDICATOR_CAPS = 0xE701 -ID_INDICATOR_NUM = 0xE702 -ID_INDICATOR_SCRL = 0xE703 -ID_INDICATOR_OVR = 0xE704 -ID_INDICATOR_REC = 0xE705 -ID_INDICATOR_KANA = 0xE706 -ID_SEPARATOR = 0 -AFX_IDW_CONTROLBAR_FIRST = 0xE800 -AFX_IDW_CONTROLBAR_LAST = 0xE8FF 
-AFX_IDW_TOOLBAR = 0xE800 -AFX_IDW_STATUS_BAR = 0xE801 -AFX_IDW_PREVIEW_BAR = 0xE802 -AFX_IDW_RESIZE_BAR = 0xE803 -AFX_IDW_DOCKBAR_TOP = 0xE81B -AFX_IDW_DOCKBAR_LEFT = 0xE81C -AFX_IDW_DOCKBAR_RIGHT = 0xE81D -AFX_IDW_DOCKBAR_BOTTOM = 0xE81E -AFX_IDW_DOCKBAR_FLOAT = 0xE81F - - -def AFX_CONTROLBAR_MASK(nIDC): - return 1 << (nIDC - AFX_IDW_CONTROLBAR_FIRST) - - -AFX_IDW_PANE_FIRST = 0xE900 -AFX_IDW_PANE_LAST = 0xE9FF -AFX_IDW_HSCROLL_FIRST = 0xEA00 -AFX_IDW_VSCROLL_FIRST = 0xEA10 -AFX_IDW_SIZE_BOX = 0xEA20 -AFX_IDW_PANE_SAVE = 0xEA21 -AFX_IDS_APP_TITLE = 0xE000 -AFX_IDS_IDLEMESSAGE = 0xE001 -AFX_IDS_HELPMODEMESSAGE = 0xE002 -AFX_IDS_APP_TITLE_EMBEDDING = 0xE003 -AFX_IDS_COMPANY_NAME = 0xE004 -AFX_IDS_OBJ_TITLE_INPLACE = 0xE005 -ID_FILE_NEW = 0xE100 -ID_FILE_OPEN = 0xE101 -ID_FILE_CLOSE = 0xE102 -ID_FILE_SAVE = 0xE103 -ID_FILE_SAVE_AS = 0xE104 -ID_FILE_PAGE_SETUP = 0xE105 -ID_FILE_PRINT_SETUP = 0xE106 -ID_FILE_PRINT = 0xE107 -ID_FILE_PRINT_DIRECT = 0xE108 -ID_FILE_PRINT_PREVIEW = 0xE109 -ID_FILE_UPDATE = 0xE10A -ID_FILE_SAVE_COPY_AS = 0xE10B -ID_FILE_SEND_MAIL = 0xE10C -ID_FILE_MRU_FIRST = 0xE110 -ID_FILE_MRU_FILE1 = 0xE110 -ID_FILE_MRU_FILE2 = 0xE111 -ID_FILE_MRU_FILE3 = 0xE112 -ID_FILE_MRU_FILE4 = 0xE113 -ID_FILE_MRU_FILE5 = 0xE114 -ID_FILE_MRU_FILE6 = 0xE115 -ID_FILE_MRU_FILE7 = 0xE116 -ID_FILE_MRU_FILE8 = 0xE117 -ID_FILE_MRU_FILE9 = 0xE118 -ID_FILE_MRU_FILE10 = 0xE119 -ID_FILE_MRU_FILE11 = 0xE11A -ID_FILE_MRU_FILE12 = 0xE11B -ID_FILE_MRU_FILE13 = 0xE11C -ID_FILE_MRU_FILE14 = 0xE11D -ID_FILE_MRU_FILE15 = 0xE11E -ID_FILE_MRU_FILE16 = 0xE11F -ID_FILE_MRU_LAST = 0xE11F -ID_EDIT_CLEAR = 0xE120 -ID_EDIT_CLEAR_ALL = 0xE121 -ID_EDIT_COPY = 0xE122 -ID_EDIT_CUT = 0xE123 -ID_EDIT_FIND = 0xE124 -ID_EDIT_PASTE = 0xE125 -ID_EDIT_PASTE_LINK = 0xE126 -ID_EDIT_PASTE_SPECIAL = 0xE127 -ID_EDIT_REPEAT = 0xE128 -ID_EDIT_REPLACE = 0xE129 -ID_EDIT_SELECT_ALL = 0xE12A -ID_EDIT_UNDO = 0xE12B -ID_EDIT_REDO = 0xE12C -ID_WINDOW_NEW = 0xE130 -ID_WINDOW_ARRANGE = 0xE131 -ID_WINDOW_CASCADE = 
0xE132 -ID_WINDOW_TILE_HORZ = 0xE133 -ID_WINDOW_TILE_VERT = 0xE134 -ID_WINDOW_SPLIT = 0xE135 -AFX_IDM_WINDOW_FIRST = 0xE130 -AFX_IDM_WINDOW_LAST = 0xE13F -AFX_IDM_FIRST_MDICHILD = 0xFF00 -ID_APP_ABOUT = 0xE140 -ID_APP_EXIT = 0xE141 -ID_HELP_INDEX = 0xE142 -ID_HELP_FINDER = 0xE143 -ID_HELP_USING = 0xE144 -ID_CONTEXT_HELP = 0xE145 -ID_HELP = 0xE146 -ID_DEFAULT_HELP = 0xE147 -ID_NEXT_PANE = 0xE150 -ID_PREV_PANE = 0xE151 -ID_FORMAT_FONT = 0xE160 -ID_OLE_INSERT_NEW = 0xE200 -ID_OLE_EDIT_LINKS = 0xE201 -ID_OLE_EDIT_CONVERT = 0xE202 -ID_OLE_EDIT_CHANGE_ICON = 0xE203 -ID_OLE_EDIT_PROPERTIES = 0xE204 -ID_OLE_VERB_FIRST = 0xE210 -ID_OLE_VERB_LAST = 0xE21F -AFX_ID_PREVIEW_CLOSE = 0xE300 -AFX_ID_PREVIEW_NUMPAGE = 0xE301 -AFX_ID_PREVIEW_NEXT = 0xE302 -AFX_ID_PREVIEW_PREV = 0xE303 -AFX_ID_PREVIEW_PRINT = 0xE304 -AFX_ID_PREVIEW_ZOOMIN = 0xE305 -AFX_ID_PREVIEW_ZOOMOUT = 0xE306 -ID_VIEW_TOOLBAR = 0xE800 -ID_VIEW_STATUS_BAR = 0xE801 -ID_RECORD_FIRST = 0xE900 -ID_RECORD_LAST = 0xE901 -ID_RECORD_NEXT = 0xE902 -ID_RECORD_PREV = 0xE903 -IDC_STATIC = -1 -AFX_IDS_SCFIRST = 0xEF00 -AFX_IDS_SCSIZE = 0xEF00 -AFX_IDS_SCMOVE = 0xEF01 -AFX_IDS_SCMINIMIZE = 0xEF02 -AFX_IDS_SCMAXIMIZE = 0xEF03 -AFX_IDS_SCNEXTWINDOW = 0xEF04 -AFX_IDS_SCPREVWINDOW = 0xEF05 -AFX_IDS_SCCLOSE = 0xEF06 -AFX_IDS_SCRESTORE = 0xEF12 -AFX_IDS_SCTASKLIST = 0xEF13 -AFX_IDS_MDICHILD = 0xEF1F -AFX_IDS_DESKACCESSORY = 0xEFDA -AFX_IDS_OPENFILE = 0xF000 -AFX_IDS_SAVEFILE = 0xF001 -AFX_IDS_ALLFILTER = 0xF002 -AFX_IDS_UNTITLED = 0xF003 -AFX_IDS_SAVEFILECOPY = 0xF004 -AFX_IDS_PREVIEW_CLOSE = 0xF005 -AFX_IDS_UNNAMED_FILE = 0xF006 -AFX_IDS_ABOUT = 0xF010 -AFX_IDS_HIDE = 0xF011 -AFX_IDP_NO_ERROR_AVAILABLE = 0xF020 -AFX_IDS_NOT_SUPPORTED_EXCEPTION = 0xF021 -AFX_IDS_RESOURCE_EXCEPTION = 0xF022 -AFX_IDS_MEMORY_EXCEPTION = 0xF023 -AFX_IDS_USER_EXCEPTION = 0xF024 -AFX_IDS_PRINTONPORT = 0xF040 -AFX_IDS_ONEPAGE = 0xF041 -AFX_IDS_TWOPAGE = 0xF042 -AFX_IDS_PRINTPAGENUM = 0xF043 -AFX_IDS_PREVIEWPAGEDESC = 0xF044 -AFX_IDS_PRINTDEFAULTEXT = 0xF045 
-AFX_IDS_PRINTDEFAULT = 0xF046 -AFX_IDS_PRINTFILTER = 0xF047 -AFX_IDS_PRINTCAPTION = 0xF048 -AFX_IDS_PRINTTOFILE = 0xF049 -AFX_IDS_OBJECT_MENUITEM = 0xF080 -AFX_IDS_EDIT_VERB = 0xF081 -AFX_IDS_ACTIVATE_VERB = 0xF082 -AFX_IDS_CHANGE_LINK = 0xF083 -AFX_IDS_AUTO = 0xF084 -AFX_IDS_MANUAL = 0xF085 -AFX_IDS_FROZEN = 0xF086 -AFX_IDS_ALL_FILES = 0xF087 -AFX_IDS_SAVE_MENU = 0xF088 -AFX_IDS_UPDATE_MENU = 0xF089 -AFX_IDS_SAVE_AS_MENU = 0xF08A -AFX_IDS_SAVE_COPY_AS_MENU = 0xF08B -AFX_IDS_EXIT_MENU = 0xF08C -AFX_IDS_UPDATING_ITEMS = 0xF08D -AFX_IDS_METAFILE_FORMAT = 0xF08E -AFX_IDS_DIB_FORMAT = 0xF08F -AFX_IDS_BITMAP_FORMAT = 0xF090 -AFX_IDS_LINKSOURCE_FORMAT = 0xF091 -AFX_IDS_EMBED_FORMAT = 0xF092 -AFX_IDS_PASTELINKEDTYPE = 0xF094 -AFX_IDS_UNKNOWNTYPE = 0xF095 -AFX_IDS_RTF_FORMAT = 0xF096 -AFX_IDS_TEXT_FORMAT = 0xF097 -AFX_IDS_INVALID_CURRENCY = 0xF098 -AFX_IDS_INVALID_DATETIME = 0xF099 -AFX_IDS_INVALID_DATETIMESPAN = 0xF09A -AFX_IDP_INVALID_FILENAME = 0xF100 -AFX_IDP_FAILED_TO_OPEN_DOC = 0xF101 -AFX_IDP_FAILED_TO_SAVE_DOC = 0xF102 -AFX_IDP_ASK_TO_SAVE = 0xF103 -AFX_IDP_FAILED_TO_CREATE_DOC = 0xF104 -AFX_IDP_FILE_TOO_LARGE = 0xF105 -AFX_IDP_FAILED_TO_START_PRINT = 0xF106 -AFX_IDP_FAILED_TO_LAUNCH_HELP = 0xF107 -AFX_IDP_INTERNAL_FAILURE = 0xF108 -AFX_IDP_COMMAND_FAILURE = 0xF109 -AFX_IDP_FAILED_MEMORY_ALLOC = 0xF10A -AFX_IDP_PARSE_INT = 0xF110 -AFX_IDP_PARSE_REAL = 0xF111 -AFX_IDP_PARSE_INT_RANGE = 0xF112 -AFX_IDP_PARSE_REAL_RANGE = 0xF113 -AFX_IDP_PARSE_STRING_SIZE = 0xF114 -AFX_IDP_PARSE_RADIO_BUTTON = 0xF115 -AFX_IDP_PARSE_BYTE = 0xF116 -AFX_IDP_PARSE_UINT = 0xF117 -AFX_IDP_PARSE_DATETIME = 0xF118 -AFX_IDP_PARSE_CURRENCY = 0xF119 -AFX_IDP_FAILED_INVALID_FORMAT = 0xF120 -AFX_IDP_FAILED_INVALID_PATH = 0xF121 -AFX_IDP_FAILED_DISK_FULL = 0xF122 -AFX_IDP_FAILED_ACCESS_READ = 0xF123 -AFX_IDP_FAILED_ACCESS_WRITE = 0xF124 -AFX_IDP_FAILED_IO_ERROR_READ = 0xF125 -AFX_IDP_FAILED_IO_ERROR_WRITE = 0xF126 -AFX_IDP_STATIC_OBJECT = 0xF180 -AFX_IDP_FAILED_TO_CONNECT = 0xF181 
-AFX_IDP_SERVER_BUSY = 0xF182 -AFX_IDP_BAD_VERB = 0xF183 -AFX_IDP_FAILED_TO_NOTIFY = 0xF185 -AFX_IDP_FAILED_TO_LAUNCH = 0xF186 -AFX_IDP_ASK_TO_UPDATE = 0xF187 -AFX_IDP_FAILED_TO_UPDATE = 0xF188 -AFX_IDP_FAILED_TO_REGISTER = 0xF189 -AFX_IDP_FAILED_TO_AUTO_REGISTER = 0xF18A -AFX_IDP_FAILED_TO_CONVERT = 0xF18B -AFX_IDP_GET_NOT_SUPPORTED = 0xF18C -AFX_IDP_SET_NOT_SUPPORTED = 0xF18D -AFX_IDP_ASK_TO_DISCARD = 0xF18E -AFX_IDP_FAILED_TO_CREATE = 0xF18F -AFX_IDP_FAILED_MAPI_LOAD = 0xF190 -AFX_IDP_INVALID_MAPI_DLL = 0xF191 -AFX_IDP_FAILED_MAPI_SEND = 0xF192 -AFX_IDP_FILE_NONE = 0xF1A0 -AFX_IDP_FILE_GENERIC = 0xF1A1 -AFX_IDP_FILE_NOT_FOUND = 0xF1A2 -AFX_IDP_FILE_BAD_PATH = 0xF1A3 -AFX_IDP_FILE_TOO_MANY_OPEN = 0xF1A4 -AFX_IDP_FILE_ACCESS_DENIED = 0xF1A5 -AFX_IDP_FILE_INVALID_FILE = 0xF1A6 -AFX_IDP_FILE_REMOVE_CURRENT = 0xF1A7 -AFX_IDP_FILE_DIR_FULL = 0xF1A8 -AFX_IDP_FILE_BAD_SEEK = 0xF1A9 -AFX_IDP_FILE_HARD_IO = 0xF1AA -AFX_IDP_FILE_SHARING = 0xF1AB -AFX_IDP_FILE_LOCKING = 0xF1AC -AFX_IDP_FILE_DISKFULL = 0xF1AD -AFX_IDP_FILE_EOF = 0xF1AE -AFX_IDP_ARCH_NONE = 0xF1B0 -AFX_IDP_ARCH_GENERIC = 0xF1B1 -AFX_IDP_ARCH_READONLY = 0xF1B2 -AFX_IDP_ARCH_ENDOFFILE = 0xF1B3 -AFX_IDP_ARCH_WRITEONLY = 0xF1B4 -AFX_IDP_ARCH_BADINDEX = 0xF1B5 -AFX_IDP_ARCH_BADCLASS = 0xF1B6 -AFX_IDP_ARCH_BADSCHEMA = 0xF1B7 -AFX_IDS_OCC_SCALEUNITS_PIXELS = 0xF1C0 -AFX_IDS_STATUS_FONT = 0xF230 -AFX_IDS_TOOLTIP_FONT = 0xF231 -AFX_IDS_UNICODE_FONT = 0xF232 -AFX_IDS_MINI_FONT = 0xF233 -AFX_IDP_SQL_FIRST = 0xF280 -AFX_IDP_SQL_CONNECT_FAIL = 0xF281 -AFX_IDP_SQL_RECORDSET_FORWARD_ONLY = 0xF282 -AFX_IDP_SQL_EMPTY_COLUMN_LIST = 0xF283 -AFX_IDP_SQL_FIELD_SCHEMA_MISMATCH = 0xF284 -AFX_IDP_SQL_ILLEGAL_MODE = 0xF285 -AFX_IDP_SQL_MULTIPLE_ROWS_AFFECTED = 0xF286 -AFX_IDP_SQL_NO_CURRENT_RECORD = 0xF287 -AFX_IDP_SQL_NO_ROWS_AFFECTED = 0xF288 -AFX_IDP_SQL_RECORDSET_READONLY = 0xF289 -AFX_IDP_SQL_SQL_NO_TOTAL = 0xF28A -AFX_IDP_SQL_ODBC_LOAD_FAILED = 0xF28B -AFX_IDP_SQL_DYNASET_NOT_SUPPORTED = 0xF28C 
-AFX_IDP_SQL_SNAPSHOT_NOT_SUPPORTED = 0xF28D -AFX_IDP_SQL_API_CONFORMANCE = 0xF28E -AFX_IDP_SQL_SQL_CONFORMANCE = 0xF28F -AFX_IDP_SQL_NO_DATA_FOUND = 0xF290 -AFX_IDP_SQL_ROW_UPDATE_NOT_SUPPORTED = 0xF291 -AFX_IDP_SQL_ODBC_V2_REQUIRED = 0xF292 -AFX_IDP_SQL_NO_POSITIONED_UPDATES = 0xF293 -AFX_IDP_SQL_LOCK_MODE_NOT_SUPPORTED = 0xF294 -AFX_IDP_SQL_DATA_TRUNCATED = 0xF295 -AFX_IDP_SQL_ROW_FETCH = 0xF296 -AFX_IDP_SQL_INCORRECT_ODBC = 0xF297 -AFX_IDP_SQL_UPDATE_DELETE_FAILED = 0xF298 -AFX_IDP_SQL_DYNAMIC_CURSOR_NOT_SUPPORTED = 0xF299 -AFX_IDP_DAO_FIRST = 0xF2A0 -AFX_IDP_DAO_ENGINE_INITIALIZATION = 0xF2A0 -AFX_IDP_DAO_DFX_BIND = 0xF2A1 -AFX_IDP_DAO_OBJECT_NOT_OPEN = 0xF2A2 -AFX_IDP_DAO_ROWTOOSHORT = 0xF2A3 -AFX_IDP_DAO_BADBINDINFO = 0xF2A4 -AFX_IDP_DAO_COLUMNUNAVAILABLE = 0xF2A5 -AFX_IDC_LISTBOX = 100 -AFX_IDC_CHANGE = 101 -AFX_IDC_PRINT_DOCNAME = 201 -AFX_IDC_PRINT_PRINTERNAME = 202 -AFX_IDC_PRINT_PORTNAME = 203 -AFX_IDC_PRINT_PAGENUM = 204 -ID_APPLY_NOW = 0x3021 -ID_WIZBACK = 0x3023 -ID_WIZNEXT = 0x3024 -ID_WIZFINISH = 0x3025 -AFX_IDC_TAB_CONTROL = 0x3020 -AFX_IDD_FILEOPEN = 28676 -AFX_IDD_FILESAVE = 28677 -AFX_IDD_FONT = 28678 -AFX_IDD_COLOR = 28679 -AFX_IDD_PRINT = 28680 -AFX_IDD_PRINTSETUP = 28681 -AFX_IDD_FIND = 28682 -AFX_IDD_REPLACE = 28683 -AFX_IDD_NEWTYPEDLG = 30721 -AFX_IDD_PRINTDLG = 30722 -AFX_IDD_PREVIEW_TOOLBAR = 30723 -AFX_IDD_PREVIEW_SHORTTOOLBAR = 30731 -AFX_IDD_INSERTOBJECT = 30724 -AFX_IDD_CHANGEICON = 30725 -AFX_IDD_CONVERT = 30726 -AFX_IDD_PASTESPECIAL = 30727 -AFX_IDD_EDITLINKS = 30728 -AFX_IDD_FILEBROWSE = 30729 -AFX_IDD_BUSY = 30730 -AFX_IDD_OBJECTPROPERTIES = 30732 -AFX_IDD_CHANGESOURCE = 30733 -AFX_IDC_CONTEXTHELP = 30977 -AFX_IDC_MAGNIFY = 30978 -AFX_IDC_SMALLARROWS = 30979 -AFX_IDC_HSPLITBAR = 30980 -AFX_IDC_VSPLITBAR = 30981 -AFX_IDC_NODROPCRSR = 30982 -AFX_IDC_TRACKNWSE = 30983 -AFX_IDC_TRACKNESW = 30984 -AFX_IDC_TRACKNS = 30985 -AFX_IDC_TRACKWE = 30986 -AFX_IDC_TRACK4WAY = 30987 -AFX_IDC_MOVE4WAY = 30988 -AFX_IDB_MINIFRAME_MENU = 30994 
-AFX_IDB_CHECKLISTBOX_NT = 30995 -AFX_IDB_CHECKLISTBOX_95 = 30996 -AFX_IDR_PREVIEW_ACCEL = 30997 -AFX_IDI_STD_MDIFRAME = 31233 -AFX_IDI_STD_FRAME = 31234 -AFX_IDC_FONTPROP = 1000 -AFX_IDC_FONTNAMES = 1001 -AFX_IDC_FONTSTYLES = 1002 -AFX_IDC_FONTSIZES = 1003 -AFX_IDC_STRIKEOUT = 1004 -AFX_IDC_UNDERLINE = 1005 -AFX_IDC_SAMPLEBOX = 1006 -AFX_IDC_COLOR_BLACK = 1100 -AFX_IDC_COLOR_WHITE = 1101 -AFX_IDC_COLOR_RED = 1102 -AFX_IDC_COLOR_GREEN = 1103 -AFX_IDC_COLOR_BLUE = 1104 -AFX_IDC_COLOR_YELLOW = 1105 -AFX_IDC_COLOR_MAGENTA = 1106 -AFX_IDC_COLOR_CYAN = 1107 -AFX_IDC_COLOR_GRAY = 1108 -AFX_IDC_COLOR_LIGHTGRAY = 1109 -AFX_IDC_COLOR_DARKRED = 1110 -AFX_IDC_COLOR_DARKGREEN = 1111 -AFX_IDC_COLOR_DARKBLUE = 1112 -AFX_IDC_COLOR_LIGHTBROWN = 1113 -AFX_IDC_COLOR_DARKMAGENTA = 1114 -AFX_IDC_COLOR_DARKCYAN = 1115 -AFX_IDC_COLORPROP = 1116 -AFX_IDC_SYSTEMCOLORS = 1117 -AFX_IDC_PROPNAME = 1201 -AFX_IDC_PICTURE = 1202 -AFX_IDC_BROWSE = 1203 -AFX_IDC_CLEAR = 1204 -AFX_IDD_PROPPAGE_COLOR = 32257 -AFX_IDD_PROPPAGE_FONT = 32258 -AFX_IDD_PROPPAGE_PICTURE = 32259 -AFX_IDB_TRUETYPE = 32384 -AFX_IDS_PROPPAGE_UNKNOWN = 0xFE01 -AFX_IDS_COLOR_DESKTOP = 0xFE04 -AFX_IDS_COLOR_APPWORKSPACE = 0xFE05 -AFX_IDS_COLOR_WNDBACKGND = 0xFE06 -AFX_IDS_COLOR_WNDTEXT = 0xFE07 -AFX_IDS_COLOR_MENUBAR = 0xFE08 -AFX_IDS_COLOR_MENUTEXT = 0xFE09 -AFX_IDS_COLOR_ACTIVEBAR = 0xFE0A -AFX_IDS_COLOR_INACTIVEBAR = 0xFE0B -AFX_IDS_COLOR_ACTIVETEXT = 0xFE0C -AFX_IDS_COLOR_INACTIVETEXT = 0xFE0D -AFX_IDS_COLOR_ACTIVEBORDER = 0xFE0E -AFX_IDS_COLOR_INACTIVEBORDER = 0xFE0F -AFX_IDS_COLOR_WNDFRAME = 0xFE10 -AFX_IDS_COLOR_SCROLLBARS = 0xFE11 -AFX_IDS_COLOR_BTNFACE = 0xFE12 -AFX_IDS_COLOR_BTNSHADOW = 0xFE13 -AFX_IDS_COLOR_BTNTEXT = 0xFE14 -AFX_IDS_COLOR_BTNHIGHLIGHT = 0xFE15 -AFX_IDS_COLOR_DISABLEDTEXT = 0xFE16 -AFX_IDS_COLOR_HIGHLIGHT = 0xFE17 -AFX_IDS_COLOR_HIGHLIGHTTEXT = 0xFE18 -AFX_IDS_REGULAR = 0xFE19 -AFX_IDS_BOLD = 0xFE1A -AFX_IDS_ITALIC = 0xFE1B -AFX_IDS_BOLDITALIC = 0xFE1C -AFX_IDS_SAMPLETEXT = 0xFE1D 
-AFX_IDS_DISPLAYSTRING_FONT = 0xFE1E -AFX_IDS_DISPLAYSTRING_COLOR = 0xFE1F -AFX_IDS_DISPLAYSTRING_PICTURE = 0xFE20 -AFX_IDS_PICTUREFILTER = 0xFE21 -AFX_IDS_PICTYPE_UNKNOWN = 0xFE22 -AFX_IDS_PICTYPE_NONE = 0xFE23 -AFX_IDS_PICTYPE_BITMAP = 0xFE24 -AFX_IDS_PICTYPE_METAFILE = 0xFE25 -AFX_IDS_PICTYPE_ICON = 0xFE26 -AFX_IDS_COLOR_PPG = 0xFE28 -AFX_IDS_COLOR_PPG_CAPTION = 0xFE29 -AFX_IDS_FONT_PPG = 0xFE2A -AFX_IDS_FONT_PPG_CAPTION = 0xFE2B -AFX_IDS_PICTURE_PPG = 0xFE2C -AFX_IDS_PICTURE_PPG_CAPTION = 0xFE2D -AFX_IDS_PICTUREBROWSETITLE = 0xFE30 -AFX_IDS_BORDERSTYLE_0 = 0xFE31 -AFX_IDS_BORDERSTYLE_1 = 0xFE32 -AFX_IDS_VERB_EDIT = 0xFE40 -AFX_IDS_VERB_PROPERTIES = 0xFE41 -AFX_IDP_PICTURECANTOPEN = 0xFE83 -AFX_IDP_PICTURECANTLOAD = 0xFE84 -AFX_IDP_PICTURETOOLARGE = 0xFE85 -AFX_IDP_PICTUREREADFAILED = 0xFE86 -AFX_IDP_E_ILLEGALFUNCTIONCALL = 0xFEA0 -AFX_IDP_E_OVERFLOW = 0xFEA1 -AFX_IDP_E_OUTOFMEMORY = 0xFEA2 -AFX_IDP_E_DIVISIONBYZERO = 0xFEA3 -AFX_IDP_E_OUTOFSTRINGSPACE = 0xFEA4 -AFX_IDP_E_OUTOFSTACKSPACE = 0xFEA5 -AFX_IDP_E_BADFILENAMEORNUMBER = 0xFEA6 -AFX_IDP_E_FILENOTFOUND = 0xFEA7 -AFX_IDP_E_BADFILEMODE = 0xFEA8 -AFX_IDP_E_FILEALREADYOPEN = 0xFEA9 -AFX_IDP_E_DEVICEIOERROR = 0xFEAA -AFX_IDP_E_FILEALREADYEXISTS = 0xFEAB -AFX_IDP_E_BADRECORDLENGTH = 0xFEAC -AFX_IDP_E_DISKFULL = 0xFEAD -AFX_IDP_E_BADRECORDNUMBER = 0xFEAE -AFX_IDP_E_BADFILENAME = 0xFEAF -AFX_IDP_E_TOOMANYFILES = 0xFEB0 -AFX_IDP_E_DEVICEUNAVAILABLE = 0xFEB1 -AFX_IDP_E_PERMISSIONDENIED = 0xFEB2 -AFX_IDP_E_DISKNOTREADY = 0xFEB3 -AFX_IDP_E_PATHFILEACCESSERROR = 0xFEB4 -AFX_IDP_E_PATHNOTFOUND = 0xFEB5 -AFX_IDP_E_INVALIDPATTERNSTRING = 0xFEB6 -AFX_IDP_E_INVALIDUSEOFNULL = 0xFEB7 -AFX_IDP_E_INVALIDFILEFORMAT = 0xFEB8 -AFX_IDP_E_INVALIDPROPERTYVALUE = 0xFEB9 -AFX_IDP_E_INVALIDPROPERTYARRAYINDEX = 0xFEBA -AFX_IDP_E_SETNOTSUPPORTEDATRUNTIME = 0xFEBB -AFX_IDP_E_SETNOTSUPPORTED = 0xFEBC -AFX_IDP_E_NEEDPROPERTYARRAYINDEX = 0xFEBD -AFX_IDP_E_SETNOTPERMITTED = 0xFEBE -AFX_IDP_E_GETNOTSUPPORTEDATRUNTIME = 0xFEBF 
-AFX_IDP_E_GETNOTSUPPORTED = 0xFEC0 -AFX_IDP_E_PROPERTYNOTFOUND = 0xFEC1 -AFX_IDP_E_INVALIDCLIPBOARDFORMAT = 0xFEC2 -AFX_IDP_E_INVALIDPICTURE = 0xFEC3 -AFX_IDP_E_PRINTERERROR = 0xFEC4 -AFX_IDP_E_CANTSAVEFILETOTEMP = 0xFEC5 -AFX_IDP_E_SEARCHTEXTNOTFOUND = 0xFEC6 -AFX_IDP_E_REPLACEMENTSTOOLONG = 0xFEC7 diff --git a/lib/pythonwin/pywin/mfc/dialog.py b/lib/pythonwin/pywin/mfc/dialog.py deleted file mode 100644 index 25fe1363..00000000 --- a/lib/pythonwin/pywin/mfc/dialog.py +++ /dev/null @@ -1,278 +0,0 @@ -""" \ -Base class for Dialogs. Also contains a few useful utility functions -""" -# dialog.py -# Python class for Dialog Boxes in PythonWin. - -import win32con -import win32ui - -# sob - 2to3 doesn't see this as a relative import :( -from pywin.mfc import window - - -def dllFromDll(dllid): - "given a 'dll' (maybe a dll, filename, etc), return a DLL object" - if dllid == None: - return None - elif type("") == type(dllid): - return win32ui.LoadLibrary(dllid) - else: - try: - dllid.GetFileName() - except AttributeError: - raise TypeError("DLL parameter must be None, a filename or a dll object") - return dllid - - -class Dialog(window.Wnd): - "Base class for a dialog" - - def __init__(self, id, dllid=None): - """id is the resource ID, or a template - dllid may be None, a dll object, or a string with a dll name""" - # must take a reference to the DLL until InitDialog. - self.dll = dllFromDll(dllid) - if type(id) == type([]): # a template - dlg = win32ui.CreateDialogIndirect(id) - else: - dlg = win32ui.CreateDialog(id, self.dll) - window.Wnd.__init__(self, dlg) - self.HookCommands() - self.bHaveInit = None - - def HookCommands(self): - pass - - def OnAttachedObjectDeath(self): - self.data = self._obj_.data - window.Wnd.OnAttachedObjectDeath(self) - - # provide virtuals. 
- def OnOK(self): - self._obj_.OnOK() - - def OnCancel(self): - self._obj_.OnCancel() - - def OnInitDialog(self): - self.bHaveInit = 1 - if self._obj_.data: - self._obj_.UpdateData(0) - return 1 # I did NOT set focus to a child window. - - def OnDestroy(self, msg): - self.dll = None # theoretically not needed if object destructs normally. - - # DDX support - def AddDDX(self, *args): - self._obj_.datalist.append(args) - - # Make a dialog object look like a dictionary for the DDX support - def __bool__(self): - return True - - def __len__(self): - return len(self.data) - - def __getitem__(self, key): - return self.data[key] - - def __setitem__(self, key, item): - self._obj_.data[key] = item # self.UpdateData(0) - - def keys(self): - return list(self.data.keys()) - - def items(self): - return list(self.data.items()) - - def values(self): - return list(self.data.values()) - - # XXX - needs py3k work! - def has_key(self, key): - return key in self.data - - -class PrintDialog(Dialog): - "Base class for a print dialog" - - def __init__( - self, - pInfo, - dlgID, - printSetupOnly=0, - flags=( - win32ui.PD_ALLPAGES - | win32ui.PD_USEDEVMODECOPIES - | win32ui.PD_NOPAGENUMS - | win32ui.PD_HIDEPRINTTOFILE - | win32ui.PD_NOSELECTION - ), - parent=None, - dllid=None, - ): - self.dll = dllFromDll(dllid) - if type(dlgID) == type([]): # a template - raise TypeError("dlgID parameter must be an integer resource ID") - dlg = win32ui.CreatePrintDialog(dlgID, printSetupOnly, flags, parent, self.dll) - window.Wnd.__init__(self, dlg) - self.HookCommands() - self.bHaveInit = None - self.pInfo = pInfo - # init values (if PrintSetup is called, values still available) - flags = pInfo.GetFlags() - self["toFile"] = flags & win32ui.PD_PRINTTOFILE != 0 - self["direct"] = pInfo.GetDirect() - self["preview"] = pInfo.GetPreview() - self["continuePrinting"] = pInfo.GetContinuePrinting() - self["curPage"] = pInfo.GetCurPage() - self["numPreviewPages"] = pInfo.GetNumPreviewPages() - self["userData"] = 
pInfo.GetUserData() - self["draw"] = pInfo.GetDraw() - self["pageDesc"] = pInfo.GetPageDesc() - self["minPage"] = pInfo.GetMinPage() - self["maxPage"] = pInfo.GetMaxPage() - self["offsetPage"] = pInfo.GetOffsetPage() - self["fromPage"] = pInfo.GetFromPage() - self["toPage"] = pInfo.GetToPage() - # these values updated after OnOK - self["copies"] = 0 - self["deviceName"] = "" - self["driverName"] = "" - self["printAll"] = 0 - self["printCollate"] = 0 - self["printRange"] = 0 - self["printSelection"] = 0 - - def OnInitDialog(self): - self.pInfo.CreatePrinterDC() # This also sets the hDC of the pInfo structure. - return self._obj_.OnInitDialog() - - def OnCancel(self): - del self.pInfo - - def OnOK(self): - """DoModal has finished. Can now access the users choices""" - self._obj_.OnOK() - pInfo = self.pInfo - # user values - flags = pInfo.GetFlags() - self["toFile"] = flags & win32ui.PD_PRINTTOFILE != 0 - self["direct"] = pInfo.GetDirect() - self["preview"] = pInfo.GetPreview() - self["continuePrinting"] = pInfo.GetContinuePrinting() - self["curPage"] = pInfo.GetCurPage() - self["numPreviewPages"] = pInfo.GetNumPreviewPages() - self["userData"] = pInfo.GetUserData() - self["draw"] = pInfo.GetDraw() - self["pageDesc"] = pInfo.GetPageDesc() - self["minPage"] = pInfo.GetMinPage() - self["maxPage"] = pInfo.GetMaxPage() - self["offsetPage"] = pInfo.GetOffsetPage() - self["fromPage"] = pInfo.GetFromPage() - self["toPage"] = pInfo.GetToPage() - self["copies"] = pInfo.GetCopies() - self["deviceName"] = pInfo.GetDeviceName() - self["driverName"] = pInfo.GetDriverName() - self["printAll"] = pInfo.PrintAll() - self["printCollate"] = pInfo.PrintCollate() - self["printRange"] = pInfo.PrintRange() - self["printSelection"] = pInfo.PrintSelection() - del self.pInfo - - -class PropertyPage(Dialog): - "Base class for a Property Page" - - def __init__(self, id, dllid=None, caption=0): - """id is the resource ID - dllid may be None, a dll object, or a string with a dll name""" - - 
self.dll = dllFromDll(dllid) - if self.dll: - oldRes = win32ui.SetResource(self.dll) - if type(id) == type([]): - dlg = win32ui.CreatePropertyPageIndirect(id) - else: - dlg = win32ui.CreatePropertyPage(id, caption) - if self.dll: - win32ui.SetResource(oldRes) - # dont call dialog init! - window.Wnd.__init__(self, dlg) - self.HookCommands() - - -class PropertySheet(window.Wnd): - def __init__(self, caption, dll=None, pageList=None): # parent=None, style,etc): - "Initialize a property sheet. pageList is a list of ID's" - # must take a reference to the DLL until InitDialog. - self.dll = dllFromDll(dll) - self.sheet = win32ui.CreatePropertySheet(caption) - window.Wnd.__init__(self, self.sheet) - if not pageList is None: - self.AddPage(pageList) - - def OnInitDialog(self): - return self._obj_.OnInitDialog() - - def DoModal(self): - if self.dll: - oldRes = win32ui.SetResource(self.dll) - rc = self.sheet.DoModal() - if self.dll: - win32ui.SetResource(oldRes) - return rc - - def AddPage(self, pages): - if self.dll: - oldRes = win32ui.SetResource(self.dll) - try: # try list style access - pages[0] - isSeq = 1 - except (TypeError, KeyError): - isSeq = 0 - if isSeq: - for page in pages: - self.DoAddSinglePage(page) - else: - self.DoAddSinglePage(pages) - if self.dll: - win32ui.SetResource(oldRes) - - def DoAddSinglePage(self, page): - "Page may be page, or int ID. Assumes DLL setup" - if type(page) == type(0): - self.sheet.AddPage(win32ui.CreatePropertyPage(page)) - else: - self.sheet.AddPage(page) - - -# define some app utility functions. -def GetSimpleInput(prompt, defValue="", title=None): - """displays a dialog, and returns a string, or None if cancelled. - args prompt, defValue='', title=main frames title""" - # uses a simple dialog to return a string object. - if title is None: - title = win32ui.GetMainFrame().GetWindowText() - # 2to3 insists on converting 'Dialog.__init__' to 'tkinter.dialog...' 
- DlgBaseClass = Dialog - - class DlgSimpleInput(DlgBaseClass): - def __init__(self, prompt, defValue, title): - self.title = title - DlgBaseClass.__init__(self, win32ui.IDD_SIMPLE_INPUT) - self.AddDDX(win32ui.IDC_EDIT1, "result") - self.AddDDX(win32ui.IDC_PROMPT1, "prompt") - self._obj_.data["result"] = defValue - self._obj_.data["prompt"] = prompt - - def OnInitDialog(self): - self.SetWindowText(self.title) - return DlgBaseClass.OnInitDialog(self) - - dlg = DlgSimpleInput(prompt, defValue, title) - if dlg.DoModal() != win32con.IDOK: - return None - return dlg["result"] diff --git a/lib/pythonwin/pywin/mfc/docview.py b/lib/pythonwin/pywin/mfc/docview.py deleted file mode 100644 index 7da91b46..00000000 --- a/lib/pythonwin/pywin/mfc/docview.py +++ /dev/null @@ -1,151 +0,0 @@ -# document and view classes for MFC. -import win32ui - -from . import object, window - - -class View(window.Wnd): - def __init__(self, initobj): - window.Wnd.__init__(self, initobj) - - def OnInitialUpdate(self): - pass - - -# Simple control based views. -class CtrlView(View): - def __init__(self, doc, wndclass, style=0): - View.__init__(self, win32ui.CreateCtrlView(doc, wndclass, style)) - - -class EditView(CtrlView): - def __init__(self, doc): - View.__init__(self, win32ui.CreateEditView(doc)) - - -class RichEditView(CtrlView): - def __init__(self, doc): - View.__init__(self, win32ui.CreateRichEditView(doc)) - - -class ListView(CtrlView): - def __init__(self, doc): - View.__init__(self, win32ui.CreateListView(doc)) - - -class TreeView(CtrlView): - def __init__(self, doc): - View.__init__(self, win32ui.CreateTreeView(doc)) - - -# Other more advanced views. 
-class ScrollView(View): - def __init__(self, doc): - View.__init__(self, win32ui.CreateView(doc)) - - -class FormView(View): - def __init__(self, doc, id): - View.__init__(self, win32ui.CreateFormView(doc, id)) - - -class Document(object.CmdTarget): - def __init__(self, template, docobj=None): - if docobj is None: - docobj = template.DoCreateDoc() - object.CmdTarget.__init__(self, docobj) - - -class RichEditDoc(object.CmdTarget): - def __init__(self, template): - object.CmdTarget.__init__(self, template.DoCreateRichEditDoc()) - - -class CreateContext: - "A transient base class used as a CreateContext" - - def __init__(self, template, doc=None): - self.template = template - self.doc = doc - - def __del__(self): - self.close() - - def close(self): - self.doc = None - self.template = None - - -class DocTemplate(object.CmdTarget): - def __init__( - self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None - ): - if resourceId is None: - resourceId = win32ui.IDR_PYTHONTYPE - object.CmdTarget.__init__(self, self._CreateDocTemplate(resourceId)) - self.MakeDocument = MakeDocument - self.MakeFrame = MakeFrame - self.MakeView = MakeView - self._SetupSharedMenu_() - - def _SetupSharedMenu_(self): - pass # to be overridden by each "app" - - def _CreateDocTemplate(self, resourceId): - return win32ui.CreateDocTemplate(resourceId) - - def __del__(self): - object.CmdTarget.__del__(self) - - def CreateCreateContext(self, doc=None): - return CreateContext(self, doc) - - def CreateNewFrame(self, doc): - makeFrame = self.MakeFrame - if makeFrame is None: - makeFrame = window.MDIChildWnd - wnd = makeFrame() - context = self.CreateCreateContext(doc) - wnd.LoadFrame( - self.GetResourceID(), -1, None, context - ) # triggers OnCreateClient... 
- return wnd - - def CreateNewDocument(self): - makeDocument = self.MakeDocument - if makeDocument is None: - makeDocument = Document - return makeDocument(self) - - def CreateView(self, frame, context): - makeView = self.MakeView - if makeView is None: - makeView = EditView - view = makeView(context.doc) - view.CreateWindow(frame) - - -class RichEditDocTemplate(DocTemplate): - def __init__( - self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None - ): - if MakeView is None: - MakeView = RichEditView - if MakeDocument is None: - MakeDocument = RichEditDoc - DocTemplate.__init__(self, resourceId, MakeDocument, MakeFrame, MakeView) - - def _CreateDocTemplate(self, resourceId): - return win32ui.CreateRichEditDocTemplate(resourceId) - - -def t(): - class FormTemplate(DocTemplate): - def CreateView(self, frame, context): - makeView = self.MakeView - # view = FormView(context.doc, win32ui.IDD_PROPDEMO1) - view = ListView(context.doc) - view.CreateWindow(frame) - - t = FormTemplate() - return t.OpenDocumentFile(None) diff --git a/lib/pythonwin/pywin/mfc/object.py b/lib/pythonwin/pywin/mfc/object.py deleted file mode 100644 index 70138b6e..00000000 --- a/lib/pythonwin/pywin/mfc/object.py +++ /dev/null @@ -1,66 +0,0 @@ -# MFC base classes. - -import win32ui - - -class Object: - def __init__(self, initObj=None): - self.__dict__["_obj_"] = initObj - # self._obj_ = initObj - if initObj is not None: - initObj.AttachObject(self) - - def __del__(self): - self.close() - - def __getattr__( - self, attr - ): # Make this object look like the underlying win32ui one. - # During cleanup __dict__ is not available, causing recursive death. 
- if not attr.startswith("__"): - try: - o = self.__dict__["_obj_"] - if o is not None: - return getattr(o, attr) - # Only raise this error for non "internal" names - - # Python may be calling __len__, __nonzero__, etc, so - # we dont want this exception - if attr[0] != "_" and attr[-1] != "_": - raise win32ui.error("The MFC object has died.") - except KeyError: - # No _obj_ at all - dont report MFC object died when there isnt one! - pass - raise AttributeError(attr) - - def OnAttachedObjectDeath(self): - # print "object", self.__class__.__name__, "dieing" - self._obj_ = None - - def close(self): - if "_obj_" in self.__dict__: - if self._obj_ is not None: - self._obj_.AttachObject(None) - self._obj_ = None - - -class CmdTarget(Object): - def __init__(self, initObj): - Object.__init__(self, initObj) - - def HookNotifyRange(self, handler, firstID, lastID): - oldhandlers = [] - for i in range(firstID, lastID + 1): - oldhandlers.append(self.HookNotify(handler, i)) - return oldhandlers - - def HookCommandRange(self, handler, firstID, lastID): - oldhandlers = [] - for i in range(firstID, lastID + 1): - oldhandlers.append(self.HookCommand(handler, i)) - return oldhandlers - - def HookCommandUpdateRange(self, handler, firstID, lastID): - oldhandlers = [] - for i in range(firstID, lastID + 1): - oldhandlers.append(self.HookCommandUpdate(handler, i)) - return oldhandlers diff --git a/lib/pythonwin/pywin/mfc/thread.py b/lib/pythonwin/pywin/mfc/thread.py deleted file mode 100644 index 90a0a762..00000000 --- a/lib/pythonwin/pywin/mfc/thread.py +++ /dev/null @@ -1,25 +0,0 @@ -# Thread and application objects - -import win32ui - -from . 
import object - - -class WinThread(object.CmdTarget): - def __init__(self, initObj=None): - if initObj is None: - initObj = win32ui.CreateThread() - object.CmdTarget.__init__(self, initObj) - - def InitInstance(self): - pass # Default None/0 return indicates success for InitInstance() - - def ExitInstance(self): - pass - - -class WinApp(WinThread): - def __init__(self, initApp=None): - if initApp is None: - initApp = win32ui.GetApp() - WinThread.__init__(self, initApp) diff --git a/lib/pythonwin/pywin/mfc/window.py b/lib/pythonwin/pywin/mfc/window.py deleted file mode 100644 index e52a7c1c..00000000 --- a/lib/pythonwin/pywin/mfc/window.py +++ /dev/null @@ -1,50 +0,0 @@ -# The MFCish window classes. -import win32con -import win32ui - -from . import object - - -class Wnd(object.CmdTarget): - def __init__(self, initobj=None): - object.CmdTarget.__init__(self, initobj) - if self._obj_: - self._obj_.HookMessage(self.OnDestroy, win32con.WM_DESTROY) - - def OnDestroy(self, msg): - pass - - -# NOTE NOTE - This facility is currently disabled in Pythonwin!!!!! -# Note - to process all messages for your window, add the following method -# to a derived class. This code provides default message handling (ie, is -# identical, except presumably in speed, as if the method did not exist at -# all, so presumably will be modified to test for specific messages to be -# useful! 
-# def WindowProc(self, msg, wParam, lParam): -# rc, lResult = self._obj_.OnWndMsg(msg, wParam, lParam) -# if not rc: lResult = self._obj_.DefWindowProc(msg, wParam, lParam) -# return lResult - - -class FrameWnd(Wnd): - def __init__(self, wnd): - Wnd.__init__(self, wnd) - - -class MDIChildWnd(FrameWnd): - def __init__(self, wnd=None): - if wnd is None: - wnd = win32ui.CreateMDIChild() - FrameWnd.__init__(self, wnd) - - def OnCreateClient(self, cp, context): - if context is not None and context.template is not None: - context.template.CreateView(self, context) - - -class MDIFrameWnd(FrameWnd): - def __init__(self, wnd=None): - if wnd is None: - wnd = win32ui.CreateMDIFrame() - FrameWnd.__init__(self, wnd) diff --git a/lib/pythonwin/pywin/scintilla/IDLEenvironment.py b/lib/pythonwin/pywin/scintilla/IDLEenvironment.py deleted file mode 100644 index b1db5093..00000000 --- a/lib/pythonwin/pywin/scintilla/IDLEenvironment.py +++ /dev/null @@ -1,598 +0,0 @@ -# Code that allows Pythonwin to pretend it is IDLE -# (at least as far as most IDLE extensions are concerned) - -import string -import sys - -import win32api -import win32con -import win32ui -from pywin import default_scintilla_encoding -from pywin.mfc.dialog import GetSimpleInput - -wordchars = string.ascii_uppercase + string.ascii_lowercase + string.digits - - -class TextError(Exception): # When a TclError would normally be raised. - pass - - -class EmptyRange(Exception): # Internally raised. - pass - - -def GetIDLEModule(module): - try: - # First get it from Pythonwin it is exists. - modname = "pywin.idle." + module - __import__(modname) - except ImportError as details: - msg = ( - "The IDLE extension '%s' can not be located.\r\n\r\n" - "Please correct the installation and restart the" - " application.\r\n\r\n%s" % (module, details) - ) - win32ui.MessageBox(msg) - return None - mod = sys.modules[modname] - mod.TclError = TextError # A hack that can go soon! 
- return mod - - -# A class that is injected into the IDLE auto-indent extension. -# It allows for decent performance when opening a new file, -# as auto-indent uses the tokenizer module to determine indents. -# The default AutoIndent readline method works OK, but it goes through -# this layer of Tk index indirection for every single line. For large files -# without indents (and even small files with indents :-) it was pretty slow! -def fast_readline(self): - if self.finished: - val = "" - else: - if "_scint_lines" not in self.__dict__: - # XXX - note - assumes this is only called once the file is loaded! - self._scint_lines = self.text.edit.GetTextRange().split("\n") - sl = self._scint_lines - i = self.i = self.i + 1 - if i >= len(sl): - val = "" - else: - val = sl[i] + "\n" - return val.encode(default_scintilla_encoding) - - -try: - GetIDLEModule("AutoIndent").IndentSearcher.readline = fast_readline -except AttributeError: # GetIDLEModule may return None - pass - - -# A class that attempts to emulate an IDLE editor window. -# Construct with a Pythonwin view. -class IDLEEditorWindow: - def __init__(self, edit): - self.edit = edit - self.text = TkText(edit) - self.extensions = {} - self.extension_menus = {} - - def close(self): - self.edit = self.text = None - self.extension_menus = None - try: - for ext in self.extensions.values(): - closer = getattr(ext, "close", None) - if closer is not None: - closer() - finally: - self.extensions = {} - - def IDLEExtension(self, extension): - ext = self.extensions.get(extension) - if ext is not None: - return ext - mod = GetIDLEModule(extension) - if mod is None: - return None - klass = getattr(mod, extension) - ext = self.extensions[extension] = klass(self) - # Find and bind all the events defined in the extension. 
- events = [item for item in dir(klass) if item[-6:] == "_event"] - for event in events: - name = "<<%s>>" % (event[:-6].replace("_", "-"),) - self.edit.bindings.bind(name, getattr(ext, event)) - return ext - - def GetMenuItems(self, menu_name): - # Get all menu items for the menu name (eg, "edit") - bindings = self.edit.bindings - ret = [] - for ext in self.extensions.values(): - menudefs = getattr(ext, "menudefs", []) - for name, items in menudefs: - if name == menu_name: - for text, event in [item for item in items if item is not None]: - text = text.replace("&", "&&") - text = text.replace("_", "&") - ret.append((text, event)) - return ret - - ###################################################################### - # The IDLE "Virtual UI" methods that are exposed to the IDLE extensions. - # - def askinteger( - self, caption, prompt, parent=None, initialvalue=0, minvalue=None, maxvalue=None - ): - while 1: - rc = GetSimpleInput(prompt, str(initialvalue), caption) - if rc is None: - return 0 # Correct "cancel" semantics? - err = None - try: - rc = int(rc) - except ValueError: - err = "Please enter an integer" - if not err and minvalue is not None and rc < minvalue: - err = "Please enter an integer greater then or equal to %s" % ( - minvalue, - ) - if not err and maxvalue is not None and rc > maxvalue: - err = "Please enter an integer less then or equal to %s" % (maxvalue,) - if err: - win32ui.MessageBox(err, caption, win32con.MB_OK) - continue - return rc - - def askyesno(self, caption, prompt, parent=None): - return win32ui.MessageBox(prompt, caption, win32con.MB_YESNO) == win32con.IDYES - - ###################################################################### - # The IDLE "Virtual Text Widget" methods that are exposed to the IDLE extensions. - # - - # Is character at text_index in a Python string? Return 0 for - # "guaranteed no", true for anything else. 
- def is_char_in_string(self, text_index): - # A helper for the code analyser - we need internal knowledge of - # the colorizer to get this information - # This assumes the colorizer has got to this point! - text_index = self.text._getoffset(text_index) - c = self.text.edit._GetColorizer() - if c and c.GetStringStyle(text_index) is None: - return 0 - return 1 - - # If a selection is defined in the text widget, return - # (start, end) as Tkinter text indices, otherwise return - # (None, None) - def get_selection_indices(self): - try: - first = self.text.index("sel.first") - last = self.text.index("sel.last") - return first, last - except TextError: - return None, None - - def set_tabwidth(self, width): - self.edit.SCISetTabWidth(width) - - def get_tabwidth(self): - return self.edit.GetTabWidth() - - -# A class providing the generic "Call Tips" interface -class CallTips: - def __init__(self, edit): - self.edit = edit - - def showtip(self, tip_text): - self.edit.SCICallTipShow(tip_text) - - def hidetip(self): - self.edit.SCICallTipCancel() - - -######################################## -# -# Helpers for the TkText emulation. -def TkOffsetToIndex(offset, edit): - lineoff = 0 - # May be 1 > actual end if we pretended there was a trailing '\n' - offset = min(offset, edit.GetTextLength()) - line = edit.LineFromChar(offset) - lineIndex = edit.LineIndex(line) - return "%d.%d" % (line + 1, offset - lineIndex) - - -def _NextTok(str, pos): - # Returns (token, endPos) - end = len(str) - if pos >= end: - return None, 0 - while pos < end and str[pos] in string.whitespace: - pos = pos + 1 - # Special case for +- - if str[pos] in "+-": - return str[pos], pos + 1 - # Digits also a special case. 
- endPos = pos - while endPos < end and str[endPos] in string.digits + ".": - endPos = endPos + 1 - if pos != endPos: - return str[pos:endPos], endPos - endPos = pos - while endPos < end and str[endPos] not in string.whitespace + string.digits + "+-": - endPos = endPos + 1 - if pos != endPos: - return str[pos:endPos], endPos - return None, 0 - - -def TkIndexToOffset(bm, edit, marks): - base, nextTokPos = _NextTok(bm, 0) - if base is None: - raise ValueError("Empty bookmark ID!") - if base.find(".") > 0: - try: - line, col = base.split(".", 2) - if col == "first" or col == "last": - # Tag name - if line != "sel": - raise ValueError("Tags arent here!") - sel = edit.GetSel() - if sel[0] == sel[1]: - raise EmptyRange - if col == "first": - pos = sel[0] - else: - pos = sel[1] - else: - # Lines are 1 based for tkinter - line = int(line) - 1 - if line > edit.GetLineCount(): - pos = edit.GetTextLength() + 1 - else: - pos = edit.LineIndex(line) - if pos == -1: - pos = edit.GetTextLength() - pos = pos + int(col) - except (ValueError, IndexError): - raise ValueError("Unexpected literal in '%s'" % base) - elif base == "insert": - pos = edit.GetSel()[0] - elif base == "end": - pos = edit.GetTextLength() - # Pretend there is a trailing '\n' if necessary - if pos and edit.SCIGetCharAt(pos - 1) != "\n": - pos = pos + 1 - else: - try: - pos = marks[base] - except KeyError: - raise ValueError("Unsupported base offset or undefined mark '%s'" % base) - - while 1: - word, nextTokPos = _NextTok(bm, nextTokPos) - if word is None: - break - if word in ("+", "-"): - num, nextTokPos = _NextTok(bm, nextTokPos) - if num is None: - raise ValueError("+/- operator needs 2 args") - what, nextTokPos = _NextTok(bm, nextTokPos) - if what is None: - raise ValueError("+/- operator needs 2 args") - if what[0] != "c": - raise ValueError("+/- only supports chars") - if word == "+": - pos = pos + int(num) - else: - pos = pos - int(num) - elif word == "wordstart": - while pos > 0 and edit.SCIGetCharAt(pos 
- 1) in wordchars: - pos = pos - 1 - elif word == "wordend": - end = edit.GetTextLength() - while pos < end and edit.SCIGetCharAt(pos) in wordchars: - pos = pos + 1 - elif word == "linestart": - while pos > 0 and edit.SCIGetCharAt(pos - 1) not in "\n\r": - pos = pos - 1 - elif word == "lineend": - end = edit.GetTextLength() - while pos < end and edit.SCIGetCharAt(pos) not in "\n\r": - pos = pos + 1 - else: - raise ValueError("Unsupported relative offset '%s'" % word) - return max(pos, 0) # Tkinter is tollerant of -ve indexes - we aren't - - -# A class that resembles an IDLE (ie, a Tk) text widget. -# Construct with an edit object (eg, an editor view) -class TkText: - def __init__(self, edit): - self.calltips = None - self.edit = edit - self.marks = {} - - ## def __getattr__(self, attr): - ## if attr=="tk": return self # So text.tk.call works. - ## if attr=="master": return None # ditto! - ## raise AttributeError, attr - ## def __getitem__(self, item): - ## if item=="tabs": - ## size = self.edit.GetTabWidth() - ## if size==8: return "" # Tk default - ## return size # correct semantics? - ## elif item=="font": # Used for measurements we dont need to do! - ## return "Dont know the font" - ## raise IndexError, "Invalid index '%s'" % item - def make_calltip_window(self): - if self.calltips is None: - self.calltips = CallTips(self.edit) - return self.calltips - - def _getoffset(self, index): - return TkIndexToOffset(index, self.edit, self.marks) - - def _getindex(self, off): - return TkOffsetToIndex(off, self.edit) - - def _fix_indexes(self, start, end): - # first some magic to handle skipping over utf8 extended chars. - while start > 0 and ord(self.edit.SCIGetCharAt(start)) & 0xC0 == 0x80: - start -= 1 - while ( - end < self.edit.GetTextLength() - and ord(self.edit.SCIGetCharAt(end)) & 0xC0 == 0x80 - ): - end += 1 - # now handling fixing \r\n->\n disparities... 
- if ( - start > 0 - and self.edit.SCIGetCharAt(start) == "\n" - and self.edit.SCIGetCharAt(start - 1) == "\r" - ): - start = start - 1 - if ( - end < self.edit.GetTextLength() - and self.edit.SCIGetCharAt(end - 1) == "\r" - and self.edit.SCIGetCharAt(end) == "\n" - ): - end = end + 1 - return start, end - - ## def get_tab_width(self): - ## return self.edit.GetTabWidth() - ## def call(self, *rest): - ## # Crap to support Tk measurement hacks for tab widths - ## if rest[0] != "font" or rest[1] != "measure": - ## raise ValueError, "Unsupport call type" - ## return len(rest[5]) - ## def configure(self, **kw): - ## for name, val in kw.items(): - ## if name=="tabs": - ## self.edit.SCISetTabWidth(int(val)) - ## else: - ## raise ValueError, "Unsupported configuration item %s" % kw - def bind(self, binding, handler): - self.edit.bindings.bind(binding, handler) - - def get(self, start, end=None): - try: - start = self._getoffset(start) - if end is None: - end = start + 1 - else: - end = self._getoffset(end) - except EmptyRange: - return "" - # Simple semantic checks to conform to the Tk text interface - if end <= start: - return "" - max = self.edit.GetTextLength() - checkEnd = 0 - if end > max: - end = max - checkEnd = 1 - start, end = self._fix_indexes(start, end) - ret = self.edit.GetTextRange(start, end) - # pretend a trailing '\n' exists if necessary. 
- if checkEnd and (not ret or ret[-1] != "\n"): - ret = ret + "\n" - return ret.replace("\r", "") - - def index(self, spec): - try: - return self._getindex(self._getoffset(spec)) - except EmptyRange: - return "" - - def insert(self, pos, text): - try: - pos = self._getoffset(pos) - except EmptyRange: - raise TextError("Empty range") - self.edit.SetSel((pos, pos)) - # IDLE only deals with "\n" - we will be nicer - - bits = text.split("\n") - self.edit.SCIAddText(bits[0]) - for bit in bits[1:]: - self.edit.SCINewline() - self.edit.SCIAddText(bit) - - def delete(self, start, end=None): - try: - start = self._getoffset(start) - if end is not None: - end = self._getoffset(end) - except EmptyRange: - raise TextError("Empty range") - # If end is specified and == start, then we must delete nothing. - if start == end: - return - # If end is not specified, delete one char - if end is None: - end = start + 1 - else: - # Tk says not to delete in this case, but our control would. - if end < start: - return - if start == self.edit.GetTextLength(): - return # Nothing to delete. - old = self.edit.GetSel()[0] # Lose a selection - # Hack for partial '\r\n' and UTF-8 char removal - start, end = self._fix_indexes(start, end) - self.edit.SetSel((start, end)) - self.edit.Clear() - if old >= start and old < end: - old = start - elif old >= end: - old = old - (end - start) - self.edit.SetSel(old) - - def bell(self): - win32api.MessageBeep() - - def see(self, pos): - # Most commands we use in Scintilla actually force the selection - # to be seen, making this unnecessary. 
- pass - - def mark_set(self, name, pos): - try: - pos = self._getoffset(pos) - except EmptyRange: - raise TextError("Empty range '%s'" % pos) - if name == "insert": - self.edit.SetSel(pos) - else: - self.marks[name] = pos - - def tag_add(self, name, start, end): - if name != "sel": - raise ValueError("Only sel tag is supported") - try: - start = self._getoffset(start) - end = self._getoffset(end) - except EmptyRange: - raise TextError("Empty range") - self.edit.SetSel(start, end) - - def tag_remove(self, name, start, end): - if name != "sel" or start != "1.0" or end != "end": - raise ValueError("Cant remove this tag") - # Turn the sel into a cursor - self.edit.SetSel(self.edit.GetSel()[0]) - - def compare(self, i1, op, i2): - try: - i1 = self._getoffset(i1) - except EmptyRange: - i1 = "" - try: - i2 = self._getoffset(i2) - except EmptyRange: - i2 = "" - return eval("%d%s%d" % (i1, op, i2)) - - def undo_block_start(self): - self.edit.SCIBeginUndoAction() - - def undo_block_stop(self): - self.edit.SCIEndUndoAction() - - -###################################################################### -# -# Test related code. 
-# -###################################################################### -def TestCheck(index, edit, expected=None): - rc = TkIndexToOffset(index, edit, {}) - if rc != expected: - print("ERROR: Index", index, ", expected", expected, "but got", rc) - - -def TestGet(fr, to, t, expected): - got = t.get(fr, to) - if got != expected: - print( - "ERROR: get(%s, %s) expected %s, but got %s" - % (repr(fr), repr(to), repr(expected), repr(got)) - ) - - -def test(): - import pywin.framework.editor - - d = pywin.framework.editor.editorTemplate.OpenDocumentFile(None) - e = d.GetFirstView() - t = TkText(e) - e.SCIAddText("hi there how\nare you today\r\nI hope you are well") - e.SetSel((4, 4)) - - skip = """ - TestCheck("insert", e, 4) - TestCheck("insert wordstart", e, 3) - TestCheck("insert wordend", e, 8) - TestCheck("insert linestart", e, 0) - TestCheck("insert lineend", e, 12) - TestCheck("insert + 4 chars", e, 8) - TestCheck("insert +4c", e, 8) - TestCheck("insert - 2 chars", e, 2) - TestCheck("insert -2c", e, 2) - TestCheck("insert-2c", e, 2) - TestCheck("insert-2 c", e, 2) - TestCheck("insert- 2c", e, 2) - TestCheck("1.1", e, 1) - TestCheck("1.0", e, 0) - TestCheck("2.0", e, 13) - try: - TestCheck("sel.first", e, 0) - print "*** sel.first worked with an empty selection" - except TextError: - pass - e.SetSel((4,5)) - TestCheck("sel.first- 2c", e, 2) - TestCheck("sel.last- 2c", e, 3) - """ - # Check EOL semantics - e.SetSel((4, 4)) - TestGet("insert lineend", "insert lineend +1c", t, "\n") - e.SetSel((20, 20)) - TestGet("insert lineend", "insert lineend +1c", t, "\n") - e.SetSel((35, 35)) - TestGet("insert lineend", "insert lineend +1c", t, "\n") - - -class IDLEWrapper: - def __init__(self, control): - self.text = control - - -def IDLETest(extension): - import os - import sys - - modname = "pywin.idle." + extension - __import__(modname) - mod = sys.modules[modname] - mod.TclError = TextError - klass = getattr(mod, extension) - - # Create a new Scintilla Window. 
- import pywin.framework.editor - - d = pywin.framework.editor.editorTemplate.OpenDocumentFile(None) - v = d.GetFirstView() - fname = os.path.splitext(__file__)[0] + ".py" - v.SCIAddText(open(fname).read()) - d.SetModifiedFlag(0) - r = klass(IDLEWrapper(TkText(v))) - return r - - -if __name__ == "__main__": - test() diff --git a/lib/pythonwin/pywin/scintilla/__init__.py b/lib/pythonwin/pywin/scintilla/__init__.py deleted file mode 100644 index 0981b622..00000000 --- a/lib/pythonwin/pywin/scintilla/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# package init. diff --git a/lib/pythonwin/pywin/scintilla/bindings.py b/lib/pythonwin/pywin/scintilla/bindings.py deleted file mode 100644 index 785bd42a..00000000 --- a/lib/pythonwin/pywin/scintilla/bindings.py +++ /dev/null @@ -1,178 +0,0 @@ -import traceback - -import win32api -import win32con -import win32ui - -from . import IDLEenvironment, keycodes - -HANDLER_ARGS_GUESS = 0 -HANDLER_ARGS_NATIVE = 1 -HANDLER_ARGS_IDLE = 2 -HANDLER_ARGS_EXTENSION = 3 - -next_id = 5000 - -event_to_commands = {} # dict of integer IDs to event names. -command_to_events = {} # dict of event names to int IDs - - -def assign_command_id(event, id=0): - global next_id - if id == 0: - id = event_to_commands.get(event, 0) - if id == 0: - id = next_id - next_id = next_id + 1 - # Only map the ones we allocated - specified ones are assumed to have a handler - command_to_events[id] = event - event_to_commands[event] = id - return id - - -class SendCommandHandler: - def __init__(self, cmd): - self.cmd = cmd - - def __call__(self, *args): - win32ui.GetMainFrame().SendMessage(win32con.WM_COMMAND, self.cmd) - - -class Binding: - def __init__(self, handler, handler_args_type): - self.handler = handler - self.handler_args_type = handler_args_type - - -class BindingsManager: - def __init__(self, parent_view): - self.parent_view = parent_view - self.bindings = {} # dict of Binding instances. 
- self.keymap = {} - - def prepare_configure(self): - self.keymap = {} - - def complete_configure(self): - for id in command_to_events.keys(): - self.parent_view.HookCommand(self._OnCommand, id) - - def close(self): - self.parent_view = self.bindings = self.keymap = None - - def report_error(self, problem): - try: - win32ui.SetStatusText(problem, 1) - except win32ui.error: - # No status bar! - print(problem) - - def update_keymap(self, keymap): - self.keymap.update(keymap) - - def bind(self, event, handler, handler_args_type=HANDLER_ARGS_GUESS, cid=0): - if handler is None: - handler = SendCommandHandler(cid) - self.bindings[event] = self._new_binding(handler, handler_args_type) - self.bind_command(event, cid) - - def bind_command(self, event, id=0): - "Binds an event to a Windows control/command ID" - id = assign_command_id(event, id) - return id - - def get_command_id(self, event): - id = event_to_commands.get(event) - if id is None: - # See if we even have an event of that name!? - if event not in self.bindings: - return None - id = self.bind_command(event) - return id - - def _OnCommand(self, id, code): - event = command_to_events.get(id) - if event is None: - self.report_error("No event associated with event ID %d" % id) - return 1 - return self.fire(event) - - def _new_binding(self, event, handler_args_type): - return Binding(event, handler_args_type) - - def _get_IDLE_handler(self, ext, handler): - try: - instance = self.parent_view.idle.IDLEExtension(ext) - name = handler.replace("-", "_") + "_event" - return getattr(instance, name) - except (ImportError, AttributeError): - msg = "Can not find event '%s' in IDLE extension '%s'" % (handler, ext) - self.report_error(msg) - return None - - def fire(self, event, event_param=None): - # Fire the specified event. Result is native Pythonwin result - # (ie, 1==pass one, 0 or None==handled) - - # First look up the event directly - if there, we are set. 
- binding = self.bindings.get(event) - if binding is None: - # If possible, find it! - # A native method name - handler = getattr(self.parent_view, event + "Event", None) - if handler is None: - # Can't decide if I should report an error?? - self.report_error("The event name '%s' can not be found." % event) - # Either way, just let the default handlers grab it. - return 1 - binding = self._new_binding(handler, HANDLER_ARGS_NATIVE) - # Cache it. - self.bindings[event] = binding - - handler_args_type = binding.handler_args_type - # Now actually fire it. - if handler_args_type == HANDLER_ARGS_GUESS: - # Can't be native, as natives are never added with "guess". - # Must be extension or IDLE. - if event[0] == "<": - handler_args_type = HANDLER_ARGS_IDLE - else: - handler_args_type = HANDLER_ARGS_EXTENSION - try: - if handler_args_type == HANDLER_ARGS_EXTENSION: - args = self.parent_view.idle, event_param - else: - args = (event_param,) - rc = binding.handler(*args) - if handler_args_type == HANDLER_ARGS_IDLE: - # Convert to our return code. - if rc in (None, "break"): - rc = 0 - else: - rc = 1 - except: - message = "Firing event '%s' failed." % event - print(message) - traceback.print_exc() - self.report_error(message) - rc = 1 # Let any default handlers have a go! - return rc - - def fire_key_event(self, msg): - key = msg[2] - keyState = 0 - if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000: - keyState = ( - keyState | win32con.RIGHT_CTRL_PRESSED | win32con.LEFT_CTRL_PRESSED - ) - if win32api.GetKeyState(win32con.VK_SHIFT) & 0x8000: - keyState = keyState | win32con.SHIFT_PRESSED - if win32api.GetKeyState(win32con.VK_MENU) & 0x8000: - keyState = keyState | win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED - keyinfo = key, keyState - # Special hacks for the dead-char key on non-US keyboards. 
- # (XXX - which do not work :-( - event = self.keymap.get(keyinfo) - if event is None: - return 1 - return self.fire(event, None) diff --git a/lib/pythonwin/pywin/scintilla/config.py b/lib/pythonwin/pywin/scintilla/config.py deleted file mode 100644 index 9a1b210e..00000000 --- a/lib/pythonwin/pywin/scintilla/config.py +++ /dev/null @@ -1,365 +0,0 @@ -# config.py - deals with loading configuration information. - -# Loads config data from a .cfg file. Also caches the compiled -# data back into a .cfc file. - -# If you are wondering how to avoid needing .cfg files (eg, -# if you are freezing Pythonwin etc) I suggest you create a -# .py file, and put the config info in a docstring. Then -# pass a CStringIO file (rather than a filename) to the -# config manager. -import glob -import importlib.util -import marshal -import os -import stat -import sys -import traceback -import types - -import pywin -import win32api - -from . import keycodes - -debugging = 0 -if debugging: - import win32traceutil # Some trace statements fire before the interactive window is open. 
- - def trace(*args): - sys.stderr.write(" ".join(map(str, args)) + "\n") - -else: - trace = lambda *args: None - -compiled_config_version = 3 - - -def split_line(line, lineno): - comment_pos = line.find("#") - if comment_pos >= 0: - line = line[:comment_pos] - sep_pos = line.rfind("=") - if sep_pos == -1: - if line.strip(): - print("Warning: Line %d: %s is an invalid entry" % (lineno, repr(line))) - return None, None - return "", "" - return line[:sep_pos].strip(), line[sep_pos + 1 :].strip() - - -def get_section_header(line): - # Returns the section if the line is a section header, else None - if line[0] == "[": - end = line.find("]") - if end == -1: - end = len(line) - rc = line[1:end].lower() - try: - i = rc.index(":") - return rc[:i], rc[i + 1 :] - except ValueError: - return rc, "" - return None, None - - -def find_config_file(f): - return os.path.join(pywin.__path__[0], f + ".cfg") - - -def find_config_files(): - return [ - os.path.split(x)[1] - for x in [ - os.path.splitext(x)[0] - for x in glob.glob(os.path.join(pywin.__path__[0], "*.cfg")) - ] - ] - - -class ConfigManager: - def __init__(self, f): - self.filename = "unknown" - self.last_error = None - self.key_to_events = {} - b_close = False - if hasattr(f, "readline"): - fp = f - self.filename = "" - compiled_name = None - else: - try: - f = find_config_file(f) - src_stat = os.stat(f) - except os.error: - self.report_error("Config file '%s' not found" % f) - return - self.filename = f - self.basename = os.path.basename(f) - trace("Loading configuration", self.basename) - compiled_name = os.path.splitext(f)[0] + ".cfc" - try: - cf = open(compiled_name, "rb") - try: - ver = marshal.load(cf) - ok = compiled_config_version == ver - if ok: - kblayoutname = marshal.load(cf) - magic = marshal.load(cf) - size = marshal.load(cf) - mtime = marshal.load(cf) - if ( - magic == importlib.util.MAGIC_NUMBER - and win32api.GetKeyboardLayoutName() == kblayoutname - and src_stat[stat.ST_MTIME] == mtime - and 
src_stat[stat.ST_SIZE] == size - ): - self.cache = marshal.load(cf) - trace("Configuration loaded cached", compiled_name) - return # We are ready to roll! - finally: - cf.close() - except (os.error, IOError, EOFError): - pass - fp = open(f) - b_close = True - self.cache = {} - lineno = 1 - line = fp.readline() - while line: - # Skip to the next section (maybe already there!) - section, subsection = get_section_header(line) - while line and section is None: - line = fp.readline() - if not line: - break - lineno = lineno + 1 - section, subsection = get_section_header(line) - if not line: - break - - if section == "keys": - line, lineno = self._load_keys(subsection, fp, lineno) - elif section == "extensions": - line, lineno = self._load_extensions(subsection, fp, lineno) - elif section == "idle extensions": - line, lineno = self._load_idle_extensions(subsection, fp, lineno) - elif section == "general": - line, lineno = self._load_general(subsection, fp, lineno) - else: - self.report_error( - "Unrecognised section header '%s:%s'" % (section, subsection) - ) - line = fp.readline() - lineno = lineno + 1 - if b_close: - fp.close() - # Check critical data. - if not self.cache.get("keys"): - self.report_error("No keyboard definitions were loaded") - if not self.last_error and compiled_name: - try: - cf = open(compiled_name, "wb") - marshal.dump(compiled_config_version, cf) - marshal.dump(win32api.GetKeyboardLayoutName(), cf) - marshal.dump(importlib.util.MAGIC_NUMBER, cf) - marshal.dump(src_stat[stat.ST_SIZE], cf) - marshal.dump(src_stat[stat.ST_MTIME], cf) - marshal.dump(self.cache, cf) - cf.close() - except (IOError, EOFError): - pass # Ignore errors - may be read only. - - def configure(self, editor, subsections=None): - # Execute the extension code, and find any events. - # First, we "recursively" connect any we are based on. 
- if subsections is None: - subsections = [] - subsections = [""] + subsections - general = self.get_data("general") - if general: - parents = general.get("based on", []) - for parent in parents: - trace("Configuration based on", parent, "- loading.") - parent = self.__class__(parent) - parent.configure(editor, subsections) - if parent.last_error: - self.report_error(parent.last_error) - - bindings = editor.bindings - codeob = self.get_data("extension code") - if codeob is not None: - ns = {} - try: - exec(codeob, ns) - except: - traceback.print_exc() - self.report_error("Executing extension code failed") - ns = None - if ns: - num = 0 - for name, func in list(ns.items()): - if type(func) == types.FunctionType and name[:1] != "_": - bindings.bind(name, func) - num = num + 1 - trace("Configuration Extension code loaded", num, "events") - # Load the idle extensions - for subsection in subsections: - for ext in self.get_data("idle extensions", {}).get(subsection, []): - try: - editor.idle.IDLEExtension(ext) - trace("Loaded IDLE extension", ext) - except: - self.report_error("Can not load the IDLE extension '%s'" % ext) - - # Now bind up the key-map (remembering a reverse map - subsection_keymap = self.get_data("keys") - num_bound = 0 - for subsection in subsections: - keymap = subsection_keymap.get(subsection, {}) - bindings.update_keymap(keymap) - num_bound = num_bound + len(keymap) - trace("Configuration bound", num_bound, "keys") - - def get_key_binding(self, event, subsections=None): - if subsections is None: - subsections = [] - subsections = [""] + subsections - - subsection_keymap = self.get_data("keys") - for subsection in subsections: - map = self.key_to_events.get(subsection) - if map is None: # Build it - map = {} - keymap = subsection_keymap.get(subsection, {}) - for key_info, map_event in list(keymap.items()): - map[map_event] = key_info - self.key_to_events[subsection] = map - - info = map.get(event) - if info is not None: - return 
keycodes.make_key_name(info[0], info[1]) - return None - - def report_error(self, msg): - self.last_error = msg - print("Error in %s: %s" % (self.filename, msg)) - - def report_warning(self, msg): - print("Warning in %s: %s" % (self.filename, msg)) - - def _readline(self, fp, lineno, bStripComments=1): - line = fp.readline() - lineno = lineno + 1 - if line: - bBreak = ( - get_section_header(line)[0] is not None - ) # A new section is starting - if bStripComments and not bBreak: - pos = line.find("#") - if pos >= 0: - line = line[:pos] + "\n" - else: - bBreak = 1 - return line, lineno, bBreak - - def get_data(self, name, default=None): - return self.cache.get(name, default) - - def _save_data(self, name, data): - self.cache[name] = data - return data - - def _load_general(self, sub_section, fp, lineno): - map = {} - while 1: - line, lineno, bBreak = self._readline(fp, lineno) - if bBreak: - break - - key, val = split_line(line, lineno) - if not key: - continue - key = key.lower() - l = map.get(key, []) - l.append(val) - map[key] = l - self._save_data("general", map) - return line, lineno - - def _load_keys(self, sub_section, fp, lineno): - # Builds a nested dictionary of - # (scancode, flags) = event_name - main_map = self.get_data("keys", {}) - map = main_map.get(sub_section, {}) - while 1: - line, lineno, bBreak = self._readline(fp, lineno) - if bBreak: - break - - key, event = split_line(line, lineno) - if not event: - continue - sc, flag = keycodes.parse_key_name(key) - if sc is None: - self.report_warning("Line %d: Invalid key name '%s'" % (lineno, key)) - else: - map[sc, flag] = event - main_map[sub_section] = map - self._save_data("keys", main_map) - return line, lineno - - def _load_extensions(self, sub_section, fp, lineno): - start_lineno = lineno - lines = [] - while 1: - line, lineno, bBreak = self._readline(fp, lineno, 0) - if bBreak: - break - lines.append(line) - try: - c = compile( - "\n" * start_lineno + "".join(lines), # produces correct tracebacks 
- self.filename, - "exec", - ) - self._save_data("extension code", c) - except SyntaxError as details: - errlineno = details.lineno + start_lineno - # Should handle syntax errors better here, and offset the lineno. - self.report_error( - "Compiling extension code failed:\r\nFile: %s\r\nLine %d\r\n%s" - % (details.filename, errlineno, details.msg) - ) - return line, lineno - - def _load_idle_extensions(self, sub_section, fp, lineno): - extension_map = self.get_data("idle extensions") - if extension_map is None: - extension_map = {} - extensions = [] - while 1: - line, lineno, bBreak = self._readline(fp, lineno) - if bBreak: - break - line = line.strip() - if line: - extensions.append(line) - extension_map[sub_section] = extensions - self._save_data("idle extensions", extension_map) - return line, lineno - - -def test(): - import time - - start = time.clock() - f = "default" - cm = ConfigManager(f) - map = cm.get_data("keys") - took = time.clock() - start - print("Loaded %s items in %.4f secs" % (len(map), took)) - - -if __name__ == "__main__": - test() diff --git a/lib/pythonwin/pywin/scintilla/configui.py b/lib/pythonwin/pywin/scintilla/configui.py deleted file mode 100644 index 8b6157b6..00000000 --- a/lib/pythonwin/pywin/scintilla/configui.py +++ /dev/null @@ -1,292 +0,0 @@ -import win32api -import win32con -import win32ui -from pywin.mfc import dialog - -# Used to indicate that style should use default color -from win32con import CLR_INVALID - -from . 
import scintillacon - -###################################################### -# Property Page for syntax formatting options - -# The standard 16 color VGA palette should always be possible -paletteVGA = ( - ("Black", win32api.RGB(0, 0, 0)), - ("Navy", win32api.RGB(0, 0, 128)), - ("Green", win32api.RGB(0, 128, 0)), - ("Cyan", win32api.RGB(0, 128, 128)), - ("Maroon", win32api.RGB(128, 0, 0)), - ("Purple", win32api.RGB(128, 0, 128)), - ("Olive", win32api.RGB(128, 128, 0)), - ("Gray", win32api.RGB(128, 128, 128)), - ("Silver", win32api.RGB(192, 192, 192)), - ("Blue", win32api.RGB(0, 0, 255)), - ("Lime", win32api.RGB(0, 255, 0)), - ("Aqua", win32api.RGB(0, 255, 255)), - ("Red", win32api.RGB(255, 0, 0)), - ("Fuchsia", win32api.RGB(255, 0, 255)), - ("Yellow", win32api.RGB(255, 255, 0)), - ("White", win32api.RGB(255, 255, 255)), - # and a few others will generally be possible. - ("DarkGrey", win32api.RGB(64, 64, 64)), - ("PurpleBlue", win32api.RGB(64, 64, 192)), - ("DarkGreen", win32api.RGB(0, 96, 0)), - ("DarkOlive", win32api.RGB(128, 128, 64)), - ("MediumBlue", win32api.RGB(0, 0, 192)), - ("DarkNavy", win32api.RGB(0, 0, 96)), - ("Magenta", win32api.RGB(96, 0, 96)), - ("OffWhite", win32api.RGB(255, 255, 220)), - ("LightPurple", win32api.RGB(220, 220, 255)), - ("", win32con.CLR_INVALID), -) - - -class ScintillaFormatPropertyPage(dialog.PropertyPage): - def __init__(self, scintillaClass=None, caption=0): - self.scintillaClass = scintillaClass - dialog.PropertyPage.__init__(self, win32ui.IDD_PP_FORMAT, caption=caption) - - def OnInitDialog(self): - try: - if self.scintillaClass is None: - from . 
import control - - sc = control.CScintillaEdit - else: - sc = self.scintillaClass - - self.scintilla = sc() - style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.ES_MULTILINE - # Convert the rect size - rect = self.MapDialogRect((5, 5, 120, 75)) - self.scintilla.CreateWindow(style, rect, self, 111) - self.HookNotify(self.OnBraceMatch, scintillacon.SCN_CHECKBRACE) - self.scintilla.HookKeyStroke(self.OnEsc, 27) - self.scintilla.SCISetViewWS(1) - self.pos_bstart = self.pos_bend = self.pos_bbad = 0 - - colorizer = self.scintilla._GetColorizer() - text = colorizer.GetSampleText() - items = text.split("|", 2) - pos = len(items[0]) - self.scintilla.SCIAddText("".join(items)) - self.scintilla.SetSel(pos, pos) - self.scintilla.ApplyFormattingStyles() - self.styles = self.scintilla._GetColorizer().styles - - self.cbo = self.GetDlgItem(win32ui.IDC_COMBO1) - for c in paletteVGA: - self.cbo.AddString(c[0]) - - self.cboBoldItalic = self.GetDlgItem(win32ui.IDC_COMBO2) - for item in ("Bold Italic", "Bold", "Italic", "Regular"): - self.cboBoldItalic.InsertString(0, item) - - self.butIsDefault = self.GetDlgItem(win32ui.IDC_CHECK1) - self.butIsDefaultBackground = self.GetDlgItem(win32ui.IDC_CHECK2) - self.listbox = self.GetDlgItem(win32ui.IDC_LIST1) - self.HookCommand(self.OnListCommand, win32ui.IDC_LIST1) - names = list(self.styles.keys()) - names.sort() - for name in names: - if self.styles[name].aliased is None: - self.listbox.AddString(name) - self.listbox.SetCurSel(0) - - idc = win32ui.IDC_RADIO1 - if not self.scintilla._GetColorizer().bUseFixed: - idc = win32ui.IDC_RADIO2 - self.GetDlgItem(idc).SetCheck(1) - self.UpdateUIForStyle(self.styles[names[0]]) - - self.scintilla.HookFormatter(self) - self.HookCommand(self.OnButDefaultFixedFont, win32ui.IDC_BUTTON1) - self.HookCommand(self.OnButDefaultPropFont, win32ui.IDC_BUTTON2) - self.HookCommand(self.OnButThisFont, win32ui.IDC_BUTTON3) - self.HookCommand(self.OnButUseDefaultFont, win32ui.IDC_CHECK1) - 
self.HookCommand(self.OnButThisBackground, win32ui.IDC_BUTTON4) - self.HookCommand(self.OnButUseDefaultBackground, win32ui.IDC_CHECK2) - self.HookCommand(self.OnStyleUIChanged, win32ui.IDC_COMBO1) - self.HookCommand(self.OnStyleUIChanged, win32ui.IDC_COMBO2) - self.HookCommand(self.OnButFixedOrDefault, win32ui.IDC_RADIO1) - self.HookCommand(self.OnButFixedOrDefault, win32ui.IDC_RADIO2) - except: - import traceback - - traceback.print_exc() - - def OnEsc(self, ch): - self.GetParent().EndDialog(win32con.IDCANCEL) - - def OnBraceMatch(self, std, extra): - import pywin.scintilla.view - - pywin.scintilla.view.DoBraceMatch(self.scintilla) - - def GetSelectedStyle(self): - return self.styles[self.listbox.GetText(self.listbox.GetCurSel())] - - def _DoButDefaultFont(self, extra_flags, attr): - baseFormat = getattr(self.scintilla._GetColorizer(), attr) - flags = ( - extra_flags - | win32con.CF_SCREENFONTS - | win32con.CF_EFFECTS - | win32con.CF_FORCEFONTEXIST - ) - d = win32ui.CreateFontDialog(baseFormat, flags, None, self) - if d.DoModal() == win32con.IDOK: - setattr(self.scintilla._GetColorizer(), attr, d.GetCharFormat()) - self.OnStyleUIChanged(0, win32con.BN_CLICKED) - - def OnButDefaultFixedFont(self, id, code): - if code == win32con.BN_CLICKED: - self._DoButDefaultFont(win32con.CF_FIXEDPITCHONLY, "baseFormatFixed") - return 1 - - def OnButDefaultPropFont(self, id, code): - if code == win32con.BN_CLICKED: - self._DoButDefaultFont(win32con.CF_SCALABLEONLY, "baseFormatProp") - return 1 - - def OnButFixedOrDefault(self, id, code): - if code == win32con.BN_CLICKED: - bUseFixed = id == win32ui.IDC_RADIO1 - self.GetDlgItem(win32ui.IDC_RADIO1).GetCheck() != 0 - self.scintilla._GetColorizer().bUseFixed = bUseFixed - self.scintilla.ApplyFormattingStyles(0) - return 1 - - def OnButThisFont(self, id, code): - if code == win32con.BN_CLICKED: - flags = ( - win32con.CF_SCREENFONTS - | win32con.CF_EFFECTS - | win32con.CF_FORCEFONTEXIST - ) - style = self.GetSelectedStyle() - # If the 
selected style is based on the default, we need to apply - # the default to it. - def_format = self.scintilla._GetColorizer().GetDefaultFormat() - format = style.GetCompleteFormat(def_format) - d = win32ui.CreateFontDialog(format, flags, None, self) - if d.DoModal() == win32con.IDOK: - style.format = d.GetCharFormat() - self.scintilla.ApplyFormattingStyles(0) - return 1 - - def OnButUseDefaultFont(self, id, code): - if code == win32con.BN_CLICKED: - isDef = self.butIsDefault.GetCheck() - self.GetDlgItem(win32ui.IDC_BUTTON3).EnableWindow(not isDef) - if isDef: # Being reset to the default font. - style = self.GetSelectedStyle() - style.ForceAgainstDefault() - self.UpdateUIForStyle(style) - self.scintilla.ApplyFormattingStyles(0) - else: - # User wants to override default - - # do nothing! - pass - - def OnButThisBackground(self, id, code): - if code == win32con.BN_CLICKED: - style = self.GetSelectedStyle() - bg = win32api.RGB(0xFF, 0xFF, 0xFF) - if style.background != CLR_INVALID: - bg = style.background - d = win32ui.CreateColorDialog(bg, 0, self) - if d.DoModal() == win32con.IDOK: - style.background = d.GetColor() - self.scintilla.ApplyFormattingStyles(0) - return 1 - - def OnButUseDefaultBackground(self, id, code): - if code == win32con.BN_CLICKED: - isDef = self.butIsDefaultBackground.GetCheck() - self.GetDlgItem(win32ui.IDC_BUTTON4).EnableWindow(not isDef) - if isDef: # Being reset to the default color - style = self.GetSelectedStyle() - style.background = style.default_background - self.UpdateUIForStyle(style) - self.scintilla.ApplyFormattingStyles(0) - else: - # User wants to override default - - # do nothing! 
- pass - - def OnListCommand(self, id, code): - if code == win32con.LBN_SELCHANGE: - style = self.GetSelectedStyle() - self.UpdateUIForStyle(style) - return 1 - - def UpdateUIForStyle(self, style): - format = style.format - sel = 0 - for c in paletteVGA: - if format[4] == c[1]: - # print "Style", style.name, "is", c[0] - break - sel = sel + 1 - else: - sel = -1 - self.cbo.SetCurSel(sel) - self.butIsDefault.SetCheck(style.IsBasedOnDefault()) - self.GetDlgItem(win32ui.IDC_BUTTON3).EnableWindow(not style.IsBasedOnDefault()) - - self.butIsDefaultBackground.SetCheck( - style.background == style.default_background - ) - self.GetDlgItem(win32ui.IDC_BUTTON4).EnableWindow( - style.background != style.default_background - ) - - bold = format[1] & win32con.CFE_BOLD != 0 - italic = format[1] & win32con.CFE_ITALIC != 0 - self.cboBoldItalic.SetCurSel(bold * 2 + italic) - - def OnStyleUIChanged(self, id, code): - if code in [win32con.BN_CLICKED, win32con.CBN_SELCHANGE]: - style = self.GetSelectedStyle() - self.ApplyUIFormatToStyle(style) - self.scintilla.ApplyFormattingStyles(0) - return 0 - return 1 - - def ApplyUIFormatToStyle(self, style): - format = style.format - color = paletteVGA[self.cbo.GetCurSel()] - effect = 0 - sel = self.cboBoldItalic.GetCurSel() - if sel == 0: - effect = 0 - elif sel == 1: - effect = win32con.CFE_ITALIC - elif sel == 2: - effect = win32con.CFE_BOLD - else: - effect = win32con.CFE_BOLD | win32con.CFE_ITALIC - maskFlags = ( - format[0] | win32con.CFM_COLOR | win32con.CFM_BOLD | win32con.CFM_ITALIC - ) - style.format = ( - maskFlags, - effect, - style.format[2], - style.format[3], - color[1], - ) + style.format[5:] - - def OnOK(self): - self.scintilla._GetColorizer().SavePreferences() - return 1 - - -def test(): - page = ColorEditorPropertyPage() - sheet = pywin.mfc.dialog.PropertySheet("Test") - sheet.AddPage(page) - sheet.CreateWindow() diff --git a/lib/pythonwin/pywin/scintilla/control.py b/lib/pythonwin/pywin/scintilla/control.py deleted file mode 
100644 index 460b1686..00000000 --- a/lib/pythonwin/pywin/scintilla/control.py +++ /dev/null @@ -1,569 +0,0 @@ -# An Python interface to the Scintilla control. -# -# Exposes Python classes that allow you to use Scintilla as -# a "standard" MFC edit control (eg, control.GetTextLength(), control.GetSel() -# plus many Scintilla specific features (eg control.SCIAddStyledText()) - -import array -import os -import struct - -import win32api -import win32con -import win32ui -from pywin import default_scintilla_encoding -from pywin.mfc import window - -from . import scintillacon - -# Load Scintilla.dll to get access to the control. -# We expect to find this in the same directory as win32ui.pyd -dllid = None -if win32ui.debug: # If running _d version of Pythonwin... - try: - dllid = win32api.LoadLibrary( - os.path.join(os.path.split(win32ui.__file__)[0], "Scintilla_d.DLL") - ) - except ( - win32api.error - ): # Not there - we dont _need_ a debug ver, so ignore this error. - pass -if dllid is None: - try: - dllid = win32api.LoadLibrary( - os.path.join(os.path.split(win32ui.__file__)[0], "Scintilla.DLL") - ) - except win32api.error: - pass -if dllid is None: - # Still not there - lets see if Windows can find it by searching? 
- dllid = win32api.LoadLibrary("Scintilla.DLL") - -# null_byte is str in py2k, bytes on py3k -null_byte = "\0".encode("ascii") - -## These are from Richedit.h - need to add to win32con or commctrl -EM_GETTEXTRANGE = 1099 -EM_EXLINEFROMCHAR = 1078 -EM_FINDTEXTEX = 1103 -EM_GETSELTEXT = 1086 -EM_EXSETSEL = win32con.WM_USER + 55 - - -class ScintillaNotification: - def __init__(self, **args): - self.__dict__.update(args) - - -class ScintillaControlInterface: - def SCIUnpackNotifyMessage(self, msg): - format = "iiiiPiiiPPiiii" - bytes = win32ui.GetBytes(msg, struct.calcsize(format)) - ( - position, - ch, - modifiers, - modificationType, - text_ptr, - length, - linesAdded, - msg, - wParam, - lParam, - line, - foldLevelNow, - foldLevelPrev, - margin, - ) = struct.unpack(format, bytes) - return ScintillaNotification( - position=position, - ch=ch, - modifiers=modifiers, - modificationType=modificationType, - text_ptr=text_ptr, - length=length, - linesAdded=linesAdded, - msg=msg, - wParam=wParam, - lParam=lParam, - line=line, - foldLevelNow=foldLevelNow, - foldLevelPrev=foldLevelPrev, - margin=margin, - ) - - def SCIAddText(self, text): - self.SendMessage( - scintillacon.SCI_ADDTEXT, text.encode(default_scintilla_encoding) - ) - - def SCIAddStyledText(self, text, style=None): - # If style is None, text is assumed to be a "native" Scintilla buffer. - # If style is specified, text is a normal string, and the style is - # assumed to apply to the entire string. - if style is not None: - text = list(map(lambda char, style=style: char + chr(style), text)) - text = "".join(text) - self.SendMessage( - scintillacon.SCI_ADDSTYLEDTEXT, text.encode(default_scintilla_encoding) - ) - - def SCIInsertText(self, text, pos=-1): - # SCIInsertText allows unicode or bytes - but if they are bytes, - # the caller must ensure it is encoded correctly. 
- if isinstance(text, str): - text = text.encode(default_scintilla_encoding) - self.SendScintilla(scintillacon.SCI_INSERTTEXT, pos, text + null_byte) - - def SCISetSavePoint(self): - self.SendScintilla(scintillacon.SCI_SETSAVEPOINT) - - def SCISetUndoCollection(self, collectFlag): - self.SendScintilla(scintillacon.SCI_SETUNDOCOLLECTION, collectFlag) - - def SCIBeginUndoAction(self): - self.SendScintilla(scintillacon.SCI_BEGINUNDOACTION) - - def SCIEndUndoAction(self): - self.SendScintilla(scintillacon.SCI_ENDUNDOACTION) - - def SCIGetCurrentPos(self): - return self.SendScintilla(scintillacon.SCI_GETCURRENTPOS) - - def SCIGetCharAt(self, pos): - # Must ensure char is unsigned! - return chr(self.SendScintilla(scintillacon.SCI_GETCHARAT, pos) & 0xFF) - - def SCIGotoLine(self, line): - self.SendScintilla(scintillacon.SCI_GOTOLINE, line) - - def SCIBraceMatch(self, pos, maxReStyle): - return self.SendScintilla(scintillacon.SCI_BRACEMATCH, pos, maxReStyle) - - def SCIBraceHighlight(self, pos, posOpposite): - return self.SendScintilla(scintillacon.SCI_BRACEHIGHLIGHT, pos, posOpposite) - - def SCIBraceBadHighlight(self, pos): - return self.SendScintilla(scintillacon.SCI_BRACEBADLIGHT, pos) - - #################################### - # Styling - # def SCIColourise(self, start=0, end=-1): - # NOTE - dependent on of we use builtin lexer, so handled below. 
- def SCIGetEndStyled(self): - return self.SendScintilla(scintillacon.SCI_GETENDSTYLED) - - def SCIStyleSetFore(self, num, v): - return self.SendScintilla(scintillacon.SCI_STYLESETFORE, num, v) - - def SCIStyleSetBack(self, num, v): - return self.SendScintilla(scintillacon.SCI_STYLESETBACK, num, v) - - def SCIStyleSetEOLFilled(self, num, v): - return self.SendScintilla(scintillacon.SCI_STYLESETEOLFILLED, num, v) - - def SCIStyleSetFont(self, num, name, characterset=0): - buff = (name + "\0").encode(default_scintilla_encoding) - self.SendScintilla(scintillacon.SCI_STYLESETFONT, num, buff) - self.SendScintilla(scintillacon.SCI_STYLESETCHARACTERSET, num, characterset) - - def SCIStyleSetBold(self, num, bBold): - self.SendScintilla(scintillacon.SCI_STYLESETBOLD, num, bBold) - - def SCIStyleSetItalic(self, num, bItalic): - self.SendScintilla(scintillacon.SCI_STYLESETITALIC, num, bItalic) - - def SCIStyleSetSize(self, num, size): - self.SendScintilla(scintillacon.SCI_STYLESETSIZE, num, size) - - def SCIGetViewWS(self): - return self.SendScintilla(scintillacon.SCI_GETVIEWWS) - - def SCISetViewWS(self, val): - self.SendScintilla(scintillacon.SCI_SETVIEWWS, not (val == 0)) - self.InvalidateRect() - - def SCISetIndentationGuides(self, val): - self.SendScintilla(scintillacon.SCI_SETINDENTATIONGUIDES, val) - - def SCIGetIndentationGuides(self): - return self.SendScintilla(scintillacon.SCI_GETINDENTATIONGUIDES) - - def SCISetIndent(self, val): - self.SendScintilla(scintillacon.SCI_SETINDENT, val) - - def SCIGetIndent(self, val): - return self.SendScintilla(scintillacon.SCI_GETINDENT) - - def SCIGetViewEOL(self): - return self.SendScintilla(scintillacon.SCI_GETVIEWEOL) - - def SCISetViewEOL(self, val): - self.SendScintilla(scintillacon.SCI_SETVIEWEOL, not (val == 0)) - self.InvalidateRect() - - def SCISetTabWidth(self, width): - self.SendScintilla(scintillacon.SCI_SETTABWIDTH, width, 0) - - def SCIStartStyling(self, pos, mask): - self.SendScintilla(scintillacon.SCI_STARTSTYLING, 
pos, mask) - - def SCISetStyling(self, pos, attr): - self.SendScintilla(scintillacon.SCI_SETSTYLING, pos, attr) - - def SCISetStylingEx(self, ray): # ray is an array. - address, length = ray.buffer_info() - self.SendScintilla(scintillacon.SCI_SETSTYLINGEX, length, address) - - def SCIGetStyleAt(self, pos): - return self.SendScintilla(scintillacon.SCI_GETSTYLEAT, pos) - - def SCISetMarginWidth(self, width): - self.SendScintilla(scintillacon.SCI_SETMARGINWIDTHN, 1, width) - - def SCISetMarginWidthN(self, n, width): - self.SendScintilla(scintillacon.SCI_SETMARGINWIDTHN, n, width) - - def SCISetFoldFlags(self, flags): - self.SendScintilla(scintillacon.SCI_SETFOLDFLAGS, flags) - - # Markers - def SCIMarkerDefineAll(self, markerNum, markerType, fore, back): - self.SCIMarkerDefine(markerNum, markerType) - self.SCIMarkerSetFore(markerNum, fore) - self.SCIMarkerSetBack(markerNum, back) - - def SCIMarkerDefine(self, markerNum, markerType): - self.SendScintilla(scintillacon.SCI_MARKERDEFINE, markerNum, markerType) - - def SCIMarkerSetFore(self, markerNum, fore): - self.SendScintilla(scintillacon.SCI_MARKERSETFORE, markerNum, fore) - - def SCIMarkerSetBack(self, markerNum, back): - self.SendScintilla(scintillacon.SCI_MARKERSETBACK, markerNum, back) - - def SCIMarkerAdd(self, lineNo, markerNum): - self.SendScintilla(scintillacon.SCI_MARKERADD, lineNo, markerNum) - - def SCIMarkerDelete(self, lineNo, markerNum): - self.SendScintilla(scintillacon.SCI_MARKERDELETE, lineNo, markerNum) - - def SCIMarkerDeleteAll(self, markerNum=-1): - self.SendScintilla(scintillacon.SCI_MARKERDELETEALL, markerNum) - - def SCIMarkerGet(self, lineNo): - return self.SendScintilla(scintillacon.SCI_MARKERGET, lineNo) - - def SCIMarkerNext(self, lineNo, markerNum): - return self.SendScintilla(scintillacon.SCI_MARKERNEXT, lineNo, markerNum) - - def SCICancel(self): - self.SendScintilla(scintillacon.SCI_CANCEL) - - # AutoComplete - def SCIAutoCShow(self, text): - if type(text) in [type([]), type(())]: - 
text = " ".join(text) - buff = (text + "\0").encode(default_scintilla_encoding) - return self.SendScintilla(scintillacon.SCI_AUTOCSHOW, 0, buff) - - def SCIAutoCCancel(self): - self.SendScintilla(scintillacon.SCI_AUTOCCANCEL) - - def SCIAutoCActive(self): - return self.SendScintilla(scintillacon.SCI_AUTOCACTIVE) - - def SCIAutoCComplete(self): - return self.SendScintilla(scintillacon.SCI_AUTOCCOMPLETE) - - def SCIAutoCStops(self, stops): - buff = (stops + "\0").encode(default_scintilla_encoding) - self.SendScintilla(scintillacon.SCI_AUTOCSTOPS, 0, buff) - - def SCIAutoCSetAutoHide(self, hide): - self.SendScintilla(scintillacon.SCI_AUTOCSETAUTOHIDE, hide) - - def SCIAutoCSetFillups(self, fillups): - self.SendScintilla(scintillacon.SCI_AUTOCSETFILLUPS, fillups) - - # Call tips - def SCICallTipShow(self, text, pos=-1): - if pos == -1: - pos = self.GetSel()[0] - buff = (text + "\0").encode(default_scintilla_encoding) - self.SendScintilla(scintillacon.SCI_CALLTIPSHOW, pos, buff) - - def SCICallTipCancel(self): - self.SendScintilla(scintillacon.SCI_CALLTIPCANCEL) - - def SCICallTipActive(self): - return self.SendScintilla(scintillacon.SCI_CALLTIPACTIVE) - - def SCICallTipPosStart(self): - return self.SendScintilla(scintillacon.SCI_CALLTIPPOSSTART) - - def SCINewline(self): - self.SendScintilla(scintillacon.SCI_NEWLINE) - - # Lexer etc - def SCISetKeywords(self, keywords, kw_list_no=0): - buff = (keywords + "\0").encode(default_scintilla_encoding) - self.SendScintilla(scintillacon.SCI_SETKEYWORDS, kw_list_no, buff) - - def SCISetProperty(self, name, value): - name_buff = array.array("b", (name + "\0").encode(default_scintilla_encoding)) - val_buff = array.array( - "b", (str(value) + "\0").encode(default_scintilla_encoding) - ) - address_name_buffer = name_buff.buffer_info()[0] - address_val_buffer = val_buff.buffer_info()[0] - self.SendScintilla( - scintillacon.SCI_SETPROPERTY, address_name_buffer, address_val_buffer - ) - - def SCISetStyleBits(self, nbits): - 
self.SendScintilla(scintillacon.SCI_SETSTYLEBITS, nbits) - - # Folding - def SCIGetFoldLevel(self, lineno): - return self.SendScintilla(scintillacon.SCI_GETFOLDLEVEL, lineno) - - def SCIToggleFold(self, lineno): - return self.SendScintilla(scintillacon.SCI_TOGGLEFOLD, lineno) - - def SCIEnsureVisible(self, lineno): - self.SendScintilla(scintillacon.SCI_ENSUREVISIBLE, lineno) - - def SCIGetFoldExpanded(self, lineno): - return self.SendScintilla(scintillacon.SCI_GETFOLDEXPANDED, lineno) - - # right edge - def SCISetEdgeColumn(self, edge): - self.SendScintilla(scintillacon.SCI_SETEDGECOLUMN, edge) - - def SCIGetEdgeColumn(self): - return self.SendScintilla(scintillacon.SCI_GETEDGECOLUMN) - - def SCISetEdgeMode(self, mode): - self.SendScintilla(scintillacon.SCI_SETEDGEMODE, mode) - - def SCIGetEdgeMode(self): - return self.SendScintilla(scintillacon.SCI_GETEDGEMODE) - - def SCISetEdgeColor(self, color): - self.SendScintilla(scintillacon.SCI_SETEDGECOLOUR, color) - - def SCIGetEdgeColor(self): - return self.SendScintilla(scintillacon.SCI_GETEDGECOLOR) - - # Multi-doc - def SCIGetDocPointer(self): - return self.SendScintilla(scintillacon.SCI_GETDOCPOINTER) - - def SCISetDocPointer(self, p): - return self.SendScintilla(scintillacon.SCI_SETDOCPOINTER, 0, p) - - def SCISetWrapMode(self, mode): - return self.SendScintilla(scintillacon.SCI_SETWRAPMODE, mode) - - def SCIGetWrapMode(self): - return self.SendScintilla(scintillacon.SCI_GETWRAPMODE) - - -class CScintillaEditInterface(ScintillaControlInterface): - def close(self): - self.colorizer = None - - def Clear(self): - self.SendScintilla(win32con.WM_CLEAR) - - def FindText(self, flags, range, findText): - """LPARAM for EM_FINDTEXTEX: - typedef struct _findtextex { - CHARRANGE chrg; - LPCTSTR lpstrText; - CHARRANGE chrgText;} FINDTEXTEX; - typedef struct _charrange { - LONG cpMin; - LONG cpMax;} CHARRANGE; - """ - findtextex_fmt = "llPll" - ## Scintilla does not handle unicode in EM_FINDTEXT msg (FINDTEXTEX struct) - 
txt_buff = (findText + "\0").encode(default_scintilla_encoding) - txt_array = array.array("b", txt_buff) - ft_buff = struct.pack( - findtextex_fmt, range[0], range[1], txt_array.buffer_info()[0], 0, 0 - ) - ft_array = array.array("b", ft_buff) - rc = self.SendScintilla(EM_FINDTEXTEX, flags, ft_array.buffer_info()[0]) - ftUnpacked = struct.unpack(findtextex_fmt, ft_array) - return rc, (ftUnpacked[3], ftUnpacked[4]) - - def GetSel(self): - currentPos = self.SendScintilla(scintillacon.SCI_GETCURRENTPOS) - anchorPos = self.SendScintilla(scintillacon.SCI_GETANCHOR) - if currentPos < anchorPos: - return (currentPos, anchorPos) - else: - return (anchorPos, currentPos) - return currentPos - - def GetSelText(self): - start, end = self.GetSel() - txtBuf = array.array("b", null_byte * (end - start + 1)) - addressTxtBuf = txtBuf.buffer_info()[0] - # EM_GETSELTEXT is documented as returning the number of chars - # not including the NULL, but scintilla includes the NULL. A - # quick glance at the scintilla impl doesn't make this - # obvious - the NULL is included in the 'selection' object - # and reflected in the length of that 'selection' object. - # I expect that is a bug in scintilla and may be fixed by now, - # but we just blindly assume that the last char is \0 and - # strip it. 
- self.SendScintilla(EM_GETSELTEXT, 0, addressTxtBuf) - return txtBuf.tobytes()[:-1].decode(default_scintilla_encoding) - - def SetSel(self, start=0, end=None): - if type(start) == type(()): - assert ( - end is None - ), "If you pass a point in the first param, the second must be None" - start, end = start - elif end is None: - end = start - if start < 0: - start = self.GetTextLength() - if end < 0: - end = self.GetTextLength() - assert start <= self.GetTextLength(), "The start postion is invalid (%d/%d)" % ( - start, - self.GetTextLength(), - ) - assert end <= self.GetTextLength(), "The end postion is invalid (%d/%d)" % ( - end, - self.GetTextLength(), - ) - cr = struct.pack("ll", start, end) - crBuff = array.array("b", cr) - addressCrBuff = crBuff.buffer_info()[0] - rc = self.SendScintilla(EM_EXSETSEL, 0, addressCrBuff) - - def GetLineCount(self): - return self.SendScintilla(win32con.EM_GETLINECOUNT) - - def LineFromChar(self, charPos=-1): - if charPos == -1: - charPos = self.GetSel()[0] - assert ( - charPos >= 0 and charPos <= self.GetTextLength() - ), "The charPos postion (%s) is invalid (max=%s)" % ( - charPos, - self.GetTextLength(), - ) - # return self.SendScintilla(EM_EXLINEFROMCHAR, charPos) - # EM_EXLINEFROMCHAR puts charPos in lParam, not wParam - return self.SendScintilla(EM_EXLINEFROMCHAR, 0, charPos) - - def LineIndex(self, line): - return self.SendScintilla(win32con.EM_LINEINDEX, line) - - def ScrollCaret(self): - return self.SendScintilla(win32con.EM_SCROLLCARET) - - def GetCurLineNumber(self): - return self.LineFromChar(self.SCIGetCurrentPos()) - - def GetTextLength(self): - return self.SendScintilla(scintillacon.SCI_GETTEXTLENGTH) - - def GetTextRange(self, start=0, end=-1, decode=True): - if end == -1: - end = self.SendScintilla(scintillacon.SCI_GETTEXTLENGTH) - assert end >= start, "Negative index requested (%d/%d)" % (start, end) - assert ( - start >= 0 and start <= self.GetTextLength() - ), "The start postion is invalid" - assert end >= 0 and 
end <= self.GetTextLength(), "The end postion is invalid" - initer = null_byte * (end - start + 1) - buff = array.array("b", initer) - addressBuffer = buff.buffer_info()[0] - tr = struct.pack("llP", start, end, addressBuffer) - trBuff = array.array("b", tr) - addressTrBuff = trBuff.buffer_info()[0] - num_bytes = self.SendScintilla(EM_GETTEXTRANGE, 0, addressTrBuff) - ret = buff.tobytes()[:num_bytes] - if decode: - ret = ret.decode(default_scintilla_encoding) - return ret - - def ReplaceSel(self, str): - buff = (str + "\0").encode(default_scintilla_encoding) - self.SendScintilla(scintillacon.SCI_REPLACESEL, 0, buff) - - def GetLine(self, line=-1): - if line == -1: - line = self.GetCurLineNumber() - start = self.LineIndex(line) - end = self.LineIndex(line + 1) - return self.GetTextRange(start, end) - - def SetReadOnly(self, flag=1): - return self.SendScintilla(win32con.EM_SETREADONLY, flag) - - def LineScroll(self, lines, cols=0): - return self.SendScintilla(win32con.EM_LINESCROLL, cols, lines) - - def GetFirstVisibleLine(self): - return self.SendScintilla(win32con.EM_GETFIRSTVISIBLELINE) - - def SetWordWrap(self, mode): - if mode != win32ui.CRichEditView_WrapNone: - raise ValueError("We dont support word-wrap (I dont think :-)") - - -class CScintillaColorEditInterface(CScintillaEditInterface): - ################################ - # Plug-in colorizer support - def _GetColorizer(self): - if not hasattr(self, "colorizer"): - self.colorizer = self._MakeColorizer() - return self.colorizer - - def _MakeColorizer(self): - # Give parent a chance to hook. - parent_func = getattr(self.GetParentFrame(), "_MakeColorizer", None) - if parent_func is not None: - return parent_func() - from . 
import formatter - - ## return formatter.PythonSourceFormatter(self) - return formatter.BuiltinPythonSourceFormatter(self) - - def Colorize(self, start=0, end=-1): - c = self._GetColorizer() - if c is not None: - c.Colorize(start, end) - - def ApplyFormattingStyles(self, bReload=1): - c = self._GetColorizer() - if c is not None: - c.ApplyFormattingStyles(bReload) - - # The Parent window will normally hook - def HookFormatter(self, parent=None): - c = self._GetColorizer() - if c is not None: # No need if we have no color! - c.HookFormatter(parent) - - -class CScintillaEdit(window.Wnd, CScintillaColorEditInterface): - def __init__(self, wnd=None): - if wnd is None: - wnd = win32ui.CreateWnd() - window.Wnd.__init__(self, wnd) - - def SendScintilla(self, msg, w=0, l=0): - return self.SendMessage(msg, w, l) - - def CreateWindow(self, style, rect, parent, id): - self._obj_.CreateWindow("Scintilla", "Scintilla", style, rect, parent, id, None) diff --git a/lib/pythonwin/pywin/scintilla/document.py b/lib/pythonwin/pywin/scintilla/document.py deleted file mode 100644 index cddb442c..00000000 --- a/lib/pythonwin/pywin/scintilla/document.py +++ /dev/null @@ -1,312 +0,0 @@ -import codecs -import re -import string - -import win32con -import win32ui -from pywin import default_scintilla_encoding -from pywin.mfc import docview - -from . import scintillacon - -crlf_bytes = "\r\n".encode("ascii") -lf_bytes = "\n".encode("ascii") - -# re from pep263 - but we use it both on bytes and strings. -re_encoding_bytes = re.compile("coding[:=]\s*([-\w.]+)".encode("ascii")) -re_encoding_text = re.compile("coding[:=]\s*([-\w.]+)") - -ParentScintillaDocument = docview.Document - - -class CScintillaDocument(ParentScintillaDocument): - "A SyntEdit document." - - def __init__(self, *args): - self.bom = None # the BOM, if any, read from the file. - # the encoding we detected from the source. Might have - # detected via the BOM or an encoding decl. 
Note that in - # the latter case (ie, while self.bom is None), it can't be - # trusted - the user may have edited the encoding decl between - # open and save. - self.source_encoding = None - ParentScintillaDocument.__init__(self, *args) - - def DeleteContents(self): - pass - - def OnOpenDocument(self, filename): - # init data members - # print "Opening", filename - self.SetPathName(filename) # Must set this early! - try: - # load the text as binary we can get smart - # about detecting any existing EOL conventions. - f = open(filename, "rb") - try: - self._LoadTextFromFile(f) - finally: - f.close() - except IOError: - rc = win32ui.MessageBox( - "Could not load the file from %s\n\nDo you want to create a new file?" - % filename, - "Pythonwin", - win32con.MB_YESNO | win32con.MB_ICONWARNING, - ) - if rc == win32con.IDNO: - return 0 - assert rc == win32con.IDYES, rc - try: - f = open(filename, "wb+") - try: - self._LoadTextFromFile(f) - finally: - f.close() - except IOError as e: - rc = win32ui.MessageBox("Cannot create the file %s" % filename) - return 1 - - def SaveFile(self, fileName, encoding=None): - view = self.GetFirstView() - ok = view.SaveTextFile(fileName, encoding=encoding) - if ok: - view.SCISetSavePoint() - return ok - - def ApplyFormattingStyles(self): - self._ApplyOptionalToViews("ApplyFormattingStyles") - - # ##################### - # File related functions - # Helper to transfer text from the MFC document to the control. - def _LoadTextFromFile(self, f): - # detect EOL mode - we don't support \r only - so find the - # first '\n' and guess based on the char before. - l = f.readline() - l2 = f.readline() - # If line ends with \r\n or has no line ending, use CRLF. - if l.endswith(crlf_bytes) or not l.endswith(lf_bytes): - eol_mode = scintillacon.SC_EOL_CRLF - else: - eol_mode = scintillacon.SC_EOL_LF - - # Detect the encoding - first look for a BOM, and if not found, - # look for a pep263 encoding declaration. 
- for bom, encoding in ( - (codecs.BOM_UTF8, "utf8"), - (codecs.BOM_UTF16_LE, "utf_16_le"), - (codecs.BOM_UTF16_BE, "utf_16_be"), - ): - if l.startswith(bom): - self.bom = bom - self.source_encoding = encoding - l = l[len(bom) :] # remove it. - break - else: - # no bom detected - look for pep263 encoding decl. - for look in (l, l2): - # Note we are looking at raw bytes here: so - # both the re itself uses bytes and the result - # is bytes - but we need the result as a string. - match = re_encoding_bytes.search(look) - if match is not None: - self.source_encoding = match.group(1).decode("ascii") - break - - # reading by lines would be too slow? Maybe we can use the - # incremental encoders? For now just stick with loading the - # entire file in memory. - text = l + l2 + f.read() - - # Translate from source encoding to UTF-8 bytes for Scintilla - source_encoding = self.source_encoding - # If we don't know an encoding, try utf-8 - if that fails we will - # fallback to latin-1 to treat it as bytes... - if source_encoding is None: - source_encoding = "utf-8" - # we could optimize this by avoiding utf8 to-ing and from-ing, - # but then we would lose the ability to handle invalid utf8 - # (and even then, the use of encoding aliases makes this tricky) - # To create an invalid utf8 file: - # >>> open(filename, "wb").write(codecs.BOM_UTF8+"bad \xa9har\r\n") - try: - dec = text.decode(source_encoding) - except UnicodeError: - print( - "WARNING: Failed to decode bytes from '%s' encoding - treating as latin1" - % source_encoding - ) - dec = text.decode("latin1") - except LookupError: - print( - "WARNING: Invalid encoding '%s' specified - treating as latin1" - % source_encoding - ) - dec = text.decode("latin1") - # and put it back as utf8 - this shouldn't fail. 
- text = dec.encode(default_scintilla_encoding) - - view = self.GetFirstView() - if view.IsWindow(): - # Turn off undo collection while loading - view.SendScintilla(scintillacon.SCI_SETUNDOCOLLECTION, 0, 0) - # Make sure the control isnt read-only - view.SetReadOnly(0) - view.SendScintilla(scintillacon.SCI_CLEARALL) - view.SendMessage(scintillacon.SCI_ADDTEXT, text) - view.SendScintilla(scintillacon.SCI_SETUNDOCOLLECTION, 1, 0) - view.SendScintilla(win32con.EM_EMPTYUNDOBUFFER, 0, 0) - # set EOL mode - view.SendScintilla(scintillacon.SCI_SETEOLMODE, eol_mode) - - def _SaveTextToFile(self, view, filename, encoding=None): - s = view.GetTextRange() # already decoded from scintilla's encoding - source_encoding = encoding - if source_encoding is None: - if self.bom: - source_encoding = self.source_encoding - else: - # no BOM - look for an encoding. - bits = re.split("[\r\n]+", s, 3) - for look in bits[:-1]: - match = re_encoding_text.search(look) - if match is not None: - source_encoding = match.group(1) - self.source_encoding = source_encoding - break - - if source_encoding is None: - source_encoding = "utf-8" - - ## encode data before opening file so script is not lost if encoding fails - file_contents = s.encode(source_encoding) - # Open in binary mode as scintilla itself ensures the - # line endings are already appropriate - f = open(filename, "wb") - try: - if self.bom: - f.write(self.bom) - f.write(file_contents) - finally: - f.close() - self.SetModifiedFlag(0) - - def FinalizeViewCreation(self, view): - pass - - def HookViewNotifications(self, view): - parent = view.GetParentFrame() - parent.HookNotify( - ViewNotifyDelegate(self, "OnBraceMatch"), scintillacon.SCN_CHECKBRACE - ) - parent.HookNotify( - ViewNotifyDelegate(self, "OnMarginClick"), scintillacon.SCN_MARGINCLICK - ) - parent.HookNotify( - ViewNotifyDelegate(self, "OnNeedShown"), scintillacon.SCN_NEEDSHOWN - ) - - parent.HookNotify( - DocumentNotifyDelegate(self, "OnSavePointReached"), - 
scintillacon.SCN_SAVEPOINTREACHED, - ) - parent.HookNotify( - DocumentNotifyDelegate(self, "OnSavePointLeft"), - scintillacon.SCN_SAVEPOINTLEFT, - ) - parent.HookNotify( - DocumentNotifyDelegate(self, "OnModifyAttemptRO"), - scintillacon.SCN_MODIFYATTEMPTRO, - ) - # Tell scintilla what characters should abort auto-complete. - view.SCIAutoCStops(string.whitespace + "()[]:;+-/*=\\?'!#@$%^&,<>\"'|") - - if view != self.GetFirstView(): - view.SCISetDocPointer(self.GetFirstView().SCIGetDocPointer()) - - def OnSavePointReached(self, std, extra): - self.SetModifiedFlag(0) - - def OnSavePointLeft(self, std, extra): - self.SetModifiedFlag(1) - - def OnModifyAttemptRO(self, std, extra): - self.MakeDocumentWritable() - - # All Marker functions are 1 based. - def MarkerAdd(self, lineNo, marker): - self.GetEditorView().SCIMarkerAdd(lineNo - 1, marker) - - def MarkerCheck(self, lineNo, marker): - v = self.GetEditorView() - lineNo = lineNo - 1 # Make 0 based - markerState = v.SCIMarkerGet(lineNo) - return markerState & (1 << marker) != 0 - - def MarkerToggle(self, lineNo, marker): - v = self.GetEditorView() - if self.MarkerCheck(lineNo, marker): - v.SCIMarkerDelete(lineNo - 1, marker) - else: - v.SCIMarkerAdd(lineNo - 1, marker) - - def MarkerDelete(self, lineNo, marker): - self.GetEditorView().SCIMarkerDelete(lineNo - 1, marker) - - def MarkerDeleteAll(self, marker): - self.GetEditorView().SCIMarkerDeleteAll(marker) - - def MarkerGetNext(self, lineNo, marker): - return self.GetEditorView().SCIMarkerNext(lineNo - 1, 1 << marker) + 1 - - def MarkerAtLine(self, lineNo, marker): - markerState = self.GetEditorView().SCIMarkerGet(lineNo - 1) - return markerState & (1 << marker) - - # Helper for reflecting functions to views. 
- def _ApplyToViews(self, funcName, *args): - for view in self.GetAllViews(): - func = getattr(view, funcName) - func(*args) - - def _ApplyOptionalToViews(self, funcName, *args): - for view in self.GetAllViews(): - func = getattr(view, funcName, None) - if func is not None: - func(*args) - - def GetEditorView(self): - # Find the first frame with a view, - # then ask it to give the editor view - # as it knows which one is "active" - try: - frame_gev = self.GetFirstView().GetParentFrame().GetEditorView - except AttributeError: - return self.GetFirstView() - return frame_gev() - - -# Delegate to the correct view, based on the control that sent it. -class ViewNotifyDelegate: - def __init__(self, doc, name): - self.doc = doc - self.name = name - - def __call__(self, std, extra): - (hwndFrom, idFrom, code) = std - for v in self.doc.GetAllViews(): - if v.GetSafeHwnd() == hwndFrom: - return getattr(v, self.name)(*(std, extra)) - - -# Delegate to the document, but only from a single view (as each view sends it seperately) -class DocumentNotifyDelegate: - def __init__(self, doc, name): - self.doc = doc - self.delegate = getattr(doc, name) - - def __call__(self, std, extra): - (hwndFrom, idFrom, code) = std - if hwndFrom == self.doc.GetEditorView().GetSafeHwnd(): - self.delegate(*(std, extra)) diff --git a/lib/pythonwin/pywin/scintilla/find.py b/lib/pythonwin/pywin/scintilla/find.py deleted file mode 100644 index e1d21a5b..00000000 --- a/lib/pythonwin/pywin/scintilla/find.py +++ /dev/null @@ -1,510 +0,0 @@ -# find.py - Find and Replace -import afxres -import win32api -import win32con -import win32ui -from pywin.framework import scriptutils -from pywin.mfc import dialog - -FOUND_NOTHING = 0 -FOUND_NORMAL = 1 -FOUND_LOOPED_BACK = 2 -FOUND_NEXT_FILE = 3 - - -class SearchParams: - def __init__(self, other=None): - if other is None: - self.__dict__["findText"] = "" - self.__dict__["replaceText"] = "" - self.__dict__["matchCase"] = 0 - self.__dict__["matchWords"] = 0 - 
self.__dict__["acrossFiles"] = 0 - self.__dict__["remember"] = 1 - self.__dict__["sel"] = (-1, -1) - self.__dict__["keepDialogOpen"] = 0 - else: - self.__dict__.update(other.__dict__) - - # Helper so we cant misspell attributes :-) - def __setattr__(self, attr, val): - if not hasattr(self, attr): - raise AttributeError(attr) - self.__dict__[attr] = val - - -curDialog = None -lastSearch = defaultSearch = SearchParams() -searchHistory = [] - - -def ShowFindDialog(): - _ShowDialog(FindDialog) - - -def ShowReplaceDialog(): - _ShowDialog(ReplaceDialog) - - -def _ShowDialog(dlgClass): - global curDialog - if curDialog is not None: - if curDialog.__class__ != dlgClass: - curDialog.DestroyWindow() - curDialog = None - else: - curDialog.SetFocus() - if curDialog is None: - curDialog = dlgClass() - curDialog.CreateWindow() - - -def FindNext(): - params = SearchParams(lastSearch) - params.sel = (-1, -1) - if not params.findText: - ShowFindDialog() - else: - return _FindIt(None, params) - - -def _GetControl(control=None): - if control is None: - control = scriptutils.GetActiveEditControl() - return control - - -def _FindIt(control, searchParams): - global lastSearch, defaultSearch - control = _GetControl(control) - if control is None: - return FOUND_NOTHING - - # Move to the next char, so we find the next one. - flags = 0 - if searchParams.matchWords: - flags = flags | win32con.FR_WHOLEWORD - if searchParams.matchCase: - flags = flags | win32con.FR_MATCHCASE - if searchParams.sel == (-1, -1): - sel = control.GetSel() - # If the position is the same as we found last time, - # then we assume it is a "FindNext" - if sel == lastSearch.sel: - sel = sel[0] + 1, sel[0] + 1 - else: - sel = searchParams.sel - - if sel[0] == sel[1]: - sel = sel[0], control.GetTextLength() - - rc = FOUND_NOTHING - # (Old edit control will fail here!) 
- posFind, foundSel = control.FindText(flags, sel, searchParams.findText) - lastSearch = SearchParams(searchParams) - if posFind >= 0: - rc = FOUND_NORMAL - lineno = control.LineFromChar(posFind) - control.SCIEnsureVisible(lineno) - control.SetSel(foundSel) - control.SetFocus() - win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) - if rc == FOUND_NOTHING and lastSearch.acrossFiles: - # Loop around all documents. First find this document. - try: - try: - doc = control.GetDocument() - except AttributeError: - try: - doc = control.GetParent().GetDocument() - except AttributeError: - print("Cant find a document for the control!") - doc = None - if doc is not None: - template = doc.GetDocTemplate() - alldocs = template.GetDocumentList() - mypos = lookpos = alldocs.index(doc) - while 1: - lookpos = (lookpos + 1) % len(alldocs) - if lookpos == mypos: - break - view = alldocs[lookpos].GetFirstView() - posFind, foundSel = view.FindText( - flags, (0, view.GetTextLength()), searchParams.findText - ) - if posFind >= 0: - nChars = foundSel[1] - foundSel[0] - lineNo = view.LineFromChar(posFind) # zero based. - lineStart = view.LineIndex(lineNo) - colNo = posFind - lineStart # zero based. - scriptutils.JumpToDocument( - alldocs[lookpos].GetPathName(), - lineNo + 1, - colNo + 1, - nChars, - ) - rc = FOUND_NEXT_FILE - break - except win32ui.error: - pass - if rc == FOUND_NOTHING: - # Loop around this control - attempt to find from the start of the control. - posFind, foundSel = control.FindText( - flags, (0, sel[0] - 1), searchParams.findText - ) - if posFind >= 0: - control.SCIEnsureVisible(control.LineFromChar(foundSel[0])) - control.SetSel(foundSel) - control.SetFocus() - win32ui.SetStatusText("Not found! 
Searching from the top of the file.") - rc = FOUND_LOOPED_BACK - else: - lastSearch.sel = -1, -1 - win32ui.SetStatusText("Can not find '%s'" % searchParams.findText) - - if rc != FOUND_NOTHING: - lastSearch.sel = foundSel - - if lastSearch.remember: - defaultSearch = lastSearch - - # track search history - try: - ix = searchHistory.index(searchParams.findText) - except ValueError: - if len(searchHistory) > 50: - searchHistory[50:] = [] - else: - del searchHistory[ix] - searchHistory.insert(0, searchParams.findText) - - return rc - - -def _ReplaceIt(control): - control = _GetControl(control) - statusText = "Can not find '%s'." % lastSearch.findText - rc = FOUND_NOTHING - if control is not None and lastSearch.sel != (-1, -1): - control.ReplaceSel(lastSearch.replaceText) - rc = FindNext() - if rc != FOUND_NOTHING: - statusText = win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE) - win32ui.SetStatusText(statusText) - return rc - - -class FindReplaceDialog(dialog.Dialog): - def __init__(self): - dialog.Dialog.__init__(self, self._GetDialogTemplate()) - self.HookCommand(self.OnFindNext, 109) - - def OnInitDialog(self): - self.editFindText = self.GetDlgItem(102) - self.butMatchWords = self.GetDlgItem(105) - self.butMatchCase = self.GetDlgItem(107) - self.butKeepDialogOpen = self.GetDlgItem(115) - self.butAcrossFiles = self.GetDlgItem(116) - self.butRemember = self.GetDlgItem(117) - - self.editFindText.SetWindowText(defaultSearch.findText) - control = _GetControl() - # _GetControl only gets normal MDI windows; if the interactive - # window is docked and no document open, we get None. - if control: - # If we have a selection, default to that. 
- sel = control.GetSelText() - if len(sel) != 0: - self.editFindText.SetWindowText(sel) - if defaultSearch.remember: - defaultSearch.findText = sel - for hist in searchHistory: - self.editFindText.AddString(hist) - - if hasattr(self.editFindText, "SetEditSel"): - self.editFindText.SetEditSel(0, -1) - else: - self.editFindText.SetSel(0, -1) - self.butMatchWords.SetCheck(defaultSearch.matchWords) - self.butMatchCase.SetCheck(defaultSearch.matchCase) - self.butKeepDialogOpen.SetCheck(defaultSearch.keepDialogOpen) - self.butAcrossFiles.SetCheck(defaultSearch.acrossFiles) - self.butRemember.SetCheck(defaultSearch.remember) - return dialog.Dialog.OnInitDialog(self) - - def OnDestroy(self, msg): - global curDialog - curDialog = None - return dialog.Dialog.OnDestroy(self, msg) - - def DoFindNext(self): - params = SearchParams() - params.findText = self.editFindText.GetWindowText() - params.matchCase = self.butMatchCase.GetCheck() - params.matchWords = self.butMatchWords.GetCheck() - params.acrossFiles = self.butAcrossFiles.GetCheck() - params.remember = self.butRemember.GetCheck() - return _FindIt(None, params) - - def OnFindNext(self, id, code): - if code != 0: # BN_CLICKED - # 3d controls (python.exe + start_pythonwin.pyw) send - # other notification codes - return 1 # - if not self.editFindText.GetWindowText(): - win32api.MessageBeep() - return 1 - if self.DoFindNext() != FOUND_NOTHING: - if not self.butKeepDialogOpen.GetCheck(): - self.DestroyWindow() - - -class FindDialog(FindReplaceDialog): - def _GetDialogTemplate(self): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - visible = win32con.WS_CHILD | win32con.WS_VISIBLE - dt = [ - ["Find", (0, 2, 240, 75), style, None, (8, "MS Sans Serif")], - ["Static", "Fi&nd What:", 101, (5, 8, 40, 10), visible], - [ - "ComboBox", - "", - 102, - (50, 7, 120, 120), - visible - | win32con.WS_BORDER - | win32con.WS_TABSTOP - 
| win32con.WS_VSCROLL - | win32con.CBS_DROPDOWN - | win32con.CBS_AUTOHSCROLL, - ], - [ - "Button", - "Match &whole word only", - 105, - (5, 23, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "Match &case", - 107, - (5, 33, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "Keep &dialog open", - 115, - (5, 43, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "Across &open files", - 116, - (5, 52, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "&Remember as default search", - 117, - (5, 61, 150, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "&Find Next", - 109, - (185, 5, 50, 14), - visible | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, - ], - [ - "Button", - "Cancel", - win32con.IDCANCEL, - (185, 23, 50, 14), - visible | win32con.WS_TABSTOP, - ], - ] - return dt - - -class ReplaceDialog(FindReplaceDialog): - def _GetDialogTemplate(self): - style = ( - win32con.DS_MODALFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - ) - visible = win32con.WS_CHILD | win32con.WS_VISIBLE - dt = [ - ["Replace", (0, 2, 240, 95), style, 0, (8, "MS Sans Serif")], - ["Static", "Fi&nd What:", 101, (5, 8, 40, 10), visible], - [ - "ComboBox", - "", - 102, - (60, 7, 110, 120), - visible - | win32con.WS_BORDER - | win32con.WS_TABSTOP - | win32con.WS_VSCROLL - | win32con.CBS_DROPDOWN - | win32con.CBS_AUTOHSCROLL, - ], - ["Static", "Re&place with:", 103, (5, 25, 50, 10), visible], - [ - "ComboBox", - "", - 104, - (60, 24, 110, 120), - visible - | win32con.WS_BORDER - | win32con.WS_TABSTOP - | win32con.WS_VSCROLL - | win32con.CBS_DROPDOWN - | win32con.CBS_AUTOHSCROLL, - ], - [ - "Button", - "Match &whole word only", - 105, - (5, 42, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, 
- ], - [ - "Button", - "Match &case", - 107, - (5, 52, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "Keep &dialog open", - 115, - (5, 62, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "Across &open files", - 116, - (5, 72, 100, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "&Remember as default search", - 117, - (5, 81, 150, 10), - visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP, - ], - [ - "Button", - "&Find Next", - 109, - (185, 5, 50, 14), - visible | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, - ], - [ - "Button", - "&Replace", - 110, - (185, 23, 50, 14), - visible | win32con.WS_TABSTOP, - ], - [ - "Button", - "Replace &All", - 111, - (185, 41, 50, 14), - visible | win32con.WS_TABSTOP, - ], - [ - "Button", - "Cancel", - win32con.IDCANCEL, - (185, 59, 50, 14), - visible | win32con.WS_TABSTOP, - ], - ] - return dt - - def OnInitDialog(self): - rc = FindReplaceDialog.OnInitDialog(self) - self.HookCommand(self.OnReplace, 110) - self.HookCommand(self.OnReplaceAll, 111) - self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE) - self.editReplaceText = self.GetDlgItem(104) - self.editReplaceText.SetWindowText(lastSearch.replaceText) - if hasattr(self.editReplaceText, "SetEditSel"): - self.editReplaceText.SetEditSel(0, -1) - else: - self.editReplaceText.SetSel(0, -1) - self.butReplace = self.GetDlgItem(110) - self.butReplaceAll = self.GetDlgItem(111) - self.CheckButtonStates() - return rc # 0 when focus set - - def CheckButtonStates(self): - # We can do a "Replace" or "Replace All" if the current selection - # is the same as the search text. 
- ft = self.editFindText.GetWindowText() - control = _GetControl() - # bCanReplace = len(ft)>0 and control.GetSelText() == ft - bCanReplace = control is not None and lastSearch.sel == control.GetSel() - self.butReplace.EnableWindow(bCanReplace) - - # self.butReplaceAll.EnableWindow(bCanReplace) - - def OnActivate(self, msg): - wparam = msg[2] - fActive = win32api.LOWORD(wparam) - if fActive != win32con.WA_INACTIVE: - self.CheckButtonStates() - - def OnFindNext(self, id, code): - if code != 0: - return 1 - self.DoFindNext() - self.CheckButtonStates() - - def OnReplace(self, id, code): - if code != 0: - return 1 - lastSearch.replaceText = self.editReplaceText.GetWindowText() - _ReplaceIt(None) - - def OnReplaceAll(self, id, code): - if code != 0: - return 1 - control = _GetControl(None) - if control is not None: - control.SetSel(0) - num = 0 - if self.DoFindNext() == FOUND_NORMAL: - num = 1 - lastSearch.replaceText = self.editReplaceText.GetWindowText() - while _ReplaceIt(control) == FOUND_NORMAL: - num = num + 1 - - win32ui.SetStatusText("Replaced %d occurrences" % num) - if num > 0 and not self.butKeepDialogOpen.GetCheck(): - self.DestroyWindow() - - -if __name__ == "__main__": - ShowFindDialog() diff --git a/lib/pythonwin/pywin/scintilla/formatter.py b/lib/pythonwin/pywin/scintilla/formatter.py deleted file mode 100644 index 56100ccf..00000000 --- a/lib/pythonwin/pywin/scintilla/formatter.py +++ /dev/null @@ -1,695 +0,0 @@ -# Does Python source formatting for Scintilla controls. -import array -import string - -import win32api -import win32con -import win32ui - -from . import scintillacon - -WM_KICKIDLE = 0x036A - -# Used to indicate that style should use default color -from win32con import CLR_INVALID - -debugging = 0 -if debugging: - # Output must go to another process else the result of - # the printing itself will trigger again trigger a trace. 
- - import win32trace - import win32traceutil - - def trace(*args): - win32trace.write(" ".join(map(str, args)) + "\n") - -else: - trace = lambda *args: None - - -class Style: - """Represents a single format""" - - def __init__(self, name, format, background=CLR_INVALID): - self.name = name # Name the format representes eg, "String", "Class" - # Default background for each style is only used when there are no - # saved settings (generally on first startup) - self.background = self.default_background = background - if type(format) == type(""): - self.aliased = format - self.format = None - else: - self.format = format - self.aliased = None - self.stylenum = None # Not yet registered. - - def IsBasedOnDefault(self): - return len(self.format) == 5 - - # If the currently extended font defintion matches the - # default format, restore the format to the "simple" format. - def NormalizeAgainstDefault(self, defaultFormat): - if self.IsBasedOnDefault(): - return 0 # No more to do, and not changed. - bIsDefault = ( - self.format[7] == defaultFormat[7] and self.format[2] == defaultFormat[2] - ) - if bIsDefault: - self.ForceAgainstDefault() - return bIsDefault - - def ForceAgainstDefault(self): - self.format = self.format[:5] - - def GetCompleteFormat(self, defaultFormat): - # Get the complete style after applying any relevant defaults. - if len(self.format) == 5: # It is a default one - fmt = self.format + defaultFormat[5:] - else: - fmt = self.format - flags = ( - win32con.CFM_BOLD - | win32con.CFM_CHARSET - | win32con.CFM_COLOR - | win32con.CFM_FACE - | win32con.CFM_ITALIC - | win32con.CFM_SIZE - ) - return (flags,) + fmt[1:] - - -# The Formatter interface -# used primarily when the actual formatting is done by Scintilla! 
-class FormatterBase: - def __init__(self, scintilla): - self.scintilla = scintilla - self.baseFormatFixed = (-402653169, 0, 200, 0, 0, 0, 49, "Courier New") - self.baseFormatProp = (-402653169, 0, 200, 0, 0, 0, 49, "Arial") - self.bUseFixed = 1 - self.styles = {} # Indexed by name - self.styles_by_id = {} # Indexed by allocated ID. - self.SetStyles() - - def HookFormatter(self, parent=None): - raise NotImplementedError() - - # Used by the IDLE extensions to quickly determine if a character is a string. - def GetStringStyle(self, pos): - try: - style = self.styles_by_id[self.scintilla.SCIGetStyleAt(pos)] - except KeyError: - # A style we dont know about - probably not even a .py file - can't be a string - return None - if style.name in self.string_style_names: - return style - return None - - def RegisterStyle(self, style, stylenum): - assert stylenum is not None, "We must have a style number" - assert style.stylenum is None, "Style has already been registered" - assert stylenum not in self.styles, "We are reusing a style number!" - style.stylenum = stylenum - self.styles[style.name] = style - self.styles_by_id[stylenum] = style - - def SetStyles(self): - raise NotImplementedError() - - def GetSampleText(self): - return "Sample Text for the Format Dialog" - - def GetDefaultFormat(self): - if self.bUseFixed: - return self.baseFormatFixed - return self.baseFormatProp - - # Update the control with the new style format. - def _ReformatStyle(self, style): - ## Selection (background only for now) - ## Passing False for WPARAM to SCI_SETSELBACK is documented as resetting to scintilla default, - ## but does not work - selection background is not visible at all. - ## Default value in SPECIAL_STYLES taken from scintilla source. 
- if style.name == STYLE_SELECTION: - clr = style.background - self.scintilla.SendScintilla(scintillacon.SCI_SETSELBACK, True, clr) - - ## Can't change font for selection, but could set color - ## However, the font color dropbox has no option for default, and thus would - ## always override syntax coloring - ## clr = style.format[4] - ## self.scintilla.SendScintilla(scintillacon.SCI_SETSELFORE, clr != CLR_INVALID, clr) - return - - assert style.stylenum is not None, "Unregistered style." - # print "Reformat style", style.name, style.stylenum - scintilla = self.scintilla - stylenum = style.stylenum - # Now we have the style number, indirect for the actual style. - if style.aliased is not None: - style = self.styles[style.aliased] - f = style.format - if style.IsBasedOnDefault(): - baseFormat = self.GetDefaultFormat() - else: - baseFormat = f - scintilla.SCIStyleSetFore(stylenum, f[4]) - scintilla.SCIStyleSetFont(stylenum, baseFormat[7], baseFormat[5]) - if f[1] & 1: - scintilla.SCIStyleSetBold(stylenum, 1) - else: - scintilla.SCIStyleSetBold(stylenum, 0) - if f[1] & 2: - scintilla.SCIStyleSetItalic(stylenum, 1) - else: - scintilla.SCIStyleSetItalic(stylenum, 0) - scintilla.SCIStyleSetSize(stylenum, int(baseFormat[2] / 20)) - scintilla.SCIStyleSetEOLFilled(stylenum, 1) # Only needed for unclosed strings. 
- - ## Default style background to whitespace background if set, - ## otherwise use system window color - bg = style.background - if bg == CLR_INVALID: - bg = self.styles[STYLE_DEFAULT].background - if bg == CLR_INVALID: - bg = win32api.GetSysColor(win32con.COLOR_WINDOW) - scintilla.SCIStyleSetBack(stylenum, bg) - - def GetStyleByNum(self, stylenum): - return self.styles_by_id[stylenum] - - def ApplyFormattingStyles(self, bReload=1): - if bReload: - self.LoadPreferences() - baseFormat = self.GetDefaultFormat() - defaultStyle = Style("default", baseFormat) - defaultStyle.stylenum = scintillacon.STYLE_DEFAULT - self._ReformatStyle(defaultStyle) - for style in list(self.styles.values()): - if style.aliased is None: - style.NormalizeAgainstDefault(baseFormat) - self._ReformatStyle(style) - self.scintilla.InvalidateRect() - - # Some functions for loading and saving preferences. By default - # an INI file (well, MFC maps this to the registry) is used. - def LoadPreferences(self): - self.baseFormatFixed = eval( - self.LoadPreference("Base Format Fixed", str(self.baseFormatFixed)) - ) - self.baseFormatProp = eval( - self.LoadPreference("Base Format Proportional", str(self.baseFormatProp)) - ) - self.bUseFixed = int(self.LoadPreference("Use Fixed", 1)) - - for style in list(self.styles.values()): - new = self.LoadPreference(style.name, str(style.format)) - try: - style.format = eval(new) - except: - print("Error loading style data for", style.name) - # Use "vanilla" background hardcoded in PYTHON_STYLES if no settings in registry - style.background = int( - self.LoadPreference( - style.name + " background", style.default_background - ) - ) - - def LoadPreference(self, name, default): - return win32ui.GetProfileVal("Format", name, default) - - def SavePreferences(self): - self.SavePreference("Base Format Fixed", str(self.baseFormatFixed)) - self.SavePreference("Base Format Proportional", str(self.baseFormatProp)) - self.SavePreference("Use Fixed", self.bUseFixed) - for style 
in list(self.styles.values()): - if style.aliased is None: - self.SavePreference(style.name, str(style.format)) - bg_name = style.name + " background" - self.SavePreference(bg_name, style.background) - - def SavePreference(self, name, value): - win32ui.WriteProfileVal("Format", name, value) - - -# An abstract formatter -# For all formatters we actually implement here. -# (as opposed to those formatters built in to Scintilla) -class Formatter(FormatterBase): - def __init__(self, scintilla): - self.bCompleteWhileIdle = 0 - self.bHaveIdleHandler = 0 # Dont currently have an idle handle - self.nextstylenum = 0 - FormatterBase.__init__(self, scintilla) - - def HookFormatter(self, parent=None): - if parent is None: - parent = self.scintilla.GetParent() # was GetParentFrame()!? - parent.HookNotify(self.OnStyleNeeded, scintillacon.SCN_STYLENEEDED) - - def OnStyleNeeded(self, std, extra): - notify = self.scintilla.SCIUnpackNotifyMessage(extra) - endStyledChar = self.scintilla.SendScintilla(scintillacon.SCI_GETENDSTYLED) - lineEndStyled = self.scintilla.LineFromChar(endStyledChar) - endStyled = self.scintilla.LineIndex(lineEndStyled) - # print "enPosPaint %d endStyledChar %d lineEndStyled %d endStyled %d" % (endPosPaint, endStyledChar, lineEndStyled, endStyled) - self.Colorize(endStyled, notify.position) - - def ColorSeg(self, start, end, styleName): - end = end + 1 - # assert end-start>=0, "Can't have negative styling" - stylenum = self.styles[styleName].stylenum - while start < end: - self.style_buffer[start] = stylenum - start = start + 1 - # self.scintilla.SCISetStyling(end - start + 1, stylenum) - - def RegisterStyle(self, style, stylenum=None): - if stylenum is None: - stylenum = self.nextstylenum - self.nextstylenum = self.nextstylenum + 1 - FormatterBase.RegisterStyle(self, style, stylenum) - - def ColorizeString(self, str, charStart, styleStart): - raise RuntimeError("You must override this method") - - def Colorize(self, start=0, end=-1): - scintilla = 
self.scintilla - # scintilla's formatting is all done in terms of utf, so - # we work with utf8 bytes instead of unicode. This magically - # works as any extended chars found in the utf8 don't change - # the semantics. - stringVal = scintilla.GetTextRange(start, end, decode=False) - if start > 0: - stylenum = scintilla.SCIGetStyleAt(start - 1) - styleStart = self.GetStyleByNum(stylenum).name - else: - styleStart = None - # trace("Coloring", start, end, end-start, len(stringVal), styleStart, self.scintilla.SCIGetCharAt(start)) - scintilla.SCIStartStyling(start, 31) - self.style_buffer = array.array("b", (0,) * len(stringVal)) - self.ColorizeString(stringVal, styleStart) - scintilla.SCISetStylingEx(self.style_buffer) - self.style_buffer = None - # trace("After styling, end styled is", self.scintilla.SCIGetEndStyled()) - if ( - self.bCompleteWhileIdle - and not self.bHaveIdleHandler - and end != -1 - and end < scintilla.GetTextLength() - ): - self.bHaveIdleHandler = 1 - win32ui.GetApp().AddIdleHandler(self.DoMoreColoring) - # Kicking idle makes the app seem slower when initially repainting! - - # win32ui.GetMainFrame().PostMessage(WM_KICKIDLE, 0, 0) - - def DoMoreColoring(self, handler, count): - try: - scintilla = self.scintilla - endStyled = scintilla.SCIGetEndStyled() - lineStartStyled = scintilla.LineFromChar(endStyled) - start = scintilla.LineIndex(lineStartStyled) - end = scintilla.LineIndex(lineStartStyled + 1) - textlen = scintilla.GetTextLength() - if end < 0: - end = textlen - - finished = end >= textlen - self.Colorize(start, end) - except (win32ui.error, AttributeError): - # Window may have closed before we finished - no big deal! 
- finished = 1 - - if finished: - self.bHaveIdleHandler = 0 - win32ui.GetApp().DeleteIdleHandler(handler) - return not finished - - -# A Formatter that knows how to format Python source -from keyword import iskeyword, kwlist - -wordstarts = "_0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" -wordchars = "._0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" -operators = "%^&*()-+=|{}[]:;<>,/?!.~" - -STYLE_DEFAULT = "Whitespace" -STYLE_COMMENT = "Comment" -STYLE_COMMENT_BLOCK = "Comment Blocks" -STYLE_NUMBER = "Number" -STYLE_STRING = "String" -STYLE_SQSTRING = "SQ String" -STYLE_TQSSTRING = "TQS String" -STYLE_TQDSTRING = "TQD String" -STYLE_KEYWORD = "Keyword" -STYLE_CLASS = "Class" -STYLE_METHOD = "Method" -STYLE_OPERATOR = "Operator" -STYLE_IDENTIFIER = "Identifier" -STYLE_BRACE = "Brace/Paren - matching" -STYLE_BRACEBAD = "Brace/Paren - unmatched" -STYLE_STRINGEOL = "String with no terminator" -STYLE_LINENUMBER = "Line numbers" -STYLE_INDENTGUIDE = "Indent guide" -STYLE_SELECTION = "Selection" - -STRING_STYLES = [ - STYLE_STRING, - STYLE_SQSTRING, - STYLE_TQSSTRING, - STYLE_TQDSTRING, - STYLE_STRINGEOL, -] - -# These styles can have any ID - they are not special to scintilla itself. -# However, if we use the built-in lexer, then we must use its style numbers -# so in that case, they _are_ special. 
-# (name, format, background, scintilla id) -PYTHON_STYLES = [ - (STYLE_DEFAULT, (0, 0, 200, 0, 0x808080), CLR_INVALID, scintillacon.SCE_P_DEFAULT), - ( - STYLE_COMMENT, - (0, 2, 200, 0, 0x008000), - CLR_INVALID, - scintillacon.SCE_P_COMMENTLINE, - ), - ( - STYLE_COMMENT_BLOCK, - (0, 2, 200, 0, 0x808080), - CLR_INVALID, - scintillacon.SCE_P_COMMENTBLOCK, - ), - (STYLE_NUMBER, (0, 0, 200, 0, 0x808000), CLR_INVALID, scintillacon.SCE_P_NUMBER), - (STYLE_STRING, (0, 0, 200, 0, 0x008080), CLR_INVALID, scintillacon.SCE_P_STRING), - (STYLE_SQSTRING, STYLE_STRING, CLR_INVALID, scintillacon.SCE_P_CHARACTER), - (STYLE_TQSSTRING, STYLE_STRING, CLR_INVALID, scintillacon.SCE_P_TRIPLE), - (STYLE_TQDSTRING, STYLE_STRING, CLR_INVALID, scintillacon.SCE_P_TRIPLEDOUBLE), - (STYLE_STRINGEOL, (0, 0, 200, 0, 0x000000), 0x008080, scintillacon.SCE_P_STRINGEOL), - (STYLE_KEYWORD, (0, 1, 200, 0, 0x800000), CLR_INVALID, scintillacon.SCE_P_WORD), - (STYLE_CLASS, (0, 1, 200, 0, 0xFF0000), CLR_INVALID, scintillacon.SCE_P_CLASSNAME), - (STYLE_METHOD, (0, 1, 200, 0, 0x808000), CLR_INVALID, scintillacon.SCE_P_DEFNAME), - ( - STYLE_OPERATOR, - (0, 0, 200, 0, 0x000000), - CLR_INVALID, - scintillacon.SCE_P_OPERATOR, - ), - ( - STYLE_IDENTIFIER, - (0, 0, 200, 0, 0x000000), - CLR_INVALID, - scintillacon.SCE_P_IDENTIFIER, - ), -] - -# These styles _always_ have this specific style number, regardless of -# internal or external formatter. 
-SPECIAL_STYLES = [ - (STYLE_BRACE, (0, 0, 200, 0, 0x000000), 0xFFFF80, scintillacon.STYLE_BRACELIGHT), - (STYLE_BRACEBAD, (0, 0, 200, 0, 0x000000), 0x8EA5F2, scintillacon.STYLE_BRACEBAD), - ( - STYLE_LINENUMBER, - (0, 0, 200, 0, 0x000000), - win32api.GetSysColor(win32con.COLOR_3DFACE), - scintillacon.STYLE_LINENUMBER, - ), - ( - STYLE_INDENTGUIDE, - (0, 0, 200, 0, 0x000000), - CLR_INVALID, - scintillacon.STYLE_INDENTGUIDE, - ), - ## Not actually a style; requires special handling to send appropriate messages to scintilla - ( - STYLE_SELECTION, - (0, 0, 200, 0, CLR_INVALID), - win32api.RGB(0xC0, 0xC0, 0xC0), - 999999, - ), -] - -PythonSampleCode = """\ -# Some Python -class Sample(Super): - def Fn(self): -\tself.v = 1024 -dest = 'dest.html' -x = func(a + 1)|) -s = "I forget... -## A large -## comment block""" - - -class PythonSourceFormatter(Formatter): - string_style_names = STRING_STYLES - - def GetSampleText(self): - return PythonSampleCode - - def LoadStyles(self): - pass - - def SetStyles(self): - for name, format, bg, ignore in PYTHON_STYLES: - self.RegisterStyle(Style(name, format, bg)) - for name, format, bg, sc_id in SPECIAL_STYLES: - self.RegisterStyle(Style(name, format, bg), sc_id) - - def ClassifyWord(self, cdoc, start, end, prevWord): - word = cdoc[start : end + 1].decode("latin-1") - attr = STYLE_IDENTIFIER - if prevWord == "class": - attr = STYLE_CLASS - elif prevWord == "def": - attr = STYLE_METHOD - elif word[0] in string.digits: - attr = STYLE_NUMBER - elif iskeyword(word): - attr = STYLE_KEYWORD - self.ColorSeg(start, end, attr) - return word - - def ColorizeString(self, str, styleStart): - if styleStart is None: - styleStart = STYLE_DEFAULT - return self.ColorizePythonCode(str, 0, styleStart) - - def ColorizePythonCode(self, cdoc, charStart, styleStart): - # Straight translation of C++, should do better - lengthDoc = len(cdoc) - if lengthDoc <= charStart: - return - prevWord = "" - state = styleStart - chPrev = chPrev2 = chPrev3 = " " - chNext2 
= chNext = cdoc[charStart : charStart + 1].decode("latin-1") - startSeg = i = charStart - while i < lengthDoc: - ch = chNext - chNext = " " - if i + 1 < lengthDoc: - chNext = cdoc[i + 1 : i + 2].decode("latin-1") - chNext2 = " " - if i + 2 < lengthDoc: - chNext2 = cdoc[i + 2 : i + 3].decode("latin-1") - if state == STYLE_DEFAULT: - if ch in wordstarts: - self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) - state = STYLE_KEYWORD - startSeg = i - elif ch == "#": - self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) - if chNext == "#": - state = STYLE_COMMENT_BLOCK - else: - state = STYLE_COMMENT - startSeg = i - elif ch == '"': - self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) - startSeg = i - state = STYLE_COMMENT - if chNext == '"' and chNext2 == '"': - i = i + 2 - state = STYLE_TQDSTRING - ch = " " - chPrev = " " - chNext = " " - if i + 1 < lengthDoc: - chNext = cdoc[i + 1] - else: - state = STYLE_STRING - elif ch == "'": - self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) - startSeg = i - state = STYLE_COMMENT - if chNext == "'" and chNext2 == "'": - i = i + 2 - state = STYLE_TQSSTRING - ch = " " - chPrev = " " - chNext = " " - if i + 1 < lengthDoc: - chNext = cdoc[i + 1] - else: - state = STYLE_SQSTRING - elif ch in operators: - self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT) - self.ColorSeg(i, i, STYLE_OPERATOR) - startSeg = i + 1 - elif state == STYLE_KEYWORD: - if ch not in wordchars: - prevWord = self.ClassifyWord(cdoc, startSeg, i - 1, prevWord) - state = STYLE_DEFAULT - startSeg = i - if ch == "#": - if chNext == "#": - state = STYLE_COMMENT_BLOCK - else: - state = STYLE_COMMENT - elif ch == '"': - if chNext == '"' and chNext2 == '"': - i = i + 2 - state = STYLE_TQDSTRING - ch = " " - chPrev = " " - chNext = " " - if i + 1 < lengthDoc: - chNext = cdoc[i + 1] - else: - state = STYLE_STRING - elif ch == "'": - if chNext == "'" and chNext2 == "'": - i = i + 2 - state = STYLE_TQSSTRING - ch = " " - chPrev = " " - chNext = " " - if i + 1 < lengthDoc: - chNext = cdoc[i + 1] - 
else: - state = STYLE_SQSTRING - elif ch in operators: - self.ColorSeg(startSeg, i, STYLE_OPERATOR) - startSeg = i + 1 - elif state == STYLE_COMMENT or state == STYLE_COMMENT_BLOCK: - if ch == "\r" or ch == "\n": - self.ColorSeg(startSeg, i - 1, state) - state = STYLE_DEFAULT - startSeg = i - elif state == STYLE_STRING: - if ch == "\\": - if chNext == '"' or chNext == "'" or chNext == "\\": - i = i + 1 - ch = chNext - chNext = " " - if i + 1 < lengthDoc: - chNext = cdoc[i + 1] - elif ch == '"': - self.ColorSeg(startSeg, i, STYLE_STRING) - state = STYLE_DEFAULT - startSeg = i + 1 - elif state == STYLE_SQSTRING: - if ch == "\\": - if chNext == '"' or chNext == "'" or chNext == "\\": - i = i + 1 - ch = chNext - chNext = " " - if i + 1 < lengthDoc: - chNext = cdoc[i + 1] - elif ch == "'": - self.ColorSeg(startSeg, i, STYLE_SQSTRING) - state = STYLE_DEFAULT - startSeg = i + 1 - elif state == STYLE_TQSSTRING: - if ch == "'" and chPrev == "'" and chPrev2 == "'" and chPrev3 != "\\": - self.ColorSeg(startSeg, i, STYLE_TQSSTRING) - state = STYLE_DEFAULT - startSeg = i + 1 - elif ( - state == STYLE_TQDSTRING - and ch == '"' - and chPrev == '"' - and chPrev2 == '"' - and chPrev3 != "\\" - ): - self.ColorSeg(startSeg, i, STYLE_TQDSTRING) - state = STYLE_DEFAULT - startSeg = i + 1 - chPrev3 = chPrev2 - chPrev2 = chPrev - chPrev = ch - i = i + 1 - if startSeg < lengthDoc: - if state == STYLE_KEYWORD: - self.ClassifyWord(cdoc, startSeg, lengthDoc - 1, prevWord) - else: - self.ColorSeg(startSeg, lengthDoc - 1, state) - - -# These taken from the SciTE properties file. 
-source_formatter_extensions = [ - (".py .pys .pyw".split(), scintillacon.SCLEX_PYTHON), - (".html .htm .asp .shtml".split(), scintillacon.SCLEX_HTML), - ( - "c .cc .cpp .cxx .h .hh .hpp .hxx .idl .odl .php3 .phtml .inc .js".split(), - scintillacon.SCLEX_CPP, - ), - (".vbs .frm .ctl .cls".split(), scintillacon.SCLEX_VB), - (".pl .pm .cgi .pod".split(), scintillacon.SCLEX_PERL), - (".sql .spec .body .sps .spb .sf .sp".split(), scintillacon.SCLEX_SQL), - (".tex .sty".split(), scintillacon.SCLEX_LATEX), - (".xml .xul".split(), scintillacon.SCLEX_XML), - (".err".split(), scintillacon.SCLEX_ERRORLIST), - (".mak".split(), scintillacon.SCLEX_MAKEFILE), - (".bat .cmd".split(), scintillacon.SCLEX_BATCH), -] - - -class BuiltinSourceFormatter(FormatterBase): - # A class that represents a formatter built-in to Scintilla - def __init__(self, scintilla, ext): - self.ext = ext - FormatterBase.__init__(self, scintilla) - - def Colorize(self, start=0, end=-1): - self.scintilla.SendScintilla(scintillacon.SCI_COLOURISE, start, end) - - def RegisterStyle(self, style, stylenum=None): - assert style.stylenum is None, "Style has already been registered" - if stylenum is None: - stylenum = self.nextstylenum - self.nextstylenum = self.nextstylenum + 1 - assert self.styles.get(stylenum) is None, "We are reusing a style number!" 
- style.stylenum = stylenum - self.styles[style.name] = style - self.styles_by_id[stylenum] = style - - def HookFormatter(self, parent=None): - sc = self.scintilla - for exts, formatter in source_formatter_extensions: - if self.ext in exts: - formatter_use = formatter - break - else: - formatter_use = scintillacon.SCLEX_PYTHON - sc.SendScintilla(scintillacon.SCI_SETLEXER, formatter_use) - keywords = " ".join(kwlist) - sc.SCISetKeywords(keywords) - - -class BuiltinPythonSourceFormatter(BuiltinSourceFormatter): - sci_lexer_name = scintillacon.SCLEX_PYTHON - string_style_names = STRING_STYLES - - def __init__(self, sc, ext=".py"): - BuiltinSourceFormatter.__init__(self, sc, ext) - - def SetStyles(self): - for name, format, bg, sc_id in PYTHON_STYLES: - self.RegisterStyle(Style(name, format, bg), sc_id) - for name, format, bg, sc_id in SPECIAL_STYLES: - self.RegisterStyle(Style(name, format, bg), sc_id) - - def GetSampleText(self): - return PythonSampleCode diff --git a/lib/pythonwin/pywin/scintilla/keycodes.py b/lib/pythonwin/pywin/scintilla/keycodes.py deleted file mode 100644 index badcef3e..00000000 --- a/lib/pythonwin/pywin/scintilla/keycodes.py +++ /dev/null @@ -1,190 +0,0 @@ -import win32api -import win32con -import win32ui - -MAPVK_VK_TO_CHAR = 2 - -key_name_to_vk = {} -key_code_to_name = {} - -_better_names = { - "escape": "esc", - "return": "enter", - "back": "pgup", - "next": "pgdn", -} - - -def _fillvkmap(): - # Pull the VK_names from win32con - names = [entry for entry in win32con.__dict__ if entry.startswith("VK_")] - for name in names: - code = getattr(win32con, name) - n = name[3:].lower() - key_name_to_vk[n] = code - if n in _better_names: - n = _better_names[n] - key_name_to_vk[n] = code - key_code_to_name[code] = n - - -_fillvkmap() - - -def get_vk(chardesc): - if len(chardesc) == 1: - # it is a character. 
- info = win32api.VkKeyScan(chardesc) - if info == -1: - # Note: returning None, None causes an error when keyboard layout is non-English, see the report below - # https://stackoverflow.com/questions/45138084/pythonwin-occasionally-gives-an-error-on-opening - return 0, 0 - vk = win32api.LOBYTE(info) - state = win32api.HIBYTE(info) - modifiers = 0 - if state & 0x1: - modifiers |= win32con.SHIFT_PRESSED - if state & 0x2: - modifiers |= win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED - if state & 0x4: - modifiers |= win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED - return vk, modifiers - # must be a 'key name' - return key_name_to_vk.get(chardesc.lower()), 0 - - -modifiers = { - "alt": win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED, - "lalt": win32con.LEFT_ALT_PRESSED, - "ralt": win32con.RIGHT_ALT_PRESSED, - "ctrl": win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED, - "ctl": win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED, - "control": win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED, - "lctrl": win32con.LEFT_CTRL_PRESSED, - "lctl": win32con.LEFT_CTRL_PRESSED, - "rctrl": win32con.RIGHT_CTRL_PRESSED, - "rctl": win32con.RIGHT_CTRL_PRESSED, - "shift": win32con.SHIFT_PRESSED, - "key": 0, # ignore key tag. -} - - -def parse_key_name(name): - name = name + "-" # Add a sentinal - start = pos = 0 - max = len(name) - toks = [] - while pos < max: - if name[pos] in "+-": - tok = name[start:pos] - # use the ascii lower() version of tok, so ascii chars require - # an explicit shift modifier - ie 'Ctrl+G' should be treated as - # 'ctrl+g' - 'ctrl+shift+g' would be needed if desired. 
- # This is mainly to avoid changing all the old keystroke defs - toks.append(tok.lower()) - pos += 1 # skip the sep - start = pos - pos += 1 - flags = 0 - # do the modifiers - for tok in toks[:-1]: - mod = modifiers.get(tok.lower()) - if mod is not None: - flags |= mod - # the key name - vk, this_flags = get_vk(toks[-1]) - return vk, flags | this_flags - - -_checks = [ - [ # Shift - ("Shift", win32con.SHIFT_PRESSED), - ], - [ # Ctrl key - ("Ctrl", win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED), - ("LCtrl", win32con.LEFT_CTRL_PRESSED), - ("RCtrl", win32con.RIGHT_CTRL_PRESSED), - ], - [ # Alt key - ("Alt", win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED), - ("LAlt", win32con.LEFT_ALT_PRESSED), - ("RAlt", win32con.RIGHT_ALT_PRESSED), - ], -] - - -def make_key_name(vk, flags): - # Check alt keys. - flags_done = 0 - parts = [] - for moddata in _checks: - for name, checkflag in moddata: - if flags & checkflag: - parts.append(name) - flags_done = flags_done & checkflag - break - if flags_done & flags: - parts.append(hex(flags & ~flags_done)) - # Now the key name. - if vk is None: - parts.append("") - else: - try: - parts.append(key_code_to_name[vk]) - except KeyError: - # Not in our virtual key map - ask Windows what character this - # key corresponds to. 
- scancode = win32api.MapVirtualKey(vk, MAPVK_VK_TO_CHAR) - parts.append(chr(scancode)) - sep = "+" - if sep in parts: - sep = "-" - return sep.join([p.capitalize() for p in parts]) - - -def _psc(char): - sc, mods = get_vk(char) - print("Char %s -> %d -> %s" % (repr(char), sc, key_code_to_name.get(sc))) - - -def test1(): - for ch in """aA0/?[{}];:'"`~_-+=\\|,<.>/?""": - _psc(ch) - for code in ["Home", "End", "Left", "Right", "Up", "Down", "Menu", "Next"]: - _psc(code) - - -def _pkn(n): - vk, flags = parse_key_name(n) - print("%s -> %s,%s -> %s" % (n, vk, flags, make_key_name(vk, flags))) - - -def test2(): - _pkn("ctrl+alt-shift+x") - _pkn("ctrl-home") - _pkn("Shift-+") - _pkn("Shift--") - _pkn("Shift+-") - _pkn("Shift++") - _pkn("LShift-+") - _pkn("ctl+home") - _pkn("ctl+enter") - _pkn("alt+return") - _pkn("Alt+/") - _pkn("Alt+BadKeyName") - _pkn("A") # an ascii char - should be seen as 'a' - _pkn("a") - _pkn("Shift-A") - _pkn("Shift-a") - _pkn("a") - _pkn("(") - _pkn("Ctrl+(") - _pkn("Ctrl+Shift-8") - _pkn("Ctrl+*") - _pkn("{") - _pkn("!") - _pkn(".") - - -if __name__ == "__main__": - test2() diff --git a/lib/pythonwin/pywin/scintilla/scintillacon.py b/lib/pythonwin/pywin/scintilla/scintillacon.py deleted file mode 100644 index e7657dc7..00000000 --- a/lib/pythonwin/pywin/scintilla/scintillacon.py +++ /dev/null @@ -1,2001 +0,0 @@ -# Generated by h2py from Include\scintilla.h - - -# Included from BaseTsd.h -def HandleToUlong(h): - return HandleToULong(h) - - -def UlongToHandle(ul): - return ULongToHandle(ul) - - -def UlongToPtr(ul): - return ULongToPtr(ul) - - -def UintToPtr(ui): - return UIntToPtr(ui) - - -INVALID_POSITION = -1 -SCI_START = 2000 -SCI_OPTIONAL_START = 3000 -SCI_LEXER_START = 4000 -SCI_ADDTEXT = 2001 -SCI_ADDSTYLEDTEXT = 2002 -SCI_INSERTTEXT = 2003 -SCI_CLEARALL = 2004 -SCI_CLEARDOCUMENTSTYLE = 2005 -SCI_GETLENGTH = 2006 -SCI_GETCHARAT = 2007 -SCI_GETCURRENTPOS = 2008 -SCI_GETANCHOR = 2009 -SCI_GETSTYLEAT = 2010 -SCI_REDO = 2011 
-SCI_SETUNDOCOLLECTION = 2012 -SCI_SELECTALL = 2013 -SCI_SETSAVEPOINT = 2014 -SCI_GETSTYLEDTEXT = 2015 -SCI_CANREDO = 2016 -SCI_MARKERLINEFROMHANDLE = 2017 -SCI_MARKERDELETEHANDLE = 2018 -SCI_GETUNDOCOLLECTION = 2019 -SCWS_INVISIBLE = 0 -SCWS_VISIBLEALWAYS = 1 -SCWS_VISIBLEAFTERINDENT = 2 -SCI_GETVIEWWS = 2020 -SCI_SETVIEWWS = 2021 -SCI_POSITIONFROMPOINT = 2022 -SCI_POSITIONFROMPOINTCLOSE = 2023 -SCI_GOTOLINE = 2024 -SCI_GOTOPOS = 2025 -SCI_SETANCHOR = 2026 -SCI_GETCURLINE = 2027 -SCI_GETENDSTYLED = 2028 -SC_EOL_CRLF = 0 -SC_EOL_CR = 1 -SC_EOL_LF = 2 -SCI_CONVERTEOLS = 2029 -SCI_GETEOLMODE = 2030 -SCI_SETEOLMODE = 2031 -SCI_STARTSTYLING = 2032 -SCI_SETSTYLING = 2033 -SCI_GETBUFFEREDDRAW = 2034 -SCI_SETBUFFEREDDRAW = 2035 -SCI_SETTABWIDTH = 2036 -SCI_GETTABWIDTH = 2121 -SC_CP_UTF8 = 65001 -SC_CP_DBCS = 1 -SCI_SETCODEPAGE = 2037 -SCI_SETUSEPALETTE = 2039 -MARKER_MAX = 31 -SC_MARK_CIRCLE = 0 -SC_MARK_ROUNDRECT = 1 -SC_MARK_ARROW = 2 -SC_MARK_SMALLRECT = 3 -SC_MARK_SHORTARROW = 4 -SC_MARK_EMPTY = 5 -SC_MARK_ARROWDOWN = 6 -SC_MARK_MINUS = 7 -SC_MARK_PLUS = 8 -SC_MARK_VLINE = 9 -SC_MARK_LCORNER = 10 -SC_MARK_TCORNER = 11 -SC_MARK_BOXPLUS = 12 -SC_MARK_BOXPLUSCONNECTED = 13 -SC_MARK_BOXMINUS = 14 -SC_MARK_BOXMINUSCONNECTED = 15 -SC_MARK_LCORNERCURVE = 16 -SC_MARK_TCORNERCURVE = 17 -SC_MARK_CIRCLEPLUS = 18 -SC_MARK_CIRCLEPLUSCONNECTED = 19 -SC_MARK_CIRCLEMINUS = 20 -SC_MARK_CIRCLEMINUSCONNECTED = 21 -SC_MARK_BACKGROUND = 22 -SC_MARK_DOTDOTDOT = 23 -SC_MARK_ARROWS = 24 -SC_MARK_PIXMAP = 25 -SC_MARK_FULLRECT = 26 -SC_MARK_LEFTRECT = 27 -SC_MARK_CHARACTER = 10000 -SC_MARKNUM_FOLDEREND = 25 -SC_MARKNUM_FOLDEROPENMID = 26 -SC_MARKNUM_FOLDERMIDTAIL = 27 -SC_MARKNUM_FOLDERTAIL = 28 -SC_MARKNUM_FOLDERSUB = 29 -SC_MARKNUM_FOLDER = 30 -SC_MARKNUM_FOLDEROPEN = 31 -SC_MASK_FOLDERS = -33554432 -SCI_MARKERDEFINE = 2040 -SCI_MARKERSETFORE = 2041 -SCI_MARKERSETBACK = 2042 -SCI_MARKERADD = 2043 -SCI_MARKERDELETE = 2044 -SCI_MARKERDELETEALL = 2045 -SCI_MARKERGET = 2046 -SCI_MARKERNEXT = 
2047 -SCI_MARKERPREVIOUS = 2048 -SCI_MARKERDEFINEPIXMAP = 2049 -SCI_MARKERADDSET = 2466 -SCI_MARKERSETALPHA = 2476 -SC_MARGIN_SYMBOL = 0 -SC_MARGIN_NUMBER = 1 -SC_MARGIN_BACK = 2 -SC_MARGIN_FORE = 3 -SCI_SETMARGINTYPEN = 2240 -SCI_GETMARGINTYPEN = 2241 -SCI_SETMARGINWIDTHN = 2242 -SCI_GETMARGINWIDTHN = 2243 -SCI_SETMARGINMASKN = 2244 -SCI_GETMARGINMASKN = 2245 -SCI_SETMARGINSENSITIVEN = 2246 -SCI_GETMARGINSENSITIVEN = 2247 -STYLE_DEFAULT = 32 -STYLE_LINENUMBER = 33 -STYLE_BRACELIGHT = 34 -STYLE_BRACEBAD = 35 -STYLE_CONTROLCHAR = 36 -STYLE_INDENTGUIDE = 37 -STYLE_CALLTIP = 38 -STYLE_LASTPREDEFINED = 39 -STYLE_MAX = 255 -SC_CHARSET_ANSI = 0 -SC_CHARSET_DEFAULT = 1 -SC_CHARSET_BALTIC = 186 -SC_CHARSET_CHINESEBIG5 = 136 -SC_CHARSET_EASTEUROPE = 238 -SC_CHARSET_GB2312 = 134 -SC_CHARSET_GREEK = 161 -SC_CHARSET_HANGUL = 129 -SC_CHARSET_MAC = 77 -SC_CHARSET_OEM = 255 -SC_CHARSET_RUSSIAN = 204 -SC_CHARSET_CYRILLIC = 1251 -SC_CHARSET_SHIFTJIS = 128 -SC_CHARSET_SYMBOL = 2 -SC_CHARSET_TURKISH = 162 -SC_CHARSET_JOHAB = 130 -SC_CHARSET_HEBREW = 177 -SC_CHARSET_ARABIC = 178 -SC_CHARSET_VIETNAMESE = 163 -SC_CHARSET_THAI = 222 -SC_CHARSET_8859_15 = 1000 -SCI_STYLECLEARALL = 2050 -SCI_STYLESETFORE = 2051 -SCI_STYLESETBACK = 2052 -SCI_STYLESETBOLD = 2053 -SCI_STYLESETITALIC = 2054 -SCI_STYLESETSIZE = 2055 -SCI_STYLESETFONT = 2056 -SCI_STYLESETEOLFILLED = 2057 -SCI_STYLERESETDEFAULT = 2058 -SCI_STYLESETUNDERLINE = 2059 -SC_CASE_MIXED = 0 -SC_CASE_UPPER = 1 -SC_CASE_LOWER = 2 -SCI_STYLEGETFORE = 2481 -SCI_STYLEGETBACK = 2482 -SCI_STYLEGETBOLD = 2483 -SCI_STYLEGETITALIC = 2484 -SCI_STYLEGETSIZE = 2485 -SCI_STYLEGETFONT = 2486 -SCI_STYLEGETEOLFILLED = 2487 -SCI_STYLEGETUNDERLINE = 2488 -SCI_STYLEGETCASE = 2489 -SCI_STYLEGETCHARACTERSET = 2490 -SCI_STYLEGETVISIBLE = 2491 -SCI_STYLEGETCHANGEABLE = 2492 -SCI_STYLEGETHOTSPOT = 2493 -SCI_STYLESETCASE = 2060 -SCI_STYLESETCHARACTERSET = 2066 -SCI_STYLESETHOTSPOT = 2409 -SCI_SETSELFORE = 2067 -SCI_SETSELBACK = 2068 -SCI_GETSELALPHA = 2477 
-SCI_SETSELALPHA = 2478 -SCI_GETSELEOLFILLED = 2479 -SCI_SETSELEOLFILLED = 2480 -SCI_SETCARETFORE = 2069 -SCI_ASSIGNCMDKEY = 2070 -SCI_CLEARCMDKEY = 2071 -SCI_CLEARALLCMDKEYS = 2072 -SCI_SETSTYLINGEX = 2073 -SCI_STYLESETVISIBLE = 2074 -SCI_GETCARETPERIOD = 2075 -SCI_SETCARETPERIOD = 2076 -SCI_SETWORDCHARS = 2077 -SCI_BEGINUNDOACTION = 2078 -SCI_ENDUNDOACTION = 2079 -INDIC_PLAIN = 0 -INDIC_SQUIGGLE = 1 -INDIC_TT = 2 -INDIC_DIAGONAL = 3 -INDIC_STRIKE = 4 -INDIC_HIDDEN = 5 -INDIC_BOX = 6 -INDIC_ROUNDBOX = 7 -INDIC_MAX = 31 -INDIC_CONTAINER = 8 -INDIC0_MASK = 0x20 -INDIC1_MASK = 0x40 -INDIC2_MASK = 0x80 -INDICS_MASK = 0xE0 -SCI_INDICSETSTYLE = 2080 -SCI_INDICGETSTYLE = 2081 -SCI_INDICSETFORE = 2082 -SCI_INDICGETFORE = 2083 -SCI_INDICSETUNDER = 2510 -SCI_INDICGETUNDER = 2511 -SCI_SETWHITESPACEFORE = 2084 -SCI_SETWHITESPACEBACK = 2085 -SCI_SETSTYLEBITS = 2090 -SCI_GETSTYLEBITS = 2091 -SCI_SETLINESTATE = 2092 -SCI_GETLINESTATE = 2093 -SCI_GETMAXLINESTATE = 2094 -SCI_GETCARETLINEVISIBLE = 2095 -SCI_SETCARETLINEVISIBLE = 2096 -SCI_GETCARETLINEBACK = 2097 -SCI_SETCARETLINEBACK = 2098 -SCI_STYLESETCHANGEABLE = 2099 -SCI_AUTOCSHOW = 2100 -SCI_AUTOCCANCEL = 2101 -SCI_AUTOCACTIVE = 2102 -SCI_AUTOCPOSSTART = 2103 -SCI_AUTOCCOMPLETE = 2104 -SCI_AUTOCSTOPS = 2105 -SCI_AUTOCSETSEPARATOR = 2106 -SCI_AUTOCGETSEPARATOR = 2107 -SCI_AUTOCSELECT = 2108 -SCI_AUTOCSETCANCELATSTART = 2110 -SCI_AUTOCGETCANCELATSTART = 2111 -SCI_AUTOCSETFILLUPS = 2112 -SCI_AUTOCSETCHOOSESINGLE = 2113 -SCI_AUTOCGETCHOOSESINGLE = 2114 -SCI_AUTOCSETIGNORECASE = 2115 -SCI_AUTOCGETIGNORECASE = 2116 -SCI_USERLISTSHOW = 2117 -SCI_AUTOCSETAUTOHIDE = 2118 -SCI_AUTOCGETAUTOHIDE = 2119 -SCI_AUTOCSETDROPRESTOFWORD = 2270 -SCI_AUTOCGETDROPRESTOFWORD = 2271 -SCI_REGISTERIMAGE = 2405 -SCI_CLEARREGISTEREDIMAGES = 2408 -SCI_AUTOCGETTYPESEPARATOR = 2285 -SCI_AUTOCSETTYPESEPARATOR = 2286 -SCI_AUTOCSETMAXWIDTH = 2208 -SCI_AUTOCGETMAXWIDTH = 2209 -SCI_AUTOCSETMAXHEIGHT = 2210 -SCI_AUTOCGETMAXHEIGHT = 2211 -SCI_SETINDENT = 2122 
-SCI_GETINDENT = 2123 -SCI_SETUSETABS = 2124 -SCI_GETUSETABS = 2125 -SCI_SETLINEINDENTATION = 2126 -SCI_GETLINEINDENTATION = 2127 -SCI_GETLINEINDENTPOSITION = 2128 -SCI_GETCOLUMN = 2129 -SCI_SETHSCROLLBAR = 2130 -SCI_GETHSCROLLBAR = 2131 -SC_IV_NONE = 0 -SC_IV_REAL = 1 -SC_IV_LOOKFORWARD = 2 -SC_IV_LOOKBOTH = 3 -SCI_SETINDENTATIONGUIDES = 2132 -SCI_GETINDENTATIONGUIDES = 2133 -SCI_SETHIGHLIGHTGUIDE = 2134 -SCI_GETHIGHLIGHTGUIDE = 2135 -SCI_GETLINEENDPOSITION = 2136 -SCI_GETCODEPAGE = 2137 -SCI_GETCARETFORE = 2138 -SCI_GETUSEPALETTE = 2139 -SCI_GETREADONLY = 2140 -SCI_SETCURRENTPOS = 2141 -SCI_SETSELECTIONSTART = 2142 -SCI_GETSELECTIONSTART = 2143 -SCI_SETSELECTIONEND = 2144 -SCI_GETSELECTIONEND = 2145 -SCI_SETPRINTMAGNIFICATION = 2146 -SCI_GETPRINTMAGNIFICATION = 2147 -SC_PRINT_NORMAL = 0 -SC_PRINT_INVERTLIGHT = 1 -SC_PRINT_BLACKONWHITE = 2 -SC_PRINT_COLOURONWHITE = 3 -SC_PRINT_COLOURONWHITEDEFAULTBG = 4 -SCI_SETPRINTCOLOURMODE = 2148 -SCI_GETPRINTCOLOURMODE = 2149 -SCFIND_WHOLEWORD = 2 -SCFIND_MATCHCASE = 4 -SCFIND_WORDSTART = 0x00100000 -SCFIND_REGEXP = 0x00200000 -SCFIND_POSIX = 0x00400000 -SCI_FINDTEXT = 2150 -SCI_FORMATRANGE = 2151 -SCI_GETFIRSTVISIBLELINE = 2152 -SCI_GETLINE = 2153 -SCI_GETLINECOUNT = 2154 -SCI_SETMARGINLEFT = 2155 -SCI_GETMARGINLEFT = 2156 -SCI_SETMARGINRIGHT = 2157 -SCI_GETMARGINRIGHT = 2158 -SCI_GETMODIFY = 2159 -SCI_SETSEL = 2160 -SCI_GETSELTEXT = 2161 -SCI_GETTEXTRANGE = 2162 -SCI_HIDESELECTION = 2163 -SCI_POINTXFROMPOSITION = 2164 -SCI_POINTYFROMPOSITION = 2165 -SCI_LINEFROMPOSITION = 2166 -SCI_POSITIONFROMLINE = 2167 -SCI_LINESCROLL = 2168 -SCI_SCROLLCARET = 2169 -SCI_REPLACESEL = 2170 -SCI_SETREADONLY = 2171 -SCI_NULL = 2172 -SCI_CANPASTE = 2173 -SCI_CANUNDO = 2174 -SCI_EMPTYUNDOBUFFER = 2175 -SCI_UNDO = 2176 -SCI_CUT = 2177 -SCI_COPY = 2178 -SCI_PASTE = 2179 -SCI_CLEAR = 2180 -SCI_SETTEXT = 2181 -SCI_GETTEXT = 2182 -SCI_GETTEXTLENGTH = 2183 -SCI_GETDIRECTFUNCTION = 2184 -SCI_GETDIRECTPOINTER = 2185 -SCI_SETOVERTYPE = 2186 
-SCI_GETOVERTYPE = 2187 -SCI_SETCARETWIDTH = 2188 -SCI_GETCARETWIDTH = 2189 -SCI_SETTARGETSTART = 2190 -SCI_GETTARGETSTART = 2191 -SCI_SETTARGETEND = 2192 -SCI_GETTARGETEND = 2193 -SCI_REPLACETARGET = 2194 -SCI_REPLACETARGETRE = 2195 -SCI_SEARCHINTARGET = 2197 -SCI_SETSEARCHFLAGS = 2198 -SCI_GETSEARCHFLAGS = 2199 -SCI_CALLTIPSHOW = 2200 -SCI_CALLTIPCANCEL = 2201 -SCI_CALLTIPACTIVE = 2202 -SCI_CALLTIPPOSSTART = 2203 -SCI_CALLTIPSETHLT = 2204 -SCI_CALLTIPSETBACK = 2205 -SCI_CALLTIPSETFORE = 2206 -SCI_CALLTIPSETFOREHLT = 2207 -SCI_CALLTIPUSESTYLE = 2212 -SCI_VISIBLEFROMDOCLINE = 2220 -SCI_DOCLINEFROMVISIBLE = 2221 -SCI_WRAPCOUNT = 2235 -SC_FOLDLEVELBASE = 0x400 -SC_FOLDLEVELWHITEFLAG = 0x1000 -SC_FOLDLEVELHEADERFLAG = 0x2000 -SC_FOLDLEVELBOXHEADERFLAG = 0x4000 -SC_FOLDLEVELBOXFOOTERFLAG = 0x8000 -SC_FOLDLEVELCONTRACTED = 0x10000 -SC_FOLDLEVELUNINDENT = 0x20000 -SC_FOLDLEVELNUMBERMASK = 0x0FFF -SCI_SETFOLDLEVEL = 2222 -SCI_GETFOLDLEVEL = 2223 -SCI_GETLASTCHILD = 2224 -SCI_GETFOLDPARENT = 2225 -SCI_SHOWLINES = 2226 -SCI_HIDELINES = 2227 -SCI_GETLINEVISIBLE = 2228 -SCI_SETFOLDEXPANDED = 2229 -SCI_GETFOLDEXPANDED = 2230 -SCI_TOGGLEFOLD = 2231 -SCI_ENSUREVISIBLE = 2232 -SC_FOLDFLAG_LINEBEFORE_EXPANDED = 0x0002 -SC_FOLDFLAG_LINEBEFORE_CONTRACTED = 0x0004 -SC_FOLDFLAG_LINEAFTER_EXPANDED = 0x0008 -SC_FOLDFLAG_LINEAFTER_CONTRACTED = 0x0010 -SC_FOLDFLAG_LEVELNUMBERS = 0x0040 -SC_FOLDFLAG_BOX = 0x0001 -SCI_SETFOLDFLAGS = 2233 -SCI_ENSUREVISIBLEENFORCEPOLICY = 2234 -SCI_SETTABINDENTS = 2260 -SCI_GETTABINDENTS = 2261 -SCI_SETBACKSPACEUNINDENTS = 2262 -SCI_GETBACKSPACEUNINDENTS = 2263 -SC_TIME_FOREVER = 10000000 -SCI_SETMOUSEDWELLTIME = 2264 -SCI_GETMOUSEDWELLTIME = 2265 -SCI_WORDSTARTPOSITION = 2266 -SCI_WORDENDPOSITION = 2267 -SC_WRAP_NONE = 0 -SC_WRAP_WORD = 1 -SC_WRAP_CHAR = 2 -SCI_SETWRAPMODE = 2268 -SCI_GETWRAPMODE = 2269 -SC_WRAPVISUALFLAG_NONE = 0x0000 -SC_WRAPVISUALFLAG_END = 0x0001 -SC_WRAPVISUALFLAG_START = 0x0002 -SCI_SETWRAPVISUALFLAGS = 2460 -SCI_GETWRAPVISUALFLAGS = 
2461 -SC_WRAPVISUALFLAGLOC_DEFAULT = 0x0000 -SC_WRAPVISUALFLAGLOC_END_BY_TEXT = 0x0001 -SC_WRAPVISUALFLAGLOC_START_BY_TEXT = 0x0002 -SCI_SETWRAPVISUALFLAGSLOCATION = 2462 -SCI_GETWRAPVISUALFLAGSLOCATION = 2463 -SCI_SETWRAPSTARTINDENT = 2464 -SCI_GETWRAPSTARTINDENT = 2465 -SC_CACHE_NONE = 0 -SC_CACHE_CARET = 1 -SC_CACHE_PAGE = 2 -SC_CACHE_DOCUMENT = 3 -SCI_SETLAYOUTCACHE = 2272 -SCI_GETLAYOUTCACHE = 2273 -SCI_SETSCROLLWIDTH = 2274 -SCI_GETSCROLLWIDTH = 2275 -SCI_SETSCROLLWIDTHTRACKING = 2516 -SCI_GETSCROLLWIDTHTRACKING = 2517 -SCI_TEXTWIDTH = 2276 -SCI_SETENDATLASTLINE = 2277 -SCI_GETENDATLASTLINE = 2278 -SCI_TEXTHEIGHT = 2279 -SCI_SETVSCROLLBAR = 2280 -SCI_GETVSCROLLBAR = 2281 -SCI_APPENDTEXT = 2282 -SCI_GETTWOPHASEDRAW = 2283 -SCI_SETTWOPHASEDRAW = 2284 -SCI_TARGETFROMSELECTION = 2287 -SCI_LINESJOIN = 2288 -SCI_LINESSPLIT = 2289 -SCI_SETFOLDMARGINCOLOUR = 2290 -SCI_SETFOLDMARGINHICOLOUR = 2291 -SCI_LINEDOWN = 2300 -SCI_LINEDOWNEXTEND = 2301 -SCI_LINEUP = 2302 -SCI_LINEUPEXTEND = 2303 -SCI_CHARLEFT = 2304 -SCI_CHARLEFTEXTEND = 2305 -SCI_CHARRIGHT = 2306 -SCI_CHARRIGHTEXTEND = 2307 -SCI_WORDLEFT = 2308 -SCI_WORDLEFTEXTEND = 2309 -SCI_WORDRIGHT = 2310 -SCI_WORDRIGHTEXTEND = 2311 -SCI_HOME = 2312 -SCI_HOMEEXTEND = 2313 -SCI_LINEEND = 2314 -SCI_LINEENDEXTEND = 2315 -SCI_DOCUMENTSTART = 2316 -SCI_DOCUMENTSTARTEXTEND = 2317 -SCI_DOCUMENTEND = 2318 -SCI_DOCUMENTENDEXTEND = 2319 -SCI_PAGEUP = 2320 -SCI_PAGEUPEXTEND = 2321 -SCI_PAGEDOWN = 2322 -SCI_PAGEDOWNEXTEND = 2323 -SCI_EDITTOGGLEOVERTYPE = 2324 -SCI_CANCEL = 2325 -SCI_DELETEBACK = 2326 -SCI_TAB = 2327 -SCI_BACKTAB = 2328 -SCI_NEWLINE = 2329 -SCI_FORMFEED = 2330 -SCI_VCHOME = 2331 -SCI_VCHOMEEXTEND = 2332 -SCI_ZOOMIN = 2333 -SCI_ZOOMOUT = 2334 -SCI_DELWORDLEFT = 2335 -SCI_DELWORDRIGHT = 2336 -SCI_DELWORDRIGHTEND = 2518 -SCI_LINECUT = 2337 -SCI_LINEDELETE = 2338 -SCI_LINETRANSPOSE = 2339 -SCI_LINEDUPLICATE = 2404 -SCI_LOWERCASE = 2340 -SCI_UPPERCASE = 2341 -SCI_LINESCROLLDOWN = 2342 -SCI_LINESCROLLUP = 2343 
-SCI_DELETEBACKNOTLINE = 2344 -SCI_HOMEDISPLAY = 2345 -SCI_HOMEDISPLAYEXTEND = 2346 -SCI_LINEENDDISPLAY = 2347 -SCI_LINEENDDISPLAYEXTEND = 2348 -SCI_HOMEWRAP = 2349 -SCI_HOMEWRAPEXTEND = 2450 -SCI_LINEENDWRAP = 2451 -SCI_LINEENDWRAPEXTEND = 2452 -SCI_VCHOMEWRAP = 2453 -SCI_VCHOMEWRAPEXTEND = 2454 -SCI_LINECOPY = 2455 -SCI_MOVECARETINSIDEVIEW = 2401 -SCI_LINELENGTH = 2350 -SCI_BRACEHIGHLIGHT = 2351 -SCI_BRACEBADLIGHT = 2352 -SCI_BRACEMATCH = 2353 -SCI_GETVIEWEOL = 2355 -SCI_SETVIEWEOL = 2356 -SCI_GETDOCPOINTER = 2357 -SCI_SETDOCPOINTER = 2358 -SCI_SETMODEVENTMASK = 2359 -EDGE_NONE = 0 -EDGE_LINE = 1 -EDGE_BACKGROUND = 2 -SCI_GETEDGECOLUMN = 2360 -SCI_SETEDGECOLUMN = 2361 -SCI_GETEDGEMODE = 2362 -SCI_SETEDGEMODE = 2363 -SCI_GETEDGECOLOUR = 2364 -SCI_SETEDGECOLOUR = 2365 -SCI_SEARCHANCHOR = 2366 -SCI_SEARCHNEXT = 2367 -SCI_SEARCHPREV = 2368 -SCI_LINESONSCREEN = 2370 -SCI_USEPOPUP = 2371 -SCI_SELECTIONISRECTANGLE = 2372 -SCI_SETZOOM = 2373 -SCI_GETZOOM = 2374 -SCI_CREATEDOCUMENT = 2375 -SCI_ADDREFDOCUMENT = 2376 -SCI_RELEASEDOCUMENT = 2377 -SCI_GETMODEVENTMASK = 2378 -SCI_SETFOCUS = 2380 -SCI_GETFOCUS = 2381 -SCI_SETSTATUS = 2382 -SCI_GETSTATUS = 2383 -SCI_SETMOUSEDOWNCAPTURES = 2384 -SCI_GETMOUSEDOWNCAPTURES = 2385 -SC_CURSORNORMAL = -1 -SC_CURSORWAIT = 4 -SCI_SETCURSOR = 2386 -SCI_GETCURSOR = 2387 -SCI_SETCONTROLCHARSYMBOL = 2388 -SCI_GETCONTROLCHARSYMBOL = 2389 -SCI_WORDPARTLEFT = 2390 -SCI_WORDPARTLEFTEXTEND = 2391 -SCI_WORDPARTRIGHT = 2392 -SCI_WORDPARTRIGHTEXTEND = 2393 -VISIBLE_SLOP = 0x01 -VISIBLE_STRICT = 0x04 -SCI_SETVISIBLEPOLICY = 2394 -SCI_DELLINELEFT = 2395 -SCI_DELLINERIGHT = 2396 -SCI_SETXOFFSET = 2397 -SCI_GETXOFFSET = 2398 -SCI_CHOOSECARETX = 2399 -SCI_GRABFOCUS = 2400 -CARET_SLOP = 0x01 -CARET_STRICT = 0x04 -CARET_JUMPS = 0x10 -CARET_EVEN = 0x08 -SCI_SETXCARETPOLICY = 2402 -SCI_SETYCARETPOLICY = 2403 -SCI_SETPRINTWRAPMODE = 2406 -SCI_GETPRINTWRAPMODE = 2407 -SCI_SETHOTSPOTACTIVEFORE = 2410 -SCI_GETHOTSPOTACTIVEFORE = 2494 -SCI_SETHOTSPOTACTIVEBACK = 
2411 -SCI_GETHOTSPOTACTIVEBACK = 2495 -SCI_SETHOTSPOTACTIVEUNDERLINE = 2412 -SCI_GETHOTSPOTACTIVEUNDERLINE = 2496 -SCI_SETHOTSPOTSINGLELINE = 2421 -SCI_GETHOTSPOTSINGLELINE = 2497 -SCI_PARADOWN = 2413 -SCI_PARADOWNEXTEND = 2414 -SCI_PARAUP = 2415 -SCI_PARAUPEXTEND = 2416 -SCI_POSITIONBEFORE = 2417 -SCI_POSITIONAFTER = 2418 -SCI_COPYRANGE = 2419 -SCI_COPYTEXT = 2420 -SC_SEL_STREAM = 0 -SC_SEL_RECTANGLE = 1 -SC_SEL_LINES = 2 -SCI_SETSELECTIONMODE = 2422 -SCI_GETSELECTIONMODE = 2423 -SCI_GETLINESELSTARTPOSITION = 2424 -SCI_GETLINESELENDPOSITION = 2425 -SCI_LINEDOWNRECTEXTEND = 2426 -SCI_LINEUPRECTEXTEND = 2427 -SCI_CHARLEFTRECTEXTEND = 2428 -SCI_CHARRIGHTRECTEXTEND = 2429 -SCI_HOMERECTEXTEND = 2430 -SCI_VCHOMERECTEXTEND = 2431 -SCI_LINEENDRECTEXTEND = 2432 -SCI_PAGEUPRECTEXTEND = 2433 -SCI_PAGEDOWNRECTEXTEND = 2434 -SCI_STUTTEREDPAGEUP = 2435 -SCI_STUTTEREDPAGEUPEXTEND = 2436 -SCI_STUTTEREDPAGEDOWN = 2437 -SCI_STUTTEREDPAGEDOWNEXTEND = 2438 -SCI_WORDLEFTEND = 2439 -SCI_WORDLEFTENDEXTEND = 2440 -SCI_WORDRIGHTEND = 2441 -SCI_WORDRIGHTENDEXTEND = 2442 -SCI_SETWHITESPACECHARS = 2443 -SCI_SETCHARSDEFAULT = 2444 -SCI_AUTOCGETCURRENT = 2445 -SCI_ALLOCATE = 2446 -SCI_TARGETASUTF8 = 2447 -SCI_SETLENGTHFORENCODE = 2448 -SCI_ENCODEDFROMUTF8 = 2449 -SCI_FINDCOLUMN = 2456 -SCI_GETCARETSTICKY = 2457 -SCI_SETCARETSTICKY = 2458 -SCI_TOGGLECARETSTICKY = 2459 -SCI_SETPASTECONVERTENDINGS = 2467 -SCI_GETPASTECONVERTENDINGS = 2468 -SCI_SELECTIONDUPLICATE = 2469 -SC_ALPHA_TRANSPARENT = 0 -SC_ALPHA_OPAQUE = 255 -SC_ALPHA_NOALPHA = 256 -SCI_SETCARETLINEBACKALPHA = 2470 -SCI_GETCARETLINEBACKALPHA = 2471 -CARETSTYLE_INVISIBLE = 0 -CARETSTYLE_LINE = 1 -CARETSTYLE_BLOCK = 2 -SCI_SETCARETSTYLE = 2512 -SCI_GETCARETSTYLE = 2513 -SCI_SETINDICATORCURRENT = 2500 -SCI_GETINDICATORCURRENT = 2501 -SCI_SETINDICATORVALUE = 2502 -SCI_GETINDICATORVALUE = 2503 -SCI_INDICATORFILLRANGE = 2504 -SCI_INDICATORCLEARRANGE = 2505 -SCI_INDICATORALLONFOR = 2506 -SCI_INDICATORVALUEAT = 2507 -SCI_INDICATORSTART = 2508 
-SCI_INDICATOREND = 2509 -SCI_SETPOSITIONCACHE = 2514 -SCI_GETPOSITIONCACHE = 2515 -SCI_COPYALLOWLINE = 2519 -SCI_GETCHARACTERPOINTER = 2520 -SCI_SETKEYSUNICODE = 2521 -SCI_GETKEYSUNICODE = 2522 -SCI_STARTRECORD = 3001 -SCI_STOPRECORD = 3002 -SCI_SETLEXER = 4001 -SCI_GETLEXER = 4002 -SCI_COLOURISE = 4003 -SCI_SETPROPERTY = 4004 -KEYWORDSET_MAX = 8 -SCI_SETKEYWORDS = 4005 -SCI_SETLEXERLANGUAGE = 4006 -SCI_LOADLEXERLIBRARY = 4007 -SCI_GETPROPERTY = 4008 -SCI_GETPROPERTYEXPANDED = 4009 -SCI_GETPROPERTYINT = 4010 -SCI_GETSTYLEBITSNEEDED = 4011 -SC_MOD_INSERTTEXT = 0x1 -SC_MOD_DELETETEXT = 0x2 -SC_MOD_CHANGESTYLE = 0x4 -SC_MOD_CHANGEFOLD = 0x8 -SC_PERFORMED_USER = 0x10 -SC_PERFORMED_UNDO = 0x20 -SC_PERFORMED_REDO = 0x40 -SC_MULTISTEPUNDOREDO = 0x80 -SC_LASTSTEPINUNDOREDO = 0x100 -SC_MOD_CHANGEMARKER = 0x200 -SC_MOD_BEFOREINSERT = 0x400 -SC_MOD_BEFOREDELETE = 0x800 -SC_MULTILINEUNDOREDO = 0x1000 -SC_STARTACTION = 0x2000 -SC_MOD_CHANGEINDICATOR = 0x4000 -SC_MOD_CHANGELINESTATE = 0x8000 -SC_MODEVENTMASKALL = 0xFFFF -SCEN_CHANGE = 768 -SCEN_SETFOCUS = 512 -SCEN_KILLFOCUS = 256 -SCK_DOWN = 300 -SCK_UP = 301 -SCK_LEFT = 302 -SCK_RIGHT = 303 -SCK_HOME = 304 -SCK_END = 305 -SCK_PRIOR = 306 -SCK_NEXT = 307 -SCK_DELETE = 308 -SCK_INSERT = 309 -SCK_ESCAPE = 7 -SCK_BACK = 8 -SCK_TAB = 9 -SCK_RETURN = 13 -SCK_ADD = 310 -SCK_SUBTRACT = 311 -SCK_DIVIDE = 312 -SCK_WIN = 313 -SCK_RWIN = 314 -SCK_MENU = 315 -SCMOD_NORM = 0 -SCMOD_SHIFT = 1 -SCMOD_CTRL = 2 -SCMOD_ALT = 4 -SCN_STYLENEEDED = 2000 -SCN_CHARADDED = 2001 -SCN_SAVEPOINTREACHED = 2002 -SCN_SAVEPOINTLEFT = 2003 -SCN_MODIFYATTEMPTRO = 2004 -SCN_KEY = 2005 -SCN_DOUBLECLICK = 2006 -SCN_UPDATEUI = 2007 -SCN_MODIFIED = 2008 -SCN_MACRORECORD = 2009 -SCN_MARGINCLICK = 2010 -SCN_NEEDSHOWN = 2011 -SCN_PAINTED = 2013 -SCN_USERLISTSELECTION = 2014 -SCN_URIDROPPED = 2015 -SCN_DWELLSTART = 2016 -SCN_DWELLEND = 2017 -SCN_ZOOM = 2018 -SCN_HOTSPOTCLICK = 2019 -SCN_HOTSPOTDOUBLECLICK = 2020 -SCN_CALLTIPCLICK = 2021 -SCN_AUTOCSELECTION = 2022 
-SCN_INDICATORCLICK = 2023 -SCN_INDICATORRELEASE = 2024 -SCN_AUTOCCANCELLED = 2025 -SCI_SETCARETPOLICY = 2369 -CARET_CENTER = 0x02 -CARET_XEVEN = 0x08 -CARET_XJUMPS = 0x10 -SCN_POSCHANGED = 2012 -SCN_CHECKBRACE = 2007 -# Generated by h2py from Include\scilexer.h -SCLEX_CONTAINER = 0 -SCLEX_NULL = 1 -SCLEX_PYTHON = 2 -SCLEX_CPP = 3 -SCLEX_HTML = 4 -SCLEX_XML = 5 -SCLEX_PERL = 6 -SCLEX_SQL = 7 -SCLEX_VB = 8 -SCLEX_PROPERTIES = 9 -SCLEX_ERRORLIST = 10 -SCLEX_MAKEFILE = 11 -SCLEX_BATCH = 12 -SCLEX_XCODE = 13 -SCLEX_LATEX = 14 -SCLEX_LUA = 15 -SCLEX_DIFF = 16 -SCLEX_CONF = 17 -SCLEX_PASCAL = 18 -SCLEX_AVE = 19 -SCLEX_ADA = 20 -SCLEX_LISP = 21 -SCLEX_RUBY = 22 -SCLEX_EIFFEL = 23 -SCLEX_EIFFELKW = 24 -SCLEX_TCL = 25 -SCLEX_NNCRONTAB = 26 -SCLEX_BULLANT = 27 -SCLEX_VBSCRIPT = 28 -SCLEX_BAAN = 31 -SCLEX_MATLAB = 32 -SCLEX_SCRIPTOL = 33 -SCLEX_ASM = 34 -SCLEX_CPPNOCASE = 35 -SCLEX_FORTRAN = 36 -SCLEX_F77 = 37 -SCLEX_CSS = 38 -SCLEX_POV = 39 -SCLEX_LOUT = 40 -SCLEX_ESCRIPT = 41 -SCLEX_PS = 42 -SCLEX_NSIS = 43 -SCLEX_MMIXAL = 44 -SCLEX_CLW = 45 -SCLEX_CLWNOCASE = 46 -SCLEX_LOT = 47 -SCLEX_YAML = 48 -SCLEX_TEX = 49 -SCLEX_METAPOST = 50 -SCLEX_POWERBASIC = 51 -SCLEX_FORTH = 52 -SCLEX_ERLANG = 53 -SCLEX_OCTAVE = 54 -SCLEX_MSSQL = 55 -SCLEX_VERILOG = 56 -SCLEX_KIX = 57 -SCLEX_GUI4CLI = 58 -SCLEX_SPECMAN = 59 -SCLEX_AU3 = 60 -SCLEX_APDL = 61 -SCLEX_BASH = 62 -SCLEX_ASN1 = 63 -SCLEX_VHDL = 64 -SCLEX_CAML = 65 -SCLEX_BLITZBASIC = 66 -SCLEX_PUREBASIC = 67 -SCLEX_HASKELL = 68 -SCLEX_PHPSCRIPT = 69 -SCLEX_TADS3 = 70 -SCLEX_REBOL = 71 -SCLEX_SMALLTALK = 72 -SCLEX_FLAGSHIP = 73 -SCLEX_CSOUND = 74 -SCLEX_FREEBASIC = 75 -SCLEX_INNOSETUP = 76 -SCLEX_OPAL = 77 -SCLEX_SPICE = 78 -SCLEX_D = 79 -SCLEX_CMAKE = 80 -SCLEX_GAP = 81 -SCLEX_PLM = 82 -SCLEX_PROGRESS = 83 -SCLEX_ABAQUS = 84 -SCLEX_ASYMPTOTE = 85 -SCLEX_R = 86 -SCLEX_MAGIK = 87 -SCLEX_POWERSHELL = 88 -SCLEX_MYSQL = 89 -SCLEX_PO = 90 -SCLEX_AUTOMATIC = 1000 -SCE_P_DEFAULT = 0 -SCE_P_COMMENTLINE = 1 -SCE_P_NUMBER = 2 -SCE_P_STRING = 3 
-SCE_P_CHARACTER = 4 -SCE_P_WORD = 5 -SCE_P_TRIPLE = 6 -SCE_P_TRIPLEDOUBLE = 7 -SCE_P_CLASSNAME = 8 -SCE_P_DEFNAME = 9 -SCE_P_OPERATOR = 10 -SCE_P_IDENTIFIER = 11 -SCE_P_COMMENTBLOCK = 12 -SCE_P_STRINGEOL = 13 -SCE_P_WORD2 = 14 -SCE_P_DECORATOR = 15 -SCE_C_DEFAULT = 0 -SCE_C_COMMENT = 1 -SCE_C_COMMENTLINE = 2 -SCE_C_COMMENTDOC = 3 -SCE_C_NUMBER = 4 -SCE_C_WORD = 5 -SCE_C_STRING = 6 -SCE_C_CHARACTER = 7 -SCE_C_UUID = 8 -SCE_C_PREPROCESSOR = 9 -SCE_C_OPERATOR = 10 -SCE_C_IDENTIFIER = 11 -SCE_C_STRINGEOL = 12 -SCE_C_VERBATIM = 13 -SCE_C_REGEX = 14 -SCE_C_COMMENTLINEDOC = 15 -SCE_C_WORD2 = 16 -SCE_C_COMMENTDOCKEYWORD = 17 -SCE_C_COMMENTDOCKEYWORDERROR = 18 -SCE_C_GLOBALCLASS = 19 -SCE_D_DEFAULT = 0 -SCE_D_COMMENT = 1 -SCE_D_COMMENTLINE = 2 -SCE_D_COMMENTDOC = 3 -SCE_D_COMMENTNESTED = 4 -SCE_D_NUMBER = 5 -SCE_D_WORD = 6 -SCE_D_WORD2 = 7 -SCE_D_WORD3 = 8 -SCE_D_TYPEDEF = 9 -SCE_D_STRING = 10 -SCE_D_STRINGEOL = 11 -SCE_D_CHARACTER = 12 -SCE_D_OPERATOR = 13 -SCE_D_IDENTIFIER = 14 -SCE_D_COMMENTLINEDOC = 15 -SCE_D_COMMENTDOCKEYWORD = 16 -SCE_D_COMMENTDOCKEYWORDERROR = 17 -SCE_TCL_DEFAULT = 0 -SCE_TCL_COMMENT = 1 -SCE_TCL_COMMENTLINE = 2 -SCE_TCL_NUMBER = 3 -SCE_TCL_WORD_IN_QUOTE = 4 -SCE_TCL_IN_QUOTE = 5 -SCE_TCL_OPERATOR = 6 -SCE_TCL_IDENTIFIER = 7 -SCE_TCL_SUBSTITUTION = 8 -SCE_TCL_SUB_BRACE = 9 -SCE_TCL_MODIFIER = 10 -SCE_TCL_EXPAND = 11 -SCE_TCL_WORD = 12 -SCE_TCL_WORD2 = 13 -SCE_TCL_WORD3 = 14 -SCE_TCL_WORD4 = 15 -SCE_TCL_WORD5 = 16 -SCE_TCL_WORD6 = 17 -SCE_TCL_WORD7 = 18 -SCE_TCL_WORD8 = 19 -SCE_TCL_COMMENT_BOX = 20 -SCE_TCL_BLOCK_COMMENT = 21 -SCE_H_DEFAULT = 0 -SCE_H_TAG = 1 -SCE_H_TAGUNKNOWN = 2 -SCE_H_ATTRIBUTE = 3 -SCE_H_ATTRIBUTEUNKNOWN = 4 -SCE_H_NUMBER = 5 -SCE_H_DOUBLESTRING = 6 -SCE_H_SINGLESTRING = 7 -SCE_H_OTHER = 8 -SCE_H_COMMENT = 9 -SCE_H_ENTITY = 10 -SCE_H_TAGEND = 11 -SCE_H_XMLSTART = 12 -SCE_H_XMLEND = 13 -SCE_H_SCRIPT = 14 -SCE_H_ASP = 15 -SCE_H_ASPAT = 16 -SCE_H_CDATA = 17 -SCE_H_QUESTION = 18 -SCE_H_VALUE = 19 -SCE_H_XCCOMMENT = 20 
-SCE_H_SGML_DEFAULT = 21 -SCE_H_SGML_COMMAND = 22 -SCE_H_SGML_1ST_PARAM = 23 -SCE_H_SGML_DOUBLESTRING = 24 -SCE_H_SGML_SIMPLESTRING = 25 -SCE_H_SGML_ERROR = 26 -SCE_H_SGML_SPECIAL = 27 -SCE_H_SGML_ENTITY = 28 -SCE_H_SGML_COMMENT = 29 -SCE_H_SGML_1ST_PARAM_COMMENT = 30 -SCE_H_SGML_BLOCK_DEFAULT = 31 -SCE_HJ_START = 40 -SCE_HJ_DEFAULT = 41 -SCE_HJ_COMMENT = 42 -SCE_HJ_COMMENTLINE = 43 -SCE_HJ_COMMENTDOC = 44 -SCE_HJ_NUMBER = 45 -SCE_HJ_WORD = 46 -SCE_HJ_KEYWORD = 47 -SCE_HJ_DOUBLESTRING = 48 -SCE_HJ_SINGLESTRING = 49 -SCE_HJ_SYMBOLS = 50 -SCE_HJ_STRINGEOL = 51 -SCE_HJ_REGEX = 52 -SCE_HJA_START = 55 -SCE_HJA_DEFAULT = 56 -SCE_HJA_COMMENT = 57 -SCE_HJA_COMMENTLINE = 58 -SCE_HJA_COMMENTDOC = 59 -SCE_HJA_NUMBER = 60 -SCE_HJA_WORD = 61 -SCE_HJA_KEYWORD = 62 -SCE_HJA_DOUBLESTRING = 63 -SCE_HJA_SINGLESTRING = 64 -SCE_HJA_SYMBOLS = 65 -SCE_HJA_STRINGEOL = 66 -SCE_HJA_REGEX = 67 -SCE_HB_START = 70 -SCE_HB_DEFAULT = 71 -SCE_HB_COMMENTLINE = 72 -SCE_HB_NUMBER = 73 -SCE_HB_WORD = 74 -SCE_HB_STRING = 75 -SCE_HB_IDENTIFIER = 76 -SCE_HB_STRINGEOL = 77 -SCE_HBA_START = 80 -SCE_HBA_DEFAULT = 81 -SCE_HBA_COMMENTLINE = 82 -SCE_HBA_NUMBER = 83 -SCE_HBA_WORD = 84 -SCE_HBA_STRING = 85 -SCE_HBA_IDENTIFIER = 86 -SCE_HBA_STRINGEOL = 87 -SCE_HP_START = 90 -SCE_HP_DEFAULT = 91 -SCE_HP_COMMENTLINE = 92 -SCE_HP_NUMBER = 93 -SCE_HP_STRING = 94 -SCE_HP_CHARACTER = 95 -SCE_HP_WORD = 96 -SCE_HP_TRIPLE = 97 -SCE_HP_TRIPLEDOUBLE = 98 -SCE_HP_CLASSNAME = 99 -SCE_HP_DEFNAME = 100 -SCE_HP_OPERATOR = 101 -SCE_HP_IDENTIFIER = 102 -SCE_HPHP_COMPLEX_VARIABLE = 104 -SCE_HPA_START = 105 -SCE_HPA_DEFAULT = 106 -SCE_HPA_COMMENTLINE = 107 -SCE_HPA_NUMBER = 108 -SCE_HPA_STRING = 109 -SCE_HPA_CHARACTER = 110 -SCE_HPA_WORD = 111 -SCE_HPA_TRIPLE = 112 -SCE_HPA_TRIPLEDOUBLE = 113 -SCE_HPA_CLASSNAME = 114 -SCE_HPA_DEFNAME = 115 -SCE_HPA_OPERATOR = 116 -SCE_HPA_IDENTIFIER = 117 -SCE_HPHP_DEFAULT = 118 -SCE_HPHP_HSTRING = 119 -SCE_HPHP_SIMPLESTRING = 120 -SCE_HPHP_WORD = 121 -SCE_HPHP_NUMBER = 122 -SCE_HPHP_VARIABLE = 
123 -SCE_HPHP_COMMENT = 124 -SCE_HPHP_COMMENTLINE = 125 -SCE_HPHP_HSTRING_VARIABLE = 126 -SCE_HPHP_OPERATOR = 127 -SCE_PL_DEFAULT = 0 -SCE_PL_ERROR = 1 -SCE_PL_COMMENTLINE = 2 -SCE_PL_POD = 3 -SCE_PL_NUMBER = 4 -SCE_PL_WORD = 5 -SCE_PL_STRING = 6 -SCE_PL_CHARACTER = 7 -SCE_PL_PUNCTUATION = 8 -SCE_PL_PREPROCESSOR = 9 -SCE_PL_OPERATOR = 10 -SCE_PL_IDENTIFIER = 11 -SCE_PL_SCALAR = 12 -SCE_PL_ARRAY = 13 -SCE_PL_HASH = 14 -SCE_PL_SYMBOLTABLE = 15 -SCE_PL_VARIABLE_INDEXER = 16 -SCE_PL_REGEX = 17 -SCE_PL_REGSUBST = 18 -SCE_PL_LONGQUOTE = 19 -SCE_PL_BACKTICKS = 20 -SCE_PL_DATASECTION = 21 -SCE_PL_HERE_DELIM = 22 -SCE_PL_HERE_Q = 23 -SCE_PL_HERE_QQ = 24 -SCE_PL_HERE_QX = 25 -SCE_PL_STRING_Q = 26 -SCE_PL_STRING_QQ = 27 -SCE_PL_STRING_QX = 28 -SCE_PL_STRING_QR = 29 -SCE_PL_STRING_QW = 30 -SCE_PL_POD_VERB = 31 -SCE_PL_SUB_PROTOTYPE = 40 -SCE_PL_FORMAT_IDENT = 41 -SCE_PL_FORMAT = 42 -SCE_RB_DEFAULT = 0 -SCE_RB_ERROR = 1 -SCE_RB_COMMENTLINE = 2 -SCE_RB_POD = 3 -SCE_RB_NUMBER = 4 -SCE_RB_WORD = 5 -SCE_RB_STRING = 6 -SCE_RB_CHARACTER = 7 -SCE_RB_CLASSNAME = 8 -SCE_RB_DEFNAME = 9 -SCE_RB_OPERATOR = 10 -SCE_RB_IDENTIFIER = 11 -SCE_RB_REGEX = 12 -SCE_RB_GLOBAL = 13 -SCE_RB_SYMBOL = 14 -SCE_RB_MODULE_NAME = 15 -SCE_RB_INSTANCE_VAR = 16 -SCE_RB_CLASS_VAR = 17 -SCE_RB_BACKTICKS = 18 -SCE_RB_DATASECTION = 19 -SCE_RB_HERE_DELIM = 20 -SCE_RB_HERE_Q = 21 -SCE_RB_HERE_QQ = 22 -SCE_RB_HERE_QX = 23 -SCE_RB_STRING_Q = 24 -SCE_RB_STRING_QQ = 25 -SCE_RB_STRING_QX = 26 -SCE_RB_STRING_QR = 27 -SCE_RB_STRING_QW = 28 -SCE_RB_WORD_DEMOTED = 29 -SCE_RB_STDIN = 30 -SCE_RB_STDOUT = 31 -SCE_RB_STDERR = 40 -SCE_RB_UPPER_BOUND = 41 -SCE_B_DEFAULT = 0 -SCE_B_COMMENT = 1 -SCE_B_NUMBER = 2 -SCE_B_KEYWORD = 3 -SCE_B_STRING = 4 -SCE_B_PREPROCESSOR = 5 -SCE_B_OPERATOR = 6 -SCE_B_IDENTIFIER = 7 -SCE_B_DATE = 8 -SCE_B_STRINGEOL = 9 -SCE_B_KEYWORD2 = 10 -SCE_B_KEYWORD3 = 11 -SCE_B_KEYWORD4 = 12 -SCE_B_CONSTANT = 13 -SCE_B_ASM = 14 -SCE_B_LABEL = 15 -SCE_B_ERROR = 16 -SCE_B_HEXNUMBER = 17 -SCE_B_BINNUMBER = 18 
-SCE_PROPS_DEFAULT = 0 -SCE_PROPS_COMMENT = 1 -SCE_PROPS_SECTION = 2 -SCE_PROPS_ASSIGNMENT = 3 -SCE_PROPS_DEFVAL = 4 -SCE_PROPS_KEY = 5 -SCE_L_DEFAULT = 0 -SCE_L_COMMAND = 1 -SCE_L_TAG = 2 -SCE_L_MATH = 3 -SCE_L_COMMENT = 4 -SCE_LUA_DEFAULT = 0 -SCE_LUA_COMMENT = 1 -SCE_LUA_COMMENTLINE = 2 -SCE_LUA_COMMENTDOC = 3 -SCE_LUA_NUMBER = 4 -SCE_LUA_WORD = 5 -SCE_LUA_STRING = 6 -SCE_LUA_CHARACTER = 7 -SCE_LUA_LITERALSTRING = 8 -SCE_LUA_PREPROCESSOR = 9 -SCE_LUA_OPERATOR = 10 -SCE_LUA_IDENTIFIER = 11 -SCE_LUA_STRINGEOL = 12 -SCE_LUA_WORD2 = 13 -SCE_LUA_WORD3 = 14 -SCE_LUA_WORD4 = 15 -SCE_LUA_WORD5 = 16 -SCE_LUA_WORD6 = 17 -SCE_LUA_WORD7 = 18 -SCE_LUA_WORD8 = 19 -SCE_ERR_DEFAULT = 0 -SCE_ERR_PYTHON = 1 -SCE_ERR_GCC = 2 -SCE_ERR_MS = 3 -SCE_ERR_CMD = 4 -SCE_ERR_BORLAND = 5 -SCE_ERR_PERL = 6 -SCE_ERR_NET = 7 -SCE_ERR_LUA = 8 -SCE_ERR_CTAG = 9 -SCE_ERR_DIFF_CHANGED = 10 -SCE_ERR_DIFF_ADDITION = 11 -SCE_ERR_DIFF_DELETION = 12 -SCE_ERR_DIFF_MESSAGE = 13 -SCE_ERR_PHP = 14 -SCE_ERR_ELF = 15 -SCE_ERR_IFC = 16 -SCE_ERR_IFORT = 17 -SCE_ERR_ABSF = 18 -SCE_ERR_TIDY = 19 -SCE_ERR_JAVA_STACK = 20 -SCE_ERR_VALUE = 21 -SCE_BAT_DEFAULT = 0 -SCE_BAT_COMMENT = 1 -SCE_BAT_WORD = 2 -SCE_BAT_LABEL = 3 -SCE_BAT_HIDE = 4 -SCE_BAT_COMMAND = 5 -SCE_BAT_IDENTIFIER = 6 -SCE_BAT_OPERATOR = 7 -SCE_MAKE_DEFAULT = 0 -SCE_MAKE_COMMENT = 1 -SCE_MAKE_PREPROCESSOR = 2 -SCE_MAKE_IDENTIFIER = 3 -SCE_MAKE_OPERATOR = 4 -SCE_MAKE_TARGET = 5 -SCE_MAKE_IDEOL = 9 -SCE_DIFF_DEFAULT = 0 -SCE_DIFF_COMMENT = 1 -SCE_DIFF_COMMAND = 2 -SCE_DIFF_HEADER = 3 -SCE_DIFF_POSITION = 4 -SCE_DIFF_DELETED = 5 -SCE_DIFF_ADDED = 6 -SCE_DIFF_CHANGED = 7 -SCE_CONF_DEFAULT = 0 -SCE_CONF_COMMENT = 1 -SCE_CONF_NUMBER = 2 -SCE_CONF_IDENTIFIER = 3 -SCE_CONF_EXTENSION = 4 -SCE_CONF_PARAMETER = 5 -SCE_CONF_STRING = 6 -SCE_CONF_OPERATOR = 7 -SCE_CONF_IP = 8 -SCE_CONF_DIRECTIVE = 9 -SCE_AVE_DEFAULT = 0 -SCE_AVE_COMMENT = 1 -SCE_AVE_NUMBER = 2 -SCE_AVE_WORD = 3 -SCE_AVE_STRING = 6 -SCE_AVE_ENUM = 7 -SCE_AVE_STRINGEOL = 8 -SCE_AVE_IDENTIFIER = 9 
-SCE_AVE_OPERATOR = 10 -SCE_AVE_WORD1 = 11 -SCE_AVE_WORD2 = 12 -SCE_AVE_WORD3 = 13 -SCE_AVE_WORD4 = 14 -SCE_AVE_WORD5 = 15 -SCE_AVE_WORD6 = 16 -SCE_ADA_DEFAULT = 0 -SCE_ADA_WORD = 1 -SCE_ADA_IDENTIFIER = 2 -SCE_ADA_NUMBER = 3 -SCE_ADA_DELIMITER = 4 -SCE_ADA_CHARACTER = 5 -SCE_ADA_CHARACTEREOL = 6 -SCE_ADA_STRING = 7 -SCE_ADA_STRINGEOL = 8 -SCE_ADA_LABEL = 9 -SCE_ADA_COMMENTLINE = 10 -SCE_ADA_ILLEGAL = 11 -SCE_BAAN_DEFAULT = 0 -SCE_BAAN_COMMENT = 1 -SCE_BAAN_COMMENTDOC = 2 -SCE_BAAN_NUMBER = 3 -SCE_BAAN_WORD = 4 -SCE_BAAN_STRING = 5 -SCE_BAAN_PREPROCESSOR = 6 -SCE_BAAN_OPERATOR = 7 -SCE_BAAN_IDENTIFIER = 8 -SCE_BAAN_STRINGEOL = 9 -SCE_BAAN_WORD2 = 10 -SCE_LISP_DEFAULT = 0 -SCE_LISP_COMMENT = 1 -SCE_LISP_NUMBER = 2 -SCE_LISP_KEYWORD = 3 -SCE_LISP_KEYWORD_KW = 4 -SCE_LISP_SYMBOL = 5 -SCE_LISP_STRING = 6 -SCE_LISP_STRINGEOL = 8 -SCE_LISP_IDENTIFIER = 9 -SCE_LISP_OPERATOR = 10 -SCE_LISP_SPECIAL = 11 -SCE_LISP_MULTI_COMMENT = 12 -SCE_EIFFEL_DEFAULT = 0 -SCE_EIFFEL_COMMENTLINE = 1 -SCE_EIFFEL_NUMBER = 2 -SCE_EIFFEL_WORD = 3 -SCE_EIFFEL_STRING = 4 -SCE_EIFFEL_CHARACTER = 5 -SCE_EIFFEL_OPERATOR = 6 -SCE_EIFFEL_IDENTIFIER = 7 -SCE_EIFFEL_STRINGEOL = 8 -SCE_NNCRONTAB_DEFAULT = 0 -SCE_NNCRONTAB_COMMENT = 1 -SCE_NNCRONTAB_TASK = 2 -SCE_NNCRONTAB_SECTION = 3 -SCE_NNCRONTAB_KEYWORD = 4 -SCE_NNCRONTAB_MODIFIER = 5 -SCE_NNCRONTAB_ASTERISK = 6 -SCE_NNCRONTAB_NUMBER = 7 -SCE_NNCRONTAB_STRING = 8 -SCE_NNCRONTAB_ENVIRONMENT = 9 -SCE_NNCRONTAB_IDENTIFIER = 10 -SCE_FORTH_DEFAULT = 0 -SCE_FORTH_COMMENT = 1 -SCE_FORTH_COMMENT_ML = 2 -SCE_FORTH_IDENTIFIER = 3 -SCE_FORTH_CONTROL = 4 -SCE_FORTH_KEYWORD = 5 -SCE_FORTH_DEFWORD = 6 -SCE_FORTH_PREWORD1 = 7 -SCE_FORTH_PREWORD2 = 8 -SCE_FORTH_NUMBER = 9 -SCE_FORTH_STRING = 10 -SCE_FORTH_LOCALE = 11 -SCE_MATLAB_DEFAULT = 0 -SCE_MATLAB_COMMENT = 1 -SCE_MATLAB_COMMAND = 2 -SCE_MATLAB_NUMBER = 3 -SCE_MATLAB_KEYWORD = 4 -SCE_MATLAB_STRING = 5 -SCE_MATLAB_OPERATOR = 6 -SCE_MATLAB_IDENTIFIER = 7 -SCE_MATLAB_DOUBLEQUOTESTRING = 8 -SCE_SCRIPTOL_DEFAULT = 0 
-SCE_SCRIPTOL_WHITE = 1 -SCE_SCRIPTOL_COMMENTLINE = 2 -SCE_SCRIPTOL_PERSISTENT = 3 -SCE_SCRIPTOL_CSTYLE = 4 -SCE_SCRIPTOL_COMMENTBLOCK = 5 -SCE_SCRIPTOL_NUMBER = 6 -SCE_SCRIPTOL_STRING = 7 -SCE_SCRIPTOL_CHARACTER = 8 -SCE_SCRIPTOL_STRINGEOL = 9 -SCE_SCRIPTOL_KEYWORD = 10 -SCE_SCRIPTOL_OPERATOR = 11 -SCE_SCRIPTOL_IDENTIFIER = 12 -SCE_SCRIPTOL_TRIPLE = 13 -SCE_SCRIPTOL_CLASSNAME = 14 -SCE_SCRIPTOL_PREPROCESSOR = 15 -SCE_ASM_DEFAULT = 0 -SCE_ASM_COMMENT = 1 -SCE_ASM_NUMBER = 2 -SCE_ASM_STRING = 3 -SCE_ASM_OPERATOR = 4 -SCE_ASM_IDENTIFIER = 5 -SCE_ASM_CPUINSTRUCTION = 6 -SCE_ASM_MATHINSTRUCTION = 7 -SCE_ASM_REGISTER = 8 -SCE_ASM_DIRECTIVE = 9 -SCE_ASM_DIRECTIVEOPERAND = 10 -SCE_ASM_COMMENTBLOCK = 11 -SCE_ASM_CHARACTER = 12 -SCE_ASM_STRINGEOL = 13 -SCE_ASM_EXTINSTRUCTION = 14 -SCE_F_DEFAULT = 0 -SCE_F_COMMENT = 1 -SCE_F_NUMBER = 2 -SCE_F_STRING1 = 3 -SCE_F_STRING2 = 4 -SCE_F_STRINGEOL = 5 -SCE_F_OPERATOR = 6 -SCE_F_IDENTIFIER = 7 -SCE_F_WORD = 8 -SCE_F_WORD2 = 9 -SCE_F_WORD3 = 10 -SCE_F_PREPROCESSOR = 11 -SCE_F_OPERATOR2 = 12 -SCE_F_LABEL = 13 -SCE_F_CONTINUATION = 14 -SCE_CSS_DEFAULT = 0 -SCE_CSS_TAG = 1 -SCE_CSS_CLASS = 2 -SCE_CSS_PSEUDOCLASS = 3 -SCE_CSS_UNKNOWN_PSEUDOCLASS = 4 -SCE_CSS_OPERATOR = 5 -SCE_CSS_IDENTIFIER = 6 -SCE_CSS_UNKNOWN_IDENTIFIER = 7 -SCE_CSS_VALUE = 8 -SCE_CSS_COMMENT = 9 -SCE_CSS_ID = 10 -SCE_CSS_IMPORTANT = 11 -SCE_CSS_DIRECTIVE = 12 -SCE_CSS_DOUBLESTRING = 13 -SCE_CSS_SINGLESTRING = 14 -SCE_CSS_IDENTIFIER2 = 15 -SCE_CSS_ATTRIBUTE = 16 -SCE_CSS_IDENTIFIER3 = 17 -SCE_CSS_PSEUDOELEMENT = 18 -SCE_CSS_EXTENDED_IDENTIFIER = 19 -SCE_CSS_EXTENDED_PSEUDOCLASS = 20 -SCE_CSS_EXTENDED_PSEUDOELEMENT = 21 -SCE_POV_DEFAULT = 0 -SCE_POV_COMMENT = 1 -SCE_POV_COMMENTLINE = 2 -SCE_POV_NUMBER = 3 -SCE_POV_OPERATOR = 4 -SCE_POV_IDENTIFIER = 5 -SCE_POV_STRING = 6 -SCE_POV_STRINGEOL = 7 -SCE_POV_DIRECTIVE = 8 -SCE_POV_BADDIRECTIVE = 9 -SCE_POV_WORD2 = 10 -SCE_POV_WORD3 = 11 -SCE_POV_WORD4 = 12 -SCE_POV_WORD5 = 13 -SCE_POV_WORD6 = 14 -SCE_POV_WORD7 = 15 
-SCE_POV_WORD8 = 16 -SCE_LOUT_DEFAULT = 0 -SCE_LOUT_COMMENT = 1 -SCE_LOUT_NUMBER = 2 -SCE_LOUT_WORD = 3 -SCE_LOUT_WORD2 = 4 -SCE_LOUT_WORD3 = 5 -SCE_LOUT_WORD4 = 6 -SCE_LOUT_STRING = 7 -SCE_LOUT_OPERATOR = 8 -SCE_LOUT_IDENTIFIER = 9 -SCE_LOUT_STRINGEOL = 10 -SCE_ESCRIPT_DEFAULT = 0 -SCE_ESCRIPT_COMMENT = 1 -SCE_ESCRIPT_COMMENTLINE = 2 -SCE_ESCRIPT_COMMENTDOC = 3 -SCE_ESCRIPT_NUMBER = 4 -SCE_ESCRIPT_WORD = 5 -SCE_ESCRIPT_STRING = 6 -SCE_ESCRIPT_OPERATOR = 7 -SCE_ESCRIPT_IDENTIFIER = 8 -SCE_ESCRIPT_BRACE = 9 -SCE_ESCRIPT_WORD2 = 10 -SCE_ESCRIPT_WORD3 = 11 -SCE_PS_DEFAULT = 0 -SCE_PS_COMMENT = 1 -SCE_PS_DSC_COMMENT = 2 -SCE_PS_DSC_VALUE = 3 -SCE_PS_NUMBER = 4 -SCE_PS_NAME = 5 -SCE_PS_KEYWORD = 6 -SCE_PS_LITERAL = 7 -SCE_PS_IMMEVAL = 8 -SCE_PS_PAREN_ARRAY = 9 -SCE_PS_PAREN_DICT = 10 -SCE_PS_PAREN_PROC = 11 -SCE_PS_TEXT = 12 -SCE_PS_HEXSTRING = 13 -SCE_PS_BASE85STRING = 14 -SCE_PS_BADSTRINGCHAR = 15 -SCE_NSIS_DEFAULT = 0 -SCE_NSIS_COMMENT = 1 -SCE_NSIS_STRINGDQ = 2 -SCE_NSIS_STRINGLQ = 3 -SCE_NSIS_STRINGRQ = 4 -SCE_NSIS_FUNCTION = 5 -SCE_NSIS_VARIABLE = 6 -SCE_NSIS_LABEL = 7 -SCE_NSIS_USERDEFINED = 8 -SCE_NSIS_SECTIONDEF = 9 -SCE_NSIS_SUBSECTIONDEF = 10 -SCE_NSIS_IFDEFINEDEF = 11 -SCE_NSIS_MACRODEF = 12 -SCE_NSIS_STRINGVAR = 13 -SCE_NSIS_NUMBER = 14 -SCE_NSIS_SECTIONGROUP = 15 -SCE_NSIS_PAGEEX = 16 -SCE_NSIS_FUNCTIONDEF = 17 -SCE_NSIS_COMMENTBOX = 18 -SCE_MMIXAL_LEADWS = 0 -SCE_MMIXAL_COMMENT = 1 -SCE_MMIXAL_LABEL = 2 -SCE_MMIXAL_OPCODE = 3 -SCE_MMIXAL_OPCODE_PRE = 4 -SCE_MMIXAL_OPCODE_VALID = 5 -SCE_MMIXAL_OPCODE_UNKNOWN = 6 -SCE_MMIXAL_OPCODE_POST = 7 -SCE_MMIXAL_OPERANDS = 8 -SCE_MMIXAL_NUMBER = 9 -SCE_MMIXAL_REF = 10 -SCE_MMIXAL_CHAR = 11 -SCE_MMIXAL_STRING = 12 -SCE_MMIXAL_REGISTER = 13 -SCE_MMIXAL_HEX = 14 -SCE_MMIXAL_OPERATOR = 15 -SCE_MMIXAL_SYMBOL = 16 -SCE_MMIXAL_INCLUDE = 17 -SCE_CLW_DEFAULT = 0 -SCE_CLW_LABEL = 1 -SCE_CLW_COMMENT = 2 -SCE_CLW_STRING = 3 -SCE_CLW_USER_IDENTIFIER = 4 -SCE_CLW_INTEGER_CONSTANT = 5 -SCE_CLW_REAL_CONSTANT = 6 
-SCE_CLW_PICTURE_STRING = 7 -SCE_CLW_KEYWORD = 8 -SCE_CLW_COMPILER_DIRECTIVE = 9 -SCE_CLW_RUNTIME_EXPRESSIONS = 10 -SCE_CLW_BUILTIN_PROCEDURES_FUNCTION = 11 -SCE_CLW_STRUCTURE_DATA_TYPE = 12 -SCE_CLW_ATTRIBUTE = 13 -SCE_CLW_STANDARD_EQUATE = 14 -SCE_CLW_ERROR = 15 -SCE_CLW_DEPRECATED = 16 -SCE_LOT_DEFAULT = 0 -SCE_LOT_HEADER = 1 -SCE_LOT_BREAK = 2 -SCE_LOT_SET = 3 -SCE_LOT_PASS = 4 -SCE_LOT_FAIL = 5 -SCE_LOT_ABORT = 6 -SCE_YAML_DEFAULT = 0 -SCE_YAML_COMMENT = 1 -SCE_YAML_IDENTIFIER = 2 -SCE_YAML_KEYWORD = 3 -SCE_YAML_NUMBER = 4 -SCE_YAML_REFERENCE = 5 -SCE_YAML_DOCUMENT = 6 -SCE_YAML_TEXT = 7 -SCE_YAML_ERROR = 8 -SCE_YAML_OPERATOR = 9 -SCE_TEX_DEFAULT = 0 -SCE_TEX_SPECIAL = 1 -SCE_TEX_GROUP = 2 -SCE_TEX_SYMBOL = 3 -SCE_TEX_COMMAND = 4 -SCE_TEX_TEXT = 5 -SCE_METAPOST_DEFAULT = 0 -SCE_METAPOST_SPECIAL = 1 -SCE_METAPOST_GROUP = 2 -SCE_METAPOST_SYMBOL = 3 -SCE_METAPOST_COMMAND = 4 -SCE_METAPOST_TEXT = 5 -SCE_METAPOST_EXTRA = 6 -SCE_ERLANG_DEFAULT = 0 -SCE_ERLANG_COMMENT = 1 -SCE_ERLANG_VARIABLE = 2 -SCE_ERLANG_NUMBER = 3 -SCE_ERLANG_KEYWORD = 4 -SCE_ERLANG_STRING = 5 -SCE_ERLANG_OPERATOR = 6 -SCE_ERLANG_ATOM = 7 -SCE_ERLANG_FUNCTION_NAME = 8 -SCE_ERLANG_CHARACTER = 9 -SCE_ERLANG_MACRO = 10 -SCE_ERLANG_RECORD = 11 -SCE_ERLANG_SEPARATOR = 12 -SCE_ERLANG_NODE_NAME = 13 -SCE_ERLANG_UNKNOWN = 31 -SCE_MSSQL_DEFAULT = 0 -SCE_MSSQL_COMMENT = 1 -SCE_MSSQL_LINE_COMMENT = 2 -SCE_MSSQL_NUMBER = 3 -SCE_MSSQL_STRING = 4 -SCE_MSSQL_OPERATOR = 5 -SCE_MSSQL_IDENTIFIER = 6 -SCE_MSSQL_VARIABLE = 7 -SCE_MSSQL_COLUMN_NAME = 8 -SCE_MSSQL_STATEMENT = 9 -SCE_MSSQL_DATATYPE = 10 -SCE_MSSQL_SYSTABLE = 11 -SCE_MSSQL_GLOBAL_VARIABLE = 12 -SCE_MSSQL_FUNCTION = 13 -SCE_MSSQL_STORED_PROCEDURE = 14 -SCE_MSSQL_DEFAULT_PREF_DATATYPE = 15 -SCE_MSSQL_COLUMN_NAME_2 = 16 -SCE_V_DEFAULT = 0 -SCE_V_COMMENT = 1 -SCE_V_COMMENTLINE = 2 -SCE_V_COMMENTLINEBANG = 3 -SCE_V_NUMBER = 4 -SCE_V_WORD = 5 -SCE_V_STRING = 6 -SCE_V_WORD2 = 7 -SCE_V_WORD3 = 8 -SCE_V_PREPROCESSOR = 9 -SCE_V_OPERATOR = 10 -SCE_V_IDENTIFIER = 
11 -SCE_V_STRINGEOL = 12 -SCE_V_USER = 19 -SCE_KIX_DEFAULT = 0 -SCE_KIX_COMMENT = 1 -SCE_KIX_STRING1 = 2 -SCE_KIX_STRING2 = 3 -SCE_KIX_NUMBER = 4 -SCE_KIX_VAR = 5 -SCE_KIX_MACRO = 6 -SCE_KIX_KEYWORD = 7 -SCE_KIX_FUNCTIONS = 8 -SCE_KIX_OPERATOR = 9 -SCE_KIX_IDENTIFIER = 31 -SCE_GC_DEFAULT = 0 -SCE_GC_COMMENTLINE = 1 -SCE_GC_COMMENTBLOCK = 2 -SCE_GC_GLOBAL = 3 -SCE_GC_EVENT = 4 -SCE_GC_ATTRIBUTE = 5 -SCE_GC_CONTROL = 6 -SCE_GC_COMMAND = 7 -SCE_GC_STRING = 8 -SCE_GC_OPERATOR = 9 -SCE_SN_DEFAULT = 0 -SCE_SN_CODE = 1 -SCE_SN_COMMENTLINE = 2 -SCE_SN_COMMENTLINEBANG = 3 -SCE_SN_NUMBER = 4 -SCE_SN_WORD = 5 -SCE_SN_STRING = 6 -SCE_SN_WORD2 = 7 -SCE_SN_WORD3 = 8 -SCE_SN_PREPROCESSOR = 9 -SCE_SN_OPERATOR = 10 -SCE_SN_IDENTIFIER = 11 -SCE_SN_STRINGEOL = 12 -SCE_SN_REGEXTAG = 13 -SCE_SN_SIGNAL = 14 -SCE_SN_USER = 19 -SCE_AU3_DEFAULT = 0 -SCE_AU3_COMMENT = 1 -SCE_AU3_COMMENTBLOCK = 2 -SCE_AU3_NUMBER = 3 -SCE_AU3_FUNCTION = 4 -SCE_AU3_KEYWORD = 5 -SCE_AU3_MACRO = 6 -SCE_AU3_STRING = 7 -SCE_AU3_OPERATOR = 8 -SCE_AU3_VARIABLE = 9 -SCE_AU3_SENT = 10 -SCE_AU3_PREPROCESSOR = 11 -SCE_AU3_SPECIAL = 12 -SCE_AU3_EXPAND = 13 -SCE_AU3_COMOBJ = 14 -SCE_AU3_UDF = 15 -SCE_APDL_DEFAULT = 0 -SCE_APDL_COMMENT = 1 -SCE_APDL_COMMENTBLOCK = 2 -SCE_APDL_NUMBER = 3 -SCE_APDL_STRING = 4 -SCE_APDL_OPERATOR = 5 -SCE_APDL_WORD = 6 -SCE_APDL_PROCESSOR = 7 -SCE_APDL_COMMAND = 8 -SCE_APDL_SLASHCOMMAND = 9 -SCE_APDL_STARCOMMAND = 10 -SCE_APDL_ARGUMENT = 11 -SCE_APDL_FUNCTION = 12 -SCE_SH_DEFAULT = 0 -SCE_SH_ERROR = 1 -SCE_SH_COMMENTLINE = 2 -SCE_SH_NUMBER = 3 -SCE_SH_WORD = 4 -SCE_SH_STRING = 5 -SCE_SH_CHARACTER = 6 -SCE_SH_OPERATOR = 7 -SCE_SH_IDENTIFIER = 8 -SCE_SH_SCALAR = 9 -SCE_SH_PARAM = 10 -SCE_SH_BACKTICKS = 11 -SCE_SH_HERE_DELIM = 12 -SCE_SH_HERE_Q = 13 -SCE_ASN1_DEFAULT = 0 -SCE_ASN1_COMMENT = 1 -SCE_ASN1_IDENTIFIER = 2 -SCE_ASN1_STRING = 3 -SCE_ASN1_OID = 4 -SCE_ASN1_SCALAR = 5 -SCE_ASN1_KEYWORD = 6 -SCE_ASN1_ATTRIBUTE = 7 -SCE_ASN1_DESCRIPTOR = 8 -SCE_ASN1_TYPE = 9 -SCE_ASN1_OPERATOR = 10 
-SCE_VHDL_DEFAULT = 0 -SCE_VHDL_COMMENT = 1 -SCE_VHDL_COMMENTLINEBANG = 2 -SCE_VHDL_NUMBER = 3 -SCE_VHDL_STRING = 4 -SCE_VHDL_OPERATOR = 5 -SCE_VHDL_IDENTIFIER = 6 -SCE_VHDL_STRINGEOL = 7 -SCE_VHDL_KEYWORD = 8 -SCE_VHDL_STDOPERATOR = 9 -SCE_VHDL_ATTRIBUTE = 10 -SCE_VHDL_STDFUNCTION = 11 -SCE_VHDL_STDPACKAGE = 12 -SCE_VHDL_STDTYPE = 13 -SCE_VHDL_USERWORD = 14 -SCE_CAML_DEFAULT = 0 -SCE_CAML_IDENTIFIER = 1 -SCE_CAML_TAGNAME = 2 -SCE_CAML_KEYWORD = 3 -SCE_CAML_KEYWORD2 = 4 -SCE_CAML_KEYWORD3 = 5 -SCE_CAML_LINENUM = 6 -SCE_CAML_OPERATOR = 7 -SCE_CAML_NUMBER = 8 -SCE_CAML_CHAR = 9 -SCE_CAML_STRING = 11 -SCE_CAML_COMMENT = 12 -SCE_CAML_COMMENT1 = 13 -SCE_CAML_COMMENT2 = 14 -SCE_CAML_COMMENT3 = 15 -SCE_HA_DEFAULT = 0 -SCE_HA_IDENTIFIER = 1 -SCE_HA_KEYWORD = 2 -SCE_HA_NUMBER = 3 -SCE_HA_STRING = 4 -SCE_HA_CHARACTER = 5 -SCE_HA_CLASS = 6 -SCE_HA_MODULE = 7 -SCE_HA_CAPITAL = 8 -SCE_HA_DATA = 9 -SCE_HA_IMPORT = 10 -SCE_HA_OPERATOR = 11 -SCE_HA_INSTANCE = 12 -SCE_HA_COMMENTLINE = 13 -SCE_HA_COMMENTBLOCK = 14 -SCE_HA_COMMENTBLOCK2 = 15 -SCE_HA_COMMENTBLOCK3 = 16 -SCE_T3_DEFAULT = 0 -SCE_T3_X_DEFAULT = 1 -SCE_T3_PREPROCESSOR = 2 -SCE_T3_BLOCK_COMMENT = 3 -SCE_T3_LINE_COMMENT = 4 -SCE_T3_OPERATOR = 5 -SCE_T3_KEYWORD = 6 -SCE_T3_NUMBER = 7 -SCE_T3_IDENTIFIER = 8 -SCE_T3_S_STRING = 9 -SCE_T3_D_STRING = 10 -SCE_T3_X_STRING = 11 -SCE_T3_LIB_DIRECTIVE = 12 -SCE_T3_MSG_PARAM = 13 -SCE_T3_HTML_TAG = 14 -SCE_T3_HTML_DEFAULT = 15 -SCE_T3_HTML_STRING = 16 -SCE_T3_USER1 = 17 -SCE_T3_USER2 = 18 -SCE_T3_USER3 = 19 -SCE_T3_BRACE = 20 -SCE_REBOL_DEFAULT = 0 -SCE_REBOL_COMMENTLINE = 1 -SCE_REBOL_COMMENTBLOCK = 2 -SCE_REBOL_PREFACE = 3 -SCE_REBOL_OPERATOR = 4 -SCE_REBOL_CHARACTER = 5 -SCE_REBOL_QUOTEDSTRING = 6 -SCE_REBOL_BRACEDSTRING = 7 -SCE_REBOL_NUMBER = 8 -SCE_REBOL_PAIR = 9 -SCE_REBOL_TUPLE = 10 -SCE_REBOL_BINARY = 11 -SCE_REBOL_MONEY = 12 -SCE_REBOL_ISSUE = 13 -SCE_REBOL_TAG = 14 -SCE_REBOL_FILE = 15 -SCE_REBOL_EMAIL = 16 -SCE_REBOL_URL = 17 -SCE_REBOL_DATE = 18 -SCE_REBOL_TIME = 19 
-SCE_REBOL_IDENTIFIER = 20 -SCE_REBOL_WORD = 21 -SCE_REBOL_WORD2 = 22 -SCE_REBOL_WORD3 = 23 -SCE_REBOL_WORD4 = 24 -SCE_REBOL_WORD5 = 25 -SCE_REBOL_WORD6 = 26 -SCE_REBOL_WORD7 = 27 -SCE_REBOL_WORD8 = 28 -SCE_SQL_DEFAULT = 0 -SCE_SQL_COMMENT = 1 -SCE_SQL_COMMENTLINE = 2 -SCE_SQL_COMMENTDOC = 3 -SCE_SQL_NUMBER = 4 -SCE_SQL_WORD = 5 -SCE_SQL_STRING = 6 -SCE_SQL_CHARACTER = 7 -SCE_SQL_SQLPLUS = 8 -SCE_SQL_SQLPLUS_PROMPT = 9 -SCE_SQL_OPERATOR = 10 -SCE_SQL_IDENTIFIER = 11 -SCE_SQL_SQLPLUS_COMMENT = 13 -SCE_SQL_COMMENTLINEDOC = 15 -SCE_SQL_WORD2 = 16 -SCE_SQL_COMMENTDOCKEYWORD = 17 -SCE_SQL_COMMENTDOCKEYWORDERROR = 18 -SCE_SQL_USER1 = 19 -SCE_SQL_USER2 = 20 -SCE_SQL_USER3 = 21 -SCE_SQL_USER4 = 22 -SCE_SQL_QUOTEDIDENTIFIER = 23 -SCE_ST_DEFAULT = 0 -SCE_ST_STRING = 1 -SCE_ST_NUMBER = 2 -SCE_ST_COMMENT = 3 -SCE_ST_SYMBOL = 4 -SCE_ST_BINARY = 5 -SCE_ST_BOOL = 6 -SCE_ST_SELF = 7 -SCE_ST_SUPER = 8 -SCE_ST_NIL = 9 -SCE_ST_GLOBAL = 10 -SCE_ST_RETURN = 11 -SCE_ST_SPECIAL = 12 -SCE_ST_KWSEND = 13 -SCE_ST_ASSIGN = 14 -SCE_ST_CHARACTER = 15 -SCE_ST_SPEC_SEL = 16 -SCE_FS_DEFAULT = 0 -SCE_FS_COMMENT = 1 -SCE_FS_COMMENTLINE = 2 -SCE_FS_COMMENTDOC = 3 -SCE_FS_COMMENTLINEDOC = 4 -SCE_FS_COMMENTDOCKEYWORD = 5 -SCE_FS_COMMENTDOCKEYWORDERROR = 6 -SCE_FS_KEYWORD = 7 -SCE_FS_KEYWORD2 = 8 -SCE_FS_KEYWORD3 = 9 -SCE_FS_KEYWORD4 = 10 -SCE_FS_NUMBER = 11 -SCE_FS_STRING = 12 -SCE_FS_PREPROCESSOR = 13 -SCE_FS_OPERATOR = 14 -SCE_FS_IDENTIFIER = 15 -SCE_FS_DATE = 16 -SCE_FS_STRINGEOL = 17 -SCE_FS_CONSTANT = 18 -SCE_FS_ASM = 19 -SCE_FS_LABEL = 20 -SCE_FS_ERROR = 21 -SCE_FS_HEXNUMBER = 22 -SCE_FS_BINNUMBER = 23 -SCE_CSOUND_DEFAULT = 0 -SCE_CSOUND_COMMENT = 1 -SCE_CSOUND_NUMBER = 2 -SCE_CSOUND_OPERATOR = 3 -SCE_CSOUND_INSTR = 4 -SCE_CSOUND_IDENTIFIER = 5 -SCE_CSOUND_OPCODE = 6 -SCE_CSOUND_HEADERSTMT = 7 -SCE_CSOUND_USERKEYWORD = 8 -SCE_CSOUND_COMMENTBLOCK = 9 -SCE_CSOUND_PARAM = 10 -SCE_CSOUND_ARATE_VAR = 11 -SCE_CSOUND_KRATE_VAR = 12 -SCE_CSOUND_IRATE_VAR = 13 -SCE_CSOUND_GLOBAL_VAR = 14 
-SCE_CSOUND_STRINGEOL = 15 -SCE_INNO_DEFAULT = 0 -SCE_INNO_COMMENT = 1 -SCE_INNO_KEYWORD = 2 -SCE_INNO_PARAMETER = 3 -SCE_INNO_SECTION = 4 -SCE_INNO_PREPROC = 5 -SCE_INNO_PREPROC_INLINE = 6 -SCE_INNO_COMMENT_PASCAL = 7 -SCE_INNO_KEYWORD_PASCAL = 8 -SCE_INNO_KEYWORD_USER = 9 -SCE_INNO_STRING_DOUBLE = 10 -SCE_INNO_STRING_SINGLE = 11 -SCE_INNO_IDENTIFIER = 12 -SCE_OPAL_SPACE = 0 -SCE_OPAL_COMMENT_BLOCK = 1 -SCE_OPAL_COMMENT_LINE = 2 -SCE_OPAL_INTEGER = 3 -SCE_OPAL_KEYWORD = 4 -SCE_OPAL_SORT = 5 -SCE_OPAL_STRING = 6 -SCE_OPAL_PAR = 7 -SCE_OPAL_BOOL_CONST = 8 -SCE_OPAL_DEFAULT = 32 -SCE_SPICE_DEFAULT = 0 -SCE_SPICE_IDENTIFIER = 1 -SCE_SPICE_KEYWORD = 2 -SCE_SPICE_KEYWORD2 = 3 -SCE_SPICE_KEYWORD3 = 4 -SCE_SPICE_NUMBER = 5 -SCE_SPICE_DELIMITER = 6 -SCE_SPICE_VALUE = 7 -SCE_SPICE_COMMENTLINE = 8 -SCE_CMAKE_DEFAULT = 0 -SCE_CMAKE_COMMENT = 1 -SCE_CMAKE_STRINGDQ = 2 -SCE_CMAKE_STRINGLQ = 3 -SCE_CMAKE_STRINGRQ = 4 -SCE_CMAKE_COMMANDS = 5 -SCE_CMAKE_PARAMETERS = 6 -SCE_CMAKE_VARIABLE = 7 -SCE_CMAKE_USERDEFINED = 8 -SCE_CMAKE_WHILEDEF = 9 -SCE_CMAKE_FOREACHDEF = 10 -SCE_CMAKE_IFDEFINEDEF = 11 -SCE_CMAKE_MACRODEF = 12 -SCE_CMAKE_STRINGVAR = 13 -SCE_CMAKE_NUMBER = 14 -SCE_GAP_DEFAULT = 0 -SCE_GAP_IDENTIFIER = 1 -SCE_GAP_KEYWORD = 2 -SCE_GAP_KEYWORD2 = 3 -SCE_GAP_KEYWORD3 = 4 -SCE_GAP_KEYWORD4 = 5 -SCE_GAP_STRING = 6 -SCE_GAP_CHAR = 7 -SCE_GAP_OPERATOR = 8 -SCE_GAP_COMMENT = 9 -SCE_GAP_NUMBER = 10 -SCE_GAP_STRINGEOL = 11 -SCE_PLM_DEFAULT = 0 -SCE_PLM_COMMENT = 1 -SCE_PLM_STRING = 2 -SCE_PLM_NUMBER = 3 -SCE_PLM_IDENTIFIER = 4 -SCE_PLM_OPERATOR = 5 -SCE_PLM_CONTROL = 6 -SCE_PLM_KEYWORD = 7 -SCE_4GL_DEFAULT = 0 -SCE_4GL_NUMBER = 1 -SCE_4GL_WORD = 2 -SCE_4GL_STRING = 3 -SCE_4GL_CHARACTER = 4 -SCE_4GL_PREPROCESSOR = 5 -SCE_4GL_OPERATOR = 6 -SCE_4GL_IDENTIFIER = 7 -SCE_4GL_BLOCK = 8 -SCE_4GL_END = 9 -SCE_4GL_COMMENT1 = 10 -SCE_4GL_COMMENT2 = 11 -SCE_4GL_COMMENT3 = 12 -SCE_4GL_COMMENT4 = 13 -SCE_4GL_COMMENT5 = 14 -SCE_4GL_COMMENT6 = 15 -SCE_4GL_DEFAULT_ = 16 -SCE_4GL_NUMBER_ = 17 
-SCE_4GL_WORD_ = 18 -SCE_4GL_STRING_ = 19 -SCE_4GL_CHARACTER_ = 20 -SCE_4GL_PREPROCESSOR_ = 21 -SCE_4GL_OPERATOR_ = 22 -SCE_4GL_IDENTIFIER_ = 23 -SCE_4GL_BLOCK_ = 24 -SCE_4GL_END_ = 25 -SCE_4GL_COMMENT1_ = 26 -SCE_4GL_COMMENT2_ = 27 -SCE_4GL_COMMENT3_ = 28 -SCE_4GL_COMMENT4_ = 29 -SCE_4GL_COMMENT5_ = 30 -SCE_4GL_COMMENT6_ = 31 -SCE_ABAQUS_DEFAULT = 0 -SCE_ABAQUS_COMMENT = 1 -SCE_ABAQUS_COMMENTBLOCK = 2 -SCE_ABAQUS_NUMBER = 3 -SCE_ABAQUS_STRING = 4 -SCE_ABAQUS_OPERATOR = 5 -SCE_ABAQUS_WORD = 6 -SCE_ABAQUS_PROCESSOR = 7 -SCE_ABAQUS_COMMAND = 8 -SCE_ABAQUS_SLASHCOMMAND = 9 -SCE_ABAQUS_STARCOMMAND = 10 -SCE_ABAQUS_ARGUMENT = 11 -SCE_ABAQUS_FUNCTION = 12 -SCE_ASY_DEFAULT = 0 -SCE_ASY_COMMENT = 1 -SCE_ASY_COMMENTLINE = 2 -SCE_ASY_NUMBER = 3 -SCE_ASY_WORD = 4 -SCE_ASY_STRING = 5 -SCE_ASY_CHARACTER = 6 -SCE_ASY_OPERATOR = 7 -SCE_ASY_IDENTIFIER = 8 -SCE_ASY_STRINGEOL = 9 -SCE_ASY_COMMENTLINEDOC = 10 -SCE_ASY_WORD2 = 11 -SCE_R_DEFAULT = 0 -SCE_R_COMMENT = 1 -SCE_R_KWORD = 2 -SCE_R_BASEKWORD = 3 -SCE_R_OTHERKWORD = 4 -SCE_R_NUMBER = 5 -SCE_R_STRING = 6 -SCE_R_STRING2 = 7 -SCE_R_OPERATOR = 8 -SCE_R_IDENTIFIER = 9 -SCE_R_INFIX = 10 -SCE_R_INFIXEOL = 11 -SCE_MAGIK_DEFAULT = 0 -SCE_MAGIK_COMMENT = 1 -SCE_MAGIK_HYPER_COMMENT = 16 -SCE_MAGIK_STRING = 2 -SCE_MAGIK_CHARACTER = 3 -SCE_MAGIK_NUMBER = 4 -SCE_MAGIK_IDENTIFIER = 5 -SCE_MAGIK_OPERATOR = 6 -SCE_MAGIK_FLOW = 7 -SCE_MAGIK_CONTAINER = 8 -SCE_MAGIK_BRACKET_BLOCK = 9 -SCE_MAGIK_BRACE_BLOCK = 10 -SCE_MAGIK_SQBRACKET_BLOCK = 11 -SCE_MAGIK_UNKNOWN_KEYWORD = 12 -SCE_MAGIK_KEYWORD = 13 -SCE_MAGIK_PRAGMA = 14 -SCE_MAGIK_SYMBOL = 15 -SCE_POWERSHELL_DEFAULT = 0 -SCE_POWERSHELL_COMMENT = 1 -SCE_POWERSHELL_STRING = 2 -SCE_POWERSHELL_CHARACTER = 3 -SCE_POWERSHELL_NUMBER = 4 -SCE_POWERSHELL_VARIABLE = 5 -SCE_POWERSHELL_OPERATOR = 6 -SCE_POWERSHELL_IDENTIFIER = 7 -SCE_POWERSHELL_KEYWORD = 8 -SCE_POWERSHELL_CMDLET = 9 -SCE_POWERSHELL_ALIAS = 10 -SCE_MYSQL_DEFAULT = 0 -SCE_MYSQL_COMMENT = 1 -SCE_MYSQL_COMMENTLINE = 2 -SCE_MYSQL_VARIABLE = 3 
-SCE_MYSQL_SYSTEMVARIABLE = 4 -SCE_MYSQL_KNOWNSYSTEMVARIABLE = 5 -SCE_MYSQL_NUMBER = 6 -SCE_MYSQL_MAJORKEYWORD = 7 -SCE_MYSQL_KEYWORD = 8 -SCE_MYSQL_DATABASEOBJECT = 9 -SCE_MYSQL_PROCEDUREKEYWORD = 10 -SCE_MYSQL_STRING = 11 -SCE_MYSQL_SQSTRING = 12 -SCE_MYSQL_DQSTRING = 13 -SCE_MYSQL_OPERATOR = 14 -SCE_MYSQL_FUNCTION = 15 -SCE_MYSQL_IDENTIFIER = 16 -SCE_MYSQL_QUOTEDIDENTIFIER = 17 -SCE_MYSQL_USER1 = 18 -SCE_MYSQL_USER2 = 19 -SCE_MYSQL_USER3 = 20 -SCE_PO_DEFAULT = 0 -SCE_PO_COMMENT = 1 -SCE_PO_MSGID = 2 -SCE_PO_MSGID_TEXT = 3 -SCE_PO_MSGSTR = 4 -SCE_PO_MSGSTR_TEXT = 5 -SCE_PO_MSGCTXT = 6 -SCE_PO_MSGCTXT_TEXT = 7 -SCE_PO_FUZZY = 8 -SCLEX_ASP = 29 -SCLEX_PHP = 30 diff --git a/lib/pythonwin/pywin/scintilla/view.py b/lib/pythonwin/pywin/scintilla/view.py deleted file mode 100644 index 3783666f..00000000 --- a/lib/pythonwin/pywin/scintilla/view.py +++ /dev/null @@ -1,832 +0,0 @@ -# A general purpose MFC CCtrlView view that uses Scintilla. - -import array -import os -import re -import string -import struct -import sys - -import __main__ # for attribute lookup -import afxres -import win32con -import win32ui -from pywin.mfc import dialog, docview - -from . import IDLEenvironment # IDLE emulation. -from . 
import bindings, control, keycodes, scintillacon - -PRINTDLGORD = 1538 -IDC_PRINT_MAG_EDIT = 1010 -EM_FORMATRANGE = win32con.WM_USER + 57 - -wordbreaks = "._" + string.ascii_uppercase + string.ascii_lowercase + string.digits - -patImport = re.compile("import (?P.*)") - -_event_commands = [ - # File menu - "win32ui.ID_FILE_LOCATE", - "win32ui.ID_FILE_CHECK", - "afxres.ID_FILE_CLOSE", - "afxres.ID_FILE_NEW", - "afxres.ID_FILE_OPEN", - "afxres.ID_FILE_SAVE", - "afxres.ID_FILE_SAVE_AS", - "win32ui.ID_FILE_SAVE_ALL", - # Edit menu - "afxres.ID_EDIT_UNDO", - "afxres.ID_EDIT_REDO", - "afxres.ID_EDIT_CUT", - "afxres.ID_EDIT_COPY", - "afxres.ID_EDIT_PASTE", - "afxres.ID_EDIT_SELECT_ALL", - "afxres.ID_EDIT_FIND", - "afxres.ID_EDIT_REPEAT", - "afxres.ID_EDIT_REPLACE", - # View menu - "win32ui.ID_VIEW_WHITESPACE", - "win32ui.ID_VIEW_FIXED_FONT", - "win32ui.ID_VIEW_BROWSE", - "win32ui.ID_VIEW_INTERACTIVE", - # Window menu - "afxres.ID_WINDOW_ARRANGE", - "afxres.ID_WINDOW_CASCADE", - "afxres.ID_WINDOW_NEW", - "afxres.ID_WINDOW_SPLIT", - "afxres.ID_WINDOW_TILE_HORZ", - "afxres.ID_WINDOW_TILE_VERT", - # Others - "afxres.ID_APP_EXIT", - "afxres.ID_APP_ABOUT", -] - -_extra_event_commands = [ - ("EditDelete", afxres.ID_EDIT_CLEAR), - ("LocateModule", win32ui.ID_FILE_LOCATE), - ("GotoLine", win32ui.ID_EDIT_GOTO_LINE), - ("DbgBreakpointToggle", win32ui.IDC_DBG_ADD), - ("DbgGo", win32ui.IDC_DBG_GO), - ("DbgStepOver", win32ui.IDC_DBG_STEPOVER), - ("DbgStep", win32ui.IDC_DBG_STEP), - ("DbgStepOut", win32ui.IDC_DBG_STEPOUT), - ("DbgBreakpointClearAll", win32ui.IDC_DBG_CLEAR), - ("DbgClose", win32ui.IDC_DBG_CLOSE), -] - -event_commands = [] - - -def _CreateEvents(): - for name in _event_commands: - val = eval(name) - name_parts = name.split("_")[1:] - name_parts = [p.capitalize() for p in name_parts] - event = "".join(name_parts) - event_commands.append((event, val)) - for name, id in _extra_event_commands: - event_commands.append((name, id)) - - -_CreateEvents() -del _event_commands -del 
_extra_event_commands - -command_reflectors = [ - (win32ui.ID_EDIT_UNDO, win32con.WM_UNDO), - (win32ui.ID_EDIT_REDO, scintillacon.SCI_REDO), - (win32ui.ID_EDIT_CUT, win32con.WM_CUT), - (win32ui.ID_EDIT_COPY, win32con.WM_COPY), - (win32ui.ID_EDIT_PASTE, win32con.WM_PASTE), - (win32ui.ID_EDIT_CLEAR, win32con.WM_CLEAR), - (win32ui.ID_EDIT_SELECT_ALL, scintillacon.SCI_SELECTALL), -] - - -def DoBraceMatch(control): - curPos = control.SCIGetCurrentPos() - charBefore = " " - if curPos: - charBefore = control.SCIGetCharAt(curPos - 1) - charAt = control.SCIGetCharAt(curPos) - braceAtPos = braceOpposite = -1 - if charBefore in "[](){}": - braceAtPos = curPos - 1 - if braceAtPos == -1: - if charAt in "[](){}": - braceAtPos = curPos - if braceAtPos != -1: - braceOpposite = control.SCIBraceMatch(braceAtPos, 0) - if braceAtPos != -1 and braceOpposite == -1: - control.SCIBraceBadHighlight(braceAtPos) - else: - # either clear them both or set them both. - control.SCIBraceHighlight(braceAtPos, braceOpposite) - - -def _get_class_attributes(ob): - # Recurse into base classes looking for attributes - items = [] - try: - items = items + dir(ob) - for i in ob.__bases__: - for item in _get_class_attributes(i): - if item not in items: - items.append(item) - except AttributeError: - pass - return items - - -# Supposed to look like an MFC CEditView, but -# also supports IDLE extensions and other source code generic features. -class CScintillaView(docview.CtrlView, control.CScintillaColorEditInterface): - def __init__(self, doc): - docview.CtrlView.__init__( - self, - doc, - "Scintilla", - win32con.WS_CHILD - | win32con.WS_VSCROLL - | win32con.WS_HSCROLL - | win32con.WS_CLIPCHILDREN - | win32con.WS_VISIBLE, - ) - self._tabWidth = ( - 8 # Mirror of what we send to Scintilla - never change this directly - ) - self.bAutoCompleteAttributes = 1 - self.bShowCallTips = 1 - self.bMatchBraces = 0 # Editor option will default this to true later! 
- self.bindings = bindings.BindingsManager(self) - - self.idle = IDLEenvironment.IDLEEditorWindow(self) - self.idle.IDLEExtension("AutoExpand") - # SendScintilla is called so frequently it is worth optimizing. - self.SendScintilla = self._obj_.SendMessage - - def _MakeColorizer(self): - ext = os.path.splitext(self.GetDocument().GetPathName())[1] - from . import formatter - - return formatter.BuiltinPythonSourceFormatter(self, ext) - - # def SendScintilla(self, msg, w=0, l=0): - # return self._obj_.SendMessage(msg, w, l) - - def SCISetTabWidth(self, width): - # I need to remember the tab-width for the AutoIndent extension. This may go. - self._tabWidth = width - control.CScintillaEditInterface.SCISetTabWidth(self, width) - - def GetTabWidth(self): - return self._tabWidth - - def HookHandlers(self): - # Create events for all the menu names. - for name, val in event_commands: - # handler = lambda id, code, tosend=val, parent=parent: parent.OnCommand(tosend, 0) and 0 - self.bindings.bind(name, None, cid=val) - - # Hook commands that do nothing other than send Scintilla messages. 
- for command, reflection in command_reflectors: - handler = ( - lambda id, code, ss=self.SendScintilla, tosend=reflection: ss(tosend) - and 0 - ) - self.HookCommand(handler, command) - - self.HookCommand(self.OnCmdViewWS, win32ui.ID_VIEW_WHITESPACE) - self.HookCommandUpdate(self.OnUpdateViewWS, win32ui.ID_VIEW_WHITESPACE) - self.HookCommand( - self.OnCmdViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES - ) - self.HookCommandUpdate( - self.OnUpdateViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES - ) - self.HookCommand(self.OnCmdViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE) - self.HookCommandUpdate(self.OnUpdateViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE) - self.HookCommand(self.OnCmdViewEOL, win32ui.ID_VIEW_EOL) - self.HookCommandUpdate(self.OnUpdateViewEOL, win32ui.ID_VIEW_EOL) - self.HookCommand(self.OnCmdViewFixedFont, win32ui.ID_VIEW_FIXED_FONT) - self.HookCommandUpdate(self.OnUpdateViewFixedFont, win32ui.ID_VIEW_FIXED_FONT) - self.HookCommand(self.OnCmdFileLocate, win32ui.ID_FILE_LOCATE) - self.HookCommand(self.OnCmdEditFind, win32ui.ID_EDIT_FIND) - self.HookCommand(self.OnCmdEditRepeat, win32ui.ID_EDIT_REPEAT) - self.HookCommand(self.OnCmdEditReplace, win32ui.ID_EDIT_REPLACE) - self.HookCommand(self.OnCmdGotoLine, win32ui.ID_EDIT_GOTO_LINE) - self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT) - self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT_DIRECT) - self.HookCommand(self.OnFilePrintPreview, win32ui.ID_FILE_PRINT_PREVIEW) - # Key bindings. 
- self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN) - self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN) - # Hook wheeley mouse events - # self.HookMessage(self.OnMouseWheel, win32con.WM_MOUSEWHEEL) - self.HookFormatter() - - def OnInitialUpdate(self): - doc = self.GetDocument() - - # Enable Unicode - self.SendScintilla(scintillacon.SCI_SETCODEPAGE, scintillacon.SC_CP_UTF8, 0) - self.SendScintilla(scintillacon.SCI_SETKEYSUNICODE, 1, 0) - - # Create margins - self.SendScintilla( - scintillacon.SCI_SETMARGINTYPEN, 1, scintillacon.SC_MARGIN_SYMBOL - ) - self.SendScintilla(scintillacon.SCI_SETMARGINMASKN, 1, 0xF) - self.SendScintilla( - scintillacon.SCI_SETMARGINTYPEN, 2, scintillacon.SC_MARGIN_SYMBOL - ) - self.SendScintilla( - scintillacon.SCI_SETMARGINMASKN, 2, scintillacon.SC_MASK_FOLDERS - ) - self.SendScintilla(scintillacon.SCI_SETMARGINSENSITIVEN, 2, 1) - - self.GetDocument().HookViewNotifications( - self - ) # is there an MFC way to grab this? - self.HookHandlers() - - # Load the configuration information. - self.OnWinIniChange(None) - - self.SetSel() - - self.GetDocument().FinalizeViewCreation( - self - ) # is there an MFC way to grab this? - - def _GetSubConfigNames(self): - return None # By default we use only sections without sub-sections. - - def OnWinIniChange(self, section=None): - self.bindings.prepare_configure() - try: - self.DoConfigChange() - finally: - self.bindings.complete_configure() - - def DoConfigChange(self): - # Bit of a hack I dont kow what to do about - these should be "editor options" - from pywin.framework.editor import GetEditorOption - - self.bAutoCompleteAttributes = GetEditorOption("Autocomplete Attributes", 1) - self.bShowCallTips = GetEditorOption("Show Call Tips", 1) - # Update the key map and extension data. 
- configManager.configure(self, self._GetSubConfigNames()) - if configManager.last_error: - win32ui.MessageBox(configManager.last_error, "Configuration Error") - self.bMatchBraces = GetEditorOption("Match Braces", 1) - self.ApplyFormattingStyles(1) - - def OnDestroy(self, msg): - self.bindings.close() - self.bindings = None - self.idle.close() - self.idle = None - control.CScintillaColorEditInterface.close(self) - return docview.CtrlView.OnDestroy(self, msg) - - def OnMouseWheel(self, msg): - zDelta = msg[2] >> 16 - vpos = self.GetScrollPos(win32con.SB_VERT) - vpos = vpos - zDelta / 40 # 3 lines per notch - self.SetScrollPos(win32con.SB_VERT, vpos) - self.SendScintilla( - win32con.WM_VSCROLL, (vpos << 16) | win32con.SB_THUMBPOSITION, 0 - ) - - def OnBraceMatch(self, std, extra): - if not self.bMatchBraces: - return - DoBraceMatch(self) - - def OnNeedShown(self, std, extra): - notify = self.SCIUnpackNotifyMessage(extra) - # OnNeedShown is called before an edit operation when - # text is folded (as it is possible the text insertion will happen - # in a folded region.) As this happens _before_ the insert, - # we ignore the length (if we are at EOF, pos + length may - # actually be beyond the end of buffer) - self.EnsureCharsVisible(notify.position) - - def EnsureCharsVisible(self, start, end=None): - if end is None: - end = start - lineStart = self.LineFromChar(min(start, end)) - lineEnd = self.LineFromChar(max(start, end)) - while lineStart <= lineEnd: - self.SCIEnsureVisible(lineStart) - lineStart = lineStart + 1 - - # Helper to add an event to a menu. - def AppendMenu(self, menu, text="", event=None, flags=None, checked=0): - if event is None: - assert flags is not None, "No event or custom flags!" - cmdid = 0 - else: - cmdid = self.bindings.get_command_id(event) - if cmdid is None: - # No event of that name - no point displaying it. 
- print( - 'View.AppendMenu(): Unknown event "%s" specified for menu text "%s" - ignored' - % (event, text) - ) - return - keyname = configManager.get_key_binding(event, self._GetSubConfigNames()) - if keyname is not None: - text = text + "\t" + keyname - if flags is None: - flags = win32con.MF_STRING | win32con.MF_ENABLED - if checked: - flags = flags | win32con.MF_CHECKED - menu.AppendMenu(flags, cmdid, text) - - def OnKeyDown(self, msg): - return self.bindings.fire_key_event(msg) - - def GotoEndOfFileEvent(self, event): - self.SetSel(-1) - - def KeyDotEvent(self, event): - ## Don't trigger autocomplete if any text is selected - s, e = self.GetSel() - if s != e: - return 1 - self.SCIAddText(".") - if self.bAutoCompleteAttributes: - self._AutoComplete() - - # View Whitespace/EOL/Indentation UI. - - def OnCmdViewWS(self, cmd, code): # Handle the menu command - viewWS = self.SCIGetViewWS() - self.SCISetViewWS(not viewWS) - - def OnUpdateViewWS(self, cmdui): # Update the tick on the UI. - cmdui.SetCheck(self.SCIGetViewWS()) - cmdui.Enable() - - def OnCmdViewIndentationGuides(self, cmd, code): # Handle the menu command - viewIG = self.SCIGetIndentationGuides() - self.SCISetIndentationGuides(not viewIG) - - def OnUpdateViewIndentationGuides(self, cmdui): # Update the tick on the UI. - cmdui.SetCheck(self.SCIGetIndentationGuides()) - cmdui.Enable() - - def OnCmdViewRightEdge(self, cmd, code): # Handle the menu command - if self.SCIGetEdgeMode() == scintillacon.EDGE_NONE: - mode = scintillacon.EDGE_BACKGROUND - else: - mode = scintillacon.EDGE_NONE - self.SCISetEdgeMode(mode) - - def OnUpdateViewRightEdge(self, cmdui): # Update the tick on the UI. - cmdui.SetCheck(self.SCIGetEdgeMode() != scintillacon.EDGE_NONE) - cmdui.Enable() - - def OnCmdViewEOL(self, cmd, code): # Handle the menu command - viewEOL = self.SCIGetViewEOL() - self.SCISetViewEOL(not viewEOL) - - def OnUpdateViewEOL(self, cmdui): # Update the tick on the UI. 
- cmdui.SetCheck(self.SCIGetViewEOL()) - cmdui.Enable() - - def OnCmdViewFixedFont(self, cmd, code): # Handle the menu command - self._GetColorizer().bUseFixed = not self._GetColorizer().bUseFixed - self.ApplyFormattingStyles(0) - # Ensure the selection is visible! - self.ScrollCaret() - - def OnUpdateViewFixedFont(self, cmdui): # Update the tick on the UI. - c = self._GetColorizer() - if c is not None: - cmdui.SetCheck(c.bUseFixed) - cmdui.Enable(c is not None) - - def OnCmdEditFind(self, cmd, code): - from . import find - - find.ShowFindDialog() - - def OnCmdEditRepeat(self, cmd, code): - from . import find - - find.FindNext() - - def OnCmdEditReplace(self, cmd, code): - from . import find - - find.ShowReplaceDialog() - - def OnCmdFileLocate(self, cmd, id): - line = self.GetLine().strip() - import pywin.framework.scriptutils - - m = patImport.match(line) - if m: - # Module name on this line - locate that! - modName = m.group("name") - fileName = pywin.framework.scriptutils.LocatePythonFile(modName) - if fileName is None: - win32ui.SetStatusText("Can't locate module %s" % modName) - return 1 # Let the default get it. - else: - win32ui.GetApp().OpenDocumentFile(fileName) - else: - # Just to a "normal" locate - let the default handler get it. - return 1 - return 0 - - def OnCmdGotoLine(self, cmd, id): - try: - lineNo = int(input("Enter Line Number")) - 1 - except (ValueError, KeyboardInterrupt): - return 0 - self.SCIEnsureVisible(lineNo) - self.SCIGotoLine(lineNo) - return 0 - - def SaveTextFile(self, filename, encoding=None): - doc = self.GetDocument() - doc._SaveTextToFile(self, filename, encoding=encoding) - doc.SetModifiedFlag(0) - return 1 - - def _AutoComplete(self): - def list2dict(l): - ret = {} - for i in l: - ret[i] = None - return ret - - self.SCIAutoCCancel() # Cancel old auto-complete lists. 
- # First try and get an object without evaluating calls - ob = self._GetObjectAtPos(bAllowCalls=0) - # If that failed, try and process call or indexing to get the object. - if ob is None: - ob = self._GetObjectAtPos(bAllowCalls=1) - items_dict = {} - if ob is not None: - try: # Catch unexpected errors when fetching attribute names from the object - # extra attributes of win32ui objects - if hasattr(ob, "_obj_"): - try: - items_dict.update(list2dict(dir(ob._obj_))) - except AttributeError: - pass # object has no __dict__ - - # normal attributes - try: - items_dict.update(list2dict(dir(ob))) - except AttributeError: - pass # object has no __dict__ - if hasattr(ob, "__class__"): - items_dict.update(list2dict(_get_class_attributes(ob.__class__))) - # The object may be a COM object with typelib support - lets see if we can get its props. - # (contributed by Stefan Migowsky) - try: - # Get the automation attributes - items_dict.update(ob.__class__._prop_map_get_) - # See if there is an write only property - # could be optimized - items_dict.update(ob.__class__._prop_map_put_) - # append to the already evaluated list - except AttributeError: - pass - # The object might be a pure COM dynamic dispatch with typelib support - lets see if we can get its props. - if hasattr(ob, "_oleobj_"): - try: - for iTI in range(0, ob._oleobj_.GetTypeInfoCount()): - typeInfo = ob._oleobj_.GetTypeInfo(iTI) - self._UpdateWithITypeInfo(items_dict, typeInfo) - except: - pass - except: - win32ui.SetStatusText( - "Error attempting to get object attributes - %s" - % (repr(sys.exc_info()[0]),) - ) - - # ensure all keys are strings. - items = [str(k) for k in items_dict.keys()] - # All names that start with "_" go! 
- items = [k for k in items if not k.startswith("_")] - - if not items: - # Heuristics a-la AutoExpand - # The idea is to find other usages of the current binding - # and assume, that it refers to the same object (or at least, - # to an object of the same type) - # Contributed by Vadim Chugunov [vadimch@yahoo.com] - left, right = self._GetWordSplit() - if left == "": # Ignore standalone dots - return None - # We limit our search to the current class, if that - # information is available - minline, maxline, curclass = self._GetClassInfoFromBrowser() - endpos = self.LineIndex(maxline) - text = self.GetTextRange(self.LineIndex(minline), endpos) - try: - l = re.findall(r"\b" + left + "\.\w+", text) - except re.error: - # parens etc may make an invalid RE, but this code wouldnt - # benefit even if the RE did work :-) - l = [] - prefix = len(left) + 1 - unique = {} - for li in l: - unique[li[prefix:]] = 1 - # Assuming traditional usage of self... - if curclass and left == "self": - self._UpdateWithClassMethods(unique, curclass) - - items = [ - word for word in unique.keys() if word[:2] != "__" or word[-2:] != "__" - ] - # Ignore the word currently to the right of the dot - probably a red-herring. - try: - items.remove(right[1:]) - except ValueError: - pass - if items: - items.sort() - self.SCIAutoCSetAutoHide(0) - self.SCIAutoCShow(items) - - def _UpdateWithITypeInfo(self, items_dict, typeInfo): - import pythoncom - - typeInfos = [typeInfo] - # suppress IDispatch and IUnknown methods - inspectedIIDs = {pythoncom.IID_IDispatch: None} - - while len(typeInfos) > 0: - typeInfo = typeInfos.pop() - typeAttr = typeInfo.GetTypeAttr() - - if typeAttr.iid not in inspectedIIDs: - inspectedIIDs[typeAttr.iid] = None - for iFun in range(0, typeAttr.cFuncs): - funDesc = typeInfo.GetFuncDesc(iFun) - funName = typeInfo.GetNames(funDesc.memid)[0] - if funName not in items_dict: - items_dict[funName] = None - - # Inspect the type info of all implemented types - # E.g. 
IShellDispatch5 implements IShellDispatch4 which implements IShellDispatch3 ... - for iImplType in range(0, typeAttr.cImplTypes): - iRefType = typeInfo.GetRefTypeOfImplType(iImplType) - refTypeInfo = typeInfo.GetRefTypeInfo(iRefType) - typeInfos.append(refTypeInfo) - - # TODO: This is kinda slow. Probably need some kind of cache - # here that is flushed upon file save - # Or maybe we don't need the superclass methods at all ? - def _UpdateWithClassMethods(self, dict, classinfo): - if not hasattr(classinfo, "methods"): - # No 'methods' - probably not what we think it is. - return - dict.update(classinfo.methods) - for super in classinfo.super: - if hasattr(super, "methods"): - self._UpdateWithClassMethods(dict, super) - - # Find which class definition caret is currently in and return - # indexes of the the first and the last lines of that class definition - # Data is obtained from module browser (if enabled) - def _GetClassInfoFromBrowser(self, pos=-1): - minline = 0 - maxline = self.GetLineCount() - 1 - doc = self.GetParentFrame().GetActiveDocument() - browser = None - try: - if doc is not None: - browser = doc.GetAllViews()[1] - except IndexError: - pass - if browser is None: - return (minline, maxline, None) # Current window has no browser - if not browser.list: - return (minline, maxline, None) # Not initialized - path = self.GetDocument().GetPathName() - if not path: - return (minline, maxline, None) # No current path - - import pywin.framework.scriptutils - - curmodule, path = pywin.framework.scriptutils.GetPackageModuleName(path) - try: - clbrdata = browser.list.root.clbrdata - except AttributeError: - return (minline, maxline, None) # No class data for this module. 
- curline = self.LineFromChar(pos) - curclass = None - # Find out which class we are in - for item in clbrdata.values(): - if item.module == curmodule: - item_lineno = ( - item.lineno - 1 - ) # Scintilla counts lines from 0, whereas pyclbr - from 1 - if minline < item_lineno <= curline: - minline = item_lineno - curclass = item - if curline < item_lineno < maxline: - maxline = item_lineno - return (minline, maxline, curclass) - - def _GetObjectAtPos(self, pos=-1, bAllowCalls=0): - left, right = self._GetWordSplit(pos, bAllowCalls) - if left: # It is an attribute lookup - # How is this for a hack! - namespace = sys.modules.copy() - namespace.update(__main__.__dict__) - # Get the debugger's context. - try: - from pywin.framework import interact - - if interact.edit is not None and interact.edit.currentView is not None: - globs, locs = interact.edit.currentView.GetContext()[:2] - if globs: - namespace.update(globs) - if locs: - namespace.update(locs) - except ImportError: - pass - try: - return eval(left, namespace) - except: - pass - return None - - def _GetWordSplit(self, pos=-1, bAllowCalls=0): - if pos == -1: - pos = self.GetSel()[0] - 1 # Character before current one - limit = self.GetTextLength() - before = [] - after = [] - index = pos - 1 - wordbreaks_use = wordbreaks - if bAllowCalls: - wordbreaks_use = wordbreaks_use + "()[]" - while index >= 0: - char = self.SCIGetCharAt(index) - if char not in wordbreaks_use: - break - before.insert(0, char) - index = index - 1 - index = pos - while index <= limit: - char = self.SCIGetCharAt(index) - if char not in wordbreaks_use: - break - after.append(char) - index = index + 1 - return "".join(before), "".join(after) - - def OnPrepareDC(self, dc, pInfo): - # print "OnPrepareDC for page", pInfo.GetCurPage(), "of", pInfo.GetFromPage(), "to", pInfo.GetToPage(), ", starts=", self.starts - if dc.IsPrinting(): - # Check if we are beyond the end. - # (only do this when actually printing, else messes up print preview!) 
- if not pInfo.GetPreview() and self.starts is not None: - prevPage = pInfo.GetCurPage() - 1 - if prevPage > 0 and self.starts[prevPage] >= self.GetTextLength(): - # All finished. - pInfo.SetContinuePrinting(0) - return - dc.SetMapMode(win32con.MM_TEXT) - - def OnPreparePrinting(self, pInfo): - flags = ( - win32ui.PD_USEDEVMODECOPIES | win32ui.PD_ALLPAGES | win32ui.PD_NOSELECTION - ) # Dont support printing just a selection. - # NOTE: Custom print dialogs are stopping the user's values from coming back :-( - # self.prtDlg = PrintDialog(pInfo, PRINTDLGORD, flags) - # pInfo.SetPrintDialog(self.prtDlg) - pInfo.SetMinPage(1) - # max page remains undefined for now. - pInfo.SetFromPage(1) - pInfo.SetToPage(1) - ret = self.DoPreparePrinting(pInfo) - return ret - - def OnBeginPrinting(self, dc, pInfo): - self.starts = None - return self._obj_.OnBeginPrinting(dc, pInfo) - - def CalculatePageRanges(self, dc, pInfo): - # Calculate page ranges and max page - self.starts = {0: 0} - metrics = dc.GetTextMetrics() - left, top, right, bottom = pInfo.GetDraw() - # Leave space at the top for the header. - rc = (left, top + int((9 * metrics["tmHeight"]) / 2), right, bottom) - pageStart = 0 - maxPage = 0 - textLen = self.GetTextLength() - while pageStart < textLen: - pageStart = self.FormatRange(dc, pageStart, textLen, rc, 0) - maxPage = maxPage + 1 - self.starts[maxPage] = pageStart - # And a sentinal for one page past the end - self.starts[maxPage + 1] = textLen - # When actually printing, maxPage doesnt have any effect at this late state. - # but is needed to make the Print Preview work correctly. 
- pInfo.SetMaxPage(maxPage) - - def OnFilePrintPreview(self, *arg): - self._obj_.OnFilePrintPreview() - - def OnFilePrint(self, *arg): - self._obj_.OnFilePrint() - - def FormatRange(self, dc, pageStart, lengthDoc, rc, draw): - """ - typedef struct _formatrange { - HDC hdc; - HDC hdcTarget; - RECT rc; - RECT rcPage; - CHARRANGE chrg;} FORMATRANGE; - """ - fmt = "PPIIIIIIIIll" - hdcRender = dc.GetHandleOutput() - hdcFormat = dc.GetHandleAttrib() - fr = struct.pack( - fmt, - hdcRender, - hdcFormat, - rc[0], - rc[1], - rc[2], - rc[3], - rc[0], - rc[1], - rc[2], - rc[3], - pageStart, - lengthDoc, - ) - nextPageStart = self.SendScintilla(EM_FORMATRANGE, draw, fr) - return nextPageStart - - def OnPrint(self, dc, pInfo): - metrics = dc.GetTextMetrics() - # print "dev", w, h, l, metrics['tmAscent'], metrics['tmDescent'] - if self.starts is None: - self.CalculatePageRanges(dc, pInfo) - pageNum = pInfo.GetCurPage() - 1 - # Setup the header of the page - docname on left, pagenum on right. - doc = self.GetDocument() - cxChar = metrics["tmAveCharWidth"] - cyChar = metrics["tmHeight"] - left, top, right, bottom = pInfo.GetDraw() - dc.TextOut(0, 2 * cyChar, doc.GetTitle()) - pagenum_str = win32ui.LoadString(afxres.AFX_IDS_PRINTPAGENUM) % (pageNum + 1,) - dc.SetTextAlign(win32con.TA_RIGHT) - dc.TextOut(right, 2 * cyChar, pagenum_str) - dc.SetTextAlign(win32con.TA_LEFT) - top = top + int((7 * cyChar) / 2) - dc.MoveTo(left, top) - dc.LineTo(right, top) - top = top + cyChar - rc = (left, top, right, bottom) - nextPageStart = self.FormatRange( - dc, self.starts[pageNum], self.starts[pageNum + 1], rc, 1 - ) - - -def LoadConfiguration(): - global configManager - # Bit of a hack I dont kow what to do about? 
- from .config import ConfigManager - - configName = rc = win32ui.GetProfileVal("Editor", "Keyboard Config", "default") - configManager = ConfigManager(configName) - if configManager.last_error: - bTryDefault = 0 - msg = "Error loading configuration '%s'\n\n%s" % ( - configName, - configManager.last_error, - ) - if configName != "default": - msg = msg + "\n\nThe default configuration will be loaded." - bTryDefault = 1 - win32ui.MessageBox(msg) - if bTryDefault: - configManager = ConfigManager("default") - if configManager.last_error: - win32ui.MessageBox( - "Error loading configuration 'default'\n\n%s" - % (configManager.last_error) - ) - - -configManager = None -LoadConfiguration() diff --git a/lib/pythonwin/pywin/tools/TraceCollector.py b/lib/pythonwin/pywin/tools/TraceCollector.py deleted file mode 100644 index 5800488b..00000000 --- a/lib/pythonwin/pywin/tools/TraceCollector.py +++ /dev/null @@ -1,79 +0,0 @@ -# win32traceutil like utility for Pythonwin -import _thread - -import win32api -import win32event -import win32trace -from pywin.framework import winout - -outputWindow = None - - -def CollectorThread(stopEvent, file): - win32trace.InitRead() - handle = win32trace.GetHandle() - # Run this thread at a lower priority to the main message-loop (and printing output) - # thread can keep up - import win32process - - win32process.SetThreadPriority( - win32api.GetCurrentThread(), win32process.THREAD_PRIORITY_BELOW_NORMAL - ) - - try: - while 1: - rc = win32event.WaitForMultipleObjects( - (handle, stopEvent), 0, win32event.INFINITE - ) - if rc == win32event.WAIT_OBJECT_0: - # About the only char we can't live with is \0! 
- file.write(win32trace.read().replace("\0", "")) - else: - # Stop event - break - finally: - win32trace.TermRead() - print("Thread dieing") - - -class WindowOutput(winout.WindowOutput): - def __init__(self, *args): - winout.WindowOutput.__init__(*(self,) + args) - self.hStopThread = win32event.CreateEvent(None, 0, 0, None) - _thread.start_new(CollectorThread, (self.hStopThread, self)) - - def _StopThread(self): - win32event.SetEvent(self.hStopThread) - self.hStopThread = None - - def Close(self): - self._StopThread() - winout.WindowOutput.Close(self) - # def OnViewDestroy(self, frame): - # return winout.WindowOutput.OnViewDestroy(self, frame) - # def Create(self, title=None, style = None): - # rc = winout.WindowOutput.Create(self, title, style) - return rc - - -def MakeOutputWindow(): - # Note that it will not show until the first string written or - # you pass bShow = 1 - global outputWindow - if outputWindow is None: - title = "Python Trace Collector" - # queueingFlag doesnt matter, as all output will come from new thread - outputWindow = WindowOutput(title, title) - # Let people know what this does! - msg = """\ -# This window will display output from any programs that import win32traceutil -# win32com servers registered with '--debug' are in this category. 
-""" - outputWindow.write(msg) - # force existing window open - outputWindow.write("") - return outputWindow - - -if __name__ == "__main__": - MakeOutputWindow() diff --git a/lib/pythonwin/pywin/tools/__init__.py b/lib/pythonwin/pywin/tools/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/lib/pythonwin/pywin/tools/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/pythonwin/pywin/tools/browseProjects.py b/lib/pythonwin/pywin/tools/browseProjects.py deleted file mode 100644 index e3255579..00000000 --- a/lib/pythonwin/pywin/tools/browseProjects.py +++ /dev/null @@ -1,324 +0,0 @@ -import glob -import os -import pyclbr - -import afxres -import commctrl -import pywin.framework.scriptutils -import regutil -import win32api -import win32con -import win32ui -from pywin.mfc import dialog - -from . import hierlist - - -class HLIErrorItem(hierlist.HierListItem): - def __init__(self, text): - self.text = text - hierlist.HierListItem.__init__(self) - - def GetText(self): - return self.text - - -class HLICLBRItem(hierlist.HierListItem): - def __init__(self, name, file, lineno, suffix=""): - # If the 'name' object itself has a .name, use it. Not sure - # how this happens, but seems pyclbr related. - # See PyWin32 bug 817035 - self.name = getattr(name, "name", name) - self.file = file - self.lineno = lineno - self.suffix = suffix - - def __lt__(self, other): - return self.name < other.name - - def __eq__(self, other): - return self.name == other.name - - def GetText(self): - return self.name + self.suffix - - def TakeDefaultAction(self): - if self.file: - pywin.framework.scriptutils.JumpToDocument( - self.file, self.lineno, bScrollToTop=1 - ) - else: - win32ui.SetStatusText("The source of this object is unknown") - - def PerformItemSelected(self): - if self.file is None: - msg = "%s - source can not be located." 
% (self.name,) - else: - msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file) - win32ui.SetStatusText(msg) - - -class HLICLBRClass(HLICLBRItem): - def __init__(self, clbrclass, suffix=""): - try: - name = clbrclass.name - file = clbrclass.file - lineno = clbrclass.lineno - self.super = clbrclass.super - self.methods = clbrclass.methods - except AttributeError: - name = clbrclass - file = lineno = None - self.super = [] - self.methods = {} - HLICLBRItem.__init__(self, name, file, lineno, suffix) - - def GetSubList(self): - ret = [] - for c in self.super: - ret.append(HLICLBRClass(c, " (Parent class)")) - for meth, lineno in self.methods.items(): - ret.append(HLICLBRMethod(meth, self.file, lineno, " (method)")) - return ret - - def IsExpandable(self): - return len(self.methods) + len(self.super) - - def GetBitmapColumn(self): - return 21 - - -class HLICLBRFunction(HLICLBRClass): - def GetBitmapColumn(self): - return 22 - - -class HLICLBRMethod(HLICLBRItem): - def GetBitmapColumn(self): - return 22 - - -class HLIModuleItem(hierlist.HierListItem): - def __init__(self, path): - hierlist.HierListItem.__init__(self) - self.path = path - - def GetText(self): - return os.path.split(self.path)[1] + " (module)" - - def IsExpandable(self): - return 1 - - def TakeDefaultAction(self): - win32ui.GetApp().OpenDocumentFile(self.path) - - def GetBitmapColumn(self): - col = 4 # Default - try: - if win32api.GetFileAttributes(self.path) & win32con.FILE_ATTRIBUTE_READONLY: - col = 5 - except win32api.error: - pass - return col - - def GetSubList(self): - mod, path = pywin.framework.scriptutils.GetPackageModuleName(self.path) - win32ui.SetStatusText("Building class list - please wait...", 1) - win32ui.DoWaitCursor(1) - try: - try: - reader = pyclbr.readmodule_ex # Post 1.5.2 interface. 
- extra_msg = " or functions" - except AttributeError: - reader = pyclbr.readmodule - extra_msg = "" - data = reader(mod, [path]) - if data: - ret = [] - for item in data.values(): - if ( - item.__class__ != pyclbr.Class - ): # ie, it is a pyclbr Function instance (only introduced post 1.5.2) - ret.append(HLICLBRFunction(item, " (function)")) - else: - ret.append(HLICLBRClass(item, " (class)")) - ret.sort() - return ret - else: - return [HLIErrorItem("No Python classes%s in module." % (extra_msg,))] - finally: - win32ui.DoWaitCursor(0) - win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)) - - -def MakePathSubList(path): - ret = [] - for filename in glob.glob(os.path.join(path, "*")): - if os.path.isdir(filename) and os.path.isfile( - os.path.join(filename, "__init__.py") - ): - ret.append(HLIDirectoryItem(filename, os.path.split(filename)[1])) - else: - if os.path.splitext(filename)[1].lower() in [".py", ".pyw"]: - ret.append(HLIModuleItem(filename)) - return ret - - -class HLIDirectoryItem(hierlist.HierListItem): - def __init__(self, path, displayName=None, bSubDirs=0): - hierlist.HierListItem.__init__(self) - self.path = path - self.bSubDirs = bSubDirs - if displayName: - self.displayName = displayName - else: - self.displayName = path - - def IsExpandable(self): - return 1 - - def GetText(self): - return self.displayName - - def GetSubList(self): - ret = MakePathSubList(self.path) - if ( - os.path.split(self.path)[1] == "win32com" - ): # Complete and utter hack for win32com. 
- try: - path = win32api.GetFullPathName( - os.path.join(self.path, "..\\win32comext") - ) - ret = ret + MakePathSubList(path) - except win32ui.error: - pass - return ret - - -class HLIProjectRoot(hierlist.HierListItem): - def __init__(self, projectName, displayName=None): - hierlist.HierListItem.__init__(self) - self.projectName = projectName - self.displayName = displayName or projectName - - def GetText(self): - return self.displayName - - def IsExpandable(self): - return 1 - - def GetSubList(self): - paths = regutil.GetRegisteredNamedPath(self.projectName) - pathList = paths.split(";") - if len(pathList) == 1: # Single dir - dont bother putting the dir in - ret = MakePathSubList(pathList[0]) - else: - ret = list(map(HLIDirectoryItem, pathList)) - return ret - - -class HLIRoot(hierlist.HierListItem): - def __init__(self): - hierlist.HierListItem.__init__(self) - - def IsExpandable(self): - return 1 - - def GetSubList(self): - keyStr = regutil.BuildDefaultPythonKey() + "\\PythonPath" - hKey = win32api.RegOpenKey(regutil.GetRootKey(), keyStr) - try: - ret = [] - ret.append(HLIProjectRoot("", "Standard Python Library")) # The core path. 
- index = 0 - while 1: - try: - ret.append(HLIProjectRoot(win32api.RegEnumKey(hKey, index))) - index = index + 1 - except win32api.error: - break - return ret - finally: - win32api.RegCloseKey(hKey) - - -class dynamic_browser(dialog.Dialog): - style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE - cs = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | commctrl.TVS_HASLINES - | commctrl.TVS_LINESATROOT - | commctrl.TVS_HASBUTTONS - ) - - dt = [ - ["Python Projects", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")], - ["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs], - ] - - def __init__(self, hli_root): - dialog.Dialog.__init__(self, self.dt) - self.hier_list = hierlist.HierListWithItems(hli_root, win32ui.IDB_BROWSER_HIER) - self.HookMessage(self.on_size, win32con.WM_SIZE) - - def OnInitDialog(self): - self.hier_list.HierInit(self) - return dialog.Dialog.OnInitDialog(self) - - def on_size(self, params): - lparam = params[3] - w = win32api.LOWORD(lparam) - h = win32api.HIWORD(lparam) - self.GetDlgItem(win32ui.IDC_LIST1).MoveWindow((0, 0, w, h)) - - -def BrowseDialog(): - root = HLIRoot() - if not root.IsExpandable(): - raise TypeError( - "Browse() argument must have __dict__ attribute, or be a Browser supported type" - ) - - dlg = dynamic_browser(root) - dlg.CreateWindow() - - -def DockableBrowserCreator(parent): - root = HLIRoot() - hl = hierlist.HierListWithItems(root, win32ui.IDB_BROWSER_HIER) - - style = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | win32con.WS_BORDER - | commctrl.TVS_HASLINES - | commctrl.TVS_LINESATROOT - | commctrl.TVS_HASBUTTONS - ) - - control = win32ui.CreateTreeCtrl() - control.CreateWindow(style, (0, 0, 150, 300), parent, win32ui.IDC_LIST1) - list = hl.HierInit(parent, control) - return control - - -def DockablePathBrowser(): - import pywin.docking.DockingBar - - bar = pywin.docking.DockingBar.DockingBar() - bar.CreateWindow( - win32ui.GetMainFrame(), DockableBrowserCreator, "Path Browser", 0x8E0A - ) - 
bar.SetBarStyle( - bar.GetBarStyle() - | afxres.CBRS_TOOLTIPS - | afxres.CBRS_FLYBY - | afxres.CBRS_SIZE_DYNAMIC - ) - bar.EnableDocking(afxres.CBRS_ALIGN_ANY) - win32ui.GetMainFrame().DockControlBar(bar) - - -# The "default" entry point -Browse = DockablePathBrowser diff --git a/lib/pythonwin/pywin/tools/browser.py b/lib/pythonwin/pywin/tools/browser.py deleted file mode 100644 index 9d9a193d..00000000 --- a/lib/pythonwin/pywin/tools/browser.py +++ /dev/null @@ -1,517 +0,0 @@ -# basic module browser. - -# usage: -# >>> import browser -# >>> browser.Browse() -# or -# >>> browser.Browse(your_module) -import sys -import types - -import __main__ -import win32ui -from pywin.mfc import dialog - -from . import hierlist - -special_names = ["__doc__", "__name__", "__self__"] - - -# -# HierList items -class HLIPythonObject(hierlist.HierListItem): - def __init__(self, myobject=None, name=None): - hierlist.HierListItem.__init__(self) - self.myobject = myobject - self.knownExpandable = None - if name: - self.name = name - else: - try: - self.name = myobject.__name__ - except (AttributeError, TypeError): - try: - r = repr(myobject) - if len(r) > 20: - r = r[:20] + "..." - self.name = r - except (AttributeError, TypeError): - self.name = "???" - - def __lt__(self, other): - return self.name < other.name - - def __eq__(self, other): - return self.name == other.name - - def __repr__(self): - try: - type = self.GetHLIType() - except: - type = "Generic" - return ( - "HLIPythonObject(" - + type - + ") - name: " - + self.name - + " object: " - + repr(self.myobject) - ) - - def GetText(self): - try: - return str(self.name) + " (" + self.GetHLIType() + ")" - except AttributeError: - return str(self.name) + " = " + repr(self.myobject) - - def InsertDocString(self, lst): - ob = None - try: - ob = self.myobject.__doc__ - except (AttributeError, TypeError): - pass - # I don't quite grok descriptors enough to know how to - # best hook them up. 
Eg: - # >>> object.__getattribute__.__class__.__doc__ - # - if ob and isinstance(ob, str): - lst.insert(0, HLIDocString(ob, "Doc")) - - def GetSubList(self): - ret = [] - try: - for key, ob in self.myobject.__dict__.items(): - if key not in special_names: - ret.append(MakeHLI(ob, key)) - except (AttributeError, TypeError): - pass - try: - for name in self.myobject.__methods__: - ret.append(HLIMethod(name)) # no MakeHLI, as cant auto detect - except (AttributeError, TypeError): - pass - try: - for member in self.myobject.__members__: - if not member in special_names: - ret.append(MakeHLI(getattr(self.myobject, member), member)) - except (AttributeError, TypeError): - pass - ret.sort() - self.InsertDocString(ret) - return ret - - # if the has a dict, it is expandable. - def IsExpandable(self): - if self.knownExpandable is None: - self.knownExpandable = self.CalculateIsExpandable() - return self.knownExpandable - - def CalculateIsExpandable(self): - if hasattr(self.myobject, "__doc__"): - return 1 - try: - for key in self.myobject.__dict__.keys(): - if key not in special_names: - return 1 - except (AttributeError, TypeError): - pass - try: - self.myobject.__methods__ - return 1 - except (AttributeError, TypeError): - pass - try: - for item in self.myobject.__members__: - if item not in special_names: - return 1 - except (AttributeError, TypeError): - pass - return 0 - - def GetBitmapColumn(self): - if self.IsExpandable(): - return 0 - else: - return 4 - - def TakeDefaultAction(self): - ShowObject(self.myobject, self.name) - - -class HLIDocString(HLIPythonObject): - def GetHLIType(self): - return "DocString" - - def GetText(self): - return self.myobject.strip() - - def IsExpandable(self): - return 0 - - def GetBitmapColumn(self): - return 6 - - -class HLIModule(HLIPythonObject): - def GetHLIType(self): - return "Module" - - -class HLIFrame(HLIPythonObject): - def GetHLIType(self): - return "Stack Frame" - - -class HLITraceback(HLIPythonObject): - def GetHLIType(self): 
- return "Traceback" - - -class HLIClass(HLIPythonObject): - def GetHLIType(self): - return "Class" - - def GetSubList(self): - ret = [] - for base in self.myobject.__bases__: - ret.append(MakeHLI(base, "Base class: " + base.__name__)) - ret = ret + HLIPythonObject.GetSubList(self) - return ret - - -class HLIMethod(HLIPythonObject): - # myobject is just a string for methods. - def GetHLIType(self): - return "Method" - - def GetText(self): - return "Method: " + self.myobject + "()" - - -class HLICode(HLIPythonObject): - def GetHLIType(self): - return "Code" - - def IsExpandable(self): - return self.myobject - - def GetSubList(self): - ret = [] - ret.append(MakeHLI(self.myobject.co_consts, "Constants (co_consts)")) - ret.append(MakeHLI(self.myobject.co_names, "Names (co_names)")) - ret.append(MakeHLI(self.myobject.co_filename, "Filename (co_filename)")) - ret.append(MakeHLI(self.myobject.co_argcount, "Number of args (co_argcount)")) - ret.append(MakeHLI(self.myobject.co_varnames, "Param names (co_varnames)")) - - return ret - - -class HLIInstance(HLIPythonObject): - def GetHLIType(self): - return "Instance" - - def GetText(self): - return ( - str(self.name) - + " (Instance of class " - + str(self.myobject.__class__.__name__) - + ")" - ) - - def IsExpandable(self): - return 1 - - def GetSubList(self): - ret = [] - ret.append(MakeHLI(self.myobject.__class__)) - ret = ret + HLIPythonObject.GetSubList(self) - return ret - - -class HLIBuiltinFunction(HLIPythonObject): - def GetHLIType(self): - return "Builtin Function" - - -class HLIFunction(HLIPythonObject): - def GetHLIType(self): - return "Function" - - def IsExpandable(self): - return 1 - - def GetSubList(self): - ret = [] - # ret.append( MakeHLI( self.myobject.func_argcount, "Arg Count" )) - try: - ret.append(MakeHLI(self.myobject.func_argdefs, "Arg Defs")) - except AttributeError: - pass - try: - code = self.myobject.__code__ - globs = self.myobject.__globals__ - except AttributeError: - # must be py2.5 or 
earlier... - code = self.myobject.func_code - globs = self.myobject.func_globals - ret.append(MakeHLI(code, "Code")) - ret.append(MakeHLI(globs, "Globals")) - self.InsertDocString(ret) - return ret - - -class HLISeq(HLIPythonObject): - def GetHLIType(self): - return "Sequence (abstract!)" - - def IsExpandable(self): - return len(self.myobject) > 0 - - def GetSubList(self): - ret = [] - pos = 0 - for item in self.myobject: - ret.append(MakeHLI(item, "[" + str(pos) + "]")) - pos = pos + 1 - self.InsertDocString(ret) - return ret - - -class HLIList(HLISeq): - def GetHLIType(self): - return "List" - - -class HLITuple(HLISeq): - def GetHLIType(self): - return "Tuple" - - -class HLIDict(HLIPythonObject): - def GetHLIType(self): - return "Dict" - - def IsExpandable(self): - try: - self.myobject.__doc__ - return 1 - except (AttributeError, TypeError): - return len(self.myobject) > 0 - - def GetSubList(self): - ret = [] - keys = list(self.myobject.keys()) - keys.sort() - for key in keys: - ob = self.myobject[key] - ret.append(MakeHLI(ob, str(key))) - self.InsertDocString(ret) - return ret - - -# In Python 1.6, strings and Unicode have builtin methods, but we dont really want to see these -class HLIString(HLIPythonObject): - def IsExpandable(self): - return 0 - - -TypeMap = { - type: HLIClass, - types.FunctionType: HLIFunction, - tuple: HLITuple, - dict: HLIDict, - list: HLIList, - types.ModuleType: HLIModule, - types.CodeType: HLICode, - types.BuiltinFunctionType: HLIBuiltinFunction, - types.FrameType: HLIFrame, - types.TracebackType: HLITraceback, - str: HLIString, - int: HLIPythonObject, - bool: HLIPythonObject, - float: HLIPythonObject, -} - - -def MakeHLI(ob, name=None): - try: - cls = TypeMap[type(ob)] - except KeyError: - # hrmph - this check gets more and more bogus as Python - # improves. Its possible we should just *always* use - # HLIInstance? 
- if hasattr(ob, "__class__"): # 'new style' class - cls = HLIInstance - else: - cls = HLIPythonObject - return cls(ob, name) - - -######################################### -# -# Dialog related. - - -class DialogShowObject(dialog.Dialog): - def __init__(self, object, title): - self.object = object - self.title = title - dialog.Dialog.__init__(self, win32ui.IDD_LARGE_EDIT) - - def OnInitDialog(self): - import re - - self.SetWindowText(self.title) - self.edit = self.GetDlgItem(win32ui.IDC_EDIT1) - try: - strval = str(self.object) - except: - t, v, tb = sys.exc_info() - strval = "Exception getting object value\n\n%s:%s" % (t, v) - tb = None - strval = re.sub("\n", "\r\n", strval) - self.edit.ReplaceSel(strval) - - -def ShowObject(object, title): - dlg = DialogShowObject(object, title) - dlg.DoModal() - - -# And some mods for a sizable dialog from Sam Rushing! -import commctrl -import win32api -import win32con - - -class dynamic_browser(dialog.Dialog): - style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE - cs = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | commctrl.TVS_HASLINES - | commctrl.TVS_LINESATROOT - | commctrl.TVS_HASBUTTONS - ) - - dt = [ - ["Python Object Browser", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")], - ["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs], - ] - - def __init__(self, hli_root): - dialog.Dialog.__init__(self, self.dt) - self.hier_list = hierlist.HierListWithItems(hli_root, win32ui.IDB_BROWSER_HIER) - self.HookMessage(self.on_size, win32con.WM_SIZE) - - def OnInitDialog(self): - self.hier_list.HierInit(self) - return dialog.Dialog.OnInitDialog(self) - - def OnOK(self): - self.hier_list.HierTerm() - self.hier_list = None - return self._obj_.OnOK() - - def OnCancel(self): - self.hier_list.HierTerm() - self.hier_list = None - return self._obj_.OnCancel() - - def on_size(self, params): - lparam = params[3] - w = win32api.LOWORD(lparam) - h = win32api.HIWORD(lparam) - 
self.GetDlgItem(win32ui.IDC_LIST1).MoveWindow((0, 0, w, h)) - - -def Browse(ob=__main__): - "Browse the argument, or the main dictionary" - root = MakeHLI(ob, "root") - if not root.IsExpandable(): - raise TypeError( - "Browse() argument must have __dict__ attribute, or be a Browser supported type" - ) - - dlg = dynamic_browser(root) - dlg.CreateWindow() - return dlg - - -# -# -# Classes for using the browser in an MDI window, rather than a dialog -# -from pywin.mfc import docview - - -class BrowserTemplate(docview.DocTemplate): - def __init__(self): - docview.DocTemplate.__init__( - self, win32ui.IDR_PYTHONTYPE, BrowserDocument, None, BrowserView - ) - - def OpenObject(self, root): # Use this instead of OpenDocumentFile. - # Look for existing open document - for doc in self.GetDocumentList(): - if doc.root == root: - doc.GetFirstView().ActivateFrame() - return doc - # not found - new one. - doc = BrowserDocument(self, root) - frame = self.CreateNewFrame(doc) - doc.OnNewDocument() - self.InitialUpdateFrame(frame, doc, 1) - return doc - - -class BrowserDocument(docview.Document): - def __init__(self, template, root): - docview.Document.__init__(self, template) - self.root = root - self.SetTitle("Browser: " + root.name) - - def OnOpenDocument(self, name): - raise TypeError("This template can not open files") - return 0 - - -class BrowserView(docview.TreeView): - def OnInitialUpdate(self): - import commctrl - - rc = self._obj_.OnInitialUpdate() - list = hierlist.HierListWithItems( - self.GetDocument().root, - win32ui.IDB_BROWSER_HIER, - win32ui.AFX_IDW_PANE_FIRST, - ) - list.HierInit(self.GetParent()) - list.SetStyle( - commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS - ) - return rc - - -template = None - - -def MakeTemplate(): - global template - if template is None: - template = ( - BrowserTemplate() - ) # win32ui.IDR_PYTHONTYPE, BrowserDocument, None, BrowserView) - - -def BrowseMDI(ob=__main__): - """Browse an object using an MDI 
window.""" - - MakeTemplate() - root = MakeHLI(ob, repr(ob)) - if not root.IsExpandable(): - raise TypeError( - "Browse() argument must have __dict__ attribute, or be a Browser supported type" - ) - - template.OpenObject(root) diff --git a/lib/pythonwin/pywin/tools/hierlist.py b/lib/pythonwin/pywin/tools/hierlist.py deleted file mode 100644 index a60b7324..00000000 --- a/lib/pythonwin/pywin/tools/hierlist.py +++ /dev/null @@ -1,367 +0,0 @@ -# hierlist -# -# IMPORTANT - Please read before using. - -# This module exposes an API for a Hierarchical Tree Control. -# Previously, a custom tree control was included in Pythonwin which -# has an API very similar to this. - -# The current control used is the common "Tree Control". This module exists now -# to provide an API similar to the old control, but for the new Tree control. - -# If you need to use the Tree Control, you may still find this API a reasonable -# choice. However, you should investigate using the tree control directly -# to provide maximum flexibility (but with extra work). - -import sys - -import commctrl -import win32api -import win32con -import win32ui -from pywin.mfc import dialog, docview, object, window -from win32api import RGB - - -# helper to get the text of an arbitary item -def GetItemText(item): - if type(item) == type(()) or type(item) == type([]): - use = item[0] - else: - use = item - if type(use) == type(""): - return use - else: - return repr(item) - - -class HierDialog(dialog.Dialog): - def __init__( - self, - title, - hierList, - bitmapID=win32ui.IDB_HIERFOLDERS, - dlgID=win32ui.IDD_TREE, - dll=None, - childListBoxID=win32ui.IDC_LIST1, - ): - dialog.Dialog.__init__(self, dlgID, dll) # reuse this dialog. 
- self.hierList = hierList - self.dlgID = dlgID - self.title = title - - # self.childListBoxID = childListBoxID - def OnInitDialog(self): - self.SetWindowText(self.title) - self.hierList.HierInit(self) - return dialog.Dialog.OnInitDialog(self) - - -class HierList(object.Object): - def __init__( - self, root, bitmapID=win32ui.IDB_HIERFOLDERS, listBoxId=None, bitmapMask=None - ): # used to create object. - self.listControl = None - self.bitmapID = bitmapID - self.root = root - self.listBoxId = listBoxId - self.itemHandleMap = {} - self.filledItemHandlesMap = {} - self.bitmapMask = bitmapMask - - def __getattr__(self, attr): - try: - return getattr(self.listControl, attr) - except AttributeError: - return object.Object.__getattr__(self, attr) - - def ItemFromHandle(self, handle): - return self.itemHandleMap[handle] - - def SetStyle(self, newStyle): - hwnd = self.listControl.GetSafeHwnd() - style = win32api.GetWindowLong(hwnd, win32con.GWL_STYLE) - win32api.SetWindowLong(hwnd, win32con.GWL_STYLE, (style | newStyle)) - - def HierInit(self, parent, listControl=None): # Used when window first exists. - # this also calls "Create" on the listbox. - # params - id of listbbox, ID of bitmap, size of bitmaps - if self.bitmapMask is None: - bitmapMask = RGB(0, 0, 255) - else: - bitmapMask = self.bitmapMask - self.imageList = win32ui.CreateImageList(self.bitmapID, 16, 0, bitmapMask) - if listControl is None: - if self.listBoxId is None: - self.listBoxId = win32ui.IDC_LIST1 - self.listControl = parent.GetDlgItem(self.listBoxId) - else: - self.listControl = listControl - lbid = listControl.GetDlgCtrlID() - assert self.listBoxId is None or self.listBoxId == lbid, ( - "An invalid listbox control ID has been specified (specified as %s, but exists as %s)" - % (self.listBoxId, lbid) - ) - self.listBoxId = lbid - self.listControl.SetImageList(self.imageList, commctrl.LVSIL_NORMAL) - # self.list.AttachObject(self) - - ## ??? 
Need a better way to do this - either some way to detect if it's compiled with UNICODE - ## defined, and/or a way to switch the constants based on UNICODE ??? - if sys.version_info[0] < 3: - parent.HookNotify(self.OnTreeItemExpanding, commctrl.TVN_ITEMEXPANDINGA) - parent.HookNotify(self.OnTreeItemSelChanged, commctrl.TVN_SELCHANGEDA) - else: - parent.HookNotify(self.OnTreeItemExpanding, commctrl.TVN_ITEMEXPANDINGW) - parent.HookNotify(self.OnTreeItemSelChanged, commctrl.TVN_SELCHANGEDW) - parent.HookNotify(self.OnTreeItemDoubleClick, commctrl.NM_DBLCLK) - self.notify_parent = parent - - if self.root: - self.AcceptRoot(self.root) - - def DeleteAllItems(self): - self.listControl.DeleteAllItems() - self.root = None - self.itemHandleMap = {} - self.filledItemHandlesMap = {} - - def HierTerm(self): - # Dont want notifies as we kill the list. - parent = self.notify_parent # GetParentFrame() - if sys.version_info[0] < 3: - parent.HookNotify(None, commctrl.TVN_ITEMEXPANDINGA) - parent.HookNotify(None, commctrl.TVN_SELCHANGEDA) - else: - parent.HookNotify(None, commctrl.TVN_ITEMEXPANDINGW) - parent.HookNotify(None, commctrl.TVN_SELCHANGEDW) - parent.HookNotify(None, commctrl.NM_DBLCLK) - - self.DeleteAllItems() - self.listControl = None - self.notify_parent = None # Break a possible cycle - - def OnTreeItemDoubleClick(self, info, extra): - (hwndFrom, idFrom, code) = info - if idFrom != self.listBoxId: - return None - item = self.itemHandleMap[self.listControl.GetSelectedItem()] - self.TakeDefaultAction(item) - return 1 - - def OnTreeItemExpanding(self, info, extra): - (hwndFrom, idFrom, code) = info - if idFrom != self.listBoxId: - return None - action, itemOld, itemNew, pt = extra - itemHandle = itemNew[0] - if itemHandle not in self.filledItemHandlesMap: - item = self.itemHandleMap[itemHandle] - self.AddSubList(itemHandle, self.GetSubList(item)) - self.filledItemHandlesMap[itemHandle] = None - return 0 - - def OnTreeItemSelChanged(self, info, extra): - (hwndFrom, idFrom, 
code) = info - if idFrom != self.listBoxId: - return None - action, itemOld, itemNew, pt = extra - itemHandle = itemNew[0] - item = self.itemHandleMap[itemHandle] - self.PerformItemSelected(item) - return 1 - - def AddSubList(self, parentHandle, subItems): - for item in subItems: - self.AddItem(parentHandle, item) - - def AddItem(self, parentHandle, item, hInsertAfter=commctrl.TVI_LAST): - text = self.GetText(item) - if self.IsExpandable(item): - cItems = 1 # Trick it !! - else: - cItems = 0 - bitmapCol = self.GetBitmapColumn(item) - bitmapSel = self.GetSelectedBitmapColumn(item) - if bitmapSel is None: - bitmapSel = bitmapCol - ## if type(text) is str: - ## text = text.encode("mbcs") - hitem = self.listControl.InsertItem( - parentHandle, - hInsertAfter, - (None, None, None, text, bitmapCol, bitmapSel, cItems, 0), - ) - self.itemHandleMap[hitem] = item - return hitem - - def _GetChildHandles(self, handle): - ret = [] - try: - handle = self.listControl.GetChildItem(handle) - while 1: - ret.append(handle) - handle = self.listControl.GetNextItem(handle, commctrl.TVGN_NEXT) - except win32ui.error: - # out of children - pass - return ret - - def Refresh(self, hparent=None): - # Attempt to refresh the given item's sub-entries, but maintain the tree state - # (ie, the selected item, expanded items, etc) - if hparent is None: - hparent = commctrl.TVI_ROOT - if hparent not in self.filledItemHandlesMap: - # This item has never been expanded, so no refresh can possibly be required. - return - root_item = self.itemHandleMap[hparent] - old_handles = self._GetChildHandles(hparent) - old_items = list(map(self.ItemFromHandle, old_handles)) - new_items = self.GetSubList(root_item) - # Now an inefficient technique for synching the items. 
- inew = 0 - hAfter = commctrl.TVI_FIRST - for iold in range(len(old_items)): - inewlook = inew - matched = 0 - while inewlook < len(new_items): - if old_items[iold] == new_items[inewlook]: - matched = 1 - break - inewlook = inewlook + 1 - if matched: - # Insert the new items. - # print "Inserting after", old_items[iold], old_handles[iold] - for i in range(inew, inewlook): - # print "Inserting index %d (%s)" % (i, new_items[i]) - hAfter = self.AddItem(hparent, new_items[i], hAfter) - - inew = inewlook + 1 - # And recursively refresh iold - hold = old_handles[iold] - if hold in self.filledItemHandlesMap: - self.Refresh(hold) - else: - # Remove the deleted items. - # print "Deleting %d (%s)" % (iold, old_items[iold]) - hdelete = old_handles[iold] - # First recurse and remove the children from the map. - for hchild in self._GetChildHandles(hdelete): - del self.itemHandleMap[hchild] - if hchild in self.filledItemHandlesMap: - del self.filledItemHandlesMap[hchild] - self.listControl.DeleteItem(hdelete) - hAfter = old_handles[iold] - # Fill any remaining new items: - for newItem in new_items[inew:]: - # print "Inserting new item", newItem - self.AddItem(hparent, newItem) - - def AcceptRoot(self, root): - self.listControl.DeleteAllItems() - self.itemHandleMap = {commctrl.TVI_ROOT: root} - self.filledItemHandlesMap = {commctrl.TVI_ROOT: root} - subItems = self.GetSubList(root) - self.AddSubList(0, subItems) - - def GetBitmapColumn(self, item): - if self.IsExpandable(item): - return 0 - else: - return 4 - - def GetSelectedBitmapColumn(self, item): - return 0 - - def CheckChangedChildren(self): - return self.listControl.CheckChangedChildren() - - def GetText(self, item): - return GetItemText(item) - - def PerformItemSelected(self, item): - try: - win32ui.SetStatusText("Selected " + self.GetText(item)) - except win32ui.error: # No status bar! 
- pass - - def TakeDefaultAction(self, item): - win32ui.MessageBox("Got item " + self.GetText(item)) - - -########################################################################## -# -# Classes for use with seperate HierListItems. -# -# -class HierListWithItems(HierList): - def __init__( - self, root, bitmapID=win32ui.IDB_HIERFOLDERS, listBoxID=None, bitmapMask=None - ): # used to create object. - HierList.__init__(self, root, bitmapID, listBoxID, bitmapMask) - - def DelegateCall(self, fn): - return fn() - - def GetBitmapColumn(self, item): - rc = self.DelegateCall(item.GetBitmapColumn) - if rc is None: - rc = HierList.GetBitmapColumn(self, item) - return rc - - def GetSelectedBitmapColumn(self, item): - return self.DelegateCall(item.GetSelectedBitmapColumn) - - def IsExpandable(self, item): - return self.DelegateCall(item.IsExpandable) - - def GetText(self, item): - return self.DelegateCall(item.GetText) - - def GetSubList(self, item): - return self.DelegateCall(item.GetSubList) - - def PerformItemSelected(self, item): - func = getattr(item, "PerformItemSelected", None) - if func is None: - return HierList.PerformItemSelected(self, item) - else: - return self.DelegateCall(func) - - def TakeDefaultAction(self, item): - func = getattr(item, "TakeDefaultAction", None) - if func is None: - return HierList.TakeDefaultAction(self, item) - else: - return self.DelegateCall(func) - - -# A hier list item - for use with a HierListWithItems -class HierListItem: - def __init__(self): - pass - - def GetText(self): - pass - - def GetSubList(self): - pass - - def IsExpandable(self): - pass - - def GetBitmapColumn(self): - return None # indicate he should do it. - - def GetSelectedBitmapColumn(self): - return None # same as other - - # for py3k/rich-comp sorting compatibility. - def __lt__(self, other): - # we want unrelated items to be sortable... - return id(self) < id(other) - - # for py3k/rich-comp equality compatibility. 
- def __eq__(self, other): - return False diff --git a/lib/pythonwin/pywin/tools/regedit.py b/lib/pythonwin/pywin/tools/regedit.py deleted file mode 100644 index b94d3050..00000000 --- a/lib/pythonwin/pywin/tools/regedit.py +++ /dev/null @@ -1,386 +0,0 @@ -# Regedit - a Registry Editor for Python - -import commctrl -import regutil -import win32api -import win32con -import win32ui -from pywin.mfc import dialog, docview, window - -from . import hierlist - - -def SafeApply(fn, args, err_desc=""): - try: - fn(*args) - return 1 - except win32api.error as exc: - msg = "Error " + err_desc + "\r\n\r\n" + exc.strerror - win32ui.MessageBox(msg) - return 0 - - -class SplitterFrame(window.MDIChildWnd): - def __init__(self): - # call base CreateFrame - self.images = None - window.MDIChildWnd.__init__(self) - - def OnCreateClient(self, cp, context): - splitter = win32ui.CreateSplitter() - doc = context.doc - frame_rect = self.GetWindowRect() - size = ((frame_rect[2] - frame_rect[0]), (frame_rect[3] - frame_rect[1]) // 2) - sub_size = (size[0] // 3, size[1]) - splitter.CreateStatic(self, 1, 2) - # CTreeControl view - self.keysview = RegistryTreeView(doc) - # CListControl view - self.valuesview = RegistryValueView(doc) - - splitter.CreatePane(self.keysview, 0, 0, (sub_size)) - splitter.CreatePane(self.valuesview, 0, 1, (0, 0)) # size ignored. 
- splitter.SetRowInfo(0, size[1], 0) - # Setup items in the imagelist - - return 1 - - def OnItemDoubleClick(self, info, extra): - (hwndFrom, idFrom, code) = info - if idFrom == win32ui.AFX_IDW_PANE_FIRST: - # Tree control - return None - elif idFrom == win32ui.AFX_IDW_PANE_FIRST + 1: - item = self.keysview.SelectedItem() - self.valuesview.EditValue(item) - return 0 - # List control - else: - return None # Pass it on - - def PerformItemSelected(self, item): - return self.valuesview.UpdateForRegItem(item) - - def OnDestroy(self, msg): - window.MDIChildWnd.OnDestroy(self, msg) - if self.images: - self.images.DeleteImageList() - self.images = None - - -class RegistryTreeView(docview.TreeView): - def OnInitialUpdate(self): - rc = self._obj_.OnInitialUpdate() - self.frame = self.GetParent().GetParent() - self.hierList = hierlist.HierListWithItems( - self.GetHLIRoot(), win32ui.IDB_HIERFOLDERS, win32ui.AFX_IDW_PANE_FIRST - ) - self.hierList.HierInit(self.frame, self.GetTreeCtrl()) - self.hierList.SetStyle( - commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS - ) - self.hierList.PerformItemSelected = self.PerformItemSelected - - self.frame.HookNotify(self.frame.OnItemDoubleClick, commctrl.NM_DBLCLK) - self.frame.HookNotify(self.OnItemRightClick, commctrl.NM_RCLICK) - - # self.HookMessage(self.OnItemRightClick, win32con.WM_RBUTTONUP) - - def GetHLIRoot(self): - doc = self.GetDocument() - regroot = doc.root - subkey = doc.subkey - return HLIRegistryKey(regroot, subkey, "Root") - - def OnItemRightClick(self, notify_data, extra): - # First select the item we right-clicked on. 
- pt = self.ScreenToClient(win32api.GetCursorPos()) - flags, hItem = self.HitTest(pt) - if hItem == 0 or commctrl.TVHT_ONITEM & flags == 0: - return None - self.Select(hItem, commctrl.TVGN_CARET) - - menu = win32ui.CreatePopupMenu() - menu.AppendMenu(win32con.MF_STRING | win32con.MF_ENABLED, 1000, "Add Key") - menu.AppendMenu(win32con.MF_STRING | win32con.MF_ENABLED, 1001, "Add Value") - menu.AppendMenu(win32con.MF_STRING | win32con.MF_ENABLED, 1002, "Delete Key") - self.HookCommand(self.OnAddKey, 1000) - self.HookCommand(self.OnAddValue, 1001) - self.HookCommand(self.OnDeleteKey, 1002) - menu.TrackPopupMenu(win32api.GetCursorPos()) # track at mouse position. - return None - - def OnDeleteKey(self, command, code): - hitem = self.hierList.GetSelectedItem() - item = self.hierList.ItemFromHandle(hitem) - msg = "Are you sure you wish to delete the key '%s'?" % (item.keyName,) - id = win32ui.MessageBox(msg, None, win32con.MB_YESNO) - if id != win32con.IDYES: - return - if SafeApply( - win32api.RegDeleteKey, (item.keyRoot, item.keyName), "deleting registry key" - ): - # Get the items parent. - try: - hparent = self.GetParentItem(hitem) - except win32ui.error: - hparent = None - self.hierList.Refresh(hparent) - - def OnAddKey(self, command, code): - from pywin.mfc import dialog - - val = dialog.GetSimpleInput("New key name", "", "Add new key") - if val is None: - return # cancelled. - hitem = self.hierList.GetSelectedItem() - item = self.hierList.ItemFromHandle(hitem) - if SafeApply(win32api.RegCreateKey, (item.keyRoot, item.keyName + "\\" + val)): - self.hierList.Refresh(hitem) - - def OnAddValue(self, command, code): - from pywin.mfc import dialog - - val = dialog.GetSimpleInput("New value", "", "Add new value") - if val is None: - return # cancelled. 
- hitem = self.hierList.GetSelectedItem() - item = self.hierList.ItemFromHandle(hitem) - if SafeApply( - win32api.RegSetValue, (item.keyRoot, item.keyName, win32con.REG_SZ, val) - ): - # Simply re-select the current item to refresh the right spitter. - self.PerformItemSelected(item) - - # self.Select(hitem, commctrl.TVGN_CARET) - - def PerformItemSelected(self, item): - return self.frame.PerformItemSelected(item) - - def SelectedItem(self): - return self.hierList.ItemFromHandle(self.hierList.GetSelectedItem()) - - def SearchSelectedItem(self): - handle = self.hierList.GetChildItem(0) - while 1: - # print "State is", self.hierList.GetItemState(handle, -1) - if self.hierList.GetItemState(handle, commctrl.TVIS_SELECTED): - # print "Item is ", self.hierList.ItemFromHandle(handle) - return self.hierList.ItemFromHandle(handle) - handle = self.hierList.GetNextSiblingItem(handle) - - -class RegistryValueView(docview.ListView): - def OnInitialUpdate(self): - hwnd = self._obj_.GetSafeHwnd() - style = win32api.GetWindowLong(hwnd, win32con.GWL_STYLE) - win32api.SetWindowLong( - hwnd, - win32con.GWL_STYLE, - (style & ~commctrl.LVS_TYPEMASK) | commctrl.LVS_REPORT, - ) - - itemDetails = (commctrl.LVCFMT_LEFT, 100, "Name", 0) - self.InsertColumn(0, itemDetails) - itemDetails = (commctrl.LVCFMT_LEFT, 500, "Data", 0) - self.InsertColumn(1, itemDetails) - - def UpdateForRegItem(self, item): - self.DeleteAllItems() - hkey = win32api.RegOpenKey(item.keyRoot, item.keyName) - try: - valNum = 0 - ret = [] - while 1: - try: - res = win32api.RegEnumValue(hkey, valNum) - except win32api.error: - break - name = res[0] - if not name: - name = "(Default)" - self.InsertItem(valNum, name) - self.SetItemText(valNum, 1, str(res[1])) - valNum = valNum + 1 - finally: - win32api.RegCloseKey(hkey) - - def EditValue(self, item): - # Edit the current value - class EditDialog(dialog.Dialog): - def __init__(self, item): - self.item = item - dialog.Dialog.__init__(self, win32ui.IDD_LARGE_EDIT) - - def 
OnInitDialog(self): - self.SetWindowText("Enter new value") - self.GetDlgItem(win32con.IDCANCEL).ShowWindow(win32con.SW_SHOW) - self.edit = self.GetDlgItem(win32ui.IDC_EDIT1) - # Modify the edit windows style - style = win32api.GetWindowLong( - self.edit.GetSafeHwnd(), win32con.GWL_STYLE - ) - style = style & (~win32con.ES_WANTRETURN) - win32api.SetWindowLong( - self.edit.GetSafeHwnd(), win32con.GWL_STYLE, style - ) - self.edit.SetWindowText(str(self.item)) - self.edit.SetSel(-1) - return dialog.Dialog.OnInitDialog(self) - - def OnDestroy(self, msg): - self.newvalue = self.edit.GetWindowText() - - try: - index = self.GetNextItem(-1, commctrl.LVNI_SELECTED) - except win32ui.error: - return # No item selected. - - if index == 0: - keyVal = "" - else: - keyVal = self.GetItemText(index, 0) - # Query for a new value. - try: - newVal = self.GetItemsCurrentValue(item, keyVal) - except TypeError as details: - win32ui.MessageBox(details) - return - - d = EditDialog(newVal) - if d.DoModal() == win32con.IDOK: - try: - self.SetItemsCurrentValue(item, keyVal, d.newvalue) - except win32api.error as exc: - win32ui.MessageBox("Error setting value\r\n\n%s" % exc.strerror) - self.UpdateForRegItem(item) - - def GetItemsCurrentValue(self, item, valueName): - hkey = win32api.RegOpenKey(item.keyRoot, item.keyName) - try: - val, type = win32api.RegQueryValueEx(hkey, valueName) - if type != win32con.REG_SZ: - raise TypeError("Only strings can be edited") - return val - finally: - win32api.RegCloseKey(hkey) - - def SetItemsCurrentValue(self, item, valueName, value): - # ** Assumes already checked is a string. 
- hkey = win32api.RegOpenKey( - item.keyRoot, item.keyName, 0, win32con.KEY_SET_VALUE - ) - try: - win32api.RegSetValueEx(hkey, valueName, 0, win32con.REG_SZ, value) - finally: - win32api.RegCloseKey(hkey) - - -class RegTemplate(docview.DocTemplate): - def __init__(self): - docview.DocTemplate.__init__( - self, win32ui.IDR_PYTHONTYPE, None, SplitterFrame, None - ) - - # def InitialUpdateFrame(self, frame, doc, makeVisible=1): - # self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler. - # frame.InitialUpdateFrame(doc, makeVisible) - - def OpenRegistryKey( - self, root=None, subkey=None - ): # Use this instead of OpenDocumentFile. - # Look for existing open document - if root is None: - root = regutil.GetRootKey() - if subkey is None: - subkey = regutil.BuildDefaultPythonKey() - for doc in self.GetDocumentList(): - if doc.root == root and doc.subkey == subkey: - doc.GetFirstView().ActivateFrame() - return doc - # not found - new one. - doc = RegDocument(self, root, subkey) - frame = self.CreateNewFrame(doc) - doc.OnNewDocument() - self.InitialUpdateFrame(frame, doc, 1) - return doc - - -class RegDocument(docview.Document): - def __init__(self, template, root, subkey): - docview.Document.__init__(self, template) - self.root = root - self.subkey = subkey - self.SetTitle("Registry Editor: " + subkey) - - def OnOpenDocument(self, name): - raise TypeError("This template can not open files") - return 0 - - -class HLIRegistryKey(hierlist.HierListItem): - def __init__(self, keyRoot, keyName, userName): - self.keyRoot = keyRoot - self.keyName = keyName - self.userName = userName - hierlist.HierListItem.__init__(self) - - def __lt__(self, other): - return self.name < other.name - - def __eq__(self, other): - return ( - self.keyRoot == other.keyRoot - and self.keyName == other.keyName - and self.userName == other.userName - ) - - def __repr__(self): - return "<%s with root=%s, key=%s>" % ( - self.__class__.__name__, - self.keyRoot, - self.keyName, - ) - 
- def GetText(self): - return self.userName - - def IsExpandable(self): - # All keys are expandable, even if they currently have zero children. - return 1 - - ## hkey = win32api.RegOpenKey(self.keyRoot, self.keyName) - ## try: - ## keys, vals, dt = win32api.RegQueryInfoKey(hkey) - ## return (keys>0) - ## finally: - ## win32api.RegCloseKey(hkey) - - def GetSubList(self): - hkey = win32api.RegOpenKey(self.keyRoot, self.keyName) - win32ui.DoWaitCursor(1) - try: - keyNum = 0 - ret = [] - while 1: - try: - key = win32api.RegEnumKey(hkey, keyNum) - except win32api.error: - break - ret.append(HLIRegistryKey(self.keyRoot, self.keyName + "\\" + key, key)) - keyNum = keyNum + 1 - finally: - win32api.RegCloseKey(hkey) - win32ui.DoWaitCursor(0) - return ret - - -template = RegTemplate() - - -def EditRegistry(root=None, key=None): - doc = template.OpenRegistryKey(root, key) - - -if __name__ == "__main__": - EditRegistry() diff --git a/lib/pythonwin/pywin/tools/regpy.py b/lib/pythonwin/pywin/tools/regpy.py deleted file mode 100644 index 11ad63af..00000000 --- a/lib/pythonwin/pywin/tools/regpy.py +++ /dev/null @@ -1,81 +0,0 @@ -# (sort-of) Registry editor -import commctrl -import dialog -import win32con -import win32ui - - -class RegistryControl: - def __init__(self, key): - self.key = key - - -class RegEditPropertyPage(dialog.PropertyPage): - IDC_LISTVIEW = 1000 - - def GetTemplate(self): - "Return the template used to create this dialog" - - w = 152 # Dialog width - h = 122 # Dialog height - SS_STD = win32con.WS_CHILD | win32con.WS_VISIBLE - FRAMEDLG_STD = win32con.WS_CAPTION | win32con.WS_SYSMENU - style = ( - FRAMEDLG_STD - | win32con.WS_VISIBLE - | win32con.DS_SETFONT - | win32con.WS_MINIMIZEBOX - ) - template = [ - [self.caption, (0, 0, w, h), style, None, (8, "Helv")], - ] - lvStyle = ( - SS_STD - | commctrl.LVS_EDITLABELS - | commctrl.LVS_REPORT - | commctrl.LVS_AUTOARRANGE - | commctrl.LVS_ALIGNLEFT - | win32con.WS_BORDER - | win32con.WS_TABSTOP - ) - template.append( - 
["SysListView32", "", self.IDC_LISTVIEW, (10, 10, 185, 100), lvStyle] - ) - return template - - -class RegistryPage(RegEditPropertyPage): - def __init__(self): - self.caption = "Path" - RegEditPropertyPage.__init__(self, self.GetTemplate()) - - def OnInitDialog(self): - self.listview = self.GetDlgItem(self.IDC_LISTVIEW) - RegEditPropertyPage.OnInitDialog(self) - # Setup the listview columns - itemDetails = (commctrl.LVCFMT_LEFT, 100, "App", 0) - self.listview.InsertColumn(0, itemDetails) - itemDetails = (commctrl.LVCFMT_LEFT, 1024, "Paths", 0) - self.listview.InsertColumn(1, itemDetails) - - index = self.listview.InsertItem(0, "App") - self.listview.SetItemText(index, 1, "Path") - - -class RegistrySheet(dialog.PropertySheet): - def __init__(self, title): - dialog.PropertySheet.__init__(self, title) - self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE) - - def OnActivate(self, msg): - print("OnAcivate") - - -def t(): - ps = RegistrySheet("Registry Settings") - ps.AddPage(RegistryPage()) - ps.DoModal() - - -if __name__ == "__main__": - t() diff --git a/lib/pythonwin/start_pythonwin.pyw b/lib/pythonwin/start_pythonwin.pyw deleted file mode 100644 index 0a1b2e60..00000000 --- a/lib/pythonwin/start_pythonwin.pyw +++ /dev/null @@ -1,19 +0,0 @@ -# A Python file that can be used to start Pythonwin, instead of using -# pythonwin.exe -import os -import sys - -import win32ui - -import pywin.framework.intpyapp # InteractivePythonApp() - -assert pywin.framework.intpyapp # not unused -# Pretend this script doesn't exist, or pythonwin tries to edit it -sys.argv[:] = sys.argv[1:] or [""] # like PySys_SetArgv(Ex) -if sys.path[0] not in ("", ".", os.getcwd()): - sys.path.insert(0, os.getcwd()) -# And bootstrap the app. 
-app = win32ui.GetApp() -if not app.InitInstance(): - # Run when not already handled by DDE - app.Run() diff --git a/lib/pywin32.version.txt b/lib/pywin32.version.txt deleted file mode 100644 index cd307095..00000000 --- a/lib/pywin32.version.txt +++ /dev/null @@ -1 +0,0 @@ -306 diff --git a/lib/tempora/timing.py b/lib/tempora/timing.py index e74b8962..aed0d336 100644 --- a/lib/tempora/timing.py +++ b/lib/tempora/timing.py @@ -1,21 +1,22 @@ -import collections.abc -import contextlib import datetime import functools import numbers import time +import collections.abc +import contextlib import jaraco.functools class Stopwatch: """ - A simple stopwatch that starts automatically. + A simple stopwatch which starts automatically. >>> w = Stopwatch() >>> _1_sec = datetime.timedelta(seconds=1) >>> w.split() < _1_sec True + >>> import time >>> time.sleep(1.0) >>> w.split() >= _1_sec True @@ -26,13 +27,13 @@ class Stopwatch: >>> w.split() < _1_sec True - Launch the Stopwatch in a context: + It should be possible to launch the Stopwatch in a context: >>> with Stopwatch() as watch: ... assert isinstance(watch.split(), datetime.timedelta) - After exiting the context, the watch is stopped; read the - elapsed time directly: + In that case, the watch is stopped when the context is exited, + so to read the elapsed time: >>> watch.elapsed datetime.timedelta(...) diff --git a/lib/typing_extensions.py b/lib/typing_extensions.py index f3132ea4..ef42417c 100644 --- a/lib/typing_extensions.py +++ b/lib/typing_extensions.py @@ -2,12 +2,11 @@ import abc import collections import collections.abc import functools -import inspect import operator import sys import types as _types import typing -import warnings + __all__ = [ # Super-special typing primitives. @@ -32,7 +31,6 @@ __all__ = [ 'Coroutine', 'AsyncGenerator', 'AsyncContextManager', - 'Buffer', 'ChainMap', # Concrete collection types. @@ -45,13 +43,7 @@ __all__ = [ 'TypedDict', # Structural checks, a.k.a. protocols. 
- 'SupportsAbs', - 'SupportsBytes', - 'SupportsComplex', - 'SupportsFloat', 'SupportsIndex', - 'SupportsInt', - 'SupportsRound', # One-off things. 'Annotated', @@ -59,17 +51,12 @@ __all__ = [ 'assert_type', 'clear_overloads', 'dataclass_transform', - 'deprecated', - 'Doc', 'get_overloads', 'final', 'get_args', 'get_origin', - 'get_original_bases', - 'get_protocol_members', 'get_type_hints', 'IntVar', - 'is_protocol', 'is_typeddict', 'Literal', 'NewType', @@ -81,54 +68,12 @@ __all__ = [ 'runtime_checkable', 'Text', 'TypeAlias', - 'TypeAliasType', 'TypeGuard', - 'TypeIs', 'TYPE_CHECKING', 'Never', 'NoReturn', - 'ReadOnly', 'Required', 'NotRequired', - - # Pure aliases, have always been in typing - 'AbstractSet', - 'AnyStr', - 'BinaryIO', - 'Callable', - 'Collection', - 'Container', - 'Dict', - 'ForwardRef', - 'FrozenSet', - 'Generator', - 'Generic', - 'Hashable', - 'IO', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'List', - 'Mapping', - 'MappingView', - 'Match', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Optional', - 'Pattern', - 'Reversible', - 'Sequence', - 'Set', - 'Sized', - 'TextIO', - 'Tuple', - 'Union', - 'ValuesView', - 'cast', - 'no_type_check', - 'no_type_check_decorator', ] # for backward compatibility @@ -138,13 +83,7 @@ GenericMeta = type # The functions below are modified copies of typing internal helpers. # They are needed by _ProtocolMeta and they provide support for PEP 646. - -class _Sentinel: - def __repr__(self): - return "" - - -_marker = _Sentinel() +_marker = object() def _check_generic(cls, parameters, elen=_marker): @@ -245,13 +184,36 @@ else: ClassVar = typing.ClassVar +# On older versions of typing there is an internal class named "Final". 
+# 3.8+ +if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): + Final = typing.Final +# 3.7 +else: + class _FinalForm(typing._SpecialForm, _root=True): -class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name + def __repr__(self): + return 'typing_extensions.' + self._name + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) -Final = typing.Final + Final = _FinalForm('Final', + doc="""A special typing construct to indicate that a name + cannot be re-assigned or overridden in a subclass. + For example: + + MAX_SIZE: Final = 9000 + MAX_SIZE += 1 # Error reported by type checker + + class Connection: + TIMEOUT: Final[int] = 10 + class FastConnector(Connection): + TIMEOUT = 1 # Error reported by type checker + + There is no runtime checking of these properties.""") if sys.version_info >= (3, 11): final = typing.final @@ -295,67 +257,21 @@ def IntVar(name): return typing.TypeVar(name) -# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 -if sys.version_info >= (3, 10, 1): +# 3.8+: +if hasattr(typing, 'Literal'): Literal = typing.Literal +# 3.7: else: - def _flatten_literal_params(parameters): - """An internal helper for Literal creation: flatten Literals among parameters""" - params = [] - for p in parameters: - if isinstance(p, _LiteralGenericAlias): - params.extend(p.__args__) - else: - params.append(p) - return tuple(params) + class _LiteralForm(typing._SpecialForm, _root=True): - def _value_and_type_iter(params): - for p in params: - yield p, type(p) - - class _LiteralGenericAlias(typing._GenericAlias, _root=True): - def __eq__(self, other): - if not isinstance(other, _LiteralGenericAlias): - return NotImplemented - these_args_deduped = set(_value_and_type_iter(self.__args__)) - other_args_deduped = set(_value_and_type_iter(other.__args__)) - return these_args_deduped 
== other_args_deduped - - def __hash__(self): - return hash(frozenset(_value_and_type_iter(self.__args__))) - - class _LiteralForm(_ExtensionsSpecialForm, _root=True): - def __init__(self, doc: str): - self._name = 'Literal' - self._doc = self.__doc__ = doc + def __repr__(self): + return 'typing_extensions.' + self._name def __getitem__(self, parameters): - if not isinstance(parameters, tuple): - parameters = (parameters,) + return typing._GenericAlias(self, parameters) - parameters = _flatten_literal_params(parameters) - - val_type_pairs = list(_value_and_type_iter(parameters)) - try: - deduped_pairs = set(val_type_pairs) - except TypeError: - # unhashable parameters - pass - else: - # similar logic to typing._deduplicate on Python 3.9+ - if len(deduped_pairs) < len(val_type_pairs): - new_parameters = [] - for pair in val_type_pairs: - if pair in deduped_pairs: - new_parameters.append(pair[0]) - deduped_pairs.remove(pair) - assert not deduped_pairs, deduped_pairs - parameters = tuple(new_parameters) - - return _LiteralGenericAlias(self, parameters) - - Literal = _LiteralForm(doc="""\ - A type that can be used to indicate to type checkers + Literal = _LiteralForm('Literal', + doc="""A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the provided parameter. For example: @@ -369,7 +285,7 @@ else: instead of a type.""") -_overload_dummy = typing._overload_dummy +_overload_dummy = typing._overload_dummy # noqa if hasattr(typing, "get_overloads"): # 3.11+ @@ -443,6 +359,8 @@ Type = typing.Type # Various ABCs mimicking those in collections.abc. # A few are simply re-exported for completeness. 
+ + Awaitable = typing.Awaitable Coroutine = typing.Coroutine AsyncIterable = typing.AsyncIterable @@ -451,343 +369,278 @@ Deque = typing.Deque ContextManager = typing.ContextManager AsyncContextManager = typing.AsyncContextManager DefaultDict = typing.DefaultDict -OrderedDict = typing.OrderedDict + +# 3.7.2+ +if hasattr(typing, 'OrderedDict'): + OrderedDict = typing.OrderedDict +# 3.7.0-3.7.2 +else: + OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) + Counter = typing.Counter ChainMap = typing.ChainMap AsyncGenerator = typing.AsyncGenerator +NewType = typing.NewType Text = typing.Text TYPE_CHECKING = typing.TYPE_CHECKING -_PROTO_ALLOWLIST = { - 'collections.abc': [ - 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', - ], - 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], - 'typing_extensions': ['Buffer'], -} - - -_EXCLUDED_ATTRS = { - "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol", - "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", - "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", - "__subclasshook__", "__orig_class__", "__init__", "__new__", - "__protocol_attrs__", "__non_callable_proto_members__", - "__match_args__", -} - -if sys.version_info >= (3, 9): - _EXCLUDED_ATTRS.add("__class_getitem__") - -if sys.version_info >= (3, 12): - _EXCLUDED_ATTRS.add("__type_params__") - -_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS) +_PROTO_WHITELIST = ['Callable', 'Awaitable', + 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', + 'ContextManager', 'AsyncContextManager'] def _get_protocol_attrs(cls): attrs = set() for base in cls.__mro__[:-1]: # without object - if base.__name__ in {'Protocol', 'Generic'}: + if base.__name__ in ('Protocol', 'Generic'): continue annotations = getattr(base, '__annotations__', {}) - for 
attr in (*base.__dict__, *annotations): - if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): + for attr in list(base.__dict__.keys()) + list(annotations.keys()): + if (not attr.startswith('_abc_') and attr not in ( + '__abstractmethods__', '__annotations__', '__weakref__', + '_is_protocol', '_is_runtime_protocol', '__dict__', + '__args__', '__slots__', + '__next_in_mro__', '__parameters__', '__origin__', + '__orig_bases__', '__extra__', '__tree_hash__', + '__doc__', '__subclasshook__', '__init__', '__new__', + '__module__', '_MutableMapping__marker', '_gorg')): attrs.add(attr) return attrs -def _caller(depth=2): - try: - return sys._getframe(depth).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): # For platforms without _getframe() - return None +def _is_callable_members_only(cls): + return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) -# `__match_args__` attribute was removed from protocol members in 3.13, -# we want to backport this change to older Python versions. -if sys.version_info >= (3, 13): +def _maybe_adjust_parameters(cls): + """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. + + The contents of this function are very similar + to logic found in typing.Generic.__init_subclass__ + on the CPython main branch. + """ + tvars = [] + if '__orig_bases__' in cls.__dict__: + tvars = typing._collect_type_vars(cls.__orig_bases__) + # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...] and/or Protocol[...]. 
+ gvars = None + for base in cls.__orig_bases__: + if (isinstance(base, typing._GenericAlias) and + base.__origin__ in (typing.Generic, Protocol)): + # for error messages + the_base = base.__origin__.__name__ + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...]" + " and/or Protocol[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) + s_args = ', '.join(str(g) for g in gvars) + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]") + tvars = gvars + cls.__parameters__ = tuple(tvars) + + +# 3.8+ +if hasattr(typing, 'Protocol'): Protocol = typing.Protocol +# 3.7 else: - def _allow_reckless_class_checks(depth=3): - """Allow instance and class checks for special stdlib modules. - The abc and functools modules indiscriminately call isinstance() and - issubclass() on the whole MRO of a user class, which may contain protocols. - """ - return _caller(depth) in {'abc', 'functools', None} def _no_init(self, *args, **kwargs): if type(self)._is_protocol: raise TypeError('Protocols cannot be instantiated') - def _type_check_issubclass_arg_1(arg): - """Raise TypeError if `arg` is not an instance of `type` - in `issubclass(arg, )`. - - In most cases, this is verified by type.__subclasscheck__. - Checking it again unnecessarily would slow down issubclass() checks, - so, we don't perform this check unless we absolutely have to. - - For various error paths, however, - we want to ensure that *this* error message is shown to the user - where relevant, rather than a typing.py-specific error message. - """ - if not isinstance(arg, type): - # Same error message as for issubclass(1, int). 
- raise TypeError('issubclass() arg 1 must be a class') - - # Inheriting from typing._ProtocolMeta isn't actually desirable, - # but is necessary to allow typing.Protocol and typing_extensions.Protocol - # to mix without getting TypeErrors about "metaclass conflict" - class _ProtocolMeta(type(typing.Protocol)): - # This metaclass is somewhat unfortunate, - # but is necessary for several reasons... - # - # NOTE: DO NOT call super() in any methods in this class - # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 - # and those are slow - def __new__(mcls, name, bases, namespace, **kwargs): - if name == "Protocol" and len(bases) < 2: - pass - elif {Protocol, typing.Protocol} & set(bases): - for base in bases: - if not ( - base in {object, typing.Generic, Protocol, typing.Protocol} - or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) - or is_protocol(base) - ): - raise TypeError( - f"Protocols can only inherit from other protocols, " - f"got {base!r}" - ) - return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) - - def __init__(cls, *args, **kwargs): - abc.ABCMeta.__init__(cls, *args, **kwargs) - if getattr(cls, "_is_protocol", False): - cls.__protocol_attrs__ = _get_protocol_attrs(cls) - - def __subclasscheck__(cls, other): - if cls is Protocol: - return type.__subclasscheck__(cls, other) - if ( - getattr(cls, '_is_protocol', False) - and not _allow_reckless_class_checks() - ): - if not getattr(cls, '_is_runtime_protocol', False): - _type_check_issubclass_arg_1(other) - raise TypeError( - "Instance and class checks can only be used with " - "@runtime_checkable protocols" - ) - if ( - # this attribute is set by @runtime_checkable: - cls.__non_callable_proto_members__ - and cls.__dict__.get("__subclasshook__") is _proto_hook - ): - _type_check_issubclass_arg_1(other) - non_method_attrs = sorted(cls.__non_callable_proto_members__) - raise TypeError( - "Protocols with non-method members don't support issubclass()." 
- f" Non-method members: {str(non_method_attrs)[1:-1]}." - ) - return abc.ABCMeta.__subclasscheck__(cls, other) - + class _ProtocolMeta(abc.ABCMeta): # noqa: B024 + # This metaclass is a bit unfortunate and exists only because of the lack + # of __instancehook__. def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. - if cls is Protocol: - return type.__instancecheck__(cls, instance) - if not getattr(cls, "_is_protocol", False): - # i.e., it's a concrete subclass of a protocol - return abc.ABCMeta.__instancecheck__(cls, instance) - - if ( - not getattr(cls, '_is_runtime_protocol', False) and - not _allow_reckless_class_checks() - ): - raise TypeError("Instance and class checks can only be used with" - " @runtime_checkable protocols") - - if abc.ABCMeta.__instancecheck__(cls, instance): + if ((not getattr(cls, '_is_protocol', False) or + _is_callable_members_only(cls)) and + issubclass(instance.__class__, cls)): return True + if cls._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(cls, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(cls)): + return True + return super().__instancecheck__(instance) - for attr in cls.__protocol_attrs__: - try: - val = inspect.getattr_static(instance, attr) - except AttributeError: - break - # this attribute is set by @runtime_checkable: - if val is None and attr not in cls.__non_callable_proto_members__: - break - else: - return True + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this while these live in two different + # modules. The duplicated code will be removed when Protocol is moved to typing. + """Base class for protocol classes. Protocol classes are defined as:: - return False + class Proto(Protocol): + def meth(self) -> int: + ... 
- def __eq__(cls, other): - # Hack so that typing.Generic.__class_getitem__ - # treats typing_extensions.Protocol - # as equivalent to typing.Protocol - if abc.ABCMeta.__eq__(cls, other) is True: - return True - return cls is Protocol and other is typing.Protocol + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: - # This has to be defined, or the abc-module cache - # complains about classes with this metaclass being unhashable, - # if we define only __eq__! - def __hash__(cls) -> int: - return type.__hash__(cls) + class C: + def meth(self) -> int: + return 0 - @classmethod - def _proto_hook(cls, other): - if not cls.__dict__.get('_is_protocol', False): - return NotImplemented + def func(x: Proto) -> int: + return x.meth() - for attr in cls.__protocol_attrs__: - for base in other.__mro__: - # Check if the members appears in the class dictionary... - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break + func(C()) # Passes static type check - # ...or in annotations, if it is a sub-protocol. - annotations = getattr(base, '__annotations__', {}) - if ( - isinstance(annotations, collections.abc.Mapping) - and attr in annotations - and is_protocol(other) - ): - break - else: - return NotImplemented - return True + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. - class Protocol(typing.Generic, metaclass=_ProtocolMeta): - __doc__ = typing.Protocol.__doc__ + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... 
+ """ __slots__ = () _is_protocol = True - _is_runtime_protocol = False + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) # noqa + if cls is Protocol: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params, len(cls.__parameters__)) + return typing._GenericAlias(cls, params) def __init_subclass__(cls, *args, **kwargs): - super().__init_subclass__(*args, **kwargs) + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + _maybe_adjust_parameters(cls) # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', False): + if not cls.__dict__.get('_is_protocol', None): cls._is_protocol = any(b is Protocol for b in cls.__bases__) # Set (or override) the protocol subclass hook. 
+ def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not getattr(cls, '_is_runtime_protocol', False): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True if '__subclasshook__' not in cls.__dict__: cls.__subclasshook__ = _proto_hook - # Prohibit instantiation for protocol classes - if cls._is_protocol and cls.__init__ is Protocol.__init__: - cls.__init__ = _no_init + # We have nothing more to do for non-protocols. + if not cls._is_protocol: + return + + # Check consistency of bases. + for base in cls.__bases__: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, _ProtocolMeta) and base._is_protocol): + raise TypeError('Protocols can only inherit from other' + f' protocols, got {repr(base)}') + cls.__init__ = _no_init -if sys.version_info >= (3, 13): +# 3.8+ +if hasattr(typing, 'runtime_checkable'): runtime_checkable = typing.runtime_checkable +# 3.7 else: def runtime_checkable(cls): - """Mark a protocol class as a runtime protocol. 
+ """Mark a protocol class as a runtime protocol, so that it + can be used with isinstance() and issubclass(). Raise TypeError + if applied to a non-protocol class. - Such protocol can be used with isinstance() and issubclass(). - Raise TypeError if applied to a non-protocol class. - This allows a simple-minded structural check very similar to - one trick ponies in collections.abc such as Iterable. - - For example:: - - @runtime_checkable - class Closable(Protocol): - def close(self): ... - - assert isinstance(open('/some/file'), Closable) - - Warning: this will check only the presence of the required methods, - not their type signatures! + This allows a simple-minded structural check very similar to the + one-offs in collections.abc such as Hashable. """ - if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False): + if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: raise TypeError('@runtime_checkable can be only applied to protocol classes,' - ' got %r' % cls) + f' got {cls!r}') cls._is_runtime_protocol = True - - # Only execute the following block if it's a typing_extensions.Protocol class. - # typing.Protocol classes don't need it. - if isinstance(cls, _ProtocolMeta): - # PEP 544 prohibits using issubclass() - # with protocols that have non-method members. - # See gh-113320 for why we compute this attribute here, - # rather than in `_ProtocolMeta.__init__` - cls.__non_callable_proto_members__ = set() - for attr in cls.__protocol_attrs__: - try: - is_callable = callable(getattr(cls, attr, None)) - except Exception as e: - raise TypeError( - f"Failed to determine whether protocol member {attr!r} " - "is a method member" - ) from e - else: - if not is_callable: - cls.__non_callable_proto_members__.add(attr) - return cls -# The "runtime" alias exists for backwards compatibility. +# Exists for backwards compatibility. 
runtime = runtime_checkable -# Our version of runtime-checkable protocols is faster on Python 3.8-3.11 -if sys.version_info >= (3, 12): - SupportsInt = typing.SupportsInt - SupportsFloat = typing.SupportsFloat - SupportsComplex = typing.SupportsComplex - SupportsBytes = typing.SupportsBytes +# 3.8+ +if hasattr(typing, 'SupportsIndex'): SupportsIndex = typing.SupportsIndex - SupportsAbs = typing.SupportsAbs - SupportsRound = typing.SupportsRound +# 3.7 else: - @runtime_checkable - class SupportsInt(Protocol): - """An ABC with one abstract method __int__.""" - __slots__ = () - - @abc.abstractmethod - def __int__(self) -> int: - pass - - @runtime_checkable - class SupportsFloat(Protocol): - """An ABC with one abstract method __float__.""" - __slots__ = () - - @abc.abstractmethod - def __float__(self) -> float: - pass - - @runtime_checkable - class SupportsComplex(Protocol): - """An ABC with one abstract method __complex__.""" - __slots__ = () - - @abc.abstractmethod - def __complex__(self) -> complex: - pass - - @runtime_checkable - class SupportsBytes(Protocol): - """An ABC with one abstract method __bytes__.""" - __slots__ = () - - @abc.abstractmethod - def __bytes__(self) -> bytes: - pass - @runtime_checkable class SupportsIndex(Protocol): __slots__ = () @@ -796,45 +649,8 @@ else: def __index__(self) -> int: pass - @runtime_checkable - class SupportsAbs(Protocol[T_co]): - """ - An ABC with one abstract method __abs__ that is covariant in its return type. - """ - __slots__ = () - @abc.abstractmethod - def __abs__(self) -> T_co: - pass - - @runtime_checkable - class SupportsRound(Protocol[T_co]): - """ - An ABC with one abstract method __round__ that is covariant in its return type. 
- """ - __slots__ = () - - @abc.abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -def _ensure_subclassable(mro_entries): - def inner(func): - if sys.implementation.name == "pypy" and sys.version_info < (3, 9): - cls_dict = { - "__call__": staticmethod(func), - "__mro_entries__": staticmethod(mro_entries) - } - t = type(func.__name__, (), cls_dict) - return functools.update_wrapper(t(), func) - else: - func.__mro_entries__ = mro_entries - return func - return inner - - -if hasattr(typing, "ReadOnly"): +if hasattr(typing, "Required"): # The standard library TypedDict in Python 3.8 does not store runtime information # about which (if any) keys are optional. See https://bugs.python.org/issue38834 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" @@ -842,164 +658,148 @@ if hasattr(typing, "ReadOnly"): # The standard library TypedDict below Python 3.11 does not store runtime # information about optional and required keys when using Required or NotRequired. # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. - # Aaaand on 3.12 we add __orig_bases__ to TypedDict - # to enable better runtime introspection. - # On 3.13 we deprecate some odd ways of creating TypedDicts. - # PEP 705 proposes adding the ReadOnly[] qualifier. TypedDict = typing.TypedDict _TypedDictMeta = typing._TypedDictMeta is_typeddict = typing.is_typeddict else: - # 3.10.0 and later - _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + def _check_fails(cls, other): + try: + if sys._getframe(1).f_globals['__name__'] not in ['abc', + 'functools', + 'typing']: + # Typed dicts are only for static structural subtyping. 
+ raise TypeError('TypedDict does not support instance and class checks') + except (AttributeError, ValueError): + pass + return False - def _get_typeddict_qualifiers(annotation_type): - while True: - annotation_origin = get_origin(annotation_type) - if annotation_origin is Annotated: - annotation_args = get_args(annotation_type) - if annotation_args: - annotation_type = annotation_args[0] - else: - break - elif annotation_origin is Required: - yield Required - annotation_type, = get_args(annotation_type) - elif annotation_origin is NotRequired: - yield NotRequired - annotation_type, = get_args(annotation_type) - elif annotation_origin is ReadOnly: - yield ReadOnly - annotation_type, = get_args(annotation_type) - else: - break + def _dict_new(*args, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + return dict(*args, **kwargs) + + _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' + + def _typeddict_new(*args, total=True, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + if args: + typename, args = args[0], args[1:] # allow the "_typename" keyword be passed + elif '_typename' in kwargs: + typename = kwargs.pop('_typename') + import warnings + warnings.warn("Passing '_typename' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError("TypedDict.__new__() missing 1 required positional " + "argument: '_typename'") + if args: + try: + fields, = args # allow the "_fields" keyword be passed + except ValueError: + raise TypeError('TypedDict.__new__() takes from 2 to 3 ' + f'positional arguments but {len(args) + 2} ' + 'were given') + elif '_fields' in kwargs and len(kwargs) == 1: + fields = kwargs.pop('_fields') + import warnings + warnings.warn("Passing '_fields' as keyword argument is 
deprecated", + DeprecationWarning, stacklevel=2) + else: + fields = None + + if fields is None: + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + + ns = {'__annotations__': dict(fields)} + try: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return _TypedDictMeta(typename, (), ns, total=total) + + _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' + ' /, *, total=True, **kwargs)') class _TypedDictMeta(type): - def __new__(cls, name, bases, ns, *, total=True, closed=False): - """Create new typed dict class object. + def __init__(cls, name, bases, ns, total=True): + super().__init__(name, bases, ns) - This method is called when TypedDict is subclassed, - or when TypedDict is instantiated. This way - TypedDict supports all three syntax forms described in its docstring. - Subclasses and instances of TypedDict return actual dictionaries. - """ - for base in bases: - if type(base) is not _TypedDictMeta and base is not typing.Generic: - raise TypeError('cannot inherit from both a TypedDict type ' - 'and a non-TypedDict base class') + def __new__(cls, name, bases, ns, total=True): + # Create new typed dict class object. + # This method is called directly when TypedDict is subclassed, + # or via _typeddict_new when TypedDict is instantiated. This way + # TypedDict supports all three syntaxes described in its docstring. + # Subclasses and instances of TypedDict return actual dictionaries + # via _dict_new. + ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new + # Don't insert typing.Generic into __bases__ here, + # or Generic.__init_subclass__ will raise TypeError + # in the super().__new__() call. + # Instead, monkey-patch __bases__ onto the class after it's been created. 
+ tp_dict = super().__new__(cls, name, (dict,), ns) - if any(issubclass(b, typing.Generic) for b in bases): - generic_base = (typing.Generic,) - else: - generic_base = () - - # typing.py generally doesn't let you inherit from plain Generic, unless - # the name of the class happens to be "Protocol" - tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) - tp_dict.__name__ = name - if tp_dict.__qualname__ == "Protocol": - tp_dict.__qualname__ = name - - if not hasattr(tp_dict, '__orig_bases__'): - tp_dict.__orig_bases__ = bases + if any(issubclass(base, typing.Generic) for base in bases): + tp_dict.__bases__ = (typing.Generic, dict) + _maybe_adjust_parameters(tp_dict) annotations = {} own_annotations = ns.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - if _TAKES_MODULE: - own_annotations = { - n: typing._type_check(tp, msg, module=tp_dict.__module__) - for n, tp in own_annotations.items() - } - else: - own_annotations = { - n: typing._type_check(tp, msg) - for n, tp in own_annotations.items() - } + own_annotations = { + n: typing._type_check(tp, msg) for n, tp in own_annotations.items() + } required_keys = set() optional_keys = set() - readonly_keys = set() - mutable_keys = set() - extra_items_type = None for base in bases: - base_dict = base.__dict__ - - annotations.update(base_dict.get('__annotations__', {})) - required_keys.update(base_dict.get('__required_keys__', ())) - optional_keys.update(base_dict.get('__optional_keys__', ())) - readonly_keys.update(base_dict.get('__readonly_keys__', ())) - mutable_keys.update(base_dict.get('__mutable_keys__', ())) - base_extra_items_type = base_dict.get('__extra_items__', None) - if base_extra_items_type is not None: - extra_items_type = base_extra_items_type - - if closed and extra_items_type is None: - extra_items_type = Never - if closed and "__extra_items__" in own_annotations: - annotation_type = own_annotations.pop("__extra_items__") - 
qualifiers = set(_get_typeddict_qualifiers(annotation_type)) - if Required in qualifiers: - raise TypeError( - "Special key __extra_items__ does not support " - "Required" - ) - if NotRequired in qualifiers: - raise TypeError( - "Special key __extra_items__ does not support " - "NotRequired" - ) - extra_items_type = annotation_type + annotations.update(base.__dict__.get('__annotations__', {})) + required_keys.update(base.__dict__.get('__required_keys__', ())) + optional_keys.update(base.__dict__.get('__optional_keys__', ())) annotations.update(own_annotations) for annotation_key, annotation_type in own_annotations.items(): - qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) - if Required in qualifiers: + if annotation_origin is Required: required_keys.add(annotation_key) - elif NotRequired in qualifiers: + elif annotation_origin is NotRequired: optional_keys.add(annotation_key) elif total: required_keys.add(annotation_key) else: optional_keys.add(annotation_key) - if ReadOnly in qualifiers: - mutable_keys.discard(annotation_key) - readonly_keys.add(annotation_key) - else: - mutable_keys.add(annotation_key) - readonly_keys.discard(annotation_key) tp_dict.__annotations__ = annotations tp_dict.__required_keys__ = frozenset(required_keys) tp_dict.__optional_keys__ = frozenset(optional_keys) - tp_dict.__readonly_keys__ = frozenset(readonly_keys) - tp_dict.__mutable_keys__ = frozenset(mutable_keys) if not hasattr(tp_dict, '__total__'): tp_dict.__total__ = total - tp_dict.__closed__ = closed - tp_dict.__extra_items__ = extra_items_type return tp_dict - __call__ = dict # static method + __instancecheck__ = __subclasscheck__ = _check_fails - def __subclasscheck__(cls, other): - # Typed dicts are only for static structural 
subtyping. - raise TypeError('TypedDict does not support instance and class checks') + TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) + TypedDict.__module__ = __name__ + TypedDict.__doc__ = \ + """A simple typed name space. At runtime it is equivalent to a plain dict. - __instancecheck__ = __subclasscheck__ - - _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) - - @_ensure_subclassable(lambda bases: (_TypedDict,)) - def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs): - """A simple typed namespace. At runtime it is equivalent to a plain dict. - - TypedDict creates a dictionary type such that a type checker will expect all - instances to have a certain set of keys, where each key is + TypedDict creates a dictionary type that expects all of its + instances to have a certain set of keys, with each key associated with a value of a consistent type. This expectation - is not checked at runtime. - + is not checked at runtime but is only enforced by type checkers. Usage:: class Point2D(TypedDict): @@ -1014,71 +814,14 @@ else: The type info can be accessed via the Point2D.__annotations__ dict, and the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports an additional equivalent form:: + TypedDict supports two additional equivalent forms:: + Point2D = TypedDict('Point2D', x=int, y=int, label=str) Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - By default, all keys must be present in a TypedDict. It is possible - to override this by specifying totality:: - - class Point2D(TypedDict, total=False): - x: int - y: int - - This means that a Point2D TypedDict can have any of the keys omitted. A type - checker is only expected to support a literal False or True as the value of - the total argument. True is the default, and makes all items defined in the - class body be required. 
- - The Required and NotRequired special forms can also be used to mark - individual keys as being required or not required:: - - class Point2D(TypedDict): - x: int # the "x" key must always be present (Required is the default) - y: NotRequired[int] # the "y" key can be omitted - - See PEP 655 for more details on Required and NotRequired. + The class syntax is only supported in Python 3.6+, while two other + syntax forms work for Python 2.7 and 3.2+ """ - if fields is _marker or fields is None: - if fields is _marker: - deprecated_thing = "Failing to pass a value for the 'fields' parameter" - else: - deprecated_thing = "Passing `None` as the 'fields' parameter" - - example = f"`{typename} = TypedDict({typename!r}, {{}})`" - deprecation_msg = ( - f"{deprecated_thing} is deprecated and will be disallowed in " - "Python 3.15. To create a TypedDict class with 0 fields " - "using the functional syntax, pass an empty dictionary, e.g. " - ) + example + "." - warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) - if closed is not False and closed is not True: - kwargs["closed"] = closed - closed = False - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") - if kwargs: - if sys.version_info >= (3, 13): - raise TypeError("TypedDict takes no keyword arguments") - warnings.warn( - "The kwargs-based syntax for TypedDict definitions is deprecated " - "in Python 3.11, will be removed in Python 3.13, and may not be " - "understood by third-party type checkers.", - DeprecationWarning, - stacklevel=2, - ) - - ns = {'__annotations__': dict(fields)} - module = _caller() - if module is not None: - # Setting correct module is necessary to make typed dict classes pickleable. 
- ns['__module__'] = module - - td = _TypedDictMeta(typename, (), ns, total=total, closed=closed) - td.__orig_bases__ = (TypedDict,) - return td if hasattr(typing, "_TypedDictMeta"): _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) @@ -1096,17 +839,14 @@ else: is_typeddict(Film) # => True is_typeddict(Union[list, str]) # => False """ - # On 3.8, this would otherwise return True - if hasattr(typing, "TypedDict") and tp is typing.TypedDict: - return False - return isinstance(tp, _TYPEDDICT_TYPES) + return isinstance(tp, tuple(_TYPEDDICT_TYPES)) if hasattr(typing, "assert_type"): assert_type = typing.assert_type else: - def assert_type(val, typ, /): + def assert_type(__val, __typ): """Assert (to the type checker) that the value is of the given type. When the type checker encounters a call to assert_type(), it @@ -1119,12 +859,15 @@ else: At runtime this returns the first argument unchanged and otherwise does nothing. """ - return val + return __val -if hasattr(typing, "Required"): # 3.11+ +if hasattr(typing, "Required"): get_type_hints = typing.get_type_hints -else: # <=3.10 +else: + import functools + import types + # replaces _strip_annotations() def _strip_extras(t): """Strips Annotated, Required and NotRequired from a given type.""" @@ -1137,12 +880,12 @@ else: # <=3.10 if stripped_args == t.__args__: return t return t.copy_with(stripped_args) - if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): + if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t - return _types.GenericAlias(t.__origin__, stripped_args) - if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): + return types.GenericAlias(t.__origin__, stripped_args) + if hasattr(types, "UnionType") and isinstance(t, types.UnionType): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t @@ -1182,11 
+925,11 @@ else: # <=3.10 - If two dict arguments are passed, they specify globals and locals, respectively. """ - if hasattr(typing, "Annotated"): # 3.9+ + if hasattr(typing, "Annotated"): hint = typing.get_type_hints( obj, globalns=globalns, localns=localns, include_extras=True ) - else: # 3.8 + else: hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) if include_extras: return hint @@ -1199,7 +942,7 @@ if hasattr(typing, 'Annotated'): # Not exported and not a public API, but needed for get_origin() and get_args() # to work. _AnnotatedAlias = typing._AnnotatedAlias -# 3.8 +# 3.7-3.8 else: class _AnnotatedAlias(typing._GenericAlias, _root=True): """Runtime representation of an annotated type. @@ -1304,7 +1047,7 @@ else: if sys.version_info[:2] >= (3, 10): get_origin = typing.get_origin get_args = typing.get_args -# 3.8-3.9 +# 3.7-3.9 else: try: # 3.9+ @@ -1369,7 +1112,11 @@ if hasattr(typing, 'TypeAlias'): TypeAlias = typing.TypeAlias # 3.9 elif sys.version_info[:2] >= (3, 9): - @_ExtensionsSpecialForm + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeAliasForm def TypeAlias(self, parameters): """Special marker indicating that an assignment should be recognized as a proper type alias definition by type @@ -1382,89 +1129,68 @@ elif sys.version_info[:2] >= (3, 9): It's invalid when used anywhere except as in the example above. """ raise TypeError(f"{self} is not subscriptable") -# 3.8 +# 3.7-3.8 else: - TypeAlias = _ExtensionsSpecialForm( - 'TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' 
+ self._name - For example:: + TypeAlias = _TypeAliasForm('TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. - Predicate: TypeAlias = Callable[..., bool] + For example:: - It's invalid when used anywhere except as in the example - above.""" - ) + Predicate: TypeAlias = Callable[..., bool] - -def _set_default(type_param, default): - if isinstance(default, (tuple, list)): - type_param.__default__ = tuple((typing._type_check(d, "Default must be a type") - for d in default)) - elif default != _marker: - if isinstance(type_param, ParamSpec) and default is ...: # ... not valid <3.11 - type_param.__default__ = default - else: - type_param.__default__ = typing._type_check(default, "Default must be a type") - else: - type_param.__default__ = None - - -def _set_module(typevarlike): - # for pickling: - def_mod = _caller(depth=3) - if def_mod != 'typing_extensions': - typevarlike.__module__ = def_mod + It's invalid when used anywhere except as in the example + above.""") class _DefaultMixin: """Mixin for TypeVarLike defaults.""" __slots__ = () - __init__ = _set_default - -# Classes using this metaclass must provide a _backported_typevarlike ClassVar -class _TypeVarLikeMeta(type): - def __instancecheck__(cls, __instance: Any) -> bool: - return isinstance(__instance, cls._backported_typevarlike) + def __init__(self, default): + if isinstance(default, (tuple, list)): + self.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default: + self.__default__ = typing._type_check(default, "Default must be a type") + else: + self.__default__ = None # Add default and infer_variance parameters from PEP 696 and 695 -class TypeVar(metaclass=_TypeVarLikeMeta): +class TypeVar(typing.TypeVar, _DefaultMixin, _root=True): """Type variable.""" - _backported_typevarlike = typing.TypeVar + __module__ = 'typing' - def __new__(cls, name, *constraints, bound=None, - 
covariant=False, contravariant=False, - default=_marker, infer_variance=False): - if hasattr(typing, "TypeAliasType"): - # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar - typevar = typing.TypeVar(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant, - infer_variance=infer_variance) - else: - typevar = typing.TypeVar(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant) - if infer_variance and (covariant or contravariant): - raise ValueError("Variance cannot be specified with infer_variance.") - typevar.__infer_variance__ = infer_variance - _set_default(typevar, default) - _set_module(typevar) - return typevar + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=None, infer_variance=False): + super().__init__(name, *constraints, bound=bound, covariant=covariant, + contravariant=contravariant) + _DefaultMixin.__init__(self, default) + self.__infer_variance__ = infer_variance - def __init_subclass__(cls) -> None: - raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod # Python 3.10+ has PEP 612 if hasattr(typing, 'ParamSpecArgs'): ParamSpecArgs = typing.ParamSpecArgs ParamSpecKwargs = typing.ParamSpecKwargs -# 3.8-3.9 +# 3.7-3.9 else: class _Immutable: """Mixin to indicate that object should not be copied.""" @@ -1525,35 +1251,27 @@ else: # 3.10+ if hasattr(typing, 'ParamSpec'): - # Add default parameter - PEP 696 - class ParamSpec(metaclass=_TypeVarLikeMeta): - """Parameter specification.""" + # Add default Parameter - PEP 696 + class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True): + """Parameter specification variable.""" - _backported_typevarlike = typing.ParamSpec + __module__ = 'typing' - 
def __new__(cls, name, *, bound=None, - covariant=False, contravariant=False, - infer_variance=False, default=_marker): - if hasattr(typing, "TypeAliasType"): - # PEP 695 implemented, can pass infer_variance to typing.TypeVar - paramspec = typing.ParamSpec(name, bound=bound, - covariant=covariant, - contravariant=contravariant, - infer_variance=infer_variance) - else: - paramspec = typing.ParamSpec(name, bound=bound, - covariant=covariant, - contravariant=contravariant) - paramspec.__infer_variance__ = infer_variance + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + default=None): + super().__init__(name, bound=bound, covariant=covariant, + contravariant=contravariant) + _DefaultMixin.__init__(self, default) - _set_default(paramspec, default) - _set_module(paramspec) - return paramspec + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod - def __init_subclass__(cls) -> None: - raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") - -# 3.8-3.9 +# 3.7-3.9 else: # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 
@@ -1616,12 +1334,11 @@ else: return ParamSpecKwargs(self) def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - infer_variance=False, default=_marker): + default=None): super().__init__([self]) self.__name__ = name self.__covariant__ = bool(covariant) self.__contravariant__ = bool(contravariant) - self.__infer_variance__ = bool(infer_variance) if bound: self.__bound__ = typing._type_check(bound, 'Bound must be a type.') else: @@ -1629,14 +1346,15 @@ else: _DefaultMixin.__init__(self, default) # for pickling: - def_mod = _caller() + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None if def_mod != 'typing_extensions': self.__module__ = def_mod def __repr__(self): - if self.__infer_variance__: - prefix = '' - elif self.__covariant__: + if self.__covariant__: prefix = '+' elif self.__contravariant__: prefix = '-' @@ -1658,7 +1376,7 @@ else: pass -# 3.8-3.9 +# 3.7-3.9 if not hasattr(typing, 'Concatenate'): # Inherits from list as a workaround for Callable checks in Python < 3.9.2. class _ConcatenateGenericAlias(list): @@ -1693,7 +1411,7 @@ if not hasattr(typing, 'Concatenate'): ) -# 3.8-3.9 +# 3.7-3.9 @typing._tp_cache def _concatenate_getitem(self, parameters): if parameters == (): @@ -1711,10 +1429,10 @@ def _concatenate_getitem(self, parameters): # 3.10+ if hasattr(typing, 'Concatenate'): Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa # 3.9 elif sys.version_info[:2] >= (3, 9): - @_ExtensionsSpecialForm + @_TypeAliasForm def Concatenate(self, parameters): """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a higher order function which adds, removes or transforms parameters of a @@ -1727,9 +1445,12 @@ elif sys.version_info[:2] >= (3, 9): See PEP 612 for detailed information. 
""" return _concatenate_getitem(self, parameters) -# 3.8 +# 3.7-8 else: - class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): + class _ConcatenateForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + def __getitem__(self, parameters): return _concatenate_getitem(self, parameters) @@ -1751,7 +1472,11 @@ if hasattr(typing, 'TypeGuard'): TypeGuard = typing.TypeGuard # 3.9 elif sys.version_info[:2] >= (3, 9): - @_ExtensionsSpecialForm + class _TypeGuardForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeGuardForm def TypeGuard(self, parameters): """Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument. @@ -1797,9 +1522,13 @@ elif sys.version_info[:2] >= (3, 9): """ item = typing._type_check(parameters, f'{self} accepts only a single type.') return typing._GenericAlias(self, (item,)) -# 3.8 +# 3.7-3.8 else: - class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): + class _TypeGuardForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type') @@ -1850,98 +1579,6 @@ else: PEP 647 (User-Defined Type Guards). """) -# 3.13+ -if hasattr(typing, 'TypeIs'): - TypeIs = typing.TypeIs -# 3.9 -elif sys.version_info[:2] >= (3, 9): - @_ExtensionsSpecialForm - def TypeIs(self, parameters): - """Special typing form used to annotate the return type of a user-defined - type narrower function. ``TypeIs`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. 
Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeIs[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeIs`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the intersection of the type inside ``TypeGuard`` and the argument's - previously known type. - - For example:: - - def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: - return hasattr(val, '__await__') - - def f(val: Union[int, Awaitable[int]]) -> int: - if is_awaitable(val): - assert_type(val, Awaitable[int]) - else: - assert_type(val, int) - - ``TypeIs`` also works with type variables. For more information, see - PEP 742 (Narrowing types with TypeIs). - """ - item = typing._type_check(parameters, f'{self} accepts only a single type.') - return typing._GenericAlias(self, (item,)) -# 3.8 -else: - class _TypeIsForm(_ExtensionsSpecialForm, _root=True): - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type') - return typing._GenericAlias(self, (item,)) - - TypeIs = _TypeIsForm( - 'TypeIs', - doc="""Special typing form used to annotate the return type of a user-defined - type narrower function. ``TypeIs`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. 
The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeIs[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeIs`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the intersection of the type inside ``TypeGuard`` and the argument's - previously known type. - - For example:: - - def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: - return hasattr(val, '__await__') - - def f(val: Union[int, Awaitable[int]]) -> int: - if is_awaitable(val): - assert_type(val, Awaitable[int]) - else: - assert_type(val, int) - - ``TypeIs`` also works with type variables. For more information, see - PEP 742 (Narrowing types with TypeIs). - """) - # Vendored from cpython typing._SpecialFrom class _SpecialForm(typing._Final, _root=True): @@ -1987,7 +1624,7 @@ class _SpecialForm(typing._Final, _root=True): return self._getitem(self, parameters) -if hasattr(typing, "LiteralString"): # 3.11+ +if hasattr(typing, "LiteralString"): LiteralString = typing.LiteralString else: @_SpecialForm @@ -2010,7 +1647,7 @@ else: raise TypeError(f"{self} is not subscriptable") -if hasattr(typing, "Self"): # 3.11+ +if hasattr(typing, "Self"): Self = typing.Self else: @_SpecialForm @@ -2031,7 +1668,7 @@ else: raise TypeError(f"{self} is not subscriptable") -if hasattr(typing, "Never"): # 3.11+ +if hasattr(typing, "Never"): Never = typing.Never else: @_SpecialForm @@ -2061,10 +1698,14 @@ else: raise TypeError(f"{self} is not subscriptable") -if hasattr(typing, 'Required'): # 3.11+ +if hasattr(typing, 'Required'): Required = typing.Required NotRequired = typing.NotRequired -elif sys.version_info[:2] >= (3, 9): # 3.9-3.10 +elif sys.version_info[:2] >= (3, 9): + class 
_ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + @_ExtensionsSpecialForm def Required(self, parameters): """A special typing construct to mark a key of a total=False TypedDict @@ -2102,8 +1743,11 @@ elif sys.version_info[:2] >= (3, 9): # 3.9-3.10 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return typing._GenericAlias(self, (item,)) -else: # 3.8 - class _RequiredForm(_ExtensionsSpecialForm, _root=True): +else: + class _RequiredForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') @@ -2142,129 +1786,59 @@ else: # 3.8 """) -if hasattr(typing, 'ReadOnly'): - ReadOnly = typing.ReadOnly -elif sys.version_info[:2] >= (3, 9): # 3.9-3.12 - @_ExtensionsSpecialForm - def ReadOnly(self, parameters): - """A special typing construct to mark an item of a TypedDict as read-only. - - For example: - - class Movie(TypedDict): - title: ReadOnly[str] - year: int - - def mutate_movie(m: Movie) -> None: - m["year"] = 1992 # allowed - m["title"] = "The Matrix" # typechecker error - - There is no runtime checking for this property. - """ - item = typing._type_check(parameters, f'{self._name} accepts only a single type.') - return typing._GenericAlias(self, (item,)) - -else: # 3.8 - class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True): - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type.') - return typing._GenericAlias(self, (item,)) - - ReadOnly = _ReadOnlyForm( - 'ReadOnly', - doc="""A special typing construct to mark a key of a TypedDict as read-only. 
- - For example: - - class Movie(TypedDict): - title: ReadOnly[str] - year: int - - def mutate_movie(m: Movie) -> None: - m["year"] = 1992 # allowed - m["title"] = "The Matrix" # typechecker error - - There is no runtime checking for this propery. - """) - - -_UNPACK_DOC = """\ -Type unpack operator. - -The type unpack operator takes the child types from some container type, -such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For -example: - - # For some generic class `Foo`: - Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] - - Ts = TypeVarTuple('Ts') - # Specifies that `Bar` is generic in an arbitrary number of types. - # (Think of `Ts` as a tuple of an arbitrary number of individual - # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the - # `Generic[]`.) - class Bar(Generic[Unpack[Ts]]): ... - Bar[int] # Valid - Bar[int, str] # Also valid - -From Python 3.11, this can also be done using the `*` operator: - - Foo[*tuple[int, str]] - class Bar(Generic[*Ts]): ... - -The operator can also be used along with a `TypedDict` to annotate -`**kwargs` in a function signature. For instance: - - class Movie(TypedDict): - name: str - year: int - - # This function expects two keyword arguments - *name* of type `str` and - # *year* of type `int`. - def foo(**kwargs: Unpack[Movie]): ... - -Note that there is only some runtime checking of this operator. Not -everything the runtime allows may be accepted by static type checkers. - -For more information, see PEP 646 and PEP 692. 
-""" - - -if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] +if hasattr(typing, "Unpack"): # 3.11+ Unpack = typing.Unpack - - def _is_unpack(obj): - return get_origin(obj) is Unpack - -elif sys.version_info[:2] >= (3, 9): # 3.9+ - class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): - def __init__(self, getitem): - super().__init__(getitem) - self.__doc__ = _UNPACK_DOC +elif sys.version_info[:2] >= (3, 9): + class _UnpackSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar @_UnpackSpecialForm def Unpack(self, parameters): + """A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... + + """ item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) -else: # 3.8 +else: class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar - class _UnpackForm(_ExtensionsSpecialForm, _root=True): + class _UnpackForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) - Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) + Unpack = _UnpackForm( + 'Unpack', + doc="""A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... 
+ + """) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) @@ -2272,22 +1846,23 @@ else: # 3.8 if hasattr(typing, "TypeVarTuple"): # 3.11+ - # Add default parameter - PEP 696 - class TypeVarTuple(metaclass=_TypeVarLikeMeta): + # Add default Parameter - PEP 696 + class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True): """Type variable tuple.""" - _backported_typevarlike = typing.TypeVarTuple + def __init__(self, name, *, default=None): + super().__init__(name) + _DefaultMixin.__init__(self, default) - def __new__(cls, name, *, default=_marker): - tvt = typing.TypeVarTuple(name) - _set_default(tvt, default) - _set_module(tvt) - return tvt + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod - def __init_subclass__(self, *args, **kwds): - raise TypeError("Cannot subclass special typing classes") - -else: # <=3.10 +else: class TypeVarTuple(_DefaultMixin): """Type variable tuple. @@ -2338,12 +1913,15 @@ else: # <=3.10 def __iter__(self): yield self.__unpacked__ - def __init__(self, name, *, default=_marker): + def __init__(self, name, *, default=None): self.__name__ = name _DefaultMixin.__init__(self, default) # for pickling: - def_mod = _caller() + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None if def_mod != 'typing_extensions': self.__module__ = def_mod @@ -2366,10 +1944,10 @@ else: # <=3.10 raise TypeError("Cannot subclass special typing classes") -if hasattr(typing, "reveal_type"): # 3.11+ +if hasattr(typing, "reveal_type"): reveal_type = typing.reveal_type -else: # <=3.10 - def reveal_type(obj: T, /) -> T: +else: + def reveal_type(__obj: T) -> T: """Reveal the inferred type of a variable. 
When a static type checker encounters a call to ``reveal_type()``, @@ -2385,14 +1963,14 @@ else: # <=3.10 argument and returns it unchanged. """ - print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) - return obj + print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) + return __obj -if hasattr(typing, "assert_never"): # 3.11+ +if hasattr(typing, "assert_never"): assert_never = typing.assert_never -else: # <=3.10 - def assert_never(arg: Never, /) -> Never: +else: + def assert_never(__arg: Never) -> Never: """Assert to the type checker that a line of code is unreachable. Example:: @@ -2415,16 +1993,14 @@ else: # <=3.10 raise AssertionError("Expected code to be unreachable") -if sys.version_info >= (3, 12): # 3.12+ - # dataclass_transform exists in 3.11 but lacks the frozen_default parameter +if hasattr(typing, 'dataclass_transform'): dataclass_transform = typing.dataclass_transform -else: # <=3.11 +else: def dataclass_transform( *, eq_default: bool = True, order_default: bool = False, kw_only_default: bool = False, - frozen_default: bool = False, field_specifiers: typing.Tuple[ typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], ... @@ -2481,8 +2057,6 @@ else: # <=3.11 assumed to be True or False if it is omitted by the caller. - ``kw_only_default`` indicates whether the ``kw_only`` parameter is assumed to be True or False if it is omitted by the caller. - - ``frozen_default`` indicates whether the ``frozen`` parameter is - assumed to be True or False if it is omitted by the caller. - ``field_specifiers`` specifies a static list of supported classes or functions that describe fields, similar to ``dataclasses.field()``. 
@@ -2497,7 +2071,6 @@ else: # <=3.11 "eq_default": eq_default, "order_default": order_default, "kw_only_default": kw_only_default, - "frozen_default": frozen_default, "field_specifiers": field_specifiers, "kwargs": kwargs, } @@ -2505,18 +2078,18 @@ else: # <=3.11 return decorator -if hasattr(typing, "override"): # 3.12+ +if hasattr(typing, "override"): override = typing.override -else: # <=3.11 +else: _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) - def override(arg: _F, /) -> _F: + def override(__arg: _F) -> _F: """Indicate that a method is intended to override a method in a base class. Usage: class Base: - def method(self) -> None: + def method(self) -> None: ... pass class Child(Base): @@ -2529,156 +2102,10 @@ else: # <=3.11 This helps prevent bugs that may occur when a base class is changed without an equivalent change to a child class. - There is no runtime checking of these properties. The decorator - sets the ``__override__`` attribute to ``True`` on the decorated object - to allow runtime introspection. - See PEP 698 for details. """ - try: - arg.__override__ = True - except (AttributeError, TypeError): - # Skip the attribute silently if it is not writable. - # AttributeError happens if the object has __slots__ or a - # read-only property, TypeError if it's a builtin class. - pass - return arg - - -if hasattr(warnings, "deprecated"): - deprecated = warnings.deprecated -else: - _T = typing.TypeVar("_T") - - class deprecated: - """Indicate that a class, function or overload is deprecated. - - When this decorator is applied to an object, the type checker - will generate a diagnostic on usage of the deprecated object. - - Usage: - - @deprecated("Use B instead") - class A: - pass - - @deprecated("Use g instead") - def f(): - pass - - @overload - @deprecated("int support is deprecated") - def g(x: int) -> int: ... - @overload - def g(x: str) -> int: ... 
- - The warning specified by *category* will be emitted at runtime - on use of deprecated objects. For functions, that happens on calls; - for classes, on instantiation and on creation of subclasses. - If the *category* is ``None``, no warning is emitted at runtime. - The *stacklevel* determines where the - warning is emitted. If it is ``1`` (the default), the warning - is emitted at the direct caller of the deprecated object; if it - is higher, it is emitted further up the stack. - Static type checker behavior is not affected by the *category* - and *stacklevel* arguments. - - The deprecation message passed to the decorator is saved in the - ``__deprecated__`` attribute on the decorated object. - If applied to an overload, the decorator - must be after the ``@overload`` decorator for the attribute to - exist on the overload as returned by ``get_overloads()``. - - See PEP 702 for details. - - """ - def __init__( - self, - message: str, - /, - *, - category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, - stacklevel: int = 1, - ) -> None: - if not isinstance(message, str): - raise TypeError( - "Expected an object of type str for 'message', not " - f"{type(message).__name__!r}" - ) - self.message = message - self.category = category - self.stacklevel = stacklevel - - def __call__(self, arg: _T, /) -> _T: - # Make sure the inner functions created below don't - # retain a reference to self. - msg = self.message - category = self.category - stacklevel = self.stacklevel - if category is None: - arg.__deprecated__ = msg - return arg - elif isinstance(arg, type): - import functools - from types import MethodType - - original_new = arg.__new__ - - @functools.wraps(original_new) - def __new__(cls, *args, **kwargs): - if cls is arg: - warnings.warn(msg, category=category, stacklevel=stacklevel + 1) - if original_new is not object.__new__: - return original_new(cls, *args, **kwargs) - # Mirrors a similar check in object.__new__. 
- elif cls.__init__ is object.__init__ and (args or kwargs): - raise TypeError(f"{cls.__name__}() takes no arguments") - else: - return original_new(cls) - - arg.__new__ = staticmethod(__new__) - - original_init_subclass = arg.__init_subclass__ - # We need slightly different behavior if __init_subclass__ - # is a bound method (likely if it was implemented in Python) - if isinstance(original_init_subclass, MethodType): - original_init_subclass = original_init_subclass.__func__ - - @functools.wraps(original_init_subclass) - def __init_subclass__(*args, **kwargs): - warnings.warn(msg, category=category, stacklevel=stacklevel + 1) - return original_init_subclass(*args, **kwargs) - - arg.__init_subclass__ = classmethod(__init_subclass__) - # Or otherwise, which likely means it's a builtin such as - # object's implementation of __init_subclass__. - else: - @functools.wraps(original_init_subclass) - def __init_subclass__(*args, **kwargs): - warnings.warn(msg, category=category, stacklevel=stacklevel + 1) - return original_init_subclass(*args, **kwargs) - - arg.__init_subclass__ = __init_subclass__ - - arg.__deprecated__ = __new__.__deprecated__ = msg - __init_subclass__.__deprecated__ = msg - return arg - elif callable(arg): - import functools - - @functools.wraps(arg) - def wrapper(*args, **kwargs): - warnings.warn(msg, category=category, stacklevel=stacklevel + 1) - return arg(*args, **kwargs) - - arg.__deprecated__ = wrapper.__deprecated__ = msg - return wrapper - else: - raise TypeError( - "@deprecated decorator with non-None category must be applied to " - f"a class or callable, not {arg!r}" - ) + return __arg # We have to do some monkey patching to deal with the dual nature of @@ -2693,14 +2120,18 @@ if not hasattr(typing, "TypeVarTuple"): typing._check_generic = _check_generic -# Backport typing.NamedTuple as it exists in Python 3.13. +# Backport typing.NamedTuple as it exists in Python 3.11. # In 3.11, the ability to define generic `NamedTuple`s was supported. 
# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. -# On 3.12, we added __orig_bases__ to call-based NamedTuples -# On 3.13, we deprecated kwargs-based NamedTuples -if sys.version_info >= (3, 13): +if sys.version_info >= (3, 11): NamedTuple = typing.NamedTuple else: + def _caller(): + try: + return sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + def _make_nmtuple(name, types, module, defaults=()): fields = [n for n, t in types] annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") @@ -2742,486 +2173,37 @@ else: ) nm_tpl.__bases__ = bases if typing.Generic in bases: - if hasattr(typing, '_generic_class_getitem'): # 3.12+ - nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) - else: - class_getitem = typing.Generic.__class_getitem__.__func__ - nm_tpl.__class_getitem__ = classmethod(class_getitem) + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) # update from user namespace without overriding special namedtuple attributes - for key, val in ns.items(): + for key in ns: if key in _prohibited_namedtuple_fields: raise AttributeError("Cannot overwrite NamedTuple attribute " + key) - elif key not in _special_namedtuple_fields: - if key not in nm_tpl._fields: - setattr(nm_tpl, key, ns[key]) - try: - set_name = type(val).__set_name__ - except AttributeError: - pass - else: - try: - set_name(val, nm_tpl, key) - except BaseException as e: - msg = ( - f"Error calling __set_name__ on {type(val).__name__!r} " - f"instance {key!r} in {typename!r}" - ) - # BaseException.add_note() existed on py311, - # but the __set_name__ machinery didn't start - # using add_note() until py312. - # Making sure exceptions are raised in the same way - # as in "normal" classes seems most important here. 
- if sys.version_info >= (3, 12): - e.add_note(msg) - raise - else: - raise RuntimeError(msg) from e - + elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) if typing.Generic in bases: nm_tpl.__init_subclass__() return nm_tpl + def NamedTuple(__typename, __fields=None, **kwargs): + if __fields is None: + __fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(__typename, __fields, module=_caller()) + + NamedTuple.__doc__ = typing.NamedTuple.__doc__ _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + # On 3.8+, alter the signature so that it matches typing.NamedTuple. + # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, + # so just leave the signature as it is on 3.7. + if sys.version_info >= (3, 8): + NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)' + def _namedtuple_mro_entries(bases): assert NamedTuple in bases return (_NamedTuple,) - @_ensure_subclassable(_namedtuple_mro_entries) - def NamedTuple(typename, fields=_marker, /, **kwargs): - """Typed version of namedtuple. - - Usage:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has an extra __annotations__ attribute, giving a - dict that maps field names to types. (The field names are also in - the _fields attribute, which is part of the namedtuple API.) - An alternative equivalent functional syntax is also accepted:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - if fields is _marker: - if kwargs: - deprecated_thing = "Creating NamedTuple classes using keyword arguments" - deprecation_msg = ( - "{name} is deprecated and will be disallowed in Python {remove}. " - "Use the class-based or functional syntax instead." 
- ) - else: - deprecated_thing = "Failing to pass a value for the 'fields' parameter" - example = f"`{typename} = NamedTuple({typename!r}, [])`" - deprecation_msg = ( - "{name} is deprecated and will be disallowed in Python {remove}. " - "To create a NamedTuple class with 0 fields " - "using the functional syntax, " - "pass an empty list, e.g. " - ) + example + "." - elif fields is None: - if kwargs: - raise TypeError( - "Cannot pass `None` as the 'fields' parameter " - "and also specify fields using keyword arguments" - ) - else: - deprecated_thing = "Passing `None` as the 'fields' parameter" - example = f"`{typename} = NamedTuple({typename!r}, [])`" - deprecation_msg = ( - "{name} is deprecated and will be disallowed in Python {remove}. " - "To create a NamedTuple class with 0 fields " - "using the functional syntax, " - "pass an empty list, e.g. " - ) + example + "." - elif kwargs: - raise TypeError("Either list of fields or keywords" - " can be provided to NamedTuple, not both") - if fields is _marker or fields is None: - warnings.warn( - deprecation_msg.format(name=deprecated_thing, remove="3.15"), - DeprecationWarning, - stacklevel=2, - ) - fields = kwargs.items() - nt = _make_nmtuple(typename, fields, module=_caller()) - nt.__orig_bases__ = (NamedTuple,) - return nt - - -if hasattr(collections.abc, "Buffer"): - Buffer = collections.abc.Buffer -else: - class Buffer(abc.ABC): - """Base class for classes that implement the buffer protocol. - - The buffer protocol allows Python objects to expose a low-level - memory buffer interface. Before Python 3.12, it is not possible - to implement the buffer protocol in pure Python code, or even - to check whether a class implements the buffer protocol. In - Python 3.12 and higher, the ``__buffer__`` method allows access - to the buffer protocol from Python code, and the - ``collections.abc.Buffer`` ABC allows checking whether a class - implements the buffer protocol. 
- - To indicate support for the buffer protocol in earlier versions, - inherit from this ABC, either in a stub file or at runtime, - or use ABC registration. This ABC provides no methods, because - there is no Python-accessible methods shared by pre-3.12 buffer - classes. It is useful primarily for static checks. - - """ - - # As a courtesy, register the most common stdlib buffer classes. - Buffer.register(memoryview) - Buffer.register(bytearray) - Buffer.register(bytes) - - -# Backport of types.get_original_bases, available on 3.12+ in CPython -if hasattr(_types, "get_original_bases"): - get_original_bases = _types.get_original_bases -else: - def get_original_bases(cls, /): - """Return the class's "original" bases prior to modification by `__mro_entries__`. - - Examples:: - - from typing import TypeVar, Generic - from typing_extensions import NamedTuple, TypedDict - - T = TypeVar("T") - class Foo(Generic[T]): ... - class Bar(Foo[int], float): ... - class Baz(list[str]): ... - Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) - Spam = TypedDict("Spam", {"a": int, "b": str}) - - assert get_original_bases(Bar) == (Foo[int], float) - assert get_original_bases(Baz) == (list[str],) - assert get_original_bases(Eggs) == (NamedTuple,) - assert get_original_bases(Spam) == (TypedDict,) - assert get_original_bases(int) == (object,) - """ - try: - return cls.__dict__.get("__orig_bases__", cls.__bases__) - except AttributeError: - raise TypeError( - f'Expected an instance of type, not {type(cls).__name__!r}' - ) from None - - -# NewType is a class on Python 3.10+, making it pickleable -# The error message for subclassing instances of NewType was improved on 3.11+ -if sys.version_info >= (3, 11): - NewType = typing.NewType -else: - class NewType: - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. 
At runtime, NewType(name, tp) returns - a dummy callable that simply returns its argument. Usage:: - UserId = NewType('UserId', int) - def name_by_id(user_id: UserId) -> str: - ... - UserId('user') # Fails type check - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - num = UserId(5) + 1 # type: int - """ - - def __call__(self, obj, /): - return obj - - def __init__(self, name, tp): - self.__qualname__ = name - if '.' in name: - name = name.rpartition('.')[-1] - self.__name__ = name - self.__supertype__ = tp - def_mod = _caller() - if def_mod != 'typing_extensions': - self.__module__ = def_mod - - def __mro_entries__(self, bases): - # We defined __mro_entries__ to get a better error message - # if a user attempts to subclass a NewType instance. bpo-46170 - supercls_name = self.__name__ - - class Dummy: - def __init_subclass__(cls): - subcls_name = cls.__name__ - raise TypeError( - f"Cannot subclass an instance of NewType. " - f"Perhaps you were looking for: " - f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" - ) - - return (Dummy,) - - def __repr__(self): - return f'{self.__module__}.{self.__qualname__}' - - def __reduce__(self): - return self.__qualname__ - - if sys.version_info >= (3, 10): - # PEP 604 methods - # It doesn't make sense to have these methods on Python <3.10 - - def __or__(self, other): - return typing.Union[self, other] - - def __ror__(self, other): - return typing.Union[other, self] - - -if hasattr(typing, "TypeAliasType"): - TypeAliasType = typing.TypeAliasType -else: - def _is_unionable(obj): - """Corresponds to is_unionable() in unionobject.c in CPython.""" - return obj is None or isinstance(obj, ( - type, - _types.GenericAlias, - _types.UnionType, - TypeAliasType, - )) - - class TypeAliasType: - """Create named, parameterized type aliases. 
- - This provides a backport of the new `type` statement in Python 3.12: - - type ListOrSet[T] = list[T] | set[T] - - is equivalent to: - - T = TypeVar("T") - ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) - - The name ListOrSet can then be used as an alias for the type it refers to. - - The type_params argument should contain all the type parameters used - in the value of the type alias. If the alias is not generic, this - argument is omitted. - - Static type checkers should only support type aliases declared using - TypeAliasType that follow these rules: - - - The first argument (the name) must be a string literal. - - The TypeAliasType instance must be immediately assigned to a variable - of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, - as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). - - """ - - def __init__(self, name: str, value, *, type_params=()): - if not isinstance(name, str): - raise TypeError("TypeAliasType name must be a string") - self.__value__ = value - self.__type_params__ = type_params - - parameters = [] - for type_param in type_params: - if isinstance(type_param, TypeVarTuple): - parameters.extend(type_param) - else: - parameters.append(type_param) - self.__parameters__ = tuple(parameters) - def_mod = _caller() - if def_mod != 'typing_extensions': - self.__module__ = def_mod - # Setting this attribute closes the TypeAliasType from further modification - self.__name__ = name - - def __setattr__(self, name: str, value: object, /) -> None: - if hasattr(self, "__name__"): - self._raise_attribute_error(name) - super().__setattr__(name, value) - - def __delattr__(self, name: str, /) -> Never: - self._raise_attribute_error(name) - - def _raise_attribute_error(self, name: str) -> Never: - # Match the Python 3.12 error messages exactly - if name == "__name__": - raise AttributeError("readonly attribute") - elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: 
- raise AttributeError( - f"attribute '{name}' of 'typing.TypeAliasType' objects " - "is not writable" - ) - else: - raise AttributeError( - f"'typing.TypeAliasType' object has no attribute '{name}'" - ) - - def __repr__(self) -> str: - return self.__name__ - - def __getitem__(self, parameters): - if not isinstance(parameters, tuple): - parameters = (parameters,) - parameters = [ - typing._type_check( - item, f'Subscripting {self.__name__} requires a type.' - ) - for item in parameters - ] - return typing._GenericAlias(self, tuple(parameters)) - - def __reduce__(self): - return self.__name__ - - def __init_subclass__(cls, *args, **kwargs): - raise TypeError( - "type 'typing_extensions.TypeAliasType' is not an acceptable base type" - ) - - # The presence of this method convinces typing._type_check - # that TypeAliasTypes are types. - def __call__(self): - raise TypeError("Type alias is not callable") - - if sys.version_info >= (3, 10): - def __or__(self, right): - # For forward compatibility with 3.12, reject Unions - # that are not accepted by the built-in Union. - if not _is_unionable(right): - return NotImplemented - return typing.Union[self, right] - - def __ror__(self, left): - if not _is_unionable(left): - return NotImplemented - return typing.Union[left, self] - - -if hasattr(typing, "is_protocol"): - is_protocol = typing.is_protocol - get_protocol_members = typing.get_protocol_members -else: - def is_protocol(tp: type, /) -> bool: - """Return True if the given type is a Protocol. - - Example:: - - >>> from typing_extensions import Protocol, is_protocol - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> is_protocol(P) - True - >>> is_protocol(int) - False - """ - return ( - isinstance(tp, type) - and getattr(tp, '_is_protocol', False) - and tp is not Protocol - and tp is not typing.Protocol - ) - - def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: - """Return the set of members defined in a Protocol. 
- - Example:: - - >>> from typing_extensions import Protocol, get_protocol_members - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> get_protocol_members(P) - frozenset({'a', 'b'}) - - Raise a TypeError for arguments that are not Protocols. - """ - if not is_protocol(tp): - raise TypeError(f'{tp!r} is not a Protocol') - if hasattr(tp, '__protocol_attrs__'): - return frozenset(tp.__protocol_attrs__) - return frozenset(_get_protocol_attrs(tp)) - - -if hasattr(typing, "Doc"): - Doc = typing.Doc -else: - class Doc: - """Define the documentation of a type annotation using ``Annotated``, to be - used in class attributes, function and method parameters, return values, - and variables. - - The value should be a positional-only string literal to allow static tools - like editors and documentation generators to use it. - - This complements docstrings. - - The string value passed is available in the attribute ``documentation``. - - Example:: - - >>> from typing_extensions import Annotated, Doc - >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... - """ - def __init__(self, documentation: str, /) -> None: - self.documentation = documentation - - def __repr__(self) -> str: - return f"Doc({self.documentation!r})" - - def __hash__(self) -> int: - return hash(self.documentation) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Doc): - return NotImplemented - return self.documentation == other.documentation - - -# Aliases for items that have always been in typing. 
-# Explicitly assign these (rather than using `from typing import *` at the top), -# so that we get a CI error if one of these is deleted from typing.py -# in a future version of Python -AbstractSet = typing.AbstractSet -AnyStr = typing.AnyStr -BinaryIO = typing.BinaryIO -Callable = typing.Callable -Collection = typing.Collection -Container = typing.Container -Dict = typing.Dict -ForwardRef = typing.ForwardRef -FrozenSet = typing.FrozenSet -Generator = typing.Generator -Generic = typing.Generic -Hashable = typing.Hashable -IO = typing.IO -ItemsView = typing.ItemsView -Iterable = typing.Iterable -Iterator = typing.Iterator -KeysView = typing.KeysView -List = typing.List -Mapping = typing.Mapping -MappingView = typing.MappingView -Match = typing.Match -MutableMapping = typing.MutableMapping -MutableSequence = typing.MutableSequence -MutableSet = typing.MutableSet -Optional = typing.Optional -Pattern = typing.Pattern -Reversible = typing.Reversible -Sequence = typing.Sequence -Set = typing.Set -Sized = typing.Sized -TextIO = typing.TextIO -Tuple = typing.Tuple -Union = typing.Union -ValuesView = typing.ValuesView -cast = typing.cast -no_type_check = typing.no_type_check -no_type_check_decorator = typing.no_type_check_decorator + NamedTuple.__mro_entries__ = _namedtuple_mro_entries diff --git a/lib/win32/Demos/BackupRead_BackupWrite.py b/lib/win32/Demos/BackupRead_BackupWrite.py deleted file mode 100644 index 0e9fc9f9..00000000 --- a/lib/win32/Demos/BackupRead_BackupWrite.py +++ /dev/null @@ -1,121 +0,0 @@ -## demonstrates using BackupRead and BackupWrite to copy all of a file's data streams - - -import ntsecuritycon -import pythoncom -import pywintypes -import win32api -import win32con -import win32file -import win32security -from pywin32_testutil import ob2memory, str2bytes -from win32com import storagecon - -all_sd_info = ( - win32security.DACL_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION - | win32security.OWNER_SECURITY_INFORMATION - | 
win32security.GROUP_SECURITY_INFORMATION -) - -tempdir = win32api.GetTempPath() -tempfile = win32api.GetTempFileName(tempdir, "bkr")[0] -outfile = win32api.GetTempFileName(tempdir, "out")[0] -print("Filename:", tempfile, "Output file:", outfile) - -f = open(tempfile, "w") -f.write("some random junk" + "x" * 100) -f.close() - -## add a couple of alternate data streams -f = open(tempfile + ":streamdata", "w") -f.write("data written to alternate stream" + "y" * 100) -f.close() - -f = open(tempfile + ":anotherstream", "w") -f.write("z" * 100) -f.close() - -## add Summary Information, which is stored as a separate stream -m = storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE | storagecon.STGM_DIRECT -pss = pythoncom.StgOpenStorageEx( - tempfile, m, storagecon.STGFMT_FILE, 0, pythoncom.IID_IPropertySetStorage, None -) -ps = pss.Create( - pythoncom.FMTID_SummaryInformation, - pythoncom.IID_IPropertyStorage, - 0, - storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE, -) -ps.WriteMultiple( - (storagecon.PIDSI_KEYWORDS, storagecon.PIDSI_COMMENTS), ("keywords", "comments") -) -ps = None -pss = None - -## add a custom security descriptor to make sure we don't -## get a default that would always be the same for both files in temp dir -new_sd = pywintypes.SECURITY_DESCRIPTOR() -sid = win32security.LookupAccountName("", "EveryOne")[0] -acl = pywintypes.ACL() -acl.AddAccessAllowedAce(1, win32con.GENERIC_READ, sid) -acl.AddAccessAllowedAce(1, ntsecuritycon.FILE_APPEND_DATA, sid) -acl.AddAccessAllowedAce(1, win32con.GENERIC_WRITE, sid) -acl.AddAccessAllowedAce(1, ntsecuritycon.FILE_ALL_ACCESS, sid) - -new_sd.SetSecurityDescriptorDacl(True, acl, False) -win32security.SetFileSecurity(tempfile, win32security.DACL_SECURITY_INFORMATION, new_sd) - - -sa = pywintypes.SECURITY_ATTRIBUTES() -sa.bInheritHandle = True -h = win32file.CreateFile( - tempfile, - win32con.GENERIC_ALL, - win32con.FILE_SHARE_READ, - sa, - win32con.OPEN_EXISTING, - 
win32file.FILE_FLAG_BACKUP_SEMANTICS, - None, -) - -outh = win32file.CreateFile( - outfile, - win32con.GENERIC_ALL, - win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE, - sa, - win32con.OPEN_EXISTING, - win32file.FILE_FLAG_BACKUP_SEMANTICS, - None, -) - -ctxt = 0 -outctxt = 0 -buf = None -readsize = 100 - -while 1: - bytes_read, buf, ctxt = win32file.BackupRead(h, readsize, buf, False, True, ctxt) - if bytes_read == 0: - break - bytes_written, outctxt = win32file.BackupWrite( - outh, bytes_read, buf, False, True, outctxt - ) - print("Written:", bytes_written, "Context:", outctxt) -win32file.BackupRead(h, 0, buf, True, True, ctxt) -win32file.BackupWrite(outh, 0, str2bytes(""), True, True, outctxt) -win32file.CloseHandle(h) -win32file.CloseHandle(outh) - -assert open(tempfile).read() == open(outfile).read(), "File contents differ !" -assert ( - open(tempfile + ":streamdata").read() == open(outfile + ":streamdata").read() -), "streamdata contents differ !" -assert ( - open(tempfile + ":anotherstream").read() == open(outfile + ":anotherstream").read() -), "anotherstream contents differ !" -assert ( - ob2memory(win32security.GetFileSecurity(tempfile, all_sd_info))[:] - == ob2memory(win32security.GetFileSecurity(outfile, all_sd_info))[:] -), "Security descriptors are different !" 
-## also should check Summary Info programatically diff --git a/lib/win32/Demos/BackupSeek_streamheaders.py b/lib/win32/Demos/BackupSeek_streamheaders.py deleted file mode 100644 index 8b828d1d..00000000 --- a/lib/win32/Demos/BackupSeek_streamheaders.py +++ /dev/null @@ -1,137 +0,0 @@ -## demonstrates using BackupSeek to enumerate data streams for a file -import struct - -import pythoncom -import pywintypes -import win32api -import win32con -import win32file -from win32com import storagecon - -stream_types = { - win32con.BACKUP_DATA: "Standard data", - win32con.BACKUP_EA_DATA: "Extended attribute data", - win32con.BACKUP_SECURITY_DATA: "Security descriptor data", - win32con.BACKUP_ALTERNATE_DATA: "Alternative data streams", - win32con.BACKUP_LINK: "Hard link information", - win32con.BACKUP_PROPERTY_DATA: "Property data", - win32con.BACKUP_OBJECT_ID: "Objects identifiers", - win32con.BACKUP_REPARSE_DATA: "Reparse points", - win32con.BACKUP_SPARSE_BLOCK: "Sparse file", -} - -tempdir = win32api.GetTempPath() -tempfile = win32api.GetTempFileName(tempdir, "bkr")[0] -print("Filename:", tempfile) - -f = open(tempfile, "w") -f.write("some random junk" + "x" * 100) -f.close() - -f = open(tempfile + ":streamdata", "w") -f.write("data written to alternate stream" + "y" * 100) -f.close() - -f = open(tempfile + ":anotherstream", "w") -f.write("z" * 200) -f.close() - -## add Summary Information, which is stored as a separate stream -m = storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE | storagecon.STGM_DIRECT -pss = pythoncom.StgOpenStorageEx( - tempfile, m, storagecon.STGFMT_FILE, 0, pythoncom.IID_IPropertySetStorage, None -) -ps = pss.Create( - pythoncom.FMTID_SummaryInformation, - pythoncom.IID_IPropertyStorage, - 0, - storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE, -) -ps.WriteMultiple( - (storagecon.PIDSI_KEYWORDS, storagecon.PIDSI_COMMENTS), ("keywords", "comments") -) -ps = None -pss = None - -sa = pywintypes.SECURITY_ATTRIBUTES() 
-sa.bInheritHandle = False -h = win32file.CreateFile( - tempfile, - win32con.GENERIC_ALL, - win32con.FILE_SHARE_READ, - sa, - win32con.OPEN_EXISTING, - win32file.FILE_FLAG_BACKUP_SEMANTICS, - None, -) - - -""" stream header: -typedef struct _WIN32_STREAM_ID { - DWORD dwStreamId; DWORD dwStreamAttributes; LARGE_INTEGER Size; - DWORD dwStreamNameSize; WCHAR cStreamName[ANYSIZE_ARRAY]; -} -""" - -win32_stream_id_format = "LLQL" -win32_stream_id_size = struct.calcsize(win32_stream_id_format) - - -def parse_stream_header(h, ctxt, data): - stream_type, stream_attributes, stream_size, stream_name_size = struct.unpack( - win32_stream_id_format, data - ) - print( - "\nType:", - stream_type, - stream_types[stream_type], - "Attributes:", - stream_attributes, - "Size:", - stream_size, - "Name len:", - stream_name_size, - ) - if stream_name_size > 0: - ## ??? sdk says this size is in characters, but it appears to be number of bytes ??? - bytes_read, stream_name_buf, ctxt = win32file.BackupRead( - h, stream_name_size, None, False, True, ctxt - ) - stream_name = pywintypes.UnicodeFromRaw(stream_name_buf[:]) - else: - stream_name = "Unnamed" - print("Name:" + stream_name) - return ( - ctxt, - stream_type, - stream_attributes, - stream_size, - stream_name_size, - stream_name, - ) - - -ctxt = 0 -win32_stream_id_buf = ( - None ## gets rebound to a writable buffer on first call and reused -) -while 1: - bytes_read, win32_stream_id_buf, ctxt = win32file.BackupRead( - h, win32_stream_id_size, win32_stream_id_buf, False, True, ctxt - ) - if bytes_read == 0: - break - ( - ctxt, - stream_type, - stream_attributes, - stream_size, - stream_name_size, - stream_name, - ) = parse_stream_header(h, ctxt, win32_stream_id_buf[:]) - if stream_size > 0: - bytes_moved = win32file.BackupSeek(h, stream_size, ctxt) - print("Moved: ", bytes_moved) - -win32file.BackupRead(h, win32_stream_id_size, win32_stream_id_buf, True, True, ctxt) -win32file.CloseHandle(h) diff --git a/lib/win32/Demos/CopyFileEx.py 
b/lib/win32/Demos/CopyFileEx.py deleted file mode 100644 index cdac78e5..00000000 --- a/lib/win32/Demos/CopyFileEx.py +++ /dev/null @@ -1,57 +0,0 @@ -import win32api -import win32file - - -def ProgressRoutine( - TotalFileSize, - TotalBytesTransferred, - StreamSize, - StreamBytesTransferred, - StreamNumber, - CallbackReason, - SourceFile, - DestinationFile, - Data, -): - print(Data) - print( - TotalFileSize, - TotalBytesTransferred, - StreamSize, - StreamBytesTransferred, - StreamNumber, - CallbackReason, - SourceFile, - DestinationFile, - ) - ##if TotalBytesTransferred > 100000: - ## return win32file.PROGRESS_STOP - return win32file.PROGRESS_CONTINUE - - -temp_dir = win32api.GetTempPath() -fsrc = win32api.GetTempFileName(temp_dir, "cfe")[0] -fdst = win32api.GetTempFileName(temp_dir, "cfe")[0] -print(fsrc, fdst) - -f = open(fsrc, "w") -f.write("xxxxxxxxxxxxxxxx\n" * 32768) -f.close() -## add a couple of extra data streams -f = open(fsrc + ":stream_y", "w") -f.write("yyyyyyyyyyyyyyyy\n" * 32768) -f.close() -f = open(fsrc + ":stream_z", "w") -f.write("zzzzzzzzzzzzzzzz\n" * 32768) -f.close() - -operation_desc = "Copying " + fsrc + " to " + fdst -win32file.CopyFileEx( - fsrc, - fdst, - ProgressRoutine, - Data=operation_desc, - Cancel=False, - CopyFlags=win32file.COPY_FILE_RESTARTABLE, - Transaction=None, -) diff --git a/lib/win32/Demos/CreateFileTransacted_MiniVersion.py b/lib/win32/Demos/CreateFileTransacted_MiniVersion.py deleted file mode 100644 index fd600b59..00000000 --- a/lib/win32/Demos/CreateFileTransacted_MiniVersion.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -This demonstrates the creation of miniversions of a file during a transaction. -The FSCTL_TXFS_CREATE_MINIVERSION control code saves any changes to a new -miniversion (effectively a savepoint within a transaction). 
-""" - -import os -import struct - -import win32api -import win32con -import win32file -import win32transaction -import winerror -import winioctlcon -from pywin32_testutil import str2bytes # py3k-friendly helper - - -def demo(): - """ - Definition of buffer used with FSCTL_TXFS_CREATE_MINIVERSION: - typedef struct _TXFS_CREATE_MINIVERSION_INFO{ - USHORT StructureVersion; - USHORT StructureLength; - ULONG BaseVersion; - USHORT MiniVersion;} - """ - buf_fmt = "HHLH0L" ## buffer size must include struct padding - buf_size = struct.calcsize(buf_fmt) - - tempdir = win32api.GetTempPath() - tempfile = win32api.GetTempFileName(tempdir, "cft")[0] - print("Demonstrating transactions on tempfile", tempfile) - f = open(tempfile, "w") - f.write("This is original file.\n") - f.close() - - trans = win32transaction.CreateTransaction( - Description="Test creating miniversions of a file" - ) - hfile = win32file.CreateFileW( - tempfile, - win32con.GENERIC_READ | win32con.GENERIC_WRITE, - win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE, - None, - win32con.OPEN_EXISTING, - 0, - None, - Transaction=trans, - ) - - win32file.WriteFile(hfile, str2bytes("This is first miniversion.\n")) - buf = win32file.DeviceIoControl( - hfile, winioctlcon.FSCTL_TXFS_CREATE_MINIVERSION, None, buf_size, None - ) - struct_ver, struct_len, base_ver, ver_1 = struct.unpack(buf_fmt, buf) - - win32file.SetFilePointer(hfile, 0, win32con.FILE_BEGIN) - win32file.WriteFile(hfile, str2bytes("This is second miniversion!\n")) - buf = win32file.DeviceIoControl( - hfile, winioctlcon.FSCTL_TXFS_CREATE_MINIVERSION, None, buf_size, None - ) - struct_ver, struct_len, base_ver, ver_2 = struct.unpack(buf_fmt, buf) - hfile.Close() - - ## miniversions can't be opened with write access - hfile_0 = win32file.CreateFileW( - tempfile, - win32con.GENERIC_READ, - win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE, - None, - win32con.OPEN_EXISTING, - 0, - None, - Transaction=trans, - MiniVersion=base_ver, - ) - print("version:", 
base_ver, win32file.ReadFile(hfile_0, 100)) - hfile_0.Close() - - hfile_1 = win32file.CreateFileW( - tempfile, - win32con.GENERIC_READ, - win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE, - None, - win32con.OPEN_EXISTING, - 0, - None, - Transaction=trans, - MiniVersion=ver_1, - ) - print("version:", ver_1, win32file.ReadFile(hfile_1, 100)) - hfile_1.Close() - - hfile_2 = win32file.CreateFileW( - tempfile, - win32con.GENERIC_READ, - win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE, - None, - win32con.OPEN_EXISTING, - 0, - None, - Transaction=trans, - MiniVersion=ver_2, - ) - print("version:", ver_2, win32file.ReadFile(hfile_2, 100)) - hfile_2.Close() - - ## MiniVersions are destroyed when transaction is committed or rolled back - win32transaction.CommitTransaction(trans) - - os.unlink(tempfile) - - -if __name__ == "__main__": - # When run on CI, this fails with NOT_SUPPORTED, so don't have that cause "failure" - try: - demo() - except win32file.error as e: - if e.winerror == winerror.ERROR_NOT_SUPPORTED: - print("These features are not supported by this filesystem.") - else: - raise diff --git a/lib/win32/Demos/EvtFormatMessage.py b/lib/win32/Demos/EvtFormatMessage.py deleted file mode 100644 index 12543869..00000000 --- a/lib/win32/Demos/EvtFormatMessage.py +++ /dev/null @@ -1,83 +0,0 @@ -import sys - -import win32evtlog - - -def main(): - path = "System" - num_events = 5 - if len(sys.argv) > 2: - path = sys.argv[1] - num_events = int(sys.argv[2]) - elif len(sys.argv) > 1: - path = sys.argv[1] - - query = win32evtlog.EvtQuery(path, win32evtlog.EvtQueryForwardDirection) - events = win32evtlog.EvtNext(query, num_events) - context = win32evtlog.EvtCreateRenderContext(win32evtlog.EvtRenderContextSystem) - - for i, event in enumerate(events, 1): - result = win32evtlog.EvtRender( - event, win32evtlog.EvtRenderEventValues, Context=context - ) - - print("Event {}".format(i)) - - level_value, level_variant = result[win32evtlog.EvtSystemLevel] - if level_variant != 
win32evtlog.EvtVarTypeNull: - if level_value == 1: - print(" Level: CRITICAL") - elif level_value == 2: - print(" Level: ERROR") - elif level_value == 3: - print(" Level: WARNING") - elif level_value == 4: - print(" Level: INFO") - elif level_value == 5: - print(" Level: VERBOSE") - else: - print(" Level: UNKNOWN") - - time_created_value, time_created_variant = result[ - win32evtlog.EvtSystemTimeCreated - ] - if time_created_variant != win32evtlog.EvtVarTypeNull: - print(" Timestamp: {}".format(time_created_value.isoformat())) - - computer_value, computer_variant = result[win32evtlog.EvtSystemComputer] - if computer_variant != win32evtlog.EvtVarTypeNull: - print(" FQDN: {}".format(computer_value)) - - provider_name_value, provider_name_variant = result[ - win32evtlog.EvtSystemProviderName - ] - if provider_name_variant != win32evtlog.EvtVarTypeNull: - print(" Provider: {}".format(provider_name_value)) - - try: - metadata = win32evtlog.EvtOpenPublisherMetadata(provider_name_value) - # pywintypes.error: (2, 'EvtOpenPublisherMetadata', 'The system cannot find the file specified.') - except Exception: - pass - else: - try: - message = win32evtlog.EvtFormatMessage( - metadata, event, win32evtlog.EvtFormatMessageEvent - ) - # pywintypes.error: (15027, 'EvtFormatMessage: allocated 0, need buffer of size 0', 'The message resource is present but the message was not found in the message table.') - except Exception: - pass - else: - try: - print(" Message: {}".format(message)) - except UnicodeEncodeError: - # Obscure error when run under subprocess.Popen(), presumably due to - # not knowing the correct encoding for the console. 
- # > UnicodeEncodeError: \'charmap\' codec can\'t encode character \'\\u200e\' in position 57: character maps to \r\n' - # Can't reproduce when running manually, so it seems more a subprocess.Popen() - # than ours: - print(" Failed to decode:", repr(message)) - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/EvtSubscribe_pull.py b/lib/win32/Demos/EvtSubscribe_pull.py deleted file mode 100644 index f4059796..00000000 --- a/lib/win32/Demos/EvtSubscribe_pull.py +++ /dev/null @@ -1,28 +0,0 @@ -## Demonstrates how to create a "pull" subscription -import win32con -import win32event -import win32evtlog - -query_text = '*[System[Provider[@Name="Microsoft-Windows-Winlogon"]]]' - -h = win32event.CreateEvent(None, 0, 0, None) -s = win32evtlog.EvtSubscribe( - "System", - win32evtlog.EvtSubscribeStartAtOldestRecord, - SignalEvent=h, - Query=query_text, -) - -while 1: - while 1: - events = win32evtlog.EvtNext(s, 10) - if len(events) == 0: - break - ##for event in events: - ## print(win32evtlog.EvtRender(event, win32evtlog.EvtRenderEventXml)) - print("retrieved %s events" % len(events)) - while 1: - print("waiting...") - w = win32event.WaitForSingleObjectEx(h, 2000, True) - if w == win32con.WAIT_OBJECT_0: - break diff --git a/lib/win32/Demos/EvtSubscribe_push.py b/lib/win32/Demos/EvtSubscribe_push.py deleted file mode 100644 index 519ef0d7..00000000 --- a/lib/win32/Demos/EvtSubscribe_push.py +++ /dev/null @@ -1,25 +0,0 @@ -## Demonstrates a "push" subscription with a callback function -import win32evtlog - -query_text = '*[System[Provider[@Name="Microsoft-Windows-Winlogon"]]]' - - -def c(reason, context, evt): - if reason == win32evtlog.EvtSubscribeActionError: - print("EvtSubscribeActionError") - elif reason == win32evtlog.EvtSubscribeActionDeliver: - print("EvtSubscribeActionDeliver") - else: - print("??? 
Unknown action ???", reason) - context.append(win32evtlog.EvtRender(evt, win32evtlog.EvtRenderEventXml)) - return 0 - - -evttext = [] -s = win32evtlog.EvtSubscribe( - "System", - win32evtlog.EvtSubscribeStartAtOldestRecord, - Query="*", - Callback=c, - Context=evttext, -) diff --git a/lib/win32/Demos/FileSecurityTest.py b/lib/win32/Demos/FileSecurityTest.py deleted file mode 100644 index 90c45483..00000000 --- a/lib/win32/Demos/FileSecurityTest.py +++ /dev/null @@ -1,137 +0,0 @@ -# Contributed by Kelly Kranabetter. -import os -import sys - -import ntsecuritycon -import pywintypes -import win32security -import winerror - -# get security information -# name=r"c:\autoexec.bat" -# name= r"g:\!workgrp\lim" -name = sys.argv[0] - -if not os.path.exists(name): - print(name, "does not exist!") - sys.exit() - -print("On file ", name, "\n") - -# get owner SID -print("OWNER") -try: - sd = win32security.GetFileSecurity(name, win32security.OWNER_SECURITY_INFORMATION) - sid = sd.GetSecurityDescriptorOwner() - print(" ", win32security.LookupAccountSid(None, sid)) -except pywintypes.error as exc: - # in automation and network shares we see: - # pywintypes.error: (1332, 'LookupAccountName', 'No mapping between account names and security IDs was done.') - if exc.winerror != winerror.ERROR_NONE_MAPPED: - raise - print("No owner information is available") - -# get group SID -try: - print("GROUP") - sd = win32security.GetFileSecurity(name, win32security.GROUP_SECURITY_INFORMATION) - sid = sd.GetSecurityDescriptorGroup() - print(" ", win32security.LookupAccountSid(None, sid)) -except pywintypes.error as exc: - if exc.winerror != winerror.ERROR_NONE_MAPPED: - raise - print("No group information is available") - -# get ACEs -sd = win32security.GetFileSecurity(name, win32security.DACL_SECURITY_INFORMATION) -dacl = sd.GetSecurityDescriptorDacl() -if dacl == None: - print("No Discretionary ACL") -else: - for ace_no in range(0, dacl.GetAceCount()): - ace = dacl.GetAce(ace_no) - print("ACE", 
ace_no) - - print(" -Type") - for i in ( - "ACCESS_ALLOWED_ACE_TYPE", - "ACCESS_DENIED_ACE_TYPE", - "SYSTEM_AUDIT_ACE_TYPE", - "SYSTEM_ALARM_ACE_TYPE", - ): - if getattr(ntsecuritycon, i) == ace[0][0]: - print(" ", i) - - print(" -Flags", hex(ace[0][1])) - for i in ( - "OBJECT_INHERIT_ACE", - "CONTAINER_INHERIT_ACE", - "NO_PROPAGATE_INHERIT_ACE", - "INHERIT_ONLY_ACE", - "SUCCESSFUL_ACCESS_ACE_FLAG", - "FAILED_ACCESS_ACE_FLAG", - ): - if getattr(ntsecuritycon, i) & ace[0][1] == getattr(ntsecuritycon, i): - print(" ", i) - - print(" -mask", hex(ace[1])) - - # files and directories do permissions differently - permissions_file = ( - "DELETE", - "READ_CONTROL", - "WRITE_DAC", - "WRITE_OWNER", - "SYNCHRONIZE", - "FILE_GENERIC_READ", - "FILE_GENERIC_WRITE", - "FILE_GENERIC_EXECUTE", - "FILE_DELETE_CHILD", - ) - permissions_dir = ( - "DELETE", - "READ_CONTROL", - "WRITE_DAC", - "WRITE_OWNER", - "SYNCHRONIZE", - "FILE_ADD_SUBDIRECTORY", - "FILE_ADD_FILE", - "FILE_DELETE_CHILD", - "FILE_LIST_DIRECTORY", - "FILE_TRAVERSE", - "FILE_READ_ATTRIBUTES", - "FILE_WRITE_ATTRIBUTES", - "FILE_READ_EA", - "FILE_WRITE_EA", - ) - permissions_dir_inherit = ( - "DELETE", - "READ_CONTROL", - "WRITE_DAC", - "WRITE_OWNER", - "SYNCHRONIZE", - "GENERIC_READ", - "GENERIC_WRITE", - "GENERIC_EXECUTE", - "GENERIC_ALL", - ) - if os.path.isfile(name): - permissions = permissions_file - else: - permissions = permissions_dir - # directories also contain an ACE that is inherited by children (files) within them - if ( - ace[0][1] & ntsecuritycon.OBJECT_INHERIT_ACE - == ntsecuritycon.OBJECT_INHERIT_ACE - and ace[0][1] & ntsecuritycon.INHERIT_ONLY_ACE - == ntsecuritycon.INHERIT_ONLY_ACE - ): - permissions = permissions_dir_inherit - - calc_mask = 0 # calculate the mask so we can see if we are printing all of the permissions - for i in permissions: - if getattr(ntsecuritycon, i) & ace[1] == getattr(ntsecuritycon, i): - calc_mask = calc_mask | getattr(ntsecuritycon, i) - print(" ", i) - print(" ", 
"Calculated Check Mask=", hex(calc_mask)) - print(" -SID\n ", win32security.LookupAccountSid(None, ace[2])) diff --git a/lib/win32/Demos/GetSaveFileName.py b/lib/win32/Demos/GetSaveFileName.py deleted file mode 100644 index 28587505..00000000 --- a/lib/win32/Demos/GetSaveFileName.py +++ /dev/null @@ -1,43 +0,0 @@ -import os - -import win32con -import win32gui - -filter = "Python Scripts\0*.py;*.pyw;*.pys\0Text files\0*.txt\0" -customfilter = "Other file types\0*.*\0" - -fname, customfilter, flags = win32gui.GetSaveFileNameW( - InitialDir=os.environ["temp"], - Flags=win32con.OFN_ALLOWMULTISELECT | win32con.OFN_EXPLORER, - File="somefilename", - DefExt="py", - Title="GetSaveFileNameW", - Filter=filter, - CustomFilter=customfilter, - FilterIndex=1, -) - -print("save file names:", repr(fname)) -print("filter used:", repr(customfilter)) -print("Flags:", flags) -for k, v in list(win32con.__dict__.items()): - if k.startswith("OFN_") and flags & v: - print("\t" + k) - -fname, customfilter, flags = win32gui.GetOpenFileNameW( - InitialDir=os.environ["temp"], - Flags=win32con.OFN_ALLOWMULTISELECT | win32con.OFN_EXPLORER, - File="somefilename", - DefExt="py", - Title="GetOpenFileNameW", - Filter=filter, - CustomFilter=customfilter, - FilterIndex=0, -) - -print("open file names:", repr(fname)) -print("filter used:", repr(customfilter)) -print("Flags:", flags) -for k, v in list(win32con.__dict__.items()): - if k.startswith("OFN_") and flags & v: - print("\t" + k) diff --git a/lib/win32/Demos/NetValidatePasswordPolicy.py b/lib/win32/Demos/NetValidatePasswordPolicy.py deleted file mode 100644 index 86c3bc40..00000000 --- a/lib/win32/Demos/NetValidatePasswordPolicy.py +++ /dev/null @@ -1,127 +0,0 @@ -"""A demo of using win32net.NetValidatePasswordPolicy. - -Example usage: - -% NetValidatePasswordPolicy.py --password=foo change -which might return: - -> Result of 'change' validation is 0: The operation completed successfully. 
- -or depending on the policy: - -> Result of 'change' validation is 2245: The password does not meet the -> password policy requirements. Check the minimum password length, -> password complexity and password history requirements. - -Adding --user doesn't seem to change the output (even the PasswordLastSet seen -when '-f' is used doesn't depend on the username), but theoretically it will -also check the password history for the specified user. - -% NetValidatePasswordPolicy.py auth - -which always (with and without '-m') seems to return: - -> Result of 'auth' validation is 2701: Password must change at next logon -""" - -import optparse -import sys -from pprint import pprint - -import win32api -import win32net -import win32netcon - - -def main(): - parser = optparse.OptionParser( - "%prog [options] auth|change ...", - description="A win32net.NetValidatePasswordPolicy demo.", - ) - - parser.add_option( - "-u", - "--username", - action="store", - help="The username to pass to the function (only for the " "change command", - ) - - parser.add_option( - "-p", - "--password", - action="store", - help="The clear-text password to pass to the function " - "(only for the 'change' command)", - ) - - parser.add_option( - "-m", - "--password-matched", - action="store_false", - default=True, - help="Used to specify the password does NOT match (ie, " - "uses False for the PasswordMatch/PasswordMatched " - "arg, both 'auth' and 'change' commands)", - ) - - parser.add_option( - "-s", - "--server", - action="store", - help="The name of the server to execute the command on", - ) - - parser.add_option( - "-f", - "--show_fields", - action="store_true", - default=False, - help="Print the NET_VALIDATE_PERSISTED_FIELDS returned", - ) - - options, args = parser.parse_args() - - if not args: - args = ["auth"] - - for arg in args: - if arg == "auth": - input = { - "PasswordMatched": options.password_matched, - } - val_type = win32netcon.NetValidateAuthentication - elif arg == "change": - 
input = { - "ClearPassword": options.password, - "PasswordMatch": options.password_matched, - "UserAccountName": options.username, - } - val_type = win32netcon.NetValidatePasswordChange - else: - parser.error("Invalid arg - must be 'auth' or 'change'") - - try: - fields, status = win32net.NetValidatePasswordPolicy( - options.server, None, val_type, input - ) - except NotImplementedError: - print("NetValidatePasswordPolicy not implemented on this platform.") - return 1 - except win32net.error as exc: - print("NetValidatePasswordPolicy failed: ", exc) - return 1 - - if options.show_fields: - print("NET_VALIDATE_PERSISTED_FIELDS fields:") - pprint(fields) - - print( - "Result of %r validation is %d: %s" - % (arg, status, win32api.FormatMessage(status).strip()) - ) - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/lib/win32/Demos/OpenEncryptedFileRaw.py b/lib/win32/Demos/OpenEncryptedFileRaw.py deleted file mode 100644 index aa0f2a7c..00000000 --- a/lib/win32/Demos/OpenEncryptedFileRaw.py +++ /dev/null @@ -1,67 +0,0 @@ -import os - -import win32api -import win32file -import winerror - - -def ReadCallback(input_buffer, data, buflen): - fnamein, fnameout, f = data - ## print fnamein, fnameout, buflen - f.write(input_buffer) - ## python 2.3 throws an error if return value is a plain int - return winerror.ERROR_SUCCESS - - -def WriteCallback(output_buffer, data, buflen): - fnamebackup, fnameout, f = data - file_data = f.read(buflen) - ## returning 0 as len terminates WriteEncryptedFileRaw - output_len = len(file_data) - output_buffer[:output_len] = file_data - return winerror.ERROR_SUCCESS, output_len - - -tmp_dir = win32api.GetTempPath() -dst_dir = win32api.GetTempFileName(tmp_dir, "oef")[0] -os.remove(dst_dir) -os.mkdir(dst_dir) -print("Destination dir:", dst_dir) - -## create an encrypted file -fname = win32api.GetTempFileName(dst_dir, "ref")[0] -print("orig file:", fname) -f = open(fname, "w") -f.write("xxxxxxxxxxxxxxxx\n" * 32768) -f.close() 
-## add a couple of extra data streams -f = open(fname + ":stream_y", "w") -f.write("yyyyyyyyyyyyyyyy\n" * 32768) -f.close() -f = open(fname + ":stream_z", "w") -f.write("zzzzzzzzzzzzzzzz\n" * 32768) -f.close() -win32file.EncryptFile(fname) - -## backup raw data of encrypted file -bkup_fname = win32api.GetTempFileName(dst_dir, "bef")[0] -print("backup file:", bkup_fname) -f = open(bkup_fname, "wb") -ctxt = win32file.OpenEncryptedFileRaw(fname, 0) -try: - win32file.ReadEncryptedFileRaw(ReadCallback, (fname, bkup_fname, f), ctxt) -finally: - ## if context is not closed, file remains locked even if calling process is killed - win32file.CloseEncryptedFileRaw(ctxt) - f.close() - -## restore data from backup to new encrypted file -dst_fname = win32api.GetTempFileName(dst_dir, "wef")[0] -print("restored file:", dst_fname) -f = open(bkup_fname, "rb") -ctxtout = win32file.OpenEncryptedFileRaw(dst_fname, win32file.CREATE_FOR_IMPORT) -try: - win32file.WriteEncryptedFileRaw(WriteCallback, (bkup_fname, dst_fname, f), ctxtout) -finally: - win32file.CloseEncryptedFileRaw(ctxtout) - f.close() diff --git a/lib/win32/Demos/RegCreateKeyTransacted.py b/lib/win32/Demos/RegCreateKeyTransacted.py deleted file mode 100644 index 687f07ae..00000000 --- a/lib/win32/Demos/RegCreateKeyTransacted.py +++ /dev/null @@ -1,60 +0,0 @@ -import win32api -import win32con -import win32transaction - -keyname = "Pywin32 test transacted registry functions" -subkeyname = "test transacted subkey" -classname = "Transacted Class" - -trans = win32transaction.CreateTransaction(Description="test RegCreateKeyTransacted") -key, disp = win32api.RegCreateKeyEx( - win32con.HKEY_CURRENT_USER, - keyname, - samDesired=win32con.KEY_ALL_ACCESS, - Class=classname, -) -## clean up any existing keys -for subk in win32api.RegEnumKeyExW(key): - win32api.RegDeleteKey(key, subk[0]) - -## reopen key in transacted mode -transacted_key = win32api.RegOpenKeyTransacted( - Key=win32con.HKEY_CURRENT_USER, - SubKey=keyname, - 
Transaction=trans, - samDesired=win32con.KEY_ALL_ACCESS, -) -subkey, disp = win32api.RegCreateKeyEx( - transacted_key, - subkeyname, - Transaction=trans, - samDesired=win32con.KEY_ALL_ACCESS, - Class=classname, -) - -## Newly created key should not be visible from non-transacted handle -subkeys = [s[0] for s in win32api.RegEnumKeyExW(key)] -assert subkeyname not in subkeys - -transacted_subkeys = [s[0] for s in win32api.RegEnumKeyExW(transacted_key)] -assert subkeyname in transacted_subkeys - -## Key should be visible to non-transacted handle after commit -win32transaction.CommitTransaction(trans) -subkeys = [s[0] for s in win32api.RegEnumKeyExW(key)] -assert subkeyname in subkeys - -## test transacted delete -del_trans = win32transaction.CreateTransaction( - Description="test RegDeleteKeyTransacted" -) -win32api.RegDeleteKeyEx(key, subkeyname, Transaction=del_trans) -## subkey should still show up for non-transacted handle -subkeys = [s[0] for s in win32api.RegEnumKeyExW(key)] -assert subkeyname in subkeys -## ... 
and should be gone after commit -win32transaction.CommitTransaction(del_trans) -subkeys = [s[0] for s in win32api.RegEnumKeyExW(key)] -assert subkeyname not in subkeys - -win32api.RegDeleteKey(win32con.HKEY_CURRENT_USER, keyname) diff --git a/lib/win32/Demos/RegRestoreKey.py b/lib/win32/Demos/RegRestoreKey.py deleted file mode 100644 index 69ce32ea..00000000 --- a/lib/win32/Demos/RegRestoreKey.py +++ /dev/null @@ -1,71 +0,0 @@ -import os - -import ntsecuritycon -import win32api -import win32con -import win32security -import winnt - -temp_dir = win32api.GetTempPath() -fname = win32api.GetTempFileName(temp_dir, "rsk")[0] -print(fname) -## file can't exist -os.remove(fname) - -## enable backup and restore privs -required_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_BACKUP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32con.TOKEN_READ | win32con.TOKEN_ADJUST_PRIVILEGES -) -adjusted_privs = win32security.AdjustTokenPrivileges(th, 0, required_privs) - -try: - sa = win32security.SECURITY_ATTRIBUTES() - my_sid = win32security.GetTokenInformation(th, ntsecuritycon.TokenUser)[0] - sa.SECURITY_DESCRIPTOR.SetSecurityDescriptorOwner(my_sid, 0) - - k, disp = win32api.RegCreateKeyEx( - win32con.HKEY_CURRENT_USER, - "Python test key", - SecurityAttributes=sa, - samDesired=win32con.KEY_ALL_ACCESS, - Class="some class", - Options=0, - ) - win32api.RegSetValue(k, None, win32con.REG_SZ, "Default value for python test key") - - subk, disp = win32api.RegCreateKeyEx( - k, - "python test subkey", - SecurityAttributes=sa, - samDesired=win32con.KEY_ALL_ACCESS, - Class="some other class", - Options=0, - ) - win32api.RegSetValue(subk, None, win32con.REG_SZ, "Default value for subkey") - - win32api.RegSaveKeyEx( - k, fname, Flags=winnt.REG_STANDARD_FORMAT, 
SecurityAttributes=sa - ) - - restored_key, disp = win32api.RegCreateKeyEx( - win32con.HKEY_CURRENT_USER, - "Python test key(restored)", - SecurityAttributes=sa, - samDesired=win32con.KEY_ALL_ACCESS, - Class="restored class", - Options=0, - ) - win32api.RegRestoreKey(restored_key, fname) -finally: - win32security.AdjustTokenPrivileges(th, 0, adjusted_privs) diff --git a/lib/win32/Demos/SystemParametersInfo.py b/lib/win32/Demos/SystemParametersInfo.py deleted file mode 100644 index 55a331e6..00000000 --- a/lib/win32/Demos/SystemParametersInfo.py +++ /dev/null @@ -1,210 +0,0 @@ -import glob -import os -import time - -import win32api -import win32con -import win32gui - -## some of these tests will fail for systems prior to XP - -for pname in ( - ## Set actions all take an unsigned int in pvParam - "SPI_GETMOUSESPEED", - "SPI_GETACTIVEWNDTRKTIMEOUT", - "SPI_GETCARETWIDTH", - "SPI_GETFOREGROUNDFLASHCOUNT", - "SPI_GETFOREGROUNDLOCKTIMEOUT", - ## Set actions all take an unsigned int in uiParam - "SPI_GETWHEELSCROLLLINES", - "SPI_GETKEYBOARDDELAY", - "SPI_GETKEYBOARDSPEED", - "SPI_GETMOUSEHOVERHEIGHT", - "SPI_GETMOUSEHOVERWIDTH", - "SPI_GETMOUSEHOVERTIME", - "SPI_GETSCREENSAVETIMEOUT", - "SPI_GETMENUSHOWDELAY", - "SPI_GETLOWPOWERTIMEOUT", - "SPI_GETPOWEROFFTIMEOUT", - "SPI_GETBORDER", - ## below are winxp only: - "SPI_GETFONTSMOOTHINGCONTRAST", - "SPI_GETFONTSMOOTHINGTYPE", - "SPI_GETFOCUSBORDERHEIGHT", - "SPI_GETFOCUSBORDERWIDTH", - "SPI_GETMOUSECLICKLOCKTIME", -): - print(pname) - cget = getattr(win32con, pname) - cset = getattr(win32con, pname.replace("_GET", "_SET")) - orig_value = win32gui.SystemParametersInfo(cget) - print("\toriginal setting:", orig_value) - win32gui.SystemParametersInfo(cset, orig_value + 1) - new_value = win32gui.SystemParametersInfo(cget) - print("\tnew value:", new_value) - # On Vista, some of these values seem to be ignored. 
So only "fail" if - # the new value isn't what we set or the original - if new_value != orig_value + 1: - assert new_value == orig_value - print("Strange - setting %s seems to have been ignored" % (pname,)) - win32gui.SystemParametersInfo(cset, orig_value) - assert win32gui.SystemParametersInfo(cget) == orig_value - - -# these take a boolean value in pvParam -# change to opposite, check that it was changed and change back -for pname in ( - "SPI_GETFLATMENU", - "SPI_GETDROPSHADOW", - "SPI_GETKEYBOARDCUES", - "SPI_GETMENUFADE", - "SPI_GETCOMBOBOXANIMATION", - "SPI_GETCURSORSHADOW", - "SPI_GETGRADIENTCAPTIONS", - "SPI_GETHOTTRACKING", - "SPI_GETLISTBOXSMOOTHSCROLLING", - "SPI_GETMENUANIMATION", - "SPI_GETSELECTIONFADE", - "SPI_GETTOOLTIPANIMATION", - "SPI_GETTOOLTIPFADE", - "SPI_GETUIEFFECTS", - "SPI_GETACTIVEWINDOWTRACKING", - "SPI_GETACTIVEWNDTRKZORDER", -): - print(pname) - cget = getattr(win32con, pname) - cset = getattr(win32con, pname.replace("_GET", "_SET")) - orig_value = win32gui.SystemParametersInfo(cget) - print(orig_value) - win32gui.SystemParametersInfo(cset, not orig_value) - new_value = win32gui.SystemParametersInfo(cget) - print(new_value) - assert orig_value != new_value - win32gui.SystemParametersInfo(cset, orig_value) - assert win32gui.SystemParametersInfo(cget) == orig_value - - -# these take a boolean in uiParam -# could combine with above section now that SystemParametersInfo only takes a single parameter -for pname in ( - "SPI_GETFONTSMOOTHING", - "SPI_GETICONTITLEWRAP", - "SPI_GETBEEP", - "SPI_GETBLOCKSENDINPUTRESETS", - "SPI_GETKEYBOARDPREF", - "SPI_GETSCREENSAVEACTIVE", - "SPI_GETMENUDROPALIGNMENT", - "SPI_GETDRAGFULLWINDOWS", - "SPI_GETSHOWIMEUI", -): - cget = getattr(win32con, pname) - cset = getattr(win32con, pname.replace("_GET", "_SET")) - orig_value = win32gui.SystemParametersInfo(cget) - win32gui.SystemParametersInfo(cset, not orig_value) - new_value = win32gui.SystemParametersInfo(cget) - # Some of these also can't be changed (eg, 
SPI_GETSCREENSAVEACTIVE) so - # don't actually get upset. - if orig_value != new_value: - print("successfully toggled", pname, "from", orig_value, "to", new_value) - else: - print("couldn't toggle", pname, "from", orig_value) - win32gui.SystemParametersInfo(cset, orig_value) - assert win32gui.SystemParametersInfo(cget) == orig_value - - -print("SPI_GETICONTITLELOGFONT") -lf = win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT) -orig_height = lf.lfHeight -orig_italic = lf.lfItalic -print("Height:", orig_height, "Italic:", orig_italic) -lf.lfHeight += 2 -lf.lfItalic = not lf.lfItalic -win32gui.SystemParametersInfo(win32con.SPI_SETICONTITLELOGFONT, lf) -new_lf = win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT) -print("New Height:", new_lf.lfHeight, "New Italic:", new_lf.lfItalic) -assert new_lf.lfHeight == orig_height + 2 -assert new_lf.lfItalic != orig_italic - -lf.lfHeight = orig_height -lf.lfItalic = orig_italic -win32gui.SystemParametersInfo(win32con.SPI_SETICONTITLELOGFONT, lf) -new_lf = win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT) -assert new_lf.lfHeight == orig_height -assert new_lf.lfItalic == orig_italic - - -print("SPI_GETMOUSEHOVERWIDTH, SPI_GETMOUSEHOVERHEIGHT, SPI_GETMOUSEHOVERTIME") -w = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH) -h = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT) -t = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME) -print("w,h,t:", w, h, t) - -win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERWIDTH, w + 1) -win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERHEIGHT, h + 2) -win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERTIME, t + 3) -new_w = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH) -new_h = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT) -new_t = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME) -print("new w,h,t:", new_w, new_h, new_t) -assert new_w == w + 1 -assert new_h 
== h + 2 -assert new_t == t + 3 - -win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERWIDTH, w) -win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERHEIGHT, h) -win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERTIME, t) -new_w = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH) -new_h = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT) -new_t = win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME) -assert new_w == w -assert new_h == h -assert new_t == t - - -print("SPI_SETDOUBLECLKWIDTH, SPI_SETDOUBLECLKHEIGHT") -x = win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK) -y = win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK) -print("x,y:", x, y) -win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKWIDTH, x + 1) -win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKHEIGHT, y + 2) -new_x = win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK) -new_y = win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK) -print("new x,y:", new_x, new_y) -assert new_x == x + 1 -assert new_y == y + 2 -win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKWIDTH, x) -win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKHEIGHT, y) -new_x = win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK) -new_y = win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK) -assert new_x == x -assert new_y == y - - -print("SPI_SETDRAGWIDTH, SPI_SETDRAGHEIGHT") -dw = win32api.GetSystemMetrics(win32con.SM_CXDRAG) -dh = win32api.GetSystemMetrics(win32con.SM_CYDRAG) -print("dw,dh:", dw, dh) -win32gui.SystemParametersInfo(win32con.SPI_SETDRAGWIDTH, dw + 1) -win32gui.SystemParametersInfo(win32con.SPI_SETDRAGHEIGHT, dh + 2) -new_dw = win32api.GetSystemMetrics(win32con.SM_CXDRAG) -new_dh = win32api.GetSystemMetrics(win32con.SM_CYDRAG) -print("new dw,dh:", new_dw, new_dh) -assert new_dw == dw + 1 -assert new_dh == dh + 2 -win32gui.SystemParametersInfo(win32con.SPI_SETDRAGWIDTH, dw) -win32gui.SystemParametersInfo(win32con.SPI_SETDRAGHEIGHT, dh) -new_dw = 
win32api.GetSystemMetrics(win32con.SM_CXDRAG) -new_dh = win32api.GetSystemMetrics(win32con.SM_CYDRAG) -assert new_dw == dw -assert new_dh == dh - - -orig_wallpaper = win32gui.SystemParametersInfo(Action=win32con.SPI_GETDESKWALLPAPER) -print("Original: ", orig_wallpaper) -for bmp in glob.glob(os.path.join(os.environ["windir"], "*.bmp")): - print(bmp) - win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER, Param=bmp) - print(win32gui.SystemParametersInfo(Action=win32con.SPI_GETDESKWALLPAPER)) - time.sleep(1) - -win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER, Param=orig_wallpaper) diff --git a/lib/win32/Demos/c_extension/setup.py b/lib/win32/Demos/c_extension/setup.py deleted file mode 100644 index 6f5cb417..00000000 --- a/lib/win32/Demos/c_extension/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -# A sample distutils script to show to build your own -# extension module which extends pywintypes or pythoncom. -# -# Use 'python setup.py build' to build this extension. -import os -from distutils.core import Extension, setup -from sysconfig import get_paths - -sources = ["win32_extension.cpp"] -lib_dir = get_paths()["platlib"] - -# Specify the directory where the PyWin32 .h and .lib files are installed. -# If you are doing a win32com extension, you will also need to add -# win32com\Include and win32com\Libs. -ext = Extension( - "win32_extension", - sources, - include_dirs=[os.path.join(lib_dir, "win32", "include")], - library_dirs=[os.path.join(lib_dir, "win32", "libs")], -) - -setup( - name="win32 extension sample", - version="0.1", - ext_modules=[ext], -) diff --git a/lib/win32/Demos/cerapi.py b/lib/win32/Demos/cerapi.py deleted file mode 100644 index c5d93166..00000000 --- a/lib/win32/Demos/cerapi.py +++ /dev/null @@ -1,254 +0,0 @@ -# A demo of the Windows CE Remote API -# -# This connects to a CE device, and interacts with it. 
- -import getopt -import os -import sys - -import win32api -import win32con -import win32event -import wincerapi - - -def DumpPythonRegistry(): - try: - h = wincerapi.CeRegOpenKeyEx( - win32con.HKEY_LOCAL_MACHINE, - "Software\\Python\\PythonCore\\%s\\PythonPath" % sys.winver, - ) - except win32api.error: - print("The remote device does not appear to have Python installed") - return 0 - path, typ = wincerapi.CeRegQueryValueEx(h, None) - print("The remote PythonPath is '%s'" % (str(path),)) - h.Close() - return 1 - - -def DumpRegistry(root, level=0): - # A recursive dump of the remote registry to test most functions. - h = wincerapi.CeRegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, None) - level_prefix = " " * level - index = 0 - # Enumerate values. - while 1: - try: - name, data, typ = wincerapi.CeRegEnumValue(root, index) - except win32api.error: - break - print("%s%s=%s" % (level_prefix, name, repr(str(data)))) - index = index + 1 - # Now enumerate all keys. - index = 0 - while 1: - try: - name, klass = wincerapi.CeRegEnumKeyEx(root, index) - except win32api.error: - break - print("%s%s\\" % (level_prefix, name)) - subkey = wincerapi.CeRegOpenKeyEx(root, name) - DumpRegistry(subkey, level + 1) - index = index + 1 - - -def DemoCopyFile(): - # Create a file on the device, and write a string. - cefile = wincerapi.CeCreateFile( - "TestPython", win32con.GENERIC_WRITE, 0, None, win32con.OPEN_ALWAYS, 0, None - ) - wincerapi.CeWriteFile(cefile, "Hello from Python") - cefile.Close() - # reopen the file and check the data. 
- cefile = wincerapi.CeCreateFile( - "TestPython", win32con.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None - ) - if wincerapi.CeReadFile(cefile, 100) != "Hello from Python": - print("Couldnt read the data from the device!") - cefile.Close() - # Delete the test file - wincerapi.CeDeleteFile("TestPython") - print("Created, wrote to, read from and deleted a test file!") - - -def DemoCreateProcess(): - try: - hp, ht, pid, tid = wincerapi.CeCreateProcess( - "Windows\\Python.exe", "", None, None, 0, 0, None, "", None - ) - - # Not necessary, except to see if handle closing raises an exception - # (if auto-closed, the error is suppressed) - hp.Close() - ht.Close() - print("Python is running on the remote device!") - except win32api.error as xxx_todo_changeme1: - (hr, fn, msg) = xxx_todo_changeme1.args - print("Couldnt execute remote process -", msg) - - -def DumpRemoteMachineStatus(): - ( - ACLineStatus, - BatteryFlag, - BatteryLifePercent, - BatteryLifeTime, - BatteryFullLifeTime, - BackupBatteryFlag, - BackupBatteryLifePercent, - BackupBatteryLifeTime, - BackupBatteryLifeTime, - ) = wincerapi.CeGetSystemPowerStatusEx() - if ACLineStatus: - power = "AC" - else: - power = "battery" - if BatteryLifePercent == 255: - batPerc = "unknown" - else: - batPerc = BatteryLifePercent - print( - "The batteries are at %s%%, and is currently being powered by %s" - % (batPerc, power) - ) - - ( - memLoad, - totalPhys, - availPhys, - totalPage, - availPage, - totalVirt, - availVirt, - ) = wincerapi.CeGlobalMemoryStatus() - - print("The memory is %d%% utilized." 
% (memLoad)) - print("%-20s%-10s%-10s" % ("", "Total", "Avail")) - print("%-20s%-10s%-10s" % ("Physical Memory", totalPhys, availPhys)) - print("%-20s%-10s%-10s" % ("Virtual Memory", totalVirt, availVirt)) - print("%-20s%-10s%-10s" % ("Paging file", totalPage, availPage)) - - storeSize, freeSize = wincerapi.CeGetStoreInformation() - print("%-20s%-10s%-10s" % ("File store", storeSize, freeSize)) - - print("The CE temp path is", wincerapi.CeGetTempPath()) - print("The system info for the device is", wincerapi.CeGetSystemInfo()) - - -def DumpRemoteFolders(): - # Dump all special folders possible. - for name, val in list(wincerapi.__dict__.items()): - if name[:6] == "CSIDL_": - try: - loc = str(wincerapi.CeGetSpecialFolderPath(val)) - print("Folder %s is at %s" % (name, loc)) - except win32api.error as details: - pass - - # Get the shortcut targets for the "Start Menu" - print("Dumping start menu shortcuts...") - try: - startMenu = str(wincerapi.CeGetSpecialFolderPath(wincerapi.CSIDL_STARTMENU)) - except win32api.error as details: - print("This device has no start menu!", details) - startMenu = None - - if startMenu: - for fileAttr in wincerapi.CeFindFiles(os.path.join(startMenu, "*")): - fileName = fileAttr[8] - fullPath = os.path.join(startMenu, str(fileName)) - try: - resolved = wincerapi.CeSHGetShortcutTarget(fullPath) - except win32api.error as xxx_todo_changeme: - (rc, fn, msg) = xxx_todo_changeme.args - resolved = "#Error - %s" % msg - print("%s->%s" % (fileName, resolved)) - - # print "The start menu is at", - # print wincerapi.CeSHGetShortcutTarget("\\Windows\\Start Menu\\Shortcut to Python.exe.lnk") - - -def usage(): - print("Options:") - print("-a - Execute all demos") - print("-p - Execute Python process on remote device") - print("-r - Dump the remote registry") - print("-f - Dump all remote special folder locations") - print("-s - Dont dump machine status") - print("-y - Perform asynch init of CE connection") - - -def main(): - async_init = bStartPython = 
bDumpRegistry = bDumpFolders = 0 - bDumpStatus = 1 - try: - opts, args = getopt.getopt(sys.argv[1:], "apr") - except getopt.error as why: - print("Invalid usage:", why) - usage() - return - - for o, v in opts: - if o == "-a": - bStartPython = bDumpRegistry = bDumpStatus = bDumpFolders = asynch_init = 1 - if o == "-p": - bStartPython = 1 - if o == "-r": - bDumpRegistry = 1 - if o == "-s": - bDumpStatus = 0 - if o == "-f": - bDumpFolders = 1 - if o == "-y": - print("Doing asynch init of CE connection") - async_init = 1 - - if async_init: - event, rc = wincerapi.CeRapiInitEx() - while 1: - rc = win32event.WaitForSingleObject(event, 500) - if rc == win32event.WAIT_OBJECT_0: - # We connected. - break - else: - print( - "Waiting for Initialize to complete (picture a Cancel button here :)" - ) - else: - wincerapi.CeRapiInit() - print("Connected to remote CE device.") - try: - verinfo = wincerapi.CeGetVersionEx() - print( - "The device is running windows CE version %d.%d - %s" - % (verinfo[0], verinfo[1], verinfo[4]) - ) - - if bDumpStatus: - print("Dumping remote machine status") - DumpRemoteMachineStatus() - - if bDumpRegistry: - print("Dumping remote registry...") - DumpRegistry(win32con.HKEY_LOCAL_MACHINE) - - if bDumpFolders: - print("Dumping remote folder information") - DumpRemoteFolders() - - DemoCopyFile() - if bStartPython: - print("Starting remote Python process") - if DumpPythonRegistry(): - DemoCreateProcess() - else: - print("Not trying to start Python, as it's not installed") - - finally: - wincerapi.CeRapiUninit() - print("Disconnected") - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/dde/ddeclient.py b/lib/win32/Demos/dde/ddeclient.py deleted file mode 100644 index 32af3130..00000000 --- a/lib/win32/Demos/dde/ddeclient.py +++ /dev/null @@ -1,18 +0,0 @@ -# 'Request' example added jjk 11/20/98 - -import dde -import win32ui - -server = dde.CreateServer() -server.Create("TestClient") - -conversation = dde.CreateConversation(server) - 
-conversation.ConnectTo("RunAny", "RunAnyCommand") -conversation.Exec("DoSomething") -conversation.Exec("DoSomethingElse") - -conversation.ConnectTo("RunAny", "ComputeStringLength") -s = "abcdefghi" -sl = conversation.Request(s) -print('length of "%s" is %s' % (s, sl)) diff --git a/lib/win32/Demos/dde/ddeserver.py b/lib/win32/Demos/dde/ddeserver.py deleted file mode 100644 index 0374827f..00000000 --- a/lib/win32/Demos/dde/ddeserver.py +++ /dev/null @@ -1,42 +0,0 @@ -# 'Request' example added jjk 11/20/98 - -import dde -import win32ui -from pywin.mfc import object - - -class MySystemTopic(object.Object): - def __init__(self): - object.Object.__init__(self, dde.CreateServerSystemTopic()) - - def Exec(self, cmd): - print("System Topic asked to exec", cmd) - - -class MyOtherTopic(object.Object): - def __init__(self, topicName): - object.Object.__init__(self, dde.CreateTopic(topicName)) - - def Exec(self, cmd): - print("Other Topic asked to exec", cmd) - - -class MyRequestTopic(object.Object): - def __init__(self, topicName): - topic = dde.CreateTopic(topicName) - topic.AddItem(dde.CreateStringItem("")) - object.Object.__init__(self, topic) - - def Request(self, aString): - print("Request Topic asked to compute length of:", aString) - return str(len(aString)) - - -server = dde.CreateServer() -server.AddTopic(MySystemTopic()) -server.AddTopic(MyOtherTopic("RunAnyCommand")) -server.AddTopic(MyRequestTopic("ComputeStringLength")) -server.Create("RunAny") - -while 1: - win32ui.PumpWaitingMessages(0, -1) diff --git a/lib/win32/Demos/desktopmanager.py b/lib/win32/Demos/desktopmanager.py deleted file mode 100644 index 6ffaf2a9..00000000 --- a/lib/win32/Demos/desktopmanager.py +++ /dev/null @@ -1,246 +0,0 @@ -# Demonstrates using a taskbar icon to create and navigate between desktops - -import _thread -import io -import time -import traceback - -import pywintypes -import win32api -import win32con -import win32gui -import win32process -import win32service - -## "Shell_TrayWnd" 
is class of system tray window, broadcasts "TaskbarCreated" when initialized - - -def desktop_name_dlgproc(hwnd, msg, wparam, lparam): - """Handles messages from the desktop name dialog box""" - if msg in (win32con.WM_CLOSE, win32con.WM_DESTROY): - win32gui.DestroyWindow(hwnd) - elif msg == win32con.WM_COMMAND: - if wparam == win32con.IDOK: - desktop_name = win32gui.GetDlgItemText(hwnd, 72) - print("new desktop name: ", desktop_name) - win32gui.DestroyWindow(hwnd) - create_desktop(desktop_name) - - elif wparam == win32con.IDCANCEL: - win32gui.DestroyWindow(hwnd) - - -def get_new_desktop_name(parent_hwnd): - """Create a dialog box to ask the user for name of desktop to be created""" - msgs = { - win32con.WM_COMMAND: desktop_name_dlgproc, - win32con.WM_CLOSE: desktop_name_dlgproc, - win32con.WM_DESTROY: desktop_name_dlgproc, - } - # dlg item [type, caption, id, (x,y,cx,cy), style, ex style - style = ( - win32con.WS_BORDER - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - ) ## |win32con.DS_SYSMODAL - h = win32gui.CreateDialogIndirect( - win32api.GetModuleHandle(None), - [ - ["One ugly dialog box !", (100, 100, 200, 100), style, 0], - [ - "Button", - "Create", - win32con.IDOK, - (10, 10, 30, 20), - win32con.WS_VISIBLE - | win32con.WS_TABSTOP - | win32con.BS_HOLLOW - | win32con.BS_DEFPUSHBUTTON, - ], - [ - "Button", - "Never mind", - win32con.IDCANCEL, - (45, 10, 50, 20), - win32con.WS_VISIBLE | win32con.WS_TABSTOP | win32con.BS_HOLLOW, - ], - ["Static", "Desktop name:", 71, (10, 40, 70, 10), win32con.WS_VISIBLE], - ["Edit", "", 72, (75, 40, 90, 10), win32con.WS_VISIBLE], - ], - parent_hwnd, - msgs, - ) ## parent_hwnd, msgs) - - win32gui.EnableWindow(h, True) - hcontrol = win32gui.GetDlgItem(h, 72) - win32gui.EnableWindow(hcontrol, True) - win32gui.SetFocus(hcontrol) - - -def new_icon(hdesk, desktop_name): - """Runs as a thread on each desktop to create a new tray icon and handle its messages""" - global id - id = id + 1 - 
hdesk.SetThreadDesktop() - ## apparently the threads can't use same hinst, so each needs its own window class - windowclassname = "PythonDesktopManager" + desktop_name - wc = win32gui.WNDCLASS() - wc.hInstance = win32api.GetModuleHandle(None) - wc.lpszClassName = windowclassname - wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW | win32con.CS_GLOBALCLASS - wc.hCursor = win32gui.LoadCursor(0, win32con.IDC_ARROW) - wc.hbrBackground = win32con.COLOR_WINDOW - wc.lpfnWndProc = icon_wndproc - windowclass = win32gui.RegisterClass(wc) - style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU - hwnd = win32gui.CreateWindow( - windowclass, - "dm_" + desktop_name, - win32con.WS_SYSMENU, - 0, - 0, - win32con.CW_USEDEFAULT, - win32con.CW_USEDEFAULT, - 0, - 0, - wc.hInstance, - None, - ) - win32gui.UpdateWindow(hwnd) - flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_TIP - notify_info = ( - hwnd, - id, - flags, - win32con.WM_USER + 20, - hicon, - "Desktop Manager (%s)" % desktop_name, - ) - window_info[hwnd] = notify_info - ## wait for explorer to initialize system tray for new desktop - tray_found = 0 - while not tray_found: - try: - tray_found = win32gui.FindWindow("Shell_TrayWnd", None) - except win32gui.error: - traceback.print_exc - time.sleep(0.5) - win32gui.Shell_NotifyIcon(win32gui.NIM_ADD, notify_info) - win32gui.PumpMessages() - - -def create_desktop(desktop_name, start_explorer=1): - """Creates a new desktop and spawns a thread running on it - Will also start a new icon thread on an existing desktop - """ - sa = pywintypes.SECURITY_ATTRIBUTES() - sa.bInheritHandle = 1 - - try: - hdesk = win32service.CreateDesktop( - desktop_name, 0, win32con.MAXIMUM_ALLOWED, sa - ) - except win32service.error: - traceback.print_exc() - errbuf = io.StringIO() - traceback.print_exc(None, errbuf) - win32api.MessageBox(0, errbuf.getvalue(), "Desktop creation failed") - return - if start_explorer: - s = win32process.STARTUPINFO() - s.lpDesktop = desktop_name - prc_info = 
win32process.CreateProcess( - None, - "Explorer.exe", - None, - None, - True, - win32con.CREATE_NEW_CONSOLE, - None, - "c:\\", - s, - ) - - th = _thread.start_new_thread(new_icon, (hdesk, desktop_name)) - hdesk.SwitchDesktop() - - -def icon_wndproc(hwnd, msg, wp, lp): - """Window proc for the tray icons""" - if lp == win32con.WM_LBUTTONDOWN: - ## popup menu won't disappear if you don't do this - win32gui.SetForegroundWindow(hwnd) - - curr_desktop = win32service.OpenInputDesktop(0, True, win32con.MAXIMUM_ALLOWED) - curr_desktop_name = win32service.GetUserObjectInformation( - curr_desktop, win32con.UOI_NAME - ) - winsta = win32service.GetProcessWindowStation() - desktops = winsta.EnumDesktops() - m = win32gui.CreatePopupMenu() - desktop_cnt = len(desktops) - ## *don't* create an item 0 - for d in range(1, desktop_cnt + 1): - mf_flags = win32con.MF_STRING - ## if you switch to winlogon yourself, there's nothing there and you're stuck - if desktops[d - 1].lower() in ("winlogon", "disconnect"): - mf_flags = mf_flags | win32con.MF_GRAYED | win32con.MF_DISABLED - if desktops[d - 1] == curr_desktop_name: - mf_flags = mf_flags | win32con.MF_CHECKED - win32gui.AppendMenu(m, mf_flags, d, desktops[d - 1]) - win32gui.AppendMenu(m, win32con.MF_STRING, desktop_cnt + 1, "Create new ...") - win32gui.AppendMenu(m, win32con.MF_STRING, desktop_cnt + 2, "Exit") - - x, y = win32gui.GetCursorPos() - d = win32gui.TrackPopupMenu( - m, - win32con.TPM_LEFTBUTTON | win32con.TPM_RETURNCMD | win32con.TPM_NONOTIFY, - x, - y, - 0, - hwnd, - None, - ) - win32gui.PumpWaitingMessages() - win32gui.DestroyMenu(m) - if d == desktop_cnt + 1: ## Create new - get_new_desktop_name(hwnd) - elif d == desktop_cnt + 2: ## Exit - win32gui.PostQuitMessage(0) - win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, window_info[hwnd]) - del window_info[hwnd] - origin_desktop.SwitchDesktop() - elif d > 0: - hdesk = win32service.OpenDesktop( - desktops[d - 1], 0, 0, win32con.MAXIMUM_ALLOWED - ) - hdesk.SwitchDesktop() - 
return 0 - else: - return win32gui.DefWindowProc(hwnd, msg, wp, lp) - - -window_info = {} -origin_desktop = win32service.OpenInputDesktop(0, True, win32con.MAXIMUM_ALLOWED) -origin_desktop_name = win32service.GetUserObjectInformation( - origin_desktop, win32service.UOI_NAME -) - -hinst = win32api.GetModuleHandle(None) -try: - hicon = win32gui.LoadIcon(hinst, 1) ## python.exe and pythonw.exe -except win32gui.error: - hicon = win32gui.LoadIcon(hinst, 135) ## pythonwin's icon -id = 0 - -create_desktop(str(origin_desktop_name), 0) - -## wait for first thread to initialize its icon -while not window_info: - time.sleep(1) - -## exit when last tray icon goes away -while window_info: - win32gui.PumpWaitingMessages() - time.sleep(3) diff --git a/lib/win32/Demos/eventLogDemo.py b/lib/win32/Demos/eventLogDemo.py deleted file mode 100644 index 8d4220b0..00000000 --- a/lib/win32/Demos/eventLogDemo.py +++ /dev/null @@ -1,142 +0,0 @@ -import win32api # To translate NT Sids to account names. -import win32con -import win32evtlog -import win32evtlogutil -import win32security - - -def ReadLog(computer, logType="Application", dumpEachRecord=0): - # read the entire log back. - h = win32evtlog.OpenEventLog(computer, logType) - numRecords = win32evtlog.GetNumberOfEventLogRecords(h) - # print "There are %d records" % numRecords - - num = 0 - while 1: - objects = win32evtlog.ReadEventLog( - h, - win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ, - 0, - ) - if not objects: - break - for object in objects: - # get it for testing purposes, but dont print it. 
- msg = win32evtlogutil.SafeFormatMessage(object, logType) - if object.Sid is not None: - try: - domain, user, typ = win32security.LookupAccountSid( - computer, object.Sid - ) - sidDesc = "%s/%s" % (domain, user) - except win32security.error: - sidDesc = str(object.Sid) - user_desc = "Event associated with user %s" % (sidDesc,) - else: - user_desc = None - if dumpEachRecord: - print( - "Event record from %r generated at %s" - % (object.SourceName, object.TimeGenerated.Format()) - ) - if user_desc: - print(user_desc) - try: - print(msg) - except UnicodeError: - print("(unicode error printing message: repr() follows...)") - print(repr(msg)) - - num = num + len(objects) - - if numRecords == num: - print("Successfully read all", numRecords, "records") - else: - print( - "Couldn't get all records - reported %d, but found %d" % (numRecords, num) - ) - print( - "(Note that some other app may have written records while we were running!)" - ) - win32evtlog.CloseEventLog(h) - - -def usage(): - print("Writes an event to the event log.") - print("-w : Dont write any test records.") - print("-r : Dont read the event log") - print("-c : computerName : Process the log on the specified computer") - print("-v : Verbose") - print("-t : LogType - Use the specified log - default = 'Application'") - - -def test(): - # check if running on Windows NT, if not, display notice and terminate - if win32api.GetVersion() & 0x80000000: - print("This sample only runs on NT") - return - - import getopt - import sys - - opts, args = getopt.getopt(sys.argv[1:], "rwh?c:t:v") - computer = None - do_read = do_write = 1 - - logType = "Application" - verbose = 0 - - if len(args) > 0: - print("Invalid args") - usage() - return 1 - for opt, val in opts: - if opt == "-t": - logType = val - if opt == "-c": - computer = val - if opt in ["-h", "-?"]: - usage() - return - if opt == "-r": - do_read = 0 - if opt == "-w": - do_write = 0 - if opt == "-v": - verbose = verbose + 1 - if do_write: - ph = 
win32api.GetCurrentProcess() - th = win32security.OpenProcessToken(ph, win32con.TOKEN_READ) - my_sid = win32security.GetTokenInformation(th, win32security.TokenUser)[0] - - win32evtlogutil.ReportEvent( - logType, - 2, - strings=["The message text for event 2", "Another insert"], - data="Raw\0Data".encode("ascii"), - sid=my_sid, - ) - win32evtlogutil.ReportEvent( - logType, - 1, - eventType=win32evtlog.EVENTLOG_WARNING_TYPE, - strings=["A warning", "An even more dire warning"], - data="Raw\0Data".encode("ascii"), - sid=my_sid, - ) - win32evtlogutil.ReportEvent( - logType, - 1, - eventType=win32evtlog.EVENTLOG_INFORMATION_TYPE, - strings=["An info", "Too much info"], - data="Raw\0Data".encode("ascii"), - sid=my_sid, - ) - print("Successfully wrote 3 records to the log") - - if do_read: - ReadLog(computer, logType, verbose > 0) - - -if __name__ == "__main__": - test() diff --git a/lib/win32/Demos/getfilever.py b/lib/win32/Demos/getfilever.py deleted file mode 100644 index 721e72f3..00000000 --- a/lib/win32/Demos/getfilever.py +++ /dev/null @@ -1,33 +0,0 @@ -import os - -import win32api - -ver_strings = ( - "Comments", - "InternalName", - "ProductName", - "CompanyName", - "LegalCopyright", - "ProductVersion", - "FileDescription", - "LegalTrademarks", - "PrivateBuild", - "FileVersion", - "OriginalFilename", - "SpecialBuild", -) -fname = os.environ["comspec"] -d = win32api.GetFileVersionInfo(fname, "\\") -## backslash as parm returns dictionary of numeric info corresponding to VS_FIXEDFILEINFO struc -for n, v in d.items(): - print(n, v) - -pairs = win32api.GetFileVersionInfo(fname, "\\VarFileInfo\\Translation") -## \VarFileInfo\Translation returns list of available (language, codepage) pairs that can be used to retreive string info -## any other must be of the form \StringfileInfo\%04X%04X\parm_name, middle two are language/codepage pair returned from above -for lang, codepage in pairs: - print("lang: ", lang, "codepage:", codepage) - for ver_string in ver_strings: - 
str_info = "\\StringFileInfo\\%04X%04X\\%s" % (lang, codepage, ver_string) - ## print str_info - print(ver_string, repr(win32api.GetFileVersionInfo(fname, str_info))) diff --git a/lib/win32/Demos/images/frowny.bmp b/lib/win32/Demos/images/frowny.bmp deleted file mode 100644 index 43e7621e..00000000 Binary files a/lib/win32/Demos/images/frowny.bmp and /dev/null differ diff --git a/lib/win32/Demos/images/smiley.bmp b/lib/win32/Demos/images/smiley.bmp deleted file mode 100644 index 12ed5dee..00000000 Binary files a/lib/win32/Demos/images/smiley.bmp and /dev/null differ diff --git a/lib/win32/Demos/mmapfile_demo.py b/lib/win32/Demos/mmapfile_demo.py deleted file mode 100644 index 9068ce72..00000000 --- a/lib/win32/Demos/mmapfile_demo.py +++ /dev/null @@ -1,102 +0,0 @@ -import os -import tempfile - -import mmapfile -import win32api -import winerror -from pywin32_testutil import str2bytes - -system_info = win32api.GetSystemInfo() -page_size = system_info[1] -alloc_size = system_info[7] - -fname = tempfile.mktemp() -mapping_name = os.path.split(fname)[1] -fsize = 8 * page_size -print(fname, fsize, mapping_name) - -m1 = mmapfile.mmapfile(File=fname, Name=mapping_name, MaximumSize=fsize) -m1.seek(100) -m1.write_byte(str2bytes("?")) -m1.seek(-1, 1) -assert m1.read_byte() == str2bytes("?") - -## A reopened named mapping should have exact same size as original mapping -m2 = mmapfile.mmapfile(Name=mapping_name, File=None, MaximumSize=fsize * 2) -assert m2.size() == m1.size() -m1.seek(0, 0) -m1.write(fsize * str2bytes("s")) -assert m2.read(fsize) == fsize * str2bytes("s") - -move_src = 100 -move_dest = 500 -move_size = 150 - -m2.seek(move_src, 0) -assert m2.tell() == move_src -m2.write(str2bytes("m") * move_size) -m2.move(move_dest, move_src, move_size) -m2.seek(move_dest, 0) -assert m2.read(move_size) == str2bytes("m") * move_size -## m2.write('x'* (fsize+1)) - -m2.close() -m1.resize(fsize * 2) -assert m1.size() == fsize * 2 -m1.seek(fsize) -m1.write(str2bytes("w") * fsize) 
-m1.flush() -m1.close() -os.remove(fname) - - -## Test a file with size larger than 32 bits -## need 10 GB free on drive where your temp folder lives -fname_large = tempfile.mktemp() -mapping_name = "Pywin32_large_mmap" -offsetdata = str2bytes("This is start of offset") - -## Deliberately use odd numbers to test rounding logic -fsize = (1024 * 1024 * 1024 * 10) + 333 -offset = (1024 * 1024 * 32) + 42 -view_size = (1024 * 1024 * 16) + 111 - -## round mapping size and view size up to multiple of system page size -if fsize % page_size: - fsize += page_size - (fsize % page_size) -if view_size % page_size: - view_size += page_size - (view_size % page_size) -## round offset down to multiple of allocation granularity -offset -= offset % alloc_size - -m1 = None -m2 = None -try: - try: - m1 = mmapfile.mmapfile(fname_large, mapping_name, fsize, 0, offset * 2) - except mmapfile.error as exc: - # if we don't have enough disk-space, that's OK. - if exc.winerror != winerror.ERROR_DISK_FULL: - raise - print("skipping large file test - need", fsize, "available bytes.") - else: - m1.seek(offset) - m1.write(offsetdata) - - ## When reopening an existing mapping without passing a file handle, you have - ## to specify a positive size even though it's ignored - m2 = mmapfile.mmapfile( - File=None, - Name=mapping_name, - MaximumSize=1, - FileOffset=offset, - NumberOfBytesToMap=view_size, - ) - assert m2.read(len(offsetdata)) == offsetdata -finally: - if m1 is not None: - m1.close() - if m2 is not None: - m2.close() - if os.path.exists(fname_large): - os.remove(fname_large) diff --git a/lib/win32/Demos/pipes/cat.py b/lib/win32/Demos/pipes/cat.py deleted file mode 100644 index 13b6400a..00000000 --- a/lib/win32/Demos/pipes/cat.py +++ /dev/null @@ -1,17 +0,0 @@ -"""cat.py -a version of unix cat, tweaked to show off runproc.py -""" - -import sys - -data = sys.stdin.read(1) -sys.stdout.write(data) -sys.stdout.flush() -while data: - data = sys.stdin.read(1) - sys.stdout.write(data) - 
sys.stdout.flush() -# Just here to have something to read from stderr. -sys.stderr.write("Blah...") - -# end of cat.py diff --git a/lib/win32/Demos/pipes/runproc.py b/lib/win32/Demos/pipes/runproc.py deleted file mode 100644 index 2ed02dd3..00000000 --- a/lib/win32/Demos/pipes/runproc.py +++ /dev/null @@ -1,114 +0,0 @@ -"""runproc.py - -start a process with three inherited pipes. -Try to write to and read from those. -""" - -import msvcrt -import os - -import win32api -import win32con -import win32file -import win32pipe -import win32process -import win32security - - -class Process: - def run(self, cmdline): - # security attributes for pipes - sAttrs = win32security.SECURITY_ATTRIBUTES() - sAttrs.bInheritHandle = 1 - - # create pipes - hStdin_r, self.hStdin_w = win32pipe.CreatePipe(sAttrs, 0) - self.hStdout_r, hStdout_w = win32pipe.CreatePipe(sAttrs, 0) - self.hStderr_r, hStderr_w = win32pipe.CreatePipe(sAttrs, 0) - - # set the info structure for the new process. - StartupInfo = win32process.STARTUPINFO() - StartupInfo.hStdInput = hStdin_r - StartupInfo.hStdOutput = hStdout_w - StartupInfo.hStdError = hStderr_w - StartupInfo.dwFlags = win32process.STARTF_USESTDHANDLES - # Mark doesn't support wShowWindow yet. - # StartupInfo.dwFlags = StartupInfo.dwFlags | win32process.STARTF_USESHOWWINDOW - # StartupInfo.wShowWindow = win32con.SW_HIDE - - # Create new output read handles and the input write handle. Set - # the inheritance properties to FALSE. Otherwise, the child inherits - # the these handles; resulting in non-closeable handles to the pipes - # being created. - pid = win32api.GetCurrentProcess() - - tmp = win32api.DuplicateHandle( - pid, - self.hStdin_w, - pid, - 0, - 0, # non-inheritable!! - win32con.DUPLICATE_SAME_ACCESS, - ) - # Close the inhertible version of the handle - win32file.CloseHandle(self.hStdin_w) - self.hStdin_w = tmp - tmp = win32api.DuplicateHandle( - pid, - self.hStdout_r, - pid, - 0, - 0, # non-inheritable! 
- win32con.DUPLICATE_SAME_ACCESS, - ) - # Close the inhertible version of the handle - win32file.CloseHandle(self.hStdout_r) - self.hStdout_r = tmp - - # start the process. - hProcess, hThread, dwPid, dwTid = win32process.CreateProcess( - None, # program - cmdline, # command line - None, # process security attributes - None, # thread attributes - 1, # inherit handles, or USESTDHANDLES won't work. - # creation flags. Don't access the console. - 0, # Don't need anything here. - # If you're in a GUI app, you should use - # CREATE_NEW_CONSOLE here, or any subprocesses - # might fall victim to the problem described in: - # KB article: Q156755, cmd.exe requires - # an NT console in order to perform redirection.. - None, # no new environment - None, # current directory (stay where we are) - StartupInfo, - ) - # normally, we would save the pid etc. here... - - # Child is launched. Close the parents copy of those pipe handles - # that only the child should have open. - # You need to make sure that no handles to the write end of the - # output pipe are maintained in this process or else the pipe will - # not close when the child process exits and the ReadFile will hang. 
- win32file.CloseHandle(hStderr_w) - win32file.CloseHandle(hStdout_w) - win32file.CloseHandle(hStdin_r) - - self.stdin = os.fdopen(msvcrt.open_osfhandle(self.hStdin_w, 0), "wb") - self.stdin.write("hmmmmm\r\n") - self.stdin.flush() - self.stdin.close() - - self.stdout = os.fdopen(msvcrt.open_osfhandle(self.hStdout_r, 0), "rb") - print("Read on stdout: ", repr(self.stdout.read())) - - self.stderr = os.fdopen(msvcrt.open_osfhandle(self.hStderr_r, 0), "rb") - print("Read on stderr: ", repr(self.stderr.read())) - - -if __name__ == "__main__": - p = Process() - exe = win32api.GetModuleFileName(0) - p.run(exe + " cat.py") - -# end of runproc.py diff --git a/lib/win32/Demos/print_desktop.py b/lib/win32/Demos/print_desktop.py deleted file mode 100644 index d06504e2..00000000 --- a/lib/win32/Demos/print_desktop.py +++ /dev/null @@ -1,113 +0,0 @@ -import pywintypes -import win32api -import win32con -import win32gui -import win32print - -pname = win32print.GetDefaultPrinter() -print(pname) -p = win32print.OpenPrinter(pname) -print("Printer handle: ", p) -print_processor = win32print.GetPrinter(p, 2)["pPrintProcessor"] -## call with last parm set to 0 to get total size needed for printer's DEVMODE -dmsize = win32print.DocumentProperties(0, p, pname, None, None, 0) -## dmDriverExtra should be total size - fixed size -driverextra = ( - dmsize - pywintypes.DEVMODEType().Size -) ## need a better way to get DEVMODE.dmSize -dm = pywintypes.DEVMODEType(driverextra) -dm.Fields = dm.Fields | win32con.DM_ORIENTATION | win32con.DM_COPIES -dm.Orientation = win32con.DMORIENT_LANDSCAPE -dm.Copies = 2 -win32print.DocumentProperties( - 0, p, pname, dm, dm, win32con.DM_IN_BUFFER | win32con.DM_OUT_BUFFER -) - -pDC = win32gui.CreateDC(print_processor, pname, dm) -printerwidth = win32print.GetDeviceCaps(pDC, win32con.PHYSICALWIDTH) -printerheight = win32print.GetDeviceCaps(pDC, win32con.PHYSICALHEIGHT) - -hwnd = win32gui.GetDesktopWindow() -l, t, r, b = win32gui.GetWindowRect(hwnd) -desktopheight 
= b - t -desktopwidth = r - l -dDC = win32gui.GetWindowDC(hwnd) - -dcDC = win32gui.CreateCompatibleDC(dDC) -dcBM = win32gui.CreateCompatibleBitmap(dDC, desktopwidth, desktopheight) -win32gui.SelectObject(dcDC, dcBM) -win32gui.StretchBlt( - dcDC, - 0, - 0, - desktopwidth, - desktopheight, - dDC, - 0, - 0, - desktopwidth, - desktopheight, - win32con.SRCCOPY, -) - -pcDC = win32gui.CreateCompatibleDC(pDC) -pcBM = win32gui.CreateCompatibleBitmap(pDC, printerwidth, printerheight) -win32gui.SelectObject(pcDC, pcBM) -win32gui.StretchBlt( - pcDC, - 0, - 0, - printerwidth, - printerheight, - dcDC, - 0, - 0, - desktopwidth, - desktopheight, - win32con.SRCCOPY, -) - -win32print.StartDoc(pDC, ("desktop.bmp", None, None, 0)) -win32print.StartPage(pDC) -win32gui.StretchBlt( - pDC, - 0, - 0, - int(printerwidth * 0.9), - int(printerheight * 0.9), - pcDC, - 0, - 0, - printerwidth, - printerheight, - win32con.SRCCOPY, -) - -font = win32gui.LOGFONT() -font.lfHeight = int(printerheight / 20) -font.lfWidth = font.lfHeight -font.lfWeight = 150 -font.lfItalic = 1 -font.lfUnderline = 1 -hf = win32gui.CreateFontIndirect(font) -win32gui.SelectObject(pDC, hf) -win32gui.SetBkMode(pDC, win32con.TRANSPARENT) -win32gui.SetTextColor(pDC, win32api.RGB(0, 255, 0)) -win32gui.DrawText( - pDC, - "Printed by Python!", - -1, - (0, 0, int(printerwidth * 0.9), int(printerheight * 0.9)), - win32con.DT_RIGHT | win32con.DT_BOTTOM | win32con.DT_SINGLELINE, -) -win32print.EndPage(pDC) -win32print.EndDoc(pDC) - -win32print.ClosePrinter(p) -win32gui.DeleteObject(dcBM) -win32gui.DeleteObject(pcBM) -win32gui.DeleteObject(hf) -win32gui.DeleteDC(dDC) -win32gui.DeleteDC(dcDC) -win32gui.DeleteDC(pDC) -win32gui.DeleteDC(pcDC) diff --git a/lib/win32/Demos/rastest.py b/lib/win32/Demos/rastest.py deleted file mode 100644 index 669a2a80..00000000 --- a/lib/win32/Demos/rastest.py +++ /dev/null @@ -1,168 +0,0 @@ -# rastest.py - test/demonstrate the win32ras module. -# Much of the code here contributed by Jethro Wright. 
- -import os -import sys - -import win32ras - -# Build a little dictionary of RAS states to decent strings. -# eg win32ras.RASCS_OpenPort -> "OpenPort" -stateMap = {} -for name, val in list(win32ras.__dict__.items()): - if name[:6] == "RASCS_": - stateMap[val] = name[6:] - -# Use a lock so the callback can tell the main thread when it is finished. -import win32event - -callbackEvent = win32event.CreateEvent(None, 0, 0, None) - - -def Callback(hras, msg, state, error, exterror): - # print "Callback called with ", hras, msg, state, error, exterror - stateName = stateMap.get(state, "Unknown state?") - print("Status is %s (%04lx), error code is %d" % (stateName, state, error)) - finished = state in [win32ras.RASCS_Connected] - if finished: - win32event.SetEvent(callbackEvent) - if error != 0 or int(state) == win32ras.RASCS_Disconnected: - # we know for sure this is a good place to hangup.... - print("Detected call failure: %s" % win32ras.GetErrorString(error)) - HangUp(hras) - win32event.SetEvent(callbackEvent) - - -def ShowConnections(): - print("All phone-book entries:") - for (name,) in win32ras.EnumEntries(): - print(" ", name) - print("Current Connections:") - for con in win32ras.EnumConnections(): - print(" ", con) - - -def EditEntry(entryName): - try: - win32ras.EditPhonebookEntry(0, None, entryName) - except win32ras.error as xxx_todo_changeme: - (rc, function, msg) = xxx_todo_changeme.args - print("Can not edit/find the RAS entry -", msg) - - -def HangUp(hras): - # trap potential, irrelevant errors from win32ras.... - try: - win32ras.HangUp(hras) - except: - print("Tried to hang up gracefully on error, but didn't work....") - return None - - -def Connect(entryName, bUseCallback): - if bUseCallback: - theCallback = Callback - win32event.ResetEvent(callbackEvent) - else: - theCallback = None - # in order to *use* the username/password of a particular dun entry, one must - # explicitly get those params under win95.... 
- try: - dp, b = win32ras.GetEntryDialParams(None, entryName) - except: - print("Couldn't find DUN entry: %s" % entryName) - else: - hras, rc = win32ras.Dial( - None, None, (entryName, "", "", dp[3], dp[4], ""), theCallback - ) - # hras, rc = win32ras.Dial(None, None, (entryName, ),theCallback) - # print hras, rc - if not bUseCallback and rc != 0: - print("Could not dial the RAS connection:", win32ras.GetErrorString(rc)) - hras = HangUp(hras) - # don't wait here if there's no need to.... - elif ( - bUseCallback - and win32event.WaitForSingleObject(callbackEvent, 60000) - != win32event.WAIT_OBJECT_0 - ): - print("Gave up waiting for the process to complete!") - # sdk docs state one must explcitly hangup, even if there's an error.... - try: - cs = win32ras.GetConnectStatus(hras) - except: - # on error, attempt a hang up anyway.... - hras = HangUp(hras) - else: - if int(cs[0]) == win32ras.RASCS_Disconnected: - hras = HangUp(hras) - return hras, rc - - -def Disconnect(rasEntry): - # Need to find the entry - name = rasEntry.lower() - for hcon, entryName, devName, devType in win32ras.EnumConnections(): - if entryName.lower() == name: - win32ras.HangUp(hcon) - print("Disconnected from", rasEntry) - break - else: - print("Could not find an open connection to", entryName) - - -usage = """ -Usage: %s [-s] [-l] [-c connection] [-d connection] --l : List phone-book entries and current connections. --s : Show status while connecting/disconnecting (uses callbacks) --c : Connect to the specified phonebook name. --d : Disconnect from the specified phonebook name. --e : Edit the specified phonebook entry. 
-""" - - -def main(): - import getopt - - try: - opts, args = getopt.getopt(sys.argv[1:], "slc:d:e:") - except getopt.error as why: - print(why) - print( - usage - % ( - os.path.basename( - sys.argv[0], - ) - ) - ) - return - - bCallback = 0 - if args or not opts: - print( - usage - % ( - os.path.basename( - sys.argv[0], - ) - ) - ) - return - for opt, val in opts: - if opt == "-s": - bCallback = 1 - if opt == "-l": - ShowConnections() - if opt == "-c": - hras, rc = Connect(val, bCallback) - if hras != None: - print("hras: 0x%8lx, rc: 0x%04x" % (hras, rc)) - if opt == "-d": - Disconnect(val) - if opt == "-e": - EditEntry(val) - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/security/GetTokenInformation.py b/lib/win32/Demos/security/GetTokenInformation.py deleted file mode 100644 index 192c549e..00000000 --- a/lib/win32/Demos/security/GetTokenInformation.py +++ /dev/null @@ -1,110 +0,0 @@ -""" Lists various types of information about current user's access token, - including UAC status on Vista -""" - -import pywintypes -import win32api -import win32con -import win32security -import winerror -from security_enums import ( - SECURITY_IMPERSONATION_LEVEL, - TOKEN_ELEVATION_TYPE, - TOKEN_GROUP_ATTRIBUTES, - TOKEN_PRIVILEGE_ATTRIBUTES, - TOKEN_TYPE, -) - - -def dump_token(th): - token_type = win32security.GetTokenInformation(th, win32security.TokenType) - print("TokenType:", token_type, TOKEN_TYPE.lookup_name(token_type)) - if token_type == win32security.TokenImpersonation: - imp_lvl = win32security.GetTokenInformation( - th, win32security.TokenImpersonationLevel - ) - print( - "TokenImpersonationLevel:", - imp_lvl, - SECURITY_IMPERSONATION_LEVEL.lookup_name(imp_lvl), - ) - - print( - "TokenSessionId:", - win32security.GetTokenInformation(th, win32security.TokenSessionId), - ) - - privs = win32security.GetTokenInformation(th, win32security.TokenPrivileges) - print("TokenPrivileges:") - for priv_luid, priv_flags in privs: - flag_names, unk = 
TOKEN_PRIVILEGE_ATTRIBUTES.lookup_flags(priv_flags) - flag_desc = " ".join(flag_names) - if unk: - flag_desc += "(" + str(unk) + ")" - - priv_name = win32security.LookupPrivilegeName("", priv_luid) - priv_desc = win32security.LookupPrivilegeDisplayName("", priv_name) - print("\t", priv_name, priv_desc, priv_flags, flag_desc) - - print("TokenGroups:") - groups = win32security.GetTokenInformation(th, win32security.TokenGroups) - for group_sid, group_attr in groups: - flag_names, unk = TOKEN_GROUP_ATTRIBUTES.lookup_flags(group_attr) - flag_desc = " ".join(flag_names) - if unk: - flag_desc += "(" + str(unk) + ")" - if group_attr & TOKEN_GROUP_ATTRIBUTES.SE_GROUP_LOGON_ID: - sid_desc = "Logon sid" - else: - sid_desc = win32security.LookupAccountSid("", group_sid) - print("\t", group_sid, sid_desc, group_attr, flag_desc) - - ## Vista token information types, will throw (87, 'GetTokenInformation', 'The parameter is incorrect.') on earier OS - try: - is_elevated = win32security.GetTokenInformation( - th, win32security.TokenElevation - ) - print("TokenElevation:", is_elevated) - except pywintypes.error as details: - if details.winerror != winerror.ERROR_INVALID_PARAMETER: - raise - return None - print( - "TokenHasRestrictions:", - win32security.GetTokenInformation(th, win32security.TokenHasRestrictions), - ) - print( - "TokenMandatoryPolicy", - win32security.GetTokenInformation(th, win32security.TokenMandatoryPolicy), - ) - print( - "TokenVirtualizationAllowed:", - win32security.GetTokenInformation(th, win32security.TokenVirtualizationAllowed), - ) - print( - "TokenVirtualizationEnabled:", - win32security.GetTokenInformation(th, win32security.TokenVirtualizationEnabled), - ) - - elevation_type = win32security.GetTokenInformation( - th, win32security.TokenElevationType - ) - print( - "TokenElevationType:", - elevation_type, - TOKEN_ELEVATION_TYPE.lookup_name(elevation_type), - ) - if elevation_type != win32security.TokenElevationTypeDefault: - lt = 
win32security.GetTokenInformation(th, win32security.TokenLinkedToken) - print("TokenLinkedToken:", lt) - else: - lt = None - return lt - - -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken(ph, win32con.MAXIMUM_ALLOWED) -lt = dump_token(th) -if lt: - print("\n\nlinked token info:") - dump_token(lt) diff --git a/lib/win32/Demos/security/account_rights.py b/lib/win32/Demos/security/account_rights.py deleted file mode 100644 index 357b789a..00000000 --- a/lib/win32/Demos/security/account_rights.py +++ /dev/null @@ -1,51 +0,0 @@ -import ntsecuritycon -import win32api -import win32con -import win32file -import win32security -from security_enums import ACCESS_MODE, ACE_FLAGS, TRUSTEE_FORM, TRUSTEE_TYPE - -new_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", "SeEnableDelegationPrivilege"), - win32con.SE_PRIVILEGE_ENABLED, - ), ##doesn't seem to be in ntsecuritycon.py ? 
-) - -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS -) ##win32con.TOKEN_ADJUST_PRIVILEGES) -win32security.AdjustTokenPrivileges(th, 0, new_privs) - -policy_handle = win32security.GetPolicyHandle("", win32security.POLICY_ALL_ACCESS) -tmp_sid = win32security.LookupAccountName("", "tmp")[0] - -privs = [ - ntsecuritycon.SE_DEBUG_NAME, - ntsecuritycon.SE_TCB_NAME, - ntsecuritycon.SE_RESTORE_NAME, - ntsecuritycon.SE_REMOTE_SHUTDOWN_NAME, -] -win32security.LsaAddAccountRights(policy_handle, tmp_sid, privs) - -privlist = win32security.LsaEnumerateAccountRights(policy_handle, tmp_sid) -for priv in privlist: - print(priv) - -privs = [ntsecuritycon.SE_DEBUG_NAME, ntsecuritycon.SE_TCB_NAME] -win32security.LsaRemoveAccountRights(policy_handle, tmp_sid, 0, privs) - -privlist = win32security.LsaEnumerateAccountRights(policy_handle, tmp_sid) -for priv in privlist: - print(priv) - -win32security.LsaClose(policy_handle) diff --git a/lib/win32/Demos/security/explicit_entries.py b/lib/win32/Demos/security/explicit_entries.py deleted file mode 100644 index b04f4517..00000000 --- a/lib/win32/Demos/security/explicit_entries.py +++ /dev/null @@ -1,171 +0,0 @@ -import os - -import ntsecuritycon -import win32api -import win32con -import win32file -import win32security -from security_enums import ACCESS_MODE, ACE_FLAGS, TRUSTEE_FORM, TRUSTEE_TYPE - -fname = os.path.join(win32api.GetTempPath(), "win32security_test.txt") -f = open(fname, "w") -f.write("Hello from Python\n") -f.close() -print("Testing on file", fname) - -new_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SHUTDOWN_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", 
ntsecuritycon.SE_TAKE_OWNERSHIP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", "SeEnableDelegationPrivilege"), - win32con.SE_PRIVILEGE_ENABLED, - ), ##doesn't seem to be in ntsecuritycon.py ? -) - -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS -) ##win32con.TOKEN_ADJUST_PRIVILEGES) -win32security.AdjustTokenPrivileges(th, 0, new_privs) - -all_security_info = ( - win32security.OWNER_SECURITY_INFORMATION - | win32security.GROUP_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION - | win32security.SACL_SECURITY_INFORMATION -) - -sd = win32security.GetFileSecurity(fname, all_security_info) - -old_sacl = sd.GetSecurityDescriptorSacl() -if old_sacl == None: - old_sacl = win32security.ACL() -old_dacl = sd.GetSecurityDescriptorDacl() -if old_dacl == None: - old_dacl = win32security.ACL() - -my_sid = win32security.GetTokenInformation(th, ntsecuritycon.TokenUser)[0] -tmp_sid = win32security.LookupAccountName("", "tmp")[0] -pwr_sid = win32security.LookupAccountName("", "Power Users")[0] - - -## MultipleTrustee,MultipleTrusteeOperation,TrusteeForm,TrusteeType,Identifier -## first two are ignored -my_trustee = {} -my_trustee["MultipleTrustee"] = None -my_trustee["MultipleTrusteeOperation"] = 0 -my_trustee["TrusteeForm"] = TRUSTEE_FORM.TRUSTEE_IS_SID -my_trustee["TrusteeType"] = TRUSTEE_TYPE.TRUSTEE_IS_USER -my_trustee["Identifier"] = my_sid - -tmp_trustee = {} -tmp_trustee["MultipleTrustee"] = None -tmp_trustee["MultipleTrusteeOperation"] = 0 -tmp_trustee["TrusteeForm"] = TRUSTEE_FORM.TRUSTEE_IS_NAME -tmp_trustee["TrusteeType"] = TRUSTEE_TYPE.TRUSTEE_IS_USER -tmp_trustee["Identifier"] = "rupole\\tmp" - -pwr_trustee = {} -pwr_trustee["MultipleTrustee"] = None -pwr_trustee["MultipleTrusteeOperation"] = 0 -pwr_trustee["TrusteeForm"] = 
TRUSTEE_FORM.TRUSTEE_IS_SID -pwr_trustee["TrusteeType"] = TRUSTEE_TYPE.TRUSTEE_IS_USER -pwr_trustee["Identifier"] = pwr_sid - -expl_list = [] -expl_list.append( - { - "Trustee": my_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.SET_AUDIT_SUCCESS, ##|ACCESS_MODE.SET_AUDIT_FAILURE, - "AccessPermissions": win32con.GENERIC_ALL, - } -) - -expl_list.append( - { - "Trustee": my_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.SET_AUDIT_FAILURE, - "AccessPermissions": win32con.GENERIC_ALL, - } -) - -expl_list.append( - { - "Trustee": tmp_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.SET_AUDIT_SUCCESS, - "AccessPermissions": win32con.GENERIC_ALL, - } -) - -expl_list.append( - { - "Trustee": tmp_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.SET_AUDIT_FAILURE, - "AccessPermissions": win32con.GENERIC_ALL, - } -) -old_sacl.SetEntriesInAcl(expl_list) - -expl_list = [] -expl_list.append( - { - "Trustee": tmp_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.DENY_ACCESS, - "AccessPermissions": win32con.DELETE, - } -) - -expl_list.append( - { - "Trustee": tmp_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.GRANT_ACCESS, - "AccessPermissions": win32con.WRITE_OWNER, - } -) -expl_list.append( - { - "Trustee": pwr_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.GRANT_ACCESS, - "AccessPermissions": win32con.GENERIC_READ, - } -) -expl_list.append( - { - "Trustee": my_trustee, - "Inheritance": ACE_FLAGS.NO_INHERITANCE, - "AccessMode": ACCESS_MODE.GRANT_ACCESS, - "AccessPermissions": win32con.GENERIC_ALL, - } -) - -old_dacl.SetEntriesInAcl(expl_list) -sd.SetSecurityDescriptorSacl(1, old_sacl, 1) -sd.SetSecurityDescriptorDacl(1, old_dacl, 1) -sd.SetSecurityDescriptorOwner(pwr_sid, 1) - -win32security.SetFileSecurity(fname, all_security_info, sd) diff --git 
a/lib/win32/Demos/security/get_policy_info.py b/lib/win32/Demos/security/get_policy_info.py deleted file mode 100644 index 1eba9cd0..00000000 --- a/lib/win32/Demos/security/get_policy_info.py +++ /dev/null @@ -1,42 +0,0 @@ -import ntsecuritycon -import win32api -import win32file -import win32security - -policy_handle = win32security.GetPolicyHandle("rupole", win32security.POLICY_ALL_ACCESS) - -## mod_nbr, mod_time = win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyModificationInformation) -## print mod_nbr, mod_time - -( - domain_name, - dns_domain_name, - dns_forest_name, - domain_guid, - domain_sid, -) = win32security.LsaQueryInformationPolicy( - policy_handle, win32security.PolicyDnsDomainInformation -) -print(domain_name, dns_domain_name, dns_forest_name, domain_guid, domain_sid) - -event_audit_info = win32security.LsaQueryInformationPolicy( - policy_handle, win32security.PolicyAuditEventsInformation -) -print(event_audit_info) - -domain_name, sid = win32security.LsaQueryInformationPolicy( - policy_handle, win32security.PolicyPrimaryDomainInformation -) -print(domain_name, sid) - -domain_name, sid = win32security.LsaQueryInformationPolicy( - policy_handle, win32security.PolicyAccountDomainInformation -) -print(domain_name, sid) - -server_role = win32security.LsaQueryInformationPolicy( - policy_handle, win32security.PolicyLsaServerRoleInformation -) -print("server role: ", server_role) - -win32security.LsaClose(policy_handle) diff --git a/lib/win32/Demos/security/list_rights.py b/lib/win32/Demos/security/list_rights.py deleted file mode 100644 index 780008a2..00000000 --- a/lib/win32/Demos/security/list_rights.py +++ /dev/null @@ -1,37 +0,0 @@ -import ntsecuritycon -import win32api -import win32con -import win32file -import win32security -from security_enums import ACCESS_MODE, ACE_FLAGS, TRUSTEE_FORM, TRUSTEE_TYPE - -new_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SECURITY_NAME), - 
win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", "SeEnableDelegationPrivilege"), - win32con.SE_PRIVILEGE_ENABLED, - ), ##doesn't seem to be in ntsecuritycon.py ? -) - -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS -) ##win32con.TOKEN_ADJUST_PRIVILEGES) -win32security.AdjustTokenPrivileges(th, 0, new_privs) - -policy_handle = win32security.GetPolicyHandle("", win32security.POLICY_ALL_ACCESS) - -sidlist = win32security.LsaEnumerateAccountsWithUserRight( - policy_handle, ntsecuritycon.SE_RESTORE_NAME -) -for sid in sidlist: - print(win32security.LookupAccountSid("", sid)) - -win32security.LsaClose(policy_handle) diff --git a/lib/win32/Demos/security/localized_names.py b/lib/win32/Demos/security/localized_names.py deleted file mode 100644 index 30a0bde6..00000000 --- a/lib/win32/Demos/security/localized_names.py +++ /dev/null @@ -1,65 +0,0 @@ -# A Python port of the MS knowledge base article Q157234 -# "How to deal with localized and renamed user and group names" -# http://support.microsoft.com/default.aspx?kbid=157234 - -import sys - -import pywintypes -from ntsecuritycon import * -from win32net import NetUserModalsGet -from win32security import LookupAccountSid - - -def LookupAliasFromRid(TargetComputer, Rid): - # Sid is the same regardless of machine, since the well-known - # BUILTIN domain is referenced. 
- sid = pywintypes.SID() - sid.Initialize(SECURITY_NT_AUTHORITY, 2) - - for i, r in enumerate((SECURITY_BUILTIN_DOMAIN_RID, Rid)): - sid.SetSubAuthority(i, r) - - name, domain, typ = LookupAccountSid(TargetComputer, sid) - return name - - -def LookupUserGroupFromRid(TargetComputer, Rid): - # get the account domain Sid on the target machine - # note: if you were looking up multiple sids based on the same - # account domain, only need to call this once. - umi2 = NetUserModalsGet(TargetComputer, 2) - domain_sid = umi2["domain_id"] - - SubAuthorityCount = domain_sid.GetSubAuthorityCount() - - # create and init new sid with acct domain Sid + acct Rid - sid = pywintypes.SID() - sid.Initialize(domain_sid.GetSidIdentifierAuthority(), SubAuthorityCount + 1) - - # copy existing subauthorities from account domain Sid into - # new Sid - for i in range(SubAuthorityCount): - sid.SetSubAuthority(i, domain_sid.GetSubAuthority(i)) - - # append Rid to new Sid - sid.SetSubAuthority(SubAuthorityCount, Rid) - - name, domain, typ = LookupAccountSid(TargetComputer, sid) - return name - - -def main(): - if len(sys.argv) == 2: - targetComputer = sys.argv[1] - else: - targetComputer = None - - name = LookupUserGroupFromRid(targetComputer, DOMAIN_USER_RID_ADMIN) - print("'Administrator' user name = %s" % (name,)) - - name = LookupAliasFromRid(targetComputer, DOMAIN_ALIAS_RID_ADMINS) - print("'Administrators' local group/alias name = %s" % (name,)) - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/security/lsaregevent.py b/lib/win32/Demos/security/lsaregevent.py deleted file mode 100644 index 38e7adc9..00000000 --- a/lib/win32/Demos/security/lsaregevent.py +++ /dev/null @@ -1,14 +0,0 @@ -import win32event -import win32security - -evt = win32event.CreateEvent(None, 0, 0, None) -win32security.LsaRegisterPolicyChangeNotification( - win32security.PolicyNotifyAuditEventsInformation, evt -) -print("Waiting for you change Audit policy in Management console ...") -ret_code = 
win32event.WaitForSingleObject(evt, 1000000000) -## should come back when you change Audit policy in Management console ... -print(ret_code) -win32security.LsaUnregisterPolicyChangeNotification( - win32security.PolicyNotifyAuditEventsInformation, evt -) diff --git a/lib/win32/Demos/security/lsastore.py b/lib/win32/Demos/security/lsastore.py deleted file mode 100644 index dd9ff384..00000000 --- a/lib/win32/Demos/security/lsastore.py +++ /dev/null @@ -1,12 +0,0 @@ -import win32security - -policy_handle = win32security.GetPolicyHandle("", win32security.POLICY_ALL_ACCESS) -privatedata = "some sensitive data" -keyname = "tmp" -win32security.LsaStorePrivateData(policy_handle, keyname, privatedata) -retrieveddata = win32security.LsaRetrievePrivateData(policy_handle, keyname) -assert retrieveddata == privatedata - -## passing None deletes key -win32security.LsaStorePrivateData(policy_handle, keyname, None) -win32security.LsaClose(policy_handle) diff --git a/lib/win32/Demos/security/query_information.py b/lib/win32/Demos/security/query_information.py deleted file mode 100644 index 9fa3e4a9..00000000 --- a/lib/win32/Demos/security/query_information.py +++ /dev/null @@ -1,25 +0,0 @@ -import win32api -import win32security -import winerror -from ntsecuritycon import * - - -# This is a Python implementation of win32api.GetDomainName() -def GetDomainName(): - try: - tok = win32security.OpenThreadToken(win32api.GetCurrentThread(), TOKEN_QUERY, 1) - except win32api.error as details: - if details[0] != winerror.ERROR_NO_TOKEN: - raise - # attempt to open the process token, since no thread token - # exists - tok = win32security.OpenProcessToken(win32api.GetCurrentProcess(), TOKEN_QUERY) - sid, attr = win32security.GetTokenInformation(tok, TokenUser) - win32api.CloseHandle(tok) - - name, dom, typ = win32security.LookupAccountSid(None, sid) - return dom - - -if __name__ == "__main__": - print("Domain name is", GetDomainName()) diff --git a/lib/win32/Demos/security/regsave_sa.py 
b/lib/win32/Demos/security/regsave_sa.py deleted file mode 100644 index 29a6ac10..00000000 --- a/lib/win32/Demos/security/regsave_sa.py +++ /dev/null @@ -1,61 +0,0 @@ -fname = "h:\\tmp.reg" - -import os - -import ntsecuritycon -import pywintypes -import win32api -import win32con -import win32security - -## regsave will not overwrite a file -if os.path.isfile(fname): - os.remove(fname) - -new_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_TCB_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_BACKUP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS | win32con.TOKEN_ADJUST_PRIVILEGES -) -win32security.AdjustTokenPrivileges(th, 0, new_privs) -my_sid = win32security.GetTokenInformation(th, ntsecuritycon.TokenUser)[0] - -hklm = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, None, 0, win32con.KEY_ALL_ACCESS -) -skey = win32api.RegOpenKey(hklm, "SYSTEM", 0, win32con.KEY_ALL_ACCESS) - -sa = pywintypes.SECURITY_ATTRIBUTES() -sd = pywintypes.SECURITY_DESCRIPTOR() -sa.SECURITY_DESCRIPTOR = sd -acl = pywintypes.ACL() - -pwr_sid = win32security.LookupAccountName("", "Power Users")[0] -acl.AddAccessAllowedAce( - win32con.ACL_REVISION, - win32con.GENERIC_READ | win32con.ACCESS_SYSTEM_SECURITY, - my_sid, -) -sd.SetSecurityDescriptorDacl(1, acl, 0) -sd.SetSecurityDescriptorOwner(pwr_sid, 0) -sa.bInheritHandle = 1 -assert sa.SECURITY_DESCRIPTOR is sd - -win32api.RegSaveKey(skey, fname, sa) diff --git a/lib/win32/Demos/security/regsecurity.py b/lib/win32/Demos/security/regsecurity.py deleted file mode 100644 index 3bac65c0..00000000 --- 
a/lib/win32/Demos/security/regsecurity.py +++ /dev/null @@ -1,36 +0,0 @@ -import ntsecuritycon -import win32api -import win32con -import win32security - -new_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_TCB_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS | win32con.TOKEN_ADJUST_PRIVILEGES -) - -win32security.AdjustTokenPrivileges(th, 0, new_privs) -hkey = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, None, 0, win32con.KEY_ALL_ACCESS -) -win32api.RegCreateKey(hkey, "SYSTEM\\NOTMP") -notmpkey = win32api.RegOpenKey( - hkey, "SYSTEM\\notmp", 0, win32con.ACCESS_SYSTEM_SECURITY -) - -tmp_sid = win32security.LookupAccountName("", "tmp")[0] -sacl = win32security.ACL() -sacl.AddAuditAccessAce(win32security.ACL_REVISION, win32con.GENERIC_ALL, tmp_sid, 1, 1) - -sd = win32security.SECURITY_DESCRIPTOR() -sd.SetSecurityDescriptorSacl(1, sacl, 1) -win32api.RegSetKeySecurity(notmpkey, win32con.SACL_SECURITY_INFORMATION, sd) diff --git a/lib/win32/Demos/security/sa_inherit.py b/lib/win32/Demos/security/sa_inherit.py deleted file mode 100644 index a2855e50..00000000 --- a/lib/win32/Demos/security/sa_inherit.py +++ /dev/null @@ -1,8 +0,0 @@ -import pywintypes -import win32security - -sa = pywintypes.SECURITY_ATTRIBUTES() -tmp_sid = win32security.LookupAccountName("", "tmp")[0] -sa.SetSecurityDescriptorOwner(tmp_sid, 0) -sid = sa.SECURITY_DESCRIPTOR.GetSecurityDescriptorOwner() -print(win32security.LookupAccountSid("", sid)) diff --git a/lib/win32/Demos/security/security_enums.py b/lib/win32/Demos/security/security_enums.py deleted file mode 100644 index ab3cc6d3..00000000 --- a/lib/win32/Demos/security/security_enums.py +++ /dev/null @@ -1,336 +0,0 @@ -import ntsecuritycon -import win32security -import winnt - - -class Enum: - 
def __init__(self, *const_names): - """Accepts variable number of constant names that can be found in either - win32security, ntsecuritycon, or winnt.""" - for const_name in const_names: - try: - const_val = getattr(win32security, const_name) - except AttributeError: - try: - const_val = getattr(ntsecuritycon, const_name) - except AttributeError: - try: - const_val = getattr(winnt, const_name) - except AttributeError: - raise AttributeError( - 'Constant "%s" not found in win32security, ntsecuritycon, or winnt.' - % const_name - ) - setattr(self, const_name, const_val) - - def lookup_name(self, const_val): - """Looks up the name of a particular value.""" - for k, v in self.__dict__.items(): - if v == const_val: - return k - raise AttributeError("Value %s not found in enum" % const_val) - - def lookup_flags(self, flags): - """Returns the names of all recognized flags in input, and any flags not found in the enum.""" - flag_names = [] - unknown_flags = flags - for k, v in self.__dict__.items(): - if flags & v == v: - flag_names.append(k) - unknown_flags = unknown_flags & ~v - return flag_names, unknown_flags - - -TOKEN_INFORMATION_CLASS = Enum( - "TokenUser", - "TokenGroups", - "TokenPrivileges", - "TokenOwner", - "TokenPrimaryGroup", - "TokenDefaultDacl", - "TokenSource", - "TokenType", - "TokenImpersonationLevel", - "TokenStatistics", - "TokenRestrictedSids", - "TokenSessionId", - "TokenGroupsAndPrivileges", - "TokenSessionReference", - "TokenSandBoxInert", - "TokenAuditPolicy", - "TokenOrigin", - "TokenElevationType", - "TokenLinkedToken", - "TokenElevation", - "TokenHasRestrictions", - "TokenAccessInformation", - "TokenVirtualizationAllowed", - "TokenVirtualizationEnabled", - "TokenIntegrityLevel", - "TokenUIAccess", - "TokenMandatoryPolicy", - "TokenLogonSid", -) - -TOKEN_TYPE = Enum("TokenPrimary", "TokenImpersonation") - -TOKEN_ELEVATION_TYPE = Enum( - "TokenElevationTypeDefault", "TokenElevationTypeFull", "TokenElevationTypeLimited" -) - 
-POLICY_AUDIT_EVENT_TYPE = Enum( - "AuditCategorySystem", - "AuditCategoryLogon", - "AuditCategoryObjectAccess", - "AuditCategoryPrivilegeUse", - "AuditCategoryDetailedTracking", - "AuditCategoryPolicyChange", - "AuditCategoryAccountManagement", - "AuditCategoryDirectoryServiceAccess", - "AuditCategoryAccountLogon", -) - -POLICY_INFORMATION_CLASS = Enum( - "PolicyAuditLogInformation", - "PolicyAuditEventsInformation", - "PolicyPrimaryDomainInformation", - "PolicyPdAccountInformation", - "PolicyAccountDomainInformation", - "PolicyLsaServerRoleInformation", - "PolicyReplicaSourceInformation", - "PolicyDefaultQuotaInformation", - "PolicyModificationInformation", - "PolicyAuditFullSetInformation", - "PolicyAuditFullQueryInformation", - "PolicyDnsDomainInformation", -) - -POLICY_LSA_SERVER_ROLE = Enum("PolicyServerRoleBackup", "PolicyServerRolePrimary") - -## access modes for opening a policy handle - this is not a real enum -POLICY_ACCESS_MODES = Enum( - "POLICY_VIEW_LOCAL_INFORMATION", - "POLICY_VIEW_AUDIT_INFORMATION", - "POLICY_GET_PRIVATE_INFORMATION", - "POLICY_TRUST_ADMIN", - "POLICY_CREATE_ACCOUNT", - "POLICY_CREATE_SECRET", - "POLICY_CREATE_PRIVILEGE", - "POLICY_SET_DEFAULT_QUOTA_LIMITS", - "POLICY_SET_AUDIT_REQUIREMENTS", - "POLICY_AUDIT_LOG_ADMIN", - "POLICY_SERVER_ADMIN", - "POLICY_LOOKUP_NAMES", - "POLICY_NOTIFICATION", - "POLICY_ALL_ACCESS", - "POLICY_READ", - "POLICY_WRITE", - "POLICY_EXECUTE", -) - -## EventAuditingOptions flags - not a real enum -POLICY_AUDIT_EVENT_OPTIONS_FLAGS = Enum( - "POLICY_AUDIT_EVENT_UNCHANGED", - "POLICY_AUDIT_EVENT_SUCCESS", - "POLICY_AUDIT_EVENT_FAILURE", - "POLICY_AUDIT_EVENT_NONE", -) - -# AceType in ACE_HEADER - not a real enum -ACE_TYPE = Enum( - "ACCESS_MIN_MS_ACE_TYPE", - "ACCESS_ALLOWED_ACE_TYPE", - "ACCESS_DENIED_ACE_TYPE", - "SYSTEM_AUDIT_ACE_TYPE", - "SYSTEM_ALARM_ACE_TYPE", - "ACCESS_MAX_MS_V2_ACE_TYPE", - "ACCESS_ALLOWED_COMPOUND_ACE_TYPE", - "ACCESS_MAX_MS_V3_ACE_TYPE", - "ACCESS_MIN_MS_OBJECT_ACE_TYPE", - 
"ACCESS_ALLOWED_OBJECT_ACE_TYPE", - "ACCESS_DENIED_OBJECT_ACE_TYPE", - "SYSTEM_AUDIT_OBJECT_ACE_TYPE", - "SYSTEM_ALARM_OBJECT_ACE_TYPE", - "ACCESS_MAX_MS_OBJECT_ACE_TYPE", - "ACCESS_MAX_MS_V4_ACE_TYPE", - "ACCESS_MAX_MS_ACE_TYPE", - "ACCESS_ALLOWED_CALLBACK_ACE_TYPE", - "ACCESS_DENIED_CALLBACK_ACE_TYPE", - "ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE", - "ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE", - "SYSTEM_AUDIT_CALLBACK_ACE_TYPE", - "SYSTEM_ALARM_CALLBACK_ACE_TYPE", - "SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE", - "SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE", - "SYSTEM_MANDATORY_LABEL_ACE_TYPE", - "ACCESS_MAX_MS_V5_ACE_TYPE", -) - -# bit flags for AceFlags - not a real enum -ACE_FLAGS = Enum( - "CONTAINER_INHERIT_ACE", - "FAILED_ACCESS_ACE_FLAG", - "INHERIT_ONLY_ACE", - "INHERITED_ACE", - "NO_PROPAGATE_INHERIT_ACE", - "OBJECT_INHERIT_ACE", - "SUCCESSFUL_ACCESS_ACE_FLAG", - "NO_INHERITANCE", - "SUB_CONTAINERS_AND_OBJECTS_INHERIT", - "SUB_CONTAINERS_ONLY_INHERIT", - "SUB_OBJECTS_ONLY_INHERIT", -) - -# used in SetEntriesInAcl - very similar to ACE_TYPE -ACCESS_MODE = Enum( - "NOT_USED_ACCESS", - "GRANT_ACCESS", - "SET_ACCESS", - "DENY_ACCESS", - "REVOKE_ACCESS", - "SET_AUDIT_SUCCESS", - "SET_AUDIT_FAILURE", -) - -# Bit flags in PSECURITY_DESCRIPTOR->Control - not a real enum -SECURITY_DESCRIPTOR_CONTROL_FLAGS = Enum( - "SE_DACL_AUTO_INHERITED", ## win2k and up - "SE_SACL_AUTO_INHERITED", ## win2k and up - "SE_DACL_PROTECTED", ## win2k and up - "SE_SACL_PROTECTED", ## win2k and up - "SE_DACL_DEFAULTED", - "SE_DACL_PRESENT", - "SE_GROUP_DEFAULTED", - "SE_OWNER_DEFAULTED", - "SE_SACL_PRESENT", - "SE_SELF_RELATIVE", - "SE_SACL_DEFAULTED", -) - -# types of SID -SID_NAME_USE = Enum( - "SidTypeUser", - "SidTypeGroup", - "SidTypeDomain", - "SidTypeAlias", - "SidTypeWellKnownGroup", - "SidTypeDeletedAccount", - "SidTypeInvalid", - "SidTypeUnknown", - "SidTypeComputer", - "SidTypeLabel", -) - -## bit flags, not a real enum -TOKEN_ACCESS_PRIVILEGES = Enum( - "TOKEN_ADJUST_DEFAULT", - 
"TOKEN_ADJUST_GROUPS", - "TOKEN_ADJUST_PRIVILEGES", - "TOKEN_ALL_ACCESS", - "TOKEN_ASSIGN_PRIMARY", - "TOKEN_DUPLICATE", - "TOKEN_EXECUTE", - "TOKEN_IMPERSONATE", - "TOKEN_QUERY", - "TOKEN_QUERY_SOURCE", - "TOKEN_READ", - "TOKEN_WRITE", -) - -SECURITY_IMPERSONATION_LEVEL = Enum( - "SecurityAnonymous", - "SecurityIdentification", - "SecurityImpersonation", - "SecurityDelegation", -) - -POLICY_SERVER_ENABLE_STATE = Enum("PolicyServerEnabled", "PolicyServerDisabled") - -POLICY_NOTIFICATION_INFORMATION_CLASS = Enum( - "PolicyNotifyAuditEventsInformation", - "PolicyNotifyAccountDomainInformation", - "PolicyNotifyServerRoleInformation", - "PolicyNotifyDnsDomainInformation", - "PolicyNotifyDomainEfsInformation", - "PolicyNotifyDomainKerberosTicketInformation", - "PolicyNotifyMachineAccountPasswordInformation", -) - -TRUSTED_INFORMATION_CLASS = Enum( - "TrustedDomainNameInformation", - "TrustedControllersInformation", - "TrustedPosixOffsetInformation", - "TrustedPasswordInformation", - "TrustedDomainInformationBasic", - "TrustedDomainInformationEx", - "TrustedDomainAuthInformation", - "TrustedDomainFullInformation", - "TrustedDomainAuthInformationInternal", - "TrustedDomainFullInformationInternal", - "TrustedDomainInformationEx2Internal", - "TrustedDomainFullInformation2Internal", -) - -TRUSTEE_FORM = Enum( - "TRUSTEE_IS_SID", - "TRUSTEE_IS_NAME", - "TRUSTEE_BAD_FORM", - "TRUSTEE_IS_OBJECTS_AND_SID", - "TRUSTEE_IS_OBJECTS_AND_NAME", -) - -TRUSTEE_TYPE = Enum( - "TRUSTEE_IS_UNKNOWN", - "TRUSTEE_IS_USER", - "TRUSTEE_IS_GROUP", - "TRUSTEE_IS_DOMAIN", - "TRUSTEE_IS_ALIAS", - "TRUSTEE_IS_WELL_KNOWN_GROUP", - "TRUSTEE_IS_DELETED", - "TRUSTEE_IS_INVALID", - "TRUSTEE_IS_COMPUTER", -) - -## SE_OBJECT_TYPE - securable objects -SE_OBJECT_TYPE = Enum( - "SE_UNKNOWN_OBJECT_TYPE", - "SE_FILE_OBJECT", - "SE_SERVICE", - "SE_PRINTER", - "SE_REGISTRY_KEY", - "SE_LMSHARE", - "SE_KERNEL_OBJECT", - "SE_WINDOW_OBJECT", - "SE_DS_OBJECT", - "SE_DS_OBJECT_ALL", - "SE_PROVIDER_DEFINED_OBJECT", - 
"SE_WMIGUID_OBJECT", - "SE_REGISTRY_WOW64_32KEY", -) - -PRIVILEGE_FLAGS = Enum( - "SE_PRIVILEGE_ENABLED_BY_DEFAULT", - "SE_PRIVILEGE_ENABLED", - "SE_PRIVILEGE_USED_FOR_ACCESS", -) - -# Group flags used with TokenGroups -TOKEN_GROUP_ATTRIBUTES = Enum( - "SE_GROUP_MANDATORY", - "SE_GROUP_ENABLED_BY_DEFAULT", - "SE_GROUP_ENABLED", - "SE_GROUP_OWNER", - "SE_GROUP_USE_FOR_DENY_ONLY", - "SE_GROUP_INTEGRITY", - "SE_GROUP_INTEGRITY_ENABLED", - "SE_GROUP_LOGON_ID", - "SE_GROUP_RESOURCE", -) - -# Privilege flags returned by TokenPrivileges -TOKEN_PRIVILEGE_ATTRIBUTES = Enum( - "SE_PRIVILEGE_ENABLED_BY_DEFAULT", - "SE_PRIVILEGE_ENABLED", - "SE_PRIVILEGE_REMOVED", - "SE_PRIVILEGE_USED_FOR_ACCESS", -) diff --git a/lib/win32/Demos/security/set_file_audit.py b/lib/win32/Demos/security/set_file_audit.py deleted file mode 100644 index 324d7274..00000000 --- a/lib/win32/Demos/security/set_file_audit.py +++ /dev/null @@ -1,107 +0,0 @@ -import os - -import ntsecuritycon -import win32api -import win32con -import win32file -import win32security -from win32security import ( - ACL_REVISION_DS, - CONTAINER_INHERIT_ACE, - DACL_SECURITY_INFORMATION, - GROUP_SECURITY_INFORMATION, - OBJECT_INHERIT_ACE, - OWNER_SECURITY_INFORMATION, - PROTECTED_DACL_SECURITY_INFORMATION, - SACL_SECURITY_INFORMATION, - SE_FILE_OBJECT, -) - -## SE_SECURITY_NAME needed to access SACL, SE_RESTORE_NAME needed to change owner to someone other than yourself -new_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS | win32con.TOKEN_ADJUST_PRIVILEGES -) -modified_privs = win32security.AdjustTokenPrivileges(th, 0, new_privs) - -## look up a few sids that should be available on most systems -my_sid = 
win32security.GetTokenInformation(th, ntsecuritycon.TokenUser)[0] -pwr_sid = win32security.LookupAccountName("", "Power Users")[0] -admin_sid = win32security.LookupAccountName("", "Administrators")[0] -everyone_sid = win32security.LookupAccountName("", "EveryOne")[0] - -## create a dir and set security so Everyone has read permissions, and all files and subdirs inherit its ACLs -temp_dir = win32api.GetTempPath() -dir_name = win32api.GetTempFileName(temp_dir, "sfa")[0] -os.remove(dir_name) -os.mkdir(dir_name) -dir_dacl = win32security.ACL() -dir_dacl.AddAccessAllowedAceEx( - ACL_REVISION_DS, - CONTAINER_INHERIT_ACE | OBJECT_INHERIT_ACE, - win32con.GENERIC_READ, - everyone_sid, -) -## make sure current user has permissions on dir -dir_dacl.AddAccessAllowedAceEx( - ACL_REVISION_DS, - CONTAINER_INHERIT_ACE | OBJECT_INHERIT_ACE, - win32con.GENERIC_ALL, - my_sid, -) -## keep dir from inheriting any permissions so it only has ACEs explicitely set here -win32security.SetNamedSecurityInfo( - dir_name, - SE_FILE_OBJECT, - OWNER_SECURITY_INFORMATION - | GROUP_SECURITY_INFORMATION - | DACL_SECURITY_INFORMATION - | PROTECTED_DACL_SECURITY_INFORMATION, - pwr_sid, - pwr_sid, - dir_dacl, - None, -) - -## Create a file in the dir and add some specific permissions to it -fname = win32api.GetTempFileName(dir_name, "sfa")[0] -print(fname) -file_sd = win32security.GetNamedSecurityInfo( - fname, SE_FILE_OBJECT, DACL_SECURITY_INFORMATION | SACL_SECURITY_INFORMATION -) -file_dacl = file_sd.GetSecurityDescriptorDacl() -file_sacl = file_sd.GetSecurityDescriptorSacl() - -if file_dacl is None: - file_dacl = win32security.ACL() -if file_sacl is None: - file_sacl = win32security.ACL() - -file_dacl.AddAccessDeniedAce(file_dacl.GetAclRevision(), win32con.DELETE, admin_sid) -file_dacl.AddAccessDeniedAce(file_dacl.GetAclRevision(), win32con.DELETE, my_sid) -file_dacl.AddAccessAllowedAce(file_dacl.GetAclRevision(), win32con.GENERIC_ALL, pwr_sid) -file_sacl.AddAuditAccessAce( - 
file_dacl.GetAclRevision(), win32con.GENERIC_ALL, my_sid, True, True -) - -win32security.SetNamedSecurityInfo( - fname, - SE_FILE_OBJECT, - DACL_SECURITY_INFORMATION | SACL_SECURITY_INFORMATION, - None, - None, - file_dacl, - file_sacl, -) - -win32security.AdjustTokenPrivileges(th, 0, modified_privs) diff --git a/lib/win32/Demos/security/set_file_owner.py b/lib/win32/Demos/security/set_file_owner.py deleted file mode 100644 index 5a384da0..00000000 --- a/lib/win32/Demos/security/set_file_owner.py +++ /dev/null @@ -1,74 +0,0 @@ -fname = r"h:\tmp.txt" - -import ntsecuritycon -import win32api -import win32con -import win32file -import win32security - -new_privs = ( - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_SHUTDOWN_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_TCB_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_TAKE_OWNERSHIP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", ntsecuritycon.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", "SeEnableDelegationPrivilege"), - win32con.SE_PRIVILEGE_ENABLED, - ), ##doesn't seem to be in ntsecuritycon.py ? 
-) - -ph = win32api.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS | win32con.TOKEN_ADJUST_PRIVILEGES -) -win32security.AdjustTokenPrivileges(th, 0, new_privs) - -all_security_info = ( - win32security.OWNER_SECURITY_INFORMATION - | win32security.GROUP_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION - | win32security.SACL_SECURITY_INFORMATION -) - -sd = win32security.GetFileSecurity(fname, all_security_info) -old_dacl = sd.GetSecurityDescriptorDacl() -old_sacl = sd.GetSecurityDescriptorSacl() -old_group = sd.GetSecurityDescriptorGroup() - -new_sd = win32security.SECURITY_DESCRIPTOR() -print( - "relative, valid, size: ", - new_sd.IsSelfRelative(), - new_sd.IsValid(), - new_sd.GetLength(), -) - -my_sid = win32security.GetTokenInformation(th, ntsecuritycon.TokenUser)[0] -tmp_sid = win32security.LookupAccountName("", "tmp")[0] - -new_sd.SetSecurityDescriptorSacl(1, old_sacl, 1) -new_sd.SetSecurityDescriptorDacl(1, old_dacl, 1) -new_sd.SetSecurityDescriptorOwner(tmp_sid, 0) -new_sd.SetSecurityDescriptorGroup(old_group, 0) - -win32security.SetFileSecurity(fname, all_security_info, new_sd) diff --git a/lib/win32/Demos/security/set_policy_info.py b/lib/win32/Demos/security/set_policy_info.py deleted file mode 100644 index 5dd3cf81..00000000 --- a/lib/win32/Demos/security/set_policy_info.py +++ /dev/null @@ -1,28 +0,0 @@ -import ntsecuritycon -import win32api -import win32file -import win32security - -policy_handle = win32security.GetPolicyHandle("rupole", win32security.POLICY_ALL_ACCESS) - -event_audit_info = win32security.LsaQueryInformationPolicy( - policy_handle, win32security.PolicyAuditEventsInformation -) -print(event_audit_info) - -new_audit_info = list(event_audit_info[1]) -new_audit_info[win32security.AuditCategoryPolicyChange] = ( - win32security.POLICY_AUDIT_EVENT_SUCCESS | win32security.POLICY_AUDIT_EVENT_FAILURE -) -new_audit_info[win32security.AuditCategoryAccountLogon] = ( - 
win32security.POLICY_AUDIT_EVENT_SUCCESS | win32security.POLICY_AUDIT_EVENT_FAILURE -) -new_audit_info[win32security.AuditCategoryLogon] = ( - win32security.POLICY_AUDIT_EVENT_SUCCESS | win32security.POLICY_AUDIT_EVENT_FAILURE -) - -win32security.LsaSetInformationPolicy( - policy_handle, win32security.PolicyAuditEventsInformation, (1, new_audit_info) -) - -win32security.LsaClose(policy_handle) diff --git a/lib/win32/Demos/security/setkernelobjectsecurity.py b/lib/win32/Demos/security/setkernelobjectsecurity.py deleted file mode 100644 index 34c8f446..00000000 --- a/lib/win32/Demos/security/setkernelobjectsecurity.py +++ /dev/null @@ -1,135 +0,0 @@ -import win32api -import win32con -import win32process -import win32security - -## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody -## other than yourself or your primary group. Most admin logins don't have it by default, so -## enabling it may fail -new_privs = ( - ( - win32security.LookupPrivilegeValue("", win32security.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TCB_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SHUTDOWN_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TAKE_OWNERSHIP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_ENABLE_DELEGATION_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CHANGE_NOTIFY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_DEBUG_NAME), - 
win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue( - "", win32security.SE_PROF_SINGLE_PROCESS_NAME - ), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SYSTEM_PROFILE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_LOCK_MEMORY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) - -all_info = ( - win32security.OWNER_SECURITY_INFORMATION - | win32security.GROUP_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION - | win32security.SACL_SECURITY_INFORMATION -) - -pid = win32api.GetCurrentProcessId() -ph = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, pid) -## PROCESS_ALL_ACCESS does not contain ACCESS_SYSTEM_SECURITY (neccessy to do SACLs) -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS -) ##win32con.TOKEN_ADJUST_PRIVILEGES) -old_privs = win32security.GetTokenInformation(th, win32security.TokenPrivileges) -desired_privs = tuple((e[0], win32con.SE_PRIVILEGE_ENABLED) for e in old_privs) -modified_privs = win32security.AdjustTokenPrivileges( - th, 0, desired_privs -) # Will (partially) fail for new_privs (unless they are a subset of current ones) -gle = win32api.GetLastError() -if gle != 0: - print("AdjustTokenPrivileges error:", gle) -# print(modified_privs) -my_sid = win32security.GetTokenInformation(th, win32security.TokenUser)[0] -pwr_sid = win32security.LookupAccountName("", "Power Users")[0] -## reopen process with ACCESS_SYSTEM_SECURITY now that sufficent privs are enabled -ph = win32api.OpenProcess( - win32con.PROCESS_ALL_ACCESS | win32con.ACCESS_SYSTEM_SECURITY, 0, pid -) - -sd = win32security.GetKernelObjectSecurity(ph, all_info) -dacl = sd.GetSecurityDescriptorDacl() -if dacl is None: - dacl = win32security.ACL() -sacl = sd.GetSecurityDescriptorSacl() -if sacl is None: - sacl = win32security.ACL() - -dacl_ace_cnt = dacl.GetAceCount() -sacl_ace_cnt = sacl.GetAceCount() - 
-dacl.AddAccessAllowedAce( - dacl.GetAclRevision(), win32con.ACCESS_SYSTEM_SECURITY | win32con.WRITE_DAC, my_sid -) -sacl.AddAuditAccessAce(sacl.GetAclRevision(), win32con.GENERIC_ALL, my_sid, 1, 1) -sd.SetSecurityDescriptorDacl(1, dacl, 0) -sd.SetSecurityDescriptorSacl(1, sacl, 0) -sd.SetSecurityDescriptorGroup(pwr_sid, 0) -sd.SetSecurityDescriptorOwner(pwr_sid, 0) - -win32security.SetKernelObjectSecurity(ph, all_info, sd) -new_sd = win32security.GetKernelObjectSecurity(ph, all_info) - -if new_sd.GetSecurityDescriptorDacl().GetAceCount() != dacl_ace_cnt + 1: - print("New dacl doesn" "t contain extra ace ????") -if new_sd.GetSecurityDescriptorSacl().GetAceCount() != sacl_ace_cnt + 1: - print("New Sacl doesn" "t contain extra ace ????") -if ( - win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorOwner())[0] - != "Power Users" -): - print("Owner not successfully set to Power Users !!!!!") -if ( - win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorGroup())[0] - != "Power Users" -): - print("Group not successfully set to Power Users !!!!!") - -sd.SetSecurityDescriptorSacl(0, None, 0) -win32security.SetKernelObjectSecurity(ph, win32security.SACL_SECURITY_INFORMATION, sd) -new_sd_1 = win32security.GetKernelObjectSecurity( - ph, win32security.SACL_SECURITY_INFORMATION -) -if new_sd_1.GetSecurityDescriptorSacl() is not None: - print("Unable to set Sacl to NULL !!!!!!!!") diff --git a/lib/win32/Demos/security/setnamedsecurityinfo.py b/lib/win32/Demos/security/setnamedsecurityinfo.py deleted file mode 100644 index 8915a0df..00000000 --- a/lib/win32/Demos/security/setnamedsecurityinfo.py +++ /dev/null @@ -1,131 +0,0 @@ -import win32api -import win32con -import win32process -import win32security - -fname, tmp = win32api.GetTempFileName(win32api.GetTempPath(), "tmp") -print(fname) -## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody -## other than yourself or your primary group. 
Most admin logins don't have it by default, so -## enabling it may fail -new_privs = ( - ( - win32security.LookupPrivilegeValue("", win32security.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TCB_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SHUTDOWN_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TAKE_OWNERSHIP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_ENABLE_DELEGATION_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CHANGE_NOTIFY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_DEBUG_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue( - "", win32security.SE_PROF_SINGLE_PROCESS_NAME - ), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SYSTEM_PROFILE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_LOCK_MEMORY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) - -all_info = ( - win32security.OWNER_SECURITY_INFORMATION - | win32security.GROUP_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION - | win32security.SACL_SECURITY_INFORMATION -) - -ph = win32process.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS -) ##win32con.TOKEN_ADJUST_PRIVILEGES) -win32security.AdjustTokenPrivileges(th, 0, new_privs) -my_sid = win32security.GetTokenInformation(th, 
win32security.TokenUser)[0] -pwr_sid = win32security.LookupAccountName("", "Power Users")[0] - -sd = win32security.GetNamedSecurityInfo(fname, win32security.SE_FILE_OBJECT, all_info) -dacl = sd.GetSecurityDescriptorDacl() -if dacl is None: - dacl = win32security.ACL() -sacl = sd.GetSecurityDescriptorSacl() -if sacl is None: - sacl = win32security.ACL() - -dacl_ace_cnt = dacl.GetAceCount() -sacl_ace_cnt = sacl.GetAceCount() - -dacl.AddAccessAllowedAce( - dacl.GetAclRevision(), win32con.ACCESS_SYSTEM_SECURITY | win32con.WRITE_DAC, my_sid -) -sacl.AddAuditAccessAce(sacl.GetAclRevision(), win32con.GENERIC_ALL, my_sid, 1, 1) - -win32security.SetNamedSecurityInfo( - fname, win32security.SE_FILE_OBJECT, all_info, pwr_sid, pwr_sid, dacl, sacl -) -new_sd = win32security.GetNamedSecurityInfo( - fname, win32security.SE_FILE_OBJECT, all_info -) - -## could do additional checking to make sure added ACE contains expected info -if new_sd.GetSecurityDescriptorDacl().GetAceCount() != dacl_ace_cnt + 1: - print("New dacl doesn" "t contain extra ace ????") -if new_sd.GetSecurityDescriptorSacl().GetAceCount() != sacl_ace_cnt + 1: - print("New Sacl doesn" "t contain extra ace ????") -if ( - win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorOwner())[0] - != "Power Users" -): - print("Owner not successfully set to Power Users !!!!!") -if ( - win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorGroup())[0] - != "Power Users" -): - print("Group not successfully set to Power Users !!!!!") - -win32security.SetNamedSecurityInfo( - fname, - win32security.SE_FILE_OBJECT, - win32security.SACL_SECURITY_INFORMATION, - None, - None, - None, - None, -) -new_sd_1 = win32security.GetNamedSecurityInfo( - fname, win32security.SE_FILE_OBJECT, win32security.SACL_SECURITY_INFORMATION -) -if new_sd_1.GetSecurityDescriptorSacl() is not None: - print("Unable to set Sacl to NULL !!!!!!!!") diff --git a/lib/win32/Demos/security/setsecurityinfo.py 
b/lib/win32/Demos/security/setsecurityinfo.py deleted file mode 100644 index d784d950..00000000 --- a/lib/win32/Demos/security/setsecurityinfo.py +++ /dev/null @@ -1,132 +0,0 @@ -import win32api -import win32con -import win32process -import win32security - -## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody -## other than yourself or your primary group. Most admin logins don't have it by default, so -## enabling it may fail -new_privs = ( - ( - win32security.LookupPrivilegeValue("", win32security.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TCB_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SHUTDOWN_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TAKE_OWNERSHIP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_ENABLE_DELEGATION_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CHANGE_NOTIFY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_DEBUG_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue( - "", win32security.SE_PROF_SINGLE_PROCESS_NAME - ), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SYSTEM_PROFILE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_LOCK_MEMORY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) - -all_info = ( - win32security.OWNER_SECURITY_INFORMATION - | 
win32security.GROUP_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION - | win32security.SACL_SECURITY_INFORMATION -) - -pid = win32api.GetCurrentProcessId() -ph = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, pid) -## PROCESS_ALL_ACCESS does not contain ACCESS_SYSTEM_SECURITY (neccessy to do SACLs) -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS -) ##win32con.TOKEN_ADJUST_PRIVILEGES) -old_privs = win32security.AdjustTokenPrivileges(th, 0, new_privs) -my_sid = win32security.GetTokenInformation(th, win32security.TokenUser)[0] -pwr_sid = win32security.LookupAccountName("", "Power Users")[0] -## reopen process with ACCESS_SYSTEM_SECURITY now that sufficent privs are enabled -ph = win32api.OpenProcess( - win32con.PROCESS_ALL_ACCESS | win32con.ACCESS_SYSTEM_SECURITY, 0, pid -) - -sd = win32security.GetSecurityInfo(ph, win32security.SE_KERNEL_OBJECT, all_info) -dacl = sd.GetSecurityDescriptorDacl() -if dacl is None: - dacl = win32security.ACL() -sacl = sd.GetSecurityDescriptorSacl() -if sacl is None: - sacl = win32security.ACL() - -dacl_ace_cnt = dacl.GetAceCount() -sacl_ace_cnt = sacl.GetAceCount() - -dacl.AddAccessAllowedAce( - dacl.GetAclRevision(), win32con.ACCESS_SYSTEM_SECURITY | win32con.WRITE_DAC, my_sid -) -sacl.AddAuditAccessAce(sacl.GetAclRevision(), win32con.GENERIC_ALL, my_sid, 1, 1) - -win32security.SetSecurityInfo( - ph, win32security.SE_KERNEL_OBJECT, all_info, pwr_sid, pwr_sid, dacl, sacl -) -new_sd = win32security.GetSecurityInfo(ph, win32security.SE_KERNEL_OBJECT, all_info) - -if new_sd.GetSecurityDescriptorDacl().GetAceCount() != dacl_ace_cnt + 1: - print("New dacl doesn" "t contain extra ace ????") -if new_sd.GetSecurityDescriptorSacl().GetAceCount() != sacl_ace_cnt + 1: - print("New Sacl doesn" "t contain extra ace ????") -if ( - win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorOwner())[0] - != "Power Users" -): - print("Owner not successfully set to Power Users !!!!!") -if ( - 
win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorGroup())[0] - != "Power Users" -): - print("Group not successfully set to Power Users !!!!!") - -win32security.SetSecurityInfo( - ph, - win32security.SE_KERNEL_OBJECT, - win32security.SACL_SECURITY_INFORMATION, - None, - None, - None, - None, -) -new_sd_1 = win32security.GetSecurityInfo( - ph, win32security.SE_KERNEL_OBJECT, win32security.SACL_SECURITY_INFORMATION -) -if new_sd_1.GetSecurityDescriptorSacl() is not None: - print("Unable to set Sacl to NULL !!!!!!!!") diff --git a/lib/win32/Demos/security/setuserobjectsecurity.py b/lib/win32/Demos/security/setuserobjectsecurity.py deleted file mode 100644 index 668571f6..00000000 --- a/lib/win32/Demos/security/setuserobjectsecurity.py +++ /dev/null @@ -1,103 +0,0 @@ -import win32api -import win32con -import win32process -import win32security - -new_privs = ( - ( - win32security.LookupPrivilegeValue("", win32security.SE_SECURITY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TCB_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SHUTDOWN_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_RESTORE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_TAKE_OWNERSHIP_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CREATE_PERMANENT_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_ENABLE_DELEGATION_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_CHANGE_NOTIFY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_DEBUG_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue( - "", 
win32security.SE_PROF_SINGLE_PROCESS_NAME - ), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_SYSTEM_PROFILE_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), - ( - win32security.LookupPrivilegeValue("", win32security.SE_LOCK_MEMORY_NAME), - win32con.SE_PRIVILEGE_ENABLED, - ), -) - -all_info = ( - win32security.OWNER_SECURITY_INFORMATION - | win32security.GROUP_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION - | win32security.SACL_SECURITY_INFORMATION -) -info = ( - win32security.OWNER_SECURITY_INFORMATION - | win32security.GROUP_SECURITY_INFORMATION - | win32security.DACL_SECURITY_INFORMATION -) - -ph = win32process.GetCurrentProcess() -th = win32security.OpenProcessToken( - ph, win32security.TOKEN_ALL_ACCESS -) ##win32con.TOKEN_ADJUST_PRIVILEGES) -win32security.AdjustTokenPrivileges(th, 0, new_privs) -my_sid = win32security.GetTokenInformation(th, win32security.TokenUser)[0] -pwr_sid = win32security.LookupAccountName("", "Power Users")[0] - -h = win32process.GetProcessWindowStation() -sd = win32security.GetUserObjectSecurity(h, info) -dacl = sd.GetSecurityDescriptorDacl() -ace_cnt = dacl.GetAceCount() - -dacl.AddAccessAllowedAce( - dacl.GetAclRevision(), win32con.ACCESS_SYSTEM_SECURITY | win32con.WRITE_DAC, my_sid -) -sd.SetSecurityDescriptorDacl(1, dacl, 0) -sd.SetSecurityDescriptorGroup(pwr_sid, 0) -sd.SetSecurityDescriptorOwner(pwr_sid, 0) - -win32security.SetUserObjectSecurity(h, info, sd) -new_sd = win32security.GetUserObjectSecurity(h, info) -assert ( - new_sd.GetSecurityDescriptorDacl().GetAceCount() == ace_cnt + 1 -), "Did not add an ace to the Dacl !!!!!!" -assert ( - win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorOwner())[0] - == "Power Users" -), "Owner not successfully set to Power Users !!!!!" -assert ( - win32security.LookupAccountSid("", new_sd.GetSecurityDescriptorGroup())[0] - == "Power Users" -), "Group not successfully set to Power Users !!!!!" 
diff --git a/lib/win32/Demos/security/sspi/fetch_url.py b/lib/win32/Demos/security/sspi/fetch_url.py deleted file mode 100644 index 3b43b7e4..00000000 --- a/lib/win32/Demos/security/sspi/fetch_url.py +++ /dev/null @@ -1,160 +0,0 @@ -""" -Fetches a URL from a web-server supporting NTLM authentication -eg, IIS. - -If no arguments are specified, a default of http://localhost/localstart.asp -is used. This script does follow simple 302 redirections, so pointing at the -root of an IIS server is should work. -""" - -import http.client # sorry, this demo needs 2.3+ -import optparse -import urllib.error -import urllib.parse -import urllib.request -from base64 import decodestring, encodestring - -from sspi import ClientAuth - -options = None # set to optparse options object - - -def open_url(host, url): - h = http.client.HTTPConnection(host) - # h.set_debuglevel(9) - h.putrequest("GET", url) - h.endheaders() - resp = h.getresponse() - print("Initial response is", resp.status, resp.reason) - body = resp.read() - if resp.status == 302: # object moved - url = "/" + resp.msg["location"] - resp.close() - h.putrequest("GET", url) - h.endheaders() - resp = h.getresponse() - print("After redirect response is", resp.status, resp.reason) - if options.show_headers: - print("Initial response headers:") - for name, val in list(resp.msg.items()): - print(" %s: %s" % (name, val)) - if options.show_body: - print(body) - if resp.status == 401: - # 401: Unauthorized - here is where the real work starts - auth_info = None - if options.user or options.domain or options.password: - auth_info = options.user, options.domain, options.password - ca = ClientAuth("NTLM", auth_info=auth_info) - auth_scheme = ca.pkg_info["Name"] - data = None - while 1: - err, out_buf = ca.authorize(data) - data = out_buf[0].Buffer - # Encode it as base64 as required by HTTP - auth = encodestring(data).replace("\012", "") - h.putrequest("GET", url) - h.putheader("Authorization", auth_scheme + " " + auth) - 
h.putheader("Content-Length", "0") - h.endheaders() - resp = h.getresponse() - if options.show_headers: - print("Token dance headers:") - for name, val in list(resp.msg.items()): - print(" %s: %s" % (name, val)) - - if err == 0: - break - else: - if resp.status != 401: - print("Eeek - got response", resp.status) - cl = resp.msg.get("content-length") - if cl: - print(repr(resp.read(int(cl)))) - else: - print("no content!") - - assert resp.status == 401, resp.status - - assert not resp.will_close, "NTLM is per-connection - must not close" - schemes = [ - s.strip() for s in resp.msg.get("WWW-Authenticate", "").split(",") - ] - for scheme in schemes: - if scheme.startswith(auth_scheme): - data = decodestring(scheme[len(auth_scheme) + 1 :]) - break - else: - print( - "Could not find scheme '%s' in schemes %r" % (auth_scheme, schemes) - ) - break - - resp.read() - print("Final response status is", resp.status, resp.reason) - if resp.status == 200: - # Worked! - # Check we can read it again without re-authenticating. 
- if resp.will_close: - print( - "EEEK - response will close, but NTLM is per connection - it must stay open" - ) - body = resp.read() - if options.show_body: - print("Final response body:") - print(body) - h.putrequest("GET", url) - h.endheaders() - resp = h.getresponse() - print("Second fetch response is", resp.status, resp.reason) - if options.show_headers: - print("Second response headers:") - for name, val in list(resp.msg.items()): - print(" %s: %s" % (name, val)) - - resp.read(int(resp.msg.get("content-length", 0))) - elif resp.status == 500: - print("Error text") - print(resp.read()) - else: - if options.show_body: - cl = resp.msg.get("content-length") - print(resp.read(int(cl))) - - -if __name__ == "__main__": - parser = optparse.OptionParser(description=__doc__) - - parser.add_option( - "", - "--show-body", - action="store_true", - help="print the body of each response as it is received", - ) - - parser.add_option( - "", - "--show-headers", - action="store_true", - help="print the headers of each response as it is received", - ) - - parser.add_option("", "--user", action="store", help="The username to login with") - - parser.add_option( - "", "--password", action="store", help="The password to login with" - ) - - parser.add_option("", "--domain", action="store", help="The domain to login to") - - options, args = parser.parse_args() - if not args: - print("Run with --help for usage details") - args = ["http://localhost/localstart.asp"] - for url in args: - scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(url) - if (scheme != "http") or params or query or fragment: - parser.error("Scheme must be http, URL must be simple") - - print("Opening '%s' from '%s'" % (path, netloc)) - r = open_url(netloc, path) diff --git a/lib/win32/Demos/security/sspi/simple_auth.py b/lib/win32/Demos/security/sspi/simple_auth.py deleted file mode 100644 index cc0ce5cf..00000000 --- a/lib/win32/Demos/security/sspi/simple_auth.py +++ /dev/null @@ -1,72 +0,0 @@ 
-# A demo of basic SSPI authentication. -# There is a 'client' context and a 'server' context - typically these will -# be on different machines (here they are in the same process, but the same -# concepts apply) -import sspi -import sspicon -import win32api -import win32security - - -def lookup_ret_code(err): - for k, v in list(sspicon.__dict__.items()): - if k[0:6] in ("SEC_I_", "SEC_E_") and v == err: - return k - - -""" -pkg_name='Kerberos' -sspiclient=SSPIClient(pkg_name, win32api.GetUserName(), ## target spn is ourself - None, None, ## use none for client name and authentication information for current context - ## u'username', (u'username',u'domain.com',u'passwd'), - sspicon.ISC_REQ_INTEGRITY|sspicon.ISC_REQ_SEQUENCE_DETECT|sspicon.ISC_REQ_REPLAY_DETECT| \ - sspicon.ISC_REQ_DELEGATE|sspicon.ISC_REQ_CONFIDENTIALITY|sspicon.ISC_REQ_USE_SESSION_KEY) -sspiserver=SSPIServer(pkg_name, None, - sspicon.ASC_REQ_INTEGRITY|sspicon.ASC_REQ_SEQUENCE_DETECT|sspicon.ASC_REQ_REPLAY_DETECT| \ - sspicon.ASC_REQ_DELEGATE|sspicon.ASC_REQ_CONFIDENTIALITY|sspicon.ASC_REQ_STREAM|sspicon.ASC_REQ_USE_SESSION_KEY) -""" - -pkg_name = "NTLM" - -# Setup the 2 contexts. -sspiclient = sspi.ClientAuth(pkg_name) -sspiserver = sspi.ServerAuth(pkg_name) - -# Perform the authentication dance, each loop exchanging more information -# on the way to completing authentication. -sec_buffer = None -while 1: - err, sec_buffer = sspiclient.authorize(sec_buffer) - err, sec_buffer = sspiserver.authorize(sec_buffer) - if err == 0: - break - -# The server can now impersonate the client. In this demo the 2 users will -# always be the same. -sspiserver.ctxt.ImpersonateSecurityContext() -print("Impersonated user: ", win32api.GetUserNameEx(win32api.NameSamCompatible)) -sspiserver.ctxt.RevertSecurityContext() -print("Reverted to self: ", win32api.GetUserName()) - -pkg_size_info = sspiclient.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) -# Now sign some data -msg = "some data to be encrypted ......" 
- -sigsize = pkg_size_info["MaxSignature"] -sigbuf = win32security.PySecBufferDescType() -sigbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) -sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN)) -sigbuf[0].Buffer = msg -sspiclient.ctxt.MakeSignature(0, sigbuf, 1) -sspiserver.ctxt.VerifySignature(sigbuf, 1) - -# And finally encrypt some. -trailersize = pkg_size_info["SecurityTrailer"] -encbuf = win32security.PySecBufferDescType() -encbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) -encbuf.append(win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN)) -encbuf[0].Buffer = msg -sspiclient.ctxt.EncryptMessage(0, encbuf, 1) -print("Encrypted data:", repr(encbuf[0].Buffer)) -sspiserver.ctxt.DecryptMessage(encbuf, 1) -print("Unencrypted data:", encbuf[0].Buffer) diff --git a/lib/win32/Demos/security/sspi/socket_server.py b/lib/win32/Demos/security/sspi/socket_server.py deleted file mode 100644 index 4d606bb8..00000000 --- a/lib/win32/Demos/security/sspi/socket_server.py +++ /dev/null @@ -1,201 +0,0 @@ -"""A sample socket server and client using SSPI authentication and encryption. - -You must run with either 'client' or 'server' as arguments. A server must be -running before a client can connect. - -To use with Kerberos you should include in the client options ---target-spn=username, where 'username' is the user under which the server is -being run. - -Running either the client or server as a different user can be informative. -A command-line such as the following may be useful: -`runas /user:{user} {fqp}\python.exe {fqp}\socket_server.py --wait client|server` - -{fqp} should specify the relevant fully-qualified path names. - -To use 'runas' with Kerberos, the client program will need to -specify --target-spn with the username under which the *server* is running. - -See the SSPI documentation for more details. 
-""" - - -import http.client # sorry, this demo needs 2.3+ -import optparse -import socketserver -import struct -import traceback - -import sspi -import win32api -import win32security - -options = None # set to optparse object. - - -def GetUserName(): - try: - return win32api.GetUserName() - except win32api.error as details: - # Seeing 'access denied' errors here for non-local users (presumably - # without permission to login locally). Get the fully-qualified - # username, although a side-effect of these permission-denied errors - # is a lack of Python codecs - so printing the Unicode value fails. - # So just return the repr(), and avoid codecs completely. - return repr(win32api.GetUserNameEx(win32api.NameSamCompatible)) - - -# Send a simple "message" over a socket - send the number of bytes first, -# then the string. Ditto for receive. -def _send_msg(s, m): - s.send(struct.pack("i", len(m))) - s.send(m) - - -def _get_msg(s): - size_data = s.recv(struct.calcsize("i")) - if not size_data: - return None - cb = struct.unpack("i", size_data)[0] - return s.recv(cb) - - -class SSPISocketServer(socketserver.TCPServer): - def __init__(self, *args, **kw): - socketserver.TCPServer.__init__(self, *args, **kw) - self.sa = sspi.ServerAuth(options.package) - - def verify_request(self, sock, ca): - # Do the sspi auth dance - self.sa.reset() - while 1: - data = _get_msg(sock) - if data is None: - return False - try: - err, sec_buffer = self.sa.authorize(data) - except sspi.error as details: - print("FAILED to authorize client:", details) - return False - - if err == 0: - break - _send_msg(sock, sec_buffer[0].Buffer) - return True - - def process_request(self, request, client_address): - # An example using the connection once it is established. 
- print("The server is running as user", GetUserName()) - self.sa.ctxt.ImpersonateSecurityContext() - try: - print("Having conversation with client as user", GetUserName()) - while 1: - # we need to grab 2 bits of data - the encrypted data, and the - # 'key' - data = _get_msg(request) - key = _get_msg(request) - if data is None or key is None: - break - data = self.sa.decrypt(data, key) - print("Client sent:", repr(data)) - finally: - self.sa.ctxt.RevertSecurityContext() - self.close_request(request) - print("The server is back to user", GetUserName()) - - -def serve(): - s = SSPISocketServer(("localhost", options.port), None) - print("Running test server...") - s.serve_forever() - - -def sspi_client(): - c = http.client.HTTPConnection("localhost", options.port) - c.connect() - # Do the auth dance. - ca = sspi.ClientAuth(options.package, targetspn=options.target_spn) - data = None - while 1: - err, out_buf = ca.authorize(data) - _send_msg(c.sock, out_buf[0].Buffer) - if err == 0: - break - data = _get_msg(c.sock) - print("Auth dance complete - sending a few encryted messages") - # Assume out data is sensitive - encrypt the message. - for data in "Hello from the client".split(): - blob, key = ca.encrypt(data) - _send_msg(c.sock, blob) - _send_msg(c.sock, key) - c.sock.close() - print("Client completed.") - - -if __name__ == "__main__": - parser = optparse.OptionParser("%prog [options] client|server", description=__doc__) - - parser.add_option( - "", - "--package", - action="store", - default="NTLM", - help="The SSPI package to use (eg, Kerberos) - default is NTLM", - ) - - parser.add_option( - "", - "--target-spn", - action="store", - help="""The target security provider name to use. The - string contents are security-package specific. For - example, 'Kerberos' or 'Negotiate' require the server - principal name (SPN) (ie, the username) of the remote - process. 
For NTLM this must be blank.""", - ) - - parser.add_option( - "", - "--port", - action="store", - default="8181", - help="The port number to use (default=8181)", - ) - - parser.add_option( - "", - "--wait", - action="store_true", - help="""Cause the program to wait for input just before - terminating. Useful when using via runas to see - any error messages before termination. - """, - ) - - options, args = parser.parse_args() - try: - options.port = int(options.port) - except (ValueError, TypeError): - parser.error("--port must be an integer") - - try: - try: - if not args: - args = [""] - if args[0] == "client": - sspi_client() - elif args[0] == "server": - serve() - else: - parser.error( - "You must supply 'client' or 'server' - " "use --help for details" - ) - except KeyboardInterrupt: - pass - except SystemExit: - pass - except: - traceback.print_exc() - finally: - if options.wait: - input("Press enter to continue") diff --git a/lib/win32/Demos/security/sspi/validate_password.py b/lib/win32/Demos/security/sspi/validate_password.py deleted file mode 100644 index 40b6ad5b..00000000 --- a/lib/win32/Demos/security/sspi/validate_password.py +++ /dev/null @@ -1,41 +0,0 @@ -# Demonstrates how to validate a password. -# See also MSKB article Q180548 -# -# To use with Kerberos you need to jump through the 'targetspn' hoops. - -import sys - -import win32security -from sspi import ClientAuth, ServerAuth - - -def validate(username, password, domain=""): - auth_info = username, domain, password - ca = ClientAuth("NTLM", auth_info=auth_info) - sa = ServerAuth("NTLM") - - data = err = None - while err != 0: - err, data = ca.authorize(data) - err, data = sa.authorize(data) - # If we get here without exception, we worked! - - -if __name__ == "__main__": - if len(sys.argv) not in [2, 3, 4]: - print("Usage: %s username [password [domain]]" % (__file__,)) - sys.exit(1) - - # password and domain are optional! 
- password = None - if len(sys.argv) >= 3: - password = sys.argv[2] - domain = "" - if len(sys.argv) >= 4: - domain = sys.argv[3] - try: - validate(sys.argv[1], password, domain) - print("Validated OK") - except win32security.error as details: - hr, func, msg = details - print("Validation failed: %s (%d)" % (msg, hr)) diff --git a/lib/win32/Demos/service/nativePipeTestService.py b/lib/win32/Demos/service/nativePipeTestService.py deleted file mode 100644 index 4ac05eba..00000000 --- a/lib/win32/Demos/service/nativePipeTestService.py +++ /dev/null @@ -1,63 +0,0 @@ -# This is an example of a service hosted by python.exe rather than -# pythonservice.exe. - -# Note that it is very rare that using python.exe is a better option -# than the default pythonservice.exe - the latter has better error handling -# so that if Python itself can't be initialized or there are very early -# import errors, you will get error details written to the event log. When -# using python.exe instead, you are forced to wait for the interpreter startup -# and imports to succeed before you are able to effectively setup your own -# error handling. - -# So in short, please make sure you *really* want to do this, otherwise just -# stick with the default. - -import os -import sys - -import servicemanager -import win32serviceutil -from pipeTestService import TestPipeService - - -class NativeTestPipeService(TestPipeService): - _svc_name_ = "PyNativePipeTestService" - _svc_display_name_ = "Python Native Pipe Test Service" - _svc_description_ = "Tests Python.exe hosted services" - # tell win32serviceutil we have a custom executable and custom args - # so registration does the right thing. - _exe_name_ = sys.executable - _exe_args_ = '"' + os.path.abspath(sys.argv[0]) + '"' - - -def main(): - if len(sys.argv) == 1: - # service must be starting... 
- print("service is starting...") - print("(execute this script with '--help' if that isn't what you want)") - - # for the sake of debugging etc, we use win32traceutil to see - # any unhandled exceptions and print statements. - import win32traceutil - - print("service is still starting...") - - servicemanager.Initialize() - servicemanager.PrepareToHostSingle(NativeTestPipeService) - # Now ask the service manager to fire things up for us... - servicemanager.StartServiceCtrlDispatcher() - print("service done!") - else: - win32serviceutil.HandleCommandLine(NativeTestPipeService) - - -if __name__ == "__main__": - try: - main() - except (SystemExit, KeyboardInterrupt): - raise - except: - print("Something went bad!") - import traceback - - traceback.print_exc() diff --git a/lib/win32/Demos/service/pipeTestService.py b/lib/win32/Demos/service/pipeTestService.py deleted file mode 100644 index 2efda606..00000000 --- a/lib/win32/Demos/service/pipeTestService.py +++ /dev/null @@ -1,184 +0,0 @@ -# A Demo of services and named pipes. - -# A multi-threaded service that simply echos back its input. - -# * Install as a service using "pipeTestService.py install" -# * Use Control Panel to change the user name of the service -# to a real user name (ie, NOT the SystemAccount) -# * Start the service. -# * Run the "pipeTestServiceClient.py" program as the client pipe side. - -import _thread -import traceback - -# Old versions of the service framework would not let you import this -# module at the top-level. Now you can, and can check 'Debugging()' and -# 'RunningAsService()' to check your context. -import pywintypes -import servicemanager -import win32con -import win32service -import win32serviceutil -import winerror -from ntsecuritycon import * -from win32api import * - -# Use "import *" to keep this looking as much as a "normal" service -# as possible. Real code shouldn't do this. 
-from win32event import * -from win32file import * -from win32pipe import * - - -def ApplyIgnoreError(fn, args): - try: - return fn(*args) - except error: # Ignore win32api errors. - return None - - -class TestPipeService(win32serviceutil.ServiceFramework): - _svc_name_ = "PyPipeTestService" - _svc_display_name_ = "Python Pipe Test Service" - _svc_description_ = "Tests Python service framework by receiving and echoing messages over a named pipe" - - def __init__(self, args): - win32serviceutil.ServiceFramework.__init__(self, args) - self.hWaitStop = CreateEvent(None, 0, 0, None) - self.overlapped = pywintypes.OVERLAPPED() - self.overlapped.hEvent = CreateEvent(None, 0, 0, None) - self.thread_handles = [] - - def CreatePipeSecurityObject(self): - # Create a security object giving World read/write access, - # but only "Owner" modify access. - sa = pywintypes.SECURITY_ATTRIBUTES() - sidEveryone = pywintypes.SID() - sidEveryone.Initialize(SECURITY_WORLD_SID_AUTHORITY, 1) - sidEveryone.SetSubAuthority(0, SECURITY_WORLD_RID) - sidCreator = pywintypes.SID() - sidCreator.Initialize(SECURITY_CREATOR_SID_AUTHORITY, 1) - sidCreator.SetSubAuthority(0, SECURITY_CREATOR_OWNER_RID) - - acl = pywintypes.ACL() - acl.AddAccessAllowedAce(FILE_GENERIC_READ | FILE_GENERIC_WRITE, sidEveryone) - acl.AddAccessAllowedAce(FILE_ALL_ACCESS, sidCreator) - - sa.SetSecurityDescriptorDacl(1, acl, 0) - return sa - - # The functions executed in their own thread to process a client request. - def DoProcessClient(self, pipeHandle, tid): - try: - try: - # Create a loop, reading large data. If we knew the data stream was - # was small, a simple ReadFile would do. - d = "".encode("ascii") # ensure bytes on py2k and py3k... - hr = winerror.ERROR_MORE_DATA - while hr == winerror.ERROR_MORE_DATA: - hr, thisd = ReadFile(pipeHandle, 256) - d = d + thisd - print("Read", d) - ok = 1 - except error: - # Client disconnection - do nothing - ok = 0 - - # A secure service would handle (and ignore!) 
errors writing to the - # pipe, but for the sake of this demo we dont (if only to see what errors - # we can get when our clients break at strange times :-) - if ok: - msg = ( - "%s (on thread %d) sent me %s" - % (GetNamedPipeHandleState(pipeHandle, False, True)[4], tid, d) - ).encode("ascii") - WriteFile(pipeHandle, msg) - finally: - ApplyIgnoreError(DisconnectNamedPipe, (pipeHandle,)) - ApplyIgnoreError(CloseHandle, (pipeHandle,)) - - def ProcessClient(self, pipeHandle): - try: - procHandle = GetCurrentProcess() - th = DuplicateHandle( - procHandle, - GetCurrentThread(), - procHandle, - 0, - 0, - win32con.DUPLICATE_SAME_ACCESS, - ) - try: - self.thread_handles.append(th) - try: - return self.DoProcessClient(pipeHandle, th) - except: - traceback.print_exc() - finally: - self.thread_handles.remove(th) - except: - traceback.print_exc() - - def SvcStop(self): - self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) - SetEvent(self.hWaitStop) - - def SvcDoRun(self): - # Write an event log record - in debug mode we will also - # see this message printed. - servicemanager.LogMsg( - servicemanager.EVENTLOG_INFORMATION_TYPE, - servicemanager.PYS_SERVICE_STARTED, - (self._svc_name_, ""), - ) - - num_connections = 0 - while 1: - pipeHandle = CreateNamedPipe( - "\\\\.\\pipe\\PyPipeTest", - PIPE_ACCESS_DUPLEX | FILE_FLAG_OVERLAPPED, - PIPE_TYPE_MESSAGE | PIPE_READMODE_BYTE, - PIPE_UNLIMITED_INSTANCES, # max instances - 0, - 0, - 6000, - self.CreatePipeSecurityObject(), - ) - try: - hr = ConnectNamedPipe(pipeHandle, self.overlapped) - except error as details: - print("Error connecting pipe!", details) - CloseHandle(pipeHandle) - break - if hr == winerror.ERROR_PIPE_CONNECTED: - # Client is already connected - signal event - SetEvent(self.overlapped.hEvent) - rc = WaitForMultipleObjects( - (self.hWaitStop, self.overlapped.hEvent), 0, INFINITE - ) - if rc == WAIT_OBJECT_0: - # Stop event - break - else: - # Pipe event - spawn thread to deal with it. 
- _thread.start_new_thread(self.ProcessClient, (pipeHandle,)) - num_connections = num_connections + 1 - - # Sleep to ensure that any new threads are in the list, and then - # wait for all current threads to finish. - # What is a better way? - Sleep(500) - while self.thread_handles: - self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING, 5000) - print("Waiting for %d threads to finish..." % (len(self.thread_handles))) - WaitForMultipleObjects(self.thread_handles, 1, 3000) - # Write another event log record. - servicemanager.LogMsg( - servicemanager.EVENTLOG_INFORMATION_TYPE, - servicemanager.PYS_SERVICE_STOPPED, - (self._svc_name_, " after processing %d connections" % (num_connections,)), - ) - - -if __name__ == "__main__": - win32serviceutil.HandleCommandLine(TestPipeService) diff --git a/lib/win32/Demos/service/pipeTestServiceClient.py b/lib/win32/Demos/service/pipeTestServiceClient.py deleted file mode 100644 index 98fdb3c9..00000000 --- a/lib/win32/Demos/service/pipeTestServiceClient.py +++ /dev/null @@ -1,156 +0,0 @@ -# A Test Program for pipeTestService.py -# -# Install and start the Pipe Test service, then run this test -# either from the same machine, or from another using the "-s" param. -# -# Eg: pipeTestServiceClient.py -s server_name Hi There -# Should work. - -import os -import sys -import traceback - -import pywintypes -import win32api -import winerror -from win32event import * -from win32file import * -from win32pipe import * - -verbose = 0 - -# def ReadFromPipe(pipeName): -# Could (Should?) use CallNamedPipe, but this technique allows variable size -# messages (whereas you must supply a buffer size for CallNamedPipe! 
-# hPipe = CreateFile(pipeName, GENERIC_WRITE, 0, None, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, 0) -# more = 1 -# while more: -# hr = ReadFile(hPipe, 256) -# if hr==0: -# more = 0 -# except win32api.error (hr, fn, desc): -# if hr==winerror.ERROR_MORE_DATA: -# data = dat -# - - -def CallPipe(fn, args): - ret = None - retryCount = 0 - while retryCount < 8: # Keep looping until user cancels. - retryCount = retryCount + 1 - try: - return fn(*args) - except win32api.error as exc: - if exc.winerror == winerror.ERROR_PIPE_BUSY: - win32api.Sleep(5000) - continue - else: - raise - - raise RuntimeError("Could not make a connection to the server") - - -def testClient(server, msg): - if verbose: - print("Sending", msg) - data = CallPipe( - CallNamedPipe, - ("\\\\%s\\pipe\\PyPipeTest" % server, msg, 256, NMPWAIT_WAIT_FOREVER), - ) - if verbose: - print("Server sent back '%s'" % data) - print("Sent and received a message!") - - -def testLargeMessage(server, size=4096): - if verbose: - print("Sending message of size %d" % (size)) - msg = "*" * size - data = CallPipe( - CallNamedPipe, - ("\\\\%s\\pipe\\PyPipeTest" % server, msg, 512, NMPWAIT_WAIT_FOREVER), - ) - if len(data) - size: - print("Sizes are all wrong - send %d, got back %d" % (size, len(data))) - - -def stressThread(server, numMessages, wait): - try: - try: - for i in range(numMessages): - r = CallPipe( - CallNamedPipe, - ( - "\\\\%s\\pipe\\PyPipeTest" % server, - "#" * 512, - 1024, - NMPWAIT_WAIT_FOREVER, - ), - ) - except: - traceback.print_exc() - print("Failed after %d messages" % i) - finally: - SetEvent(wait) - - -def stressTestClient(server, numThreads, numMessages): - import _thread - - thread_waits = [] - for t_num in range(numThreads): - # Note I could just wait on thread handles (after calling DuplicateHandle) - # See the service itself for an example of waiting for the clients... 
- wait = CreateEvent(None, 0, 0, None) - thread_waits.append(wait) - _thread.start_new_thread(stressThread, (server, numMessages, wait)) - # Wait for all threads to finish. - WaitForMultipleObjects(thread_waits, 1, INFINITE) - - -def main(): - import getopt - import sys - - server = "." - thread_count = 0 - msg_count = 500 - try: - opts, args = getopt.getopt(sys.argv[1:], "s:t:m:vl") - for o, a in opts: - if o == "-s": - server = a - if o == "-m": - msg_count = int(a) - if o == "-t": - thread_count = int(a) - if o == "-v": - global verbose - verbose = 1 - if o == "-l": - testLargeMessage(server) - msg = " ".join(args).encode("mbcs") - except getopt.error as msg: - print(msg) - my_name = os.path.split(sys.argv[0])[1] - print( - "Usage: %s [-v] [-s server] [-t thread_count=0] [-m msg_count=500] msg ..." - % my_name - ) - print(" -v = verbose") - print( - " Specifying a value for -t will stress test using that many threads." - ) - return - testClient(server, msg) - if thread_count > 0: - print( - "Spawning %d threads each sending %d messages..." - % (thread_count, msg_count) - ) - stressTestClient(server, thread_count, msg_count) - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/service/serviceEvents.py b/lib/win32/Demos/service/serviceEvents.py deleted file mode 100644 index a62784f9..00000000 --- a/lib/win32/Demos/service/serviceEvents.py +++ /dev/null @@ -1,98 +0,0 @@ -# A Demo of a service that takes advantage of the additional notifications -# available in later Windows versions. - -# Note that all output is written as event log entries - so you must install -# and start the service, then look at the event log for messages as events -# are generated. 
- -# Events are generated for USB device insertion and removal, power state -# changes and hardware profile events - so try putting your computer to -# sleep and waking it, inserting a memory stick, etc then check the event log - -# Most event notification support lives around win32gui -import servicemanager -import win32con -import win32event -import win32gui -import win32gui_struct -import win32service -import win32serviceutil - -GUID_DEVINTERFACE_USB_DEVICE = "{A5DCBF10-6530-11D2-901F-00C04FB951ED}" - - -class EventDemoService(win32serviceutil.ServiceFramework): - _svc_name_ = "PyServiceEventDemo" - _svc_display_name_ = "Python Service Event Demo" - _svc_description_ = ( - "Demonstrates a Python service which takes advantage of the extra notifications" - ) - - def __init__(self, args): - win32serviceutil.ServiceFramework.__init__(self, args) - self.hWaitStop = win32event.CreateEvent(None, 0, 0, None) - # register for a device notification - we pass our service handle - # instead of a window handle. - filter = win32gui_struct.PackDEV_BROADCAST_DEVICEINTERFACE( - GUID_DEVINTERFACE_USB_DEVICE - ) - self.hdn = win32gui.RegisterDeviceNotification( - self.ssh, filter, win32con.DEVICE_NOTIFY_SERVICE_HANDLE - ) - - # Override the base class so we can accept additional events. - def GetAcceptedControls(self): - # say we accept them all. 
- rc = win32serviceutil.ServiceFramework.GetAcceptedControls(self) - rc |= ( - win32service.SERVICE_ACCEPT_PARAMCHANGE - | win32service.SERVICE_ACCEPT_NETBINDCHANGE - | win32service.SERVICE_CONTROL_DEVICEEVENT - | win32service.SERVICE_ACCEPT_HARDWAREPROFILECHANGE - | win32service.SERVICE_ACCEPT_POWEREVENT - | win32service.SERVICE_ACCEPT_SESSIONCHANGE - ) - return rc - - # All extra events are sent via SvcOtherEx (SvcOther remains as a - # function taking only the first args for backwards compat) - def SvcOtherEx(self, control, event_type, data): - # This is only showing a few of the extra events - see the MSDN - # docs for "HandlerEx callback" for more info. - if control == win32service.SERVICE_CONTROL_DEVICEEVENT: - info = win32gui_struct.UnpackDEV_BROADCAST(data) - msg = "A device event occurred: %x - %s" % (event_type, info) - elif control == win32service.SERVICE_CONTROL_HARDWAREPROFILECHANGE: - msg = "A hardware profile changed: type=%s, data=%s" % (event_type, data) - elif control == win32service.SERVICE_CONTROL_POWEREVENT: - msg = "A power event: setting %s" % data - elif control == win32service.SERVICE_CONTROL_SESSIONCHANGE: - # data is a single elt tuple, but this could potentially grow - # in the future if the win32 struct does - msg = "Session event: type=%s, data=%s" % (event_type, data) - else: - msg = "Other event: code=%d, type=%s, data=%s" % (control, event_type, data) - - servicemanager.LogMsg( - servicemanager.EVENTLOG_INFORMATION_TYPE, - 0xF000, # generic message - (msg, ""), - ) - - def SvcStop(self): - self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) - win32event.SetEvent(self.hWaitStop) - - def SvcDoRun(self): - # do nothing at all - just wait to be stopped - win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE) - # Write a stop message. 
- servicemanager.LogMsg( - servicemanager.EVENTLOG_INFORMATION_TYPE, - servicemanager.PYS_SERVICE_STOPPED, - (self._svc_name_, ""), - ) - - -if __name__ == "__main__": - win32serviceutil.HandleCommandLine(EventDemoService) diff --git a/lib/win32/Demos/timer_demo.py b/lib/win32/Demos/timer_demo.py deleted file mode 100644 index 61115f06..00000000 --- a/lib/win32/Demos/timer_demo.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- Mode: Python; tab-width: 4 -*- -# - -# This module, and the timer.pyd core timer support, were written by -# Sam Rushing (rushing@nightmare.com) - -import time - -# Timers are based on Windows messages. So we need -# to do the event-loop thing! -import timer -import win32event -import win32gui - -# glork holds a simple counter for us. - - -class glork: - def __init__(self, delay=1000, max=10): - self.x = 0 - self.max = max - self.id = timer.set_timer(delay, self.increment) - # Could use the threading module, but this is - # a win32 extension test after all! :-) - self.event = win32event.CreateEvent(None, 0, 0, None) - - def increment(self, id, time): - print("x = %d" % self.x) - self.x = self.x + 1 - # if we've reached the max count, - # kill off the timer. - if self.x > self.max: - # we could have used 'self.id' here, too - timer.kill_timer(id) - win32event.SetEvent(self.event) - - -# create a counter that will count from '1' thru '10', incrementing -# once a second, and then stop. - - -def demo(delay=1000, stop=10): - g = glork(delay, stop) - # Timers are message based - so we need - # To run a message loop while waiting for our timers - # to expire. - start_time = time.time() - while 1: - # We can't simply give a timeout of 30 seconds, as - # we may continouusly be recieving other input messages, - # and therefore never expire. - rc = win32event.MsgWaitForMultipleObjects( - (g.event,), # list of objects - 0, # wait all - 500, # timeout - win32event.QS_ALLEVENTS, # type of input - ) - if rc == win32event.WAIT_OBJECT_0: - # Event signalled. 
- break - elif rc == win32event.WAIT_OBJECT_0 + 1: - # Message waiting. - if win32gui.PumpWaitingMessages(): - raise RuntimeError("We got an unexpected WM_QUIT message!") - else: - # This wait timed-out. - if time.time() - start_time > 30: - raise RuntimeError("We timed out waiting for the timers to expire!") - - -if __name__ == "__main__": - demo() diff --git a/lib/win32/Demos/win32clipboardDemo.py b/lib/win32/Demos/win32clipboardDemo.py deleted file mode 100644 index 1252ff7d..00000000 --- a/lib/win32/Demos/win32clipboardDemo.py +++ /dev/null @@ -1,145 +0,0 @@ -# win32clipboardDemo.py -# -# Demo/test of the win32clipboard module. - -import win32con -from pywin32_testutil import str2bytes # py3k-friendly helper -from win32clipboard import * - -if not __debug__: - print("WARNING: The test code in this module uses assert") - print("This instance of Python has asserts disabled, so many tests will be skipped") - -cf_names = {} -# Build map of CF_* constants to names. -for name, val in list(win32con.__dict__.items()): - if name[:3] == "CF_" and name != "CF_SCREENFONTS": # CF_SCREEN_FONTS==CF_TEXT!?!? - cf_names[val] = name - - -def TestEmptyClipboard(): - OpenClipboard() - try: - EmptyClipboard() - assert ( - EnumClipboardFormats(0) == 0 - ), "Clipboard formats were available after emptying it!" - finally: - CloseClipboard() - - -def TestText(): - OpenClipboard() - try: - text = "Hello from Python" - text_bytes = str2bytes(text) - SetClipboardText(text) - got = GetClipboardData(win32con.CF_TEXT) - # CF_TEXT always gives us 'bytes' back . - assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) - finally: - CloseClipboard() - - OpenClipboard() - try: - # CF_UNICODE text always gives unicode objects back. - got = GetClipboardData(win32con.CF_UNICODETEXT) - assert got == text, "Didnt get the correct result back - '%r'." % (got,) - assert type(got) == str, "Didnt get the correct result back - '%r'." % (got,) - - # CF_OEMTEXT is a bytes-based format. 
- got = GetClipboardData(win32con.CF_OEMTEXT) - assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) - - # Unicode tests - EmptyClipboard() - text = "Hello from Python unicode" - text_bytes = str2bytes(text) - # Now set the Unicode value - SetClipboardData(win32con.CF_UNICODETEXT, text) - # Get it in Unicode. - got = GetClipboardData(win32con.CF_UNICODETEXT) - assert got == text, "Didnt get the correct result back - '%r'." % (got,) - assert type(got) == str, "Didnt get the correct result back - '%r'." % (got,) - - # Close and open the clipboard to ensure auto-conversions take place. - finally: - CloseClipboard() - - OpenClipboard() - try: - # Make sure I can still get the text as bytes - got = GetClipboardData(win32con.CF_TEXT) - assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) - # Make sure we get back the correct types. - got = GetClipboardData(win32con.CF_UNICODETEXT) - assert type(got) == str, "Didnt get the correct result back - '%r'." % (got,) - got = GetClipboardData(win32con.CF_OEMTEXT) - assert got == text_bytes, "Didnt get the correct result back - '%r'." % (got,) - print("Clipboard text tests worked correctly") - finally: - CloseClipboard() - - -def TestClipboardEnum(): - OpenClipboard() - try: - # Enumerate over the clipboard types - enum = 0 - while 1: - enum = EnumClipboardFormats(enum) - if enum == 0: - break - assert IsClipboardFormatAvailable( - enum - ), "Have format, but clipboard says it is not available!" 
- n = cf_names.get(enum, "") - if not n: - try: - n = GetClipboardFormatName(enum) - except error: - n = "unknown (%s)" % (enum,) - - print("Have format", n) - print("Clipboard enumerator tests worked correctly") - finally: - CloseClipboard() - - -class Foo: - def __init__(self, **kw): - self.__dict__.update(kw) - - def __cmp__(self, other): - return cmp(self.__dict__, other.__dict__) - - def __eq__(self, other): - return self.__dict__ == other.__dict__ - - -def TestCustomFormat(): - OpenClipboard() - try: - # Just for the fun of it pickle Python objects through the clipboard - fmt = RegisterClipboardFormat("Python Pickle Format") - import pickle - - pickled_object = Foo(a=1, b=2, Hi=3) - SetClipboardData(fmt, pickle.dumps(pickled_object)) - # Now read it back. - data = GetClipboardData(fmt) - loaded_object = pickle.loads(data) - assert pickle.loads(data) == pickled_object, "Didnt get the correct data!" - - print("Clipboard custom format tests worked correctly") - finally: - CloseClipboard() - - -if __name__ == "__main__": - TestEmptyClipboard() - TestText() - TestCustomFormat() - TestClipboardEnum() - # And leave it empty at the end! 
- TestEmptyClipboard() diff --git a/lib/win32/Demos/win32clipboard_bitmapdemo.py b/lib/win32/Demos/win32clipboard_bitmapdemo.py deleted file mode 100644 index 7d453025..00000000 --- a/lib/win32/Demos/win32clipboard_bitmapdemo.py +++ /dev/null @@ -1,117 +0,0 @@ -import win32api -import win32clipboard -import win32con -import win32gui - - -class ViewerWindow: - def __init__(self): - self.hwndNextViewer = None - - def OnPaint(self, hwnd, msg, wp, lp): - dc, ps = win32gui.BeginPaint(hwnd) - wndrect = win32gui.GetClientRect(hwnd) - wndwidth = wndrect[2] - wndrect[0] - wndheight = wndrect[3] - wndrect[1] - win32clipboard.OpenClipboard() - try: - try: - hbitmap = win32clipboard.GetClipboardData(win32clipboard.CF_BITMAP) - except TypeError: - font = win32gui.LOGFONT() - font.lfHeight = 15 # int(wndheight/20) - font.lfWidth = 15 # font.lfHeight - # font.lfWeight=150 - hf = win32gui.CreateFontIndirect(font) - win32gui.SelectObject(dc, hf) - win32gui.SetBkMode(dc, win32con.TRANSPARENT) - win32gui.SetTextColor(dc, win32api.RGB(0, 0, 0)) - win32gui.DrawText( - dc, - "No bitmaps are in the clipboard\n(try pressing the PrtScn button)", - -1, - (0, 0, wndwidth, wndheight), - win32con.DT_CENTER, - ) - else: - bminfo = win32gui.GetObject(hbitmap) - dcDC = win32gui.CreateCompatibleDC(None) - win32gui.SelectObject(dcDC, hbitmap) - win32gui.StretchBlt( - dc, - 0, - 0, - wndwidth, - wndheight, - dcDC, - 0, - 0, - bminfo.bmWidth, - bminfo.bmHeight, - win32con.SRCCOPY, - ) - win32gui.DeleteDC(dcDC) - win32gui.EndPaint(hwnd, ps) - finally: - win32clipboard.CloseClipboard() - return 0 - - def OnDrawClipboard(self, hwnd, msg, wp, lp): - win32gui.InvalidateRect(hwnd, None, True) - - def OnChangeCBChain(self, hwnd, msg, wp, lp): - # If the next window is closing, repair the chain. - if wp == self.hwndNextViewer: - self.hwndNextViewer = lp - # Otherwise, pass the message to the next link. 
- elif self.hwndNextViewer: - win32gui.SendMessage(self.hwndNextViewer, msg, wp, lp) - - def OnCreate(self, hwnd, msg, wp, lp): - self.hwndNextViewer = win32gui.SetClipboardViewer(hwnd) - - def OnClose(self, hwnd, msg, wp, lp): - win32clipboard.ChangeClipboardChain(hwnd, self.hwndNextViewer) - win32gui.DestroyWindow(hwnd) - win32gui.PostQuitMessage(0) - - def go(self): - wndproc = { - win32con.WM_PAINT: self.OnPaint, - win32con.WM_CLOSE: self.OnClose, - win32con.WM_CREATE: self.OnCreate, - win32con.WM_DRAWCLIPBOARD: self.OnDrawClipboard, - win32con.WM_CHANGECBCHAIN: self.OnChangeCBChain, - } - - wc = win32gui.WNDCLASS() - wc.lpszClassName = "test_win32clipboard_bmp" - wc.style = win32con.CS_GLOBALCLASS | win32con.CS_VREDRAW | win32con.CS_HREDRAW - wc.hbrBackground = win32con.COLOR_WINDOW + 1 - wc.lpfnWndProc = wndproc - class_atom = win32gui.RegisterClass(wc) - hwnd = win32gui.CreateWindowEx( - 0, - class_atom, - "ClipboardViewer", - win32con.WS_CAPTION - | win32con.WS_VISIBLE - | win32con.WS_THICKFRAME - | win32con.WS_SYSMENU, - 100, - 100, - 900, - 900, - 0, - 0, - 0, - None, - ) - win32clipboard.SetClipboardViewer(hwnd) - win32gui.PumpMessages() - win32gui.UnregisterClass(class_atom, None) - - -if __name__ == "__main__": - w = ViewerWindow() - w.go() diff --git a/lib/win32/Demos/win32comport_demo.py b/lib/win32/Demos/win32comport_demo.py deleted file mode 100644 index c0be2391..00000000 --- a/lib/win32/Demos/win32comport_demo.py +++ /dev/null @@ -1,144 +0,0 @@ -# This is a simple serial port terminal demo. -# -# Its primary purpose is to demonstrate the native serial port access offered via -# win32file. - -# It uses 3 threads: -# - The main thread, which cranks up the other 2 threads, then simply waits for them to exit. -# - The user-input thread - blocks waiting for a keyboard character, and when found sends it -# out the COM port. If the character is Ctrl+C, it stops, signalling the COM port thread to stop. 
-# - The COM port thread is simply listening for input on the COM port, and prints it to the screen. - -# This demo uses userlapped IO, so that none of the read or write operations actually block (however, -# in this sample, the very next thing we do _is_ block - so it shows off the concepts even though it -# doesnt exploit them. - -import msvcrt # For the getch() function. -import sys -import threading - -import win32con # constants. -from win32event import * # We use events and the WaitFor[Multiple]Objects functions. -from win32file import * # The base COM port and file IO functions. - - -def FindModem(): - # Snoop over the comports, seeing if it is likely we have a modem. - for i in range(1, 5): - port = "COM%d" % (i,) - try: - handle = CreateFile( - port, - win32con.GENERIC_READ | win32con.GENERIC_WRITE, - 0, # exclusive access - None, # no security - win32con.OPEN_EXISTING, - win32con.FILE_ATTRIBUTE_NORMAL, - None, - ) - # It appears that an available COM port will always success here, - # just return 0 for the status flags. We only care that it has _any_ status - # flags (and therefore probably a real modem) - if GetCommModemStatus(handle) != 0: - return port - except error: - pass # No port, or modem status failed. - return None - - -# A basic synchronous COM port file-like object -class SerialTTY: - def __init__(self, port): - if type(port) == type(0): - port = "COM%d" % (port,) - self.handle = CreateFile( - port, - win32con.GENERIC_READ | win32con.GENERIC_WRITE, - 0, # exclusive access - None, # no security - win32con.OPEN_EXISTING, - win32con.FILE_ATTRIBUTE_NORMAL | win32con.FILE_FLAG_OVERLAPPED, - None, - ) - # Tell the port we want a notification on each char. - SetCommMask(self.handle, EV_RXCHAR) - # Setup a 4k buffer - SetupComm(self.handle, 4096, 4096) - # Remove anything that was there - PurgeComm( - self.handle, PURGE_TXABORT | PURGE_RXABORT | PURGE_TXCLEAR | PURGE_RXCLEAR - ) - # Setup for overlapped IO. 
- timeouts = 0xFFFFFFFF, 0, 1000, 0, 1000 - SetCommTimeouts(self.handle, timeouts) - # Setup the connection info. - dcb = GetCommState(self.handle) - dcb.BaudRate = CBR_115200 - dcb.ByteSize = 8 - dcb.Parity = NOPARITY - dcb.StopBits = ONESTOPBIT - SetCommState(self.handle, dcb) - print("Connected to %s at %s baud" % (port, dcb.BaudRate)) - - def _UserInputReaderThread(self): - overlapped = OVERLAPPED() - overlapped.hEvent = CreateEvent(None, 1, 0, None) - try: - while 1: - ch = msvcrt.getch() - if ord(ch) == 3: - break - WriteFile(self.handle, ch, overlapped) - # Wait for the write to complete. - WaitForSingleObject(overlapped.hEvent, INFINITE) - finally: - SetEvent(self.eventStop) - - def _ComPortThread(self): - overlapped = OVERLAPPED() - overlapped.hEvent = CreateEvent(None, 1, 0, None) - while 1: - # XXX - note we could _probably_ just use overlapped IO on the win32file.ReadFile() statement - # XXX but this tests the COM stuff! - rc, mask = WaitCommEvent(self.handle, overlapped) - if rc == 0: # Character already ready! - SetEvent(overlapped.hEvent) - rc = WaitForMultipleObjects( - [overlapped.hEvent, self.eventStop], 0, INFINITE - ) - if rc == WAIT_OBJECT_0: - # Some input - read and print it - flags, comstat = ClearCommError(self.handle) - rc, data = ReadFile(self.handle, comstat.cbInQue, overlapped) - WaitForSingleObject(overlapped.hEvent, INFINITE) - sys.stdout.write(data) - else: - # Stop the thread! - # Just incase the user input thread uis still going, close it - sys.stdout.close() - break - - def Run(self): - self.eventStop = CreateEvent(None, 0, 0, None) - # Start the reader and writer threads. 
- user_thread = threading.Thread(target=self._UserInputReaderThread) - user_thread.start() - com_thread = threading.Thread(target=self._ComPortThread) - com_thread.start() - user_thread.join() - com_thread.join() - - -if __name__ == "__main__": - print("Serial port terminal demo - press Ctrl+C to exit") - if len(sys.argv) <= 1: - port = FindModem() - if port is None: - print("No COM port specified, and no modem could be found") - print("Please re-run this script with the name of a COM port (eg COM3)") - sys.exit(1) - else: - port = sys.argv[1] - - tty = SerialTTY(port) - tty.Run() diff --git a/lib/win32/Demos/win32console_demo.py b/lib/win32/Demos/win32console_demo.py deleted file mode 100644 index bb9162bf..00000000 --- a/lib/win32/Demos/win32console_demo.py +++ /dev/null @@ -1,135 +0,0 @@ -import time - -import win32con -import win32console - -virtual_keys = {} -for k, v in list(win32con.__dict__.items()): - if k.startswith("VK_"): - virtual_keys[v] = k - -free_console = True -try: - win32console.AllocConsole() -except win32console.error as exc: - if exc.winerror != 5: - raise - ## only free console if one was created successfully - free_console = False - -stdout = win32console.GetStdHandle(win32console.STD_OUTPUT_HANDLE) -stdin = win32console.GetStdHandle(win32console.STD_INPUT_HANDLE) -newbuffer = win32console.CreateConsoleScreenBuffer() -newbuffer.SetConsoleActiveScreenBuffer() -newbuffer.SetConsoleTextAttribute( - win32console.FOREGROUND_RED - | win32console.FOREGROUND_INTENSITY - | win32console.BACKGROUND_GREEN - | win32console.BACKGROUND_INTENSITY -) -newbuffer.WriteConsole("This is a new screen buffer\n") - -## test setting screen buffer and window size -## screen buffer size cannot be smaller than window size -window_size = newbuffer.GetConsoleScreenBufferInfo()["Window"] -coord = win32console.PyCOORDType(X=window_size.Right + 20, Y=window_size.Bottom + 20) -newbuffer.SetConsoleScreenBufferSize(coord) - -window_size.Right += 10 -window_size.Bottom += 10 
-newbuffer.SetConsoleWindowInfo(Absolute=True, ConsoleWindow=window_size) - -## write some records to the input queue -x = win32console.PyINPUT_RECORDType(win32console.KEY_EVENT) -x.Char = "X" -x.KeyDown = True -x.RepeatCount = 1 -x.VirtualKeyCode = 0x58 -x.ControlKeyState = win32con.SHIFT_PRESSED - -z = win32console.PyINPUT_RECORDType(win32console.KEY_EVENT) -z.Char = "Z" -z.KeyDown = True -z.RepeatCount = 1 -z.VirtualKeyCode = 0x5A -z.ControlKeyState = win32con.SHIFT_PRESSED - -stdin.WriteConsoleInput([x, z, x]) - -newbuffer.SetConsoleTextAttribute( - win32console.FOREGROUND_RED - | win32console.FOREGROUND_INTENSITY - | win32console.BACKGROUND_GREEN - | win32console.BACKGROUND_INTENSITY -) -newbuffer.WriteConsole("Press some keys, click some characters with the mouse\n") - -newbuffer.SetConsoleTextAttribute( - win32console.FOREGROUND_BLUE - | win32console.FOREGROUND_INTENSITY - | win32console.BACKGROUND_RED - | win32console.BACKGROUND_INTENSITY -) -newbuffer.WriteConsole('Hit "End" key to quit\n') - -breakout = False -while not breakout: - input_records = stdin.ReadConsoleInput(10) - for input_record in input_records: - if input_record.EventType == win32console.KEY_EVENT: - if input_record.KeyDown: - if input_record.Char == "\0": - newbuffer.WriteConsole( - virtual_keys.get( - input_record.VirtualKeyCode, - "VirtualKeyCode: %s" % input_record.VirtualKeyCode, - ) - ) - else: - newbuffer.WriteConsole(input_record.Char) - if input_record.VirtualKeyCode == win32con.VK_END: - breakout = True - break - elif input_record.EventType == win32console.MOUSE_EVENT: - if input_record.EventFlags == 0: ## 0 indicates a button event - if input_record.ButtonState != 0: ## exclude button releases - pos = input_record.MousePosition - # switch the foreground and background colors of the character that was clicked - attr = newbuffer.ReadConsoleOutputAttribute( - Length=1, ReadCoord=pos - )[0] - new_attr = attr - if attr & win32console.FOREGROUND_BLUE: - new_attr = ( - new_attr & 
~win32console.FOREGROUND_BLUE - ) | win32console.BACKGROUND_BLUE - if attr & win32console.FOREGROUND_RED: - new_attr = ( - new_attr & ~win32console.FOREGROUND_RED - ) | win32console.BACKGROUND_RED - if attr & win32console.FOREGROUND_GREEN: - new_attr = ( - new_attr & ~win32console.FOREGROUND_GREEN - ) | win32console.BACKGROUND_GREEN - - if attr & win32console.BACKGROUND_BLUE: - new_attr = ( - new_attr & ~win32console.BACKGROUND_BLUE - ) | win32console.FOREGROUND_BLUE - if attr & win32console.BACKGROUND_RED: - new_attr = ( - new_attr & ~win32console.BACKGROUND_RED - ) | win32console.FOREGROUND_RED - if attr & win32console.BACKGROUND_GREEN: - new_attr = ( - new_attr & ~win32console.BACKGROUND_GREEN - ) | win32console.FOREGROUND_GREEN - newbuffer.WriteConsoleOutputAttribute((new_attr,), pos) - else: - newbuffer.WriteConsole(str(input_record)) - time.sleep(0.1) - -stdout.SetConsoleActiveScreenBuffer() -newbuffer.Close() -if free_console: - win32console.FreeConsole() diff --git a/lib/win32/Demos/win32cred_demo.py b/lib/win32/Demos/win32cred_demo.py deleted file mode 100644 index 3b49ca7c..00000000 --- a/lib/win32/Demos/win32cred_demo.py +++ /dev/null @@ -1,82 +0,0 @@ -""" -Demonstrates prompting for credentials, saving, and loggging on with marshalled credential. 
-Also shows how to load user's profile -""" - -import win32api -import win32con -import win32cred -import win32net -import win32profile -import win32security - -## Prompt for a username/pwd for local computer -uiinfo = { - "MessageText": "Enter credentials for local machine", - "CaptionText": "win32cred_demo.py", -} -target, pwd, save = win32cred.CredUIPromptForCredentials( - TargetName=win32api.GetComputerName(), - AuthError=0, - Flags=win32cred.CREDUI_FLAGS_DO_NOT_PERSIST - | win32cred.CREDUI_FLAGS_SHOW_SAVE_CHECK_BOX, - Save=False, - UiInfo=uiinfo, -) - -attrs = [ - {"Keyword": "attr1", "Flags": 0, "Value": "unicode data"}, - {"Keyword": "attr2", "Flags": 0, "Value": b"character data"}, -] -cred = { - "Comment": "Created by win32cred_demo.py", - "UserName": target, - "TargetAlias": None, - "TargetName": target, - "CredentialBlob": pwd, - "Flags": win32cred.CRED_FLAGS_USERNAME_TARGET, - "Persist": win32cred.CRED_PERSIST_ENTERPRISE, - "Type": win32cred.CRED_TYPE_DOMAIN_PASSWORD, - "Attributes": attrs, -} -win32cred.CredWrite(cred) -pwd = None -print(win32cred.CredRead(target, win32cred.CRED_TYPE_DOMAIN_PASSWORD)) - -## Marshal saved credential and use it to log on -mc = win32cred.CredMarshalCredential(win32cred.UsernameTargetCredential, target) - -# As of pywin32 301 this no longer works for markh and unclear when it stopped, or -# even if it ever did! # Fails in Python 2.7 too, so not a 3.x regression. -try: - th = win32security.LogonUser( - mc, - None, - "", - win32con.LOGON32_LOGON_INTERACTIVE, - win32con.LOGON32_PROVIDER_DEFAULT, - ) - win32security.ImpersonateLoggedOnUser(th) - print("GetUserName:", win32api.GetUserName()) - win32security.RevertToSelf() - - ## Load user's profile. 
(first check if user has a roaming profile) - username, domain = win32cred.CredUIParseUserName(target) - user_info_4 = win32net.NetUserGetInfo(None, username, 4) - profilepath = user_info_4["profile"] - ## LoadUserProfile apparently doesn't like an empty string - if not profilepath: - profilepath = None - - ## leave Flags in since 2.3 still chokes on some types of optional keyword args - hk = win32profile.LoadUserProfile( - th, {"UserName": username, "Flags": 0, "ProfilePath": profilepath} - ) - ## Get user's environment variables in a form that can be passed to win32process.CreateProcessAsUser - env = win32profile.CreateEnvironmentBlock(th, False) - - ## Cleanup should probably be in a finally block - win32profile.UnloadUserProfile(th, hk) - th.Close() -except win32security.error as exc: - print("Failed to login for some reason", exc) diff --git a/lib/win32/Demos/win32fileDemo.py b/lib/win32/Demos/win32fileDemo.py deleted file mode 100644 index a8099925..00000000 --- a/lib/win32/Demos/win32fileDemo.py +++ /dev/null @@ -1,41 +0,0 @@ -# This is a "demo" of win32file - it used to be more a test case than a -# demo, so has been moved to the test directory. - -import os - -# Please contribute your favourite simple little demo. -import win32api -import win32con -import win32file - - -# A very simple demo - note that this does no more than you can do with -# builtin Python file objects, so for something as simple as this, you -# generally *should* use builtin Python objects. Only use win32file etc -# when you need win32 specific features not available in Python. -def SimpleFileDemo(): - testName = os.path.join(win32api.GetTempPath(), "win32file_demo_test_file") - if os.path.exists(testName): - os.unlink(testName) - # Open the file for writing. 
- handle = win32file.CreateFile( - testName, win32file.GENERIC_WRITE, 0, None, win32con.CREATE_NEW, 0, None - ) - test_data = "Hello\0there".encode("ascii") - win32file.WriteFile(handle, test_data) - handle.Close() - # Open it for reading. - handle = win32file.CreateFile( - testName, win32file.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None - ) - rc, data = win32file.ReadFile(handle, 1024) - handle.Close() - if data == test_data: - print("Successfully wrote and read a file") - else: - raise Exception("Got different data back???") - os.unlink(testName) - - -if __name__ == "__main__": - SimpleFileDemo() diff --git a/lib/win32/Demos/win32gui_demo.py b/lib/win32/Demos/win32gui_demo.py deleted file mode 100644 index 41844a92..00000000 --- a/lib/win32/Demos/win32gui_demo.py +++ /dev/null @@ -1,177 +0,0 @@ -# The start of a win32gui generic demo. -# Feel free to contribute more demos back ;-) - -import math -import random -import time - -import win32api -import win32con -import win32gui - - -def _MyCallback(hwnd, extra): - hwnds, classes = extra - hwnds.append(hwnd) - classes[win32gui.GetClassName(hwnd)] = 1 - - -def TestEnumWindows(): - windows = [] - classes = {} - win32gui.EnumWindows(_MyCallback, (windows, classes)) - print( - "Enumerated a total of %d windows with %d classes" - % (len(windows), len(classes)) - ) - if "tooltips_class32" not in classes: - print("Hrmmmm - I'm very surprised to not find a 'tooltips_class32' class.") - - -def OnPaint_1(hwnd, msg, wp, lp): - dc, ps = win32gui.BeginPaint(hwnd) - win32gui.SetGraphicsMode(dc, win32con.GM_ADVANCED) - br = win32gui.CreateSolidBrush(win32api.RGB(255, 0, 0)) - win32gui.SelectObject(dc, br) - angle = win32gui.GetWindowLong(hwnd, win32con.GWL_USERDATA) - win32gui.SetWindowLong(hwnd, win32con.GWL_USERDATA, angle + 2) - r_angle = angle * (math.pi / 180) - win32gui.SetWorldTransform( - dc, - { - "M11": math.cos(r_angle), - "M12": math.sin(r_angle), - "M21": math.sin(r_angle) * -1, - "M22": math.cos(r_angle), - 
"Dx": 250, - "Dy": 250, - }, - ) - win32gui.MoveToEx(dc, 250, 250) - win32gui.BeginPath(dc) - win32gui.Pie(dc, 10, 70, 200, 200, 350, 350, 75, 10) - win32gui.Chord(dc, 200, 200, 850, 0, 350, 350, 75, 10) - win32gui.LineTo(dc, 300, 300) - win32gui.LineTo(dc, 100, 20) - win32gui.LineTo(dc, 20, 100) - win32gui.LineTo(dc, 400, 0) - win32gui.LineTo(dc, 0, 400) - win32gui.EndPath(dc) - win32gui.StrokeAndFillPath(dc) - win32gui.EndPaint(hwnd, ps) - return 0 - - -wndproc_1 = {win32con.WM_PAINT: OnPaint_1} - - -def OnPaint_2(hwnd, msg, wp, lp): - dc, ps = win32gui.BeginPaint(hwnd) - win32gui.SetGraphicsMode(dc, win32con.GM_ADVANCED) - l, t, r, b = win32gui.GetClientRect(hwnd) - - for x in range(25): - vertices = ( - { - "x": int(random.random() * r), - "y": int(random.random() * b), - "Red": int(random.random() * 0xFF00), - "Green": 0, - "Blue": 0, - "Alpha": 0, - }, - { - "x": int(random.random() * r), - "y": int(random.random() * b), - "Red": 0, - "Green": int(random.random() * 0xFF00), - "Blue": 0, - "Alpha": 0, - }, - { - "x": int(random.random() * r), - "y": int(random.random() * b), - "Red": 0, - "Green": 0, - "Blue": int(random.random() * 0xFF00), - "Alpha": 0, - }, - ) - mesh = ((0, 1, 2),) - win32gui.GradientFill(dc, vertices, mesh, win32con.GRADIENT_FILL_TRIANGLE) - win32gui.EndPaint(hwnd, ps) - return 0 - - -wndproc_2 = {win32con.WM_PAINT: OnPaint_2} - - -def TestSetWorldTransform(): - wc = win32gui.WNDCLASS() - wc.lpszClassName = "test_win32gui_1" - wc.style = win32con.CS_GLOBALCLASS | win32con.CS_VREDRAW | win32con.CS_HREDRAW - wc.hbrBackground = win32con.COLOR_WINDOW + 1 - wc.lpfnWndProc = wndproc_1 - class_atom = win32gui.RegisterClass(wc) - hwnd = win32gui.CreateWindow( - wc.lpszClassName, - "Spin the Lobster!", - win32con.WS_CAPTION | win32con.WS_VISIBLE, - 100, - 100, - 900, - 900, - 0, - 0, - 0, - None, - ) - for x in range(500): - win32gui.InvalidateRect(hwnd, None, True) - win32gui.PumpWaitingMessages() - time.sleep(0.01) - win32gui.DestroyWindow(hwnd) 
- win32gui.UnregisterClass(wc.lpszClassName, None) - - -def TestGradientFill(): - wc = win32gui.WNDCLASS() - wc.lpszClassName = "test_win32gui_2" - wc.style = win32con.CS_GLOBALCLASS | win32con.CS_VREDRAW | win32con.CS_HREDRAW - wc.hbrBackground = win32con.COLOR_WINDOW + 1 - wc.lpfnWndProc = wndproc_2 - class_atom = win32gui.RegisterClass(wc) - hwnd = win32gui.CreateWindowEx( - 0, - class_atom, - "Kaleidoscope", - win32con.WS_CAPTION - | win32con.WS_VISIBLE - | win32con.WS_THICKFRAME - | win32con.WS_SYSMENU, - 100, - 100, - 900, - 900, - 0, - 0, - 0, - None, - ) - s = win32gui.GetWindowLong(hwnd, win32con.GWL_EXSTYLE) - win32gui.SetWindowLong(hwnd, win32con.GWL_EXSTYLE, s | win32con.WS_EX_LAYERED) - win32gui.SetLayeredWindowAttributes(hwnd, 0, 175, win32con.LWA_ALPHA) - for x in range(30): - win32gui.InvalidateRect(hwnd, None, True) - win32gui.PumpWaitingMessages() - time.sleep(0.3) - win32gui.DestroyWindow(hwnd) - win32gui.UnregisterClass(class_atom, None) - - -print("Enumerating all windows...") -TestEnumWindows() -print("Testing drawing functions ...") -TestSetWorldTransform() -TestGradientFill() -print("All tests done!") diff --git a/lib/win32/Demos/win32gui_devicenotify.py b/lib/win32/Demos/win32gui_devicenotify.py deleted file mode 100644 index 34ad8a25..00000000 --- a/lib/win32/Demos/win32gui_devicenotify.py +++ /dev/null @@ -1,107 +0,0 @@ -# Demo RegisterDeviceNotification etc. Creates a hidden window to receive -# notifications. See serviceEvents.py for an example of a service doing -# that. -import sys -import time - -import win32api -import win32con -import win32file -import win32gui -import win32gui_struct -import winnt - -# These device GUIDs are from Ioevent.h in the Windows SDK. Ideally they -# could be collected somewhere for pywin32... -GUID_DEVINTERFACE_USB_DEVICE = "{A5DCBF10-6530-11D2-901F-00C04FB951ED}" - - -# WM_DEVICECHANGE message handler. 
-def OnDeviceChange(hwnd, msg, wp, lp): - # Unpack the 'lp' into the appropriate DEV_BROADCAST_* structure, - # using the self-identifying data inside the DEV_BROADCAST_HDR. - info = win32gui_struct.UnpackDEV_BROADCAST(lp) - print("Device change notification:", wp, str(info)) - if ( - wp == win32con.DBT_DEVICEQUERYREMOVE - and info.devicetype == win32con.DBT_DEVTYP_HANDLE - ): - # Our handle is stored away in the structure - just close it - print("Device being removed - closing handle") - win32file.CloseHandle(info.handle) - # and cancel our notifications - if it gets plugged back in we get - # the same notification and try and close the same handle... - win32gui.UnregisterDeviceNotification(info.hdevnotify) - return True - - -def TestDeviceNotifications(dir_names): - wc = win32gui.WNDCLASS() - wc.lpszClassName = "test_devicenotify" - wc.style = win32con.CS_GLOBALCLASS | win32con.CS_VREDRAW | win32con.CS_HREDRAW - wc.hbrBackground = win32con.COLOR_WINDOW + 1 - wc.lpfnWndProc = {win32con.WM_DEVICECHANGE: OnDeviceChange} - class_atom = win32gui.RegisterClass(wc) - hwnd = win32gui.CreateWindow( - wc.lpszClassName, - "Testing some devices", - # no need for it to be visible. - win32con.WS_CAPTION, - 100, - 100, - 900, - 900, - 0, - 0, - 0, - None, - ) - - hdevs = [] - # Watch for all USB device notifications - filter = win32gui_struct.PackDEV_BROADCAST_DEVICEINTERFACE( - GUID_DEVINTERFACE_USB_DEVICE - ) - hdev = win32gui.RegisterDeviceNotification( - hwnd, filter, win32con.DEVICE_NOTIFY_WINDOW_HANDLE - ) - hdevs.append(hdev) - # and create handles for all specified directories - for d in dir_names: - hdir = win32file.CreateFile( - d, - winnt.FILE_LIST_DIRECTORY, - winnt.FILE_SHARE_READ | winnt.FILE_SHARE_WRITE | winnt.FILE_SHARE_DELETE, - None, # security attributes - win32con.OPEN_EXISTING, - win32con.FILE_FLAG_BACKUP_SEMANTICS - | win32con.FILE_FLAG_OVERLAPPED, # required privileges: SE_BACKUP_NAME and SE_RESTORE_NAME. 
- None, - ) - - filter = win32gui_struct.PackDEV_BROADCAST_HANDLE(hdir) - hdev = win32gui.RegisterDeviceNotification( - hwnd, filter, win32con.DEVICE_NOTIFY_WINDOW_HANDLE - ) - hdevs.append(hdev) - - # now start a message pump and wait for messages to be delivered. - print("Watching", len(hdevs), "handles - press Ctrl+C to terminate, or") - print("add and remove some USB devices...") - if not dir_names: - print("(Note you can also pass paths to watch on the command-line - eg,") - print("pass the root of an inserted USB stick to see events specific to") - print("that volume)") - while 1: - win32gui.PumpWaitingMessages() - time.sleep(0.01) - win32gui.DestroyWindow(hwnd) - win32gui.UnregisterClass(wc.lpszClassName, None) - - -if __name__ == "__main__": - # optionally pass device/directory names to watch for notifications. - # Eg, plug in a USB device - assume it connects as E: - then execute: - # % win32gui_devicenotify.py E: - # Then remove and insert the device. - TestDeviceNotifications(sys.argv[1:]) diff --git a/lib/win32/Demos/win32gui_dialog.py b/lib/win32/Demos/win32gui_dialog.py deleted file mode 100644 index e3b89ba9..00000000 --- a/lib/win32/Demos/win32gui_dialog.py +++ /dev/null @@ -1,458 +0,0 @@ -# A demo of a fairly complex dialog. -# -# Features: -# * Uses a "dynamic dialog resource" to build the dialog. -# * Uses a ListView control. -# * Dynamically resizes content. -# * Uses a second worker thread to fill the list. -# * Demostrates support for windows XP themes. - -# If you are on Windows XP, and specify a '--noxp' argument, you will see: -# * alpha-blend issues with icons -# * The buttons are "old" style, rather than based on the XP theme. -# Hence, using: -# import winxpgui as win32gui -# is recommended. -# Please report any problems. 
-import sys - -if "--noxp" in sys.argv: - import win32gui -else: - import winxpgui as win32gui - -import array -import os -import queue -import struct - -import commctrl -import win32api -import win32con -import win32gui_struct -import winerror - -IDC_SEARCHTEXT = 1024 -IDC_BUTTON_SEARCH = 1025 -IDC_BUTTON_DISPLAY = 1026 -IDC_LISTBOX = 1027 - -WM_SEARCH_RESULT = win32con.WM_USER + 512 -WM_SEARCH_FINISHED = win32con.WM_USER + 513 - - -class _WIN32MASKEDSTRUCT: - def __init__(self, **kw): - full_fmt = "" - for name, fmt, default, mask in self._struct_items_: - self.__dict__[name] = None - if fmt == "z": - full_fmt += "pi" - else: - full_fmt += fmt - for name, val in kw.items(): - if name not in self.__dict__: - raise ValueError("LVITEM structures do not have an item '%s'" % (name,)) - self.__dict__[name] = val - - def __setattr__(self, attr, val): - if not attr.startswith("_") and attr not in self.__dict__: - raise AttributeError(attr) - self.__dict__[attr] = val - - def toparam(self): - self._buffs = [] - full_fmt = "" - vals = [] - mask = 0 - # calc the mask - for name, fmt, default, this_mask in self._struct_items_: - if this_mask is not None and self.__dict__.get(name) is not None: - mask |= this_mask - self.mask = mask - for name, fmt, default, this_mask in self._struct_items_: - val = self.__dict__[name] - if fmt == "z": - fmt = "Pi" - if val is None: - vals.append(0) - vals.append(0) - else: - # Note this demo still works with byte strings. An - # alternate strategy would be to use unicode natively - # and use the 'W' version of the messages - eg, - # LVM_SETITEMW etc. - val = val + "\0" - if isinstance(val, str): - val = val.encode("mbcs") - str_buf = array.array("b", val) - vals.append(str_buf.buffer_info()[0]) - vals.append(len(val)) - self._buffs.append(str_buf) # keep alive during the call. 
- else: - if val is None: - val = default - vals.append(val) - full_fmt += fmt - return struct.pack(*(full_fmt,) + tuple(vals)) - - -# NOTE: See the win32gui_struct module for an alternative way of dealing -# with these structures -class LVITEM(_WIN32MASKEDSTRUCT): - _struct_items_ = [ - ("mask", "I", 0, None), - ("iItem", "i", 0, None), - ("iSubItem", "i", 0, None), - ("state", "I", 0, commctrl.LVIF_STATE), - ("stateMask", "I", 0, None), - ("text", "z", None, commctrl.LVIF_TEXT), - ("iImage", "i", 0, commctrl.LVIF_IMAGE), - ("lParam", "i", 0, commctrl.LVIF_PARAM), - ("iIdent", "i", 0, None), - ] - - -class LVCOLUMN(_WIN32MASKEDSTRUCT): - _struct_items_ = [ - ("mask", "I", 0, None), - ("fmt", "i", 0, commctrl.LVCF_FMT), - ("cx", "i", 0, commctrl.LVCF_WIDTH), - ("text", "z", None, commctrl.LVCF_TEXT), - ("iSubItem", "i", 0, commctrl.LVCF_SUBITEM), - ("iImage", "i", 0, commctrl.LVCF_IMAGE), - ("iOrder", "i", 0, commctrl.LVCF_ORDER), - ] - - -class DemoWindowBase: - def __init__(self): - win32gui.InitCommonControls() - self.hinst = win32gui.dllhandle - self.list_data = {} - - def _RegisterWndClass(self): - className = "PythonDocSearch" - message_map = {} - wc = win32gui.WNDCLASS() - wc.SetDialogProc() # Make it a dialog class. - wc.hInstance = self.hinst - wc.lpszClassName = className - wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW - wc.hCursor = win32gui.LoadCursor(0, win32con.IDC_ARROW) - wc.hbrBackground = win32con.COLOR_WINDOW + 1 - wc.lpfnWndProc = message_map # could also specify a wndproc. 
- # C code: wc.cbWndExtra = DLGWINDOWEXTRA + sizeof(HBRUSH) + (sizeof(COLORREF)); - wc.cbWndExtra = win32con.DLGWINDOWEXTRA + struct.calcsize("Pi") - icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE - - ## py.ico went away in python 2.5, load from executable instead - this_app = win32api.GetModuleHandle(None) - try: - wc.hIcon = win32gui.LoadIcon(this_app, 1) ## python.exe and pythonw.exe - except win32gui.error: - wc.hIcon = win32gui.LoadIcon(this_app, 135) ## pythonwin's icon - try: - classAtom = win32gui.RegisterClass(wc) - except win32gui.error as err_info: - if err_info.winerror != winerror.ERROR_CLASS_ALREADY_EXISTS: - raise - return className - - def _GetDialogTemplate(self, dlgClassName): - style = ( - win32con.WS_THICKFRAME - | win32con.WS_POPUP - | win32con.WS_VISIBLE - | win32con.WS_CAPTION - | win32con.WS_SYSMENU - | win32con.DS_SETFONT - | win32con.WS_MINIMIZEBOX - ) - cs = win32con.WS_CHILD | win32con.WS_VISIBLE - title = "Dynamic Dialog Demo" - - # Window frame and title - dlg = [ - [ - title, - (0, 0, 210, 250), - style, - None, - (8, "MS Sans Serif"), - None, - dlgClassName, - ], - ] - - # ID label and text box - dlg.append([130, "Enter something", -1, (5, 5, 200, 9), cs | win32con.SS_LEFT]) - s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER - dlg.append(["EDIT", None, IDC_SEARCHTEXT, (5, 15, 200, 12), s]) - - # Search/Display Buttons - # (x positions don't matter here) - s = cs | win32con.WS_TABSTOP - dlg.append( - [ - 128, - "Fill List", - IDC_BUTTON_SEARCH, - (5, 35, 50, 14), - s | win32con.BS_DEFPUSHBUTTON, - ] - ) - s = win32con.BS_PUSHBUTTON | s - dlg.append([128, "Display", IDC_BUTTON_DISPLAY, (100, 35, 50, 14), s]) - - # List control. 
- # Can't make this work :( - ## s = cs | win32con.WS_TABSTOP - ## dlg.append(['SysListView32', "Title", IDC_LISTBOX, (5, 505, 200, 200), s]) - - return dlg - - def _DoCreate(self, fn): - message_map = { - win32con.WM_SIZE: self.OnSize, - win32con.WM_COMMAND: self.OnCommand, - win32con.WM_NOTIFY: self.OnNotify, - win32con.WM_INITDIALOG: self.OnInitDialog, - win32con.WM_CLOSE: self.OnClose, - win32con.WM_DESTROY: self.OnDestroy, - WM_SEARCH_RESULT: self.OnSearchResult, - WM_SEARCH_FINISHED: self.OnSearchFinished, - } - dlgClassName = self._RegisterWndClass() - template = self._GetDialogTemplate(dlgClassName) - return fn(self.hinst, template, 0, message_map) - - def _SetupList(self): - child_style = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | win32con.WS_BORDER - | win32con.WS_HSCROLL - | win32con.WS_VSCROLL - ) - child_style |= ( - commctrl.LVS_SINGLESEL | commctrl.LVS_SHOWSELALWAYS | commctrl.LVS_REPORT - ) - self.hwndList = win32gui.CreateWindow( - "SysListView32", - None, - child_style, - 0, - 0, - 100, - 100, - self.hwnd, - IDC_LISTBOX, - self.hinst, - None, - ) - - child_ex_style = win32gui.SendMessage( - self.hwndList, commctrl.LVM_GETEXTENDEDLISTVIEWSTYLE, 0, 0 - ) - child_ex_style |= commctrl.LVS_EX_FULLROWSELECT - win32gui.SendMessage( - self.hwndList, commctrl.LVM_SETEXTENDEDLISTVIEWSTYLE, 0, child_ex_style - ) - - # Add an image list - use the builtin shell folder icon - this - # demonstrates the problem with alpha-blending of icons on XP if - # winxpgui is not used in place of win32gui. 
- il = win32gui.ImageList_Create( - win32api.GetSystemMetrics(win32con.SM_CXSMICON), - win32api.GetSystemMetrics(win32con.SM_CYSMICON), - commctrl.ILC_COLOR32 | commctrl.ILC_MASK, - 1, # initial size - 0, - ) # cGrow - - shell_dll = os.path.join(win32api.GetSystemDirectory(), "shell32.dll") - large, small = win32gui.ExtractIconEx(shell_dll, 4, 1) - win32gui.ImageList_ReplaceIcon(il, -1, small[0]) - win32gui.DestroyIcon(small[0]) - win32gui.DestroyIcon(large[0]) - win32gui.SendMessage( - self.hwndList, commctrl.LVM_SETIMAGELIST, commctrl.LVSIL_SMALL, il - ) - - # Setup the list control columns. - lvc = LVCOLUMN( - mask=commctrl.LVCF_FMT - | commctrl.LVCF_WIDTH - | commctrl.LVCF_TEXT - | commctrl.LVCF_SUBITEM - ) - lvc.fmt = commctrl.LVCFMT_LEFT - lvc.iSubItem = 1 - lvc.text = "Title" - lvc.cx = 200 - win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTCOLUMN, 0, lvc.toparam()) - lvc.iSubItem = 0 - lvc.text = "Order" - lvc.cx = 50 - win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTCOLUMN, 0, lvc.toparam()) - - win32gui.UpdateWindow(self.hwnd) - - def ClearListItems(self): - win32gui.SendMessage(self.hwndList, commctrl.LVM_DELETEALLITEMS) - self.list_data = {} - - def AddListItem(self, data, *columns): - num_items = win32gui.SendMessage(self.hwndList, commctrl.LVM_GETITEMCOUNT) - item = LVITEM(text=columns[0], iItem=num_items) - new_index = win32gui.SendMessage( - self.hwndList, commctrl.LVM_INSERTITEM, 0, item.toparam() - ) - col_no = 1 - for col in columns[1:]: - item = LVITEM(text=col, iItem=new_index, iSubItem=col_no) - win32gui.SendMessage(self.hwndList, commctrl.LVM_SETITEM, 0, item.toparam()) - col_no += 1 - self.list_data[new_index] = data - - def OnInitDialog(self, hwnd, msg, wparam, lparam): - self.hwnd = hwnd - # centre the dialog - desktop = win32gui.GetDesktopWindow() - l, t, r, b = win32gui.GetWindowRect(self.hwnd) - dt_l, dt_t, dt_r, dt_b = win32gui.GetWindowRect(desktop) - centre_x, centre_y = win32gui.ClientToScreen( - desktop, ((dt_r - dt_l) 
// 2, (dt_b - dt_t) // 2) - ) - win32gui.MoveWindow( - hwnd, centre_x - (r // 2), centre_y - (b // 2), r - l, b - t, 0 - ) - self._SetupList() - l, t, r, b = win32gui.GetClientRect(self.hwnd) - self._DoSize(r - l, b - t, 1) - - def _DoSize(self, cx, cy, repaint=1): - # right-justify the textbox. - ctrl = win32gui.GetDlgItem(self.hwnd, IDC_SEARCHTEXT) - l, t, r, b = win32gui.GetWindowRect(ctrl) - l, t = win32gui.ScreenToClient(self.hwnd, (l, t)) - r, b = win32gui.ScreenToClient(self.hwnd, (r, b)) - win32gui.MoveWindow(ctrl, l, t, cx - l - 5, b - t, repaint) - # The button. - ctrl = win32gui.GetDlgItem(self.hwnd, IDC_BUTTON_DISPLAY) - l, t, r, b = win32gui.GetWindowRect(ctrl) - l, t = win32gui.ScreenToClient(self.hwnd, (l, t)) - r, b = win32gui.ScreenToClient(self.hwnd, (r, b)) - list_y = b + 10 - w = r - l - win32gui.MoveWindow(ctrl, cx - 5 - w, t, w, b - t, repaint) - - # The list control - win32gui.MoveWindow(self.hwndList, 0, list_y, cx, cy - list_y, repaint) - # The last column of the list control. 
- new_width = cx - win32gui.SendMessage( - self.hwndList, commctrl.LVM_GETCOLUMNWIDTH, 0 - ) - win32gui.SendMessage(self.hwndList, commctrl.LVM_SETCOLUMNWIDTH, 1, new_width) - - def OnSize(self, hwnd, msg, wparam, lparam): - x = win32api.LOWORD(lparam) - y = win32api.HIWORD(lparam) - self._DoSize(x, y) - return 1 - - def OnSearchResult(self, hwnd, msg, wparam, lparam): - try: - while 1: - params = self.result_queue.get(0) - self.AddListItem(*params) - except queue.Empty: - pass - - def OnSearchFinished(self, hwnd, msg, wparam, lparam): - print("OnSearchFinished") - - def OnNotify(self, hwnd, msg, wparam, lparam): - info = win32gui_struct.UnpackNMITEMACTIVATE(lparam) - if info.code == commctrl.NM_DBLCLK: - print("Double click on item", info.iItem + 1) - return 1 - - def OnCommand(self, hwnd, msg, wparam, lparam): - id = win32api.LOWORD(wparam) - if id == IDC_BUTTON_SEARCH: - self.ClearListItems() - - def fill_slowly(q, hwnd): - import time - - for i in range(20): - q.put(("whatever", str(i + 1), "Search result " + str(i))) - win32gui.PostMessage(hwnd, WM_SEARCH_RESULT, 0, 0) - time.sleep(0.25) - win32gui.PostMessage(hwnd, WM_SEARCH_FINISHED, 0, 0) - - import threading - - self.result_queue = queue.Queue() - thread = threading.Thread( - target=fill_slowly, args=(self.result_queue, self.hwnd) - ) - thread.start() - elif id == IDC_BUTTON_DISPLAY: - print("Display button selected") - sel = win32gui.SendMessage( - self.hwndList, commctrl.LVM_GETNEXTITEM, -1, commctrl.LVNI_SELECTED - ) - print("The selected item is", sel + 1) - - # These function differ based on how the window is used, so may be overridden - def OnClose(self, hwnd, msg, wparam, lparam): - raise NotImplementedError - - def OnDestroy(self, hwnd, msg, wparam, lparam): - pass - - -# An implementation suitable for use with the Win32 Window functions (ie, not -# a true dialog) -class DemoWindow(DemoWindowBase): - def CreateWindow(self): - # Create the window via CreateDialogBoxIndirect - it can then - # work as 
a "normal" window, once a message loop is established. - self._DoCreate(win32gui.CreateDialogIndirect) - - def OnClose(self, hwnd, msg, wparam, lparam): - win32gui.DestroyWindow(hwnd) - - # We need to arrange to a WM_QUIT message to be sent to our - # PumpMessages() loop. - def OnDestroy(self, hwnd, msg, wparam, lparam): - win32gui.PostQuitMessage(0) # Terminate the app. - - -# An implementation suitable for use with the Win32 Dialog functions. -class DemoDialog(DemoWindowBase): - def DoModal(self): - return self._DoCreate(win32gui.DialogBoxIndirect) - - def OnClose(self, hwnd, msg, wparam, lparam): - win32gui.EndDialog(hwnd, 0) - - -def DemoModal(): - w = DemoDialog() - w.DoModal() - - -def DemoCreateWindow(): - w = DemoWindow() - w.CreateWindow() - # PumpMessages runs until PostQuitMessage() is called by someone. - win32gui.PumpMessages() - - -if __name__ == "__main__": - DemoModal() - DemoCreateWindow() diff --git a/lib/win32/Demos/win32gui_menu.py b/lib/win32/Demos/win32gui_menu.py deleted file mode 100644 index c462b062..00000000 --- a/lib/win32/Demos/win32gui_menu.py +++ /dev/null @@ -1,420 +0,0 @@ -# Demonstrates some advanced menu concepts using win32gui. -# This creates a taskbar icon which has some fancy menus (but note that -# selecting the menu items does nothing useful - see win32gui_taskbar.py -# for examples of this. - -# NOTE: This is a work in progress. Todo: -# * The "Checked" menu items don't work correctly - I'm not sure why. -# * No support for GetMenuItemInfo. - -# Based on Andy McKay's demo code. -from win32api import * - -# Try and use XP features, so we get alpha-blending etc. 
-try: - from winxpgui import * -except ImportError: - from win32gui import * - -import array -import os -import struct -import sys - -import win32con -from win32gui_struct import * - -this_dir = os.path.split(sys.argv[0])[0] - - -class MainWindow: - def __init__(self): - message_map = { - win32con.WM_DESTROY: self.OnDestroy, - win32con.WM_COMMAND: self.OnCommand, - win32con.WM_USER + 20: self.OnTaskbarNotify, - # owner-draw related handlers. - win32con.WM_MEASUREITEM: self.OnMeasureItem, - win32con.WM_DRAWITEM: self.OnDrawItem, - } - # Register the Window class. - wc = WNDCLASS() - hinst = wc.hInstance = GetModuleHandle(None) - wc.lpszClassName = "PythonTaskbarDemo" - wc.lpfnWndProc = message_map # could also specify a wndproc. - classAtom = RegisterClass(wc) - # Create the Window. - style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU - self.hwnd = CreateWindow( - classAtom, - "Taskbar Demo", - style, - 0, - 0, - win32con.CW_USEDEFAULT, - win32con.CW_USEDEFAULT, - 0, - 0, - hinst, - None, - ) - UpdateWindow(self.hwnd) - iconPathName = os.path.abspath(os.path.join(sys.prefix, "pyc.ico")) - # py2.5 includes the .ico files in the DLLs dir for some reason. - if not os.path.isfile(iconPathName): - iconPathName = os.path.abspath( - os.path.join(os.path.split(sys.executable)[0], "DLLs", "pyc.ico") - ) - if not os.path.isfile(iconPathName): - # Look in the source tree. - iconPathName = os.path.abspath( - os.path.join(os.path.split(sys.executable)[0], "..\\PC\\pyc.ico") - ) - if os.path.isfile(iconPathName): - icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE - hicon = LoadImage( - hinst, iconPathName, win32con.IMAGE_ICON, 0, 0, icon_flags - ) - else: - iconPathName = None - print("Can't find a Python icon file - using default") - hicon = LoadIcon(0, win32con.IDI_APPLICATION) - self.iconPathName = iconPathName - - # Load up some information about menus needed by our owner-draw code. - # The font to use on the menu. 
- ncm = SystemParametersInfo(win32con.SPI_GETNONCLIENTMETRICS) - self.font_menu = CreateFontIndirect(ncm["lfMenuFont"]) - # spacing for our ownerdraw menus - not sure exactly what constants - # should be used (and if you owner-draw all items on the menu, it - # doesn't matter!) - self.menu_icon_height = GetSystemMetrics(win32con.SM_CYMENU) - 4 - self.menu_icon_width = self.menu_icon_height - self.icon_x_pad = 8 # space from end of icon to start of text. - # A map we use to stash away data we need for ownerdraw. Keyed - # by integer ID - that ID will be set in dwTypeData of the menu item. - self.menu_item_map = {} - - # Finally, create the menu - self.createMenu() - - flags = NIF_ICON | NIF_MESSAGE | NIF_TIP - nid = (self.hwnd, 0, flags, win32con.WM_USER + 20, hicon, "Python Demo") - Shell_NotifyIcon(NIM_ADD, nid) - print("Please right-click on the Python icon in the taskbar") - - def createMenu(self): - self.hmenu = menu = CreatePopupMenu() - # Create our 'Exit' item with the standard, ugly 'close' icon. - item, extras = PackMENUITEMINFO( - text="Exit", hbmpItem=win32con.HBMMENU_MBAR_CLOSE, wID=1000 - ) - InsertMenuItem(menu, 0, 1, item) - # Create a 'text only' menu via InsertMenuItem rather then - # AppendMenu, just to prove we can! - item, extras = PackMENUITEMINFO(text="Text only item", wID=1001) - InsertMenuItem(menu, 0, 1, item) - - load_bmp_flags = win32con.LR_LOADFROMFILE | win32con.LR_LOADTRANSPARENT - # These images are "over sized", so we load them scaled. 
- hbmp = LoadImage( - 0, - os.path.join(this_dir, "images/smiley.bmp"), - win32con.IMAGE_BITMAP, - 20, - 20, - load_bmp_flags, - ) - - # Create a top-level menu with a bitmap - item, extras = PackMENUITEMINFO( - text="Menu with bitmap", hbmpItem=hbmp, wID=1002 - ) - InsertMenuItem(menu, 0, 1, item) - - # Owner-draw menus mainly from: - # http://windowssdk.msdn.microsoft.com/en-us/library/ms647558.aspx - # and: - # http://www.codeguru.com/cpp/controls/menu/bitmappedmenus/article.php/c165 - - # Create one with an icon - this is *lots* more work - we do it - # owner-draw! The primary reason is to handle transparency better - - # converting to a bitmap causes the background to be incorrect when - # the menu item is selected. I can't see a simpler way. - # First, load the icon we want to use. - ico_x = GetSystemMetrics(win32con.SM_CXSMICON) - ico_y = GetSystemMetrics(win32con.SM_CYSMICON) - if self.iconPathName: - hicon = LoadImage( - 0, - self.iconPathName, - win32con.IMAGE_ICON, - ico_x, - ico_y, - win32con.LR_LOADFROMFILE, - ) - else: - shell_dll = os.path.join(GetSystemDirectory(), "shell32.dll") - large, small = win32gui.ExtractIconEx(shell_dll, 4, 1) - hicon = small[0] - DestroyIcon(large[0]) - - # Stash away the text and hicon in our map, and add the owner-draw - # item to the menu. - index = 0 - self.menu_item_map[index] = (hicon, "Menu with owner-draw icon") - item, extras = PackMENUITEMINFO( - fType=win32con.MFT_OWNERDRAW, dwItemData=index, wID=1009 - ) - InsertMenuItem(menu, 0, 1, item) - - # Add another icon-based icon - but this time using HBMMENU_CALLBACK - # in the hbmpItem elt, so we only need to draw the icon (ie, not the - # text or checkmark) - index = 1 - self.menu_item_map[index] = (hicon, None) - item, extras = PackMENUITEMINFO( - text="Menu with o-d icon 2", - dwItemData=index, - hbmpItem=win32con.HBMMENU_CALLBACK, - wID=1010, - ) - InsertMenuItem(menu, 0, 1, item) - - # Add another icon-based icon - this time by converting - # via bitmap. 
Note the icon background when selected is ugly :( - hdcBitmap = CreateCompatibleDC(0) - hdcScreen = GetDC(0) - hbm = CreateCompatibleBitmap(hdcScreen, ico_x, ico_y) - hbmOld = SelectObject(hdcBitmap, hbm) - SetBkMode(hdcBitmap, win32con.TRANSPARENT) - # Fill the background. - brush = GetSysColorBrush(win32con.COLOR_MENU) - FillRect(hdcBitmap, (0, 0, 16, 16), brush) - # unclear if brush needs to be freed. Best clue I can find is: - # "GetSysColorBrush returns a cached brush instead of allocating a new - # one." - implies no DeleteObject. - # draw the icon - DrawIconEx(hdcBitmap, 0, 0, hicon, ico_x, ico_y, 0, 0, win32con.DI_NORMAL) - SelectObject(hdcBitmap, hbmOld) - DeleteDC(hdcBitmap) - item, extras = PackMENUITEMINFO( - text="Menu with icon", hbmpItem=hbm.Detach(), wID=1011 - ) - InsertMenuItem(menu, 0, 1, item) - - # Create a sub-menu, and put a few funky ones there. - self.sub_menu = sub_menu = CreatePopupMenu() - # A 'checkbox' menu. - item, extras = PackMENUITEMINFO( - fState=win32con.MFS_CHECKED, text="Checkbox menu", hbmpItem=hbmp, wID=1003 - ) - InsertMenuItem(sub_menu, 0, 1, item) - # A 'radio' menu. - InsertMenu(sub_menu, 0, win32con.MF_BYPOSITION, win32con.MF_SEPARATOR, None) - item, extras = PackMENUITEMINFO( - fType=win32con.MFT_RADIOCHECK, - fState=win32con.MFS_CHECKED, - text="Checkbox menu - bullet 1", - hbmpItem=hbmp, - wID=1004, - ) - InsertMenuItem(sub_menu, 0, 1, item) - item, extras = PackMENUITEMINFO( - fType=win32con.MFT_RADIOCHECK, - fState=win32con.MFS_UNCHECKED, - text="Checkbox menu - bullet 2", - hbmpItem=hbmp, - wID=1005, - ) - InsertMenuItem(sub_menu, 0, 1, item) - # And add the sub-menu to the top-level menu. - item, extras = PackMENUITEMINFO(text="Sub-Menu", hSubMenu=sub_menu) - InsertMenuItem(menu, 0, 1, item) - - # Set 'Exit' as the default option. 
- SetMenuDefaultItem(menu, 1000, 0) - - def OnDestroy(self, hwnd, msg, wparam, lparam): - nid = (self.hwnd, 0) - Shell_NotifyIcon(NIM_DELETE, nid) - PostQuitMessage(0) # Terminate the app. - - def OnTaskbarNotify(self, hwnd, msg, wparam, lparam): - if lparam == win32con.WM_RBUTTONUP: - print("You right clicked me.") - # display the menu at the cursor pos. - pos = GetCursorPos() - SetForegroundWindow(self.hwnd) - TrackPopupMenu( - self.hmenu, win32con.TPM_LEFTALIGN, pos[0], pos[1], 0, self.hwnd, None - ) - PostMessage(self.hwnd, win32con.WM_NULL, 0, 0) - elif lparam == win32con.WM_LBUTTONDBLCLK: - print("You double-clicked me") - # find the default menu item and fire it. - cmd = GetMenuDefaultItem(self.hmenu, False, 0) - if cmd == -1: - print("Can't find a default!") - # and just pretend it came from the menu - self.OnCommand(hwnd, win32con.WM_COMMAND, cmd, 0) - return 1 - - def OnCommand(self, hwnd, msg, wparam, lparam): - id = LOWORD(wparam) - if id == 1000: - print("Goodbye") - DestroyWindow(self.hwnd) - elif id in (1003, 1004, 1005): - # Our 'checkbox' and 'radio' items - state = GetMenuState(self.sub_menu, id, win32con.MF_BYCOMMAND) - if state == -1: - raise RuntimeError("No item found") - if state & win32con.MF_CHECKED: - check_flags = win32con.MF_UNCHECKED - print("Menu was checked - unchecking") - else: - check_flags = win32con.MF_CHECKED - print("Menu was unchecked - checking") - - if id == 1003: - # simple checkbox - rc = CheckMenuItem( - self.sub_menu, id, win32con.MF_BYCOMMAND | check_flags - ) - else: - # radio button - must pass the first and last IDs in the - # "group", and the ID in the group that is to be selected. - rc = CheckMenuRadioItem( - self.sub_menu, 1004, 1005, id, win32con.MF_BYCOMMAND - ) - # Get and check the new state - first the simple way... 
- new_state = GetMenuState(self.sub_menu, id, win32con.MF_BYCOMMAND) - if new_state & win32con.MF_CHECKED != check_flags: - raise RuntimeError("The new item didn't get the new checked state!") - # Now the long-winded way via GetMenuItemInfo... - buf, extras = EmptyMENUITEMINFO() - win32gui.GetMenuItemInfo(self.sub_menu, id, False, buf) - ( - fType, - fState, - wID, - hSubMenu, - hbmpChecked, - hbmpUnchecked, - dwItemData, - text, - hbmpItem, - ) = UnpackMENUITEMINFO(buf) - - if fState & win32con.MF_CHECKED != check_flags: - raise RuntimeError("The new item didn't get the new checked state!") - else: - print("OnCommand for ID", id) - - # Owner-draw related functions. We only have 1 owner-draw item, but - # we pretend we have more than that :) - def OnMeasureItem(self, hwnd, msg, wparam, lparam): - ## Last item of MEASUREITEMSTRUCT is a ULONG_PTR - fmt = "5iP" - buf = PyMakeBuffer(struct.calcsize(fmt), lparam) - data = struct.unpack(fmt, buf) - ctlType, ctlID, itemID, itemWidth, itemHeight, itemData = data - - hicon, text = self.menu_item_map[itemData] - if text is None: - # Only drawing icon due to HBMMENU_CALLBACK - cx = self.menu_icon_width - cy = self.menu_icon_height - else: - # drawing the lot! 
- dc = GetDC(hwnd) - oldFont = SelectObject(dc, self.font_menu) - cx, cy = GetTextExtentPoint32(dc, text) - SelectObject(dc, oldFont) - ReleaseDC(hwnd, dc) - - cx += GetSystemMetrics(win32con.SM_CXMENUCHECK) - cx += self.menu_icon_width + self.icon_x_pad - - cy = GetSystemMetrics(win32con.SM_CYMENU) - - new_data = struct.pack(fmt, ctlType, ctlID, itemID, cx, cy, itemData) - PySetMemory(lparam, new_data) - return True - - def OnDrawItem(self, hwnd, msg, wparam, lparam): - ## lparam is a DRAWITEMSTRUCT - fmt = "5i2P4iP" - data = struct.unpack(fmt, PyGetMemory(lparam, struct.calcsize(fmt))) - ( - ctlType, - ctlID, - itemID, - itemAction, - itemState, - hwndItem, - hDC, - left, - top, - right, - bot, - itemData, - ) = data - - rect = left, top, right, bot - hicon, text = self.menu_item_map[itemData] - - if text is None: - # This means the menu-item had HBMMENU_CALLBACK - so all we - # draw is the icon. rect is the entire area we should use. - DrawIconEx( - hDC, left, top, hicon, right - left, bot - top, 0, 0, win32con.DI_NORMAL - ) - else: - # If the user has selected the item, use the selected - # text and background colors to display the item. - selected = itemState & win32con.ODS_SELECTED - if selected: - crText = SetTextColor(hDC, GetSysColor(win32con.COLOR_HIGHLIGHTTEXT)) - crBkgnd = SetBkColor(hDC, GetSysColor(win32con.COLOR_HIGHLIGHT)) - - each_pad = self.icon_x_pad // 2 - x_icon = left + GetSystemMetrics(win32con.SM_CXMENUCHECK) + each_pad - x_text = x_icon + self.menu_icon_width + each_pad - - # Draw text first, specifying a complete rect to fill - this sets - # up the background (but overwrites anything else already there!) - # Select the font, draw it, and restore the previous font. - hfontOld = SelectObject(hDC, self.font_menu) - ExtTextOut(hDC, x_text, top + 2, win32con.ETO_OPAQUE, rect, text) - SelectObject(hDC, hfontOld) - - # Icon image next. Icons are transparent - no need to handle - # selection specially. 
- DrawIconEx( - hDC, - x_icon, - top + 2, - hicon, - self.menu_icon_width, - self.menu_icon_height, - 0, - 0, - win32con.DI_NORMAL, - ) - - # Return the text and background colors to their - # normal state (not selected). - if selected: - SetTextColor(hDC, crText) - SetBkColor(hDC, crBkgnd) - - -def main(): - w = MainWindow() - PumpMessages() - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/win32gui_taskbar.py b/lib/win32/Demos/win32gui_taskbar.py deleted file mode 100644 index 84d642a9..00000000 --- a/lib/win32/Demos/win32gui_taskbar.py +++ /dev/null @@ -1,141 +0,0 @@ -# Creates a task-bar icon. Run from Python.exe to see the -# messages printed. -import os -import sys - -import win32api -import win32con -import win32gui -import winerror - - -class MainWindow: - def __init__(self): - msg_TaskbarRestart = win32gui.RegisterWindowMessage("TaskbarCreated") - message_map = { - msg_TaskbarRestart: self.OnRestart, - win32con.WM_DESTROY: self.OnDestroy, - win32con.WM_COMMAND: self.OnCommand, - win32con.WM_USER + 20: self.OnTaskbarNotify, - } - # Register the Window class. - wc = win32gui.WNDCLASS() - hinst = wc.hInstance = win32api.GetModuleHandle(None) - wc.lpszClassName = "PythonTaskbarDemo" - wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW - wc.hCursor = win32api.LoadCursor(0, win32con.IDC_ARROW) - wc.hbrBackground = win32con.COLOR_WINDOW - wc.lpfnWndProc = message_map # could also specify a wndproc. - - # Don't blow up if class already registered to make testing easier - try: - classAtom = win32gui.RegisterClass(wc) - except win32gui.error as err_info: - if err_info.winerror != winerror.ERROR_CLASS_ALREADY_EXISTS: - raise - - # Create the Window. 
- style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU - self.hwnd = win32gui.CreateWindow( - wc.lpszClassName, - "Taskbar Demo", - style, - 0, - 0, - win32con.CW_USEDEFAULT, - win32con.CW_USEDEFAULT, - 0, - 0, - hinst, - None, - ) - win32gui.UpdateWindow(self.hwnd) - self._DoCreateIcons() - - def _DoCreateIcons(self): - # Try and find a custom icon - hinst = win32api.GetModuleHandle(None) - iconPathName = os.path.abspath( - os.path.join(os.path.split(sys.executable)[0], "pyc.ico") - ) - if not os.path.isfile(iconPathName): - # Look in DLLs dir, a-la py 2.5 - iconPathName = os.path.abspath( - os.path.join(os.path.split(sys.executable)[0], "DLLs", "pyc.ico") - ) - if not os.path.isfile(iconPathName): - # Look in the source tree. - iconPathName = os.path.abspath( - os.path.join(os.path.split(sys.executable)[0], "..\\PC\\pyc.ico") - ) - if os.path.isfile(iconPathName): - icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE - hicon = win32gui.LoadImage( - hinst, iconPathName, win32con.IMAGE_ICON, 0, 0, icon_flags - ) - else: - print("Can't find a Python icon file - using default") - hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION) - - flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_TIP - nid = (self.hwnd, 0, flags, win32con.WM_USER + 20, hicon, "Python Demo") - try: - win32gui.Shell_NotifyIcon(win32gui.NIM_ADD, nid) - except win32gui.error: - # This is common when windows is starting, and this code is hit - # before the taskbar has been created. - print("Failed to add the taskbar icon - is explorer running?") - # but keep running anyway - when explorer starts, we get the - # TaskbarCreated message. - - def OnRestart(self, hwnd, msg, wparam, lparam): - self._DoCreateIcons() - - def OnDestroy(self, hwnd, msg, wparam, lparam): - nid = (self.hwnd, 0) - win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, nid) - win32gui.PostQuitMessage(0) # Terminate the app. 
- - def OnTaskbarNotify(self, hwnd, msg, wparam, lparam): - if lparam == win32con.WM_LBUTTONUP: - print("You clicked me.") - elif lparam == win32con.WM_LBUTTONDBLCLK: - print("You double-clicked me - goodbye") - win32gui.DestroyWindow(self.hwnd) - elif lparam == win32con.WM_RBUTTONUP: - print("You right clicked me.") - menu = win32gui.CreatePopupMenu() - win32gui.AppendMenu(menu, win32con.MF_STRING, 1023, "Display Dialog") - win32gui.AppendMenu(menu, win32con.MF_STRING, 1024, "Say Hello") - win32gui.AppendMenu(menu, win32con.MF_STRING, 1025, "Exit program") - pos = win32gui.GetCursorPos() - # See http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/menus_0hdi.asp - win32gui.SetForegroundWindow(self.hwnd) - win32gui.TrackPopupMenu( - menu, win32con.TPM_LEFTALIGN, pos[0], pos[1], 0, self.hwnd, None - ) - win32gui.PostMessage(self.hwnd, win32con.WM_NULL, 0, 0) - return 1 - - def OnCommand(self, hwnd, msg, wparam, lparam): - id = win32api.LOWORD(wparam) - if id == 1023: - import win32gui_dialog - - win32gui_dialog.DemoModal() - elif id == 1024: - print("Hello") - elif id == 1025: - print("Goodbye") - win32gui.DestroyWindow(self.hwnd) - else: - print("Unknown command -", id) - - -def main(): - w = MainWindow() - win32gui.PumpMessages() - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/win32netdemo.py b/lib/win32/Demos/win32netdemo.py deleted file mode 100644 index 36c49542..00000000 --- a/lib/win32/Demos/win32netdemo.py +++ /dev/null @@ -1,274 +0,0 @@ -import getopt -import sys -import traceback - -import win32api -import win32net -import win32netcon -import win32security - -verbose_level = 0 - -server = None # Run on local machine. 
- - -def verbose(msg): - if verbose_level: - print(msg) - - -def CreateUser(): - "Creates a new test user, then deletes the user" - testName = "PyNetTestUser" - try: - win32net.NetUserDel(server, testName) - print("Warning - deleted user before creating it!") - except win32net.error: - pass - - d = {} - d["name"] = testName - d["password"] = "deleteme" - d["priv"] = win32netcon.USER_PRIV_USER - d["comment"] = "Delete me - created by Python test code" - d["flags"] = win32netcon.UF_NORMAL_ACCOUNT | win32netcon.UF_SCRIPT - win32net.NetUserAdd(server, 1, d) - try: - try: - win32net.NetUserChangePassword(server, testName, "wrong", "new") - print("ERROR: NetUserChangePassword worked with a wrong password!") - except win32net.error: - pass - win32net.NetUserChangePassword(server, testName, "deleteme", "new") - finally: - win32net.NetUserDel(server, testName) - print("Created a user, changed their password, and deleted them!") - - -def UserEnum(): - "Enumerates all the local users" - resume = 0 - nuser = 0 - while 1: - data, total, resume = win32net.NetUserEnum( - server, 3, win32netcon.FILTER_NORMAL_ACCOUNT, resume - ) - verbose( - "Call to NetUserEnum obtained %d entries of %d total" % (len(data), total) - ) - for user in data: - verbose("Found user %s" % user["name"]) - nuser = nuser + 1 - if not resume: - break - assert nuser, "Could not find any users!" 
- print("Enumerated all the local users") - - -def GroupEnum(): - "Enumerates all the domain groups" - nmembers = 0 - resume = 0 - while 1: - data, total, resume = win32net.NetGroupEnum(server, 1, resume) - # print "Call to NetGroupEnum obtained %d entries of %d total" % (len(data), total) - for group in data: - verbose("Found group %(name)s:%(comment)s " % group) - memberresume = 0 - while 1: - memberdata, total, memberresume = win32net.NetGroupGetUsers( - server, group["name"], 0, resume - ) - for member in memberdata: - verbose(" Member %(name)s" % member) - nmembers = nmembers + 1 - if memberresume == 0: - break - if not resume: - break - assert nmembers, "Couldnt find a single member in a single group!" - print("Enumerated all the groups") - - -def LocalGroupEnum(): - "Enumerates all the local groups" - resume = 0 - nmembers = 0 - while 1: - data, total, resume = win32net.NetLocalGroupEnum(server, 1, resume) - for group in data: - verbose("Found group %(name)s:%(comment)s " % group) - memberresume = 0 - while 1: - memberdata, total, memberresume = win32net.NetLocalGroupGetMembers( - server, group["name"], 2, resume - ) - for member in memberdata: - # Just for the sake of it, we convert the SID to a username - username, domain, type = win32security.LookupAccountSid( - server, member["sid"] - ) - nmembers = nmembers + 1 - verbose(" Member %s (%s)" % (username, member["domainandname"])) - if memberresume == 0: - break - if not resume: - break - assert nmembers, "Couldnt find a single member in a single group!" - print("Enumerated all the local groups") - - -def ServerEnum(): - "Enumerates all servers on the network" - resume = 0 - while 1: - data, total, resume = win32net.NetServerEnum( - server, 100, win32netcon.SV_TYPE_ALL, None, resume - ) - for s in data: - verbose("Found server %s" % s["name"]) - # Now loop over the shares. 
- shareresume = 0 - while 1: - sharedata, total, shareresume = win32net.NetShareEnum( - server, 2, shareresume - ) - for share in sharedata: - verbose( - " %(netname)s (%(path)s):%(remark)s - in use by %(current_uses)d users" - % share - ) - if not shareresume: - break - if not resume: - break - print("Enumerated all the servers on the network") - - -def LocalGroup(uname=None): - "Creates a local group, adds some members, deletes them, then removes the group" - level = 3 - if uname is None: - uname = win32api.GetUserName() - if uname.find("\\") < 0: - uname = win32api.GetDomainName() + "\\" + uname - group = "python_test_group" - # delete the group if it already exists - try: - win32net.NetLocalGroupDel(server, group) - print("WARNING: existing local group '%s' has been deleted.") - except win32net.error: - pass - group_data = {"name": group} - win32net.NetLocalGroupAdd(server, 1, group_data) - try: - u = {"domainandname": uname} - win32net.NetLocalGroupAddMembers(server, group, level, [u]) - mem, tot, res = win32net.NetLocalGroupGetMembers(server, group, level) - print("members are", mem) - if mem[0]["domainandname"] != uname: - print("ERROR: LocalGroup just added %s, but members are %r" % (uname, mem)) - # Convert the list of dicts to a list of strings. 
- win32net.NetLocalGroupDelMembers( - server, group, [m["domainandname"] for m in mem] - ) - finally: - win32net.NetLocalGroupDel(server, group) - print("Created a local group, added and removed members, then deleted the group") - - -def GetInfo(userName=None): - "Dumps level 3 information about the current user" - if userName is None: - userName = win32api.GetUserName() - print("Dumping level 3 information about user") - info = win32net.NetUserGetInfo(server, userName, 3) - for key, val in list(info.items()): - verbose("%s=%s" % (key, val)) - - -def SetInfo(userName=None): - "Attempts to change the current users comment, then set it back" - if userName is None: - userName = win32api.GetUserName() - oldData = win32net.NetUserGetInfo(server, userName, 3) - try: - d = oldData.copy() - d["usr_comment"] = "Test comment" - win32net.NetUserSetInfo(server, userName, 3, d) - new = win32net.NetUserGetInfo(server, userName, 3)["usr_comment"] - if str(new) != "Test comment": - raise RuntimeError("Could not read the same comment back - got %s" % new) - print("Changed the data for the user") - finally: - win32net.NetUserSetInfo(server, userName, 3, oldData) - - -def SetComputerInfo(): - "Doesnt actually change anything, just make sure we could ;-)" - info = win32net.NetWkstaGetInfo(None, 502) - # *sob* - but we can't! Why not!!! 
- # win32net.NetWkstaSetInfo(None, 502, info) - - -def usage(tests): - import os - - print("Usage: %s [-s server ] [-v] [Test ...]" % os.path.basename(sys.argv[0])) - print(" -v : Verbose - print more information") - print(" -s : server - execute the tests against the named server") - print(" -c : include the CreateUser test by default") - print("where Test is one of:") - for t in tests: - print(t.__name__, ":", t.__doc__) - print() - print("If not tests are specified, all tests are run") - sys.exit(1) - - -def main(): - tests = [] - for ob in list(globals().values()): - if type(ob) == type(main) and ob.__doc__: - tests.append(ob) - opts, args = getopt.getopt(sys.argv[1:], "s:hvc") - create_user = False - for opt, val in opts: - if opt == "-s": - global server - server = val - if opt == "-h": - usage(tests) - if opt == "-v": - global verbose_level - verbose_level = verbose_level + 1 - if opt == "-c": - create_user = True - - if len(args) == 0: - print("Running all tests - use '-h' to see command-line options...") - dotests = tests - if not create_user: - dotests.remove(CreateUser) - else: - dotests = [] - for arg in args: - for t in tests: - if t.__name__ == arg: - dotests.append(t) - break - else: - print("Test '%s' unknown - skipping" % arg) - if not len(dotests): - print("Nothing to do!") - usage(tests) - for test in dotests: - try: - test() - except: - print("Test %s failed" % test.__name__) - traceback.print_exc() - - -if __name__ == "__main__": - main() diff --git a/lib/win32/Demos/win32rcparser_demo.py b/lib/win32/Demos/win32rcparser_demo.py deleted file mode 100644 index 6a41d453..00000000 --- a/lib/win32/Demos/win32rcparser_demo.py +++ /dev/null @@ -1,86 +0,0 @@ -# A demo of the win32rcparser module and using win32gui - -import os - -import commctrl -import win32api -import win32con -import win32gui -import win32rcparser - -this_dir = os.path.abspath(os.path.dirname(__file__)) -g_rcname = os.path.abspath( - os.path.join(this_dir, "..", "test", 
"win32rcparser", "test.rc") -) - -if not os.path.isfile(g_rcname): - raise RuntimeError("Can't locate test.rc (should be at '%s')" % (g_rcname,)) - - -class DemoWindow: - def __init__(self, dlg_template): - self.dlg_template = dlg_template - - def CreateWindow(self): - self._DoCreate(win32gui.CreateDialogIndirect) - - def DoModal(self): - return self._DoCreate(win32gui.DialogBoxIndirect) - - def _DoCreate(self, fn): - message_map = { - win32con.WM_INITDIALOG: self.OnInitDialog, - win32con.WM_CLOSE: self.OnClose, - win32con.WM_DESTROY: self.OnDestroy, - win32con.WM_COMMAND: self.OnCommand, - } - return fn(0, self.dlg_template, 0, message_map) - - def OnInitDialog(self, hwnd, msg, wparam, lparam): - self.hwnd = hwnd - # centre the dialog - desktop = win32gui.GetDesktopWindow() - l, t, r, b = win32gui.GetWindowRect(self.hwnd) - dt_l, dt_t, dt_r, dt_b = win32gui.GetWindowRect(desktop) - centre_x, centre_y = win32gui.ClientToScreen( - desktop, ((dt_r - dt_l) // 2, (dt_b - dt_t) // 2) - ) - win32gui.MoveWindow( - hwnd, centre_x - (r // 2), centre_y - (b // 2), r - l, b - t, 0 - ) - - def OnCommand(self, hwnd, msg, wparam, lparam): - # Needed to make OK/Cancel work - no other controls are handled. - id = win32api.LOWORD(wparam) - if id in [win32con.IDOK, win32con.IDCANCEL]: - win32gui.EndDialog(hwnd, id) - - def OnClose(self, hwnd, msg, wparam, lparam): - win32gui.EndDialog(hwnd, 0) - - def OnDestroy(self, hwnd, msg, wparam, lparam): - pass - - -def DemoModal(): - # Load the .rc file. 
- resources = win32rcparser.Parse(g_rcname) - for id, ddef in resources.dialogs.items(): - print("Displaying dialog", id) - w = DemoWindow(ddef) - w.DoModal() - - -if __name__ == "__main__": - flags = 0 - for flag in """ICC_DATE_CLASSES ICC_ANIMATE_CLASS ICC_ANIMATE_CLASS - ICC_BAR_CLASSES ICC_COOL_CLASSES ICC_DATE_CLASSES - ICC_HOTKEY_CLASS ICC_INTERNET_CLASSES ICC_LISTVIEW_CLASSES - ICC_PAGESCROLLER_CLASS ICC_PROGRESS_CLASS ICC_TAB_CLASSES - ICC_TREEVIEW_CLASSES ICC_UPDOWN_CLASS ICC_USEREX_CLASSES - ICC_WIN95_CLASSES """.split(): - flags |= getattr(commctrl, flag) - win32gui.InitCommonControlsEx(flags) - # Need to do this go get rich-edit working. - win32api.LoadLibrary("riched20.dll") - DemoModal() diff --git a/lib/win32/Demos/win32servicedemo.py b/lib/win32/Demos/win32servicedemo.py deleted file mode 100644 index 75967ef3..00000000 --- a/lib/win32/Demos/win32servicedemo.py +++ /dev/null @@ -1,23 +0,0 @@ -import win32con -import win32service - - -def EnumServices(): - resume = 0 - accessSCM = win32con.GENERIC_READ - accessSrv = win32service.SC_MANAGER_ALL_ACCESS - - # Open Service Control Manager - hscm = win32service.OpenSCManager(None, None, accessSCM) - - # Enumerate Service Control Manager DB - - typeFilter = win32service.SERVICE_WIN32 - stateFilter = win32service.SERVICE_STATE_ALL - - statuses = win32service.EnumServicesStatus(hscm, typeFilter, stateFilter) - for short_name, desc, status in statuses: - print(short_name, desc, status) - - -EnumServices() diff --git a/lib/win32/Demos/win32ts_logoff_disconnected.py b/lib/win32/Demos/win32ts_logoff_disconnected.py deleted file mode 100644 index be61e5d1..00000000 --- a/lib/win32/Demos/win32ts_logoff_disconnected.py +++ /dev/null @@ -1,24 +0,0 @@ -""" Finds any disconnected terminal service sessions and logs them off""" -import pywintypes -import win32ts -import winerror - -sessions = win32ts.WTSEnumerateSessions(win32ts.WTS_CURRENT_SERVER_HANDLE) -for session in sessions: - """ - WTS_CONNECTSTATE_CLASS: 
WTSActive,WTSConnected,WTSConnectQuery,WTSShadow,WTSDisconnected, - WTSIdle,WTSListen,WTSReset,WTSDown,WTSInit - """ - if session["State"] == win32ts.WTSDisconnected: - sessionid = session["SessionId"] - username = win32ts.WTSQuerySessionInformation( - win32ts.WTS_CURRENT_SERVER_HANDLE, sessionid, win32ts.WTSUserName - ) - print("Logging off disconnected user:", username) - try: - win32ts.WTSLogoffSession(win32ts.WTS_CURRENT_SERVER_HANDLE, sessionid, True) - except pywintypes.error as e: - if e.winerror == winerror.ERROR_ACCESS_DENIED: - print("Can't kill that session:", e.strerror) - else: - raise diff --git a/lib/win32/Demos/win32wnet/testwnet.py b/lib/win32/Demos/win32wnet/testwnet.py deleted file mode 100644 index 3b21ac29..00000000 --- a/lib/win32/Demos/win32wnet/testwnet.py +++ /dev/null @@ -1,125 +0,0 @@ -import os - -import win32api -import win32wnet -from winnetwk import * - -possible_shares = [] - - -def _doDumpHandle(handle, level=0): - indent = " " * level - while 1: - items = win32wnet.WNetEnumResource(handle, 0) - if len(items) == 0: - break - for item in items: - try: - if item.dwDisplayType == RESOURCEDISPLAYTYPE_SHARE: - print(indent + "Have share with name:", item.lpRemoteName) - possible_shares.append(item) - elif item.dwDisplayType == RESOURCEDISPLAYTYPE_GENERIC: - print( - indent + "Have generic resource with name:", item.lpRemoteName - ) - else: - # Try generic! - print(indent + "Enumerating " + item.lpRemoteName, end=" ") - k = win32wnet.WNetOpenEnum( - RESOURCE_GLOBALNET, RESOURCETYPE_ANY, 0, item - ) - print() - _doDumpHandle(k, level + 1) - win32wnet.WNetCloseEnum( - k - ) # could do k.Close(), but this is a good test! 
- except win32wnet.error as details: - print(indent + "Couldn't enumerate this resource: " + details.strerror) - - -def TestOpenEnum(): - print("Enumerating all resources on the network - this may take some time...") - handle = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET, RESOURCETYPE_ANY, 0, None) - - try: - _doDumpHandle(handle) - finally: - handle.Close() - print("Finished dumping all resources.") - - -def findUnusedDriveLetter(): - existing = [ - x[0].lower() for x in win32api.GetLogicalDriveStrings().split("\0") if x - ] - handle = win32wnet.WNetOpenEnum(RESOURCE_REMEMBERED, RESOURCETYPE_DISK, 0, None) - try: - while 1: - items = win32wnet.WNetEnumResource(handle, 0) - if len(items) == 0: - break - xtra = [i.lpLocalName[0].lower() for i in items if i.lpLocalName] - existing.extend(xtra) - finally: - handle.Close() - for maybe in "defghijklmnopqrstuvwxyz": - if maybe not in existing: - return maybe - raise RuntimeError("All drive mappings are taken?") - - -def TestConnection(): - if len(possible_shares) == 0: - print("Couldn't find any potential shares to connect to") - return - localName = findUnusedDriveLetter() + ":" - for share in possible_shares: - print("Attempting connection of", localName, "to", share.lpRemoteName) - try: - win32wnet.WNetAddConnection2(share.dwType, localName, share.lpRemoteName) - except win32wnet.error as details: - print("Couldn't connect: " + details.strerror) - continue - # Have a connection. - try: - fname = os.path.join(localName + "\\", os.listdir(localName + "\\")[0]) - try: - print( - "Universal name of '%s' is '%s'" - % (fname, win32wnet.WNetGetUniversalName(fname)) - ) - except win32wnet.error as details: - print( - "Couldn't get universal name of '%s': %s" - % (fname, details.strerror) - ) - print("User name for this connection is", win32wnet.WNetGetUser(localName)) - finally: - win32wnet.WNetCancelConnection2(localName, 0, 0) - # and do it again, but this time by using the more modern - # NETRESOURCE way. 
- nr = win32wnet.NETRESOURCE() - nr.dwType = share.dwType - nr.lpLocalName = localName - nr.lpRemoteName = share.lpRemoteName - win32wnet.WNetAddConnection2(nr) - win32wnet.WNetCancelConnection2(localName, 0, 0) - - # and one more time using WNetAddConnection3 - win32wnet.WNetAddConnection3(0, nr) - win32wnet.WNetCancelConnection2(localName, 0, 0) - - # Only do the first share that succeeds. - break - - -def TestGetUser(): - u = win32wnet.WNetGetUser() - print("Current global user is", repr(u)) - if u != win32wnet.WNetGetUser(None): - raise RuntimeError("Default value didnt seem to work!") - - -TestGetUser() -TestOpenEnum() -TestConnection() diff --git a/lib/win32/Demos/win32wnet/winnetwk.py b/lib/win32/Demos/win32wnet/winnetwk.py deleted file mode 100644 index 036e2ebf..00000000 --- a/lib/win32/Demos/win32wnet/winnetwk.py +++ /dev/null @@ -1,100 +0,0 @@ -# Generated by h2py from d:\mssdk\include\winnetwk.h -WNNC_NET_MSNET = 0x00010000 -WNNC_NET_LANMAN = 0x00020000 -WNNC_NET_NETWARE = 0x00030000 -WNNC_NET_VINES = 0x00040000 -WNNC_NET_10NET = 0x00050000 -WNNC_NET_LOCUS = 0x00060000 -WNNC_NET_SUN_PC_NFS = 0x00070000 -WNNC_NET_LANSTEP = 0x00080000 -WNNC_NET_9TILES = 0x00090000 -WNNC_NET_LANTASTIC = 0x000A0000 -WNNC_NET_AS400 = 0x000B0000 -WNNC_NET_FTP_NFS = 0x000C0000 -WNNC_NET_PATHWORKS = 0x000D0000 -WNNC_NET_LIFENET = 0x000E0000 -WNNC_NET_POWERLAN = 0x000F0000 -WNNC_NET_BWNFS = 0x00100000 -WNNC_NET_COGENT = 0x00110000 -WNNC_NET_FARALLON = 0x00120000 -WNNC_NET_APPLETALK = 0x00130000 -WNNC_NET_INTERGRAPH = 0x00140000 -WNNC_NET_SYMFONET = 0x00150000 -WNNC_NET_CLEARCASE = 0x00160000 -WNNC_NET_FRONTIER = 0x00170000 -WNNC_NET_BMC = 0x00180000 -WNNC_NET_DCE = 0x00190000 -WNNC_NET_DECORB = 0x00200000 -WNNC_NET_PROTSTOR = 0x00210000 -WNNC_NET_FJ_REDIR = 0x00220000 -WNNC_NET_DISTINCT = 0x00230000 -WNNC_NET_TWINS = 0x00240000 -WNNC_NET_RDR2SAMPLE = 0x00250000 -RESOURCE_CONNECTED = 0x00000001 -RESOURCE_GLOBALNET = 0x00000002 -RESOURCE_REMEMBERED = 0x00000003 -RESOURCE_RECENT = 
0x00000004 -RESOURCE_CONTEXT = 0x00000005 -RESOURCETYPE_ANY = 0x00000000 -RESOURCETYPE_DISK = 0x00000001 -RESOURCETYPE_PRINT = 0x00000002 -RESOURCETYPE_RESERVED = 0x00000008 -RESOURCETYPE_UNKNOWN = 0xFFFFFFFF -RESOURCEUSAGE_CONNECTABLE = 0x00000001 -RESOURCEUSAGE_CONTAINER = 0x00000002 -RESOURCEUSAGE_NOLOCALDEVICE = 0x00000004 -RESOURCEUSAGE_SIBLING = 0x00000008 -RESOURCEUSAGE_ATTACHED = 0x00000010 -RESOURCEUSAGE_ALL = ( - RESOURCEUSAGE_CONNECTABLE | RESOURCEUSAGE_CONTAINER | RESOURCEUSAGE_ATTACHED -) -RESOURCEUSAGE_RESERVED = 0x80000000 -RESOURCEDISPLAYTYPE_GENERIC = 0x00000000 -RESOURCEDISPLAYTYPE_DOMAIN = 0x00000001 -RESOURCEDISPLAYTYPE_SERVER = 0x00000002 -RESOURCEDISPLAYTYPE_SHARE = 0x00000003 -RESOURCEDISPLAYTYPE_FILE = 0x00000004 -RESOURCEDISPLAYTYPE_GROUP = 0x00000005 -RESOURCEDISPLAYTYPE_NETWORK = 0x00000006 -RESOURCEDISPLAYTYPE_ROOT = 0x00000007 -RESOURCEDISPLAYTYPE_SHAREADMIN = 0x00000008 -RESOURCEDISPLAYTYPE_DIRECTORY = 0x00000009 -RESOURCEDISPLAYTYPE_TREE = 0x0000000A -RESOURCEDISPLAYTYPE_NDSCONTAINER = 0x0000000B -NETPROPERTY_PERSISTENT = 1 -CONNECT_UPDATE_PROFILE = 0x00000001 -CONNECT_UPDATE_RECENT = 0x00000002 -CONNECT_TEMPORARY = 0x00000004 -CONNECT_INTERACTIVE = 0x00000008 -CONNECT_PROMPT = 0x00000010 -CONNECT_NEED_DRIVE = 0x00000020 -CONNECT_REFCOUNT = 0x00000040 -CONNECT_REDIRECT = 0x00000080 -CONNECT_LOCALDRIVE = 0x00000100 -CONNECT_CURRENT_MEDIA = 0x00000200 -CONNECT_DEFERRED = 0x00000400 -CONNECT_RESERVED = 0xFF000000 -CONNDLG_RO_PATH = 0x00000001 -CONNDLG_CONN_POINT = 0x00000002 -CONNDLG_USE_MRU = 0x00000004 -CONNDLG_HIDE_BOX = 0x00000008 -CONNDLG_PERSIST = 0x00000010 -CONNDLG_NOT_PERSIST = 0x00000020 -DISC_UPDATE_PROFILE = 0x00000001 -DISC_NO_FORCE = 0x00000040 -UNIVERSAL_NAME_INFO_LEVEL = 0x00000001 -REMOTE_NAME_INFO_LEVEL = 0x00000002 -WNFMT_MULTILINE = 0x01 -WNFMT_ABBREVIATED = 0x02 -WNFMT_INENUM = 0x10 -WNFMT_CONNECTION = 0x20 -NETINFO_DLL16 = 0x00000001 -NETINFO_DISKRED = 0x00000004 -NETINFO_PRINTERRED = 0x00000008 -RP_LOGON = 0x01 
-RP_INIFILE = 0x02 -PP_DISPLAYERRORS = 0x01 -WNCON_FORNETCARD = 0x00000001 -WNCON_NOTROUTED = 0x00000002 -WNCON_SLOWLINK = 0x00000004 -WNCON_DYNAMIC = 0x00000008 diff --git a/lib/win32/Demos/winprocess.py b/lib/win32/Demos/winprocess.py deleted file mode 100644 index 48f6fe71..00000000 --- a/lib/win32/Demos/winprocess.py +++ /dev/null @@ -1,230 +0,0 @@ -""" -Windows Process Control - -winprocess.run launches a child process and returns the exit code. -Optionally, it can: - redirect stdin, stdout & stderr to files - run the command as another user - limit the process's running time - control the process window (location, size, window state, desktop) -Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 -extensions. - -This code is free for any purpose, with no warranty of any kind. --- John B. Dell'Aquila -""" - -import msvcrt -import os - -import win32api -import win32con -import win32event -import win32gui -import win32process -import win32security - - -def logonUser(loginString): - """ - Login as specified user and return handle. - loginString: 'Domain\nUser\nPassword'; for local - login use . or empty string as domain - e.g. '.\nadministrator\nsecret_password' - """ - domain, user, passwd = loginString.split("\n") - return win32security.LogonUser( - user, - domain, - passwd, - win32con.LOGON32_LOGON_INTERACTIVE, - win32con.LOGON32_PROVIDER_DEFAULT, - ) - - -class Process: - """ - A Windows process. - """ - - def __init__( - self, - cmd, - login=None, - hStdin=None, - hStdout=None, - hStderr=None, - show=1, - xy=None, - xySize=None, - desktop=None, - ): - """ - Create a Windows process. - cmd: command to run - login: run as user 'Domain\nUser\nPassword' - hStdin, hStdout, hStderr: - handles for process I/O; default is caller's stdin, - stdout & stderr - show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) - xy: window offset (x, y) of upper left corner in pixels - xySize: window size (width, height) in pixels - desktop: lpDesktop - name of desktop e.g. 
'winsta0\\default' - None = inherit current desktop - '' = create new desktop if necessary - - User calling login requires additional privileges: - Act as part of the operating system [not needed on Windows XP] - Increase quotas - Replace a process level token - Login string must EITHER be an administrator's account - (ordinary user can't access current desktop - see Microsoft - Q165194) OR use desktop='' to run another desktop invisibly - (may be very slow to startup & finalize). - """ - si = win32process.STARTUPINFO() - si.dwFlags = win32con.STARTF_USESTDHANDLES ^ win32con.STARTF_USESHOWWINDOW - if hStdin is None: - si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) - else: - si.hStdInput = hStdin - if hStdout is None: - si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) - else: - si.hStdOutput = hStdout - if hStderr is None: - si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) - else: - si.hStdError = hStderr - si.wShowWindow = show - if xy is not None: - si.dwX, si.dwY = xy - si.dwFlags ^= win32con.STARTF_USEPOSITION - if xySize is not None: - si.dwXSize, si.dwYSize = xySize - si.dwFlags ^= win32con.STARTF_USESIZE - if desktop is not None: - si.lpDesktop = desktop - procArgs = ( - None, # appName - cmd, # commandLine - None, # processAttributes - None, # threadAttributes - 1, # bInheritHandles - win32process.CREATE_NEW_CONSOLE, # dwCreationFlags - None, # newEnvironment - None, # currentDirectory - si, - ) # startupinfo - if login is not None: - hUser = logonUser(login) - win32security.ImpersonateLoggedOnUser(hUser) - procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) - win32security.RevertToSelf() - else: - procHandles = win32process.CreateProcess(*procArgs) - self.hProcess, self.hThread, self.PId, self.TId = procHandles - - def wait(self, mSec=None): - """ - Wait for process to finish or for specified number of - milliseconds to elapse. 
- """ - if mSec is None: - mSec = win32event.INFINITE - return win32event.WaitForSingleObject(self.hProcess, mSec) - - def kill(self, gracePeriod=5000): - """ - Kill process. Try for an orderly shutdown via WM_CLOSE. If - still running after gracePeriod (5 sec. default), terminate. - """ - win32gui.EnumWindows(self.__close__, 0) - if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: - win32process.TerminateProcess(self.hProcess, 0) - win32api.Sleep(100) # wait for resources to be released - - def __close__(self, hwnd, dummy): - """ - EnumWindows callback - sends WM_CLOSE to any window - owned by this process. - """ - TId, PId = win32process.GetWindowThreadProcessId(hwnd) - if PId == self.PId: - win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) - - def exitCode(self): - """ - Return process exit code. - """ - return win32process.GetExitCodeProcess(self.hProcess) - - -def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): - """ - Run cmd as a child process and return exit code. - mSec: terminate cmd after specified number of milliseconds - stdin, stdout, stderr: - file objects for child I/O (use hStdin etc. 
to attach - handles instead of files); default is caller's stdin, - stdout & stderr; - kw: see Process.__init__ for more keyword options - """ - if stdin is not None: - kw["hStdin"] = msvcrt.get_osfhandle(stdin.fileno()) - if stdout is not None: - kw["hStdout"] = msvcrt.get_osfhandle(stdout.fileno()) - if stderr is not None: - kw["hStderr"] = msvcrt.get_osfhandle(stderr.fileno()) - child = Process(cmd, **kw) - if child.wait(mSec) != win32event.WAIT_OBJECT_0: - child.kill() - raise WindowsError("process timeout exceeded") - return child.exitCode() - - -if __name__ == "__main__": - # Pipe commands to a shell and display the output in notepad - print("Testing winprocess.py...") - - import tempfile - - timeoutSeconds = 15 - cmdString = ( - """\ -REM Test of winprocess.py piping commands to a shell.\r -REM This 'notepad' process will terminate in %d seconds.\r -vol\r -net user\r -_this_is_a_test_of_stderr_\r -""" - % timeoutSeconds - ) - - cmd_name = tempfile.mktemp() - out_name = cmd_name + ".txt" - try: - cmd = open(cmd_name, "w+b") - out = open(out_name, "w+b") - cmd.write(cmdString.encode("mbcs")) - cmd.seek(0) - print( - "CMD.EXE exit code:", - run("cmd.exe", show=0, stdin=cmd, stdout=out, stderr=out), - ) - cmd.close() - print( - "NOTEPAD exit code:", - run( - "notepad.exe %s" % out.name, - show=win32con.SW_MAXIMIZE, - mSec=timeoutSeconds * 1000, - ), - ) - out.close() - finally: - for n in (cmd_name, out_name): - try: - os.unlink(cmd_name) - except os.error: - pass diff --git a/lib/win32/include/PyWinTypes.h b/lib/win32/include/PyWinTypes.h deleted file mode 100644 index edfdb0b6..00000000 --- a/lib/win32/include/PyWinTypes.h +++ /dev/null @@ -1,727 +0,0 @@ - -#ifndef __PYWINTYPES_H__ -#define __PYWINTYPES_H__ - -// If building under a GCC, tweak what we need. 
-#if defined(__GNUC__) && defined(_POSIX_C_SOURCE) -// python.h complains if _POSIX_C_SOURCE is already defined -#undef _POSIX_C_SOURCE -#endif - -// windows rpc.h defines "small" as "char" which breaks Python's accu.h, -// so we undefine it before including python. -#ifdef small -#undef small -#endif - -#include "Python.h" -#include "structmember.h" -#include "windows.h" - -// Helpers for our modules. -// Some macros to help the pywin32 modules co-exist in py2x and py3k. -// Creates and initializes local variables called 'module' and 'dict'. - -// Maybe these should all be removed - they existed to help in the py2->3 -// transition. -// On one hand: the code would be cleaner if they were all just re-inlined? -// On the other: high confidence everything uses the exact same patterns? -// (Regardless, *some*, eg, PYWIN_MODULE_INIT_RETURN_* should be re-inlined!) - -// Use to define the function itself (ie, its name, linkage, params) -#define PYWIN_MODULE_INIT_FUNC(module_name) extern "C" __declspec(dllexport) PyObject *PyInit_##module_name(void) - -// If the module needs to early-exit on an error condition. -#define PYWIN_MODULE_INIT_RETURN_ERROR return NULL; - -// When the module has successfully initialized. -#define PYWIN_MODULE_INIT_RETURN_SUCCESS return module; - -// To setup the module object itself and the module's dictionary. -#define PYWIN_MODULE_INIT_PREPARE(module_name, functions, docstring) \ - PyObject *dict, *module; \ - static PyModuleDef module_name##_def = {PyModuleDef_HEAD_INIT, #module_name, docstring, -1, functions}; \ - if (PyWinGlobals_Ensure() == -1) \ - return NULL; \ - if (!(module = PyModule_Create(&module_name##_def))) \ - return NULL; \ - if (!(dict = PyModule_GetDict(module))) \ - return NULL; - -// Helpers for our types. 
-// Macro to handle PyObject layout changes in Py3k -#define PYWIN_OBJECT_HEAD PyVarObject_HEAD_INIT(NULL, 0) - -/* Attribute names are passed as Unicode in Py3k, so use a macro to - switch between string and unicode conversion. This function is not - documented, but is used extensively in the Python codebase itself, - so it's reasonable to assume it won't disappear anytime soon. -*/ -#define PYWIN_ATTR_CONVERT (char *)_PyUnicode_AsString - -typedef Py_ssize_t Py_hash_t; - -// This only enables runtime checks in debug builds - so we use -// our own so we can enable it always should we desire... -#define PyWin_SAFE_DOWNCAST Py_SAFE_DOWNCAST - -// Lars: for WAVEFORMATEX -#include "mmsystem.h" - -#ifdef BUILD_PYWINTYPES -/* We are building pywintypesxx.dll */ -#define PYWINTYPES_EXPORT __declspec(dllexport) -#else -/* This module uses pywintypesxx.dll */ -#define PYWINTYPES_EXPORT __declspec(dllimport) -#if defined(_MSC_VER) -#if defined(DEBUG) || defined(_DEBUG) -#pragma comment(lib, "pywintypes_d.lib") -#else -#pragma comment(lib, "pywintypes.lib") -#endif // DEBUG/_DEBUG -#endif // _MSC_VER -#endif // BUILD_PYWINTYPES - -// Py3k uses memoryview object in place of buffer, and we don't yet. -extern PYWINTYPES_EXPORT PyObject *PyBuffer_New(Py_ssize_t size); -extern PYWINTYPES_EXPORT PyObject *PyBuffer_FromMemory(void *buf, Py_ssize_t size); - -// Formats a python traceback into a character string - result must be free()ed -PYWINTYPES_EXPORT WCHAR *GetPythonTraceback(PyObject *exc_type, PyObject *exc_value, PyObject *exc_tb); - -#include -/* -** Error/Exception handling -*/ -extern PYWINTYPES_EXPORT PyObject *PyWinExc_ApiError; -// Register a Windows DLL that contains the messages in the specified range. -extern PYWINTYPES_EXPORT BOOL PyWin_RegisterErrorMessageModule(DWORD first, DWORD last, HINSTANCE hmod); -// Get the previously registered hmodule for an error code. 
-extern PYWINTYPES_EXPORT HINSTANCE PyWin_GetErrorMessageModule(DWORD err); - -/* A global function that sets an API style error (ie, (code, fn, errTest)) */ -PYWINTYPES_EXPORT PyObject *PyWin_SetAPIError(char *fnName, long err = 0); - -/* Basic COM Exception handling. The main COM exception object - is actually defined here. However, the most useful functions - for raising the exception are still in the COM package. Therefore, - you can use the fn below to raise a basic COM exception - no fancy error - messages available, just the HRESULT. It will, however, _be_ a COM - exception, and therefore trappable like any other COM exception -*/ -extern PYWINTYPES_EXPORT PyObject *PyWinExc_COMError; -PYWINTYPES_EXPORT PyObject *PyWin_SetBasicCOMError(HRESULT hr); - -// ************* -// strings, which are a bit of a mess! -// -// This has gone from 2.x ascii-only, to 2.x+3.x ascii-or-unicode, to 3.x unicode-only, -// - this baggage means some strange APIs which convert to and from "char *" in various ways. -// -// A sizes/lengths are reported as a `DWORD` rather than a `Py_ssize_t`, that's what the callers -// need. `Py_ssize_t` used as the "in" type. -// (We also use this for UINT and ULONG, all of which are 32bit unsigned ints.) - -// Sometimes we need to downcast from a ssize_t to a DWORD -inline bool PyWin_is_ssize_dword(Py_ssize_t val) { - return val <= MAXDWORD; -} - -#define PYWIN_CHECK_SSIZE_DWORD(val, failResult) \ - if (!PyWin_is_ssize_dword(val)) { \ - PyErr_SetString(PyExc_ValueError, "value is larger than a DWORD"); \ - return failResult; \ - } - -// Almost all of these are roughly identical! But start with BSTR -// Given a PyObject (string, Unicode, etc) create a "BSTR" with the value -PYWINTYPES_EXPORT BOOL PyWinObject_AsBstr(PyObject *stringObject, BSTR *pResult, BOOL bNoneOK = FALSE, - DWORD *pResultLen = NULL); -// And free it when finished. 
-PYWINTYPES_EXPORT void PyWinObject_FreeBstr(BSTR pResult); - -PYWINTYPES_EXPORT PyObject *PyWinObject_FromBstr(const BSTR bstr, BOOL takeOwnership = FALSE); - -// Given a string or Unicode object, get WCHAR characters. -PYWINTYPES_EXPORT BOOL PyWinObject_AsWCHAR(PyObject *stringObject, WCHAR **pResult, BOOL bNoneOK = FALSE, - DWORD *pResultLen = NULL); -// And free it when finished. -PYWINTYPES_EXPORT void PyWinObject_FreeWCHAR(WCHAR *pResult); - -inline BOOL PyWinObject_AsWCHAR(PyObject *stringObject, unsigned short **pResult, BOOL bNoneOK = FALSE, - DWORD *pResultLen = NULL) -{ - return PyWinObject_AsWCHAR(stringObject, (WCHAR **)pResult, bNoneOK, pResultLen); -} -inline void PyWinObject_FreeWCHAR(unsigned short *pResult) { PyWinObject_FreeWCHAR((WCHAR *)pResult); } - -// A bit unfortunate, but used when we logically want a "string" but only have -// a "char *" to put it in. -// * accepts bytes but boesn't try to accept buffer-like objects. -// * accepts unicode objects and converts via the code-page. -PYWINTYPES_EXPORT BOOL PyWinObject_AsChars(PyObject *stringObject, char **pResult, BOOL bNoneOK = FALSE, - DWORD *pResultLen = NULL); -// And free it when finished. 
-PYWINTYPES_EXPORT void PyWinObject_FreeChars(char *pResult); - -// Automatically freed WCHAR that can be used anywhere WCHAR * is required -class TmpWCHAR { - public: - WCHAR *tmp; // (NULL after conversion error) - Py_ssize_t length; // only set after successful auto-conversion; w/o trailing \0 - PyObject *u; // auxiliary slot for u2w() - - TmpWCHAR() { tmp = NULL; } - TmpWCHAR(WCHAR *t) { tmp = t; } - TmpWCHAR(PyObject *ob) : tmp(NULL) { *this = ob; } - WCHAR *u2w() { return *this = u; } - WCHAR *operator=(PyObject *ob) { - if (tmp) - PyMem_Free(tmp); - if (ob == NULL) - tmp = NULL; // (exception already has been set in this case) - else - tmp = PyUnicode_AsWideCharString(ob, &length); - return tmp; - } - WCHAR *operator=(WCHAR *t) - { - if (tmp) - PyMem_Free(tmp); - tmp = t; - return t; - } - WCHAR **operator&() { return &tmp; } - boolean operator==(WCHAR *t) { return tmp == t; } - operator WCHAR *() { return tmp; } - ~TmpWCHAR() { if (tmp) PyMem_Free(tmp); } - private: - // Block unwanted copy construction - TmpWCHAR(const TmpWCHAR& o); // = delete; - const TmpWCHAR& operator=(const TmpWCHAR& o); // = delete; -}; - -// More string helpers - how many do we need? -// A couple which should die or be modernized and have some `char *` vs `wchar_t *` confusion. 
-PYWINTYPES_EXPORT PyObject *PyWinCoreString_FromString(const char *str, Py_ssize_t len = (Py_ssize_t)-1); -PYWINTYPES_EXPORT PyObject *PyWinCoreString_FromString(const WCHAR *str, Py_ssize_t len = (Py_ssize_t)-1); - -#define PyWinObject_FromWCHAR PyWinObject_FromOLECHAR - -PYWINTYPES_EXPORT PyObject *PyWinObject_FromOLECHAR(const OLECHAR *str); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromOLECHAR(const OLECHAR *str, Py_ssize_t numChars); - -#define PyWinObject_AsTCHAR PyWinObject_AsWCHAR -#define PyWinObject_FreeTCHAR PyWinObject_FreeWCHAR -#define PyWinObject_FromTCHAR PyWinObject_FromOLECHAR - -// String support for buffers allocated via CoTaskMemAlloc and CoTaskMemFree -PYWINTYPES_EXPORT BOOL PyWinObject_AsTaskAllocatedWCHAR(PyObject *stringObject, WCHAR **ppResult, BOOL bNoneOK = FALSE); -PYWINTYPES_EXPORT void PyWinObject_FreeTaskAllocatedWCHAR(WCHAR *str); - -// Copy null terminated string with same allocator as PyWinObject_AsWCHAR, etc -// ? wot? -PYWINTYPES_EXPORT WCHAR *PyWin_CopyString(const WCHAR *input); -PYWINTYPES_EXPORT char *PyWin_CopyString(const char *input); - -// Some helpers for arrays of strings. -// -// Converts a series of consecutive null terminated strings into a list -// ??? wot? 
-PYWINTYPES_EXPORT PyObject *PyWinObject_FromMultipleString(WCHAR *multistring); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromMultipleString(char *multistring); -// Converts a sequence of str/unicode objects into a series of consecutive null-terminated -// wide character strings with extra terminating null -PYWINTYPES_EXPORT BOOL PyWinObject_AsMultipleString(PyObject *ob, WCHAR **pmultistring, BOOL bNoneOK = TRUE, - DWORD *chars_returned = NULL); -PYWINTYPES_EXPORT void PyWinObject_FreeMultipleString(WCHAR *pmultistring); - -// Convert a sequence of strings to an array of WCHAR pointers -PYWINTYPES_EXPORT void PyWinObject_FreeWCHARArray(LPWSTR *wchars, DWORD str_cnt); -PYWINTYPES_EXPORT BOOL PyWinObject_AsWCHARArray(PyObject *str_seq, LPWSTR **wchars, DWORD *str_cnt, - BOOL bNoneOK = FALSE); - -// Convert a sequence of string or unicode objects to an array of char * -PYWINTYPES_EXPORT void PyWinObject_FreeCharArray(char **pchars, DWORD str_cnt); -PYWINTYPES_EXPORT BOOL PyWinObject_AsCharArray(PyObject *str_seq, char ***pchars, DWORD *str_cnt, BOOL bNoneOK = FALSE); - -// Bytes/Buffer helpers. 
-// replacement for PyWinObject_AsReadBuffer and PyWinObject_AsWriteBuffer -class PYWINTYPES_EXPORT PyWinBufferView -{ -public: - PyWinBufferView() { m_view.obj = NULL; } - PyWinBufferView(PyObject *ob, bool bWrite = false, bool bNoneOk = false) { - m_view.obj = NULL; - init(ob, bWrite, bNoneOk); - } - ~PyWinBufferView() { release(); } - bool init(PyObject *ob, bool bWrite = false, bool bNoneOk = false); - void release() { - if (m_view.obj != NULL && m_view.obj != Py_None) { - PyBuffer_Release(&m_view); // sets view->obj = NULL - } - } - bool ok() { return m_view.obj != NULL; } - void* ptr() { return m_view.buf; } - DWORD len() { return static_cast(m_view.len); } - -private: - Py_buffer m_view; - - // don't copy objects and don't use C++ >= 11 -> not implemented private - // copy ctor and assignment operator - PyWinBufferView(const PyWinBufferView& src); - PyWinBufferView& operator=(PyWinBufferView const &); -}; - - -// For 64-bit python compatibility, convert sequence to tuple and check length fits in a DWORD -PYWINTYPES_EXPORT PyObject *PyWinSequence_Tuple(PyObject *obseq, DWORD *len); - -// Pointers. 
-// Substitute for Python's inconsistent PyLong_AsVoidPtr -PYWINTYPES_EXPORT BOOL PyWinLong_AsVoidPtr(PyObject *ob, void **pptr); -PYWINTYPES_EXPORT PyObject *PyWinLong_FromVoidPtr(const void *ptr); - -/* -** LARGE_INTEGER objects -*/ -// AsLARGE_INTEGER takes either int or long -PYWINTYPES_EXPORT BOOL PyWinObject_AsLARGE_INTEGER(PyObject *ob, LARGE_INTEGER *pResult); -PYWINTYPES_EXPORT BOOL PyWinObject_AsULARGE_INTEGER(PyObject *ob, ULARGE_INTEGER *pResult); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromLARGE_INTEGER(const LARGE_INTEGER &val); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromULARGE_INTEGER(const ULARGE_INTEGER &val); -// Helpers that take a Py_LONG_LONG, but (a) have pywin32 consistent signatures -// and (b) handle int *and* long (where Python only starts doing that in the -// PyLong_* APIs post 2.4) -// We also happen to know a LARGE_INTEGER is an __int64, so do it the easy way -#define PyWinObject_AsPY_LONG_LONG(ob, pResult) PyWinObject_AsLARGE_INTEGER((ob), (LARGE_INTEGER *)(pResult)) -#define PyWinObject_AsUPY_LONG_LONG(ob, pResult) PyWinObject_AsULARGE_INTEGER((ob), (ULARGE_INTEGER *)(pResult)) -#define PyWinObject_FromPY_LONG_LONG(val) PyWinObject_FromLARGE_INTEGER((LARGE_INTEGER)val) -#define PyWinObject_FromUPY_LONG_LONG(val) PyWinObject_FromULARGE_INTEGER((ULARGE_INTEGER)val) - -// A DWORD_PTR and ULONG_PTR appear to mean "integer long enough to hold a pointer" -// It is *not* actually a pointer (but is the same size as a pointer) -inline PyObject *PyWinObject_FromULONG_PTR(ULONG_PTR v) { return PyWinLong_FromVoidPtr((void *)v); } -inline BOOL PyWinLong_AsULONG_PTR(PyObject *ob, ULONG_PTR *r) { return PyWinLong_AsVoidPtr(ob, (void **)r); } - -inline PyObject *PyWinObject_FromDWORD_PTR(DWORD_PTR v) { return PyLong_FromVoidPtr((void *)v); } -inline BOOL PyWinLong_AsDWORD_PTR(PyObject *ob, DWORD_PTR *r) { return PyWinLong_AsVoidPtr(ob, (void **)r); } - -/* -** OVERLAPPED Object and API -*/ -class PyOVERLAPPED; // forward declare -extern 
PYWINTYPES_EXPORT PyTypeObject PyOVERLAPPEDType; // the Type for PyOVERLAPPED -#define PyOVERLAPPED_Check(ob) ((ob)->ob_type == &PyOVERLAPPEDType) -PYWINTYPES_EXPORT BOOL PyWinObject_AsOVERLAPPED(PyObject *ob, OVERLAPPED **ppOverlapped, BOOL bNoneOK = TRUE); -PYWINTYPES_EXPORT BOOL PyWinObject_AsPyOVERLAPPED(PyObject *ob, PyOVERLAPPED **ppOverlapped, BOOL bNoneOK = TRUE); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromOVERLAPPED(const OVERLAPPED *pOverlapped); - -// A global function that can work as a module method for making an OVERLAPPED object. -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewOVERLAPPED(PyObject *self, PyObject *args); - -#ifndef NO_PYWINTYPES_IID -/* -** IID/GUID support -*/ - -extern PYWINTYPES_EXPORT PyTypeObject PyIIDType; // the Type for PyIID -#define PyIID_Check(ob) ((ob)->ob_type == &PyIIDType) - -// Given an object repring a CLSID (either PyIID or string), fill the CLSID. -PYWINTYPES_EXPORT BOOL PyWinObject_AsIID(PyObject *obCLSID, CLSID *clsid); - -// return a native PyIID object representing an IID -PYWINTYPES_EXPORT PyObject *PyWinObject_FromIID(const IID &riid); - -// return a string/Unicode object representing an IID -PYWINTYPES_EXPORT PyObject *PyWinCoreString_FromIID(const IID &riid); - -// A global function that can work as a module method for making an IID object. -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewIID(PyObject *self, PyObject *args); -#endif /*NO_PYWINTYPES_IID */ - -/* -** TIME support -** -** We use a subclass of the builtin datetime. -*/ - -PYWINTYPES_EXPORT PyObject *PyWinObject_FromSYSTEMTIME(const SYSTEMTIME &t); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromFILETIME(const FILETIME &t); - -// Converts a TimeStamp, which is in 100 nanosecond units like a FILETIME -// TimeStamp is actually defined as a LARGE_INTEGER, so this function will also -// accept Windows security "TimeStamp" objects directly - however, we use a -// LARGE_INTEGER prototype to avoid pulling in the windows security headers. 
-PYWINTYPES_EXPORT PyObject *PyWinObject_FromTimeStamp(const LARGE_INTEGER &t); -PYWINTYPES_EXPORT PyObject *PyWinTimeObject_Fromtime_t(time_t t); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromDATE(DATE t); - -PYWINTYPES_EXPORT BOOL PyWinObject_AsDATE(PyObject *ob, DATE *pDate); -PYWINTYPES_EXPORT BOOL PyWinObject_AsFILETIME(PyObject *ob, FILETIME *pDate); -PYWINTYPES_EXPORT BOOL PyWinObject_AsSYSTEMTIME(PyObject *ob, SYSTEMTIME *pDate); - -// A global function that can work as a module method for making a time object. -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewTime(PyObject *self, PyObject *args); -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewTimeStamp(PyObject *self, PyObject *args); - -PYWINTYPES_EXPORT BOOL PyWinTime_Check(PyObject *ob); - -// functions to return WIN32_FIND_DATA tuples, used in shell, win32api, and win32file -PYWINTYPES_EXPORT PyObject *PyObject_FromWIN32_FIND_DATAW(WIN32_FIND_DATAW *pData); -#define PyObject_FromWIN32_FIND_DATA PyObject_FromWIN32_FIND_DATAW - -// POINT tuple, used in win32api_display.cpp and win32gui.i -PYWINTYPES_EXPORT BOOL PyWinObject_AsPOINT(PyObject *obpoint, LPPOINT ppoint); - -// IO_COUNTERS dict, used in win32process and win32job -PYWINTYPES_EXPORT PyObject *PyWinObject_FromIO_COUNTERS(PIO_COUNTERS pioc); - -// Make an array of DWORD's from a sequence of Python ints -PYWINTYPES_EXPORT BOOL PyWinObject_AsDWORDArray(PyObject *obdwords, DWORD **pdwords, DWORD *item_cnt, - BOOL bNoneOk = TRUE); - -// Conversion for resource id/name and class atom -PYWINTYPES_EXPORT BOOL PyWinObject_AsResourceIdA(PyObject *ob, char **presource_id, BOOL bNoneOK = FALSE); -PYWINTYPES_EXPORT void PyWinObject_FreeResourceIdA(char *resource_id); - -PYWINTYPES_EXPORT BOOL PyWinObject_AsResourceId(PyObject *ob, WCHAR **presource_id, BOOL bNoneOK = FALSE); -PYWINTYPES_EXPORT void PyWinObject_FreeResourceId(WCHAR *resource_id); - -// WPARAM and LPARAM conversion. 
-// Auto-freed WPARAM / LPARAM which ensure any memory referenced remains valid when a String or -// Buffer object is used. Make sure the destructor is called with the GIL held. -class PyWin_PARAMHolder { - protected: - WPARAM _pa; - // Holds *either* a PyWinBufferView (which will auto-free) *or* a "void *" that we - // will auto-free. - void *_pymem; - void _free() { - if (_pymem) { - PyMem_Free(_pymem); - _pymem = NULL; - } - } - public: - PyWinBufferView bufferView; - - PyWin_PARAMHolder(WPARAM t=0):_pa(t),_pymem(NULL) {} - ~PyWin_PARAMHolder() { - _free(); - } - WCHAR *set_allocated(WCHAR *t) { - assert(!bufferView.ok()); // should be one or the other. - _free(); - _pymem = t; - _pa = (WPARAM)t; - return t; - } - // When init_buffer() fails, an appropriate Python error has been set too - bool init_buffer(PyObject *ob) { - assert(!_pymem); // should be one or the other! - _free(); - if (!bufferView.init(ob)) { - return false; - } - _pa = (WPARAM)bufferView.ptr(); - return true; - } - - WPARAM operator=(WPARAM t) { - return _pa = t; - } - operator WPARAM() { return _pa; } - operator LPARAM() { return (LPARAM)_pa; } -}; - -PYWINTYPES_EXPORT BOOL PyWinObject_AsPARAM(PyObject *ob, PyWin_PARAMHolder *pparam); -inline PyObject *PyWinObject_FromPARAM(WPARAM param) { return PyWinObject_FromULONG_PTR(param); } -inline PyObject *PyWinObject_FromPARAM(LPARAM param) { return PyWinObject_FromULONG_PTR(param); } - -PYWINTYPES_EXPORT BOOL PyWinObject_AsSimplePARAM(PyObject *ob, WPARAM *pparam); -inline BOOL PyWinObject_AsSimplePARAM(PyObject *ob, LPARAM *pparam) { return PyWinObject_AsSimplePARAM(ob, (WPARAM *)pparam); } - -// RECT conversions -// @object PyRECT|Tuple of 4 ints defining a rectangle: (left, top, right, bottom) -PYWINTYPES_EXPORT BOOL PyWinObject_AsRECT(PyObject *obrect, LPRECT prect); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromRECT(LPRECT prect); - -/* -** SECURITY_ATTRIBUTES support -*/ -extern PYWINTYPES_EXPORT PyTypeObject PySECURITY_ATTRIBUTESType; 
-#define PySECURITY_ATTRIBUTES_Check(ob) ((ob)->ob_type == &PySECURITY_ATTRIBUTESType) -extern PYWINTYPES_EXPORT PyTypeObject PyDEVMODEWType; - -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSECURITY_ATTRIBUTES(PyObject *self, PyObject *args); -PYWINTYPES_EXPORT BOOL PyWinObject_AsSECURITY_ATTRIBUTES(PyObject *ob, SECURITY_ATTRIBUTES **ppSECURITY_ATTRIBUTES, - BOOL bNoneOK = TRUE); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromSECURITY_ATTRIBUTES(const SECURITY_ATTRIBUTES &sa); -PYWINTYPES_EXPORT BOOL PyWinObject_AsDEVMODE(PyObject *ob, PDEVMODEW *ppDEVMODE, BOOL bNoneOK); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromDEVMODE(PDEVMODEW); - -/* -** WAVEFORMATEX support -*/ - -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewWAVEFORMATEX(PyObject *self, PyObject *args); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromWAVEFROMATEX(const WAVEFORMATEX &wfx); -PYWINTYPES_EXPORT BOOL PyWinObject_AsWAVEFORMATEX(PyObject *ob, WAVEFORMATEX **ppWAVEFORMATEX, BOOL bNoneOK = TRUE); -extern PYWINTYPES_EXPORT PyTypeObject PyWAVEFORMATEXType; -#define PyWAVEFORMATEX_Check(ob) ((ob)->ob_type == &PyWAVEFORMATEXType) - -/* -** SECURITY_DESCRIPTOR support -*/ -extern PYWINTYPES_EXPORT PyTypeObject PySECURITY_DESCRIPTORType; -#define PySECURITY_DESCRIPTOR_Check(ob) ((ob)->ob_type == &PySECURITY_DESCRIPTORType) - -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSECURITY_DESCRIPTOR(PyObject *self, PyObject *args); -PYWINTYPES_EXPORT BOOL PyWinObject_AsSECURITY_DESCRIPTOR(PyObject *ob, PSECURITY_DESCRIPTOR *ppSECURITY_DESCRIPTOR, - BOOL bNoneOK = TRUE); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromSECURITY_DESCRIPTOR(PSECURITY_DESCRIPTOR psd); - -PYWINTYPES_EXPORT BOOL _MakeAbsoluteSD(PSECURITY_DESCRIPTOR psd_relative, PSECURITY_DESCRIPTOR *ppsd_absolute); -PYWINTYPES_EXPORT void FreeAbsoluteSD(PSECURITY_DESCRIPTOR psd); - -/* -** SID support -*/ -extern PYWINTYPES_EXPORT PyTypeObject PySIDType; -#define PySID_Check(ob) ((ob)->ob_type == &PySIDType) - -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSID(PyObject 
*self, PyObject *args); -PYWINTYPES_EXPORT BOOL PyWinObject_AsSID(PyObject *ob, PSID *ppSID, BOOL bNoneOK = FALSE); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromSID(PSID pSID); - -/* -** ACL support -*/ -extern PYWINTYPES_EXPORT PyTypeObject PyACLType; -#define PyACL_Check(ob) ((ob)->ob_type == &PyACLType) - -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewACL(PyObject *self, PyObject *args); -PYWINTYPES_EXPORT BOOL PyWinObject_AsACL(PyObject *ob, PACL *ppACL, BOOL bNoneOK = FALSE); - -/* -** Win32 HANDLE wrapper - any handle closable by "CloseHandle()" -*/ -extern PYWINTYPES_EXPORT PyTypeObject PyHANDLEType; // the Type for PyHANDLE -#define PyHANDLE_Check(ob) ((ob)->ob_type == &PyHANDLEType) - -// Convert an object to a HANDLE - None is always OK, as are ints, etc. -PYWINTYPES_EXPORT BOOL PyWinObject_AsHANDLE(PyObject *ob, HANDLE *pRes); -// For handles that use PyHANDLE. -PYWINTYPES_EXPORT PyObject *PyWinObject_FromHANDLE(HANDLE h); -// For handles that aren't returned as PyHANDLE or a subclass thereof (HDC, HWND, etc). -// Return as python ints or longs -PYWINTYPES_EXPORT PyObject *PyWinLong_FromHANDLE(HANDLE h); - -// A global function that can work as a module method for making a HANDLE object. -PYWINTYPES_EXPORT PyObject *PyWinMethod_NewHANDLE(PyObject *self, PyObject *args); - -// A global function that does the right thing wrt closing a "handle". -// The object can be either a PyHANDLE or an integer. -// If result is FALSE, a Python error is all setup (cf PyHANDLE::Close(), which doesnt set the Python error) -PYWINTYPES_EXPORT BOOL PyWinObject_CloseHANDLE(PyObject *obHandle); - -PYWINTYPES_EXPORT BOOL PyWinObject_AsHKEY(PyObject *ob, HKEY *pRes); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromHKEY(HKEY h); -PYWINTYPES_EXPORT BOOL PyWinObject_CloseHKEY(PyObject *obHandle); - -// MSG structure keeps coming up... 
-PYWINTYPES_EXPORT BOOL PyWinObject_AsMSG(PyObject *ob, MSG *pMsg); -PYWINTYPES_EXPORT PyObject *PyWinObject_FromMSG(const MSG *pMsg); - -#include "winsock.h" -/* -** SOCKET support. -*/ -PYWINTYPES_EXPORT -BOOL PySocket_AsSOCKET - //------------------------------------------------------------------------- - // Helper function for dealing with socket arguments. - (PyObject *obSocket, - // [in] Python object being converted into a SOCKET handle. - SOCKET *ps - // [out] Returned socket handle - ); - -/* -** Other Utilities -*/ -// ---------------------------------------------------------------------- -// WARNING - NEVER EVER USE new() ON THIS CLASS -// This class can be used as a local variable, typically in a Python/C -// function, and can be passed whereever a TCHAR/WCHAR is expected. -// Typical Usage: -// PyWin_AutoFreeBstr arg; -// PyArg_ParseTuple("O", &obStr); -// PyWinObject_AsAutoFreeBstr(obStr, &arg); -// CallTheFunction(arg); // Will correctly pass BSTR/OLECHAR -// -- when the function goes out of scope, the string owned by "arg" will -// -- automatically be freed. -// ---------------------------------------------------------------------- -class PYWINTYPES_EXPORT PyWin_AutoFreeBstr { - public: - PyWin_AutoFreeBstr(BSTR bstr = NULL); - ~PyWin_AutoFreeBstr(); - void SetBstr(BSTR bstr); - operator BSTR() { return m_bstr; } - - private: - BSTR m_bstr; -}; - -inline BOOL PyWinObject_AsAutoFreeBstr(PyObject *stringObject, PyWin_AutoFreeBstr *pResult, BOOL bNoneOK = FALSE) -{ - if (bNoneOK && stringObject == Py_None) { - pResult->SetBstr(NULL); - return TRUE; - } - BSTR bs; - if (!PyWinObject_AsBstr(stringObject, &bs, bNoneOK)) - return FALSE; - pResult->SetBstr(bs); - return TRUE; -} - -// ---------------------------------------------------------------------- -// -// THREAD MANAGEMENT -// - -// ### need to rename the PYCOM_ stuff soon... - -// We have 2 discrete locks in use (when no free-threaded is used, anyway). 
-// The first type of lock is the global Python lock. This is the standard lock -// in use by Python, and must be used as documented by Python. Specifically, no -// 2 threads may _ever_ call _any_ Python code (including INCREF/DECREF) without -// first having this thread lock. -// -// The second type of lock is a "global framework lock". This lock is simply a -// critical section, and used whenever 2 threads of C code need access to global -// data. This is different than the Python lock - this lock is used when no Python -// code can ever be called by the threads, but the C code still needs thread-safety. - -// We also supply helper classes which make the usage of these locks a one-liner. - -// The "framework" lock, implemented as a critical section. -PYWINTYPES_EXPORT void PyWin_AcquireGlobalLock(void); -PYWINTYPES_EXPORT void PyWin_ReleaseGlobalLock(void); - -// Helper class for the DLL global lock. -// -// This class magically waits for the Win32/COM framework global lock, and releases it -// when finished. -// NEVER new one of these objects - only use on the stack! -class CEnterLeaveFramework { - public: - CEnterLeaveFramework() { PyWin_AcquireGlobalLock(); } - ~CEnterLeaveFramework() { PyWin_ReleaseGlobalLock(); } -}; - -// Python thread-lock stuff. Free-threading patches use different semantics, but -// these are abstracted away here... -#ifndef FORCE_NO_FREE_THREAD -#ifdef WITH_FREE_THREAD -#define PYCOM_USE_FREE_THREAD -#endif -#endif -#ifdef PYCOM_USE_FREE_THREAD -#include -#else -#include -#endif - -// Helper class for Enter/Leave Python -// -// This class magically waits for the Python global lock, and releases it -// when finished. - -// Nested invocations will deadlock, so be careful. - -// NEVER new one of these objects - only use on the stack! 
-#ifndef PYCOM_USE_FREE_THREAD -extern PYWINTYPES_EXPORT PyInterpreterState *PyWin_InterpreterState; -extern PYWINTYPES_EXPORT BOOL PyWinThreadState_Ensure(); -extern PYWINTYPES_EXPORT void PyWinThreadState_Free(); -extern PYWINTYPES_EXPORT void PyWinThreadState_Clear(); -extern PYWINTYPES_EXPORT void PyWinInterpreterLock_Acquire(); -extern PYWINTYPES_EXPORT void PyWinInterpreterLock_Release(); - -extern PYWINTYPES_EXPORT int PyWinGlobals_Ensure(); -extern PYWINTYPES_EXPORT void PyWinGlobals_Free(); -#else -#define PyWinThreadState_Ensure PyThreadState_Ensure -#define PyWinThreadState_Free PyThreadState_Free -#define PyWinThreadState_Clear PyThreadState_ClearExc - -#endif - -extern PYWINTYPES_EXPORT void PyWin_MakePendingCalls(); - -class CEnterLeavePython { - public: - CEnterLeavePython() : released(TRUE) { acquire(); } - void acquire(void) - { - if (!released) - return; - state = PyGILState_Ensure(); - released = FALSE; - } - ~CEnterLeavePython() { release(); } - void release(void) - { - if (!released) { - PyGILState_Release(state); - released = TRUE; - } - } - - private: - PyGILState_STATE state; - BOOL released; -}; - -// A helper for simple exception handling. -// try/__try -#if defined(__MINGW32__) || defined(MAINWIN) -#define PYWINTYPES_TRY try -#else -#define PYWINTYPES_TRY __try -#endif /* MAINWIN */ - -// catch/__except -#if defined(__MINGW32__) || defined(MAINWIN) -#define PYWINTYPES_EXCEPT catch (...) -#else -#define PYWINTYPES_EXCEPT __except (EXCEPTION_EXECUTE_HANDLER) -#endif -// End of exception helper macros. 
- -// Class to hold a temporary reference that decrements itself -class TmpPyObject { - public: - PyObject *tmp; - TmpPyObject() { tmp = NULL; } - TmpPyObject(PyObject *ob) { tmp = ob; } - PyObject *operator=(PyObject *ob) - { - Py_XDECREF(tmp); - tmp = ob; - return tmp; - } - - boolean operator==(PyObject *ob) { return tmp == ob; } - operator PyObject *() { return tmp; } - ~TmpPyObject() { Py_XDECREF(tmp); } -}; - -#endif // __PYWINTYPES_H__ diff --git a/lib/win32/lib/afxres.py b/lib/win32/lib/afxres.py deleted file mode 100644 index 249211ff..00000000 --- a/lib/win32/lib/afxres.py +++ /dev/null @@ -1,501 +0,0 @@ -# Generated by h2py from stdin -TCS_MULTILINE = 0x0200 -CBRS_ALIGN_LEFT = 0x1000 -CBRS_ALIGN_TOP = 0x2000 -CBRS_ALIGN_RIGHT = 0x4000 -CBRS_ALIGN_BOTTOM = 0x8000 -CBRS_ALIGN_ANY = 0xF000 -CBRS_BORDER_LEFT = 0x0100 -CBRS_BORDER_TOP = 0x0200 -CBRS_BORDER_RIGHT = 0x0400 -CBRS_BORDER_BOTTOM = 0x0800 -CBRS_BORDER_ANY = 0x0F00 -CBRS_TOOLTIPS = 0x0010 -CBRS_FLYBY = 0x0020 -CBRS_FLOAT_MULTI = 0x0040 -CBRS_BORDER_3D = 0x0080 -CBRS_HIDE_INPLACE = 0x0008 -CBRS_SIZE_DYNAMIC = 0x0004 -CBRS_SIZE_FIXED = 0x0002 -CBRS_FLOATING = 0x0001 -CBRS_GRIPPER = 0x00400000 -CBRS_ORIENT_HORZ = CBRS_ALIGN_TOP | CBRS_ALIGN_BOTTOM -CBRS_ORIENT_VERT = CBRS_ALIGN_LEFT | CBRS_ALIGN_RIGHT -CBRS_ORIENT_ANY = CBRS_ORIENT_HORZ | CBRS_ORIENT_VERT -CBRS_ALL = 0xFFFF -CBRS_NOALIGN = 0x00000000 -CBRS_LEFT = CBRS_ALIGN_LEFT | CBRS_BORDER_RIGHT -CBRS_TOP = CBRS_ALIGN_TOP | CBRS_BORDER_BOTTOM -CBRS_RIGHT = CBRS_ALIGN_RIGHT | CBRS_BORDER_LEFT -CBRS_BOTTOM = CBRS_ALIGN_BOTTOM | CBRS_BORDER_TOP -SBPS_NORMAL = 0x0000 -SBPS_NOBORDERS = 0x0100 -SBPS_POPOUT = 0x0200 -SBPS_OWNERDRAW = 0x1000 -SBPS_DISABLED = 0x04000000 -SBPS_STRETCH = 0x08000000 -ID_INDICATOR_EXT = 0xE700 -ID_INDICATOR_CAPS = 0xE701 -ID_INDICATOR_NUM = 0xE702 -ID_INDICATOR_SCRL = 0xE703 -ID_INDICATOR_OVR = 0xE704 -ID_INDICATOR_REC = 0xE705 -ID_INDICATOR_KANA = 0xE706 -ID_SEPARATOR = 0 -AFX_IDW_CONTROLBAR_FIRST = 0xE800 
-AFX_IDW_CONTROLBAR_LAST = 0xE8FF -AFX_IDW_TOOLBAR = 0xE800 -AFX_IDW_STATUS_BAR = 0xE801 -AFX_IDW_PREVIEW_BAR = 0xE802 -AFX_IDW_RESIZE_BAR = 0xE803 -AFX_IDW_DOCKBAR_TOP = 0xE81B -AFX_IDW_DOCKBAR_LEFT = 0xE81C -AFX_IDW_DOCKBAR_RIGHT = 0xE81D -AFX_IDW_DOCKBAR_BOTTOM = 0xE81E -AFX_IDW_DOCKBAR_FLOAT = 0xE81F - - -def AFX_CONTROLBAR_MASK(nIDC): - return 1 << (nIDC - AFX_IDW_CONTROLBAR_FIRST) - - -AFX_IDW_PANE_FIRST = 0xE900 -AFX_IDW_PANE_LAST = 0xE9FF -AFX_IDW_HSCROLL_FIRST = 0xEA00 -AFX_IDW_VSCROLL_FIRST = 0xEA10 -AFX_IDW_SIZE_BOX = 0xEA20 -AFX_IDW_PANE_SAVE = 0xEA21 -AFX_IDS_APP_TITLE = 0xE000 -AFX_IDS_IDLEMESSAGE = 0xE001 -AFX_IDS_HELPMODEMESSAGE = 0xE002 -AFX_IDS_APP_TITLE_EMBEDDING = 0xE003 -AFX_IDS_COMPANY_NAME = 0xE004 -AFX_IDS_OBJ_TITLE_INPLACE = 0xE005 -ID_FILE_NEW = 0xE100 -ID_FILE_OPEN = 0xE101 -ID_FILE_CLOSE = 0xE102 -ID_FILE_SAVE = 0xE103 -ID_FILE_SAVE_AS = 0xE104 -ID_FILE_PAGE_SETUP = 0xE105 -ID_FILE_PRINT_SETUP = 0xE106 -ID_FILE_PRINT = 0xE107 -ID_FILE_PRINT_DIRECT = 0xE108 -ID_FILE_PRINT_PREVIEW = 0xE109 -ID_FILE_UPDATE = 0xE10A -ID_FILE_SAVE_COPY_AS = 0xE10B -ID_FILE_SEND_MAIL = 0xE10C -ID_FILE_MRU_FIRST = 0xE110 -ID_FILE_MRU_FILE1 = 0xE110 -ID_FILE_MRU_FILE2 = 0xE111 -ID_FILE_MRU_FILE3 = 0xE112 -ID_FILE_MRU_FILE4 = 0xE113 -ID_FILE_MRU_FILE5 = 0xE114 -ID_FILE_MRU_FILE6 = 0xE115 -ID_FILE_MRU_FILE7 = 0xE116 -ID_FILE_MRU_FILE8 = 0xE117 -ID_FILE_MRU_FILE9 = 0xE118 -ID_FILE_MRU_FILE10 = 0xE119 -ID_FILE_MRU_FILE11 = 0xE11A -ID_FILE_MRU_FILE12 = 0xE11B -ID_FILE_MRU_FILE13 = 0xE11C -ID_FILE_MRU_FILE14 = 0xE11D -ID_FILE_MRU_FILE15 = 0xE11E -ID_FILE_MRU_FILE16 = 0xE11F -ID_FILE_MRU_LAST = 0xE11F -ID_EDIT_CLEAR = 0xE120 -ID_EDIT_CLEAR_ALL = 0xE121 -ID_EDIT_COPY = 0xE122 -ID_EDIT_CUT = 0xE123 -ID_EDIT_FIND = 0xE124 -ID_EDIT_PASTE = 0xE125 -ID_EDIT_PASTE_LINK = 0xE126 -ID_EDIT_PASTE_SPECIAL = 0xE127 -ID_EDIT_REPEAT = 0xE128 -ID_EDIT_REPLACE = 0xE129 -ID_EDIT_SELECT_ALL = 0xE12A -ID_EDIT_UNDO = 0xE12B -ID_EDIT_REDO = 0xE12C -ID_WINDOW_NEW = 0xE130 -ID_WINDOW_ARRANGE 
= 0xE131 -ID_WINDOW_CASCADE = 0xE132 -ID_WINDOW_TILE_HORZ = 0xE133 -ID_WINDOW_TILE_VERT = 0xE134 -ID_WINDOW_SPLIT = 0xE135 -AFX_IDM_WINDOW_FIRST = 0xE130 -AFX_IDM_WINDOW_LAST = 0xE13F -AFX_IDM_FIRST_MDICHILD = 0xFF00 -ID_APP_ABOUT = 0xE140 -ID_APP_EXIT = 0xE141 -ID_HELP_INDEX = 0xE142 -ID_HELP_FINDER = 0xE143 -ID_HELP_USING = 0xE144 -ID_CONTEXT_HELP = 0xE145 -ID_HELP = 0xE146 -ID_DEFAULT_HELP = 0xE147 -ID_NEXT_PANE = 0xE150 -ID_PREV_PANE = 0xE151 -ID_FORMAT_FONT = 0xE160 -ID_OLE_INSERT_NEW = 0xE200 -ID_OLE_EDIT_LINKS = 0xE201 -ID_OLE_EDIT_CONVERT = 0xE202 -ID_OLE_EDIT_CHANGE_ICON = 0xE203 -ID_OLE_EDIT_PROPERTIES = 0xE204 -ID_OLE_VERB_FIRST = 0xE210 -ID_OLE_VERB_LAST = 0xE21F -AFX_ID_PREVIEW_CLOSE = 0xE300 -AFX_ID_PREVIEW_NUMPAGE = 0xE301 -AFX_ID_PREVIEW_NEXT = 0xE302 -AFX_ID_PREVIEW_PREV = 0xE303 -AFX_ID_PREVIEW_PRINT = 0xE304 -AFX_ID_PREVIEW_ZOOMIN = 0xE305 -AFX_ID_PREVIEW_ZOOMOUT = 0xE306 -ID_VIEW_TOOLBAR = 0xE800 -ID_VIEW_STATUS_BAR = 0xE801 -ID_RECORD_FIRST = 0xE900 -ID_RECORD_LAST = 0xE901 -ID_RECORD_NEXT = 0xE902 -ID_RECORD_PREV = 0xE903 -IDC_STATIC = -1 -AFX_IDS_SCFIRST = 0xEF00 -AFX_IDS_SCSIZE = 0xEF00 -AFX_IDS_SCMOVE = 0xEF01 -AFX_IDS_SCMINIMIZE = 0xEF02 -AFX_IDS_SCMAXIMIZE = 0xEF03 -AFX_IDS_SCNEXTWINDOW = 0xEF04 -AFX_IDS_SCPREVWINDOW = 0xEF05 -AFX_IDS_SCCLOSE = 0xEF06 -AFX_IDS_SCRESTORE = 0xEF12 -AFX_IDS_SCTASKLIST = 0xEF13 -AFX_IDS_MDICHILD = 0xEF1F -AFX_IDS_DESKACCESSORY = 0xEFDA -AFX_IDS_OPENFILE = 0xF000 -AFX_IDS_SAVEFILE = 0xF001 -AFX_IDS_ALLFILTER = 0xF002 -AFX_IDS_UNTITLED = 0xF003 -AFX_IDS_SAVEFILECOPY = 0xF004 -AFX_IDS_PREVIEW_CLOSE = 0xF005 -AFX_IDS_UNNAMED_FILE = 0xF006 -AFX_IDS_ABOUT = 0xF010 -AFX_IDS_HIDE = 0xF011 -AFX_IDP_NO_ERROR_AVAILABLE = 0xF020 -AFX_IDS_NOT_SUPPORTED_EXCEPTION = 0xF021 -AFX_IDS_RESOURCE_EXCEPTION = 0xF022 -AFX_IDS_MEMORY_EXCEPTION = 0xF023 -AFX_IDS_USER_EXCEPTION = 0xF024 -AFX_IDS_PRINTONPORT = 0xF040 -AFX_IDS_ONEPAGE = 0xF041 -AFX_IDS_TWOPAGE = 0xF042 -AFX_IDS_PRINTPAGENUM = 0xF043 -AFX_IDS_PREVIEWPAGEDESC = 0xF044 
-AFX_IDS_PRINTDEFAULTEXT = 0xF045 -AFX_IDS_PRINTDEFAULT = 0xF046 -AFX_IDS_PRINTFILTER = 0xF047 -AFX_IDS_PRINTCAPTION = 0xF048 -AFX_IDS_PRINTTOFILE = 0xF049 -AFX_IDS_OBJECT_MENUITEM = 0xF080 -AFX_IDS_EDIT_VERB = 0xF081 -AFX_IDS_ACTIVATE_VERB = 0xF082 -AFX_IDS_CHANGE_LINK = 0xF083 -AFX_IDS_AUTO = 0xF084 -AFX_IDS_MANUAL = 0xF085 -AFX_IDS_FROZEN = 0xF086 -AFX_IDS_ALL_FILES = 0xF087 -AFX_IDS_SAVE_MENU = 0xF088 -AFX_IDS_UPDATE_MENU = 0xF089 -AFX_IDS_SAVE_AS_MENU = 0xF08A -AFX_IDS_SAVE_COPY_AS_MENU = 0xF08B -AFX_IDS_EXIT_MENU = 0xF08C -AFX_IDS_UPDATING_ITEMS = 0xF08D -AFX_IDS_METAFILE_FORMAT = 0xF08E -AFX_IDS_DIB_FORMAT = 0xF08F -AFX_IDS_BITMAP_FORMAT = 0xF090 -AFX_IDS_LINKSOURCE_FORMAT = 0xF091 -AFX_IDS_EMBED_FORMAT = 0xF092 -AFX_IDS_PASTELINKEDTYPE = 0xF094 -AFX_IDS_UNKNOWNTYPE = 0xF095 -AFX_IDS_RTF_FORMAT = 0xF096 -AFX_IDS_TEXT_FORMAT = 0xF097 -AFX_IDS_INVALID_CURRENCY = 0xF098 -AFX_IDS_INVALID_DATETIME = 0xF099 -AFX_IDS_INVALID_DATETIMESPAN = 0xF09A -AFX_IDP_INVALID_FILENAME = 0xF100 -AFX_IDP_FAILED_TO_OPEN_DOC = 0xF101 -AFX_IDP_FAILED_TO_SAVE_DOC = 0xF102 -AFX_IDP_ASK_TO_SAVE = 0xF103 -AFX_IDP_FAILED_TO_CREATE_DOC = 0xF104 -AFX_IDP_FILE_TOO_LARGE = 0xF105 -AFX_IDP_FAILED_TO_START_PRINT = 0xF106 -AFX_IDP_FAILED_TO_LAUNCH_HELP = 0xF107 -AFX_IDP_INTERNAL_FAILURE = 0xF108 -AFX_IDP_COMMAND_FAILURE = 0xF109 -AFX_IDP_FAILED_MEMORY_ALLOC = 0xF10A -AFX_IDP_PARSE_INT = 0xF110 -AFX_IDP_PARSE_REAL = 0xF111 -AFX_IDP_PARSE_INT_RANGE = 0xF112 -AFX_IDP_PARSE_REAL_RANGE = 0xF113 -AFX_IDP_PARSE_STRING_SIZE = 0xF114 -AFX_IDP_PARSE_RADIO_BUTTON = 0xF115 -AFX_IDP_PARSE_BYTE = 0xF116 -AFX_IDP_PARSE_UINT = 0xF117 -AFX_IDP_PARSE_DATETIME = 0xF118 -AFX_IDP_PARSE_CURRENCY = 0xF119 -AFX_IDP_FAILED_INVALID_FORMAT = 0xF120 -AFX_IDP_FAILED_INVALID_PATH = 0xF121 -AFX_IDP_FAILED_DISK_FULL = 0xF122 -AFX_IDP_FAILED_ACCESS_READ = 0xF123 -AFX_IDP_FAILED_ACCESS_WRITE = 0xF124 -AFX_IDP_FAILED_IO_ERROR_READ = 0xF125 -AFX_IDP_FAILED_IO_ERROR_WRITE = 0xF126 -AFX_IDP_STATIC_OBJECT = 0xF180 
-AFX_IDP_FAILED_TO_CONNECT = 0xF181 -AFX_IDP_SERVER_BUSY = 0xF182 -AFX_IDP_BAD_VERB = 0xF183 -AFX_IDP_FAILED_TO_NOTIFY = 0xF185 -AFX_IDP_FAILED_TO_LAUNCH = 0xF186 -AFX_IDP_ASK_TO_UPDATE = 0xF187 -AFX_IDP_FAILED_TO_UPDATE = 0xF188 -AFX_IDP_FAILED_TO_REGISTER = 0xF189 -AFX_IDP_FAILED_TO_AUTO_REGISTER = 0xF18A -AFX_IDP_FAILED_TO_CONVERT = 0xF18B -AFX_IDP_GET_NOT_SUPPORTED = 0xF18C -AFX_IDP_SET_NOT_SUPPORTED = 0xF18D -AFX_IDP_ASK_TO_DISCARD = 0xF18E -AFX_IDP_FAILED_TO_CREATE = 0xF18F -AFX_IDP_FAILED_MAPI_LOAD = 0xF190 -AFX_IDP_INVALID_MAPI_DLL = 0xF191 -AFX_IDP_FAILED_MAPI_SEND = 0xF192 -AFX_IDP_FILE_NONE = 0xF1A0 -AFX_IDP_FILE_GENERIC = 0xF1A1 -AFX_IDP_FILE_NOT_FOUND = 0xF1A2 -AFX_IDP_FILE_BAD_PATH = 0xF1A3 -AFX_IDP_FILE_TOO_MANY_OPEN = 0xF1A4 -AFX_IDP_FILE_ACCESS_DENIED = 0xF1A5 -AFX_IDP_FILE_INVALID_FILE = 0xF1A6 -AFX_IDP_FILE_REMOVE_CURRENT = 0xF1A7 -AFX_IDP_FILE_DIR_FULL = 0xF1A8 -AFX_IDP_FILE_BAD_SEEK = 0xF1A9 -AFX_IDP_FILE_HARD_IO = 0xF1AA -AFX_IDP_FILE_SHARING = 0xF1AB -AFX_IDP_FILE_LOCKING = 0xF1AC -AFX_IDP_FILE_DISKFULL = 0xF1AD -AFX_IDP_FILE_EOF = 0xF1AE -AFX_IDP_ARCH_NONE = 0xF1B0 -AFX_IDP_ARCH_GENERIC = 0xF1B1 -AFX_IDP_ARCH_READONLY = 0xF1B2 -AFX_IDP_ARCH_ENDOFFILE = 0xF1B3 -AFX_IDP_ARCH_WRITEONLY = 0xF1B4 -AFX_IDP_ARCH_BADINDEX = 0xF1B5 -AFX_IDP_ARCH_BADCLASS = 0xF1B6 -AFX_IDP_ARCH_BADSCHEMA = 0xF1B7 -AFX_IDS_OCC_SCALEUNITS_PIXELS = 0xF1C0 -AFX_IDS_STATUS_FONT = 0xF230 -AFX_IDS_TOOLTIP_FONT = 0xF231 -AFX_IDS_UNICODE_FONT = 0xF232 -AFX_IDS_MINI_FONT = 0xF233 -AFX_IDP_SQL_FIRST = 0xF280 -AFX_IDP_SQL_CONNECT_FAIL = 0xF281 -AFX_IDP_SQL_RECORDSET_FORWARD_ONLY = 0xF282 -AFX_IDP_SQL_EMPTY_COLUMN_LIST = 0xF283 -AFX_IDP_SQL_FIELD_SCHEMA_MISMATCH = 0xF284 -AFX_IDP_SQL_ILLEGAL_MODE = 0xF285 -AFX_IDP_SQL_MULTIPLE_ROWS_AFFECTED = 0xF286 -AFX_IDP_SQL_NO_CURRENT_RECORD = 0xF287 -AFX_IDP_SQL_NO_ROWS_AFFECTED = 0xF288 -AFX_IDP_SQL_RECORDSET_READONLY = 0xF289 -AFX_IDP_SQL_SQL_NO_TOTAL = 0xF28A -AFX_IDP_SQL_ODBC_LOAD_FAILED = 0xF28B -AFX_IDP_SQL_DYNASET_NOT_SUPPORTED = 
0xF28C -AFX_IDP_SQL_SNAPSHOT_NOT_SUPPORTED = 0xF28D -AFX_IDP_SQL_API_CONFORMANCE = 0xF28E -AFX_IDP_SQL_SQL_CONFORMANCE = 0xF28F -AFX_IDP_SQL_NO_DATA_FOUND = 0xF290 -AFX_IDP_SQL_ROW_UPDATE_NOT_SUPPORTED = 0xF291 -AFX_IDP_SQL_ODBC_V2_REQUIRED = 0xF292 -AFX_IDP_SQL_NO_POSITIONED_UPDATES = 0xF293 -AFX_IDP_SQL_LOCK_MODE_NOT_SUPPORTED = 0xF294 -AFX_IDP_SQL_DATA_TRUNCATED = 0xF295 -AFX_IDP_SQL_ROW_FETCH = 0xF296 -AFX_IDP_SQL_INCORRECT_ODBC = 0xF297 -AFX_IDP_SQL_UPDATE_DELETE_FAILED = 0xF298 -AFX_IDP_SQL_DYNAMIC_CURSOR_NOT_SUPPORTED = 0xF299 -AFX_IDP_DAO_FIRST = 0xF2A0 -AFX_IDP_DAO_ENGINE_INITIALIZATION = 0xF2A0 -AFX_IDP_DAO_DFX_BIND = 0xF2A1 -AFX_IDP_DAO_OBJECT_NOT_OPEN = 0xF2A2 -AFX_IDP_DAO_ROWTOOSHORT = 0xF2A3 -AFX_IDP_DAO_BADBINDINFO = 0xF2A4 -AFX_IDP_DAO_COLUMNUNAVAILABLE = 0xF2A5 -AFX_IDC_LISTBOX = 100 -AFX_IDC_CHANGE = 101 -AFX_IDC_PRINT_DOCNAME = 201 -AFX_IDC_PRINT_PRINTERNAME = 202 -AFX_IDC_PRINT_PORTNAME = 203 -AFX_IDC_PRINT_PAGENUM = 204 -ID_APPLY_NOW = 0x3021 -ID_WIZBACK = 0x3023 -ID_WIZNEXT = 0x3024 -ID_WIZFINISH = 0x3025 -AFX_IDC_TAB_CONTROL = 0x3020 -AFX_IDD_FILEOPEN = 28676 -AFX_IDD_FILESAVE = 28677 -AFX_IDD_FONT = 28678 -AFX_IDD_COLOR = 28679 -AFX_IDD_PRINT = 28680 -AFX_IDD_PRINTSETUP = 28681 -AFX_IDD_FIND = 28682 -AFX_IDD_REPLACE = 28683 -AFX_IDD_NEWTYPEDLG = 30721 -AFX_IDD_PRINTDLG = 30722 -AFX_IDD_PREVIEW_TOOLBAR = 30723 -AFX_IDD_PREVIEW_SHORTTOOLBAR = 30731 -AFX_IDD_INSERTOBJECT = 30724 -AFX_IDD_CHANGEICON = 30725 -AFX_IDD_CONVERT = 30726 -AFX_IDD_PASTESPECIAL = 30727 -AFX_IDD_EDITLINKS = 30728 -AFX_IDD_FILEBROWSE = 30729 -AFX_IDD_BUSY = 30730 -AFX_IDD_OBJECTPROPERTIES = 30732 -AFX_IDD_CHANGESOURCE = 30733 -AFX_IDC_CONTEXTHELP = 30977 -AFX_IDC_MAGNIFY = 30978 -AFX_IDC_SMALLARROWS = 30979 -AFX_IDC_HSPLITBAR = 30980 -AFX_IDC_VSPLITBAR = 30981 -AFX_IDC_NODROPCRSR = 30982 -AFX_IDC_TRACKNWSE = 30983 -AFX_IDC_TRACKNESW = 30984 -AFX_IDC_TRACKNS = 30985 -AFX_IDC_TRACKWE = 30986 -AFX_IDC_TRACK4WAY = 30987 -AFX_IDC_MOVE4WAY = 30988 -AFX_IDB_MINIFRAME_MENU = 
30994 -AFX_IDB_CHECKLISTBOX_NT = 30995 -AFX_IDB_CHECKLISTBOX_95 = 30996 -AFX_IDR_PREVIEW_ACCEL = 30997 -AFX_IDI_STD_MDIFRAME = 31233 -AFX_IDI_STD_FRAME = 31234 -AFX_IDC_FONTPROP = 1000 -AFX_IDC_FONTNAMES = 1001 -AFX_IDC_FONTSTYLES = 1002 -AFX_IDC_FONTSIZES = 1003 -AFX_IDC_STRIKEOUT = 1004 -AFX_IDC_UNDERLINE = 1005 -AFX_IDC_SAMPLEBOX = 1006 -AFX_IDC_COLOR_BLACK = 1100 -AFX_IDC_COLOR_WHITE = 1101 -AFX_IDC_COLOR_RED = 1102 -AFX_IDC_COLOR_GREEN = 1103 -AFX_IDC_COLOR_BLUE = 1104 -AFX_IDC_COLOR_YELLOW = 1105 -AFX_IDC_COLOR_MAGENTA = 1106 -AFX_IDC_COLOR_CYAN = 1107 -AFX_IDC_COLOR_GRAY = 1108 -AFX_IDC_COLOR_LIGHTGRAY = 1109 -AFX_IDC_COLOR_DARKRED = 1110 -AFX_IDC_COLOR_DARKGREEN = 1111 -AFX_IDC_COLOR_DARKBLUE = 1112 -AFX_IDC_COLOR_LIGHTBROWN = 1113 -AFX_IDC_COLOR_DARKMAGENTA = 1114 -AFX_IDC_COLOR_DARKCYAN = 1115 -AFX_IDC_COLORPROP = 1116 -AFX_IDC_SYSTEMCOLORS = 1117 -AFX_IDC_PROPNAME = 1201 -AFX_IDC_PICTURE = 1202 -AFX_IDC_BROWSE = 1203 -AFX_IDC_CLEAR = 1204 -AFX_IDD_PROPPAGE_COLOR = 32257 -AFX_IDD_PROPPAGE_FONT = 32258 -AFX_IDD_PROPPAGE_PICTURE = 32259 -AFX_IDB_TRUETYPE = 32384 -AFX_IDS_PROPPAGE_UNKNOWN = 0xFE01 -AFX_IDS_COLOR_DESKTOP = 0xFE04 -AFX_IDS_COLOR_APPWORKSPACE = 0xFE05 -AFX_IDS_COLOR_WNDBACKGND = 0xFE06 -AFX_IDS_COLOR_WNDTEXT = 0xFE07 -AFX_IDS_COLOR_MENUBAR = 0xFE08 -AFX_IDS_COLOR_MENUTEXT = 0xFE09 -AFX_IDS_COLOR_ACTIVEBAR = 0xFE0A -AFX_IDS_COLOR_INACTIVEBAR = 0xFE0B -AFX_IDS_COLOR_ACTIVETEXT = 0xFE0C -AFX_IDS_COLOR_INACTIVETEXT = 0xFE0D -AFX_IDS_COLOR_ACTIVEBORDER = 0xFE0E -AFX_IDS_COLOR_INACTIVEBORDER = 0xFE0F -AFX_IDS_COLOR_WNDFRAME = 0xFE10 -AFX_IDS_COLOR_SCROLLBARS = 0xFE11 -AFX_IDS_COLOR_BTNFACE = 0xFE12 -AFX_IDS_COLOR_BTNSHADOW = 0xFE13 -AFX_IDS_COLOR_BTNTEXT = 0xFE14 -AFX_IDS_COLOR_BTNHIGHLIGHT = 0xFE15 -AFX_IDS_COLOR_DISABLEDTEXT = 0xFE16 -AFX_IDS_COLOR_HIGHLIGHT = 0xFE17 -AFX_IDS_COLOR_HIGHLIGHTTEXT = 0xFE18 -AFX_IDS_REGULAR = 0xFE19 -AFX_IDS_BOLD = 0xFE1A -AFX_IDS_ITALIC = 0xFE1B -AFX_IDS_BOLDITALIC = 0xFE1C -AFX_IDS_SAMPLETEXT = 0xFE1D 
-AFX_IDS_DISPLAYSTRING_FONT = 0xFE1E -AFX_IDS_DISPLAYSTRING_COLOR = 0xFE1F -AFX_IDS_DISPLAYSTRING_PICTURE = 0xFE20 -AFX_IDS_PICTUREFILTER = 0xFE21 -AFX_IDS_PICTYPE_UNKNOWN = 0xFE22 -AFX_IDS_PICTYPE_NONE = 0xFE23 -AFX_IDS_PICTYPE_BITMAP = 0xFE24 -AFX_IDS_PICTYPE_METAFILE = 0xFE25 -AFX_IDS_PICTYPE_ICON = 0xFE26 -AFX_IDS_COLOR_PPG = 0xFE28 -AFX_IDS_COLOR_PPG_CAPTION = 0xFE29 -AFX_IDS_FONT_PPG = 0xFE2A -AFX_IDS_FONT_PPG_CAPTION = 0xFE2B -AFX_IDS_PICTURE_PPG = 0xFE2C -AFX_IDS_PICTURE_PPG_CAPTION = 0xFE2D -AFX_IDS_PICTUREBROWSETITLE = 0xFE30 -AFX_IDS_BORDERSTYLE_0 = 0xFE31 -AFX_IDS_BORDERSTYLE_1 = 0xFE32 -AFX_IDS_VERB_EDIT = 0xFE40 -AFX_IDS_VERB_PROPERTIES = 0xFE41 -AFX_IDP_PICTURECANTOPEN = 0xFE83 -AFX_IDP_PICTURECANTLOAD = 0xFE84 -AFX_IDP_PICTURETOOLARGE = 0xFE85 -AFX_IDP_PICTUREREADFAILED = 0xFE86 -AFX_IDP_E_ILLEGALFUNCTIONCALL = 0xFEA0 -AFX_IDP_E_OVERFLOW = 0xFEA1 -AFX_IDP_E_OUTOFMEMORY = 0xFEA2 -AFX_IDP_E_DIVISIONBYZERO = 0xFEA3 -AFX_IDP_E_OUTOFSTRINGSPACE = 0xFEA4 -AFX_IDP_E_OUTOFSTACKSPACE = 0xFEA5 -AFX_IDP_E_BADFILENAMEORNUMBER = 0xFEA6 -AFX_IDP_E_FILENOTFOUND = 0xFEA7 -AFX_IDP_E_BADFILEMODE = 0xFEA8 -AFX_IDP_E_FILEALREADYOPEN = 0xFEA9 -AFX_IDP_E_DEVICEIOERROR = 0xFEAA -AFX_IDP_E_FILEALREADYEXISTS = 0xFEAB -AFX_IDP_E_BADRECORDLENGTH = 0xFEAC -AFX_IDP_E_DISKFULL = 0xFEAD -AFX_IDP_E_BADRECORDNUMBER = 0xFEAE -AFX_IDP_E_BADFILENAME = 0xFEAF -AFX_IDP_E_TOOMANYFILES = 0xFEB0 -AFX_IDP_E_DEVICEUNAVAILABLE = 0xFEB1 -AFX_IDP_E_PERMISSIONDENIED = 0xFEB2 -AFX_IDP_E_DISKNOTREADY = 0xFEB3 -AFX_IDP_E_PATHFILEACCESSERROR = 0xFEB4 -AFX_IDP_E_PATHNOTFOUND = 0xFEB5 -AFX_IDP_E_INVALIDPATTERNSTRING = 0xFEB6 -AFX_IDP_E_INVALIDUSEOFNULL = 0xFEB7 -AFX_IDP_E_INVALIDFILEFORMAT = 0xFEB8 -AFX_IDP_E_INVALIDPROPERTYVALUE = 0xFEB9 -AFX_IDP_E_INVALIDPROPERTYARRAYINDEX = 0xFEBA -AFX_IDP_E_SETNOTSUPPORTEDATRUNTIME = 0xFEBB -AFX_IDP_E_SETNOTSUPPORTED = 0xFEBC -AFX_IDP_E_NEEDPROPERTYARRAYINDEX = 0xFEBD -AFX_IDP_E_SETNOTPERMITTED = 0xFEBE -AFX_IDP_E_GETNOTSUPPORTEDATRUNTIME = 0xFEBF 
-AFX_IDP_E_GETNOTSUPPORTED = 0xFEC0 -AFX_IDP_E_PROPERTYNOTFOUND = 0xFEC1 -AFX_IDP_E_INVALIDCLIPBOARDFORMAT = 0xFEC2 -AFX_IDP_E_INVALIDPICTURE = 0xFEC3 -AFX_IDP_E_PRINTERERROR = 0xFEC4 -AFX_IDP_E_CANTSAVEFILETOTEMP = 0xFEC5 -AFX_IDP_E_SEARCHTEXTNOTFOUND = 0xFEC6 -AFX_IDP_E_REPLACEMENTSTOOLONG = 0xFEC7 diff --git a/lib/win32/lib/commctrl.py b/lib/win32/lib/commctrl.py deleted file mode 100644 index 26641e84..00000000 --- a/lib/win32/lib/commctrl.py +++ /dev/null @@ -1,1551 +0,0 @@ -# Generated by h2py from COMMCTRL.H -WM_USER = 1024 -ICC_LISTVIEW_CLASSES = 1 # listview, header -ICC_TREEVIEW_CLASSES = 2 # treeview, tooltips -ICC_BAR_CLASSES = 4 # toolbar, statusbar, trackbar, tooltips -ICC_TAB_CLASSES = 8 # tab, tooltips -ICC_UPDOWN_CLASS = 16 # updown -ICC_PROGRESS_CLASS = 32 # progress -ICC_HOTKEY_CLASS = 64 # hotkey -ICC_ANIMATE_CLASS = 128 # animate -ICC_WIN95_CLASSES = 255 -ICC_DATE_CLASSES = 256 # month picker, date picker, time picker, updown -ICC_USEREX_CLASSES = 512 # comboex -ICC_COOL_CLASSES = 1024 # rebar (coolbar) control -ICC_INTERNET_CLASSES = 2048 -ICC_PAGESCROLLER_CLASS = 4096 # page scroller -ICC_NATIVEFNTCTL_CLASS = 8192 # native font control -ODT_HEADER = 100 -ODT_TAB = 101 -ODT_LISTVIEW = 102 -PY_0U = 0 -NM_FIRST = PY_0U # generic to all controls -NM_LAST = PY_0U - 99 -LVN_FIRST = PY_0U - 100 # listview -LVN_LAST = PY_0U - 199 -HDN_FIRST = PY_0U - 300 # header -HDN_LAST = PY_0U - 399 -TVN_FIRST = PY_0U - 400 # treeview -TVN_LAST = PY_0U - 499 -TTN_FIRST = PY_0U - 520 # tooltips -TTN_LAST = PY_0U - 549 -TCN_FIRST = PY_0U - 550 # tab control -TCN_LAST = PY_0U - 580 -CDN_FIRST = PY_0U - 601 # common dialog (new) -CDN_LAST = PY_0U - 699 -TBN_FIRST = PY_0U - 700 # toolbar -TBN_LAST = PY_0U - 720 -UDN_FIRST = PY_0U - 721 # updown -UDN_LAST = PY_0U - 740 -MCN_FIRST = PY_0U - 750 # monthcal -MCN_LAST = PY_0U - 759 -DTN_FIRST = PY_0U - 760 # datetimepick -DTN_LAST = PY_0U - 799 -CBEN_FIRST = PY_0U - 800 # combo box ex -CBEN_LAST = PY_0U - 830 -RBN_FIRST = 
PY_0U - 831 # rebar -RBN_LAST = PY_0U - 859 -IPN_FIRST = PY_0U - 860 # internet address -IPN_LAST = PY_0U - 879 # internet address -SBN_FIRST = PY_0U - 880 # status bar -SBN_LAST = PY_0U - 899 -PGN_FIRST = PY_0U - 900 # Pager Control -PGN_LAST = PY_0U - 950 -LVM_FIRST = 4096 # ListView messages -TV_FIRST = 4352 # TreeView messages -HDM_FIRST = 4608 # Header messages -TCM_FIRST = 4864 # Tab control messages -PGM_FIRST = 5120 # Pager control messages -CCM_FIRST = 8192 # Common control shared messages -CCM_SETBKCOLOR = CCM_FIRST + 1 # lParam is bkColor -CCM_SETCOLORSCHEME = CCM_FIRST + 2 # lParam is color scheme -CCM_GETCOLORSCHEME = CCM_FIRST + 3 # fills in COLORSCHEME pointed to by lParam -CCM_GETDROPTARGET = CCM_FIRST + 4 -CCM_SETUNICODEFORMAT = CCM_FIRST + 5 -CCM_GETUNICODEFORMAT = CCM_FIRST + 6 -INFOTIPSIZE = 1024 -NM_OUTOFMEMORY = NM_FIRST - 1 -NM_CLICK = NM_FIRST - 2 # uses NMCLICK struct -NM_DBLCLK = NM_FIRST - 3 -NM_RETURN = NM_FIRST - 4 -NM_RCLICK = NM_FIRST - 5 # uses NMCLICK struct -NM_RDBLCLK = NM_FIRST - 6 -NM_SETFOCUS = NM_FIRST - 7 -NM_KILLFOCUS = NM_FIRST - 8 -NM_CUSTOMDRAW = NM_FIRST - 12 -NM_HOVER = NM_FIRST - 13 -NM_NCHITTEST = NM_FIRST - 14 # uses NMMOUSE struct -NM_KEYDOWN = NM_FIRST - 15 # uses NMKEY struct -NM_RELEASEDCAPTURE = NM_FIRST - 16 -NM_SETCURSOR = NM_FIRST - 17 # uses NMMOUSE struct -NM_CHAR = NM_FIRST - 18 # uses NMCHAR struct -MSGF_COMMCTRL_BEGINDRAG = 16896 -MSGF_COMMCTRL_SIZEHEADER = 16897 -MSGF_COMMCTRL_DRAGSELECT = 16898 -MSGF_COMMCTRL_TOOLBARCUST = 16899 -CDRF_DODEFAULT = 0 -CDRF_NEWFONT = 2 -CDRF_SKIPDEFAULT = 4 -CDRF_NOTIFYPOSTPAINT = 16 -CDRF_NOTIFYITEMDRAW = 32 -CDRF_NOTIFYSUBITEMDRAW = 32 # flags are the same, we can distinguish by context -CDRF_NOTIFYPOSTERASE = 64 -CDDS_PREPAINT = 1 -CDDS_POSTPAINT = 2 -CDDS_PREERASE = 3 -CDDS_POSTERASE = 4 -CDDS_ITEM = 65536 -CDDS_ITEMPREPAINT = CDDS_ITEM | CDDS_PREPAINT -CDDS_ITEMPOSTPAINT = CDDS_ITEM | CDDS_POSTPAINT -CDDS_ITEMPREERASE = CDDS_ITEM | CDDS_PREERASE -CDDS_ITEMPOSTERASE = 
CDDS_ITEM | CDDS_POSTERASE -CDDS_SUBITEM = 131072 -CDIS_SELECTED = 1 -CDIS_GRAYED = 2 -CDIS_DISABLED = 4 -CDIS_CHECKED = 8 -CDIS_FOCUS = 16 -CDIS_DEFAULT = 32 -CDIS_HOT = 64 -CDIS_MARKED = 128 -CDIS_INDETERMINATE = 256 -CLR_NONE = -1 # 0xFFFFFFFFL -CLR_DEFAULT = -16777216 # 0xFF000000L -ILC_MASK = 1 -ILC_COLOR = 0 -ILC_COLORDDB = 254 -ILC_COLOR4 = 4 -ILC_COLOR8 = 8 -ILC_COLOR16 = 16 -ILC_COLOR24 = 24 -ILC_COLOR32 = 32 -ILC_PALETTE = 2048 # (not implemented) -ILD_NORMAL = 0 -ILD_TRANSPARENT = 1 -ILD_MASK = 16 -ILD_IMAGE = 32 -ILD_ROP = 64 -ILD_BLEND25 = 2 -ILD_BLEND50 = 4 -ILD_OVERLAYMASK = 3840 -ILD_SELECTED = ILD_BLEND50 -ILD_FOCUS = ILD_BLEND25 -ILD_BLEND = ILD_BLEND50 -CLR_HILIGHT = CLR_DEFAULT -ILCF_MOVE = 0 -ILCF_SWAP = 1 -WC_HEADERA = "SysHeader32" -WC_HEADER = WC_HEADERA -HDS_HORZ = 0 -HDS_BUTTONS = 2 -HDS_HOTTRACK = 4 -HDS_HIDDEN = 8 -HDS_DRAGDROP = 64 -HDS_FULLDRAG = 128 -HDI_WIDTH = 1 -HDI_HEIGHT = HDI_WIDTH -HDI_TEXT = 2 -HDI_FORMAT = 4 -HDI_LPARAM = 8 -HDI_BITMAP = 16 -HDI_IMAGE = 32 -HDI_DI_SETITEM = 64 -HDI_ORDER = 128 -HDF_LEFT = 0 -HDF_RIGHT = 1 -HDF_CENTER = 2 -HDF_JUSTIFYMASK = 3 -HDF_RTLREADING = 4 -HDF_OWNERDRAW = 32768 -HDF_STRING = 16384 -HDF_BITMAP = 8192 -HDF_BITMAP_ON_RIGHT = 4096 -HDF_IMAGE = 2048 -HDM_GETITEMCOUNT = HDM_FIRST + 0 -HDM_INSERTITEMA = HDM_FIRST + 1 -HDM_INSERTITEMW = HDM_FIRST + 10 -HDM_INSERTITEM = HDM_INSERTITEMA -HDM_DELETEITEM = HDM_FIRST + 2 -HDM_GETITEMA = HDM_FIRST + 3 -HDM_GETITEMW = HDM_FIRST + 11 -HDM_GETITEM = HDM_GETITEMA -HDM_SETITEMA = HDM_FIRST + 4 -HDM_SETITEMW = HDM_FIRST + 12 -HDM_SETITEM = HDM_SETITEMA -HDM_LAYOUT = HDM_FIRST + 5 -HHT_NOWHERE = 1 -HHT_ONHEADER = 2 -HHT_ONDIVIDER = 4 -HHT_ONDIVOPEN = 8 -HHT_ABOVE = 256 -HHT_BELOW = 512 -HHT_TORIGHT = 1024 -HHT_TOLEFT = 2048 -HDM_HITTEST = HDM_FIRST + 6 -HDM_GETITEMRECT = HDM_FIRST + 7 -HDM_SETIMAGELIST = HDM_FIRST + 8 -HDM_GETIMAGELIST = HDM_FIRST + 9 -HDM_ORDERTOINDEX = HDM_FIRST + 15 -HDM_CREATEDRAGIMAGE = HDM_FIRST + 16 # wparam = which item (by index) 
-HDM_GETORDERARRAY = HDM_FIRST + 17 -HDM_SETORDERARRAY = HDM_FIRST + 18 -HDM_SETHOTDIVIDER = HDM_FIRST + 19 -HDM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -HDM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -HDN_ITEMCHANGINGA = HDN_FIRST - 0 -HDN_ITEMCHANGINGW = HDN_FIRST - 20 -HDN_ITEMCHANGEDA = HDN_FIRST - 1 -HDN_ITEMCHANGEDW = HDN_FIRST - 21 -HDN_ITEMCLICKA = HDN_FIRST - 2 -HDN_ITEMCLICKW = HDN_FIRST - 22 -HDN_ITEMDBLCLICKA = HDN_FIRST - 3 -HDN_ITEMDBLCLICKW = HDN_FIRST - 23 -HDN_DIVIDERDBLCLICKA = HDN_FIRST - 5 -HDN_DIVIDERDBLCLICKW = HDN_FIRST - 25 -HDN_BEGINTRACKA = HDN_FIRST - 6 -HDN_BEGINTRACKW = HDN_FIRST - 26 -HDN_ENDTRACKA = HDN_FIRST - 7 -HDN_ENDTRACKW = HDN_FIRST - 27 -HDN_TRACKA = HDN_FIRST - 8 -HDN_TRACKW = HDN_FIRST - 28 -HDN_GETDISPINFOA = HDN_FIRST - 9 -HDN_GETDISPINFOW = HDN_FIRST - 29 -HDN_BEGINDRAG = HDN_FIRST - 10 -HDN_ENDDRAG = HDN_FIRST - 11 -HDN_ITEMCHANGING = HDN_ITEMCHANGINGA -HDN_ITEMCHANGED = HDN_ITEMCHANGEDA -HDN_ITEMCLICK = HDN_ITEMCLICKA -HDN_ITEMDBLCLICK = HDN_ITEMDBLCLICKA -HDN_DIVIDERDBLCLICK = HDN_DIVIDERDBLCLICKA -HDN_BEGINTRACK = HDN_BEGINTRACKA -HDN_ENDTRACK = HDN_ENDTRACKA -HDN_TRACK = HDN_TRACKA -HDN_GETDISPINFO = HDN_GETDISPINFOA -TOOLBARCLASSNAMEA = "ToolbarWindow32" -TOOLBARCLASSNAME = TOOLBARCLASSNAMEA -CMB_MASKED = 2 -TBSTATE_CHECKED = 1 -TBSTATE_PRESSED = 2 -TBSTATE_ENABLED = 4 -TBSTATE_HIDDEN = 8 -TBSTATE_INDETERMINATE = 16 -TBSTATE_WRAP = 32 -TBSTATE_ELLIPSES = 64 -TBSTATE_MARKED = 128 -TBSTYLE_BUTTON = 0 -TBSTYLE_SEP = 1 -TBSTYLE_CHECK = 2 -TBSTYLE_GROUP = 4 -TBSTYLE_CHECKGROUP = TBSTYLE_GROUP | TBSTYLE_CHECK -TBSTYLE_DROPDOWN = 8 -TBSTYLE_AUTOSIZE = 16 # automatically calculate the cx of the button -TBSTYLE_NOPREFIX = 32 # if this button should not have accel prefix -TBSTYLE_TOOLTIPS = 256 -TBSTYLE_WRAPABLE = 512 -TBSTYLE_ALTDRAG = 1024 -TBSTYLE_FLAT = 2048 -TBSTYLE_LIST = 4096 -TBSTYLE_CUSTOMERASE = 8192 -TBSTYLE_REGISTERDROP = 16384 -TBSTYLE_TRANSPARENT = 32768 -TBSTYLE_EX_DRAWDDARROWS = 1 -BTNS_BUTTON = TBSTYLE_BUTTON 
-BTNS_SEP = TBSTYLE_SEP # 0x0001 -BTNS_CHECK = TBSTYLE_CHECK # 0x0002 -BTNS_GROUP = TBSTYLE_GROUP # 0x0004 -BTNS_CHECKGROUP = TBSTYLE_CHECKGROUP # (TBSTYLE_GROUP | TBSTYLE_CHECK) -BTNS_DROPDOWN = TBSTYLE_DROPDOWN # 0x0008 -BTNS_AUTOSIZE = TBSTYLE_AUTOSIZE # 0x0010; automatically calculate the cx of the button -BTNS_NOPREFIX = TBSTYLE_NOPREFIX # 0x0020; this button should not have accel prefix -BTNS_SHOWTEXT = ( - 64 # 0x0040 // ignored unless TBSTYLE_EX_MIXEDBUTTONS is set -) -BTNS_WHOLEDROPDOWN = ( - 128 # 0x0080 // draw drop-down arrow, but without split arrow section -) -TBCDRF_NOEDGES = 65536 # Don't draw button edges -TBCDRF_HILITEHOTTRACK = 131072 # Use color of the button bk when hottracked -TBCDRF_NOOFFSET = 262144 # Don't offset button if pressed -TBCDRF_NOMARK = 524288 # Don't draw default highlight of image/text for TBSTATE_MARKED -TBCDRF_NOETCHEDEFFECT = 1048576 # Don't draw etched effect for disabled items -TB_ENABLEBUTTON = WM_USER + 1 -TB_CHECKBUTTON = WM_USER + 2 -TB_PRESSBUTTON = WM_USER + 3 -TB_HIDEBUTTON = WM_USER + 4 -TB_INDETERMINATE = WM_USER + 5 -TB_MARKBUTTON = WM_USER + 6 -TB_ISBUTTONENABLED = WM_USER + 9 -TB_ISBUTTONCHECKED = WM_USER + 10 -TB_ISBUTTONPRESSED = WM_USER + 11 -TB_ISBUTTONHIDDEN = WM_USER + 12 -TB_ISBUTTONINDETERMINATE = WM_USER + 13 -TB_ISBUTTONHIGHLIGHTED = WM_USER + 14 -TB_SETSTATE = WM_USER + 17 -TB_GETSTATE = WM_USER + 18 -TB_ADDBITMAP = WM_USER + 19 -HINST_COMMCTRL = -1 -IDB_STD_SMALL_COLOR = 0 -IDB_STD_LARGE_COLOR = 1 -IDB_VIEW_SMALL_COLOR = 4 -IDB_VIEW_LARGE_COLOR = 5 -IDB_HIST_SMALL_COLOR = 8 -IDB_HIST_LARGE_COLOR = 9 -STD_CUT = 0 -STD_COPY = 1 -STD_PASTE = 2 -STD_UNDO = 3 -STD_REDOW = 4 -STD_DELETE = 5 -STD_FILENEW = 6 -STD_FILEOPEN = 7 -STD_FILESAVE = 8 -STD_PRINTPRE = 9 -STD_PROPERTIES = 10 -STD_HELP = 11 -STD_FIND = 12 -STD_REPLACE = 13 -STD_PRINT = 14 -VIEW_LARGEICONS = 0 -VIEW_SMALLICONS = 1 -VIEW_LIST = 2 -VIEW_DETAILS = 3 -VIEW_SORTNAME = 4 -VIEW_SORTSIZE = 5 -VIEW_SORTDATE = 6 -VIEW_SORTTYPE = 7 
-VIEW_PARENTFOLDER = 8 -VIEW_NETCONNECT = 9 -VIEW_NETDISCONNECT = 10 -VIEW_NEWFOLDER = 11 -VIEW_VIEWMENU = 12 -HIST_BACK = 0 -HIST_FORWARD = 1 -HIST_FAVORITES = 2 -HIST_ADDTOFAVORITES = 3 -HIST_VIEWTREE = 4 -TB_ADDBUTTONSA = WM_USER + 20 -TB_INSERTBUTTONA = WM_USER + 21 -TB_ADDBUTTONS = WM_USER + 20 -TB_INSERTBUTTON = WM_USER + 21 -TB_DELETEBUTTON = WM_USER + 22 -TB_GETBUTTON = WM_USER + 23 -TB_BUTTONCOUNT = WM_USER + 24 -TB_COMMANDTOINDEX = WM_USER + 25 -TB_SAVERESTOREA = WM_USER + 26 -TB_SAVERESTOREW = WM_USER + 76 -TB_CUSTOMIZE = WM_USER + 27 -TB_ADDSTRINGA = WM_USER + 28 -TB_ADDSTRINGW = WM_USER + 77 -TB_GETITEMRECT = WM_USER + 29 -TB_BUTTONSTRUCTSIZE = WM_USER + 30 -TB_SETBUTTONSIZE = WM_USER + 31 -TB_SETBITMAPSIZE = WM_USER + 32 -TB_AUTOSIZE = WM_USER + 33 -TB_GETTOOLTIPS = WM_USER + 35 -TB_SETTOOLTIPS = WM_USER + 36 -TB_SETPARENT = WM_USER + 37 -TB_SETROWS = WM_USER + 39 -TB_GETROWS = WM_USER + 40 -TB_SETCMDID = WM_USER + 42 -TB_CHANGEBITMAP = WM_USER + 43 -TB_GETBITMAP = WM_USER + 44 -TB_GETBUTTONTEXTA = WM_USER + 45 -TB_GETBUTTONTEXTW = WM_USER + 75 -TB_REPLACEBITMAP = WM_USER + 46 -TB_SETINDENT = WM_USER + 47 -TB_SETIMAGELIST = WM_USER + 48 -TB_GETIMAGELIST = WM_USER + 49 -TB_LOADIMAGES = WM_USER + 50 -TB_GETRECT = WM_USER + 51 # wParam is the Cmd instead of index -TB_SETHOTIMAGELIST = WM_USER + 52 -TB_GETHOTIMAGELIST = WM_USER + 53 -TB_SETDISABLEDIMAGELIST = WM_USER + 54 -TB_GETDISABLEDIMAGELIST = WM_USER + 55 -TB_SETSTYLE = WM_USER + 56 -TB_GETSTYLE = WM_USER + 57 -TB_GETBUTTONSIZE = WM_USER + 58 -TB_SETBUTTONWIDTH = WM_USER + 59 -TB_SETMAXTEXTROWS = WM_USER + 60 -TB_GETTEXTROWS = WM_USER + 61 -TB_GETBUTTONTEXT = TB_GETBUTTONTEXTA -TB_SAVERESTORE = TB_SAVERESTOREA -TB_ADDSTRING = TB_ADDSTRINGA -TB_GETOBJECT = WM_USER + 62 # wParam == IID, lParam void **ppv -TB_GETHOTITEM = WM_USER + 71 -TB_SETHOTITEM = WM_USER + 72 # wParam == iHotItem -TB_SETANCHORHIGHLIGHT = WM_USER + 73 # wParam == TRUE/FALSE -TB_GETANCHORHIGHLIGHT = WM_USER + 74 -TB_MAPACCELERATORA 
= WM_USER + 78 # wParam == ch, lParam int * pidBtn -TBIMHT_AFTER = 1 # TRUE = insert After iButton, otherwise before -TBIMHT_BACKGROUND = 2 # TRUE iff missed buttons completely -TB_GETINSERTMARK = WM_USER + 79 # lParam == LPTBINSERTMARK -TB_SETINSERTMARK = WM_USER + 80 # lParam == LPTBINSERTMARK -TB_INSERTMARKHITTEST = WM_USER + 81 # wParam == LPPOINT lParam == LPTBINSERTMARK -TB_MOVEBUTTON = WM_USER + 82 -TB_GETMAXSIZE = WM_USER + 83 # lParam == LPSIZE -TB_SETEXTENDEDSTYLE = WM_USER + 84 # For TBSTYLE_EX_* -TB_GETEXTENDEDSTYLE = WM_USER + 85 # For TBSTYLE_EX_* -TB_GETPADDING = WM_USER + 86 -TB_SETPADDING = WM_USER + 87 -TB_SETINSERTMARKCOLOR = WM_USER + 88 -TB_GETINSERTMARKCOLOR = WM_USER + 89 -TB_SETCOLORSCHEME = CCM_SETCOLORSCHEME # lParam is color scheme -TB_GETCOLORSCHEME = CCM_GETCOLORSCHEME # fills in COLORSCHEME pointed to by lParam -TB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -TB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -TB_MAPACCELERATORW = WM_USER + 90 # wParam == ch, lParam int * pidBtn -TB_MAPACCELERATOR = TB_MAPACCELERATORA -TBBF_LARGE = 1 -TB_GETBITMAPFLAGS = WM_USER + 41 -TBIF_IMAGE = 1 -TBIF_TEXT = 2 -TBIF_STATE = 4 -TBIF_STYLE = 8 -TBIF_LPARAM = 16 -TBIF_COMMAND = 32 -TBIF_SIZE = 64 -TB_GETBUTTONINFOW = WM_USER + 63 -TB_SETBUTTONINFOW = WM_USER + 64 -TB_GETBUTTONINFOA = WM_USER + 65 -TB_SETBUTTONINFOA = WM_USER + 66 -TB_INSERTBUTTONW = WM_USER + 67 -TB_ADDBUTTONSW = WM_USER + 68 -TB_HITTEST = WM_USER + 69 -TB_SETDRAWTEXTFLAGS = WM_USER + 70 # wParam == mask lParam == bit values -TBN_GETBUTTONINFOA = TBN_FIRST - 0 -TBN_GETBUTTONINFOW = TBN_FIRST - 20 -TBN_BEGINDRAG = TBN_FIRST - 1 -TBN_ENDDRAG = TBN_FIRST - 2 -TBN_BEGINADJUST = TBN_FIRST - 3 -TBN_ENDADJUST = TBN_FIRST - 4 -TBN_RESET = TBN_FIRST - 5 -TBN_QUERYINSERT = TBN_FIRST - 6 -TBN_QUERYDELETE = TBN_FIRST - 7 -TBN_TOOLBARCHANGE = TBN_FIRST - 8 -TBN_CUSTHELP = TBN_FIRST - 9 -TBN_DROPDOWN = TBN_FIRST - 10 -TBN_GETOBJECT = TBN_FIRST - 12 -HICF_OTHER = 0 -HICF_MOUSE = 1 # Triggered by mouse 
-HICF_ARROWKEYS = 2 # Triggered by arrow keys -HICF_ACCELERATOR = 4 # Triggered by accelerator -HICF_DUPACCEL = 8 # This accelerator is not unique -HICF_ENTERING = 16 # idOld is invalid -HICF_LEAVING = 32 # idNew is invalid -HICF_RESELECT = 64 # hot item reselected -TBN_HOTITEMCHANGE = TBN_FIRST - 13 -TBN_DRAGOUT = ( - TBN_FIRST - 14 -) # this is sent when the user clicks down on a button then drags off the button -TBN_DELETINGBUTTON = TBN_FIRST - 15 # uses TBNOTIFY -TBN_GETDISPINFOA = ( - TBN_FIRST - 16 -) # This is sent when the toolbar needs some display information -TBN_GETDISPINFOW = ( - TBN_FIRST - 17 -) # This is sent when the toolbar needs some display information -TBN_GETINFOTIPA = TBN_FIRST - 18 -TBN_GETINFOTIPW = TBN_FIRST - 19 -TBN_GETINFOTIP = TBN_GETINFOTIPA -TBNF_IMAGE = 1 -TBNF_TEXT = 2 -TBNF_DI_SETITEM = 268435456 -TBN_GETDISPINFO = TBN_GETDISPINFOA -TBDDRET_DEFAULT = 0 -TBDDRET_NODEFAULT = 1 -TBDDRET_TREATPRESSED = 2 # Treat as a standard press button -TBN_GETBUTTONINFO = TBN_GETBUTTONINFOA -REBARCLASSNAMEA = "ReBarWindow32" -REBARCLASSNAME = REBARCLASSNAMEA -RBIM_IMAGELIST = 1 -RBS_TOOLTIPS = 256 -RBS_VARHEIGHT = 512 -RBS_BANDBORDERS = 1024 -RBS_FIXEDORDER = 2048 -RBS_REGISTERDROP = 4096 -RBS_AUTOSIZE = 8192 -RBS_VERTICALGRIPPER = ( - 16384 # this always has the vertical gripper (default for horizontal mode) -) -RBS_DBLCLKTOGGLE = 32768 -RBBS_BREAK = 1 # break to new line -RBBS_FIXEDSIZE = 2 # band can't be sized -RBBS_CHILDEDGE = 4 # edge around top & bottom of child window -RBBS_HIDDEN = 8 # don't show -RBBS_NOVERT = 16 # don't show when vertical -RBBS_FIXEDBMP = 32 # bitmap doesn't move during band resize -RBBS_VARIABLEHEIGHT = 64 # allow autosizing of this child vertically -RBBS_GRIPPERALWAYS = 128 # always show the gripper -RBBS_NOGRIPPER = 256 # never show the gripper -RBBIM_STYLE = 1 -RBBIM_COLORS = 2 -RBBIM_TEXT = 4 -RBBIM_IMAGE = 8 -RBBIM_CHILD = 16 -RBBIM_CHILDSIZE = 32 -RBBIM_SIZE = 64 -RBBIM_BACKGROUND = 128 -RBBIM_ID = 256 
-RBBIM_IDEALSIZE = 512 -RBBIM_LPARAM = 1024 -RB_INSERTBANDA = WM_USER + 1 -RB_DELETEBAND = WM_USER + 2 -RB_GETBARINFO = WM_USER + 3 -RB_SETBARINFO = WM_USER + 4 -RB_SETBANDINFOA = WM_USER + 6 -RB_SETPARENT = WM_USER + 7 -RB_HITTEST = WM_USER + 8 -RB_GETRECT = WM_USER + 9 -RB_INSERTBANDW = WM_USER + 10 -RB_SETBANDINFOW = WM_USER + 11 -RB_GETBANDCOUNT = WM_USER + 12 -RB_GETROWCOUNT = WM_USER + 13 -RB_GETROWHEIGHT = WM_USER + 14 -RB_IDTOINDEX = WM_USER + 16 # wParam == id -RB_GETTOOLTIPS = WM_USER + 17 -RB_SETTOOLTIPS = WM_USER + 18 -RB_SETBKCOLOR = WM_USER + 19 # sets the default BK color -RB_GETBKCOLOR = WM_USER + 20 # defaults to CLR_NONE -RB_SETTEXTCOLOR = WM_USER + 21 -RB_GETTEXTCOLOR = WM_USER + 22 # defaults to 0x00000000 -RB_SIZETORECT = ( - WM_USER + 23 -) # resize the rebar/break bands and such to this rect (lparam) -RB_SETCOLORSCHEME = CCM_SETCOLORSCHEME # lParam is color scheme -RB_GETCOLORSCHEME = CCM_GETCOLORSCHEME # fills in COLORSCHEME pointed to by lParam -RB_INSERTBAND = RB_INSERTBANDA -RB_SETBANDINFO = RB_SETBANDINFOA -RB_BEGINDRAG = WM_USER + 24 -RB_ENDDRAG = WM_USER + 25 -RB_DRAGMOVE = WM_USER + 26 -RB_GETBARHEIGHT = WM_USER + 27 -RB_GETBANDINFOW = WM_USER + 28 -RB_GETBANDINFOA = WM_USER + 29 -RB_GETBANDINFO = RB_GETBANDINFOA -RB_MINIMIZEBAND = WM_USER + 30 -RB_MAXIMIZEBAND = WM_USER + 31 -RB_GETDROPTARGET = CCM_GETDROPTARGET -RB_GETBANDBORDERS = ( - WM_USER + 34 -) # returns in lparam = lprc the amount of edges added to band wparam -RB_SHOWBAND = WM_USER + 35 # show/hide band -RB_SETPALETTE = WM_USER + 37 -RB_GETPALETTE = WM_USER + 38 -RB_MOVEBAND = WM_USER + 39 -RB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -RB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -RBN_HEIGHTCHANGE = RBN_FIRST - 0 -RBN_GETOBJECT = RBN_FIRST - 1 -RBN_LAYOUTCHANGED = RBN_FIRST - 2 -RBN_AUTOSIZE = RBN_FIRST - 3 -RBN_BEGINDRAG = RBN_FIRST - 4 -RBN_ENDDRAG = RBN_FIRST - 5 -RBN_DELETINGBAND = RBN_FIRST - 6 # Uses NMREBAR -RBN_DELETEDBAND = RBN_FIRST - 7 # Uses NMREBAR -RBN_CHILDSIZE = 
RBN_FIRST - 8 -RBNM_ID = 1 -RBNM_STYLE = 2 -RBNM_LPARAM = 4 -RBHT_NOWHERE = 1 -RBHT_CAPTION = 2 -RBHT_CLIENT = 3 -RBHT_GRABBER = 4 -TOOLTIPS_CLASSA = "tooltips_class32" -TOOLTIPS_CLASS = TOOLTIPS_CLASSA -TTS_ALWAYSTIP = 1 -TTS_NOPREFIX = 2 -TTF_IDISHWND = 1 -TTF_CENTERTIP = 2 -TTF_RTLREADING = 4 -TTF_SUBCLASS = 16 -TTF_TRACK = 32 -TTF_ABSOLUTE = 128 -TTF_TRANSPARENT = 256 -TTF_DI_SETITEM = 32768 # valid only on the TTN_NEEDTEXT callback -TTDT_AUTOMATIC = 0 -TTDT_RESHOW = 1 -TTDT_AUTOPOP = 2 -TTDT_INITIAL = 3 -TTM_ACTIVATE = WM_USER + 1 -TTM_SETDELAYTIME = WM_USER + 3 -TTM_ADDTOOLA = WM_USER + 4 -TTM_ADDTOOLW = WM_USER + 50 -TTM_DELTOOLA = WM_USER + 5 -TTM_DELTOOLW = WM_USER + 51 -TTM_NEWTOOLRECTA = WM_USER + 6 -TTM_NEWTOOLRECTW = WM_USER + 52 -TTM_RELAYEVENT = WM_USER + 7 -TTM_GETTOOLINFOA = WM_USER + 8 -TTM_GETTOOLINFOW = WM_USER + 53 -TTM_SETTOOLINFOA = WM_USER + 9 -TTM_SETTOOLINFOW = WM_USER + 54 -TTM_HITTESTA = WM_USER + 10 -TTM_HITTESTW = WM_USER + 55 -TTM_GETTEXTA = WM_USER + 11 -TTM_GETTEXTW = WM_USER + 56 -TTM_UPDATETIPTEXTA = WM_USER + 12 -TTM_UPDATETIPTEXTW = WM_USER + 57 -TTM_GETTOOLCOUNT = WM_USER + 13 -TTM_ENUMTOOLSA = WM_USER + 14 -TTM_ENUMTOOLSW = WM_USER + 58 -TTM_GETCURRENTTOOLA = WM_USER + 15 -TTM_GETCURRENTTOOLW = WM_USER + 59 -TTM_WINDOWFROMPOINT = WM_USER + 16 -TTM_TRACKACTIVATE = WM_USER + 17 # wParam = TRUE/FALSE start end lparam = LPTOOLINFO -TTM_TRACKPOSITION = WM_USER + 18 # lParam = dwPos -TTM_SETTIPBKCOLOR = WM_USER + 19 -TTM_SETTIPTEXTCOLOR = WM_USER + 20 -TTM_GETDELAYTIME = WM_USER + 21 -TTM_GETTIPBKCOLOR = WM_USER + 22 -TTM_GETTIPTEXTCOLOR = WM_USER + 23 -TTM_SETMAXTIPWIDTH = WM_USER + 24 -TTM_GETMAXTIPWIDTH = WM_USER + 25 -TTM_SETMARGIN = WM_USER + 26 # lParam = lprc -TTM_GETMARGIN = WM_USER + 27 # lParam = lprc -TTM_POP = WM_USER + 28 -TTM_UPDATE = WM_USER + 29 -TTM_ADDTOOL = TTM_ADDTOOLA -TTM_DELTOOL = TTM_DELTOOLA -TTM_NEWTOOLRECT = TTM_NEWTOOLRECTA -TTM_GETTOOLINFO = TTM_GETTOOLINFOA -TTM_SETTOOLINFO = TTM_SETTOOLINFOA 
-TTM_HITTEST = TTM_HITTESTA -TTM_GETTEXT = TTM_GETTEXTA -TTM_UPDATETIPTEXT = TTM_UPDATETIPTEXTA -TTM_ENUMTOOLS = TTM_ENUMTOOLSA -TTM_GETCURRENTTOOL = TTM_GETCURRENTTOOLA -TTN_GETDISPINFOA = TTN_FIRST - 0 -TTN_GETDISPINFOW = TTN_FIRST - 10 -TTN_SHOW = TTN_FIRST - 1 -TTN_POP = TTN_FIRST - 2 -TTN_GETDISPINFO = TTN_GETDISPINFOA -TTN_NEEDTEXT = TTN_GETDISPINFO -TTN_NEEDTEXTA = TTN_GETDISPINFOA -TTN_NEEDTEXTW = TTN_GETDISPINFOW -SBARS_SIZEGRIP = 256 -SBARS_TOOLTIPS = 2048 -STATUSCLASSNAMEA = "msctls_statusbar32" -STATUSCLASSNAME = STATUSCLASSNAMEA -SB_SETTEXTA = WM_USER + 1 -SB_SETTEXTW = WM_USER + 11 -SB_GETTEXTA = WM_USER + 2 -SB_GETTEXTW = WM_USER + 13 -SB_GETTEXTLENGTHA = WM_USER + 3 -SB_GETTEXTLENGTHW = WM_USER + 12 -SB_GETTEXT = SB_GETTEXTA -SB_SETTEXT = SB_SETTEXTA -SB_GETTEXTLENGTH = SB_GETTEXTLENGTHA -SB_SETPARTS = WM_USER + 4 -SB_GETPARTS = WM_USER + 6 -SB_GETBORDERS = WM_USER + 7 -SB_SETMINHEIGHT = WM_USER + 8 -SB_SIMPLE = WM_USER + 9 -SB_GETRECT = WM_USER + 10 -SB_ISSIMPLE = WM_USER + 14 -SB_SETICON = WM_USER + 15 -SB_SETTIPTEXTA = WM_USER + 16 -SB_SETTIPTEXTW = WM_USER + 17 -SB_GETTIPTEXTA = WM_USER + 18 -SB_GETTIPTEXTW = WM_USER + 19 -SB_GETICON = WM_USER + 20 -SB_SETTIPTEXT = SB_SETTIPTEXTA -SB_GETTIPTEXT = SB_GETTIPTEXTA -SB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -SB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -SBT_OWNERDRAW = 4096 -SBT_NOBORDERS = 256 -SBT_POPOUT = 512 -SBT_RTLREADING = 1024 -SBT_NOTABPARSING = 2048 -SBT_TOOLTIPS = 2048 -SB_SETBKCOLOR = CCM_SETBKCOLOR # lParam = bkColor -SBN_SIMPLEMODECHANGE = SBN_FIRST - 0 -TRACKBAR_CLASSA = "msctls_trackbar32" -TRACKBAR_CLASS = TRACKBAR_CLASSA -TBS_AUTOTICKS = 1 -TBS_VERT = 2 -TBS_HORZ = 0 -TBS_TOP = 4 -TBS_BOTTOM = 0 -TBS_LEFT = 4 -TBS_RIGHT = 0 -TBS_BOTH = 8 -TBS_NOTICKS = 16 -TBS_ENABLESELRANGE = 32 -TBS_FIXEDLENGTH = 64 -TBS_NOTHUMB = 128 -TBS_TOOLTIPS = 256 -TBM_GETPOS = WM_USER -TBM_GETRANGEMIN = WM_USER + 1 -TBM_GETRANGEMAX = WM_USER + 2 -TBM_GETTIC = WM_USER + 3 -TBM_SETTIC = WM_USER + 4 
-TBM_SETPOS = WM_USER + 5 -TBM_SETRANGE = WM_USER + 6 -TBM_SETRANGEMIN = WM_USER + 7 -TBM_SETRANGEMAX = WM_USER + 8 -TBM_CLEARTICS = WM_USER + 9 -TBM_SETSEL = WM_USER + 10 -TBM_SETSELSTART = WM_USER + 11 -TBM_SETSELEND = WM_USER + 12 -TBM_GETPTICS = WM_USER + 14 -TBM_GETTICPOS = WM_USER + 15 -TBM_GETNUMTICS = WM_USER + 16 -TBM_GETSELSTART = WM_USER + 17 -TBM_GETSELEND = WM_USER + 18 -TBM_CLEARSEL = WM_USER + 19 -TBM_SETTICFREQ = WM_USER + 20 -TBM_SETPAGESIZE = WM_USER + 21 -TBM_GETPAGESIZE = WM_USER + 22 -TBM_SETLINESIZE = WM_USER + 23 -TBM_GETLINESIZE = WM_USER + 24 -TBM_GETTHUMBRECT = WM_USER + 25 -TBM_GETCHANNELRECT = WM_USER + 26 -TBM_SETTHUMBLENGTH = WM_USER + 27 -TBM_GETTHUMBLENGTH = WM_USER + 28 -TBM_SETTOOLTIPS = WM_USER + 29 -TBM_GETTOOLTIPS = WM_USER + 30 -TBM_SETTIPSIDE = WM_USER + 31 -TBTS_TOP = 0 -TBTS_LEFT = 1 -TBTS_BOTTOM = 2 -TBTS_RIGHT = 3 -TBM_SETBUDDY = WM_USER + 32 # wparam = BOOL fLeft; (or right) -TBM_GETBUDDY = WM_USER + 33 # wparam = BOOL fLeft; (or right) -TBM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -TBM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -TB_LINEUP = 0 -TB_LINEDOWN = 1 -TB_PAGEUP = 2 -TB_PAGEDOWN = 3 -TB_THUMBPOSITION = 4 -TB_THUMBTRACK = 5 -TB_TOP = 6 -TB_BOTTOM = 7 -TB_ENDTRACK = 8 -TBCD_TICS = 1 -TBCD_THUMB = 2 -TBCD_CHANNEL = 3 -DL_BEGINDRAG = WM_USER + 133 -DL_DRAGGING = WM_USER + 134 -DL_DROPPED = WM_USER + 135 -DL_CANCELDRAG = WM_USER + 136 -DL_CURSORSET = 0 -DL_STOPCURSOR = 1 -DL_COPYCURSOR = 2 -DL_MOVECURSOR = 3 -DRAGLISTMSGSTRING = "commctrl_DragListMsg" -UPDOWN_CLASSA = "msctls_updown32" -UPDOWN_CLASS = UPDOWN_CLASSA -UD_MAXVAL = 32767 -UD_MINVAL = -UD_MAXVAL -UDS_WRAP = 1 -UDS_SETBUDDYINT = 2 -UDS_ALIGNRIGHT = 4 -UDS_ALIGNLEFT = 8 -UDS_AUTOBUDDY = 16 -UDS_ARROWKEYS = 32 -UDS_HORZ = 64 -UDS_NOTHOUSANDS = 128 -UDS_HOTTRACK = 256 -UDM_SETRANGE = WM_USER + 101 -UDM_GETRANGE = WM_USER + 102 -UDM_SETPOS = WM_USER + 103 -UDM_GETPOS = WM_USER + 104 -UDM_SETBUDDY = WM_USER + 105 -UDM_GETBUDDY = WM_USER + 106 -UDM_SETACCEL = WM_USER 
+ 107 -UDM_GETACCEL = WM_USER + 108 -UDM_SETBASE = WM_USER + 109 -UDM_GETBASE = WM_USER + 110 -UDM_SETRANGE32 = WM_USER + 111 -UDM_GETRANGE32 = WM_USER + 112 # wParam & lParam are LPINT -UDM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -UDM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -UDN_DELTAPOS = UDN_FIRST - 1 -PROGRESS_CLASSA = "msctls_progress32" -PROGRESS_CLASS = PROGRESS_CLASSA -PBS_SMOOTH = 1 -PBS_VERTICAL = 4 -PBM_SETRANGE = WM_USER + 1 -PBM_SETPOS = WM_USER + 2 -PBM_DELTAPOS = WM_USER + 3 -PBM_SETSTEP = WM_USER + 4 -PBM_STEPIT = WM_USER + 5 -PBM_SETRANGE32 = WM_USER + 6 # lParam = high, wParam = low -PBM_GETRANGE = ( - WM_USER + 7 -) # wParam = return (TRUE ? low : high). lParam = PPBRANGE or NULL -PBM_GETPOS = WM_USER + 8 -PBM_SETBARCOLOR = WM_USER + 9 # lParam = bar color -PBM_SETBKCOLOR = CCM_SETBKCOLOR # lParam = bkColor -HOTKEYF_SHIFT = 1 -HOTKEYF_CONTROL = 2 -HOTKEYF_ALT = 4 -HOTKEYF_EXT = 8 -HKCOMB_NONE = 1 -HKCOMB_S = 2 -HKCOMB_C = 4 -HKCOMB_A = 8 -HKCOMB_SC = 16 -HKCOMB_SA = 32 -HKCOMB_CA = 64 -HKCOMB_SCA = 128 -HKM_SETHOTKEY = WM_USER + 1 -HKM_GETHOTKEY = WM_USER + 2 -HKM_SETRULES = WM_USER + 3 -HOTKEY_CLASSA = "msctls_hotkey32" -HOTKEY_CLASS = HOTKEY_CLASSA -CCS_TOP = 0x00000001 -CCS_NOMOVEY = 0x00000002 -CCS_BOTTOM = 0x00000003 -CCS_NORESIZE = 0x00000004 -CCS_NOPARENTALIGN = 0x00000008 -CCS_ADJUSTABLE = 0x00000020 -CCS_NODIVIDER = 0x00000040 -CCS_VERT = 0x00000080 -CCS_LEFT = CCS_VERT | CCS_TOP -CCS_RIGHT = CCS_VERT | CCS_BOTTOM -CCS_NOMOVEX = CCS_VERT | CCS_NOMOVEY -WC_LISTVIEWA = "SysListView32" -WC_LISTVIEW = WC_LISTVIEWA -LVS_ICON = 0 -LVS_REPORT = 1 -LVS_SMALLICON = 2 -LVS_LIST = 3 -LVS_TYPEMASK = 3 -LVS_SINGLESEL = 4 -LVS_SHOWSELALWAYS = 8 -LVS_SORTASCENDING = 16 -LVS_SORTDESCENDING = 32 -LVS_SHAREIMAGELISTS = 64 -LVS_NOLABELWRAP = 128 -LVS_AUTOARRANGE = 256 -LVS_EDITLABELS = 512 -LVS_OWNERDATA = 4096 -LVS_NOSCROLL = 8192 -LVS_TYPESTYLEMASK = 64512 -LVS_ALIGNTOP = 0 -LVS_ALIGNLEFT = 2048 -LVS_ALIGNMASK = 3072 -LVS_OWNERDRAWFIXED = 1024 
-LVS_NOCOLUMNHEADER = 16384 -LVS_NOSORTHEADER = 32768 -LVM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -LVM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -LVM_GETBKCOLOR = LVM_FIRST + 0 -LVM_SETBKCOLOR = LVM_FIRST + 1 -LVM_GETIMAGELIST = LVM_FIRST + 2 -LVSIL_NORMAL = 0 -LVSIL_SMALL = 1 -LVSIL_STATE = 2 -LVM_SETIMAGELIST = LVM_FIRST + 3 -LVM_GETITEMCOUNT = LVM_FIRST + 4 -LVIF_TEXT = 1 -LVIF_IMAGE = 2 -LVIF_PARAM = 4 -LVIF_STATE = 8 -LVIF_INDENT = 16 -LVIF_NORECOMPUTE = 2048 -LVIS_FOCUSED = 1 -LVIS_SELECTED = 2 -LVIS_CUT = 4 -LVIS_DROPHILITED = 8 -LVIS_ACTIVATING = 32 -LVIS_OVERLAYMASK = 3840 -LVIS_STATEIMAGEMASK = 61440 -I_INDENTCALLBACK = -1 -LPSTR_TEXTCALLBACKA = -1 -LPSTR_TEXTCALLBACK = LPSTR_TEXTCALLBACKA -I_IMAGECALLBACK = -1 -LVM_GETITEMA = LVM_FIRST + 5 -LVM_GETITEMW = LVM_FIRST + 75 -LVM_GETITEM = LVM_GETITEMA -LVM_SETITEMA = LVM_FIRST + 6 -LVM_SETITEMW = LVM_FIRST + 76 -LVM_SETITEM = LVM_SETITEMA -LVM_INSERTITEMA = LVM_FIRST + 7 -LVM_INSERTITEMW = LVM_FIRST + 77 -LVM_INSERTITEM = LVM_INSERTITEMA -LVM_DELETEITEM = LVM_FIRST + 8 -LVM_DELETEALLITEMS = LVM_FIRST + 9 -LVM_GETCALLBACKMASK = LVM_FIRST + 10 -LVM_SETCALLBACKMASK = LVM_FIRST + 11 -LVNI_ALL = 0 -LVNI_FOCUSED = 1 -LVNI_SELECTED = 2 -LVNI_CUT = 4 -LVNI_DROPHILITED = 8 -LVNI_ABOVE = 256 -LVNI_BELOW = 512 -LVNI_TOLEFT = 1024 -LVNI_TORIGHT = 2048 -LVM_GETNEXTITEM = LVM_FIRST + 12 -LVFI_PARAM = 1 -LVFI_STRING = 2 -LVFI_PARTIAL = 8 -LVFI_WRAP = 32 -LVFI_NEARESTXY = 64 -LVM_FINDITEMA = LVM_FIRST + 13 -LVM_FINDITEMW = LVM_FIRST + 83 -LVM_FINDITEM = LVM_FINDITEMA -LVIR_BOUNDS = 0 -LVIR_ICON = 1 -LVIR_LABEL = 2 -LVIR_SELECTBOUNDS = 3 -LVM_GETITEMRECT = LVM_FIRST + 14 -LVM_SETITEMPOSITION = LVM_FIRST + 15 -LVM_GETITEMPOSITION = LVM_FIRST + 16 -LVM_GETSTRINGWIDTHA = LVM_FIRST + 17 -LVM_GETSTRINGWIDTHW = LVM_FIRST + 87 -LVM_GETSTRINGWIDTH = LVM_GETSTRINGWIDTHA -LVHT_NOWHERE = 1 -LVHT_ONITEMICON = 2 -LVHT_ONITEMLABEL = 4 -LVHT_ONITEMSTATEICON = 8 -LVHT_ONITEM = LVHT_ONITEMICON | LVHT_ONITEMLABEL | LVHT_ONITEMSTATEICON 
-LVHT_ABOVE = 8 -LVHT_BELOW = 16 -LVHT_TORIGHT = 32 -LVHT_TOLEFT = 64 -LVM_HITTEST = LVM_FIRST + 18 -LVM_ENSUREVISIBLE = LVM_FIRST + 19 -LVM_SCROLL = LVM_FIRST + 20 -LVM_REDRAWITEMS = LVM_FIRST + 21 -LVA_DEFAULT = 0 -LVA_ALIGNLEFT = 1 -LVA_ALIGNTOP = 2 -LVA_SNAPTOGRID = 5 -LVM_ARRANGE = LVM_FIRST + 22 -LVM_EDITLABELA = LVM_FIRST + 23 -LVM_EDITLABELW = LVM_FIRST + 118 -LVM_EDITLABEL = LVM_EDITLABELA -LVM_GETEDITCONTROL = LVM_FIRST + 24 -LVCF_FMT = 1 -LVCF_WIDTH = 2 -LVCF_TEXT = 4 -LVCF_SUBITEM = 8 -LVCF_IMAGE = 16 -LVCF_ORDER = 32 -LVCFMT_LEFT = 0 -LVCFMT_RIGHT = 1 -LVCFMT_CENTER = 2 -LVCFMT_JUSTIFYMASK = 3 -LVCFMT_IMAGE = 2048 -LVCFMT_BITMAP_ON_RIGHT = 4096 -LVCFMT_COL_HAS_IMAGES = 32768 -LVM_GETCOLUMNA = LVM_FIRST + 25 -LVM_GETCOLUMNW = LVM_FIRST + 95 -LVM_GETCOLUMN = LVM_GETCOLUMNA -LVM_SETCOLUMNA = LVM_FIRST + 26 -LVM_SETCOLUMNW = LVM_FIRST + 96 -LVM_SETCOLUMN = LVM_SETCOLUMNA -LVM_INSERTCOLUMNA = LVM_FIRST + 27 -LVM_INSERTCOLUMNW = LVM_FIRST + 97 -LVM_INSERTCOLUMN = LVM_INSERTCOLUMNA -LVM_DELETECOLUMN = LVM_FIRST + 28 -LVM_GETCOLUMNWIDTH = LVM_FIRST + 29 -LVSCW_AUTOSIZE = -1 -LVSCW_AUTOSIZE_USEHEADER = -2 -LVM_SETCOLUMNWIDTH = LVM_FIRST + 30 -LVM_GETHEADER = LVM_FIRST + 31 -LVM_CREATEDRAGIMAGE = LVM_FIRST + 33 -LVM_GETVIEWRECT = LVM_FIRST + 34 -LVM_GETTEXTCOLOR = LVM_FIRST + 35 -LVM_SETTEXTCOLOR = LVM_FIRST + 36 -LVM_GETTEXTBKCOLOR = LVM_FIRST + 37 -LVM_SETTEXTBKCOLOR = LVM_FIRST + 38 -LVM_GETTOPINDEX = LVM_FIRST + 39 -LVM_GETCOUNTPERPAGE = LVM_FIRST + 40 -LVM_GETORIGIN = LVM_FIRST + 41 -LVM_UPDATE = LVM_FIRST + 42 -LVM_SETITEMSTATE = LVM_FIRST + 43 -LVM_GETITEMSTATE = LVM_FIRST + 44 -LVM_GETITEMTEXTA = LVM_FIRST + 45 -LVM_GETITEMTEXTW = LVM_FIRST + 115 -LVM_GETITEMTEXT = LVM_GETITEMTEXTA -LVM_SETITEMTEXTA = LVM_FIRST + 46 -LVM_SETITEMTEXTW = LVM_FIRST + 116 -LVM_SETITEMTEXT = LVM_SETITEMTEXTA -LVSICF_NOINVALIDATEALL = 1 -LVSICF_NOSCROLL = 2 -LVM_SETITEMCOUNT = LVM_FIRST + 47 -LVM_SORTITEMS = LVM_FIRST + 48 -LVM_SETITEMPOSITION32 = LVM_FIRST + 49 
-LVM_GETSELECTEDCOUNT = LVM_FIRST + 50 -LVM_GETITEMSPACING = LVM_FIRST + 51 -LVM_GETISEARCHSTRINGA = LVM_FIRST + 52 -LVM_GETISEARCHSTRINGW = LVM_FIRST + 117 -LVM_GETISEARCHSTRING = LVM_GETISEARCHSTRINGA -LVM_SETICONSPACING = LVM_FIRST + 53 -LVM_SETEXTENDEDLISTVIEWSTYLE = LVM_FIRST + 54 # optional wParam == mask -LVM_GETEXTENDEDLISTVIEWSTYLE = LVM_FIRST + 55 -LVS_EX_GRIDLINES = 1 -LVS_EX_SUBITEMIMAGES = 2 -LVS_EX_CHECKBOXES = 4 -LVS_EX_TRACKSELECT = 8 -LVS_EX_HEADERDRAGDROP = 16 -LVS_EX_FULLROWSELECT = 32 # applies to report mode only -LVS_EX_ONECLICKACTIVATE = 64 -LVS_EX_TWOCLICKACTIVATE = 128 -LVS_EX_FLATSB = 256 -LVS_EX_REGIONAL = 512 -LVS_EX_INFOTIP = 1024 # listview does InfoTips for you -LVS_EX_UNDERLINEHOT = 2048 -LVS_EX_UNDERLINECOLD = 4096 -LVS_EX_MULTIWORKAREAS = 8192 -LVM_GETSUBITEMRECT = LVM_FIRST + 56 -LVM_SUBITEMHITTEST = LVM_FIRST + 57 -LVM_SETCOLUMNORDERARRAY = LVM_FIRST + 58 -LVM_GETCOLUMNORDERARRAY = LVM_FIRST + 59 -LVM_SETHOTITEM = LVM_FIRST + 60 -LVM_GETHOTITEM = LVM_FIRST + 61 -LVM_SETHOTCURSOR = LVM_FIRST + 62 -LVM_GETHOTCURSOR = LVM_FIRST + 63 -LVM_APPROXIMATEVIEWRECT = LVM_FIRST + 64 -LV_MAX_WORKAREAS = 16 -LVM_SETWORKAREAS = LVM_FIRST + 65 -LVM_GETWORKAREAS = LVM_FIRST + 70 -LVM_GETNUMBEROFWORKAREAS = LVM_FIRST + 73 -LVM_GETSELECTIONMARK = LVM_FIRST + 66 -LVM_SETSELECTIONMARK = LVM_FIRST + 67 -LVM_SETHOVERTIME = LVM_FIRST + 71 -LVM_GETHOVERTIME = LVM_FIRST + 72 -LVM_SETTOOLTIPS = LVM_FIRST + 74 -LVM_GETTOOLTIPS = LVM_FIRST + 78 -LVBKIF_SOURCE_NONE = 0 -LVBKIF_SOURCE_HBITMAP = 1 -LVBKIF_SOURCE_URL = 2 -LVBKIF_SOURCE_MASK = 3 -LVBKIF_STYLE_NORMAL = 0 -LVBKIF_STYLE_TILE = 16 -LVBKIF_STYLE_MASK = 16 -LVM_SETBKIMAGEA = LVM_FIRST + 68 -LVM_SETBKIMAGEW = LVM_FIRST + 138 -LVM_GETBKIMAGEA = LVM_FIRST + 69 -LVM_GETBKIMAGEW = LVM_FIRST + 139 -LVKF_ALT = 1 -LVKF_CONTROL = 2 -LVKF_SHIFT = 4 -LVN_ITEMCHANGING = LVN_FIRST - 0 -LVN_ITEMCHANGED = LVN_FIRST - 1 -LVN_INSERTITEM = LVN_FIRST - 2 -LVN_DELETEITEM = LVN_FIRST - 3 -LVN_DELETEALLITEMS = LVN_FIRST - 4 
-LVN_BEGINLABELEDITA = LVN_FIRST - 5 -LVN_BEGINLABELEDITW = LVN_FIRST - 75 -LVN_ENDLABELEDITA = LVN_FIRST - 6 -LVN_ENDLABELEDITW = LVN_FIRST - 76 -LVN_COLUMNCLICK = LVN_FIRST - 8 -LVN_BEGINDRAG = LVN_FIRST - 9 -LVN_BEGINRDRAG = LVN_FIRST - 11 -LVN_ODCACHEHINT = LVN_FIRST - 13 -LVN_ODFINDITEMA = LVN_FIRST - 52 -LVN_ODFINDITEMW = LVN_FIRST - 79 -LVN_ITEMACTIVATE = LVN_FIRST - 14 -LVN_ODSTATECHANGED = LVN_FIRST - 15 -LVN_ODFINDITEM = LVN_ODFINDITEMA -LVN_HOTTRACK = LVN_FIRST - 21 -LVN_GETDISPINFOA = LVN_FIRST - 50 -LVN_GETDISPINFOW = LVN_FIRST - 77 -LVN_SETDISPINFOA = LVN_FIRST - 51 -LVN_SETDISPINFOW = LVN_FIRST - 78 -LVN_BEGINLABELEDIT = LVN_BEGINLABELEDITA -LVN_ENDLABELEDIT = LVN_ENDLABELEDITA -LVN_GETDISPINFO = LVN_GETDISPINFOA -LVN_SETDISPINFO = LVN_SETDISPINFOA -LVIF_DI_SETITEM = 4096 -LVN_KEYDOWN = LVN_FIRST - 55 -LVN_MARQUEEBEGIN = LVN_FIRST - 56 -LVGIT_UNFOLDED = 1 -LVN_GETINFOTIPA = LVN_FIRST - 57 -LVN_GETINFOTIPW = LVN_FIRST - 58 -LVN_GETINFOTIP = LVN_GETINFOTIPA -WC_TREEVIEWA = "SysTreeView32" -WC_TREEVIEW = WC_TREEVIEWA -TVS_HASBUTTONS = 1 -TVS_HASLINES = 2 -TVS_LINESATROOT = 4 -TVS_EDITLABELS = 8 -TVS_DISABLEDRAGDROP = 16 -TVS_SHOWSELALWAYS = 32 -TVS_RTLREADING = 64 -TVS_NOTOOLTIPS = 128 -TVS_CHECKBOXES = 256 -TVS_TRACKSELECT = 512 -TVS_SINGLEEXPAND = 1024 -TVS_INFOTIP = 2048 -TVS_FULLROWSELECT = 4096 -TVS_NOSCROLL = 8192 -TVS_NONEVENHEIGHT = 16384 -TVIF_TEXT = 1 -TVIF_IMAGE = 2 -TVIF_PARAM = 4 -TVIF_STATE = 8 -TVIF_HANDLE = 16 -TVIF_SELECTEDIMAGE = 32 -TVIF_CHILDREN = 64 -TVIF_INTEGRAL = 128 -TVIS_SELECTED = 2 -TVIS_CUT = 4 -TVIS_DROPHILITED = 8 -TVIS_BOLD = 16 -TVIS_EXPANDED = 32 -TVIS_EXPANDEDONCE = 64 -TVIS_EXPANDPARTIAL = 128 -TVIS_OVERLAYMASK = 3840 -TVIS_STATEIMAGEMASK = 61440 -TVIS_USERMASK = 61440 -I_CHILDRENCALLBACK = -1 -TVI_ROOT = -65536 -TVI_FIRST = -65535 -TVI_LAST = -65534 -TVI_SORT = -65533 -TVM_INSERTITEMA = TV_FIRST + 0 -TVM_INSERTITEMW = TV_FIRST + 50 -TVM_INSERTITEM = TVM_INSERTITEMA -TVM_DELETEITEM = TV_FIRST + 1 -TVM_EXPAND = 
TV_FIRST + 2 -TVE_COLLAPSE = 1 -TVE_EXPAND = 2 -TVE_TOGGLE = 3 -TVE_EXPANDPARTIAL = 16384 -TVE_COLLAPSERESET = 32768 -TVM_GETITEMRECT = TV_FIRST + 4 -TVM_GETCOUNT = TV_FIRST + 5 -TVM_GETINDENT = TV_FIRST + 6 -TVM_SETINDENT = TV_FIRST + 7 -TVM_GETIMAGELIST = TV_FIRST + 8 -TVSIL_NORMAL = 0 -TVSIL_STATE = 2 -TVM_SETIMAGELIST = TV_FIRST + 9 -TVM_GETNEXTITEM = TV_FIRST + 10 -TVGN_ROOT = 0 -TVGN_NEXT = 1 -TVGN_PREVIOUS = 2 -TVGN_PARENT = 3 -TVGN_CHILD = 4 -TVGN_FIRSTVISIBLE = 5 -TVGN_NEXTVISIBLE = 6 -TVGN_PREVIOUSVISIBLE = 7 -TVGN_DROPHILITE = 8 -TVGN_CARET = 9 -TVGN_LASTVISIBLE = 10 -TVM_SELECTITEM = TV_FIRST + 11 -TVM_GETITEMA = TV_FIRST + 12 -TVM_GETITEMW = TV_FIRST + 62 -TVM_GETITEM = TVM_GETITEMA -TVM_SETITEMA = TV_FIRST + 13 -TVM_SETITEMW = TV_FIRST + 63 -TVM_SETITEM = TVM_SETITEMA -TVM_EDITLABELA = TV_FIRST + 14 -TVM_EDITLABELW = TV_FIRST + 65 -TVM_EDITLABEL = TVM_EDITLABELA -TVM_GETEDITCONTROL = TV_FIRST + 15 -TVM_GETVISIBLECOUNT = TV_FIRST + 16 -TVM_HITTEST = TV_FIRST + 17 -TVHT_NOWHERE = 1 -TVHT_ONITEMICON = 2 -TVHT_ONITEMLABEL = 4 -TVHT_ONITEMINDENT = 8 -TVHT_ONITEMBUTTON = 16 -TVHT_ONITEMRIGHT = 32 -TVHT_ONITEMSTATEICON = 64 -TVHT_ABOVE = 256 -TVHT_BELOW = 512 -TVHT_TORIGHT = 1024 -TVHT_TOLEFT = 2048 -TVHT_ONITEM = TVHT_ONITEMICON | TVHT_ONITEMLABEL | TVHT_ONITEMSTATEICON -TVM_CREATEDRAGIMAGE = TV_FIRST + 18 -TVM_SORTCHILDREN = TV_FIRST + 19 -TVM_ENSUREVISIBLE = TV_FIRST + 20 -TVM_SORTCHILDRENCB = TV_FIRST + 21 -TVM_ENDEDITLABELNOW = TV_FIRST + 22 -TVM_GETISEARCHSTRINGA = TV_FIRST + 23 -TVM_GETISEARCHSTRINGW = TV_FIRST + 64 -TVM_GETISEARCHSTRING = TVM_GETISEARCHSTRINGA -TVM_SETTOOLTIPS = TV_FIRST + 24 -TVM_GETTOOLTIPS = TV_FIRST + 25 -TVM_SETINSERTMARK = TV_FIRST + 26 -TVM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -TVM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -TVM_SETITEMHEIGHT = TV_FIRST + 27 -TVM_GETITEMHEIGHT = TV_FIRST + 28 -TVM_SETBKCOLOR = TV_FIRST + 29 -TVM_SETTEXTCOLOR = TV_FIRST + 30 -TVM_GETBKCOLOR = TV_FIRST + 31 -TVM_GETTEXTCOLOR = TV_FIRST + 32 
-TVM_SETSCROLLTIME = TV_FIRST + 33 -TVM_GETSCROLLTIME = TV_FIRST + 34 -TVM_SETINSERTMARKCOLOR = TV_FIRST + 37 -TVM_GETINSERTMARKCOLOR = TV_FIRST + 38 -TVN_SELCHANGINGA = TVN_FIRST - 1 -TVN_SELCHANGINGW = TVN_FIRST - 50 -TVN_SELCHANGEDA = TVN_FIRST - 2 -TVN_SELCHANGEDW = TVN_FIRST - 51 -TVC_UNKNOWN = 0 -TVC_BYMOUSE = 1 -TVC_BYKEYBOARD = 2 -TVN_GETDISPINFOA = TVN_FIRST - 3 -TVN_GETDISPINFOW = TVN_FIRST - 52 -TVN_SETDISPINFOA = TVN_FIRST - 4 -TVN_SETDISPINFOW = TVN_FIRST - 53 -TVIF_DI_SETITEM = 4096 -TVN_ITEMEXPANDINGA = TVN_FIRST - 5 -TVN_ITEMEXPANDINGW = TVN_FIRST - 54 -TVN_ITEMEXPANDEDA = TVN_FIRST - 6 -TVN_ITEMEXPANDEDW = TVN_FIRST - 55 -TVN_BEGINDRAGA = TVN_FIRST - 7 -TVN_BEGINDRAGW = TVN_FIRST - 56 -TVN_BEGINRDRAGA = TVN_FIRST - 8 -TVN_BEGINRDRAGW = TVN_FIRST - 57 -TVN_DELETEITEMA = TVN_FIRST - 9 -TVN_DELETEITEMW = TVN_FIRST - 58 -TVN_BEGINLABELEDITA = TVN_FIRST - 10 -TVN_BEGINLABELEDITW = TVN_FIRST - 59 -TVN_ENDLABELEDITA = TVN_FIRST - 11 -TVN_ENDLABELEDITW = TVN_FIRST - 60 -TVN_KEYDOWN = TVN_FIRST - 12 -TVN_GETINFOTIPA = TVN_FIRST - 13 -TVN_GETINFOTIPW = TVN_FIRST - 14 -TVN_SINGLEEXPAND = TVN_FIRST - 15 -TVN_SELCHANGING = TVN_SELCHANGINGA -TVN_SELCHANGED = TVN_SELCHANGEDA -TVN_GETDISPINFO = TVN_GETDISPINFOA -TVN_SETDISPINFO = TVN_SETDISPINFOA -TVN_ITEMEXPANDING = TVN_ITEMEXPANDINGA -TVN_ITEMEXPANDED = TVN_ITEMEXPANDEDA -TVN_BEGINDRAG = TVN_BEGINDRAGA -TVN_BEGINRDRAG = TVN_BEGINRDRAGA -TVN_DELETEITEM = TVN_DELETEITEMA -TVN_BEGINLABELEDIT = TVN_BEGINLABELEDITA -TVN_ENDLABELEDIT = TVN_ENDLABELEDITA -TVN_GETINFOTIP = TVN_GETINFOTIPA -TVCDRF_NOIMAGES = 65536 -WC_COMBOBOXEXA = "ComboBoxEx32" -WC_COMBOBOXEX = WC_COMBOBOXEXA -CBEIF_TEXT = 1 -CBEIF_IMAGE = 2 -CBEIF_SELECTEDIMAGE = 4 -CBEIF_OVERLAY = 8 -CBEIF_INDENT = 16 -CBEIF_LPARAM = 32 -CBEIF_DI_SETITEM = 268435456 -CBEM_INSERTITEMA = WM_USER + 1 -CBEM_SETIMAGELIST = WM_USER + 2 -CBEM_GETIMAGELIST = WM_USER + 3 -CBEM_GETITEMA = WM_USER + 4 -CBEM_SETITEMA = WM_USER + 5 -# CBEM_DELETEITEM = CB_DELETESTRING 
-CBEM_GETCOMBOCONTROL = WM_USER + 6 -CBEM_GETEDITCONTROL = WM_USER + 7 -CBEM_SETEXSTYLE = WM_USER + 8 # use SETEXTENDEDSTYLE instead -CBEM_SETEXTENDEDSTYLE = WM_USER + 14 # lparam == new style, wParam (optional) == mask -CBEM_GETEXSTYLE = WM_USER + 9 # use GETEXTENDEDSTYLE instead -CBEM_GETEXTENDEDSTYLE = WM_USER + 9 -CBEM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -CBEM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -CBEM_HASEDITCHANGED = WM_USER + 10 -CBEM_INSERTITEMW = WM_USER + 11 -CBEM_SETITEMW = WM_USER + 12 -CBEM_GETITEMW = WM_USER + 13 -CBEM_INSERTITEM = CBEM_INSERTITEMA -CBEM_SETITEM = CBEM_SETITEMA -CBEM_GETITEM = CBEM_GETITEMA -CBES_EX_NOEDITIMAGE = 1 -CBES_EX_NOEDITIMAGEINDENT = 2 -CBES_EX_PATHWORDBREAKPROC = 4 -CBES_EX_NOSIZELIMIT = 8 -CBES_EX_CASESENSITIVE = 16 -CBEN_GETDISPINFO = CBEN_FIRST - 0 -CBEN_GETDISPINFOA = CBEN_FIRST - 0 -CBEN_INSERTITEM = CBEN_FIRST - 1 -CBEN_DELETEITEM = CBEN_FIRST - 2 -CBEN_BEGINEDIT = CBEN_FIRST - 4 -CBEN_ENDEDITA = CBEN_FIRST - 5 -CBEN_ENDEDITW = CBEN_FIRST - 6 -CBEN_GETDISPINFOW = CBEN_FIRST - 7 -CBEN_DRAGBEGINA = CBEN_FIRST - 8 -CBEN_DRAGBEGINW = CBEN_FIRST - 9 -CBEN_DRAGBEGIN = CBEN_DRAGBEGINA -CBEN_ENDEDIT = CBEN_ENDEDITA -CBENF_KILLFOCUS = 1 -CBENF_RETURN = 2 -CBENF_ESCAPE = 3 -CBENF_DROPDOWN = 4 -CBEMAXSTRLEN = 260 -WC_TABCONTROLA = "SysTabControl32" -WC_TABCONTROL = WC_TABCONTROLA -TCS_SCROLLOPPOSITE = 1 # assumes multiline tab -TCS_BOTTOM = 2 -TCS_RIGHT = 2 -TCS_MULTISELECT = 4 # allow multi-select in button mode -TCS_FLATBUTTONS = 8 -TCS_FORCEICONLEFT = 16 -TCS_FORCELABELLEFT = 32 -TCS_HOTTRACK = 64 -TCS_VERTICAL = 128 -TCS_TABS = 0 -TCS_BUTTONS = 256 -TCS_SINGLELINE = 0 -TCS_MULTILINE = 512 -TCS_RIGHTJUSTIFY = 0 -TCS_FIXEDWIDTH = 1024 -TCS_RAGGEDRIGHT = 2048 -TCS_FOCUSONBUTTONDOWN = 4096 -TCS_OWNERDRAWFIXED = 8192 -TCS_TOOLTIPS = 16384 -TCS_FOCUSNEVER = 32768 -TCS_EX_FLATSEPARATORS = 1 -TCS_EX_REGISTERDROP = 2 -TCM_GETIMAGELIST = TCM_FIRST + 2 -TCM_SETIMAGELIST = TCM_FIRST + 3 -TCM_GETITEMCOUNT = TCM_FIRST + 4 
-TCIF_TEXT = 1 -TCIF_IMAGE = 2 -TCIF_RTLREADING = 4 -TCIF_PARAM = 8 -TCIF_STATE = 16 -TCIS_BUTTONPRESSED = 1 -TCIS_HIGHLIGHTED = 2 -TCM_GETITEMA = TCM_FIRST + 5 -TCM_GETITEMW = TCM_FIRST + 60 -TCM_GETITEM = TCM_GETITEMA -TCM_SETITEMA = TCM_FIRST + 6 -TCM_SETITEMW = TCM_FIRST + 61 -TCM_SETITEM = TCM_SETITEMA -TCM_INSERTITEMA = TCM_FIRST + 7 -TCM_INSERTITEMW = TCM_FIRST + 62 -TCM_INSERTITEM = TCM_INSERTITEMA -TCM_DELETEITEM = TCM_FIRST + 8 -TCM_DELETEALLITEMS = TCM_FIRST + 9 -TCM_GETITEMRECT = TCM_FIRST + 10 -TCM_GETCURSEL = TCM_FIRST + 11 -TCM_SETCURSEL = TCM_FIRST + 12 -TCHT_NOWHERE = 1 -TCHT_ONITEMICON = 2 -TCHT_ONITEMLABEL = 4 -TCHT_ONITEM = TCHT_ONITEMICON | TCHT_ONITEMLABEL -TCM_HITTEST = TCM_FIRST + 13 -TCM_SETITEMEXTRA = TCM_FIRST + 14 -TCM_ADJUSTRECT = TCM_FIRST + 40 -TCM_SETITEMSIZE = TCM_FIRST + 41 -TCM_REMOVEIMAGE = TCM_FIRST + 42 -TCM_SETPADDING = TCM_FIRST + 43 -TCM_GETROWCOUNT = TCM_FIRST + 44 -TCM_GETTOOLTIPS = TCM_FIRST + 45 -TCM_SETTOOLTIPS = TCM_FIRST + 46 -TCM_GETCURFOCUS = TCM_FIRST + 47 -TCM_SETCURFOCUS = TCM_FIRST + 48 -TCM_SETMINTABWIDTH = TCM_FIRST + 49 -TCM_DESELECTALL = TCM_FIRST + 50 -TCM_HIGHLIGHTITEM = TCM_FIRST + 51 -TCM_SETEXTENDEDSTYLE = TCM_FIRST + 52 # optional wParam == mask -TCM_GETEXTENDEDSTYLE = TCM_FIRST + 53 -TCM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -TCM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -TCN_KEYDOWN = TCN_FIRST - 0 -ANIMATE_CLASSA = "SysAnimate32" -ANIMATE_CLASS = ANIMATE_CLASSA -ACS_CENTER = 1 -ACS_TRANSPARENT = 2 -ACS_AUTOPLAY = 4 -ACS_TIMER = 8 # don't use threads... 
use timers -ACM_OPENA = WM_USER + 100 -ACM_OPENW = WM_USER + 103 -ACM_OPEN = ACM_OPENA -ACM_PLAY = WM_USER + 101 -ACM_STOP = WM_USER + 102 -ACN_START = 1 -ACN_STOP = 2 -MONTHCAL_CLASSA = "SysMonthCal32" -MONTHCAL_CLASS = MONTHCAL_CLASSA -MCM_FIRST = 4096 -MCM_GETCURSEL = MCM_FIRST + 1 -MCM_SETCURSEL = MCM_FIRST + 2 -MCM_GETMAXSELCOUNT = MCM_FIRST + 3 -MCM_SETMAXSELCOUNT = MCM_FIRST + 4 -MCM_GETSELRANGE = MCM_FIRST + 5 -MCM_SETSELRANGE = MCM_FIRST + 6 -MCM_GETMONTHRANGE = MCM_FIRST + 7 -MCM_SETDAYSTATE = MCM_FIRST + 8 -MCM_GETMINREQRECT = MCM_FIRST + 9 -MCM_SETCOLOR = MCM_FIRST + 10 -MCM_GETCOLOR = MCM_FIRST + 11 -MCSC_BACKGROUND = 0 # the background color (between months) -MCSC_TEXT = 1 # the dates -MCSC_TITLEBK = 2 # background of the title -MCSC_TITLETEXT = 3 -MCSC_MONTHBK = 4 # background within the month cal -MCSC_TRAILINGTEXT = 5 # the text color of header & trailing days -MCM_SETTODAY = MCM_FIRST + 12 -MCM_GETTODAY = MCM_FIRST + 13 -MCM_HITTEST = MCM_FIRST + 14 -MCHT_TITLE = 65536 -MCHT_CALENDAR = 131072 -MCHT_TODAYLINK = 196608 -MCHT_NEXT = 16777216 # these indicate that hitting -MCHT_PREV = 33554432 # here will go to the next/prev month -MCHT_NOWHERE = 0 -MCHT_TITLEBK = MCHT_TITLE -MCHT_TITLEMONTH = MCHT_TITLE | 1 -MCHT_TITLEYEAR = MCHT_TITLE | 2 -MCHT_TITLEBTNNEXT = MCHT_TITLE | MCHT_NEXT | 3 -MCHT_TITLEBTNPREV = MCHT_TITLE | MCHT_PREV | 3 -MCHT_CALENDARBK = MCHT_CALENDAR -MCHT_CALENDARDATE = MCHT_CALENDAR | 1 -MCHT_CALENDARDATENEXT = MCHT_CALENDARDATE | MCHT_NEXT -MCHT_CALENDARDATEPREV = MCHT_CALENDARDATE | MCHT_PREV -MCHT_CALENDARDAY = MCHT_CALENDAR | 2 -MCHT_CALENDARWEEKNUM = MCHT_CALENDAR | 3 -MCM_SETFIRSTDAYOFWEEK = MCM_FIRST + 15 -MCM_GETFIRSTDAYOFWEEK = MCM_FIRST + 16 -MCM_GETRANGE = MCM_FIRST + 17 -MCM_SETRANGE = MCM_FIRST + 18 -MCM_GETMONTHDELTA = MCM_FIRST + 19 -MCM_SETMONTHDELTA = MCM_FIRST + 20 -MCM_GETMAXTODAYWIDTH = MCM_FIRST + 21 -MCM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT -MCM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT -MCN_SELCHANGE = 
MCN_FIRST + 1 -MCN_GETDAYSTATE = MCN_FIRST + 3 -MCN_SELECT = MCN_FIRST + 4 -MCS_DAYSTATE = 1 -MCS_MULTISELECT = 2 -MCS_WEEKNUMBERS = 4 -MCS_NOTODAYCIRCLE = 8 -MCS_NOTODAY = 8 -GMR_VISIBLE = 0 # visible portion of display -GMR_DAYSTATE = 1 # above plus the grayed out parts of -DATETIMEPICK_CLASSA = "SysDateTimePick32" -DATETIMEPICK_CLASS = DATETIMEPICK_CLASSA -DTM_FIRST = 4096 -DTM_GETSYSTEMTIME = DTM_FIRST + 1 -DTM_SETSYSTEMTIME = DTM_FIRST + 2 -DTM_GETRANGE = DTM_FIRST + 3 -DTM_SETRANGE = DTM_FIRST + 4 -DTM_SETFORMATA = DTM_FIRST + 5 -DTM_SETFORMATW = DTM_FIRST + 50 -DTM_SETFORMAT = DTM_SETFORMATA -DTM_SETMCCOLOR = DTM_FIRST + 6 -DTM_GETMCCOLOR = DTM_FIRST + 7 -DTM_GETMONTHCAL = DTM_FIRST + 8 -DTM_SETMCFONT = DTM_FIRST + 9 -DTM_GETMCFONT = DTM_FIRST + 10 -DTS_UPDOWN = 1 # use UPDOWN instead of MONTHCAL -DTS_SHOWNONE = 2 # allow a NONE selection -DTS_SHORTDATEFORMAT = ( - 0 # use the short date format (app must forward WM_WININICHANGE messages) -) -DTS_LONGDATEFORMAT = ( - 4 # use the long date format (app must forward WM_WININICHANGE messages) -) -DTS_TIMEFORMAT = 9 # use the time format (app must forward WM_WININICHANGE messages) -DTS_APPCANPARSE = 16 # allow user entered strings (app MUST respond to DTN_USERSTRING) -DTS_RIGHTALIGN = 32 # right-align popup instead of left-align it -DTN_DATETIMECHANGE = DTN_FIRST + 1 # the systemtime has changed -DTN_USERSTRINGA = DTN_FIRST + 2 # the user has entered a string -DTN_USERSTRINGW = DTN_FIRST + 15 -DTN_USERSTRING = DTN_USERSTRINGW -DTN_WMKEYDOWNA = DTN_FIRST + 3 # modify keydown on app format field (X) -DTN_WMKEYDOWNW = DTN_FIRST + 16 -DTN_WMKEYDOWN = DTN_WMKEYDOWNA -DTN_FORMATA = DTN_FIRST + 4 # query display for app format field (X) -DTN_FORMATW = DTN_FIRST + 17 -DTN_FORMAT = DTN_FORMATA -DTN_FORMATQUERYA = DTN_FIRST + 5 # query formatting info for app format field (X) -DTN_FORMATQUERYW = DTN_FIRST + 18 -DTN_FORMATQUERY = DTN_FORMATQUERYA -DTN_DROPDOWN = DTN_FIRST + 6 # MonthCal has dropped down -DTN_CLOSEUP = 
DTN_FIRST + 7 # MonthCal is popping up -GDTR_MIN = 1 -GDTR_MAX = 2 -GDT_ERROR = -1 -GDT_VALID = 0 -GDT_NONE = 1 -IPM_CLEARADDRESS = WM_USER + 100 # no parameters -IPM_SETADDRESS = WM_USER + 101 # lparam = TCP/IP address -IPM_GETADDRESS = ( - WM_USER + 102 -) # lresult = # of non black fields. lparam = LPDWORD for TCP/IP address -IPM_SETRANGE = WM_USER + 103 # wparam = field, lparam = range -IPM_SETFOCUS = WM_USER + 104 # wparam = field -IPM_ISBLANK = WM_USER + 105 # no parameters -WC_IPADDRESSA = "SysIPAddress32" -WC_IPADDRESS = WC_IPADDRESSA -IPN_FIELDCHANGED = IPN_FIRST - 0 -WC_PAGESCROLLERA = "SysPager" -WC_PAGESCROLLER = WC_PAGESCROLLERA -PGS_VERT = 0 -PGS_HORZ = 1 -PGS_AUTOSCROLL = 2 -PGS_DRAGNDROP = 4 -PGF_INVISIBLE = 0 # Scroll button is not visible -PGF_NORMAL = 1 # Scroll button is in normal state -PGF_GRAYED = 2 # Scroll button is in grayed state -PGF_DEPRESSED = 4 # Scroll button is in depressed state -PGF_HOT = 8 # Scroll button is in hot state -PGB_TOPORLEFT = 0 -PGB_BOTTOMORRIGHT = 1 -PGM_SETCHILD = PGM_FIRST + 1 # lParam == hwnd -PGM_RECALCSIZE = PGM_FIRST + 2 -PGM_FORWARDMOUSE = PGM_FIRST + 3 -PGM_SETBKCOLOR = PGM_FIRST + 4 -PGM_GETBKCOLOR = PGM_FIRST + 5 -PGM_SETBORDER = PGM_FIRST + 6 -PGM_GETBORDER = PGM_FIRST + 7 -PGM_SETPOS = PGM_FIRST + 8 -PGM_GETPOS = PGM_FIRST + 9 -PGM_SETBUTTONSIZE = PGM_FIRST + 10 -PGM_GETBUTTONSIZE = PGM_FIRST + 11 -PGM_GETBUTTONSTATE = PGM_FIRST + 12 -PGM_GETDROPTARGET = CCM_GETDROPTARGET -PGN_SCROLL = PGN_FIRST - 1 -PGF_SCROLLUP = 1 -PGF_SCROLLDOWN = 2 -PGF_SCROLLLEFT = 4 -PGF_SCROLLRIGHT = 8 -PGK_SHIFT = 1 -PGK_CONTROL = 2 -PGK_MENU = 4 -PGN_CALCSIZE = PGN_FIRST - 2 -PGF_CALCWIDTH = 1 -PGF_CALCHEIGHT = 2 -WC_NATIVEFONTCTLA = "NativeFontCtl" -WC_NATIVEFONTCTL = WC_NATIVEFONTCTLA -NFS_EDIT = 1 -NFS_STATIC = 2 -NFS_LISTCOMBO = 4 -NFS_BUTTON = 8 -NFS_ALL = 16 -WM_MOUSEHOVER = 673 -WM_MOUSELEAVE = 675 -TME_HOVER = 1 -TME_LEAVE = 2 -TME_QUERY = 1073741824 -TME_CANCEL = -2147483648 -HOVER_DEFAULT = -1 -WSB_PROP_CYVSCROLL = 
0x00000001 -WSB_PROP_CXHSCROLL = 0x00000002 -WSB_PROP_CYHSCROLL = 0x00000004 -WSB_PROP_CXVSCROLL = 0x00000008 -WSB_PROP_CXHTHUMB = 0x00000010 -WSB_PROP_CYVTHUMB = 0x00000020 -WSB_PROP_VBKGCOLOR = 0x00000040 -WSB_PROP_HBKGCOLOR = 0x00000080 -WSB_PROP_VSTYLE = 0x00000100 -WSB_PROP_HSTYLE = 0x00000200 -WSB_PROP_WINSTYLE = 0x00000400 -WSB_PROP_PALETTE = 0x00000800 -WSB_PROP_MASK = 0x00000FFF -FSB_FLAT_MODE = 2 -FSB_ENCARTA_MODE = 1 -FSB_REGULAR_MODE = 0 - - -def INDEXTOOVERLAYMASK(i): - return i << 8 - - -def INDEXTOSTATEIMAGEMASK(i): - return i << 12 diff --git a/lib/win32/lib/dbi.py b/lib/win32/lib/dbi.py deleted file mode 100644 index c33d6721..00000000 --- a/lib/win32/lib/dbi.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Skeleton replacement for removed dbi module. -Use of objects created by this module should be replaced with native Python objects. -Dates are now returned as datetime.datetime objects, but will still accept PyTime -objects also. -Raw data for binary fields should be passed as buffer objects for Python 2.x, -and memoryview objects in Py3k. 
-""" - -import warnings - -warnings.warn( - "dbi module is obsolete, code should now use native python datetime and buffer/memoryview objects", - DeprecationWarning, -) - -import datetime - -dbDate = dbiDate = datetime.datetime - -try: - dbRaw = dbiRaw = buffer -except NameError: - dbRaw = dbiRaw = memoryview - -# type names are still exported by odbc module -from odbc import * diff --git a/lib/win32/lib/mmsystem.py b/lib/win32/lib/mmsystem.py deleted file mode 100644 index 4d36b5ef..00000000 --- a/lib/win32/lib/mmsystem.py +++ /dev/null @@ -1,954 +0,0 @@ -# Generated by h2py from d:/msdev/include/mmsystem.h -MAXPNAMELEN = 32 -MAXERRORLENGTH = 256 -MAX_JOYSTICKOEMVXDNAME = 260 -MM_MICROSOFT = 1 -MM_MIDI_MAPPER = 1 -MM_WAVE_MAPPER = 2 -MM_SNDBLST_MIDIOUT = 3 -MM_SNDBLST_MIDIIN = 4 -MM_SNDBLST_SYNTH = 5 -MM_SNDBLST_WAVEOUT = 6 -MM_SNDBLST_WAVEIN = 7 -MM_ADLIB = 9 -MM_MPU401_MIDIOUT = 10 -MM_MPU401_MIDIIN = 11 -MM_PC_JOYSTICK = 12 -TIME_MS = 0x0001 -TIME_SAMPLES = 0x0002 -TIME_BYTES = 0x0004 -TIME_SMPTE = 0x0008 -TIME_MIDI = 0x0010 -TIME_TICKS = 0x0020 -MM_JOY1MOVE = 0x3A0 -MM_JOY2MOVE = 0x3A1 -MM_JOY1ZMOVE = 0x3A2 -MM_JOY2ZMOVE = 0x3A3 -MM_JOY1BUTTONDOWN = 0x3B5 -MM_JOY2BUTTONDOWN = 0x3B6 -MM_JOY1BUTTONUP = 0x3B7 -MM_JOY2BUTTONUP = 0x3B8 -MM_MCINOTIFY = 0x3B9 -MM_WOM_OPEN = 0x3BB -MM_WOM_CLOSE = 0x3BC -MM_WOM_DONE = 0x3BD -MM_WIM_OPEN = 0x3BE -MM_WIM_CLOSE = 0x3BF -MM_WIM_DATA = 0x3C0 -MM_MIM_OPEN = 0x3C1 -MM_MIM_CLOSE = 0x3C2 -MM_MIM_DATA = 0x3C3 -MM_MIM_LONGDATA = 0x3C4 -MM_MIM_ERROR = 0x3C5 -MM_MIM_LONGERROR = 0x3C6 -MM_MOM_OPEN = 0x3C7 -MM_MOM_CLOSE = 0x3C8 -MM_MOM_DONE = 0x3C9 -MM_STREAM_OPEN = 0x3D4 -MM_STREAM_CLOSE = 0x3D5 -MM_STREAM_DONE = 0x3D6 -MM_STREAM_ERROR = 0x3D7 -MM_MOM_POSITIONCB = 0x3CA -MM_MIM_MOREDATA = 0x3CC -MM_MIXM_LINE_CHANGE = 0x3D0 -MM_MIXM_CONTROL_CHANGE = 0x3D1 -MMSYSERR_BASE = 0 -WAVERR_BASE = 32 -MIDIERR_BASE = 64 -TIMERR_BASE = 96 -JOYERR_BASE = 160 -MCIERR_BASE = 256 -MIXERR_BASE = 1024 -MCI_STRING_OFFSET = 512 -MCI_VD_OFFSET = 1024 
-MCI_CD_OFFSET = 1088 -MCI_WAVE_OFFSET = 1152 -MCI_SEQ_OFFSET = 1216 -MMSYSERR_NOERROR = 0 -MMSYSERR_ERROR = MMSYSERR_BASE + 1 -MMSYSERR_BADDEVICEID = MMSYSERR_BASE + 2 -MMSYSERR_NOTENABLED = MMSYSERR_BASE + 3 -MMSYSERR_ALLOCATED = MMSYSERR_BASE + 4 -MMSYSERR_INVALHANDLE = MMSYSERR_BASE + 5 -MMSYSERR_NODRIVER = MMSYSERR_BASE + 6 -MMSYSERR_NOMEM = MMSYSERR_BASE + 7 -MMSYSERR_NOTSUPPORTED = MMSYSERR_BASE + 8 -MMSYSERR_BADERRNUM = MMSYSERR_BASE + 9 -MMSYSERR_INVALFLAG = MMSYSERR_BASE + 10 -MMSYSERR_INVALPARAM = MMSYSERR_BASE + 11 -MMSYSERR_HANDLEBUSY = MMSYSERR_BASE + 12 -MMSYSERR_INVALIDALIAS = MMSYSERR_BASE + 13 -MMSYSERR_BADDB = MMSYSERR_BASE + 14 -MMSYSERR_KEYNOTFOUND = MMSYSERR_BASE + 15 -MMSYSERR_READERROR = MMSYSERR_BASE + 16 -MMSYSERR_WRITEERROR = MMSYSERR_BASE + 17 -MMSYSERR_DELETEERROR = MMSYSERR_BASE + 18 -MMSYSERR_VALNOTFOUND = MMSYSERR_BASE + 19 -MMSYSERR_NODRIVERCB = MMSYSERR_BASE + 20 -MMSYSERR_LASTERROR = MMSYSERR_BASE + 20 -DRV_LOAD = 0x0001 -DRV_ENABLE = 0x0002 -DRV_OPEN = 0x0003 -DRV_CLOSE = 0x0004 -DRV_DISABLE = 0x0005 -DRV_FREE = 0x0006 -DRV_CONFIGURE = 0x0007 -DRV_QUERYCONFIGURE = 0x0008 -DRV_INSTALL = 0x0009 -DRV_REMOVE = 0x000A -DRV_EXITSESSION = 0x000B -DRV_POWER = 0x000F -DRV_RESERVED = 0x0800 -DRV_USER = 0x4000 -DRVCNF_CANCEL = 0x0000 -DRVCNF_OK = 0x0001 -DRVCNF_RESTART = 0x0002 -DRV_CANCEL = DRVCNF_CANCEL -DRV_OK = DRVCNF_OK -DRV_RESTART = DRVCNF_RESTART -DRV_MCI_FIRST = DRV_RESERVED -DRV_MCI_LAST = DRV_RESERVED + 0xFFF -CALLBACK_TYPEMASK = 0x00070000 -CALLBACK_NULL = 0x00000000 -CALLBACK_WINDOW = 0x00010000 -CALLBACK_TASK = 0x00020000 -CALLBACK_FUNCTION = 0x00030000 -CALLBACK_THREAD = CALLBACK_TASK -CALLBACK_EVENT = 0x00050000 -SND_SYNC = 0x0000 -SND_ASYNC = 0x0001 -SND_NODEFAULT = 0x0002 -SND_MEMORY = 0x0004 -SND_LOOP = 0x0008 -SND_NOSTOP = 0x0010 -SND_NOWAIT = 0x00002000 -SND_ALIAS = 0x00010000 -SND_ALIAS_ID = 0x00110000 -SND_FILENAME = 0x00020000 -SND_RESOURCE = 0x00040004 -SND_PURGE = 0x0040 -SND_APPLICATION = 0x0080 -SND_ALIAS_START = 
0 -WAVERR_BADFORMAT = WAVERR_BASE + 0 -WAVERR_STILLPLAYING = WAVERR_BASE + 1 -WAVERR_UNPREPARED = WAVERR_BASE + 2 -WAVERR_SYNC = WAVERR_BASE + 3 -WAVERR_LASTERROR = WAVERR_BASE + 3 -WOM_OPEN = MM_WOM_OPEN -WOM_CLOSE = MM_WOM_CLOSE -WOM_DONE = MM_WOM_DONE -WIM_OPEN = MM_WIM_OPEN -WIM_CLOSE = MM_WIM_CLOSE -WIM_DATA = MM_WIM_DATA -WAVE_MAPPER = -1 # 0xFFFFFFFF -WAVE_FORMAT_QUERY = 0x0001 -WAVE_ALLOWSYNC = 0x0002 -WAVE_MAPPED = 0x0004 -WAVE_FORMAT_DIRECT = 0x0008 -WAVE_FORMAT_DIRECT_QUERY = WAVE_FORMAT_QUERY | WAVE_FORMAT_DIRECT -WHDR_DONE = 0x00000001 -WHDR_PREPARED = 0x00000002 -WHDR_BEGINLOOP = 0x00000004 -WHDR_ENDLOOP = 0x00000008 -WHDR_INQUEUE = 0x00000010 -WAVECAPS_PITCH = 0x0001 -WAVECAPS_PLAYBACKRATE = 0x0002 -WAVECAPS_VOLUME = 0x0004 -WAVECAPS_LRVOLUME = 0x0008 -WAVECAPS_SYNC = 0x0010 -WAVECAPS_SAMPLEACCURATE = 0x0020 -WAVECAPS_DIRECTSOUND = 0x0040 -WAVE_INVALIDFORMAT = 0x00000000 -WAVE_FORMAT_1M08 = 0x00000001 -WAVE_FORMAT_1S08 = 0x00000002 -WAVE_FORMAT_1M16 = 0x00000004 -WAVE_FORMAT_1S16 = 0x00000008 -WAVE_FORMAT_2M08 = 0x00000010 -WAVE_FORMAT_2S08 = 0x00000020 -WAVE_FORMAT_2M16 = 0x00000040 -WAVE_FORMAT_2S16 = 0x00000080 -WAVE_FORMAT_4M08 = 0x00000100 -WAVE_FORMAT_4S08 = 0x00000200 -WAVE_FORMAT_4M16 = 0x00000400 -WAVE_FORMAT_4S16 = 0x00000800 -WAVE_FORMAT_PCM = 1 -WAVE_FORMAT_IEEE_FLOAT = 3 -MIDIERR_UNPREPARED = MIDIERR_BASE + 0 -MIDIERR_STILLPLAYING = MIDIERR_BASE + 1 -MIDIERR_NOMAP = MIDIERR_BASE + 2 -MIDIERR_NOTREADY = MIDIERR_BASE + 3 -MIDIERR_NODEVICE = MIDIERR_BASE + 4 -MIDIERR_INVALIDSETUP = MIDIERR_BASE + 5 -MIDIERR_BADOPENMODE = MIDIERR_BASE + 6 -MIDIERR_DONT_CONTINUE = MIDIERR_BASE + 7 -MIDIERR_LASTERROR = MIDIERR_BASE + 7 -MIDIPATCHSIZE = 128 -MIM_OPEN = MM_MIM_OPEN -MIM_CLOSE = MM_MIM_CLOSE -MIM_DATA = MM_MIM_DATA -MIM_LONGDATA = MM_MIM_LONGDATA -MIM_ERROR = MM_MIM_ERROR -MIM_LONGERROR = MM_MIM_LONGERROR -MOM_OPEN = MM_MOM_OPEN -MOM_CLOSE = MM_MOM_CLOSE -MOM_DONE = MM_MOM_DONE -MIM_MOREDATA = MM_MIM_MOREDATA -MOM_POSITIONCB = MM_MOM_POSITIONCB 
-MIDI_IO_STATUS = 0x00000020 -MIDI_CACHE_ALL = 1 -MIDI_CACHE_BESTFIT = 2 -MIDI_CACHE_QUERY = 3 -MIDI_UNCACHE = 4 -MOD_MIDIPORT = 1 -MOD_SYNTH = 2 -MOD_SQSYNTH = 3 -MOD_FMSYNTH = 4 -MOD_MAPPER = 5 -MIDICAPS_VOLUME = 0x0001 -MIDICAPS_LRVOLUME = 0x0002 -MIDICAPS_CACHE = 0x0004 -MIDICAPS_STREAM = 0x0008 -MHDR_DONE = 0x00000001 -MHDR_PREPARED = 0x00000002 -MHDR_INQUEUE = 0x00000004 -MHDR_ISSTRM = 0x00000008 -MEVT_F_SHORT = 0x00000000 -MEVT_F_LONG = -2147483648 # 0x80000000 -MEVT_F_CALLBACK = 0x40000000 - - -def MEVT_EVENTTYPE(x): - return (BYTE)(((x) >> 24) & 0xFF) - - -def MEVT_EVENTPARM(x): - return (DWORD)((x) & 0x00FFFFFF) - - -MIDISTRM_ERROR = -2 -MIDIPROP_SET = -2147483648 # 0x80000000 -MIDIPROP_GET = 0x40000000 -MIDIPROP_TIMEDIV = 0x00000001 -MIDIPROP_TEMPO = 0x00000002 -AUXCAPS_CDAUDIO = 1 -AUXCAPS_AUXIN = 2 -AUXCAPS_VOLUME = 0x0001 -AUXCAPS_LRVOLUME = 0x0002 -MIXER_SHORT_NAME_CHARS = 16 -MIXER_LONG_NAME_CHARS = 64 -MIXERR_INVALLINE = MIXERR_BASE + 0 -MIXERR_INVALCONTROL = MIXERR_BASE + 1 -MIXERR_INVALVALUE = MIXERR_BASE + 2 -MIXERR_LASTERROR = MIXERR_BASE + 2 -MIXER_OBJECTF_HANDLE = -2147483648 # 0x80000000 -MIXER_OBJECTF_MIXER = 0x00000000 -MIXER_OBJECTF_HMIXER = MIXER_OBJECTF_HANDLE | MIXER_OBJECTF_MIXER -MIXER_OBJECTF_WAVEOUT = 0x10000000 -MIXER_OBJECTF_HWAVEOUT = MIXER_OBJECTF_HANDLE | MIXER_OBJECTF_WAVEOUT -MIXER_OBJECTF_WAVEIN = 0x20000000 -MIXER_OBJECTF_HWAVEIN = MIXER_OBJECTF_HANDLE | MIXER_OBJECTF_WAVEIN -MIXER_OBJECTF_MIDIOUT = 0x30000000 -MIXER_OBJECTF_HMIDIOUT = MIXER_OBJECTF_HANDLE | MIXER_OBJECTF_MIDIOUT -MIXER_OBJECTF_MIDIIN = 0x40000000 -MIXER_OBJECTF_HMIDIIN = MIXER_OBJECTF_HANDLE | MIXER_OBJECTF_MIDIIN -MIXER_OBJECTF_AUX = 0x50000000 -MIXERLINE_LINEF_ACTIVE = 0x00000001 -MIXERLINE_LINEF_DISCONNECTED = 0x00008000 -MIXERLINE_LINEF_SOURCE = -2147483648 # 0x80000000 -MIXERLINE_COMPONENTTYPE_DST_FIRST = 0x00000000 -MIXERLINE_COMPONENTTYPE_DST_UNDEFINED = MIXERLINE_COMPONENTTYPE_DST_FIRST + 0 -MIXERLINE_COMPONENTTYPE_DST_DIGITAL = 
MIXERLINE_COMPONENTTYPE_DST_FIRST + 1 -MIXERLINE_COMPONENTTYPE_DST_LINE = MIXERLINE_COMPONENTTYPE_DST_FIRST + 2 -MIXERLINE_COMPONENTTYPE_DST_MONITOR = MIXERLINE_COMPONENTTYPE_DST_FIRST + 3 -MIXERLINE_COMPONENTTYPE_DST_SPEAKERS = MIXERLINE_COMPONENTTYPE_DST_FIRST + 4 -MIXERLINE_COMPONENTTYPE_DST_HEADPHONES = MIXERLINE_COMPONENTTYPE_DST_FIRST + 5 -MIXERLINE_COMPONENTTYPE_DST_TELEPHONE = MIXERLINE_COMPONENTTYPE_DST_FIRST + 6 -MIXERLINE_COMPONENTTYPE_DST_WAVEIN = MIXERLINE_COMPONENTTYPE_DST_FIRST + 7 -MIXERLINE_COMPONENTTYPE_DST_VOICEIN = MIXERLINE_COMPONENTTYPE_DST_FIRST + 8 -MIXERLINE_COMPONENTTYPE_DST_LAST = MIXERLINE_COMPONENTTYPE_DST_FIRST + 8 -MIXERLINE_COMPONENTTYPE_SRC_FIRST = 0x00001000 -MIXERLINE_COMPONENTTYPE_SRC_UNDEFINED = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 0 -MIXERLINE_COMPONENTTYPE_SRC_DIGITAL = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 1 -MIXERLINE_COMPONENTTYPE_SRC_LINE = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 2 -MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 3 -MIXERLINE_COMPONENTTYPE_SRC_SYNTHESIZER = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 4 -MIXERLINE_COMPONENTTYPE_SRC_COMPACTDISC = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 5 -MIXERLINE_COMPONENTTYPE_SRC_TELEPHONE = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 6 -MIXERLINE_COMPONENTTYPE_SRC_PCSPEAKER = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 7 -MIXERLINE_COMPONENTTYPE_SRC_WAVEOUT = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 8 -MIXERLINE_COMPONENTTYPE_SRC_AUXILIARY = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 9 -MIXERLINE_COMPONENTTYPE_SRC_ANALOG = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 10 -MIXERLINE_COMPONENTTYPE_SRC_LAST = MIXERLINE_COMPONENTTYPE_SRC_FIRST + 10 -MIXERLINE_TARGETTYPE_UNDEFINED = 0 -MIXERLINE_TARGETTYPE_WAVEOUT = 1 -MIXERLINE_TARGETTYPE_WAVEIN = 2 -MIXERLINE_TARGETTYPE_MIDIOUT = 3 -MIXERLINE_TARGETTYPE_MIDIIN = 4 -MIXERLINE_TARGETTYPE_AUX = 5 -MIXER_GETLINEINFOF_DESTINATION = 0x00000000 -MIXER_GETLINEINFOF_SOURCE = 0x00000001 -MIXER_GETLINEINFOF_LINEID = 0x00000002 -MIXER_GETLINEINFOF_COMPONENTTYPE = 
0x00000003 -MIXER_GETLINEINFOF_TARGETTYPE = 0x00000004 -MIXER_GETLINEINFOF_QUERYMASK = 0x0000000F -MIXERCONTROL_CONTROLF_UNIFORM = 0x00000001 -MIXERCONTROL_CONTROLF_MULTIPLE = 0x00000002 -MIXERCONTROL_CONTROLF_DISABLED = -2147483648 # 0x80000000 -MIXERCONTROL_CT_CLASS_MASK = -268435456 # 0xF0000000 -MIXERCONTROL_CT_CLASS_CUSTOM = 0x00000000 -MIXERCONTROL_CT_CLASS_METER = 0x10000000 -MIXERCONTROL_CT_CLASS_SWITCH = 0x20000000 -MIXERCONTROL_CT_CLASS_NUMBER = 0x30000000 -MIXERCONTROL_CT_CLASS_SLIDER = 0x40000000 -MIXERCONTROL_CT_CLASS_FADER = 0x50000000 -MIXERCONTROL_CT_CLASS_TIME = 0x60000000 -MIXERCONTROL_CT_CLASS_LIST = 0x70000000 -MIXERCONTROL_CT_SUBCLASS_MASK = 0x0F000000 -MIXERCONTROL_CT_SC_SWITCH_BOOLEAN = 0x00000000 -MIXERCONTROL_CT_SC_SWITCH_BUTTON = 0x01000000 -MIXERCONTROL_CT_SC_METER_POLLED = 0x00000000 -MIXERCONTROL_CT_SC_TIME_MICROSECS = 0x00000000 -MIXERCONTROL_CT_SC_TIME_MILLISECS = 0x01000000 -MIXERCONTROL_CT_SC_LIST_SINGLE = 0x00000000 -MIXERCONTROL_CT_SC_LIST_MULTIPLE = 0x01000000 -MIXERCONTROL_CT_UNITS_MASK = 0x00FF0000 -MIXERCONTROL_CT_UNITS_CUSTOM = 0x00000000 -MIXERCONTROL_CT_UNITS_BOOLEAN = 0x00010000 -MIXERCONTROL_CT_UNITS_SIGNED = 0x00020000 -MIXERCONTROL_CT_UNITS_UNSIGNED = 0x00030000 -MIXERCONTROL_CT_UNITS_DECIBELS = 0x00040000 -MIXERCONTROL_CT_UNITS_PERCENT = 0x00050000 -MIXERCONTROL_CONTROLTYPE_CUSTOM = ( - MIXERCONTROL_CT_CLASS_CUSTOM | MIXERCONTROL_CT_UNITS_CUSTOM -) -MIXERCONTROL_CONTROLTYPE_BOOLEANMETER = ( - MIXERCONTROL_CT_CLASS_METER - | MIXERCONTROL_CT_SC_METER_POLLED - | MIXERCONTROL_CT_UNITS_BOOLEAN -) -MIXERCONTROL_CONTROLTYPE_SIGNEDMETER = ( - MIXERCONTROL_CT_CLASS_METER - | MIXERCONTROL_CT_SC_METER_POLLED - | MIXERCONTROL_CT_UNITS_SIGNED -) -MIXERCONTROL_CONTROLTYPE_PEAKMETER = MIXERCONTROL_CONTROLTYPE_SIGNEDMETER + 1 -MIXERCONTROL_CONTROLTYPE_UNSIGNEDMETER = ( - MIXERCONTROL_CT_CLASS_METER - | MIXERCONTROL_CT_SC_METER_POLLED - | MIXERCONTROL_CT_UNITS_UNSIGNED -) -MIXERCONTROL_CONTROLTYPE_BOOLEAN = ( - 
MIXERCONTROL_CT_CLASS_SWITCH - | MIXERCONTROL_CT_SC_SWITCH_BOOLEAN - | MIXERCONTROL_CT_UNITS_BOOLEAN -) -MIXERCONTROL_CONTROLTYPE_ONOFF = MIXERCONTROL_CONTROLTYPE_BOOLEAN + 1 -MIXERCONTROL_CONTROLTYPE_MUTE = MIXERCONTROL_CONTROLTYPE_BOOLEAN + 2 -MIXERCONTROL_CONTROLTYPE_MONO = MIXERCONTROL_CONTROLTYPE_BOOLEAN + 3 -MIXERCONTROL_CONTROLTYPE_LOUDNESS = MIXERCONTROL_CONTROLTYPE_BOOLEAN + 4 -MIXERCONTROL_CONTROLTYPE_STEREOENH = MIXERCONTROL_CONTROLTYPE_BOOLEAN + 5 -MIXERCONTROL_CONTROLTYPE_BUTTON = ( - MIXERCONTROL_CT_CLASS_SWITCH - | MIXERCONTROL_CT_SC_SWITCH_BUTTON - | MIXERCONTROL_CT_UNITS_BOOLEAN -) -MIXERCONTROL_CONTROLTYPE_DECIBELS = ( - MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_DECIBELS -) -MIXERCONTROL_CONTROLTYPE_SIGNED = ( - MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_SIGNED -) -MIXERCONTROL_CONTROLTYPE_UNSIGNED = ( - MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_UNSIGNED -) -MIXERCONTROL_CONTROLTYPE_PERCENT = ( - MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_PERCENT -) -MIXERCONTROL_CONTROLTYPE_SLIDER = ( - MIXERCONTROL_CT_CLASS_SLIDER | MIXERCONTROL_CT_UNITS_SIGNED -) -MIXERCONTROL_CONTROLTYPE_PAN = MIXERCONTROL_CONTROLTYPE_SLIDER + 1 -MIXERCONTROL_CONTROLTYPE_QSOUNDPAN = MIXERCONTROL_CONTROLTYPE_SLIDER + 2 -MIXERCONTROL_CONTROLTYPE_FADER = ( - MIXERCONTROL_CT_CLASS_FADER | MIXERCONTROL_CT_UNITS_UNSIGNED -) -MIXERCONTROL_CONTROLTYPE_VOLUME = MIXERCONTROL_CONTROLTYPE_FADER + 1 -MIXERCONTROL_CONTROLTYPE_BASS = MIXERCONTROL_CONTROLTYPE_FADER + 2 -MIXERCONTROL_CONTROLTYPE_TREBLE = MIXERCONTROL_CONTROLTYPE_FADER + 3 -MIXERCONTROL_CONTROLTYPE_EQUALIZER = MIXERCONTROL_CONTROLTYPE_FADER + 4 -MIXERCONTROL_CONTROLTYPE_SINGLESELECT = ( - MIXERCONTROL_CT_CLASS_LIST - | MIXERCONTROL_CT_SC_LIST_SINGLE - | MIXERCONTROL_CT_UNITS_BOOLEAN -) -MIXERCONTROL_CONTROLTYPE_MUX = MIXERCONTROL_CONTROLTYPE_SINGLESELECT + 1 -MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT = ( - MIXERCONTROL_CT_CLASS_LIST - | MIXERCONTROL_CT_SC_LIST_MULTIPLE - | 
MIXERCONTROL_CT_UNITS_BOOLEAN -) -MIXERCONTROL_CONTROLTYPE_MIXER = MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT + 1 -MIXERCONTROL_CONTROLTYPE_MICROTIME = ( - MIXERCONTROL_CT_CLASS_TIME - | MIXERCONTROL_CT_SC_TIME_MICROSECS - | MIXERCONTROL_CT_UNITS_UNSIGNED -) -MIXERCONTROL_CONTROLTYPE_MILLITIME = ( - MIXERCONTROL_CT_CLASS_TIME - | MIXERCONTROL_CT_SC_TIME_MILLISECS - | MIXERCONTROL_CT_UNITS_UNSIGNED -) -MIXER_GETLINECONTROLSF_ALL = 0x00000000 -MIXER_GETLINECONTROLSF_ONEBYID = 0x00000001 -MIXER_GETLINECONTROLSF_ONEBYTYPE = 0x00000002 -MIXER_GETLINECONTROLSF_QUERYMASK = 0x0000000F -MIXER_GETCONTROLDETAILSF_VALUE = 0x00000000 -MIXER_GETCONTROLDETAILSF_LISTTEXT = 0x00000001 -MIXER_GETCONTROLDETAILSF_QUERYMASK = 0x0000000F -MIXER_SETCONTROLDETAILSF_VALUE = 0x00000000 -MIXER_SETCONTROLDETAILSF_CUSTOM = 0x00000001 -MIXER_SETCONTROLDETAILSF_QUERYMASK = 0x0000000F -TIMERR_NOERROR = 0 -TIMERR_NOCANDO = TIMERR_BASE + 1 -TIMERR_STRUCT = TIMERR_BASE + 33 -TIME_ONESHOT = 0x0000 -TIME_PERIODIC = 0x0001 -TIME_CALLBACK_FUNCTION = 0x0000 -TIME_CALLBACK_EVENT_SET = 0x0010 -TIME_CALLBACK_EVENT_PULSE = 0x0020 -JOYERR_NOERROR = 0 -JOYERR_PARMS = JOYERR_BASE + 5 -JOYERR_NOCANDO = JOYERR_BASE + 6 -JOYERR_UNPLUGGED = JOYERR_BASE + 7 -JOY_BUTTON1 = 0x0001 -JOY_BUTTON2 = 0x0002 -JOY_BUTTON3 = 0x0004 -JOY_BUTTON4 = 0x0008 -JOY_BUTTON1CHG = 0x0100 -JOY_BUTTON2CHG = 0x0200 -JOY_BUTTON3CHG = 0x0400 -JOY_BUTTON4CHG = 0x0800 -JOY_BUTTON5 = 0x00000010 -JOY_BUTTON6 = 0x00000020 -JOY_BUTTON7 = 0x00000040 -JOY_BUTTON8 = 0x00000080 -JOY_BUTTON9 = 0x00000100 -JOY_BUTTON10 = 0x00000200 -JOY_BUTTON11 = 0x00000400 -JOY_BUTTON12 = 0x00000800 -JOY_BUTTON13 = 0x00001000 -JOY_BUTTON14 = 0x00002000 -JOY_BUTTON15 = 0x00004000 -JOY_BUTTON16 = 0x00008000 -JOY_BUTTON17 = 0x00010000 -JOY_BUTTON18 = 0x00020000 -JOY_BUTTON19 = 0x00040000 -JOY_BUTTON20 = 0x00080000 -JOY_BUTTON21 = 0x00100000 -JOY_BUTTON22 = 0x00200000 -JOY_BUTTON23 = 0x00400000 -JOY_BUTTON24 = 0x00800000 -JOY_BUTTON25 = 0x01000000 -JOY_BUTTON26 = 0x02000000 
-JOY_BUTTON27 = 0x04000000 -JOY_BUTTON28 = 0x08000000 -JOY_BUTTON29 = 0x10000000 -JOY_BUTTON30 = 0x20000000 -JOY_BUTTON31 = 0x40000000 -JOY_BUTTON32 = -2147483648 # 0x80000000 -JOY_POVFORWARD = 0 -JOY_POVRIGHT = 9000 -JOY_POVBACKWARD = 18000 -JOY_POVLEFT = 27000 -JOY_RETURNX = 0x00000001 -JOY_RETURNY = 0x00000002 -JOY_RETURNZ = 0x00000004 -JOY_RETURNR = 0x00000008 -JOY_RETURNU = 0x00000010 -JOY_RETURNV = 0x00000020 -JOY_RETURNPOV = 0x00000040 -JOY_RETURNBUTTONS = 0x00000080 -JOY_RETURNRAWDATA = 0x00000100 -JOY_RETURNPOVCTS = 0x00000200 -JOY_RETURNCENTERED = 0x00000400 -JOY_USEDEADZONE = 0x00000800 -JOY_RETURNALL = ( - JOY_RETURNX - | JOY_RETURNY - | JOY_RETURNZ - | JOY_RETURNR - | JOY_RETURNU - | JOY_RETURNV - | JOY_RETURNPOV - | JOY_RETURNBUTTONS -) -JOY_CAL_READALWAYS = 0x00010000 -JOY_CAL_READXYONLY = 0x00020000 -JOY_CAL_READ3 = 0x00040000 -JOY_CAL_READ4 = 0x00080000 -JOY_CAL_READXONLY = 0x00100000 -JOY_CAL_READYONLY = 0x00200000 -JOY_CAL_READ5 = 0x00400000 -JOY_CAL_READ6 = 0x00800000 -JOY_CAL_READZONLY = 0x01000000 -JOY_CAL_READRONLY = 0x02000000 -JOY_CAL_READUONLY = 0x04000000 -JOY_CAL_READVONLY = 0x08000000 -JOYSTICKID1 = 0 -JOYSTICKID2 = 1 -JOYCAPS_HASZ = 0x0001 -JOYCAPS_HASR = 0x0002 -JOYCAPS_HASU = 0x0004 -JOYCAPS_HASV = 0x0008 -JOYCAPS_HASPOV = 0x0010 -JOYCAPS_POV4DIR = 0x0020 -JOYCAPS_POVCTS = 0x0040 -MMIOERR_BASE = 256 -MMIOERR_FILENOTFOUND = MMIOERR_BASE + 1 -MMIOERR_OUTOFMEMORY = MMIOERR_BASE + 2 -MMIOERR_CANNOTOPEN = MMIOERR_BASE + 3 -MMIOERR_CANNOTCLOSE = MMIOERR_BASE + 4 -MMIOERR_CANNOTREAD = MMIOERR_BASE + 5 -MMIOERR_CANNOTWRITE = MMIOERR_BASE + 6 -MMIOERR_CANNOTSEEK = MMIOERR_BASE + 7 -MMIOERR_CANNOTEXPAND = MMIOERR_BASE + 8 -MMIOERR_CHUNKNOTFOUND = MMIOERR_BASE + 9 -MMIOERR_UNBUFFERED = MMIOERR_BASE + 10 -MMIOERR_PATHNOTFOUND = MMIOERR_BASE + 11 -MMIOERR_ACCESSDENIED = MMIOERR_BASE + 12 -MMIOERR_SHARINGVIOLATION = MMIOERR_BASE + 13 -MMIOERR_NETWORKERROR = MMIOERR_BASE + 14 -MMIOERR_TOOMANYOPENFILES = MMIOERR_BASE + 15 -MMIOERR_INVALIDFILE = 
MMIOERR_BASE + 16 -CFSEPCHAR = ord("+") -MMIO_RWMODE = 0x00000003 -MMIO_SHAREMODE = 0x00000070 -MMIO_CREATE = 0x00001000 -MMIO_PARSE = 0x00000100 -MMIO_DELETE = 0x00000200 -MMIO_EXIST = 0x00004000 -MMIO_ALLOCBUF = 0x00010000 -MMIO_GETTEMP = 0x00020000 -MMIO_DIRTY = 0x10000000 -MMIO_READ = 0x00000000 -MMIO_WRITE = 0x00000001 -MMIO_READWRITE = 0x00000002 -MMIO_COMPAT = 0x00000000 -MMIO_EXCLUSIVE = 0x00000010 -MMIO_DENYWRITE = 0x00000020 -MMIO_DENYREAD = 0x00000030 -MMIO_DENYNONE = 0x00000040 -MMIO_FHOPEN = 0x0010 -MMIO_EMPTYBUF = 0x0010 -MMIO_TOUPPER = 0x0010 -MMIO_INSTALLPROC = 0x00010000 -MMIO_GLOBALPROC = 0x10000000 -MMIO_REMOVEPROC = 0x00020000 -MMIO_UNICODEPROC = 0x01000000 -MMIO_FINDPROC = 0x00040000 -MMIO_FINDCHUNK = 0x0010 -MMIO_FINDRIFF = 0x0020 -MMIO_FINDLIST = 0x0040 -MMIO_CREATERIFF = 0x0020 -MMIO_CREATELIST = 0x0040 -MMIOM_READ = MMIO_READ -MMIOM_WRITE = MMIO_WRITE -MMIOM_SEEK = 2 -MMIOM_OPEN = 3 -MMIOM_CLOSE = 4 -MMIOM_WRITEFLUSH = 5 -MMIOM_RENAME = 6 -MMIOM_USER = 0x8000 -SEEK_SET = 0 -SEEK_CUR = 1 -SEEK_END = 2 -MMIO_DEFAULTBUFFER = 8192 -MCIERR_INVALID_DEVICE_ID = MCIERR_BASE + 1 -MCIERR_UNRECOGNIZED_KEYWORD = MCIERR_BASE + 3 -MCIERR_UNRECOGNIZED_COMMAND = MCIERR_BASE + 5 -MCIERR_HARDWARE = MCIERR_BASE + 6 -MCIERR_INVALID_DEVICE_NAME = MCIERR_BASE + 7 -MCIERR_OUT_OF_MEMORY = MCIERR_BASE + 8 -MCIERR_DEVICE_OPEN = MCIERR_BASE + 9 -MCIERR_CANNOT_LOAD_DRIVER = MCIERR_BASE + 10 -MCIERR_MISSING_COMMAND_STRING = MCIERR_BASE + 11 -MCIERR_PARAM_OVERFLOW = MCIERR_BASE + 12 -MCIERR_MISSING_STRING_ARGUMENT = MCIERR_BASE + 13 -MCIERR_BAD_INTEGER = MCIERR_BASE + 14 -MCIERR_PARSER_INTERNAL = MCIERR_BASE + 15 -MCIERR_DRIVER_INTERNAL = MCIERR_BASE + 16 -MCIERR_MISSING_PARAMETER = MCIERR_BASE + 17 -MCIERR_UNSUPPORTED_FUNCTION = MCIERR_BASE + 18 -MCIERR_FILE_NOT_FOUND = MCIERR_BASE + 19 -MCIERR_DEVICE_NOT_READY = MCIERR_BASE + 20 -MCIERR_INTERNAL = MCIERR_BASE + 21 -MCIERR_DRIVER = MCIERR_BASE + 22 -MCIERR_CANNOT_USE_ALL = MCIERR_BASE + 23 -MCIERR_MULTIPLE = 
MCIERR_BASE + 24 -MCIERR_EXTENSION_NOT_FOUND = MCIERR_BASE + 25 -MCIERR_OUTOFRANGE = MCIERR_BASE + 26 -MCIERR_FLAGS_NOT_COMPATIBLE = MCIERR_BASE + 28 -MCIERR_FILE_NOT_SAVED = MCIERR_BASE + 30 -MCIERR_DEVICE_TYPE_REQUIRED = MCIERR_BASE + 31 -MCIERR_DEVICE_LOCKED = MCIERR_BASE + 32 -MCIERR_DUPLICATE_ALIAS = MCIERR_BASE + 33 -MCIERR_BAD_CONSTANT = MCIERR_BASE + 34 -MCIERR_MUST_USE_SHAREABLE = MCIERR_BASE + 35 -MCIERR_MISSING_DEVICE_NAME = MCIERR_BASE + 36 -MCIERR_BAD_TIME_FORMAT = MCIERR_BASE + 37 -MCIERR_NO_CLOSING_QUOTE = MCIERR_BASE + 38 -MCIERR_DUPLICATE_FLAGS = MCIERR_BASE + 39 -MCIERR_INVALID_FILE = MCIERR_BASE + 40 -MCIERR_NULL_PARAMETER_BLOCK = MCIERR_BASE + 41 -MCIERR_UNNAMED_RESOURCE = MCIERR_BASE + 42 -MCIERR_NEW_REQUIRES_ALIAS = MCIERR_BASE + 43 -MCIERR_NOTIFY_ON_AUTO_OPEN = MCIERR_BASE + 44 -MCIERR_NO_ELEMENT_ALLOWED = MCIERR_BASE + 45 -MCIERR_NONAPPLICABLE_FUNCTION = MCIERR_BASE + 46 -MCIERR_ILLEGAL_FOR_AUTO_OPEN = MCIERR_BASE + 47 -MCIERR_FILENAME_REQUIRED = MCIERR_BASE + 48 -MCIERR_EXTRA_CHARACTERS = MCIERR_BASE + 49 -MCIERR_DEVICE_NOT_INSTALLED = MCIERR_BASE + 50 -MCIERR_GET_CD = MCIERR_BASE + 51 -MCIERR_SET_CD = MCIERR_BASE + 52 -MCIERR_SET_DRIVE = MCIERR_BASE + 53 -MCIERR_DEVICE_LENGTH = MCIERR_BASE + 54 -MCIERR_DEVICE_ORD_LENGTH = MCIERR_BASE + 55 -MCIERR_NO_INTEGER = MCIERR_BASE + 56 -MCIERR_WAVE_OUTPUTSINUSE = MCIERR_BASE + 64 -MCIERR_WAVE_SETOUTPUTINUSE = MCIERR_BASE + 65 -MCIERR_WAVE_INPUTSINUSE = MCIERR_BASE + 66 -MCIERR_WAVE_SETINPUTINUSE = MCIERR_BASE + 67 -MCIERR_WAVE_OUTPUTUNSPECIFIED = MCIERR_BASE + 68 -MCIERR_WAVE_INPUTUNSPECIFIED = MCIERR_BASE + 69 -MCIERR_WAVE_OUTPUTSUNSUITABLE = MCIERR_BASE + 70 -MCIERR_WAVE_SETOUTPUTUNSUITABLE = MCIERR_BASE + 71 -MCIERR_WAVE_INPUTSUNSUITABLE = MCIERR_BASE + 72 -MCIERR_WAVE_SETINPUTUNSUITABLE = MCIERR_BASE + 73 -MCIERR_SEQ_DIV_INCOMPATIBLE = MCIERR_BASE + 80 -MCIERR_SEQ_PORT_INUSE = MCIERR_BASE + 81 -MCIERR_SEQ_PORT_NONEXISTENT = MCIERR_BASE + 82 -MCIERR_SEQ_PORT_MAPNODEVICE = MCIERR_BASE + 83 
-MCIERR_SEQ_PORT_MISCERROR = MCIERR_BASE + 84 -MCIERR_SEQ_TIMER = MCIERR_BASE + 85 -MCIERR_SEQ_PORTUNSPECIFIED = MCIERR_BASE + 86 -MCIERR_SEQ_NOMIDIPRESENT = MCIERR_BASE + 87 -MCIERR_NO_WINDOW = MCIERR_BASE + 90 -MCIERR_CREATEWINDOW = MCIERR_BASE + 91 -MCIERR_FILE_READ = MCIERR_BASE + 92 -MCIERR_FILE_WRITE = MCIERR_BASE + 93 -MCIERR_NO_IDENTITY = MCIERR_BASE + 94 -MCIERR_CUSTOM_DRIVER_BASE = MCIERR_BASE + 256 -MCI_FIRST = DRV_MCI_FIRST -MCI_OPEN = 0x0803 -MCI_CLOSE = 0x0804 -MCI_ESCAPE = 0x0805 -MCI_PLAY = 0x0806 -MCI_SEEK = 0x0807 -MCI_STOP = 0x0808 -MCI_PAUSE = 0x0809 -MCI_INFO = 0x080A -MCI_GETDEVCAPS = 0x080B -MCI_SPIN = 0x080C -MCI_SET = 0x080D -MCI_STEP = 0x080E -MCI_RECORD = 0x080F -MCI_SYSINFO = 0x0810 -MCI_BREAK = 0x0811 -MCI_SAVE = 0x0813 -MCI_STATUS = 0x0814 -MCI_CUE = 0x0830 -MCI_REALIZE = 0x0840 -MCI_WINDOW = 0x0841 -MCI_PUT = 0x0842 -MCI_WHERE = 0x0843 -MCI_FREEZE = 0x0844 -MCI_UNFREEZE = 0x0845 -MCI_LOAD = 0x0850 -MCI_CUT = 0x0851 -MCI_COPY = 0x0852 -MCI_PASTE = 0x0853 -MCI_UPDATE = 0x0854 -MCI_RESUME = 0x0855 -MCI_DELETE = 0x0856 -MCI_USER_MESSAGES = DRV_MCI_FIRST + 0x400 -MCI_LAST = 0x0FFF -MCI_DEVTYPE_VCR = 513 -MCI_DEVTYPE_VIDEODISC = 514 -MCI_DEVTYPE_OVERLAY = 515 -MCI_DEVTYPE_CD_AUDIO = 516 -MCI_DEVTYPE_DAT = 517 -MCI_DEVTYPE_SCANNER = 518 -MCI_DEVTYPE_ANIMATION = 519 -MCI_DEVTYPE_DIGITAL_VIDEO = 520 -MCI_DEVTYPE_OTHER = 521 -MCI_DEVTYPE_WAVEFORM_AUDIO = 522 -MCI_DEVTYPE_SEQUENCER = 523 -MCI_DEVTYPE_FIRST = MCI_DEVTYPE_VCR -MCI_DEVTYPE_LAST = MCI_DEVTYPE_SEQUENCER -MCI_DEVTYPE_FIRST_USER = 0x1000 -MCI_MODE_NOT_READY = MCI_STRING_OFFSET + 12 -MCI_MODE_STOP = MCI_STRING_OFFSET + 13 -MCI_MODE_PLAY = MCI_STRING_OFFSET + 14 -MCI_MODE_RECORD = MCI_STRING_OFFSET + 15 -MCI_MODE_SEEK = MCI_STRING_OFFSET + 16 -MCI_MODE_PAUSE = MCI_STRING_OFFSET + 17 -MCI_MODE_OPEN = MCI_STRING_OFFSET + 18 -MCI_FORMAT_MILLISECONDS = 0 -MCI_FORMAT_HMS = 1 -MCI_FORMAT_MSF = 2 -MCI_FORMAT_FRAMES = 3 -MCI_FORMAT_SMPTE_24 = 4 -MCI_FORMAT_SMPTE_25 = 5 -MCI_FORMAT_SMPTE_30 = 6 
-MCI_FORMAT_SMPTE_30DROP = 7 -MCI_FORMAT_BYTES = 8 -MCI_FORMAT_SAMPLES = 9 -MCI_FORMAT_TMSF = 10 - - -def MCI_MSF_MINUTE(msf): - return (BYTE)(msf) - - -def MCI_MSF_SECOND(msf): - return (BYTE)(((WORD)(msf)) >> 8) - - -def MCI_MSF_FRAME(msf): - return (BYTE)((msf) >> 16) - - -def MCI_TMSF_TRACK(tmsf): - return (BYTE)(tmsf) - - -def MCI_TMSF_MINUTE(tmsf): - return (BYTE)(((WORD)(tmsf)) >> 8) - - -def MCI_TMSF_SECOND(tmsf): - return (BYTE)((tmsf) >> 16) - - -def MCI_TMSF_FRAME(tmsf): - return (BYTE)((tmsf) >> 24) - - -def MCI_HMS_HOUR(hms): - return (BYTE)(hms) - - -def MCI_HMS_MINUTE(hms): - return (BYTE)(((WORD)(hms)) >> 8) - - -def MCI_HMS_SECOND(hms): - return (BYTE)((hms) >> 16) - - -MCI_NOTIFY_SUCCESSFUL = 0x0001 -MCI_NOTIFY_SUPERSEDED = 0x0002 -MCI_NOTIFY_ABORTED = 0x0004 -MCI_NOTIFY_FAILURE = 0x0008 -MCI_NOTIFY = 0x00000001 -MCI_WAIT = 0x00000002 -MCI_FROM = 0x00000004 -MCI_TO = 0x00000008 -MCI_TRACK = 0x00000010 -MCI_OPEN_SHAREABLE = 0x00000100 -MCI_OPEN_ELEMENT = 0x00000200 -MCI_OPEN_ALIAS = 0x00000400 -MCI_OPEN_ELEMENT_ID = 0x00000800 -MCI_OPEN_TYPE_ID = 0x00001000 -MCI_OPEN_TYPE = 0x00002000 -MCI_SEEK_TO_START = 0x00000100 -MCI_SEEK_TO_END = 0x00000200 -MCI_STATUS_ITEM = 0x00000100 -MCI_STATUS_START = 0x00000200 -MCI_STATUS_LENGTH = 0x00000001 -MCI_STATUS_POSITION = 0x00000002 -MCI_STATUS_NUMBER_OF_TRACKS = 0x00000003 -MCI_STATUS_MODE = 0x00000004 -MCI_STATUS_MEDIA_PRESENT = 0x00000005 -MCI_STATUS_TIME_FORMAT = 0x00000006 -MCI_STATUS_READY = 0x00000007 -MCI_STATUS_CURRENT_TRACK = 0x00000008 -MCI_INFO_PRODUCT = 0x00000100 -MCI_INFO_FILE = 0x00000200 -MCI_INFO_MEDIA_UPC = 0x00000400 -MCI_INFO_MEDIA_IDENTITY = 0x00000800 -MCI_INFO_NAME = 0x00001000 -MCI_INFO_COPYRIGHT = 0x00002000 -MCI_GETDEVCAPS_ITEM = 0x00000100 -MCI_GETDEVCAPS_CAN_RECORD = 0x00000001 -MCI_GETDEVCAPS_HAS_AUDIO = 0x00000002 -MCI_GETDEVCAPS_HAS_VIDEO = 0x00000003 -MCI_GETDEVCAPS_DEVICE_TYPE = 0x00000004 -MCI_GETDEVCAPS_USES_FILES = 0x00000005 -MCI_GETDEVCAPS_COMPOUND_DEVICE = 0x00000006 
-MCI_GETDEVCAPS_CAN_EJECT = 0x00000007 -MCI_GETDEVCAPS_CAN_PLAY = 0x00000008 -MCI_GETDEVCAPS_CAN_SAVE = 0x00000009 -MCI_SYSINFO_QUANTITY = 0x00000100 -MCI_SYSINFO_OPEN = 0x00000200 -MCI_SYSINFO_NAME = 0x00000400 -MCI_SYSINFO_INSTALLNAME = 0x00000800 -MCI_SET_DOOR_OPEN = 0x00000100 -MCI_SET_DOOR_CLOSED = 0x00000200 -MCI_SET_TIME_FORMAT = 0x00000400 -MCI_SET_AUDIO = 0x00000800 -MCI_SET_VIDEO = 0x00001000 -MCI_SET_ON = 0x00002000 -MCI_SET_OFF = 0x00004000 -MCI_SET_AUDIO_ALL = 0x00000000 -MCI_SET_AUDIO_LEFT = 0x00000001 -MCI_SET_AUDIO_RIGHT = 0x00000002 -MCI_BREAK_KEY = 0x00000100 -MCI_BREAK_HWND = 0x00000200 -MCI_BREAK_OFF = 0x00000400 -MCI_RECORD_INSERT = 0x00000100 -MCI_RECORD_OVERWRITE = 0x00000200 -MCI_SAVE_FILE = 0x00000100 -MCI_LOAD_FILE = 0x00000100 -MCI_VD_MODE_PARK = MCI_VD_OFFSET + 1 -MCI_VD_MEDIA_CLV = MCI_VD_OFFSET + 2 -MCI_VD_MEDIA_CAV = MCI_VD_OFFSET + 3 -MCI_VD_MEDIA_OTHER = MCI_VD_OFFSET + 4 -MCI_VD_FORMAT_TRACK = 0x4001 -MCI_VD_PLAY_REVERSE = 0x00010000 -MCI_VD_PLAY_FAST = 0x00020000 -MCI_VD_PLAY_SPEED = 0x00040000 -MCI_VD_PLAY_SCAN = 0x00080000 -MCI_VD_PLAY_SLOW = 0x00100000 -MCI_VD_SEEK_REVERSE = 0x00010000 -MCI_VD_STATUS_SPEED = 0x00004002 -MCI_VD_STATUS_FORWARD = 0x00004003 -MCI_VD_STATUS_MEDIA_TYPE = 0x00004004 -MCI_VD_STATUS_SIDE = 0x00004005 -MCI_VD_STATUS_DISC_SIZE = 0x00004006 -MCI_VD_GETDEVCAPS_CLV = 0x00010000 -MCI_VD_GETDEVCAPS_CAV = 0x00020000 -MCI_VD_SPIN_UP = 0x00010000 -MCI_VD_SPIN_DOWN = 0x00020000 -MCI_VD_GETDEVCAPS_CAN_REVERSE = 0x00004002 -MCI_VD_GETDEVCAPS_FAST_RATE = 0x00004003 -MCI_VD_GETDEVCAPS_SLOW_RATE = 0x00004004 -MCI_VD_GETDEVCAPS_NORMAL_RATE = 0x00004005 -MCI_VD_STEP_FRAMES = 0x00010000 -MCI_VD_STEP_REVERSE = 0x00020000 -MCI_VD_ESCAPE_STRING = 0x00000100 -MCI_CDA_STATUS_TYPE_TRACK = 0x00004001 -MCI_CDA_TRACK_AUDIO = MCI_CD_OFFSET + 0 -MCI_CDA_TRACK_OTHER = MCI_CD_OFFSET + 1 -MCI_WAVE_PCM = MCI_WAVE_OFFSET + 0 -MCI_WAVE_MAPPER = MCI_WAVE_OFFSET + 1 -MCI_WAVE_OPEN_BUFFER = 0x00010000 -MCI_WAVE_SET_FORMATTAG = 0x00010000 
-MCI_WAVE_SET_CHANNELS = 0x00020000 -MCI_WAVE_SET_SAMPLESPERSEC = 0x00040000 -MCI_WAVE_SET_AVGBYTESPERSEC = 0x00080000 -MCI_WAVE_SET_BLOCKALIGN = 0x00100000 -MCI_WAVE_SET_BITSPERSAMPLE = 0x00200000 -MCI_WAVE_INPUT = 0x00400000 -MCI_WAVE_OUTPUT = 0x00800000 -MCI_WAVE_STATUS_FORMATTAG = 0x00004001 -MCI_WAVE_STATUS_CHANNELS = 0x00004002 -MCI_WAVE_STATUS_SAMPLESPERSEC = 0x00004003 -MCI_WAVE_STATUS_AVGBYTESPERSEC = 0x00004004 -MCI_WAVE_STATUS_BLOCKALIGN = 0x00004005 -MCI_WAVE_STATUS_BITSPERSAMPLE = 0x00004006 -MCI_WAVE_STATUS_LEVEL = 0x00004007 -MCI_WAVE_SET_ANYINPUT = 0x04000000 -MCI_WAVE_SET_ANYOUTPUT = 0x08000000 -MCI_WAVE_GETDEVCAPS_INPUTS = 0x00004001 -MCI_WAVE_GETDEVCAPS_OUTPUTS = 0x00004002 -MCI_SEQ_DIV_PPQN = 0 + MCI_SEQ_OFFSET -MCI_SEQ_DIV_SMPTE_24 = 1 + MCI_SEQ_OFFSET -MCI_SEQ_DIV_SMPTE_25 = 2 + MCI_SEQ_OFFSET -MCI_SEQ_DIV_SMPTE_30DROP = 3 + MCI_SEQ_OFFSET -MCI_SEQ_DIV_SMPTE_30 = 4 + MCI_SEQ_OFFSET -MCI_SEQ_FORMAT_SONGPTR = 0x4001 -MCI_SEQ_FILE = 0x4002 -MCI_SEQ_MIDI = 0x4003 -MCI_SEQ_SMPTE = 0x4004 -MCI_SEQ_NONE = 65533 -MCI_SEQ_MAPPER = 65535 -MCI_SEQ_STATUS_TEMPO = 0x00004002 -MCI_SEQ_STATUS_PORT = 0x00004003 -MCI_SEQ_STATUS_SLAVE = 0x00004007 -MCI_SEQ_STATUS_MASTER = 0x00004008 -MCI_SEQ_STATUS_OFFSET = 0x00004009 -MCI_SEQ_STATUS_DIVTYPE = 0x0000400A -MCI_SEQ_STATUS_NAME = 0x0000400B -MCI_SEQ_STATUS_COPYRIGHT = 0x0000400C -MCI_SEQ_SET_TEMPO = 0x00010000 -MCI_SEQ_SET_PORT = 0x00020000 -MCI_SEQ_SET_SLAVE = 0x00040000 -MCI_SEQ_SET_MASTER = 0x00080000 -MCI_SEQ_SET_OFFSET = 0x01000000 -MCI_ANIM_OPEN_WS = 0x00010000 -MCI_ANIM_OPEN_PARENT = 0x00020000 -MCI_ANIM_OPEN_NOSTATIC = 0x00040000 -MCI_ANIM_PLAY_SPEED = 0x00010000 -MCI_ANIM_PLAY_REVERSE = 0x00020000 -MCI_ANIM_PLAY_FAST = 0x00040000 -MCI_ANIM_PLAY_SLOW = 0x00080000 -MCI_ANIM_PLAY_SCAN = 0x00100000 -MCI_ANIM_STEP_REVERSE = 0x00010000 -MCI_ANIM_STEP_FRAMES = 0x00020000 -MCI_ANIM_STATUS_SPEED = 0x00004001 -MCI_ANIM_STATUS_FORWARD = 0x00004002 -MCI_ANIM_STATUS_HWND = 0x00004003 -MCI_ANIM_STATUS_HPAL = 0x00004004 
-MCI_ANIM_STATUS_STRETCH = 0x00004005 -MCI_ANIM_INFO_TEXT = 0x00010000 -MCI_ANIM_GETDEVCAPS_CAN_REVERSE = 0x00004001 -MCI_ANIM_GETDEVCAPS_FAST_RATE = 0x00004002 -MCI_ANIM_GETDEVCAPS_SLOW_RATE = 0x00004003 -MCI_ANIM_GETDEVCAPS_NORMAL_RATE = 0x00004004 -MCI_ANIM_GETDEVCAPS_PALETTES = 0x00004006 -MCI_ANIM_GETDEVCAPS_CAN_STRETCH = 0x00004007 -MCI_ANIM_GETDEVCAPS_MAX_WINDOWS = 0x00004008 -MCI_ANIM_REALIZE_NORM = 0x00010000 -MCI_ANIM_REALIZE_BKGD = 0x00020000 -MCI_ANIM_WINDOW_HWND = 0x00010000 -MCI_ANIM_WINDOW_STATE = 0x00040000 -MCI_ANIM_WINDOW_TEXT = 0x00080000 -MCI_ANIM_WINDOW_ENABLE_STRETCH = 0x00100000 -MCI_ANIM_WINDOW_DISABLE_STRETCH = 0x00200000 -MCI_ANIM_WINDOW_DEFAULT = 0x00000000 -MCI_ANIM_RECT = 0x00010000 -MCI_ANIM_PUT_SOURCE = 0x00020000 -MCI_ANIM_PUT_DESTINATION = 0x00040000 -MCI_ANIM_WHERE_SOURCE = 0x00020000 -MCI_ANIM_WHERE_DESTINATION = 0x00040000 -MCI_ANIM_UPDATE_HDC = 0x00020000 -MCI_OVLY_OPEN_WS = 0x00010000 -MCI_OVLY_OPEN_PARENT = 0x00020000 -MCI_OVLY_STATUS_HWND = 0x00004001 -MCI_OVLY_STATUS_STRETCH = 0x00004002 -MCI_OVLY_INFO_TEXT = 0x00010000 -MCI_OVLY_GETDEVCAPS_CAN_STRETCH = 0x00004001 -MCI_OVLY_GETDEVCAPS_CAN_FREEZE = 0x00004002 -MCI_OVLY_GETDEVCAPS_MAX_WINDOWS = 0x00004003 -MCI_OVLY_WINDOW_HWND = 0x00010000 -MCI_OVLY_WINDOW_STATE = 0x00040000 -MCI_OVLY_WINDOW_TEXT = 0x00080000 -MCI_OVLY_WINDOW_ENABLE_STRETCH = 0x00100000 -MCI_OVLY_WINDOW_DISABLE_STRETCH = 0x00200000 -MCI_OVLY_WINDOW_DEFAULT = 0x00000000 -MCI_OVLY_RECT = 0x00010000 -MCI_OVLY_PUT_SOURCE = 0x00020000 -MCI_OVLY_PUT_DESTINATION = 0x00040000 -MCI_OVLY_PUT_FRAME = 0x00080000 -MCI_OVLY_PUT_VIDEO = 0x00100000 -MCI_OVLY_WHERE_SOURCE = 0x00020000 -MCI_OVLY_WHERE_DESTINATION = 0x00040000 -MCI_OVLY_WHERE_FRAME = 0x00080000 -MCI_OVLY_WHERE_VIDEO = 0x00100000 -SELECTDIB = 41 - - -def DIBINDEX(n): - return MAKELONG((n), 0x10FF) diff --git a/lib/win32/lib/netbios.py b/lib/win32/lib/netbios.py deleted file mode 100644 index dca4dee6..00000000 --- a/lib/win32/lib/netbios.py +++ /dev/null @@ 
-1,304 +0,0 @@ -import struct -import sys - -import win32wnet - -# Constants generated by h2py from nb30.h -NCBNAMSZ = 16 -MAX_LANA = 254 -NAME_FLAGS_MASK = 0x87 -GROUP_NAME = 0x80 -UNIQUE_NAME = 0x00 -REGISTERING = 0x00 -REGISTERED = 0x04 -DEREGISTERED = 0x05 -DUPLICATE = 0x06 -DUPLICATE_DEREG = 0x07 -LISTEN_OUTSTANDING = 0x01 -CALL_PENDING = 0x02 -SESSION_ESTABLISHED = 0x03 -HANGUP_PENDING = 0x04 -HANGUP_COMPLETE = 0x05 -SESSION_ABORTED = 0x06 -ALL_TRANSPORTS = "M\0\0\0" -MS_NBF = "MNBF" -NCBCALL = 0x10 -NCBLISTEN = 0x11 -NCBHANGUP = 0x12 -NCBSEND = 0x14 -NCBRECV = 0x15 -NCBRECVANY = 0x16 -NCBCHAINSEND = 0x17 -NCBDGSEND = 0x20 -NCBDGRECV = 0x21 -NCBDGSENDBC = 0x22 -NCBDGRECVBC = 0x23 -NCBADDNAME = 0x30 -NCBDELNAME = 0x31 -NCBRESET = 0x32 -NCBASTAT = 0x33 -NCBSSTAT = 0x34 -NCBCANCEL = 0x35 -NCBADDGRNAME = 0x36 -NCBENUM = 0x37 -NCBUNLINK = 0x70 -NCBSENDNA = 0x71 -NCBCHAINSENDNA = 0x72 -NCBLANSTALERT = 0x73 -NCBACTION = 0x77 -NCBFINDNAME = 0x78 -NCBTRACE = 0x79 -ASYNCH = 0x80 -NRC_GOODRET = 0x00 -NRC_BUFLEN = 0x01 -NRC_ILLCMD = 0x03 -NRC_CMDTMO = 0x05 -NRC_INCOMP = 0x06 -NRC_BADDR = 0x07 -NRC_SNUMOUT = 0x08 -NRC_NORES = 0x09 -NRC_SCLOSED = 0x0A -NRC_CMDCAN = 0x0B -NRC_DUPNAME = 0x0D -NRC_NAMTFUL = 0x0E -NRC_ACTSES = 0x0F -NRC_LOCTFUL = 0x11 -NRC_REMTFUL = 0x12 -NRC_ILLNN = 0x13 -NRC_NOCALL = 0x14 -NRC_NOWILD = 0x15 -NRC_INUSE = 0x16 -NRC_NAMERR = 0x17 -NRC_SABORT = 0x18 -NRC_NAMCONF = 0x19 -NRC_IFBUSY = 0x21 -NRC_TOOMANY = 0x22 -NRC_BRIDGE = 0x23 -NRC_CANOCCR = 0x24 -NRC_CANCEL = 0x26 -NRC_DUPENV = 0x30 -NRC_ENVNOTDEF = 0x34 -NRC_OSRESNOTAV = 0x35 -NRC_MAXAPPS = 0x36 -NRC_NOSAPS = 0x37 -NRC_NORESOURCES = 0x38 -NRC_INVADDRESS = 0x39 -NRC_INVDDID = 0x3B -NRC_LOCKFAIL = 0x3C -NRC_OPENERR = 0x3F -NRC_SYSTEM = 0x40 -NRC_PENDING = 0xFF - - -UCHAR = "B" -WORD = "H" -DWORD = "I" -USHORT = "H" -ULONG = "I" - -ADAPTER_STATUS_ITEMS = [ - ("6s", "adapter_address"), - (UCHAR, "rev_major"), - (UCHAR, "reserved0"), - (UCHAR, "adapter_type"), - (UCHAR, "rev_minor"), - (WORD, 
"duration"), - (WORD, "frmr_recv"), - (WORD, "frmr_xmit"), - (WORD, "iframe_recv_err"), - (WORD, "xmit_aborts"), - (DWORD, "xmit_success"), - (DWORD, "recv_success"), - (WORD, "iframe_xmit_err"), - (WORD, "recv_buff_unavail"), - (WORD, "t1_timeouts"), - (WORD, "ti_timeouts"), - (DWORD, "reserved1"), - (WORD, "free_ncbs"), - (WORD, "max_cfg_ncbs"), - (WORD, "max_ncbs"), - (WORD, "xmit_buf_unavail"), - (WORD, "max_dgram_size"), - (WORD, "pending_sess"), - (WORD, "max_cfg_sess"), - (WORD, "max_sess"), - (WORD, "max_sess_pkt_size"), - (WORD, "name_count"), -] - -NAME_BUFFER_ITEMS = [ - (str(NCBNAMSZ) + "s", "name"), - (UCHAR, "name_num"), - (UCHAR, "name_flags"), -] - -SESSION_HEADER_ITEMS = [ - (UCHAR, "sess_name"), - (UCHAR, "num_sess"), - (UCHAR, "rcv_dg_outstanding"), - (UCHAR, "rcv_any_outstanding"), -] - -SESSION_BUFFER_ITEMS = [ - (UCHAR, "lsn"), - (UCHAR, "state"), - (str(NCBNAMSZ) + "s", "local_name"), - (str(NCBNAMSZ) + "s", "remote_name"), - (UCHAR, "rcvs_outstanding"), - (UCHAR, "sends_outstanding"), -] - -LANA_ENUM_ITEMS = [ - ("B", "length"), # Number of valid entries in lana[] - (str(MAX_LANA + 1) + "s", "lana"), -] - -FIND_NAME_HEADER_ITEMS = [ - (WORD, "node_count"), - (UCHAR, "reserved"), - (UCHAR, "unique_group"), -] - -FIND_NAME_BUFFER_ITEMS = [ - (UCHAR, "length"), - (UCHAR, "access_control"), - (UCHAR, "frame_control"), - ("6s", "destination_addr"), - ("6s", "source_addr"), - ("18s", "routing_info"), -] - -ACTION_HEADER_ITEMS = [ - (ULONG, "transport_id"), - (USHORT, "action_code"), - (USHORT, "reserved"), -] - -del UCHAR, WORD, DWORD, USHORT, ULONG - -NCB = win32wnet.NCB - - -def Netbios(ncb): - ob = ncb.Buffer - is_ours = hasattr(ob, "_pack") - if is_ours: - ob._pack() - try: - return win32wnet.Netbios(ncb) - finally: - if is_ours: - ob._unpack() - - -class NCBStruct: - def __init__(self, items): - self._format = "".join([item[0] for item in items]) - self._items = items - self._buffer_ = win32wnet.NCBBuffer(struct.calcsize(self._format)) - - 
for format, name in self._items: - if len(format) == 1: - if format == "c": - val = "\0" - else: - val = 0 - else: - l = int(format[:-1]) - val = "\0" * l - self.__dict__[name] = val - - def _pack(self): - vals = [] - for format, name in self._items: - try: - vals.append(self.__dict__[name]) - except KeyError: - vals.append(None) - - self._buffer_[:] = struct.pack(*(self._format,) + tuple(vals)) - - def _unpack(self): - items = struct.unpack(self._format, self._buffer_) - assert len(items) == len(self._items), "unexpected number of items to unpack!" - for (format, name), val in zip(self._items, items): - self.__dict__[name] = val - - def __setattr__(self, attr, val): - if attr not in self.__dict__ and attr[0] != "_": - for format, attr_name in self._items: - if attr == attr_name: - break - else: - raise AttributeError(attr) - self.__dict__[attr] = val - - -def ADAPTER_STATUS(): - return NCBStruct(ADAPTER_STATUS_ITEMS) - - -def NAME_BUFFER(): - return NCBStruct(NAME_BUFFER_ITEMS) - - -def SESSION_HEADER(): - return NCBStruct(SESSION_HEADER_ITEMS) - - -def SESSION_BUFFER(): - return NCBStruct(SESSION_BUFFER_ITEMS) - - -def LANA_ENUM(): - return NCBStruct(LANA_ENUM_ITEMS) - - -def FIND_NAME_HEADER(): - return NCBStruct(FIND_NAME_HEADER_ITEMS) - - -def FIND_NAME_BUFFER(): - return NCBStruct(FIND_NAME_BUFFER_ITEMS) - - -def ACTION_HEADER(): - return NCBStruct(ACTION_HEADER_ITEMS) - - -def byte_to_int(b): - """Given an element in a binary buffer, return its integer value""" - if sys.version_info >= (3, 0): - # a byte is already an int in py3k - return b - return ord(b) # its a char from a string in py2k. 
- - -if __name__ == "__main__": - # code ported from "HOWTO: Get the MAC Address for an Ethernet Adapter" - # MS KB ID: Q118623 - ncb = NCB() - ncb.Command = NCBENUM - la_enum = LANA_ENUM() - ncb.Buffer = la_enum - rc = Netbios(ncb) - if rc != 0: - raise RuntimeError("Unexpected result %d" % (rc,)) - for i in range(la_enum.length): - ncb.Reset() - ncb.Command = NCBRESET - ncb.Lana_num = byte_to_int(la_enum.lana[i]) - rc = Netbios(ncb) - if rc != 0: - raise RuntimeError("Unexpected result %d" % (rc,)) - ncb.Reset() - ncb.Command = NCBASTAT - ncb.Lana_num = byte_to_int(la_enum.lana[i]) - ncb.Callname = "* ".encode("ascii") # ensure bytes on py2x and 3k - adapter = ADAPTER_STATUS() - ncb.Buffer = adapter - Netbios(ncb) - print("Adapter address:", end=" ") - for ch in adapter.adapter_address: - print("%02x" % (byte_to_int(ch),), end=" ") - print() diff --git a/lib/win32/lib/ntsecuritycon.py b/lib/win32/lib/ntsecuritycon.py deleted file mode 100644 index 4d5b2ed5..00000000 --- a/lib/win32/lib/ntsecuritycon.py +++ /dev/null @@ -1,731 +0,0 @@ -# Hacked from winnt.h - -DELETE = 65536 -READ_CONTROL = 131072 -WRITE_DAC = 262144 -WRITE_OWNER = 524288 -SYNCHRONIZE = 1048576 -STANDARD_RIGHTS_REQUIRED = 983040 -STANDARD_RIGHTS_READ = READ_CONTROL -STANDARD_RIGHTS_WRITE = READ_CONTROL -STANDARD_RIGHTS_EXECUTE = READ_CONTROL -STANDARD_RIGHTS_ALL = 2031616 -SPECIFIC_RIGHTS_ALL = 65535 -ACCESS_SYSTEM_SECURITY = 16777216 -MAXIMUM_ALLOWED = 33554432 -GENERIC_READ = -2147483648 -GENERIC_WRITE = 1073741824 -GENERIC_EXECUTE = 536870912 -GENERIC_ALL = 268435456 - -# file security permissions -FILE_READ_DATA = 1 -FILE_LIST_DIRECTORY = 1 -FILE_WRITE_DATA = 2 -FILE_ADD_FILE = 2 -FILE_APPEND_DATA = 4 -FILE_ADD_SUBDIRECTORY = 4 -FILE_CREATE_PIPE_INSTANCE = 4 -FILE_READ_EA = 8 -FILE_WRITE_EA = 16 -FILE_EXECUTE = 32 -FILE_TRAVERSE = 32 -FILE_DELETE_CHILD = 64 -FILE_READ_ATTRIBUTES = 128 -FILE_WRITE_ATTRIBUTES = 256 -FILE_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 511 
-FILE_GENERIC_READ = ( - STANDARD_RIGHTS_READ - | FILE_READ_DATA - | FILE_READ_ATTRIBUTES - | FILE_READ_EA - | SYNCHRONIZE -) -FILE_GENERIC_WRITE = ( - STANDARD_RIGHTS_WRITE - | FILE_WRITE_DATA - | FILE_WRITE_ATTRIBUTES - | FILE_WRITE_EA - | FILE_APPEND_DATA - | SYNCHRONIZE -) -FILE_GENERIC_EXECUTE = ( - STANDARD_RIGHTS_EXECUTE | FILE_READ_ATTRIBUTES | FILE_EXECUTE | SYNCHRONIZE -) - - -SECURITY_NULL_SID_AUTHORITY = (0, 0, 0, 0, 0, 0) -SECURITY_WORLD_SID_AUTHORITY = (0, 0, 0, 0, 0, 1) -SECURITY_LOCAL_SID_AUTHORITY = (0, 0, 0, 0, 0, 2) -SECURITY_CREATOR_SID_AUTHORITY = (0, 0, 0, 0, 0, 3) -SECURITY_NON_UNIQUE_AUTHORITY = (0, 0, 0, 0, 0, 4) -SECURITY_RESOURCE_MANAGER_AUTHORITY = (0, 0, 0, 0, 0, 9) - -SECURITY_NULL_RID = 0 -SECURITY_WORLD_RID = 0 -SECURITY_LOCAL_RID = 0x00000000 - -SECURITY_CREATOR_OWNER_RID = 0 -SECURITY_CREATOR_GROUP_RID = 1 - -SECURITY_CREATOR_OWNER_SERVER_RID = 2 -SECURITY_CREATOR_GROUP_SERVER_RID = 3 -SECURITY_CREATOR_OWNER_RIGHTS_RID = 4 - -# NT well-known SIDs -SECURITY_NT_AUTHORITY = (0, 0, 0, 0, 0, 5) - -SECURITY_DIALUP_RID = 1 -SECURITY_NETWORK_RID = 2 -SECURITY_BATCH_RID = 3 -SECURITY_INTERACTIVE_RID = 4 -SECURITY_SERVICE_RID = 6 -SECURITY_ANONYMOUS_LOGON_RID = 7 -SECURITY_PROXY_RID = 8 -SECURITY_SERVER_LOGON_RID = 9 - -SECURITY_LOGON_IDS_RID = 5 -SECURITY_LOGON_IDS_RID_COUNT = 3 - -SECURITY_LOCAL_SYSTEM_RID = 18 - -SECURITY_NT_NON_UNIQUE = 21 - -SECURITY_BUILTIN_DOMAIN_RID = 32 - -# well-known domain relative sub-authority values (RIDs)... -DOMAIN_USER_RID_ADMIN = 500 -DOMAIN_USER_RID_GUEST = 501 -DOMAIN_USER_RID_KRBTGT = 502 -DOMAIN_USER_RID_MAX = 999 - -# well-known groups ... 
-DOMAIN_GROUP_RID_ADMINS = 512 -DOMAIN_GROUP_RID_USERS = 513 -DOMAIN_GROUP_RID_GUESTS = 514 -DOMAIN_GROUP_RID_COMPUTERS = 515 -DOMAIN_GROUP_RID_CONTROLLERS = 516 -DOMAIN_GROUP_RID_CERT_ADMINS = 517 -DOMAIN_GROUP_RID_SCHEMA_ADMINS = 518 -DOMAIN_GROUP_RID_ENTERPRISE_ADMINS = 519 -DOMAIN_GROUP_RID_POLICY_ADMINS = 520 -DOMAIN_GROUP_RID_READONLY_CONTROLLERS = 521 - -# well-known aliases ... -DOMAIN_ALIAS_RID_ADMINS = 544 -DOMAIN_ALIAS_RID_USERS = 545 -DOMAIN_ALIAS_RID_GUESTS = 546 -DOMAIN_ALIAS_RID_POWER_USERS = 547 -DOMAIN_ALIAS_RID_ACCOUNT_OPS = 548 -DOMAIN_ALIAS_RID_SYSTEM_OPS = 549 -DOMAIN_ALIAS_RID_PRINT_OPS = 550 -DOMAIN_ALIAS_RID_BACKUP_OPS = 551 -DOMAIN_ALIAS_RID_REPLICATOR = 552 -DOMAIN_ALIAS_RID_RAS_SERVERS = 553 -DOMAIN_ALIAS_RID_PREW2KCOMPACCESS = 554 -DOMAIN_ALIAS_RID_REMOTE_DESKTOP_USERS = 555 -DOMAIN_ALIAS_RID_NETWORK_CONFIGURATION_OPS = 556 -DOMAIN_ALIAS_RID_INCOMING_FOREST_TRUST_BUILDERS = 557 -DOMAIN_ALIAS_RID_MONITORING_USERS = 558 -DOMAIN_ALIAS_RID_LOGGING_USERS = 559 -DOMAIN_ALIAS_RID_AUTHORIZATIONACCESS = 560 -DOMAIN_ALIAS_RID_TS_LICENSE_SERVERS = 561 -DOMAIN_ALIAS_RID_DCOM_USERS = 562 -DOMAIN_ALIAS_RID_IUSERS = 568 -DOMAIN_ALIAS_RID_CRYPTO_OPERATORS = 569 -DOMAIN_ALIAS_RID_CACHEABLE_PRINCIPALS_GROUP = 571 -DOMAIN_ALIAS_RID_NON_CACHEABLE_PRINCIPALS_GROUP = 572 -DOMAIN_ALIAS_RID_EVENT_LOG_READERS_GROUP = 573 - -SECURITY_MANDATORY_LABEL_AUTHORITY = (0, 0, 0, 0, 0, 16) -SECURITY_MANDATORY_UNTRUSTED_RID = 0x00000000 -SECURITY_MANDATORY_LOW_RID = 0x00001000 -SECURITY_MANDATORY_MEDIUM_RID = 0x00002000 -SECURITY_MANDATORY_HIGH_RID = 0x00003000 -SECURITY_MANDATORY_SYSTEM_RID = 0x00004000 -SECURITY_MANDATORY_PROTECTED_PROCESS_RID = 0x00005000 -SECURITY_MANDATORY_MAXIMUM_USER_RID = SECURITY_MANDATORY_SYSTEM_RID - -SYSTEM_LUID = (999, 0) -ANONYMOUS_LOGON_LUID = (998, 0) -LOCALSERVICE_LUID = (997, 0) -NETWORKSERVICE_LUID = (996, 0) -IUSER_LUID = (995, 0) - -# Group attributes - -SE_GROUP_MANDATORY = 1 -SE_GROUP_ENABLED_BY_DEFAULT = 2 -SE_GROUP_ENABLED = 4 
-SE_GROUP_OWNER = 8 -SE_GROUP_USE_FOR_DENY_ONLY = 16 -SE_GROUP_INTEGRITY = 32 -SE_GROUP_INTEGRITY_ENABLED = 64 -SE_GROUP_RESOURCE = 536870912 -SE_GROUP_LOGON_ID = -1073741824 - - -# User attributes -# (None yet defined.) - -# ACE types -ACCESS_MIN_MS_ACE_TYPE = 0 -ACCESS_ALLOWED_ACE_TYPE = 0 -ACCESS_DENIED_ACE_TYPE = 1 -SYSTEM_AUDIT_ACE_TYPE = 2 -SYSTEM_ALARM_ACE_TYPE = 3 -ACCESS_MAX_MS_V2_ACE_TYPE = 3 -ACCESS_ALLOWED_COMPOUND_ACE_TYPE = 4 -ACCESS_MAX_MS_V3_ACE_TYPE = 4 -ACCESS_MIN_MS_OBJECT_ACE_TYPE = 5 -ACCESS_ALLOWED_OBJECT_ACE_TYPE = 5 -ACCESS_DENIED_OBJECT_ACE_TYPE = 6 -SYSTEM_AUDIT_OBJECT_ACE_TYPE = 7 -SYSTEM_ALARM_OBJECT_ACE_TYPE = 8 -ACCESS_MAX_MS_OBJECT_ACE_TYPE = 8 -ACCESS_MAX_MS_V4_ACE_TYPE = 8 -ACCESS_MAX_MS_ACE_TYPE = 8 -ACCESS_ALLOWED_CALLBACK_ACE_TYPE = 9 -ACCESS_DENIED_CALLBACK_ACE_TYPE = 10 -ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE = 11 -ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE = 12 -SYSTEM_AUDIT_CALLBACK_ACE_TYPE = 13 -SYSTEM_ALARM_CALLBACK_ACE_TYPE = 14 -SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE = 15 -SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE = 16 -SYSTEM_MANDATORY_LABEL_ACE_TYPE = 17 -ACCESS_MAX_MS_V5_ACE_TYPE = 17 - -# The following are the inherit flags that go into the AceFlags field -# of an Ace header. 
- -OBJECT_INHERIT_ACE = 1 -CONTAINER_INHERIT_ACE = 2 -NO_PROPAGATE_INHERIT_ACE = 4 -INHERIT_ONLY_ACE = 8 -VALID_INHERIT_FLAGS = 15 - - -SUCCESSFUL_ACCESS_ACE_FLAG = 64 -FAILED_ACCESS_ACE_FLAG = 128 - -SE_OWNER_DEFAULTED = 1 -SE_GROUP_DEFAULTED = 2 -SE_DACL_PRESENT = 4 -SE_DACL_DEFAULTED = 8 -SE_SACL_PRESENT = 16 -SE_SACL_DEFAULTED = 32 -SE_SELF_RELATIVE = 32768 - - -SE_PRIVILEGE_ENABLED_BY_DEFAULT = 1 -SE_PRIVILEGE_ENABLED = 2 -SE_PRIVILEGE_USED_FOR_ACCESS = -2147483648 - -PRIVILEGE_SET_ALL_NECESSARY = 1 - -# NT Defined Privileges - -SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege" -SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege" -SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege" -SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege" -SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege" -SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege" -SE_TCB_NAME = "SeTcbPrivilege" -SE_SECURITY_NAME = "SeSecurityPrivilege" -SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege" -SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege" -SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege" -SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege" -SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege" -SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege" -SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege" -SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege" -SE_BACKUP_NAME = "SeBackupPrivilege" -SE_RESTORE_NAME = "SeRestorePrivilege" -SE_SHUTDOWN_NAME = "SeShutdownPrivilege" -SE_DEBUG_NAME = "SeDebugPrivilege" -SE_AUDIT_NAME = "SeAuditPrivilege" -SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege" -SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege" -SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege" - - -# Enum SECURITY_IMPERSONATION_LEVEL: -SecurityAnonymous = 0 -SecurityIdentification = 1 -SecurityImpersonation = 2 -SecurityDelegation = 3 - -SECURITY_MAX_IMPERSONATION_LEVEL = SecurityDelegation - -DEFAULT_IMPERSONATION_LEVEL = SecurityImpersonation - 
-TOKEN_ASSIGN_PRIMARY = 1 -TOKEN_DUPLICATE = 2 -TOKEN_IMPERSONATE = 4 -TOKEN_QUERY = 8 -TOKEN_QUERY_SOURCE = 16 -TOKEN_ADJUST_PRIVILEGES = 32 -TOKEN_ADJUST_GROUPS = 64 -TOKEN_ADJUST_DEFAULT = 128 - -TOKEN_ALL_ACCESS = ( - STANDARD_RIGHTS_REQUIRED - | TOKEN_ASSIGN_PRIMARY - | TOKEN_DUPLICATE - | TOKEN_IMPERSONATE - | TOKEN_QUERY - | TOKEN_QUERY_SOURCE - | TOKEN_ADJUST_PRIVILEGES - | TOKEN_ADJUST_GROUPS - | TOKEN_ADJUST_DEFAULT -) - - -TOKEN_READ = STANDARD_RIGHTS_READ | TOKEN_QUERY - - -TOKEN_WRITE = ( - STANDARD_RIGHTS_WRITE - | TOKEN_ADJUST_PRIVILEGES - | TOKEN_ADJUST_GROUPS - | TOKEN_ADJUST_DEFAULT -) - -TOKEN_EXECUTE = STANDARD_RIGHTS_EXECUTE - -SidTypeUser = 1 -SidTypeGroup = 2 -SidTypeDomain = 3 -SidTypeAlias = 4 -SidTypeWellKnownGroup = 5 -SidTypeDeletedAccount = 6 -SidTypeInvalid = 7 -SidTypeUnknown = 8 -SidTypeComputer = 9 -SidTypeLabel = 10 - -# Token types -TokenPrimary = 1 -TokenImpersonation = 2 - -# TOKEN_INFORMATION_CLASS, used with Get/SetTokenInformation -TokenUser = 1 -TokenGroups = 2 -TokenPrivileges = 3 -TokenOwner = 4 -TokenPrimaryGroup = 5 -TokenDefaultDacl = 6 -TokenSource = 7 -TokenType = 8 -TokenImpersonationLevel = 9 -TokenStatistics = 10 -TokenRestrictedSids = 11 -TokenSessionId = 12 -TokenGroupsAndPrivileges = 13 -TokenSessionReference = 14 -TokenSandBoxInert = 15 -TokenAuditPolicy = 16 -TokenOrigin = 17 -TokenElevationType = 18 -TokenLinkedToken = 19 -TokenElevation = 20 -TokenHasRestrictions = 21 -TokenAccessInformation = 22 -TokenVirtualizationAllowed = 23 -TokenVirtualizationEnabled = 24 -TokenIntegrityLevel = 25 -TokenUIAccess = 26 -TokenMandatoryPolicy = 27 -TokenLogonSid = 28 - -# DirectoryService related constants. 
-# Generated by h2py from NtDsAPI.h -DS_BEHAVIOR_WIN2000 = 0 -DS_BEHAVIOR_WIN2003_WITH_MIXED_DOMAINS = 1 -DS_BEHAVIOR_WIN2003 = 2 -DS_SYNCED_EVENT_NAME = "NTDSInitialSyncsCompleted" -ACTRL_DS_OPEN = 0x00000000 -ACTRL_DS_CREATE_CHILD = 0x00000001 -ACTRL_DS_DELETE_CHILD = 0x00000002 -ACTRL_DS_LIST = 0x00000004 -ACTRL_DS_SELF = 0x00000008 -ACTRL_DS_READ_PROP = 0x00000010 -ACTRL_DS_WRITE_PROP = 0x00000020 -ACTRL_DS_DELETE_TREE = 0x00000040 -ACTRL_DS_LIST_OBJECT = 0x00000080 -ACTRL_DS_CONTROL_ACCESS = 0x00000100 -NTDSAPI_BIND_ALLOW_DELEGATION = 0x00000001 -DS_REPSYNC_ASYNCHRONOUS_OPERATION = 0x00000001 -DS_REPSYNC_WRITEABLE = 0x00000002 -DS_REPSYNC_PERIODIC = 0x00000004 -DS_REPSYNC_INTERSITE_MESSAGING = 0x00000008 -DS_REPSYNC_ALL_SOURCES = 0x00000010 -DS_REPSYNC_FULL = 0x00000020 -DS_REPSYNC_URGENT = 0x00000040 -DS_REPSYNC_NO_DISCARD = 0x00000080 -DS_REPSYNC_FORCE = 0x00000100 -DS_REPSYNC_ADD_REFERENCE = 0x00000200 -DS_REPSYNC_NEVER_COMPLETED = 0x00000400 -DS_REPSYNC_TWO_WAY = 0x00000800 -DS_REPSYNC_NEVER_NOTIFY = 0x00001000 -DS_REPSYNC_INITIAL = 0x00002000 -DS_REPSYNC_USE_COMPRESSION = 0x00004000 -DS_REPSYNC_ABANDONED = 0x00008000 -DS_REPSYNC_INITIAL_IN_PROGRESS = 0x00010000 -DS_REPSYNC_PARTIAL_ATTRIBUTE_SET = 0x00020000 -DS_REPSYNC_REQUEUE = 0x00040000 -DS_REPSYNC_NOTIFICATION = 0x00080000 -DS_REPSYNC_ASYNCHRONOUS_REPLICA = 0x00100000 -DS_REPSYNC_CRITICAL = 0x00200000 -DS_REPSYNC_FULL_IN_PROGRESS = 0x00400000 -DS_REPSYNC_PREEMPTED = 0x00800000 -DS_REPADD_ASYNCHRONOUS_OPERATION = 0x00000001 -DS_REPADD_WRITEABLE = 0x00000002 -DS_REPADD_INITIAL = 0x00000004 -DS_REPADD_PERIODIC = 0x00000008 -DS_REPADD_INTERSITE_MESSAGING = 0x00000010 -DS_REPADD_ASYNCHRONOUS_REPLICA = 0x00000020 -DS_REPADD_DISABLE_NOTIFICATION = 0x00000040 -DS_REPADD_DISABLE_PERIODIC = 0x00000080 -DS_REPADD_USE_COMPRESSION = 0x00000100 -DS_REPADD_NEVER_NOTIFY = 0x00000200 -DS_REPADD_TWO_WAY = 0x00000400 -DS_REPADD_CRITICAL = 0x00000800 -DS_REPDEL_ASYNCHRONOUS_OPERATION = 0x00000001 -DS_REPDEL_WRITEABLE = 
0x00000002 -DS_REPDEL_INTERSITE_MESSAGING = 0x00000004 -DS_REPDEL_IGNORE_ERRORS = 0x00000008 -DS_REPDEL_LOCAL_ONLY = 0x00000010 -DS_REPDEL_NO_SOURCE = 0x00000020 -DS_REPDEL_REF_OK = 0x00000040 -DS_REPMOD_ASYNCHRONOUS_OPERATION = 0x00000001 -DS_REPMOD_WRITEABLE = 0x00000002 -DS_REPMOD_UPDATE_FLAGS = 0x00000001 -DS_REPMOD_UPDATE_ADDRESS = 0x00000002 -DS_REPMOD_UPDATE_SCHEDULE = 0x00000004 -DS_REPMOD_UPDATE_RESULT = 0x00000008 -DS_REPMOD_UPDATE_TRANSPORT = 0x00000010 -DS_REPUPD_ASYNCHRONOUS_OPERATION = 0x00000001 -DS_REPUPD_WRITEABLE = 0x00000002 -DS_REPUPD_ADD_REFERENCE = 0x00000004 -DS_REPUPD_DELETE_REFERENCE = 0x00000008 -DS_INSTANCETYPE_IS_NC_HEAD = 0x00000001 -DS_INSTANCETYPE_NC_IS_WRITEABLE = 0x00000004 -DS_INSTANCETYPE_NC_COMING = 0x00000010 -DS_INSTANCETYPE_NC_GOING = 0x00000020 -NTDSDSA_OPT_IS_GC = 1 << 0 -NTDSDSA_OPT_DISABLE_INBOUND_REPL = 1 << 1 -NTDSDSA_OPT_DISABLE_OUTBOUND_REPL = 1 << 2 -NTDSDSA_OPT_DISABLE_NTDSCONN_XLATE = 1 << 3 -NTDSCONN_OPT_IS_GENERATED = 1 << 0 -NTDSCONN_OPT_TWOWAY_SYNC = 1 << 1 -NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT = 1 << 2 -NTDSCONN_OPT_USE_NOTIFY = 1 << 3 -NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION = 1 << 4 -NTDSCONN_OPT_USER_OWNED_SCHEDULE = 1 << 5 -NTDSCONN_KCC_NO_REASON = 0 -NTDSCONN_KCC_GC_TOPOLOGY = 1 << 0 -NTDSCONN_KCC_RING_TOPOLOGY = 1 << 1 -NTDSCONN_KCC_MINIMIZE_HOPS_TOPOLOGY = 1 << 2 -NTDSCONN_KCC_STALE_SERVERS_TOPOLOGY = 1 << 3 -NTDSCONN_KCC_OSCILLATING_CONNECTION_TOPOLOGY = 1 << 4 -NTDSCONN_KCC_INTERSITE_GC_TOPOLOGY = 1 << 5 -NTDSCONN_KCC_INTERSITE_TOPOLOGY = 1 << 6 -NTDSCONN_KCC_SERVER_FAILOVER_TOPOLOGY = 1 << 7 -NTDSCONN_KCC_SITE_FAILOVER_TOPOLOGY = 1 << 8 -NTDSCONN_KCC_REDUNDANT_SERVER_TOPOLOGY = 1 << 9 -FRSCONN_PRIORITY_MASK = 0x70000000 -FRSCONN_MAX_PRIORITY = 0x8 -NTDSCONN_OPT_IGNORE_SCHEDULE_MASK = -2147483648 - -NTDSSETTINGS_OPT_IS_AUTO_TOPOLOGY_DISABLED = 1 << 0 -NTDSSETTINGS_OPT_IS_TOPL_CLEANUP_DISABLED = 1 << 1 -NTDSSETTINGS_OPT_IS_TOPL_MIN_HOPS_DISABLED = 1 << 2 
-NTDSSETTINGS_OPT_IS_TOPL_DETECT_STALE_DISABLED = 1 << 3 -NTDSSETTINGS_OPT_IS_INTER_SITE_AUTO_TOPOLOGY_DISABLED = 1 << 4 -NTDSSETTINGS_OPT_IS_GROUP_CACHING_ENABLED = 1 << 5 -NTDSSETTINGS_OPT_FORCE_KCC_WHISTLER_BEHAVIOR = 1 << 6 -NTDSSETTINGS_OPT_FORCE_KCC_W2K_ELECTION = 1 << 7 -NTDSSETTINGS_OPT_IS_RAND_BH_SELECTION_DISABLED = 1 << 8 -NTDSSETTINGS_OPT_IS_SCHEDULE_HASHING_ENABLED = 1 << 9 -NTDSSETTINGS_OPT_IS_REDUNDANT_SERVER_TOPOLOGY_ENABLED = 1 << 10 -NTDSSETTINGS_DEFAULT_SERVER_REDUNDANCY = 2 -NTDSTRANSPORT_OPT_IGNORE_SCHEDULES = 1 << 0 -NTDSTRANSPORT_OPT_BRIDGES_REQUIRED = 1 << 1 -NTDSSITECONN_OPT_USE_NOTIFY = 1 << 0 -NTDSSITECONN_OPT_TWOWAY_SYNC = 1 << 1 -NTDSSITECONN_OPT_DISABLE_COMPRESSION = 1 << 2 -NTDSSITELINK_OPT_USE_NOTIFY = 1 << 0 -NTDSSITELINK_OPT_TWOWAY_SYNC = 1 << 1 -NTDSSITELINK_OPT_DISABLE_COMPRESSION = 1 << 2 -GUID_USERS_CONTAINER_A = "a9d1ca15768811d1aded00c04fd8d5cd" -GUID_COMPUTRS_CONTAINER_A = "aa312825768811d1aded00c04fd8d5cd" -GUID_SYSTEMS_CONTAINER_A = "ab1d30f3768811d1aded00c04fd8d5cd" -GUID_DOMAIN_CONTROLLERS_CONTAINER_A = "a361b2ffffd211d1aa4b00c04fd7d83a" -GUID_INFRASTRUCTURE_CONTAINER_A = "2fbac1870ade11d297c400c04fd8d5cd" -GUID_DELETED_OBJECTS_CONTAINER_A = "18e2ea80684f11d2b9aa00c04f79f805" -GUID_LOSTANDFOUND_CONTAINER_A = "ab8153b7768811d1aded00c04fd8d5cd" -GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_A = "22b70c67d56e4efb91e9300fca3dc1aa" -GUID_PROGRAM_DATA_CONTAINER_A = "09460c08ae1e4a4ea0f64aee7daa1e5a" -GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_A = "f4be92a4c777485e878e9421d53087db" -GUID_NTDS_QUOTAS_CONTAINER_A = "6227f0af1fc2410d8e3bb10615bb5b0f" -GUID_USERS_CONTAINER_BYTE = ( - "\xa9\xd1\xca\x15\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" -) -GUID_COMPUTRS_CONTAINER_BYTE = ( - "\xaa\x31\x28\x25\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" -) -GUID_SYSTEMS_CONTAINER_BYTE = ( - "\xab\x1d\x30\xf3\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" -) -GUID_DOMAIN_CONTROLLERS_CONTAINER_BYTE = ( - 
"\xa3\x61\xb2\xff\xff\xd2\x11\xd1\xaa\x4b\x00\xc0\x4f\xd7\xd8\x3a" -) -GUID_INFRASTRUCTURE_CONTAINER_BYTE = ( - "\x2f\xba\xc1\x87\x0a\xde\x11\xd2\x97\xc4\x00\xc0\x4f\xd8\xd5\xcd" -) -GUID_DELETED_OBJECTS_CONTAINER_BYTE = ( - "\x18\xe2\xea\x80\x68\x4f\x11\xd2\xb9\xaa\x00\xc0\x4f\x79\xf8\x05" -) -GUID_LOSTANDFOUND_CONTAINER_BYTE = ( - "\xab\x81\x53\xb7\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd" -) -GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_BYTE = ( - "\x22\xb7\x0c\x67\xd5\x6e\x4e\xfb\x91\xe9\x30\x0f\xca\x3d\xc1\xaa" -) -GUID_PROGRAM_DATA_CONTAINER_BYTE = ( - "\x09\x46\x0c\x08\xae\x1e\x4a\x4e\xa0\xf6\x4a\xee\x7d\xaa\x1e\x5a" -) -GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_BYTE = ( - "\xf4\xbe\x92\xa4\xc7\x77\x48\x5e\x87\x8e\x94\x21\xd5\x30\x87\xdb" -) -GUID_NTDS_QUOTAS_CONTAINER_BYTE = ( - "\x62\x27\xf0\xaf\x1f\xc2\x41\x0d\x8e\x3b\xb1\x06\x15\xbb\x5b\x0f" -) -DS_REPSYNCALL_NO_OPTIONS = 0x00000000 -DS_REPSYNCALL_ABORT_IF_SERVER_UNAVAILABLE = 0x00000001 -DS_REPSYNCALL_SYNC_ADJACENT_SERVERS_ONLY = 0x00000002 -DS_REPSYNCALL_ID_SERVERS_BY_DN = 0x00000004 -DS_REPSYNCALL_DO_NOT_SYNC = 0x00000008 -DS_REPSYNCALL_SKIP_INITIAL_CHECK = 0x00000010 -DS_REPSYNCALL_PUSH_CHANGES_OUTWARD = 0x00000020 -DS_REPSYNCALL_CROSS_SITE_BOUNDARIES = 0x00000040 -DS_LIST_DSA_OBJECT_FOR_SERVER = 0 -DS_LIST_DNS_HOST_NAME_FOR_SERVER = 1 -DS_LIST_ACCOUNT_OBJECT_FOR_SERVER = 2 -DS_ROLE_SCHEMA_OWNER = 0 -DS_ROLE_DOMAIN_OWNER = 1 -DS_ROLE_PDC_OWNER = 2 -DS_ROLE_RID_OWNER = 3 -DS_ROLE_INFRASTRUCTURE_OWNER = 4 -DS_SCHEMA_GUID_NOT_FOUND = 0 -DS_SCHEMA_GUID_ATTR = 1 -DS_SCHEMA_GUID_ATTR_SET = 2 -DS_SCHEMA_GUID_CLASS = 3 -DS_SCHEMA_GUID_CONTROL_RIGHT = 4 -DS_KCC_FLAG_ASYNC_OP = 1 << 0 -DS_KCC_FLAG_DAMPED = 1 << 1 -DS_EXIST_ADVISORY_MODE = 0x1 -DS_REPL_INFO_FLAG_IMPROVE_LINKED_ATTRS = 0x00000001 -DS_REPL_NBR_WRITEABLE = 0x00000010 -DS_REPL_NBR_SYNC_ON_STARTUP = 0x00000020 -DS_REPL_NBR_DO_SCHEDULED_SYNCS = 0x00000040 -DS_REPL_NBR_USE_ASYNC_INTERSITE_TRANSPORT = 0x00000080 -DS_REPL_NBR_TWO_WAY_SYNC = 0x00000200 
-DS_REPL_NBR_RETURN_OBJECT_PARENTS = 0x00000800 -DS_REPL_NBR_FULL_SYNC_IN_PROGRESS = 0x00010000 -DS_REPL_NBR_FULL_SYNC_NEXT_PACKET = 0x00020000 -DS_REPL_NBR_NEVER_SYNCED = 0x00200000 -DS_REPL_NBR_PREEMPTED = 0x01000000 -DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS = 0x04000000 -DS_REPL_NBR_DISABLE_SCHEDULED_SYNC = 0x08000000 -DS_REPL_NBR_COMPRESS_CHANGES = 0x10000000 -DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS = 0x20000000 -DS_REPL_NBR_PARTIAL_ATTRIBUTE_SET = 0x40000000 -DS_REPL_NBR_MODIFIABLE_MASK = ( - DS_REPL_NBR_SYNC_ON_STARTUP - | DS_REPL_NBR_DO_SCHEDULED_SYNCS - | DS_REPL_NBR_TWO_WAY_SYNC - | DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS - | DS_REPL_NBR_DISABLE_SCHEDULED_SYNC - | DS_REPL_NBR_COMPRESS_CHANGES - | DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS -) - -# from enum DS_NAME_FORMAT -DS_UNKNOWN_NAME = 0 -DS_FQDN_1779_NAME = 1 -DS_NT4_ACCOUNT_NAME = 2 -DS_DISPLAY_NAME = 3 -DS_UNIQUE_ID_NAME = 6 -DS_CANONICAL_NAME = 7 -DS_USER_PRINCIPAL_NAME = 8 -DS_CANONICAL_NAME_EX = 9 -DS_SERVICE_PRINCIPAL_NAME = 10 -DS_SID_OR_SID_HISTORY_NAME = 11 -DS_DNS_DOMAIN_NAME = 12 - -DS_DOMAIN_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME -DS_ENTERPRISE_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME - -# from enum DS_NAME_FLAGS -DS_NAME_NO_FLAGS = 0x0 -DS_NAME_FLAG_SYNTACTICAL_ONLY = 0x1 -DS_NAME_FLAG_EVAL_AT_DC = 0x2 -DS_NAME_FLAG_GCVERIFY = 0x4 -DS_NAME_FLAG_TRUST_REFERRAL = 0x8 - -# from enum DS_NAME_ERROR -DS_NAME_NO_ERROR = 0 -DS_NAME_ERROR_RESOLVING = 1 -DS_NAME_ERROR_NOT_FOUND = 2 -DS_NAME_ERROR_NOT_UNIQUE = 3 -DS_NAME_ERROR_NO_MAPPING = 4 -DS_NAME_ERROR_DOMAIN_ONLY = 5 -DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING = 6 -DS_NAME_ERROR_TRUST_REFERRAL = 7 - - -# from enum DS_SPN_NAME_TYPE -DS_SPN_DNS_HOST = 0 -DS_SPN_DN_HOST = 1 -DS_SPN_NB_HOST = 2 -DS_SPN_DOMAIN = 3 -DS_SPN_NB_DOMAIN = 4 -DS_SPN_SERVICE = 5 - -# from enum DS_SPN_WRITE_OP -DS_SPN_ADD_SPN_OP = 0 -DS_SPN_REPLACE_SPN_OP = 1 -DS_SPN_DELETE_SPN_OP = 2 - -# Generated by h2py from DsGetDC.h -DS_FORCE_REDISCOVERY = 0x00000001 -DS_DIRECTORY_SERVICE_REQUIRED = 
0x00000010 -DS_DIRECTORY_SERVICE_PREFERRED = 0x00000020 -DS_GC_SERVER_REQUIRED = 0x00000040 -DS_PDC_REQUIRED = 0x00000080 -DS_BACKGROUND_ONLY = 0x00000100 -DS_IP_REQUIRED = 0x00000200 -DS_KDC_REQUIRED = 0x00000400 -DS_TIMESERV_REQUIRED = 0x00000800 -DS_WRITABLE_REQUIRED = 0x00001000 -DS_GOOD_TIMESERV_PREFERRED = 0x00002000 -DS_AVOID_SELF = 0x00004000 -DS_ONLY_LDAP_NEEDED = 0x00008000 -DS_IS_FLAT_NAME = 0x00010000 -DS_IS_DNS_NAME = 0x00020000 -DS_RETURN_DNS_NAME = 0x40000000 -DS_RETURN_FLAT_NAME = -2147483648 -DSGETDC_VALID_FLAGS = ( - DS_FORCE_REDISCOVERY - | DS_DIRECTORY_SERVICE_REQUIRED - | DS_DIRECTORY_SERVICE_PREFERRED - | DS_GC_SERVER_REQUIRED - | DS_PDC_REQUIRED - | DS_BACKGROUND_ONLY - | DS_IP_REQUIRED - | DS_KDC_REQUIRED - | DS_TIMESERV_REQUIRED - | DS_WRITABLE_REQUIRED - | DS_GOOD_TIMESERV_PREFERRED - | DS_AVOID_SELF - | DS_ONLY_LDAP_NEEDED - | DS_IS_FLAT_NAME - | DS_IS_DNS_NAME - | DS_RETURN_FLAT_NAME - | DS_RETURN_DNS_NAME -) -DS_INET_ADDRESS = 1 -DS_NETBIOS_ADDRESS = 2 -DS_PDC_FLAG = 0x00000001 -DS_GC_FLAG = 0x00000004 -DS_LDAP_FLAG = 0x00000008 -DS_DS_FLAG = 0x00000010 -DS_KDC_FLAG = 0x00000020 -DS_TIMESERV_FLAG = 0x00000040 -DS_CLOSEST_FLAG = 0x00000080 -DS_WRITABLE_FLAG = 0x00000100 -DS_GOOD_TIMESERV_FLAG = 0x00000200 -DS_NDNC_FLAG = 0x00000400 -DS_PING_FLAGS = 0x0000FFFF -DS_DNS_CONTROLLER_FLAG = 0x20000000 -DS_DNS_DOMAIN_FLAG = 0x40000000 -DS_DNS_FOREST_FLAG = -2147483648 -DS_DOMAIN_IN_FOREST = 0x0001 -DS_DOMAIN_DIRECT_OUTBOUND = 0x0002 -DS_DOMAIN_TREE_ROOT = 0x0004 -DS_DOMAIN_PRIMARY = 0x0008 -DS_DOMAIN_NATIVE_MODE = 0x0010 -DS_DOMAIN_DIRECT_INBOUND = 0x0020 -DS_DOMAIN_VALID_FLAGS = ( - DS_DOMAIN_IN_FOREST - | DS_DOMAIN_DIRECT_OUTBOUND - | DS_DOMAIN_TREE_ROOT - | DS_DOMAIN_PRIMARY - | DS_DOMAIN_NATIVE_MODE - | DS_DOMAIN_DIRECT_INBOUND -) -DS_GFTI_UPDATE_TDO = 0x1 -DS_GFTI_VALID_FLAGS = 0x1 -DS_ONLY_DO_SITE_NAME = 0x01 -DS_NOTIFY_AFTER_SITE_RECORDS = 0x02 -DS_OPEN_VALID_OPTION_FLAGS = DS_ONLY_DO_SITE_NAME | DS_NOTIFY_AFTER_SITE_RECORDS 
-DS_OPEN_VALID_FLAGS = ( - DS_FORCE_REDISCOVERY - | DS_ONLY_LDAP_NEEDED - | DS_KDC_REQUIRED - | DS_PDC_REQUIRED - | DS_GC_SERVER_REQUIRED - | DS_WRITABLE_REQUIRED -) - -## from aclui.h -# SI_OBJECT_INFO.dwFlags -SI_EDIT_PERMS = 0x00000000 -SI_EDIT_OWNER = 0x00000001 -SI_EDIT_AUDITS = 0x00000002 -SI_CONTAINER = 0x00000004 -SI_READONLY = 0x00000008 -SI_ADVANCED = 0x00000010 -SI_RESET = 0x00000020 -SI_OWNER_READONLY = 0x00000040 -SI_EDIT_PROPERTIES = 0x00000080 -SI_OWNER_RECURSE = 0x00000100 -SI_NO_ACL_PROTECT = 0x00000200 -SI_NO_TREE_APPLY = 0x00000400 -SI_PAGE_TITLE = 0x00000800 -SI_SERVER_IS_DC = 0x00001000 -SI_RESET_DACL_TREE = 0x00004000 -SI_RESET_SACL_TREE = 0x00008000 -SI_OBJECT_GUID = 0x00010000 -SI_EDIT_EFFECTIVE = 0x00020000 -SI_RESET_DACL = 0x00040000 -SI_RESET_SACL = 0x00080000 -SI_RESET_OWNER = 0x00100000 -SI_NO_ADDITIONAL_PERMISSION = 0x00200000 -SI_MAY_WRITE = 0x10000000 -SI_EDIT_ALL = SI_EDIT_PERMS | SI_EDIT_OWNER | SI_EDIT_AUDITS -SI_AUDITS_ELEVATION_REQUIRED = 0x02000000 -SI_VIEW_ONLY = 0x00400000 -SI_OWNER_ELEVATION_REQUIRED = 0x04000000 -SI_PERMS_ELEVATION_REQUIRED = 0x01000000 - -# SI_ACCESS.dwFlags -SI_ACCESS_SPECIFIC = 0x00010000 -SI_ACCESS_GENERAL = 0x00020000 -SI_ACCESS_CONTAINER = 0x00040000 -SI_ACCESS_PROPERTY = 0x00080000 - -# SI_PAGE_TYPE enum -SI_PAGE_PERM = 0 -SI_PAGE_ADVPERM = 1 -SI_PAGE_AUDIT = 2 -SI_PAGE_OWNER = 3 -SI_PAGE_EFFECTIVE = 4 - -CFSTR_ACLUI_SID_INFO_LIST = "CFSTR_ACLUI_SID_INFO_LIST" -PSPCB_SI_INITDIALOG = 1025 ## WM_USER+1 diff --git a/lib/win32/lib/pywin32_bootstrap.py b/lib/win32/lib/pywin32_bootstrap.py deleted file mode 100644 index fbc4f7be..00000000 --- a/lib/win32/lib/pywin32_bootstrap.py +++ /dev/null @@ -1,29 +0,0 @@ -# Imported by pywin32.pth to bootstrap the pywin32 environment in "portable" -# environments or any other case where the post-install script isn't run. 
-# -# In short, there's a directory installed by pywin32 named 'pywin32_system32' -# with some important DLLs which need to be found by Python when some pywin32 -# modules are imported. -# If Python has `os.add_dll_directory()`, we need to call it with this path. -# Otherwise, we add this path to PATH. - - -try: - import pywin32_system32 -except ImportError: # Python ≥3.6: replace ImportError with ModuleNotFoundError - pass -else: - import os - - # We're guaranteed only that __path__: Iterable[str] - # https://docs.python.org/3/reference/import.html#__path__ - for path in pywin32_system32.__path__: - if os.path.isdir(path): - if hasattr(os, "add_dll_directory"): - os.add_dll_directory(path) - # This is to ensure the pywin32 path is in the beginning to find the - # pywin32 DLLs first and prevent other PATH entries to shadow them - elif not os.environ["PATH"].startswith(path): - os.environ["PATH"] = os.environ["PATH"].replace(os.pathsep + path, "") - os.environ["PATH"] = path + os.pathsep + os.environ["PATH"] - break diff --git a/lib/win32/lib/pywin32_testutil.py b/lib/win32/lib/pywin32_testutil.py deleted file mode 100644 index 41566a8e..00000000 --- a/lib/win32/lib/pywin32_testutil.py +++ /dev/null @@ -1,327 +0,0 @@ -# Utilities for the pywin32 tests -import gc -import os -import site -import sys -import unittest - -import winerror - -## -## General purpose utilities for the test suite. -## - - -# The test suite has lots of string constants containing binary data, but -# the strings are used in various "bytes" contexts. -def str2bytes(sval): - if sys.version_info < (3, 0) and isinstance(sval, str): - sval = sval.decode("latin1") - return sval.encode("latin1") - - -# Sometimes we want to pass a string that should explicitly be treated as -# a memory blob. -def str2memory(sval): - if sys.version_info < (3, 0): - return buffer(sval) - # py3k. 
- return memoryview(sval.encode("latin1")) - - -# Sometimes we want to pass an object that exposes its memory -def ob2memory(ob): - if sys.version_info < (3, 0): - return buffer(ob) - # py3k. - return memoryview(ob) - - -## -## unittest related stuff -## - - -# This is a specialized TestCase adaptor which wraps a real test. -class LeakTestCase(unittest.TestCase): - """An 'adaptor' which takes another test. In debug builds we execute the - test once to remove one-off side-effects, then capture the total - reference count, then execute the test a few times. If the total - refcount at the end is greater than we first captured, we have a leak! - - In release builds the test is executed just once, as normal. - - Generally used automatically by the test runner - you can safely - ignore this. - """ - - def __init__(self, real_test): - unittest.TestCase.__init__(self) - self.real_test = real_test - self.num_test_cases = 1 - self.num_leak_iters = 2 # seems to be enough! - if hasattr(sys, "gettotalrefcount"): - self.num_test_cases = self.num_test_cases + self.num_leak_iters - - def countTestCases(self): - return self.num_test_cases - - def __call__(self, result=None): - # For the COM suite's sake, always ensure we don't leak - # gateways/interfaces - from pythoncom import _GetGatewayCount, _GetInterfaceCount - - gc.collect() - ni = _GetInterfaceCount() - ng = _GetGatewayCount() - self.real_test(result) - # Failed - no point checking anything else - if result.shouldStop or not result.wasSuccessful(): - return - self._do_leak_tests(result) - gc.collect() - lost_i = _GetInterfaceCount() - ni - lost_g = _GetGatewayCount() - ng - if lost_i or lost_g: - msg = "%d interface objects and %d gateway objects leaked" % ( - lost_i, - lost_g, - ) - exc = AssertionError(msg) - result.addFailure(self.real_test, (exc.__class__, exc, None)) - - def runTest(self): - assert 0, "not used" - - def _do_leak_tests(self, result=None): - try: - gtrc = sys.gettotalrefcount - except AttributeError: - 
return # can't do leak tests in this build - # Assume already called once, to prime any caches etc - gc.collect() - trc = gtrc() - for i in range(self.num_leak_iters): - self.real_test(result) - if result.shouldStop: - break - del i # created after we remembered the refcount! - # int division here means one or 2 stray references won't force - # failure, but one per loop - gc.collect() - lost = (gtrc() - trc) // self.num_leak_iters - if lost < 0: - msg = "LeakTest: %s appeared to gain %d references!!" % ( - self.real_test, - -lost, - ) - result.addFailure(self.real_test, (AssertionError, msg, None)) - if lost > 0: - msg = "LeakTest: %s lost %d references" % (self.real_test, lost) - exc = AssertionError(msg) - result.addFailure(self.real_test, (exc.__class__, exc, None)) - - -class TestLoader(unittest.TestLoader): - def loadTestsFromTestCase(self, testCaseClass): - """Return a suite of all tests cases contained in testCaseClass""" - leak_tests = [] - for name in self.getTestCaseNames(testCaseClass): - real_test = testCaseClass(name) - leak_test = self._getTestWrapper(real_test) - leak_tests.append(leak_test) - return self.suiteClass(leak_tests) - - def fixupTestsForLeakTests(self, test): - if isinstance(test, unittest.TestSuite): - test._tests = [self.fixupTestsForLeakTests(t) for t in test._tests] - return test - else: - # just a normal test case. - return self._getTestWrapper(test) - - def _getTestWrapper(self, test): - # one or 2 tests in the COM test suite set this... 
- no_leak_tests = getattr(test, "no_leak_tests", False) - if no_leak_tests: - print("Test says it doesn't want leak tests!") - return test - return LeakTestCase(test) - - def loadTestsFromModule(self, mod): - if hasattr(mod, "suite"): - tests = mod.suite() - else: - tests = unittest.TestLoader.loadTestsFromModule(self, mod) - return self.fixupTestsForLeakTests(tests) - - def loadTestsFromName(self, name, module=None): - test = unittest.TestLoader.loadTestsFromName(self, name, module) - if isinstance(test, unittest.TestSuite): - pass # hmmm? print "Don't wrap suites yet!", test._tests - elif isinstance(test, unittest.TestCase): - test = self._getTestWrapper(test) - else: - print("XXX - what is", test) - return test - - -# Lots of classes necessary to support one simple feature: we want a 3rd -# test result state - "SKIPPED" - to indicate that the test wasn't able -# to be executed for various reasons. Inspired by bzr's tests, but it -# has other concepts, such as "Expected Failure", which we don't bother -# with. - -# win32 error codes that probably mean we need to be elevated (ie, if we -# aren't elevated, we treat these error codes as 'skipped') -non_admin_error_codes = [ - winerror.ERROR_ACCESS_DENIED, - winerror.ERROR_PRIVILEGE_NOT_HELD, -] - -_is_admin = None - - -def check_is_admin(): - global _is_admin - if _is_admin is None: - import pythoncom - from win32com.shell.shell import IsUserAnAdmin - - try: - _is_admin = IsUserAnAdmin() - except pythoncom.com_error as exc: - if exc.hresult != winerror.E_NOTIMPL: - raise - # not impl on this platform - must be old - assume is admin - _is_admin = True - return _is_admin - - -# Find a test "fixture" (eg, binary test file) expected to be very close to -# the test being run. -# If the tests are being run from the "installed" version, then these fixtures -# probably don't exist - the test is "skipped". -# But it's fatal if we think we might be running from a pywin32 source tree. 
-def find_test_fixture(basename, extra_dir="."): - # look for the test file in various places - candidates = [ - os.path.dirname(sys.argv[0]), - extra_dir, - ".", - ] - for candidate in candidates: - fname = os.path.join(candidate, basename) - if os.path.isfile(fname): - return fname - else: - # Can't find it - see if this is expected or not. - # This module is typically always in the installed dir, so use argv[0] - this_file = os.path.normcase(os.path.abspath(sys.argv[0])) - dirs_to_check = site.getsitepackages()[:] - if site.USER_SITE: - dirs_to_check.append(site.USER_SITE) - - for d in dirs_to_check: - d = os.path.normcase(d) - if os.path.commonprefix([this_file, d]) == d: - # looks like we are in an installed Python, so skip the text. - raise TestSkipped(f"Can't find test fixture '{fname}'") - # Looks like we are running from source, so this is fatal. - raise RuntimeError(f"Can't find test fixture '{fname}'") - - -# If this exception is raised by a test, the test is reported as a 'skip' -class TestSkipped(Exception): - pass - - -# This appears to have been "upgraded" to non-private in 3.11 -try: - TextTestResult = unittest._TextTestResult -except AttributeError: - TextTestResult = unittest.TextTestResult - - -# The 'TestResult' subclass that records the failures and has the special -# handling for the TestSkipped exception. -class TestResult(TextTestResult): - def __init__(self, *args, **kw): - super(TestResult, self).__init__(*args, **kw) - self.skips = {} # count of skips for each reason. - - def addError(self, test, err): - """Called when an error has occurred. 'err' is a tuple of values as - returned by sys.exc_info(). - """ - # translate a couple of 'well-known' exceptions into 'skipped' - import pywintypes - - exc_val = err[1] - # translate ERROR_ACCESS_DENIED for non-admin users to be skipped. - # (access denied errors for an admin user aren't expected.) 
- if ( - isinstance(exc_val, pywintypes.error) - and exc_val.winerror in non_admin_error_codes - and not check_is_admin() - ): - exc_val = TestSkipped(exc_val) - # and COM errors due to objects not being registered (the com test - # suite will attempt to catch this and handle it itself if the user - # is admin) - elif isinstance(exc_val, pywintypes.com_error) and exc_val.hresult in [ - winerror.CO_E_CLASSSTRING, - winerror.REGDB_E_CLASSNOTREG, - winerror.TYPE_E_LIBNOTREGISTERED, - ]: - exc_val = TestSkipped(exc_val) - # NotImplemented generally means the platform doesn't support the - # functionality. - elif isinstance(exc_val, NotImplementedError): - exc_val = TestSkipped(NotImplementedError) - - if isinstance(exc_val, TestSkipped): - reason = exc_val.args[0] - # if the reason itself is another exception, get its args. - try: - reason = tuple(reason.args) - except (AttributeError, TypeError): - pass - self.skips.setdefault(reason, 0) - self.skips[reason] += 1 - if self.showAll: - self.stream.writeln("SKIP (%s)" % (reason,)) - elif self.dots: - self.stream.write("S") - self.stream.flush() - return - super(TestResult, self).addError(test, err) - - def printErrors(self): - super(TestResult, self).printErrors() - for reason, num_skipped in self.skips.items(): - self.stream.writeln("SKIPPED: %d tests - %s" % (num_skipped, reason)) - - -# TestRunner subclass necessary just to get our TestResult hooked up. 
-class TestRunner(unittest.TextTestRunner): - def _makeResult(self): - return TestResult(self.stream, self.descriptions, self.verbosity) - - -# TestProgream subclass necessary just to get our TestRunner hooked up, -# which is necessary to get our TestResult hooked up *sob* -class TestProgram(unittest.TestProgram): - def runTests(self): - # clobber existing runner - *sob* - it shouldn't be this hard - self.testRunner = TestRunner(verbosity=self.verbosity) - unittest.TestProgram.runTests(self) - - -# A convenient entry-point - if used, 'SKIPPED' exceptions will be supressed. -def testmain(*args, **kw): - new_kw = kw.copy() - if "testLoader" not in new_kw: - new_kw["testLoader"] = TestLoader() - program_class = new_kw.get("testProgram", TestProgram) - program_class(*args, **new_kw) diff --git a/lib/win32/lib/pywintypes.py b/lib/win32/lib/pywintypes.py deleted file mode 100644 index 115c4f82..00000000 --- a/lib/win32/lib/pywintypes.py +++ /dev/null @@ -1,126 +0,0 @@ -# Magic utility that "redirects" to pywintypesxx.dll -import importlib.machinery -import importlib.util -import os -import sys - - -def __import_pywin32_system_module__(modname, globs): - # This has been through a number of iterations. The problem: how to - # locate pywintypesXX.dll when it may be in a number of places, and how - # to avoid ever loading it twice. This problem is compounded by the - # fact that the "right" way to do this requires win32api, but this - # itself requires pywintypesXX. - # And the killer problem is that someone may have done 'import win32api' - # before this code is called. In that case Windows will have already - # loaded pywintypesXX as part of loading win32api - but by the time - # we get here, we may locate a different one. This appears to work, but - # then starts raising bizarre TypeErrors complaining that something - # is not a pywintypes type when it clearly is! 
- - # So in what we hope is the last major iteration of this, we now - # rely on a _win32sysloader module, implemented in C but not relying - # on pywintypesXX.dll. It then can check if the DLL we are looking for - # lib is already loaded. - # See if this is a debug build. - suffix = "_d" if "_d.pyd" in importlib.machinery.EXTENSION_SUFFIXES else "" - filename = "%s%d%d%s.dll" % ( - modname, - sys.version_info[0], - sys.version_info[1], - suffix, - ) - if hasattr(sys, "frozen"): - # If we are running from a frozen program (py2exe, McMillan, freeze) - # then we try and load the DLL from our sys.path - # XXX - This path may also benefit from _win32sysloader? However, - # MarkH has never seen the DLL load problem with py2exe programs... - for look in sys.path: - # If the sys.path entry is a (presumably) .zip file, use the - # directory - if os.path.isfile(look): - look = os.path.dirname(look) - found = os.path.join(look, filename) - if os.path.isfile(found): - break - else: - raise ImportError( - "Module '%s' isn't in frozen sys.path %s" % (modname, sys.path) - ) - else: - # First see if it already in our process - if so, we must use that. - import _win32sysloader - - found = _win32sysloader.GetModuleFilename(filename) - if found is None: - # We ask Windows to load it next. This is in an attempt to - # get the exact same module loaded should pywintypes be imported - # first (which is how we are here) or if, eg, win32api was imported - # first thereby implicitly loading the DLL. - - # Sadly though, it doesn't quite work - if pywintypesxx.dll - # is in system32 *and* the executable's directory, on XP SP2, an - # import of win32api will cause Windows to load pywintypes - # from system32, where LoadLibrary for that name will - # load the one in the exe's dir. - # That shouldn't really matter though, so long as we only ever - # get one loaded. 
- found = _win32sysloader.LoadModule(filename) - if found is None: - # Windows can't find it - which although isn't relevent here, - # means that we *must* be the first win32 import, as an attempt - # to import win32api etc would fail when Windows attempts to - # locate the DLL. - # This is most likely to happen for "non-admin" installs, where - # we can't put the files anywhere else on the global path. - - # If there is a version in our Python directory, use that - if os.path.isfile(os.path.join(sys.prefix, filename)): - found = os.path.join(sys.prefix, filename) - if found is None: - # Not in the Python directory? Maybe we were installed via - # easy_install... - if os.path.isfile(os.path.join(os.path.dirname(__file__), filename)): - found = os.path.join(os.path.dirname(__file__), filename) - - # There are 2 site-packages directories - one "global" and one "user". - # We could be in either, or both (but with different versions!). Factors include - # virtualenvs, post-install script being run or not, `setup.py install` flags, etc. - - # In a worst-case, it means, say 'python -c "import win32api"' - # will not work but 'python -c "import pywintypes, win32api"' will, - # but it's better than nothing. - - # We use the same logic as pywin32_bootstrap to find potential location for the dll - # Simply import pywin32_system32 and look in the paths in pywin32_system32.__path__ - - if found is None: - import pywin32_system32 - - for path in pywin32_system32.__path__: - maybe = os.path.join(path, filename) - if os.path.isfile(maybe): - found = maybe - break - - if found is None: - # give up in disgust. - raise ImportError("No system module '%s' (%s)" % (modname, filename)) - # After importing the module, sys.modules is updated to the DLL we just - # loaded - which isn't what we want. So we update sys.modules to refer to - # this module, and update our globals from it. - old_mod = sys.modules[modname] - # Load the DLL. 
- loader = importlib.machinery.ExtensionFileLoader(modname, found) - spec = importlib.machinery.ModuleSpec(name=modname, loader=loader, origin=found) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - - # Check the sys.modules[] behaviour we describe above is true... - assert sys.modules[modname] is mod - # as above - re-reset to the *old* module object then update globs. - sys.modules[modname] = old_mod - globs.update(mod.__dict__) - - -__import_pywin32_system_module__("pywintypes", globals()) diff --git a/lib/win32/lib/rasutil.py b/lib/win32/lib/rasutil.py deleted file mode 100644 index fb71b4fc..00000000 --- a/lib/win32/lib/rasutil.py +++ /dev/null @@ -1,40 +0,0 @@ -import win32ras - -stateStrings = { - win32ras.RASCS_OpenPort: "OpenPort", - win32ras.RASCS_PortOpened: "PortOpened", - win32ras.RASCS_ConnectDevice: "ConnectDevice", - win32ras.RASCS_DeviceConnected: "DeviceConnected", - win32ras.RASCS_AllDevicesConnected: "AllDevicesConnected", - win32ras.RASCS_Authenticate: "Authenticate", - win32ras.RASCS_AuthNotify: "AuthNotify", - win32ras.RASCS_AuthRetry: "AuthRetry", - win32ras.RASCS_AuthCallback: "AuthCallback", - win32ras.RASCS_AuthChangePassword: "AuthChangePassword", - win32ras.RASCS_AuthProject: "AuthProject", - win32ras.RASCS_AuthLinkSpeed: "AuthLinkSpeed", - win32ras.RASCS_AuthAck: "AuthAck", - win32ras.RASCS_ReAuthenticate: "ReAuthenticate", - win32ras.RASCS_Authenticated: "Authenticated", - win32ras.RASCS_PrepareForCallback: "PrepareForCallback", - win32ras.RASCS_WaitForModemReset: "WaitForModemReset", - win32ras.RASCS_WaitForCallback: "WaitForCallback", - win32ras.RASCS_Projected: "Projected", - win32ras.RASCS_StartAuthentication: "StartAuthentication", - win32ras.RASCS_CallbackComplete: "CallbackComplete", - win32ras.RASCS_LogonNetwork: "LogonNetwork", - win32ras.RASCS_Interactive: "Interactive", - win32ras.RASCS_RetryAuthentication: "RetryAuthentication", - win32ras.RASCS_CallbackSetByCaller: "CallbackSetByCaller", - 
win32ras.RASCS_PasswordExpired: "PasswordExpired", - win32ras.RASCS_Connected: "Connected", - win32ras.RASCS_Disconnected: "Disconnected", -} - - -def TestCallback(hras, msg, state, error, exterror): - print("Callback called with ", hras, msg, stateStrings[state], error, exterror) - - -def test(rasName="_ Divert Off"): - return win32ras.Dial(None, None, (rasName,), TestCallback) diff --git a/lib/win32/lib/regcheck.py b/lib/win32/lib/regcheck.py deleted file mode 100644 index c07b77b7..00000000 --- a/lib/win32/lib/regcheck.py +++ /dev/null @@ -1,162 +0,0 @@ -# This module is very old and useless in this day and age! It will be -# removed in a few years (ie, 2009 or so...) - -import warnings - -warnings.warn( - "The regcheck module has been pending deprecation since build 210", - category=PendingDeprecationWarning, -) - -import os -import sys - -import regutil -import win32api -import win32con - - -def CheckRegisteredExe(exename): - try: - os.stat( - win32api.RegQueryValue( - regutil.GetRootKey(), regutil.GetAppPathsKey() + "\\" + exename - ) - ) - # except SystemError: - except (os.error, win32api.error): - print("Registration of %s - Not registered correctly" % exename) - - -def CheckPathString(pathString): - for path in pathString.split(";"): - if not os.path.isdir(path): - return "'%s' is not a valid directory!" 
% path - return None - - -def CheckPythonPaths(verbose): - if verbose: - print("Python Paths:") - # Check the core path - if verbose: - print("\tCore Path:", end=" ") - try: - appPath = win32api.RegQueryValue( - regutil.GetRootKey(), regutil.BuildDefaultPythonKey() + "\\PythonPath" - ) - except win32api.error as exc: - print("** does not exist - ", exc.strerror) - problem = CheckPathString(appPath) - if problem: - print(problem) - else: - if verbose: - print(appPath) - - key = win32api.RegOpenKey( - regutil.GetRootKey(), - regutil.BuildDefaultPythonKey() + "\\PythonPath", - 0, - win32con.KEY_READ, - ) - try: - keyNo = 0 - while 1: - try: - appName = win32api.RegEnumKey(key, keyNo) - appPath = win32api.RegQueryValue(key, appName) - if verbose: - print("\t" + appName + ":", end=" ") - if appPath: - problem = CheckPathString(appPath) - if problem: - print(problem) - else: - if verbose: - print(appPath) - else: - if verbose: - print("(empty)") - keyNo = keyNo + 1 - except win32api.error: - break - finally: - win32api.RegCloseKey(key) - - -def CheckHelpFiles(verbose): - if verbose: - print("Help Files:") - try: - key = win32api.RegOpenKey( - regutil.GetRootKey(), - regutil.BuildDefaultPythonKey() + "\\Help", - 0, - win32con.KEY_READ, - ) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - return - - try: - keyNo = 0 - while 1: - try: - helpDesc = win32api.RegEnumKey(key, keyNo) - helpFile = win32api.RegQueryValue(key, helpDesc) - if verbose: - print("\t" + helpDesc + ":", end=" ") - # query the os section. - try: - os.stat(helpFile) - if verbose: - print(helpFile) - except os.error: - print("** Help file %s does not exist" % helpFile) - keyNo = keyNo + 1 - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_NO_MORE_ITEMS: - raise - break - finally: - win32api.RegCloseKey(key) - - -def CheckRegisteredModules(verbose): - # Check out all registered modules. 
- k = regutil.BuildDefaultPythonKey() + "\\Modules" - try: - keyhandle = win32api.RegOpenKey(regutil.GetRootKey(), k) - print("WARNING: 'Modules' registry entry is deprectated and evil!") - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - return - - -def CheckRegistry(verbose=0): - # check the registered modules - if verbose and "pythonpath" in os.environ: - print("Warning - PythonPath in environment - please check it!") - # Check out all paths on sys.path - - CheckPythonPaths(verbose) - CheckHelpFiles(verbose) - CheckRegisteredModules(verbose) - CheckRegisteredExe("Python.exe") - - -if __name__ == "__main__": - if len(sys.argv) > 1 and sys.argv[1] == "-q": - verbose = 0 - else: - verbose = 1 - CheckRegistry(verbose) diff --git a/lib/win32/lib/regutil.py b/lib/win32/lib/regutil.py deleted file mode 100644 index 1abdd551..00000000 --- a/lib/win32/lib/regutil.py +++ /dev/null @@ -1,397 +0,0 @@ -# Some registry helpers. -import os -import sys - -import win32api -import win32con - -error = "Registry utility error" - -# A .py file has a CLSID associated with it (why? - dunno!) -CLSIDPyFile = "{b51df050-06ae-11cf-ad3b-524153480001}" - -RegistryIDPyFile = "Python.File" # The registry "file type" of a .py file -RegistryIDPycFile = "Python.CompiledFile" # The registry "file type" of a .pyc file - - -def BuildDefaultPythonKey(): - """Builds a string containing the path to the current registry key. - - The Python registry key contains the Python version. This function - uses the version of the DLL used by the current process to get the - registry key currently in use. 
- """ - return "Software\\Python\\PythonCore\\" + sys.winver - - -def GetRootKey(): - """Retrieves the Registry root in use by Python.""" - keyname = BuildDefaultPythonKey() - try: - k = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, keyname) - k.close() - return win32con.HKEY_CURRENT_USER - except win32api.error: - return win32con.HKEY_LOCAL_MACHINE - - -def GetRegistryDefaultValue(subkey, rootkey=None): - """A helper to return the default value for a key in the registry.""" - if rootkey is None: - rootkey = GetRootKey() - return win32api.RegQueryValue(rootkey, subkey) - - -def SetRegistryDefaultValue(subKey, value, rootkey=None): - """A helper to set the default value for a key in the registry""" - if rootkey is None: - rootkey = GetRootKey() - if type(value) == str: - typeId = win32con.REG_SZ - elif type(value) == int: - typeId = win32con.REG_DWORD - else: - raise TypeError("Value must be string or integer - was passed " + repr(value)) - - win32api.RegSetValue(rootkey, subKey, typeId, value) - - -def GetAppPathsKey(): - return "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths" - - -def RegisterPythonExe(exeFullPath, exeAlias=None, exeAppPath=None): - """Register a .exe file that uses Python. - - Registers the .exe with the OS. This allows the specified .exe to - be run from the command-line or start button without using the full path, - and also to setup application specific path (ie, os.environ['PATH']). - - Currently the exeAppPath is not supported, so this function is general - purpose, and not specific to Python at all. Later, exeAppPath may provide - a reasonable default that is used. - - exeFullPath -- The full path to the .exe - exeAlias = None -- An alias for the exe - if none, the base portion - of the filename is used. - exeAppPath -- Not supported. - """ - # Note - Dont work on win32s (but we dont care anymore!) 
- if exeAppPath: - raise error("Do not support exeAppPath argument currently") - if exeAlias is None: - exeAlias = os.path.basename(exeFullPath) - win32api.RegSetValue( - GetRootKey(), GetAppPathsKey() + "\\" + exeAlias, win32con.REG_SZ, exeFullPath - ) - - -def GetRegisteredExe(exeAlias): - """Get a registered .exe""" - return win32api.RegQueryValue(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias) - - -def UnregisterPythonExe(exeAlias): - """Unregister a .exe file that uses Python.""" - try: - win32api.RegDeleteKey(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - return - - -def RegisterNamedPath(name, path): - """Register a named path - ie, a named PythonPath entry.""" - keyStr = BuildDefaultPythonKey() + "\\PythonPath" - if name: - keyStr = keyStr + "\\" + name - win32api.RegSetValue(GetRootKey(), keyStr, win32con.REG_SZ, path) - - -def UnregisterNamedPath(name): - """Unregister a named path - ie, a named PythonPath entry.""" - keyStr = BuildDefaultPythonKey() + "\\PythonPath\\" + name - try: - win32api.RegDeleteKey(GetRootKey(), keyStr) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - return - - -def GetRegisteredNamedPath(name): - """Get a registered named path, or None if it doesnt exist.""" - keyStr = BuildDefaultPythonKey() + "\\PythonPath" - if name: - keyStr = keyStr + "\\" + name - try: - return win32api.RegQueryValue(GetRootKey(), keyStr) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - return None - - -def RegisterModule(modName, modPath): - """Register an explicit module in the registry. This forces the Python import - mechanism to locate this module directly, without a sys.path search. Thus - a registered module need not appear in sys.path at all. - - modName -- The name of the module, as used by import. 
- modPath -- The full path and file name of the module. - """ - try: - import os - - os.stat(modPath) - except os.error: - print("Warning: Registering non-existant module %s" % modPath) - win32api.RegSetValue( - GetRootKey(), - BuildDefaultPythonKey() + "\\Modules\\%s" % modName, - win32con.REG_SZ, - modPath, - ) - - -def UnregisterModule(modName): - """Unregister an explicit module in the registry. - - modName -- The name of the module, as used by import. - """ - try: - win32api.RegDeleteKey( - GetRootKey(), BuildDefaultPythonKey() + "\\Modules\\%s" % modName - ) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - - -def GetRegisteredHelpFile(helpDesc): - """Given a description, return the registered entry.""" - try: - return GetRegistryDefaultValue(BuildDefaultPythonKey() + "\\Help\\" + helpDesc) - except win32api.error: - try: - return GetRegistryDefaultValue( - BuildDefaultPythonKey() + "\\Help\\" + helpDesc, - win32con.HKEY_CURRENT_USER, - ) - except win32api.error: - pass - return None - - -def RegisterHelpFile(helpFile, helpPath, helpDesc=None, bCheckFile=1): - """Register a help file in the registry. - - Note that this used to support writing to the Windows Help - key, however this is no longer done, as it seems to be incompatible. - - helpFile -- the base name of the help file. - helpPath -- the path to the help file - helpDesc -- A description for the help file. If None, the helpFile param is used. - bCheckFile -- A flag indicating if the file existence should be checked. - """ - if helpDesc is None: - helpDesc = helpFile - fullHelpFile = os.path.join(helpPath, helpFile) - try: - if bCheckFile: - os.stat(fullHelpFile) - except os.error: - raise ValueError("Help file does not exist") - # Now register with Python itself. 
- win32api.RegSetValue( - GetRootKey(), - BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc, - win32con.REG_SZ, - fullHelpFile, - ) - - -def UnregisterHelpFile(helpFile, helpDesc=None): - """Unregister a help file in the registry. - - helpFile -- the base name of the help file. - helpDesc -- A description for the help file. If None, the helpFile param is used. - """ - key = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, - "Software\\Microsoft\\Windows\\Help", - 0, - win32con.KEY_ALL_ACCESS, - ) - try: - try: - win32api.RegDeleteValue(key, helpFile) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - finally: - win32api.RegCloseKey(key) - - # Now de-register with Python itself. - if helpDesc is None: - helpDesc = helpFile - try: - win32api.RegDeleteKey( - GetRootKey(), BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc - ) - except win32api.error as exc: - import winerror - - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - - -def RegisterCoreDLL(coredllName=None): - """Registers the core DLL in the registry. - - If no params are passed, the name of the Python DLL used in - the current process is used and registered. - """ - if coredllName is None: - coredllName = win32api.GetModuleFileName(sys.dllhandle) - # must exist! - else: - try: - os.stat(coredllName) - except os.error: - print("Warning: Registering non-existant core DLL %s" % coredllName) - - hKey = win32api.RegCreateKey(GetRootKey(), BuildDefaultPythonKey()) - try: - win32api.RegSetValue(hKey, "Dll", win32con.REG_SZ, coredllName) - finally: - win32api.RegCloseKey(hKey) - # Lastly, setup the current version to point to me. - win32api.RegSetValue( - GetRootKey(), - "Software\\Python\\PythonCore\\CurrentVersion", - win32con.REG_SZ, - sys.winver, - ) - - -def RegisterFileExtensions(defPyIcon, defPycIcon, runCommand): - """Register the core Python file extensions. 
- - defPyIcon -- The default icon to use for .py files, in 'fname,offset' format. - defPycIcon -- The default icon to use for .pyc files, in 'fname,offset' format. - runCommand -- The command line to use for running .py files - """ - # Register the file extensions. - pythonFileId = RegistryIDPyFile - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, ".py", win32con.REG_SZ, pythonFileId - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, pythonFileId, win32con.REG_SZ, "Python File" - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - "%s\\CLSID" % pythonFileId, - win32con.REG_SZ, - CLSIDPyFile, - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - "%s\\DefaultIcon" % pythonFileId, - win32con.REG_SZ, - defPyIcon, - ) - base = "%s\\Shell" % RegistryIDPyFile - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, base + "\\Open", win32con.REG_SZ, "Run" - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - base + "\\Open\\Command", - win32con.REG_SZ, - runCommand, - ) - - # Register the .PYC. - pythonFileId = RegistryIDPycFile - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, ".pyc", win32con.REG_SZ, pythonFileId - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - pythonFileId, - win32con.REG_SZ, - "Compiled Python File", - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - "%s\\DefaultIcon" % pythonFileId, - win32con.REG_SZ, - defPycIcon, - ) - base = "%s\\Shell" % pythonFileId - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, base + "\\Open", win32con.REG_SZ, "Run" - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - base + "\\Open\\Command", - win32con.REG_SZ, - runCommand, - ) - - -def RegisterShellCommand(shellCommand, exeCommand, shellUserCommand=None): - # Last param for "Open" - for a .py file to be executed by the command line - # or shell execute (eg, just entering "foo.py"), the Command must be "Open", - # but you may associate a different name for the right-click menu. 
- # In our case, normally we have "Open=Run" - base = "%s\\Shell" % RegistryIDPyFile - if shellUserCommand: - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - base + "\\%s" % (shellCommand), - win32con.REG_SZ, - shellUserCommand, - ) - - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - base + "\\%s\\Command" % (shellCommand), - win32con.REG_SZ, - exeCommand, - ) - - -def RegisterDDECommand(shellCommand, ddeApp, ddeTopic, ddeCommand): - base = "%s\\Shell" % RegistryIDPyFile - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - base + "\\%s\\ddeexec" % (shellCommand), - win32con.REG_SZ, - ddeCommand, - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - base + "\\%s\\ddeexec\\Application" % (shellCommand), - win32con.REG_SZ, - ddeApp, - ) - win32api.RegSetValue( - win32con.HKEY_CLASSES_ROOT, - base + "\\%s\\ddeexec\\Topic" % (shellCommand), - win32con.REG_SZ, - ddeTopic, - ) diff --git a/lib/win32/lib/sspi.py b/lib/win32/lib/sspi.py deleted file mode 100644 index 22c0e4a0..00000000 --- a/lib/win32/lib/sspi.py +++ /dev/null @@ -1,412 +0,0 @@ -""" -Helper classes for SSPI authentication via the win32security module. - -SSPI authentication involves a token-exchange "dance", the exact details -of which depends on the authentication provider used. There are also -a number of complex flags and constants that need to be used - in most -cases, there are reasonable defaults. - -These classes attempt to hide these details from you until you really need -to know. They are not designed to handle all cases, just the common ones. -If you need finer control than offered here, just use the win32security -functions directly. -""" -# Based on Roger Upole's sspi demos. 
-# $Id$ -import sspicon -import win32security - -error = win32security.error - - -class _BaseAuth(object): - def __init__(self): - self.reset() - - def reset(self): - """Reset everything to an unauthorized state""" - self.ctxt = None - self.authenticated = False - self.initiator_name = None - self.service_name = None - - # The next seq_num for an encrypt/sign operation - self.next_seq_num = 0 - - def _get_next_seq_num(self): - """Get the next sequence number for a transmission. Default - implementation is to increment a counter - """ - ret = self.next_seq_num - self.next_seq_num = self.next_seq_num + 1 - return ret - - def encrypt(self, data): - """Encrypt a string, returning a tuple of (encrypted_data, trailer). - These can be passed to decrypt to get back the original string. - """ - pkg_size_info = self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) - trailersize = pkg_size_info["SecurityTrailer"] - - encbuf = win32security.PySecBufferDescType() - encbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) - encbuf.append( - win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN) - ) - encbuf[0].Buffer = data - self.ctxt.EncryptMessage(0, encbuf, self._get_next_seq_num()) - return encbuf[0].Buffer, encbuf[1].Buffer - - def decrypt(self, data, trailer): - """Decrypt a previously encrypted string, returning the orignal data""" - encbuf = win32security.PySecBufferDescType() - encbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) - encbuf.append( - win32security.PySecBufferType(len(trailer), sspicon.SECBUFFER_TOKEN) - ) - encbuf[0].Buffer = data - encbuf[1].Buffer = trailer - self.ctxt.DecryptMessage(encbuf, self._get_next_seq_num()) - return encbuf[0].Buffer - - def sign(self, data): - """sign a string suitable for transmission, returning the signature. - Passing the data and signature to verify will determine if the data - is unchanged. 
- """ - pkg_size_info = self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) - sigsize = pkg_size_info["MaxSignature"] - sigbuf = win32security.PySecBufferDescType() - sigbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) - sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN)) - sigbuf[0].Buffer = data - - self.ctxt.MakeSignature(0, sigbuf, self._get_next_seq_num()) - return sigbuf[1].Buffer - - def verify(self, data, sig): - """Verifies data and its signature. If verification fails, an sspi.error - will be raised. - """ - sigbuf = win32security.PySecBufferDescType() - sigbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA)) - sigbuf.append(win32security.PySecBufferType(len(sig), sspicon.SECBUFFER_TOKEN)) - - sigbuf[0].Buffer = data - sigbuf[1].Buffer = sig - self.ctxt.VerifySignature(sigbuf, self._get_next_seq_num()) - - def unwrap(self, token): - """ - GSSAPI's unwrap with SSPI. - https://docs.microsoft.com/en-us/windows/win32/secauthn/sspi-kerberos-interoperability-with-gssapi - - Usable mainly with Kerberos SSPI package, but this is not enforced. - - Return the clear text, and a boolean that is True if the token was encrypted. - """ - buffer = win32security.PySecBufferDescType() - # This buffer will contain a "stream", which is the token coming from the other side - buffer.append( - win32security.PySecBufferType(len(token), sspicon.SECBUFFER_STREAM) - ) - buffer[0].Buffer = token - - # This buffer will receive the clear, or just unwrapped text if no encryption was used. - # Will be resized by the lib. - buffer.append(win32security.PySecBufferType(0, sspicon.SECBUFFER_DATA)) - - pfQOP = self.ctxt.DecryptMessage(buffer, self._get_next_seq_num()) - - r = buffer[1].Buffer - return r, not (pfQOP == sspicon.SECQOP_WRAP_NO_ENCRYPT) - - def wrap(self, msg, encrypt=False): - """ - GSSAPI's wrap with SSPI. 
- https://docs.microsoft.com/en-us/windows/win32/secauthn/sspi-kerberos-interoperability-with-gssapi - - Usable mainly with Kerberos SSPI package, but this is not enforced. - - Wrap a message to be sent to the other side. Encrypted if encrypt is True. - """ - - size_info = self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES) - trailer_size = size_info["SecurityTrailer"] - block_size = size_info["BlockSize"] - - buffer = win32security.PySecBufferDescType() - - # This buffer will contain unencrypted data to wrap, and maybe encrypt. - buffer.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) - buffer[0].Buffer = msg - - # Will receive the token that forms the beginning of the msg - buffer.append( - win32security.PySecBufferType(trailer_size, sspicon.SECBUFFER_TOKEN) - ) - - # The trailer is needed in case of block encryption - buffer.append( - win32security.PySecBufferType(block_size, sspicon.SECBUFFER_PADDING) - ) - - fQOP = 0 if encrypt else sspicon.SECQOP_WRAP_NO_ENCRYPT - self.ctxt.EncryptMessage(fQOP, buffer, self._get_next_seq_num()) - - # Sec token, then data, then padding - r = buffer[1].Buffer + buffer[0].Buffer + buffer[2].Buffer - return r - - def _amend_ctx_name(self): - """Adds initiator and service names in the security context for ease of use""" - if not self.authenticated: - raise ValueError("Sec context is not completely authenticated") - - try: - names = self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_NATIVE_NAMES) - except error: - # The SSP doesn't provide these attributes. - pass - else: - self.initiator_name, self.service_name = names - - -class ClientAuth(_BaseAuth): - """Manages the client side of an SSPI authentication handshake""" - - def __init__( - self, - pkg_name, # Name of the package to used. - client_name=None, # User for whom credentials are used. - auth_info=None, # or a tuple of (username, domain, password) - targetspn=None, # Target security context provider name. 
- scflags=None, # security context flags - datarep=sspicon.SECURITY_NETWORK_DREP, - ): - if scflags is None: - scflags = ( - sspicon.ISC_REQ_INTEGRITY - | sspicon.ISC_REQ_SEQUENCE_DETECT - | sspicon.ISC_REQ_REPLAY_DETECT - | sspicon.ISC_REQ_CONFIDENTIALITY - ) - self.scflags = scflags - self.datarep = datarep - self.targetspn = targetspn - self.pkg_info = win32security.QuerySecurityPackageInfo(pkg_name) - ( - self.credentials, - self.credentials_expiry, - ) = win32security.AcquireCredentialsHandle( - client_name, - self.pkg_info["Name"], - sspicon.SECPKG_CRED_OUTBOUND, - None, - auth_info, - ) - _BaseAuth.__init__(self) - - def authorize(self, sec_buffer_in): - """Perform *one* step of the client authentication process. Pass None for the first round""" - if ( - sec_buffer_in is not None - and type(sec_buffer_in) != win32security.PySecBufferDescType - ): - # User passed us the raw data - wrap it into a SecBufferDesc - sec_buffer_new = win32security.PySecBufferDescType() - tokenbuf = win32security.PySecBufferType( - self.pkg_info["MaxToken"], sspicon.SECBUFFER_TOKEN - ) - tokenbuf.Buffer = sec_buffer_in - sec_buffer_new.append(tokenbuf) - sec_buffer_in = sec_buffer_new - sec_buffer_out = win32security.PySecBufferDescType() - tokenbuf = win32security.PySecBufferType( - self.pkg_info["MaxToken"], sspicon.SECBUFFER_TOKEN - ) - sec_buffer_out.append(tokenbuf) - ## input context handle should be NULL on first call - ctxtin = self.ctxt - if self.ctxt is None: - self.ctxt = win32security.PyCtxtHandleType() - err, attr, exp = win32security.InitializeSecurityContext( - self.credentials, - ctxtin, - self.targetspn, - self.scflags, - self.datarep, - sec_buffer_in, - self.ctxt, - sec_buffer_out, - ) - # Stash these away incase someone needs to know the state from the - # final call. 
- self.ctxt_attr = attr - self.ctxt_expiry = exp - - if err in (sspicon.SEC_I_COMPLETE_NEEDED, sspicon.SEC_I_COMPLETE_AND_CONTINUE): - self.ctxt.CompleteAuthToken(sec_buffer_out) - - self.authenticated = err == 0 - if self.authenticated: - self._amend_ctx_name() - - return err, sec_buffer_out - - -class ServerAuth(_BaseAuth): - """Manages the server side of an SSPI authentication handshake""" - - def __init__( - self, pkg_name, spn=None, scflags=None, datarep=sspicon.SECURITY_NETWORK_DREP - ): - self.spn = spn - self.datarep = datarep - - if scflags is None: - scflags = ( - sspicon.ASC_REQ_INTEGRITY - | sspicon.ASC_REQ_SEQUENCE_DETECT - | sspicon.ASC_REQ_REPLAY_DETECT - | sspicon.ASC_REQ_CONFIDENTIALITY - ) - # Should we default to sspicon.KerbAddExtraCredentialsMessage - # if pkg_name=='Kerberos'? - self.scflags = scflags - - self.pkg_info = win32security.QuerySecurityPackageInfo(pkg_name) - - ( - self.credentials, - self.credentials_expiry, - ) = win32security.AcquireCredentialsHandle( - spn, self.pkg_info["Name"], sspicon.SECPKG_CRED_INBOUND, None, None - ) - _BaseAuth.__init__(self) - - def authorize(self, sec_buffer_in): - """Perform *one* step of the server authentication process.""" - if ( - sec_buffer_in is not None - and type(sec_buffer_in) != win32security.PySecBufferDescType - ): - # User passed us the raw data - wrap it into a SecBufferDesc - sec_buffer_new = win32security.PySecBufferDescType() - tokenbuf = win32security.PySecBufferType( - self.pkg_info["MaxToken"], sspicon.SECBUFFER_TOKEN - ) - tokenbuf.Buffer = sec_buffer_in - sec_buffer_new.append(tokenbuf) - sec_buffer_in = sec_buffer_new - - sec_buffer_out = win32security.PySecBufferDescType() - tokenbuf = win32security.PySecBufferType( - self.pkg_info["MaxToken"], sspicon.SECBUFFER_TOKEN - ) - sec_buffer_out.append(tokenbuf) - ## input context handle is None initially, then handle returned from last call thereafter - ctxtin = self.ctxt - if self.ctxt is None: - self.ctxt = 
win32security.PyCtxtHandleType() - err, attr, exp = win32security.AcceptSecurityContext( - self.credentials, - ctxtin, - sec_buffer_in, - self.scflags, - self.datarep, - self.ctxt, - sec_buffer_out, - ) - - # Stash these away incase someone needs to know the state from the - # final call. - self.ctxt_attr = attr - self.ctxt_expiry = exp - - if err in (sspicon.SEC_I_COMPLETE_NEEDED, sspicon.SEC_I_COMPLETE_AND_CONTINUE): - self.ctxt.CompleteAuthToken(sec_buffer_out) - - self.authenticated = err == 0 - if self.authenticated: - self._amend_ctx_name() - - return err, sec_buffer_out - - -if __name__ == "__main__": - # This is the security package (the security support provider / the security backend) - # we want to use for this example. - ssp = "Kerberos" # or "NTLM" or "Negotiate" which enable negotiation between - # Kerberos (prefered) and NTLM (if not supported on the other side). - - flags = ( - sspicon.ISC_REQ_MUTUAL_AUTH - | sspicon.ISC_REQ_INTEGRITY # mutual authentication - | sspicon.ISC_REQ_SEQUENCE_DETECT # check for integrity - | sspicon.ISC_REQ_CONFIDENTIALITY # enable out-of-order messages - | sspicon.ISC_REQ_REPLAY_DETECT # request confidentiality # request replay detection - ) - - # Get our identity, mandatory for the Kerberos case *for this example* - # Kerberos cannot be used if we don't tell it the target we want - # to authenticate to. - cred_handle, exp = win32security.AcquireCredentialsHandle( - None, ssp, sspicon.SECPKG_CRED_INBOUND, None, None - ) - cred = cred_handle.QueryCredentialsAttributes(sspicon.SECPKG_CRED_ATTR_NAMES) - print("We are:", cred) - - # Setup the 2 contexts. In real life, only one is needed: the other one is - # created in the process we want to communicate with. 
- sspiclient = ClientAuth(ssp, scflags=flags, targetspn=cred) - sspiserver = ServerAuth(ssp, scflags=flags) - - print( - "SSP : %s (%s)" % (sspiclient.pkg_info["Name"], sspiclient.pkg_info["Comment"]) - ) - - # Perform the authentication dance, each loop exchanging more information - # on the way to completing authentication. - sec_buffer = None - client_step = 0 - server_step = 0 - while not (sspiclient.authenticated) or len(sec_buffer[0].Buffer): - client_step += 1 - err, sec_buffer = sspiclient.authorize(sec_buffer) - print("Client step %s" % client_step) - if sspiserver.authenticated and len(sec_buffer[0].Buffer) == 0: - break - - server_step += 1 - err, sec_buffer = sspiserver.authorize(sec_buffer) - print("Server step %s" % server_step) - - # Authentication process is finished. - print("Initiator name from the service side:", sspiserver.initiator_name) - print("Service name from the client side: ", sspiclient.service_name) - - data = "hello".encode("ascii") # py3k-friendly - - # Simple signature, not compatible with GSSAPI. 
- sig = sspiclient.sign(data) - sspiserver.verify(data, sig) - - # Encryption - encrypted, sig = sspiclient.encrypt(data) - decrypted = sspiserver.decrypt(encrypted, sig) - assert decrypted == data - - # GSSAPI wrapping, no encryption (NTLM always encrypts) - wrapped = sspiclient.wrap(data) - unwrapped, was_encrypted = sspiserver.unwrap(wrapped) - print("encrypted ?", was_encrypted) - assert data == unwrapped - - # GSSAPI wrapping, with encryption - wrapped = sspiserver.wrap(data, encrypt=True) - unwrapped, was_encrypted = sspiclient.unwrap(wrapped) - print("encrypted ?", was_encrypted) - assert data == unwrapped - - print("cool!") diff --git a/lib/win32/lib/sspicon.py b/lib/win32/lib/sspicon.py deleted file mode 100644 index 12f7482d..00000000 --- a/lib/win32/lib/sspicon.py +++ /dev/null @@ -1,477 +0,0 @@ -# Generated by h2py from c:\microsoft sdk\include\sspi.h -ISSP_LEVEL = 32 -ISSP_MODE = 1 - - -def SEC_SUCCESS(Status): - return (Status) >= 0 - - -SECPKG_FLAG_INTEGRITY = 1 -SECPKG_FLAG_PRIVACY = 2 -SECPKG_FLAG_TOKEN_ONLY = 4 -SECPKG_FLAG_DATAGRAM = 8 -SECPKG_FLAG_CONNECTION = 16 -SECPKG_FLAG_MULTI_REQUIRED = 32 -SECPKG_FLAG_CLIENT_ONLY = 64 -SECPKG_FLAG_EXTENDED_ERROR = 128 -SECPKG_FLAG_IMPERSONATION = 256 -SECPKG_FLAG_ACCEPT_WIN32_NAME = 512 -SECPKG_FLAG_STREAM = 1024 -SECPKG_FLAG_NEGOTIABLE = 2048 -SECPKG_FLAG_GSS_COMPATIBLE = 4096 -SECPKG_FLAG_LOGON = 8192 -SECPKG_FLAG_ASCII_BUFFERS = 16384 -SECPKG_FLAG_FRAGMENT = 32768 -SECPKG_FLAG_MUTUAL_AUTH = 65536 -SECPKG_FLAG_DELEGATION = 131072 -SECPKG_FLAG_READONLY_WITH_CHECKSUM = 262144 -SECPKG_ID_NONE = 65535 - -SECBUFFER_VERSION = 0 -SECBUFFER_EMPTY = 0 -SECBUFFER_DATA = 1 -SECBUFFER_TOKEN = 2 -SECBUFFER_PKG_PARAMS = 3 -SECBUFFER_MISSING = 4 -SECBUFFER_EXTRA = 5 -SECBUFFER_STREAM_TRAILER = 6 -SECBUFFER_STREAM_HEADER = 7 -SECBUFFER_NEGOTIATION_INFO = 8 -SECBUFFER_PADDING = 9 -SECBUFFER_STREAM = 10 -SECBUFFER_MECHLIST = 11 -SECBUFFER_MECHLIST_SIGNATURE = 12 -SECBUFFER_TARGET = 13 -SECBUFFER_CHANNEL_BINDINGS = 14 
-SECBUFFER_ATTRMASK = -268435456 -SECBUFFER_READONLY = -2147483648 -SECBUFFER_READONLY_WITH_CHECKSUM = 268435456 -SECBUFFER_RESERVED = 1610612736 - -SECURITY_NATIVE_DREP = 16 -SECURITY_NETWORK_DREP = 0 - -SECPKG_CRED_INBOUND = 1 -SECPKG_CRED_OUTBOUND = 2 -SECPKG_CRED_BOTH = 3 -SECPKG_CRED_DEFAULT = 4 -SECPKG_CRED_RESERVED = -268435456 - -ISC_REQ_DELEGATE = 1 -ISC_REQ_MUTUAL_AUTH = 2 -ISC_REQ_REPLAY_DETECT = 4 -ISC_REQ_SEQUENCE_DETECT = 8 -ISC_REQ_CONFIDENTIALITY = 16 -ISC_REQ_USE_SESSION_KEY = 32 -ISC_REQ_PROMPT_FOR_CREDS = 64 -ISC_REQ_USE_SUPPLIED_CREDS = 128 -ISC_REQ_ALLOCATE_MEMORY = 256 -ISC_REQ_USE_DCE_STYLE = 512 -ISC_REQ_DATAGRAM = 1024 -ISC_REQ_CONNECTION = 2048 -ISC_REQ_CALL_LEVEL = 4096 -ISC_REQ_FRAGMENT_SUPPLIED = 8192 -ISC_REQ_EXTENDED_ERROR = 16384 -ISC_REQ_STREAM = 32768 -ISC_REQ_INTEGRITY = 65536 -ISC_REQ_IDENTIFY = 131072 -ISC_REQ_NULL_SESSION = 262144 -ISC_REQ_MANUAL_CRED_VALIDATION = 524288 -ISC_REQ_RESERVED1 = 1048576 -ISC_REQ_FRAGMENT_TO_FIT = 2097152 -ISC_REQ_HTTP = 0x10000000 -ISC_RET_DELEGATE = 1 -ISC_RET_MUTUAL_AUTH = 2 -ISC_RET_REPLAY_DETECT = 4 -ISC_RET_SEQUENCE_DETECT = 8 -ISC_RET_CONFIDENTIALITY = 16 -ISC_RET_USE_SESSION_KEY = 32 -ISC_RET_USED_COLLECTED_CREDS = 64 -ISC_RET_USED_SUPPLIED_CREDS = 128 -ISC_RET_ALLOCATED_MEMORY = 256 -ISC_RET_USED_DCE_STYLE = 512 -ISC_RET_DATAGRAM = 1024 -ISC_RET_CONNECTION = 2048 -ISC_RET_INTERMEDIATE_RETURN = 4096 -ISC_RET_CALL_LEVEL = 8192 -ISC_RET_EXTENDED_ERROR = 16384 -ISC_RET_STREAM = 32768 -ISC_RET_INTEGRITY = 65536 -ISC_RET_IDENTIFY = 131072 -ISC_RET_NULL_SESSION = 262144 -ISC_RET_MANUAL_CRED_VALIDATION = 524288 -ISC_RET_RESERVED1 = 1048576 -ISC_RET_FRAGMENT_ONLY = 2097152 - -ASC_REQ_DELEGATE = 1 -ASC_REQ_MUTUAL_AUTH = 2 -ASC_REQ_REPLAY_DETECT = 4 -ASC_REQ_SEQUENCE_DETECT = 8 -ASC_REQ_CONFIDENTIALITY = 16 -ASC_REQ_USE_SESSION_KEY = 32 -ASC_REQ_ALLOCATE_MEMORY = 256 -ASC_REQ_USE_DCE_STYLE = 512 -ASC_REQ_DATAGRAM = 1024 -ASC_REQ_CONNECTION = 2048 -ASC_REQ_CALL_LEVEL = 4096 -ASC_REQ_EXTENDED_ERROR = 
32768 -ASC_REQ_STREAM = 65536 -ASC_REQ_INTEGRITY = 131072 -ASC_REQ_LICENSING = 262144 -ASC_REQ_IDENTIFY = 524288 -ASC_REQ_ALLOW_NULL_SESSION = 1048576 -ASC_REQ_ALLOW_NON_USER_LOGONS = 2097152 -ASC_REQ_ALLOW_CONTEXT_REPLAY = 4194304 -ASC_REQ_FRAGMENT_TO_FIT = 8388608 -ASC_REQ_FRAGMENT_SUPPLIED = 8192 -ASC_REQ_NO_TOKEN = 16777216 -ASC_RET_DELEGATE = 1 -ASC_RET_MUTUAL_AUTH = 2 -ASC_RET_REPLAY_DETECT = 4 -ASC_RET_SEQUENCE_DETECT = 8 -ASC_RET_CONFIDENTIALITY = 16 -ASC_RET_USE_SESSION_KEY = 32 -ASC_RET_ALLOCATED_MEMORY = 256 -ASC_RET_USED_DCE_STYLE = 512 -ASC_RET_DATAGRAM = 1024 -ASC_RET_CONNECTION = 2048 -ASC_RET_CALL_LEVEL = 8192 -ASC_RET_THIRD_LEG_FAILED = 16384 -ASC_RET_EXTENDED_ERROR = 32768 -ASC_RET_STREAM = 65536 -ASC_RET_INTEGRITY = 131072 -ASC_RET_LICENSING = 262144 -ASC_RET_IDENTIFY = 524288 -ASC_RET_NULL_SESSION = 1048576 -ASC_RET_ALLOW_NON_USER_LOGONS = 2097152 -ASC_RET_ALLOW_CONTEXT_REPLAY = 4194304 -ASC_RET_FRAGMENT_ONLY = 8388608 - -SECPKG_CRED_ATTR_NAMES = 1 -SECPKG_ATTR_SIZES = 0 -SECPKG_ATTR_NAMES = 1 -SECPKG_ATTR_LIFESPAN = 2 -SECPKG_ATTR_DCE_INFO = 3 -SECPKG_ATTR_STREAM_SIZES = 4 -SECPKG_ATTR_KEY_INFO = 5 -SECPKG_ATTR_AUTHORITY = 6 -SECPKG_ATTR_PROTO_INFO = 7 -SECPKG_ATTR_PASSWORD_EXPIRY = 8 -SECPKG_ATTR_SESSION_KEY = 9 -SECPKG_ATTR_PACKAGE_INFO = 10 -SECPKG_ATTR_USER_FLAGS = 11 -SECPKG_ATTR_NEGOTIATION_INFO = 12 -SECPKG_ATTR_NATIVE_NAMES = 13 -SECPKG_ATTR_FLAGS = 14 -SECPKG_ATTR_USE_VALIDATED = 15 -SECPKG_ATTR_CREDENTIAL_NAME = 16 -SECPKG_ATTR_TARGET_INFORMATION = 17 -SECPKG_ATTR_ACCESS_TOKEN = 18 -SECPKG_ATTR_TARGET = 19 -SECPKG_ATTR_AUTHENTICATION_ID = 20 - -## attributes from schannel.h -SECPKG_ATTR_REMOTE_CERT_CONTEXT = 83 -SECPKG_ATTR_LOCAL_CERT_CONTEXT = 84 -SECPKG_ATTR_ROOT_STORE = 85 -SECPKG_ATTR_SUPPORTED_ALGS = 86 -SECPKG_ATTR_CIPHER_STRENGTHS = 87 -SECPKG_ATTR_SUPPORTED_PROTOCOLS = 88 -SECPKG_ATTR_ISSUER_LIST_EX = 89 -SECPKG_ATTR_CONNECTION_INFO = 90 -SECPKG_ATTR_EAP_KEY_BLOCK = 91 -SECPKG_ATTR_MAPPED_CRED_ATTR = 92 
-SECPKG_ATTR_SESSION_INFO = 93 -SECPKG_ATTR_APP_DATA = 94 - -SECPKG_NEGOTIATION_COMPLETE = 0 -SECPKG_NEGOTIATION_OPTIMISTIC = 1 -SECPKG_NEGOTIATION_IN_PROGRESS = 2 -SECPKG_NEGOTIATION_DIRECT = 3 -SECPKG_NEGOTIATION_TRY_MULTICRED = 4 -SECPKG_CONTEXT_EXPORT_RESET_NEW = 1 -SECPKG_CONTEXT_EXPORT_DELETE_OLD = 2 -SECQOP_WRAP_NO_ENCRYPT = -2147483647 -SECURITY_ENTRYPOINT_ANSIW = "InitSecurityInterfaceW" -SECURITY_ENTRYPOINT_ANSIA = "InitSecurityInterfaceA" -SECURITY_ENTRYPOINT16 = "INITSECURITYINTERFACEA" -SECURITY_ENTRYPOINT = SECURITY_ENTRYPOINT16 -SECURITY_ENTRYPOINT_ANSI = SECURITY_ENTRYPOINT16 -SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION = 1 -SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION_2 = 2 -SASL_OPTION_SEND_SIZE = 1 -SASL_OPTION_RECV_SIZE = 2 -SASL_OPTION_AUTHZ_STRING = 3 -SASL_OPTION_AUTHZ_PROCESSING = 4 -SEC_WINNT_AUTH_IDENTITY_ANSI = 1 -SEC_WINNT_AUTH_IDENTITY_UNICODE = 2 -SEC_WINNT_AUTH_IDENTITY_VERSION = 512 -SEC_WINNT_AUTH_IDENTITY_MARSHALLED = 4 -SEC_WINNT_AUTH_IDENTITY_ONLY = 8 -SECPKG_OPTIONS_TYPE_UNKNOWN = 0 -SECPKG_OPTIONS_TYPE_LSA = 1 -SECPKG_OPTIONS_TYPE_SSPI = 2 -SECPKG_OPTIONS_PERMANENT = 1 - -SEC_E_INSUFFICIENT_MEMORY = -2146893056 -SEC_E_INVALID_HANDLE = -2146893055 -SEC_E_UNSUPPORTED_FUNCTION = -2146893054 -SEC_E_TARGET_UNKNOWN = -2146893053 -SEC_E_INTERNAL_ERROR = -2146893052 -SEC_E_SECPKG_NOT_FOUND = -2146893051 -SEC_E_NOT_OWNER = -2146893050 -SEC_E_CANNOT_INSTALL = -2146893049 -SEC_E_INVALID_TOKEN = -2146893048 -SEC_E_CANNOT_PACK = -2146893047 -SEC_E_QOP_NOT_SUPPORTED = -2146893046 -SEC_E_NO_IMPERSONATION = -2146893045 -SEC_E_LOGON_DENIED = -2146893044 -SEC_E_UNKNOWN_CREDENTIALS = -2146893043 -SEC_E_NO_CREDENTIALS = -2146893042 -SEC_E_MESSAGE_ALTERED = -2146893041 -SEC_E_OUT_OF_SEQUENCE = -2146893040 -SEC_E_NO_AUTHENTICATING_AUTHORITY = -2146893039 -SEC_I_CONTINUE_NEEDED = 590610 -SEC_I_COMPLETE_NEEDED = 590611 -SEC_I_COMPLETE_AND_CONTINUE = 590612 -SEC_I_LOCAL_LOGON = 590613 -SEC_E_BAD_PKGID = -2146893034 -SEC_E_CONTEXT_EXPIRED = -2146893033 
-SEC_I_CONTEXT_EXPIRED = 590615 -SEC_E_INCOMPLETE_MESSAGE = -2146893032 -SEC_E_INCOMPLETE_CREDENTIALS = -2146893024 -SEC_E_BUFFER_TOO_SMALL = -2146893023 -SEC_I_INCOMPLETE_CREDENTIALS = 590624 -SEC_I_RENEGOTIATE = 590625 -SEC_E_WRONG_PRINCIPAL = -2146893022 -SEC_I_NO_LSA_CONTEXT = 590627 -SEC_E_TIME_SKEW = -2146893020 -SEC_E_UNTRUSTED_ROOT = -2146893019 -SEC_E_ILLEGAL_MESSAGE = -2146893018 -SEC_E_CERT_UNKNOWN = -2146893017 -SEC_E_CERT_EXPIRED = -2146893016 -SEC_E_ENCRYPT_FAILURE = -2146893015 -SEC_E_DECRYPT_FAILURE = -2146893008 -SEC_E_ALGORITHM_MISMATCH = -2146893007 -SEC_E_SECURITY_QOS_FAILED = -2146893006 -SEC_E_UNFINISHED_CONTEXT_DELETED = -2146893005 -SEC_E_NO_TGT_REPLY = -2146893004 -SEC_E_NO_IP_ADDRESSES = -2146893003 -SEC_E_WRONG_CREDENTIAL_HANDLE = -2146893002 -SEC_E_CRYPTO_SYSTEM_INVALID = -2146893001 -SEC_E_MAX_REFERRALS_EXCEEDED = -2146893000 -SEC_E_MUST_BE_KDC = -2146892999 -SEC_E_STRONG_CRYPTO_NOT_SUPPORTED = -2146892998 -SEC_E_TOO_MANY_PRINCIPALS = -2146892997 -SEC_E_NO_PA_DATA = -2146892996 -SEC_E_PKINIT_NAME_MISMATCH = -2146892995 -SEC_E_SMARTCARD_LOGON_REQUIRED = -2146892994 -SEC_E_SHUTDOWN_IN_PROGRESS = -2146892993 -SEC_E_KDC_INVALID_REQUEST = -2146892992 -SEC_E_KDC_UNABLE_TO_REFER = -2146892991 -SEC_E_KDC_UNKNOWN_ETYPE = -2146892990 -SEC_E_UNSUPPORTED_PREAUTH = -2146892989 -SEC_E_DELEGATION_REQUIRED = -2146892987 -SEC_E_BAD_BINDINGS = -2146892986 -SEC_E_MULTIPLE_ACCOUNTS = -2146892985 -SEC_E_NO_KERB_KEY = -2146892984 - -ERROR_IPSEC_QM_POLICY_EXISTS = 13000 -ERROR_IPSEC_QM_POLICY_NOT_FOUND = 13001 -ERROR_IPSEC_QM_POLICY_IN_USE = 13002 -ERROR_IPSEC_MM_POLICY_EXISTS = 13003 -ERROR_IPSEC_MM_POLICY_NOT_FOUND = 13004 -ERROR_IPSEC_MM_POLICY_IN_USE = 13005 -ERROR_IPSEC_MM_FILTER_EXISTS = 13006 -ERROR_IPSEC_MM_FILTER_NOT_FOUND = 13007 -ERROR_IPSEC_TRANSPORT_FILTER_EXISTS = 13008 -ERROR_IPSEC_TRANSPORT_FILTER_NOT_FOUND = 13009 -ERROR_IPSEC_MM_AUTH_EXISTS = 13010 -ERROR_IPSEC_MM_AUTH_NOT_FOUND = 13011 -ERROR_IPSEC_MM_AUTH_IN_USE = 13012 
-ERROR_IPSEC_DEFAULT_MM_POLICY_NOT_FOUND = 13013 -ERROR_IPSEC_DEFAULT_MM_AUTH_NOT_FOUND = 13014 -ERROR_IPSEC_DEFAULT_QM_POLICY_NOT_FOUND = 13015 -ERROR_IPSEC_TUNNEL_FILTER_EXISTS = 13016 -ERROR_IPSEC_TUNNEL_FILTER_NOT_FOUND = 13017 -ERROR_IPSEC_MM_FILTER_PENDING_DELETION = 13018 -ERROR_IPSEC_TRANSPORT_FILTER_PENDING_DELETION = 13019 -ERROR_IPSEC_TUNNEL_FILTER_PENDING_DELETION = 13020 -ERROR_IPSEC_MM_POLICY_PENDING_DELETION = 13021 -ERROR_IPSEC_MM_AUTH_PENDING_DELETION = 13022 -ERROR_IPSEC_QM_POLICY_PENDING_DELETION = 13023 -WARNING_IPSEC_MM_POLICY_PRUNED = 13024 -WARNING_IPSEC_QM_POLICY_PRUNED = 13025 -ERROR_IPSEC_IKE_NEG_STATUS_BEGIN = 13800 -ERROR_IPSEC_IKE_AUTH_FAIL = 13801 -ERROR_IPSEC_IKE_ATTRIB_FAIL = 13802 -ERROR_IPSEC_IKE_NEGOTIATION_PENDING = 13803 -ERROR_IPSEC_IKE_GENERAL_PROCESSING_ERROR = 13804 -ERROR_IPSEC_IKE_TIMED_OUT = 13805 -ERROR_IPSEC_IKE_NO_CERT = 13806 -ERROR_IPSEC_IKE_SA_DELETED = 13807 -ERROR_IPSEC_IKE_SA_REAPED = 13808 -ERROR_IPSEC_IKE_MM_ACQUIRE_DROP = 13809 -ERROR_IPSEC_IKE_QM_ACQUIRE_DROP = 13810 -ERROR_IPSEC_IKE_QUEUE_DROP_MM = 13811 -ERROR_IPSEC_IKE_QUEUE_DROP_NO_MM = 13812 -ERROR_IPSEC_IKE_DROP_NO_RESPONSE = 13813 -ERROR_IPSEC_IKE_MM_DELAY_DROP = 13814 -ERROR_IPSEC_IKE_QM_DELAY_DROP = 13815 -ERROR_IPSEC_IKE_ERROR = 13816 -ERROR_IPSEC_IKE_CRL_FAILED = 13817 -ERROR_IPSEC_IKE_INVALID_KEY_USAGE = 13818 -ERROR_IPSEC_IKE_INVALID_CERT_TYPE = 13819 -ERROR_IPSEC_IKE_NO_PRIVATE_KEY = 13820 -ERROR_IPSEC_IKE_DH_FAIL = 13822 -ERROR_IPSEC_IKE_INVALID_HEADER = 13824 -ERROR_IPSEC_IKE_NO_POLICY = 13825 -ERROR_IPSEC_IKE_INVALID_SIGNATURE = 13826 -ERROR_IPSEC_IKE_KERBEROS_ERROR = 13827 -ERROR_IPSEC_IKE_NO_PUBLIC_KEY = 13828 -ERROR_IPSEC_IKE_PROCESS_ERR = 13829 -ERROR_IPSEC_IKE_PROCESS_ERR_SA = 13830 -ERROR_IPSEC_IKE_PROCESS_ERR_PROP = 13831 -ERROR_IPSEC_IKE_PROCESS_ERR_TRANS = 13832 -ERROR_IPSEC_IKE_PROCESS_ERR_KE = 13833 -ERROR_IPSEC_IKE_PROCESS_ERR_ID = 13834 -ERROR_IPSEC_IKE_PROCESS_ERR_CERT = 13835 -ERROR_IPSEC_IKE_PROCESS_ERR_CERT_REQ = 13836 
-ERROR_IPSEC_IKE_PROCESS_ERR_HASH = 13837 -ERROR_IPSEC_IKE_PROCESS_ERR_SIG = 13838 -ERROR_IPSEC_IKE_PROCESS_ERR_NONCE = 13839 -ERROR_IPSEC_IKE_PROCESS_ERR_NOTIFY = 13840 -ERROR_IPSEC_IKE_PROCESS_ERR_DELETE = 13841 -ERROR_IPSEC_IKE_PROCESS_ERR_VENDOR = 13842 -ERROR_IPSEC_IKE_INVALID_PAYLOAD = 13843 -ERROR_IPSEC_IKE_LOAD_SOFT_SA = 13844 -ERROR_IPSEC_IKE_SOFT_SA_TORN_DOWN = 13845 -ERROR_IPSEC_IKE_INVALID_COOKIE = 13846 -ERROR_IPSEC_IKE_NO_PEER_CERT = 13847 -ERROR_IPSEC_IKE_PEER_CRL_FAILED = 13848 -ERROR_IPSEC_IKE_POLICY_CHANGE = 13849 -ERROR_IPSEC_IKE_NO_MM_POLICY = 13850 -ERROR_IPSEC_IKE_NOTCBPRIV = 13851 -ERROR_IPSEC_IKE_SECLOADFAIL = 13852 -ERROR_IPSEC_IKE_FAILSSPINIT = 13853 -ERROR_IPSEC_IKE_FAILQUERYSSP = 13854 -ERROR_IPSEC_IKE_SRVACQFAIL = 13855 -ERROR_IPSEC_IKE_SRVQUERYCRED = 13856 -ERROR_IPSEC_IKE_GETSPIFAIL = 13857 -ERROR_IPSEC_IKE_INVALID_FILTER = 13858 -ERROR_IPSEC_IKE_OUT_OF_MEMORY = 13859 -ERROR_IPSEC_IKE_ADD_UPDATE_KEY_FAILED = 13860 -ERROR_IPSEC_IKE_INVALID_POLICY = 13861 -ERROR_IPSEC_IKE_UNKNOWN_DOI = 13862 -ERROR_IPSEC_IKE_INVALID_SITUATION = 13863 -ERROR_IPSEC_IKE_DH_FAILURE = 13864 -ERROR_IPSEC_IKE_INVALID_GROUP = 13865 -ERROR_IPSEC_IKE_ENCRYPT = 13866 -ERROR_IPSEC_IKE_DECRYPT = 13867 -ERROR_IPSEC_IKE_POLICY_MATCH = 13868 -ERROR_IPSEC_IKE_UNSUPPORTED_ID = 13869 -ERROR_IPSEC_IKE_INVALID_HASH = 13870 -ERROR_IPSEC_IKE_INVALID_HASH_ALG = 13871 -ERROR_IPSEC_IKE_INVALID_HASH_SIZE = 13872 -ERROR_IPSEC_IKE_INVALID_ENCRYPT_ALG = 13873 -ERROR_IPSEC_IKE_INVALID_AUTH_ALG = 13874 -ERROR_IPSEC_IKE_INVALID_SIG = 13875 -ERROR_IPSEC_IKE_LOAD_FAILED = 13876 -ERROR_IPSEC_IKE_RPC_DELETE = 13877 -ERROR_IPSEC_IKE_BENIGN_REINIT = 13878 -ERROR_IPSEC_IKE_INVALID_RESPONDER_LIFETIME_NOTIFY = 13879 -ERROR_IPSEC_IKE_INVALID_CERT_KEYLEN = 13881 -ERROR_IPSEC_IKE_MM_LIMIT = 13882 -ERROR_IPSEC_IKE_NEGOTIATION_DISABLED = 13883 -ERROR_IPSEC_IKE_NEG_STATUS_END = 13884 -CRYPT_E_MSG_ERROR = -2146889727 -CRYPT_E_UNKNOWN_ALGO = -2146889726 -CRYPT_E_OID_FORMAT = -2146889725 
-CRYPT_E_INVALID_MSG_TYPE = -2146889724 -CRYPT_E_UNEXPECTED_ENCODING = -2146889723 -CRYPT_E_AUTH_ATTR_MISSING = -2146889722 -CRYPT_E_HASH_VALUE = -2146889721 -CRYPT_E_INVALID_INDEX = -2146889720 -CRYPT_E_ALREADY_DECRYPTED = -2146889719 -CRYPT_E_NOT_DECRYPTED = -2146889718 -CRYPT_E_RECIPIENT_NOT_FOUND = -2146889717 -CRYPT_E_CONTROL_TYPE = -2146889716 -CRYPT_E_ISSUER_SERIALNUMBER = -2146889715 -CRYPT_E_SIGNER_NOT_FOUND = -2146889714 -CRYPT_E_ATTRIBUTES_MISSING = -2146889713 -CRYPT_E_STREAM_MSG_NOT_READY = -2146889712 -CRYPT_E_STREAM_INSUFFICIENT_DATA = -2146889711 -CRYPT_I_NEW_PROTECTION_REQUIRED = 593938 -CRYPT_E_BAD_LEN = -2146885631 -CRYPT_E_BAD_ENCODE = -2146885630 -CRYPT_E_FILE_ERROR = -2146885629 -CRYPT_E_NOT_FOUND = -2146885628 -CRYPT_E_EXISTS = -2146885627 -CRYPT_E_NO_PROVIDER = -2146885626 -CRYPT_E_SELF_SIGNED = -2146885625 -CRYPT_E_DELETED_PREV = -2146885624 -CRYPT_E_NO_MATCH = -2146885623 -CRYPT_E_UNEXPECTED_MSG_TYPE = -2146885622 -CRYPT_E_NO_KEY_PROPERTY = -2146885621 -CRYPT_E_NO_DECRYPT_CERT = -2146885620 -CRYPT_E_BAD_MSG = -2146885619 -CRYPT_E_NO_SIGNER = -2146885618 -CRYPT_E_PENDING_CLOSE = -2146885617 -CRYPT_E_REVOKED = -2146885616 -CRYPT_E_NO_REVOCATION_DLL = -2146885615 -CRYPT_E_NO_REVOCATION_CHECK = -2146885614 -CRYPT_E_REVOCATION_OFFLINE = -2146885613 -CRYPT_E_NOT_IN_REVOCATION_DATABASE = -2146885612 -CRYPT_E_INVALID_NUMERIC_STRING = -2146885600 -CRYPT_E_INVALID_PRINTABLE_STRING = -2146885599 -CRYPT_E_INVALID_IA5_STRING = -2146885598 -CRYPT_E_INVALID_X500_STRING = -2146885597 -CRYPT_E_NOT_CHAR_STRING = -2146885596 -CRYPT_E_FILERESIZED = -2146885595 -CRYPT_E_SECURITY_SETTINGS = -2146885594 -CRYPT_E_NO_VERIFY_USAGE_DLL = -2146885593 -CRYPT_E_NO_VERIFY_USAGE_CHECK = -2146885592 -CRYPT_E_VERIFY_USAGE_OFFLINE = -2146885591 -CRYPT_E_NOT_IN_CTL = -2146885590 -CRYPT_E_NO_TRUSTED_SIGNER = -2146885589 -CRYPT_E_MISSING_PUBKEY_PARA = -2146885588 -CRYPT_E_OSS_ERROR = -2146881536 - -## Kerberos message types for LsaCallAuthenticationPackage (from ntsecapi.h) 
-KerbDebugRequestMessage = 0 -KerbQueryTicketCacheMessage = 1 -KerbChangeMachinePasswordMessage = 2 -KerbVerifyPacMessage = 3 -KerbRetrieveTicketMessage = 4 -KerbUpdateAddressesMessage = 5 -KerbPurgeTicketCacheMessage = 6 -KerbChangePasswordMessage = 7 -KerbRetrieveEncodedTicketMessage = 8 -KerbDecryptDataMessage = 9 -KerbAddBindingCacheEntryMessage = 10 -KerbSetPasswordMessage = 11 -KerbSetPasswordExMessage = 12 -KerbVerifyCredentialsMessage = 13 -KerbQueryTicketCacheExMessage = 14 -KerbPurgeTicketCacheExMessage = 15 -KerbRefreshSmartcardCredentialsMessage = 16 -KerbAddExtraCredentialsMessage = 17 -KerbQuerySupplementalCredentialsMessage = 18 - -## messages used with msv1_0 from ntsecapi.h -MsV1_0Lm20ChallengeRequest = 0 -MsV1_0Lm20GetChallengeResponse = 1 -MsV1_0EnumerateUsers = 2 -MsV1_0GetUserInfo = 3 -MsV1_0ReLogonUsers = 4 -MsV1_0ChangePassword = 5 -MsV1_0ChangeCachedPassword = 6 -MsV1_0GenericPassthrough = 7 -MsV1_0CacheLogon = 8 -MsV1_0SubAuth = 9 -MsV1_0DeriveCredential = 10 -MsV1_0CacheLookup = 11 -MsV1_0SetProcessOption = 12 - -SEC_E_OK = 0 diff --git a/lib/win32/lib/win2kras.py b/lib/win32/lib/win2kras.py deleted file mode 100644 index f1c18b3a..00000000 --- a/lib/win32/lib/win2kras.py +++ /dev/null @@ -1,7 +0,0 @@ -# win2kras used to be an extension module with wrapped the "new" RAS functions -# in Windows 2000, so win32ras could still be used on NT/etc. -# I think in 2021 we can be confident pywin32 is not used on earlier OSs, so -# that functionality is now in win32ras. -# -# This exists just to avoid breaking old scripts. 
-from win32ras import * diff --git a/lib/win32/lib/win32con.py b/lib/win32/lib/win32con.py deleted file mode 100644 index 9916ba4a..00000000 --- a/lib/win32/lib/win32con.py +++ /dev/null @@ -1,5083 +0,0 @@ -# Generated by h2py from commdlg.h (plus modifications 4jan98) -WINVER = 1280 -WM_USER = 1024 -PY_0U = 0 -OFN_READONLY = 1 -OFN_OVERWRITEPROMPT = 2 -OFN_HIDEREADONLY = 4 -OFN_NOCHANGEDIR = 8 -OFN_SHOWHELP = 16 -OFN_ENABLEHOOK = 32 -OFN_ENABLETEMPLATE = 64 -OFN_ENABLETEMPLATEHANDLE = 128 -OFN_NOVALIDATE = 256 -OFN_ALLOWMULTISELECT = 512 -OFN_EXTENSIONDIFFERENT = 1024 -OFN_PATHMUSTEXIST = 2048 -OFN_FILEMUSTEXIST = 4096 -OFN_CREATEPROMPT = 8192 -OFN_SHAREAWARE = 16384 -OFN_NOREADONLYRETURN = 32768 -OFN_NOTESTFILECREATE = 65536 -OFN_NONETWORKBUTTON = 131072 -OFN_NOLONGNAMES = 262144 -OFN_EXPLORER = 524288 # new look commdlg -OFN_NODEREFERENCELINKS = 1048576 -OFN_LONGNAMES = 2097152 # force long names for 3.x modules -OFN_ENABLEINCLUDENOTIFY = 4194304 # send include message to callback -OFN_ENABLESIZING = 8388608 -OFN_DONTADDTORECENT = 33554432 -OFN_FORCESHOWHIDDEN = 268435456 # Show All files including System and hidden files -OFN_EX_NOPLACESBAR = 1 -OFN_SHAREFALLTHROUGH = 2 -OFN_SHARENOWARN = 1 -OFN_SHAREWARN = 0 -CDN_FIRST = PY_0U - 601 -CDN_LAST = PY_0U - 699 -CDN_INITDONE = CDN_FIRST - 0 -CDN_SELCHANGE = CDN_FIRST - 1 -CDN_FOLDERCHANGE = CDN_FIRST - 2 -CDN_SHAREVIOLATION = CDN_FIRST - 3 -CDN_HELP = CDN_FIRST - 4 -CDN_FILEOK = CDN_FIRST - 5 -CDN_TYPECHANGE = CDN_FIRST - 6 -CDN_INCLUDEITEM = CDN_FIRST - 7 -CDM_FIRST = WM_USER + 100 -CDM_LAST = WM_USER + 200 -CDM_GETSPEC = CDM_FIRST + 0 -CDM_GETFILEPATH = CDM_FIRST + 1 -CDM_GETFOLDERPATH = CDM_FIRST + 2 -CDM_GETFOLDERIDLIST = CDM_FIRST + 3 -CDM_SETCONTROLTEXT = CDM_FIRST + 4 -CDM_HIDECONTROL = CDM_FIRST + 5 -CDM_SETDEFEXT = CDM_FIRST + 6 -CC_RGBINIT = 1 -CC_FULLOPEN = 2 -CC_PREVENTFULLOPEN = 4 -CC_SHOWHELP = 8 -CC_ENABLEHOOK = 16 -CC_ENABLETEMPLATE = 32 -CC_ENABLETEMPLATEHANDLE = 64 -CC_SOLIDCOLOR = 128 
-CC_ANYCOLOR = 256 -FR_DOWN = 1 -FR_WHOLEWORD = 2 -FR_MATCHCASE = 4 -FR_FINDNEXT = 8 -FR_REPLACE = 16 -FR_REPLACEALL = 32 -FR_DIALOGTERM = 64 -FR_SHOWHELP = 128 -FR_ENABLEHOOK = 256 -FR_ENABLETEMPLATE = 512 -FR_NOUPDOWN = 1024 -FR_NOMATCHCASE = 2048 -FR_NOWHOLEWORD = 4096 -FR_ENABLETEMPLATEHANDLE = 8192 -FR_HIDEUPDOWN = 16384 -FR_HIDEMATCHCASE = 32768 -FR_HIDEWHOLEWORD = 65536 -CF_SCREENFONTS = 1 -CF_PRINTERFONTS = 2 -CF_BOTH = CF_SCREENFONTS | CF_PRINTERFONTS -CF_SHOWHELP = 4 -CF_ENABLEHOOK = 8 -CF_ENABLETEMPLATE = 16 -CF_ENABLETEMPLATEHANDLE = 32 -CF_INITTOLOGFONTSTRUCT = 64 -CF_USESTYLE = 128 -CF_EFFECTS = 256 -CF_APPLY = 512 -CF_ANSIONLY = 1024 -CF_SCRIPTSONLY = CF_ANSIONLY -CF_NOVECTORFONTS = 2048 -CF_NOOEMFONTS = CF_NOVECTORFONTS -CF_NOSIMULATIONS = 4096 -CF_LIMITSIZE = 8192 -CF_FIXEDPITCHONLY = 16384 -CF_WYSIWYG = 32768 # must also have CF_SCREENFONTS & CF_PRINTERFONTS -CF_FORCEFONTEXIST = 65536 -CF_SCALABLEONLY = 131072 -CF_TTONLY = 262144 -CF_NOFACESEL = 524288 -CF_NOSTYLESEL = 1048576 -CF_NOSIZESEL = 2097152 -CF_SELECTSCRIPT = 4194304 -CF_NOSCRIPTSEL = 8388608 -CF_NOVERTFONTS = 16777216 -SIMULATED_FONTTYPE = 32768 -PRINTER_FONTTYPE = 16384 -SCREEN_FONTTYPE = 8192 -BOLD_FONTTYPE = 256 -ITALIC_FONTTYPE = 512 -REGULAR_FONTTYPE = 1024 -OPENTYPE_FONTTYPE = 65536 -TYPE1_FONTTYPE = 131072 -DSIG_FONTTYPE = 262144 -WM_CHOOSEFONT_GETLOGFONT = WM_USER + 1 -WM_CHOOSEFONT_SETLOGFONT = WM_USER + 101 -WM_CHOOSEFONT_SETFLAGS = WM_USER + 102 -LBSELCHSTRINGA = "commdlg_LBSelChangedNotify" -SHAREVISTRINGA = "commdlg_ShareViolation" -FILEOKSTRINGA = "commdlg_FileNameOK" -COLOROKSTRINGA = "commdlg_ColorOK" -SETRGBSTRINGA = "commdlg_SetRGBColor" -HELPMSGSTRINGA = "commdlg_help" -FINDMSGSTRINGA = "commdlg_FindReplace" -LBSELCHSTRING = LBSELCHSTRINGA -SHAREVISTRING = SHAREVISTRINGA -FILEOKSTRING = FILEOKSTRINGA -COLOROKSTRING = COLOROKSTRINGA -SETRGBSTRING = SETRGBSTRINGA -HELPMSGSTRING = HELPMSGSTRINGA -FINDMSGSTRING = FINDMSGSTRINGA -CD_LBSELNOITEMS = -1 -CD_LBSELCHANGE = 0 
-CD_LBSELSUB = 1 -CD_LBSELADD = 2 -PD_ALLPAGES = 0 -PD_SELECTION = 1 -PD_PAGENUMS = 2 -PD_NOSELECTION = 4 -PD_NOPAGENUMS = 8 -PD_COLLATE = 16 -PD_PRINTTOFILE = 32 -PD_PRINTSETUP = 64 -PD_NOWARNING = 128 -PD_RETURNDC = 256 -PD_RETURNIC = 512 -PD_RETURNDEFAULT = 1024 -PD_SHOWHELP = 2048 -PD_ENABLEPRINTHOOK = 4096 -PD_ENABLESETUPHOOK = 8192 -PD_ENABLEPRINTTEMPLATE = 16384 -PD_ENABLESETUPTEMPLATE = 32768 -PD_ENABLEPRINTTEMPLATEHANDLE = 65536 -PD_ENABLESETUPTEMPLATEHANDLE = 131072 -PD_USEDEVMODECOPIES = 262144 -PD_DISABLEPRINTTOFILE = 524288 -PD_HIDEPRINTTOFILE = 1048576 -PD_NONETWORKBUTTON = 2097152 -DN_DEFAULTPRN = 1 -WM_PSD_PAGESETUPDLG = WM_USER -WM_PSD_FULLPAGERECT = WM_USER + 1 -WM_PSD_MINMARGINRECT = WM_USER + 2 -WM_PSD_MARGINRECT = WM_USER + 3 -WM_PSD_GREEKTEXTRECT = WM_USER + 4 -WM_PSD_ENVSTAMPRECT = WM_USER + 5 -WM_PSD_YAFULLPAGERECT = WM_USER + 6 -PSD_DEFAULTMINMARGINS = 0 # default (printer's) -PSD_INWININIINTLMEASURE = 0 # 1st of 4 possible -PSD_MINMARGINS = 1 # use caller's -PSD_MARGINS = 2 # use caller's -PSD_INTHOUSANDTHSOFINCHES = 4 # 2nd of 4 possible -PSD_INHUNDREDTHSOFMILLIMETERS = 8 # 3rd of 4 possible -PSD_DISABLEMARGINS = 16 -PSD_DISABLEPRINTER = 32 -PSD_NOWARNING = 128 # must be same as PD_* -PSD_DISABLEORIENTATION = 256 -PSD_RETURNDEFAULT = 1024 # must be same as PD_* -PSD_DISABLEPAPER = 512 -PSD_SHOWHELP = 2048 # must be same as PD_* -PSD_ENABLEPAGESETUPHOOK = 8192 # must be same as PD_* -PSD_ENABLEPAGESETUPTEMPLATE = 32768 # must be same as PD_* -PSD_ENABLEPAGESETUPTEMPLATEHANDLE = 131072 # must be same as PD_* -PSD_ENABLEPAGEPAINTHOOK = 262144 -PSD_DISABLEPAGEPAINTING = 524288 -PSD_NONETWORKBUTTON = 2097152 # must be same as PD_* - -# Generated by h2py from winreg.h -HKEY_CLASSES_ROOT = -2147483648 -HKEY_CURRENT_USER = -2147483647 -HKEY_LOCAL_MACHINE = -2147483646 -HKEY_USERS = -2147483645 -HKEY_PERFORMANCE_DATA = -2147483644 -HKEY_CURRENT_CONFIG = -2147483643 -HKEY_DYN_DATA = -2147483642 -HKEY_PERFORMANCE_TEXT = -2147483568 # ?? 
4Jan98 -HKEY_PERFORMANCE_NLSTEXT = -2147483552 # ?? 4Jan98 - -# Generated by h2py from winuser.h -HWND_BROADCAST = 65535 -HWND_DESKTOP = 0 -HWND_TOP = 0 -HWND_BOTTOM = 1 -HWND_TOPMOST = -1 -HWND_NOTOPMOST = -2 -HWND_MESSAGE = -3 - -# winuser.h line 4601 -SM_CXSCREEN = 0 -SM_CYSCREEN = 1 -SM_CXVSCROLL = 2 -SM_CYHSCROLL = 3 -SM_CYCAPTION = 4 -SM_CXBORDER = 5 -SM_CYBORDER = 6 -SM_CXDLGFRAME = 7 -SM_CYDLGFRAME = 8 -SM_CYVTHUMB = 9 -SM_CXHTHUMB = 10 -SM_CXICON = 11 -SM_CYICON = 12 -SM_CXCURSOR = 13 -SM_CYCURSOR = 14 -SM_CYMENU = 15 -SM_CXFULLSCREEN = 16 -SM_CYFULLSCREEN = 17 -SM_CYKANJIWINDOW = 18 -SM_MOUSEPRESENT = 19 -SM_CYVSCROLL = 20 -SM_CXHSCROLL = 21 -SM_DEBUG = 22 -SM_SWAPBUTTON = 23 -SM_RESERVED1 = 24 -SM_RESERVED2 = 25 -SM_RESERVED3 = 26 -SM_RESERVED4 = 27 -SM_CXMIN = 28 -SM_CYMIN = 29 -SM_CXSIZE = 30 -SM_CYSIZE = 31 -SM_CXFRAME = 32 -SM_CYFRAME = 33 -SM_CXMINTRACK = 34 -SM_CYMINTRACK = 35 -SM_CXDOUBLECLK = 36 -SM_CYDOUBLECLK = 37 -SM_CXICONSPACING = 38 -SM_CYICONSPACING = 39 -SM_MENUDROPALIGNMENT = 40 -SM_PENWINDOWS = 41 -SM_DBCSENABLED = 42 -SM_CMOUSEBUTTONS = 43 -SM_CXFIXEDFRAME = SM_CXDLGFRAME -SM_CYFIXEDFRAME = SM_CYDLGFRAME -SM_CXSIZEFRAME = SM_CXFRAME -SM_CYSIZEFRAME = SM_CYFRAME -SM_SECURE = 44 -SM_CXEDGE = 45 -SM_CYEDGE = 46 -SM_CXMINSPACING = 47 -SM_CYMINSPACING = 48 -SM_CXSMICON = 49 -SM_CYSMICON = 50 -SM_CYSMCAPTION = 51 -SM_CXSMSIZE = 52 -SM_CYSMSIZE = 53 -SM_CXMENUSIZE = 54 -SM_CYMENUSIZE = 55 -SM_ARRANGE = 56 -SM_CXMINIMIZED = 57 -SM_CYMINIMIZED = 58 -SM_CXMAXTRACK = 59 -SM_CYMAXTRACK = 60 -SM_CXMAXIMIZED = 61 -SM_CYMAXIMIZED = 62 -SM_NETWORK = 63 -SM_CLEANBOOT = 67 -SM_CXDRAG = 68 -SM_CYDRAG = 69 -SM_SHOWSOUNDS = 70 -SM_CXMENUCHECK = 71 -SM_CYMENUCHECK = 72 -SM_SLOWMACHINE = 73 -SM_MIDEASTENABLED = 74 -SM_MOUSEWHEELPRESENT = 75 -SM_XVIRTUALSCREEN = 76 -SM_YVIRTUALSCREEN = 77 -SM_CXVIRTUALSCREEN = 78 -SM_CYVIRTUALSCREEN = 79 -SM_CMONITORS = 80 -SM_SAMEDISPLAYFORMAT = 81 -SM_CMETRICS = 83 -MNC_IGNORE = 0 -MNC_CLOSE = 1 -MNC_EXECUTE = 2 -MNC_SELECT 
= 3 -MNS_NOCHECK = -2147483648 -MNS_MODELESS = 1073741824 -MNS_DRAGDROP = 536870912 -MNS_AUTODISMISS = 268435456 -MNS_NOTIFYBYPOS = 134217728 -MNS_CHECKORBMP = 67108864 -MIM_MAXHEIGHT = 1 -MIM_BACKGROUND = 2 -MIM_HELPID = 4 -MIM_MENUDATA = 8 -MIM_STYLE = 16 -MIM_APPLYTOSUBMENUS = -2147483648 -MND_CONTINUE = 0 -MND_ENDMENU = 1 -MNGOF_GAP = 3 -MNGO_NOINTERFACE = 0 -MNGO_NOERROR = 1 -MIIM_STATE = 1 -MIIM_ID = 2 -MIIM_SUBMENU = 4 -MIIM_CHECKMARKS = 8 -MIIM_TYPE = 16 -MIIM_DATA = 32 -MIIM_STRING = 64 -MIIM_BITMAP = 128 -MIIM_FTYPE = 256 -HBMMENU_CALLBACK = -1 -HBMMENU_SYSTEM = 1 -HBMMENU_MBAR_RESTORE = 2 -HBMMENU_MBAR_MINIMIZE = 3 -HBMMENU_MBAR_CLOSE = 5 -HBMMENU_MBAR_CLOSE_D = 6 -HBMMENU_MBAR_MINIMIZE_D = 7 -HBMMENU_POPUP_CLOSE = 8 -HBMMENU_POPUP_RESTORE = 9 -HBMMENU_POPUP_MAXIMIZE = 10 -HBMMENU_POPUP_MINIMIZE = 11 -GMDI_USEDISABLED = 1 -GMDI_GOINTOPOPUPS = 2 -TPM_LEFTBUTTON = 0 -TPM_RIGHTBUTTON = 2 -TPM_LEFTALIGN = 0 -TPM_CENTERALIGN = 4 -TPM_RIGHTALIGN = 8 -TPM_TOPALIGN = 0 -TPM_VCENTERALIGN = 16 -TPM_BOTTOMALIGN = 32 -TPM_HORIZONTAL = 0 -TPM_VERTICAL = 64 -TPM_NONOTIFY = 128 -TPM_RETURNCMD = 256 -TPM_RECURSE = 1 -DOF_EXECUTABLE = 32769 -DOF_DOCUMENT = 32770 -DOF_DIRECTORY = 32771 -DOF_MULTIPLE = 32772 -DOF_PROGMAN = 1 -DOF_SHELLDATA = 2 -DO_DROPFILE = 1162627398 -DO_PRINTFILE = 1414419024 -DT_TOP = 0 -DT_LEFT = 0 -DT_CENTER = 1 -DT_RIGHT = 2 -DT_VCENTER = 4 -DT_BOTTOM = 8 -DT_WORDBREAK = 16 -DT_SINGLELINE = 32 -DT_EXPANDTABS = 64 -DT_TABSTOP = 128 -DT_NOCLIP = 256 -DT_EXTERNALLEADING = 512 -DT_CALCRECT = 1024 -DT_NOPREFIX = 2048 -DT_INTERNAL = 4096 -DT_EDITCONTROL = 8192 -DT_PATH_ELLIPSIS = 16384 -DT_END_ELLIPSIS = 32768 -DT_MODIFYSTRING = 65536 -DT_RTLREADING = 131072 -DT_WORD_ELLIPSIS = 262144 -DST_COMPLEX = 0 -DST_TEXT = 1 -DST_PREFIXTEXT = 2 -DST_ICON = 3 -DST_BITMAP = 4 -DSS_NORMAL = 0 -DSS_UNION = 16 -DSS_DISABLED = 32 -DSS_MONO = 128 -DSS_RIGHT = 32768 -DCX_WINDOW = 1 -DCX_CACHE = 2 -DCX_NORESETATTRS = 4 -DCX_CLIPCHILDREN = 8 -DCX_CLIPSIBLINGS = 16 
-DCX_PARENTCLIP = 32 -DCX_EXCLUDERGN = 64 -DCX_INTERSECTRGN = 128 -DCX_EXCLUDEUPDATE = 256 -DCX_INTERSECTUPDATE = 512 -DCX_LOCKWINDOWUPDATE = 1024 -DCX_VALIDATE = 2097152 -CUDR_NORMAL = 0 -CUDR_NOSNAPTOGRID = 1 -CUDR_NORESOLVEPOSITIONS = 2 -CUDR_NOCLOSEGAPS = 4 -CUDR_NEGATIVECOORDS = 8 -CUDR_NOPRIMARY = 16 -RDW_INVALIDATE = 1 -RDW_INTERNALPAINT = 2 -RDW_ERASE = 4 -RDW_VALIDATE = 8 -RDW_NOINTERNALPAINT = 16 -RDW_NOERASE = 32 -RDW_NOCHILDREN = 64 -RDW_ALLCHILDREN = 128 -RDW_UPDATENOW = 256 -RDW_ERASENOW = 512 -RDW_FRAME = 1024 -RDW_NOFRAME = 2048 -SW_SCROLLCHILDREN = 1 -SW_INVALIDATE = 2 -SW_ERASE = 4 -SW_SMOOTHSCROLL = 16 # Use smooth scrolling -ESB_ENABLE_BOTH = 0 -ESB_DISABLE_BOTH = 3 -ESB_DISABLE_LEFT = 1 -ESB_DISABLE_RIGHT = 2 -ESB_DISABLE_UP = 1 -ESB_DISABLE_DOWN = 2 -ESB_DISABLE_LTUP = ESB_DISABLE_LEFT -ESB_DISABLE_RTDN = ESB_DISABLE_RIGHT -HELPINFO_WINDOW = 1 -HELPINFO_MENUITEM = 2 -MB_OK = 0 -MB_OKCANCEL = 1 -MB_ABORTRETRYIGNORE = 2 -MB_YESNOCANCEL = 3 -MB_YESNO = 4 -MB_RETRYCANCEL = 5 -MB_ICONHAND = 16 -MB_ICONQUESTION = 32 -MB_ICONEXCLAMATION = 48 -MB_ICONASTERISK = 64 -MB_ICONWARNING = MB_ICONEXCLAMATION -MB_ICONERROR = MB_ICONHAND -MB_ICONINFORMATION = MB_ICONASTERISK -MB_ICONSTOP = MB_ICONHAND -MB_DEFBUTTON1 = 0 -MB_DEFBUTTON2 = 256 -MB_DEFBUTTON3 = 512 -MB_DEFBUTTON4 = 768 -MB_APPLMODAL = 0 -MB_SYSTEMMODAL = 4096 -MB_TASKMODAL = 8192 -MB_HELP = 16384 -MB_NOFOCUS = 32768 -MB_SETFOREGROUND = 65536 -MB_DEFAULT_DESKTOP_ONLY = 131072 -MB_TOPMOST = 262144 -MB_RIGHT = 524288 -MB_RTLREADING = 1048576 -MB_SERVICE_NOTIFICATION = 2097152 -MB_TYPEMASK = 15 -MB_USERICON = 128 -MB_ICONMASK = 240 -MB_DEFMASK = 3840 -MB_MODEMASK = 12288 -MB_MISCMASK = 49152 -# winuser.h line 6373 -CWP_ALL = 0 -CWP_SKIPINVISIBLE = 1 -CWP_SKIPDISABLED = 2 -CWP_SKIPTRANSPARENT = 4 -CTLCOLOR_MSGBOX = 0 -CTLCOLOR_EDIT = 1 -CTLCOLOR_LISTBOX = 2 -CTLCOLOR_BTN = 3 -CTLCOLOR_DLG = 4 -CTLCOLOR_SCROLLBAR = 5 -CTLCOLOR_STATIC = 6 -CTLCOLOR_MAX = 7 -COLOR_SCROLLBAR = 0 -COLOR_BACKGROUND = 1 
-COLOR_ACTIVECAPTION = 2 -COLOR_INACTIVECAPTION = 3 -COLOR_MENU = 4 -COLOR_WINDOW = 5 -COLOR_WINDOWFRAME = 6 -COLOR_MENUTEXT = 7 -COLOR_WINDOWTEXT = 8 -COLOR_CAPTIONTEXT = 9 -COLOR_ACTIVEBORDER = 10 -COLOR_INACTIVEBORDER = 11 -COLOR_APPWORKSPACE = 12 -COLOR_HIGHLIGHT = 13 -COLOR_HIGHLIGHTTEXT = 14 -COLOR_BTNFACE = 15 -COLOR_BTNSHADOW = 16 -COLOR_GRAYTEXT = 17 -COLOR_BTNTEXT = 18 -COLOR_INACTIVECAPTIONTEXT = 19 -COLOR_BTNHIGHLIGHT = 20 -COLOR_3DDKSHADOW = 21 -COLOR_3DLIGHT = 22 -COLOR_INFOTEXT = 23 -COLOR_INFOBK = 24 -COLOR_HOTLIGHT = 26 -COLOR_GRADIENTACTIVECAPTION = 27 -COLOR_GRADIENTINACTIVECAPTION = 28 -COLOR_DESKTOP = COLOR_BACKGROUND -COLOR_3DFACE = COLOR_BTNFACE -COLOR_3DSHADOW = COLOR_BTNSHADOW -COLOR_3DHIGHLIGHT = COLOR_BTNHIGHLIGHT -COLOR_3DHILIGHT = COLOR_BTNHIGHLIGHT -COLOR_BTNHILIGHT = COLOR_BTNHIGHLIGHT -GW_HWNDFIRST = 0 -GW_HWNDLAST = 1 -GW_HWNDNEXT = 2 -GW_HWNDPREV = 3 -GW_OWNER = 4 -GW_CHILD = 5 -GW_ENABLEDPOPUP = 6 -GW_MAX = 6 -MF_INSERT = 0 -MF_CHANGE = 128 -MF_APPEND = 256 -MF_DELETE = 512 -MF_REMOVE = 4096 -MF_BYCOMMAND = 0 -MF_BYPOSITION = 1024 -MF_SEPARATOR = 2048 -MF_ENABLED = 0 -MF_GRAYED = 1 -MF_DISABLED = 2 -MF_UNCHECKED = 0 -MF_CHECKED = 8 -MF_USECHECKBITMAPS = 512 -MF_STRING = 0 -MF_BITMAP = 4 -MF_OWNERDRAW = 256 -MF_POPUP = 16 -MF_MENUBARBREAK = 32 -MF_MENUBREAK = 64 -MF_UNHILITE = 0 -MF_HILITE = 128 -MF_DEFAULT = 4096 -MF_SYSMENU = 8192 -MF_HELP = 16384 -MF_RIGHTJUSTIFY = 16384 -MF_MOUSESELECT = 32768 -MF_END = 128 -MFT_STRING = MF_STRING -MFT_BITMAP = MF_BITMAP -MFT_MENUBARBREAK = MF_MENUBARBREAK -MFT_MENUBREAK = MF_MENUBREAK -MFT_OWNERDRAW = MF_OWNERDRAW -MFT_RADIOCHECK = 512 -MFT_SEPARATOR = MF_SEPARATOR -MFT_RIGHTORDER = 8192 -MFT_RIGHTJUSTIFY = MF_RIGHTJUSTIFY -MFS_GRAYED = 3 -MFS_DISABLED = MFS_GRAYED -MFS_CHECKED = MF_CHECKED -MFS_HILITE = MF_HILITE -MFS_ENABLED = MF_ENABLED -MFS_UNCHECKED = MF_UNCHECKED -MFS_UNHILITE = MF_UNHILITE -MFS_DEFAULT = MF_DEFAULT -MFS_MASK = 4235 -MFS_HOTTRACKDRAWN = 268435456 -MFS_CACHEDBMP = 
536870912 -MFS_BOTTOMGAPDROP = 1073741824 -MFS_TOPGAPDROP = -2147483648 -MFS_GAPDROP = -1073741824 -SC_SIZE = 61440 -SC_MOVE = 61456 -SC_MINIMIZE = 61472 -SC_MAXIMIZE = 61488 -SC_NEXTWINDOW = 61504 -SC_PREVWINDOW = 61520 -SC_CLOSE = 61536 -SC_VSCROLL = 61552 -SC_HSCROLL = 61568 -SC_MOUSEMENU = 61584 -SC_KEYMENU = 61696 -SC_ARRANGE = 61712 -SC_RESTORE = 61728 -SC_TASKLIST = 61744 -SC_SCREENSAVE = 61760 -SC_HOTKEY = 61776 -SC_DEFAULT = 61792 -SC_MONITORPOWER = 61808 -SC_CONTEXTHELP = 61824 -SC_SEPARATOR = 61455 -SC_ICON = SC_MINIMIZE -SC_ZOOM = SC_MAXIMIZE -IDC_ARROW = 32512 -IDC_IBEAM = 32513 -IDC_WAIT = 32514 -IDC_CROSS = 32515 -IDC_UPARROW = 32516 -IDC_SIZE = 32640 # OBSOLETE: use IDC_SIZEALL -IDC_ICON = 32641 # OBSOLETE: use IDC_ARROW -IDC_SIZENWSE = 32642 -IDC_SIZENESW = 32643 -IDC_SIZEWE = 32644 -IDC_SIZENS = 32645 -IDC_SIZEALL = 32646 -IDC_NO = 32648 -IDC_HAND = 32649 -IDC_APPSTARTING = 32650 -IDC_HELP = 32651 -IMAGE_BITMAP = 0 -IMAGE_ICON = 1 -IMAGE_CURSOR = 2 -IMAGE_ENHMETAFILE = 3 -LR_DEFAULTCOLOR = 0 -LR_MONOCHROME = 1 -LR_COLOR = 2 -LR_COPYRETURNORG = 4 -LR_COPYDELETEORG = 8 -LR_LOADFROMFILE = 16 -LR_LOADTRANSPARENT = 32 -LR_DEFAULTSIZE = 64 -LR_LOADREALSIZE = 128 -LR_LOADMAP3DCOLORS = 4096 -LR_CREATEDIBSECTION = 8192 -LR_COPYFROMRESOURCE = 16384 -LR_SHARED = 32768 -DI_MASK = 1 -DI_IMAGE = 2 -DI_NORMAL = 3 -DI_COMPAT = 4 -DI_DEFAULTSIZE = 8 -RES_ICON = 1 -RES_CURSOR = 2 -OBM_CLOSE = 32754 -OBM_UPARROW = 32753 -OBM_DNARROW = 32752 -OBM_RGARROW = 32751 -OBM_LFARROW = 32750 -OBM_REDUCE = 32749 -OBM_ZOOM = 32748 -OBM_RESTORE = 32747 -OBM_REDUCED = 32746 -OBM_ZOOMD = 32745 -OBM_RESTORED = 32744 -OBM_UPARROWD = 32743 -OBM_DNARROWD = 32742 -OBM_RGARROWD = 32741 -OBM_LFARROWD = 32740 -OBM_MNARROW = 32739 -OBM_COMBO = 32738 -OBM_UPARROWI = 32737 -OBM_DNARROWI = 32736 -OBM_RGARROWI = 32735 -OBM_LFARROWI = 32734 -OBM_OLD_CLOSE = 32767 -OBM_SIZE = 32766 -OBM_OLD_UPARROW = 32765 -OBM_OLD_DNARROW = 32764 -OBM_OLD_RGARROW = 32763 -OBM_OLD_LFARROW = 32762 -OBM_BTSIZE = 
32761 -OBM_CHECK = 32760 -OBM_CHECKBOXES = 32759 -OBM_BTNCORNERS = 32758 -OBM_OLD_REDUCE = 32757 -OBM_OLD_ZOOM = 32756 -OBM_OLD_RESTORE = 32755 -OCR_NORMAL = 32512 -OCR_IBEAM = 32513 -OCR_WAIT = 32514 -OCR_CROSS = 32515 -OCR_UP = 32516 -OCR_SIZE = 32640 -OCR_ICON = 32641 -OCR_SIZENWSE = 32642 -OCR_SIZENESW = 32643 -OCR_SIZEWE = 32644 -OCR_SIZENS = 32645 -OCR_SIZEALL = 32646 -OCR_ICOCUR = 32647 -OCR_NO = 32648 -OCR_HAND = 32649 -OCR_APPSTARTING = 32650 -# winuser.h line 7455 -OIC_SAMPLE = 32512 -OIC_HAND = 32513 -OIC_QUES = 32514 -OIC_BANG = 32515 -OIC_NOTE = 32516 -OIC_WINLOGO = 32517 -OIC_WARNING = OIC_BANG -OIC_ERROR = OIC_HAND -OIC_INFORMATION = OIC_NOTE -ORD_LANGDRIVER = 1 -IDI_APPLICATION = 32512 -IDI_HAND = 32513 -IDI_QUESTION = 32514 -IDI_EXCLAMATION = 32515 -IDI_ASTERISK = 32516 -IDI_WINLOGO = 32517 -IDI_WARNING = IDI_EXCLAMATION -IDI_ERROR = IDI_HAND -IDI_INFORMATION = IDI_ASTERISK -IDOK = 1 -IDCANCEL = 2 -IDABORT = 3 -IDRETRY = 4 -IDIGNORE = 5 -IDYES = 6 -IDNO = 7 -IDCLOSE = 8 -IDHELP = 9 -ES_LEFT = 0 -ES_CENTER = 1 -ES_RIGHT = 2 -ES_MULTILINE = 4 -ES_UPPERCASE = 8 -ES_LOWERCASE = 16 -ES_PASSWORD = 32 -ES_AUTOVSCROLL = 64 -ES_AUTOHSCROLL = 128 -ES_NOHIDESEL = 256 -ES_OEMCONVERT = 1024 -ES_READONLY = 2048 -ES_WANTRETURN = 4096 -ES_NUMBER = 8192 -EN_SETFOCUS = 256 -EN_KILLFOCUS = 512 -EN_CHANGE = 768 -EN_UPDATE = 1024 -EN_ERRSPACE = 1280 -EN_MAXTEXT = 1281 -EN_HSCROLL = 1537 -EN_VSCROLL = 1538 -EC_LEFTMARGIN = 1 -EC_RIGHTMARGIN = 2 -EC_USEFONTINFO = 65535 -EMSIS_COMPOSITIONSTRING = 1 -EIMES_GETCOMPSTRATONCE = 1 -EIMES_CANCELCOMPSTRINFOCUS = 2 -EIMES_COMPLETECOMPSTRKILLFOCUS = 4 -EM_GETSEL = 176 -EM_SETSEL = 177 -EM_GETRECT = 178 -EM_SETRECT = 179 -EM_SETRECTNP = 180 -EM_SCROLL = 181 -EM_LINESCROLL = 182 -EM_SCROLLCARET = 183 -EM_GETMODIFY = 184 -EM_SETMODIFY = 185 -EM_GETLINECOUNT = 186 -EM_LINEINDEX = 187 -EM_SETHANDLE = 188 -EM_GETHANDLE = 189 -EM_GETTHUMB = 190 -EM_LINELENGTH = 193 -EM_REPLACESEL = 194 -EM_GETLINE = 196 -EM_LIMITTEXT = 197 -EM_CANUNDO = 
198 -EM_UNDO = 199 -EM_FMTLINES = 200 -EM_LINEFROMCHAR = 201 -EM_SETTABSTOPS = 203 -EM_SETPASSWORDCHAR = 204 -EM_EMPTYUNDOBUFFER = 205 -EM_GETFIRSTVISIBLELINE = 206 -EM_SETREADONLY = 207 -EM_SETWORDBREAKPROC = 208 -EM_GETWORDBREAKPROC = 209 -EM_GETPASSWORDCHAR = 210 -EM_SETMARGINS = 211 -EM_GETMARGINS = 212 -EM_SETLIMITTEXT = EM_LIMITTEXT -EM_GETLIMITTEXT = 213 -EM_POSFROMCHAR = 214 -EM_CHARFROMPOS = 215 -EM_SETIMESTATUS = 216 -EM_GETIMESTATUS = 217 -WB_LEFT = 0 -WB_RIGHT = 1 -WB_ISDELIMITER = 2 -BS_PUSHBUTTON = 0 -BS_DEFPUSHBUTTON = 1 -BS_CHECKBOX = 2 -BS_AUTOCHECKBOX = 3 -BS_RADIOBUTTON = 4 -BS_3STATE = 5 -BS_AUTO3STATE = 6 -BS_GROUPBOX = 7 -BS_USERBUTTON = 8 -BS_AUTORADIOBUTTON = 9 -BS_OWNERDRAW = 11 -BS_LEFTTEXT = 32 -BS_TEXT = 0 -BS_ICON = 64 -BS_BITMAP = 128 -BS_LEFT = 256 -BS_RIGHT = 512 -BS_CENTER = 768 -BS_TOP = 1024 -BS_BOTTOM = 2048 -BS_VCENTER = 3072 -BS_PUSHLIKE = 4096 -BS_MULTILINE = 8192 -BS_NOTIFY = 16384 -BS_FLAT = 32768 -BS_RIGHTBUTTON = BS_LEFTTEXT -BN_CLICKED = 0 -BN_PAINT = 1 -BN_HILITE = 2 -BN_UNHILITE = 3 -BN_DISABLE = 4 -BN_DOUBLECLICKED = 5 -BN_PUSHED = BN_HILITE -BN_UNPUSHED = BN_UNHILITE -BN_DBLCLK = BN_DOUBLECLICKED -BN_SETFOCUS = 6 -BN_KILLFOCUS = 7 -BM_GETCHECK = 240 -BM_SETCHECK = 241 -BM_GETSTATE = 242 -BM_SETSTATE = 243 -BM_SETSTYLE = 244 -BM_CLICK = 245 -BM_GETIMAGE = 246 -BM_SETIMAGE = 247 -BST_UNCHECKED = 0 -BST_CHECKED = 1 -BST_INDETERMINATE = 2 -BST_PUSHED = 4 -BST_FOCUS = 8 -SS_LEFT = 0 -SS_CENTER = 1 -SS_RIGHT = 2 -SS_ICON = 3 -SS_BLACKRECT = 4 -SS_GRAYRECT = 5 -SS_WHITERECT = 6 -SS_BLACKFRAME = 7 -SS_GRAYFRAME = 8 -SS_WHITEFRAME = 9 -SS_USERITEM = 10 -SS_SIMPLE = 11 -SS_LEFTNOWORDWRAP = 12 -SS_BITMAP = 14 -SS_OWNERDRAW = 13 -SS_ENHMETAFILE = 15 -SS_ETCHEDHORZ = 16 -SS_ETCHEDVERT = 17 -SS_ETCHEDFRAME = 18 -SS_TYPEMASK = 31 -SS_NOPREFIX = 128 -SS_NOTIFY = 256 -SS_CENTERIMAGE = 512 -SS_RIGHTJUST = 1024 -SS_REALSIZEIMAGE = 2048 -SS_SUNKEN = 4096 -SS_ENDELLIPSIS = 16384 -SS_PATHELLIPSIS = 32768 -SS_WORDELLIPSIS = 49152 
-SS_ELLIPSISMASK = 49152 -STM_SETICON = 368 -STM_GETICON = 369 -STM_SETIMAGE = 370 -STM_GETIMAGE = 371 -STN_CLICKED = 0 -STN_DBLCLK = 1 -STN_ENABLE = 2 -STN_DISABLE = 3 -STM_MSGMAX = 372 -DWL_MSGRESULT = 0 -DWL_DLGPROC = 4 -DWL_USER = 8 -DDL_READWRITE = 0 -DDL_READONLY = 1 -DDL_HIDDEN = 2 -DDL_SYSTEM = 4 -DDL_DIRECTORY = 16 -DDL_ARCHIVE = 32 -DDL_POSTMSGS = 8192 -DDL_DRIVES = 16384 -DDL_EXCLUSIVE = 32768 - -# from winuser.h line 153 -RT_CURSOR = 1 -RT_BITMAP = 2 -RT_ICON = 3 -RT_MENU = 4 -RT_DIALOG = 5 -RT_STRING = 6 -RT_FONTDIR = 7 -RT_FONT = 8 -RT_ACCELERATOR = 9 -RT_RCDATA = 10 -RT_MESSAGETABLE = 11 -DIFFERENCE = 11 -RT_GROUP_CURSOR = RT_CURSOR + DIFFERENCE -RT_GROUP_ICON = RT_ICON + DIFFERENCE -RT_VERSION = 16 -RT_DLGINCLUDE = 17 -RT_PLUGPLAY = 19 -RT_VXD = 20 -RT_ANICURSOR = 21 -RT_ANIICON = 22 -RT_HTML = 23 -# from winuser.h line 218 -SB_HORZ = 0 -SB_VERT = 1 -SB_CTL = 2 -SB_BOTH = 3 -SB_LINEUP = 0 -SB_LINELEFT = 0 -SB_LINEDOWN = 1 -SB_LINERIGHT = 1 -SB_PAGEUP = 2 -SB_PAGELEFT = 2 -SB_PAGEDOWN = 3 -SB_PAGERIGHT = 3 -SB_THUMBPOSITION = 4 -SB_THUMBTRACK = 5 -SB_TOP = 6 -SB_LEFT = 6 -SB_BOTTOM = 7 -SB_RIGHT = 7 -SB_ENDSCROLL = 8 -SW_HIDE = 0 -SW_SHOWNORMAL = 1 -SW_NORMAL = 1 -SW_SHOWMINIMIZED = 2 -SW_SHOWMAXIMIZED = 3 -SW_MAXIMIZE = 3 -SW_SHOWNOACTIVATE = 4 -SW_SHOW = 5 -SW_MINIMIZE = 6 -SW_SHOWMINNOACTIVE = 7 -SW_SHOWNA = 8 -SW_RESTORE = 9 -SW_SHOWDEFAULT = 10 -SW_FORCEMINIMIZE = 11 -SW_MAX = 11 -HIDE_WINDOW = 0 -SHOW_OPENWINDOW = 1 -SHOW_ICONWINDOW = 2 -SHOW_FULLSCREEN = 3 -SHOW_OPENNOACTIVATE = 4 -SW_PARENTCLOSING = 1 -SW_OTHERZOOM = 2 -SW_PARENTOPENING = 3 -SW_OTHERUNZOOM = 4 -AW_HOR_POSITIVE = 1 -AW_HOR_NEGATIVE = 2 -AW_VER_POSITIVE = 4 -AW_VER_NEGATIVE = 8 -AW_CENTER = 16 -AW_HIDE = 65536 -AW_ACTIVATE = 131072 -AW_SLIDE = 262144 -AW_BLEND = 524288 -KF_EXTENDED = 256 -KF_DLGMODE = 2048 -KF_MENUMODE = 4096 -KF_ALTDOWN = 8192 -KF_REPEAT = 16384 -KF_UP = 32768 -VK_LBUTTON = 1 -VK_RBUTTON = 2 -VK_CANCEL = 3 -VK_MBUTTON = 4 -VK_BACK = 8 -VK_TAB = 9 -VK_CLEAR = 
12 -VK_RETURN = 13 -VK_SHIFT = 16 -VK_CONTROL = 17 -VK_MENU = 18 -VK_PAUSE = 19 -VK_CAPITAL = 20 -VK_KANA = 21 -VK_HANGEUL = 21 # old name - should be here for compatibility -VK_HANGUL = 21 -VK_JUNJA = 23 -VK_FINAL = 24 -VK_HANJA = 25 -VK_KANJI = 25 -VK_ESCAPE = 27 -VK_CONVERT = 28 -VK_NONCONVERT = 29 -VK_ACCEPT = 30 -VK_MODECHANGE = 31 -VK_SPACE = 32 -VK_PRIOR = 33 -VK_NEXT = 34 -VK_END = 35 -VK_HOME = 36 -VK_LEFT = 37 -VK_UP = 38 -VK_RIGHT = 39 -VK_DOWN = 40 -VK_SELECT = 41 -VK_PRINT = 42 -VK_EXECUTE = 43 -VK_SNAPSHOT = 44 -VK_INSERT = 45 -VK_DELETE = 46 -VK_HELP = 47 -VK_LWIN = 91 -VK_RWIN = 92 -VK_APPS = 93 -VK_NUMPAD0 = 96 -VK_NUMPAD1 = 97 -VK_NUMPAD2 = 98 -VK_NUMPAD3 = 99 -VK_NUMPAD4 = 100 -VK_NUMPAD5 = 101 -VK_NUMPAD6 = 102 -VK_NUMPAD7 = 103 -VK_NUMPAD8 = 104 -VK_NUMPAD9 = 105 -VK_MULTIPLY = 106 -VK_ADD = 107 -VK_SEPARATOR = 108 -VK_SUBTRACT = 109 -VK_DECIMAL = 110 -VK_DIVIDE = 111 -VK_F1 = 112 -VK_F2 = 113 -VK_F3 = 114 -VK_F4 = 115 -VK_F5 = 116 -VK_F6 = 117 -VK_F7 = 118 -VK_F8 = 119 -VK_F9 = 120 -VK_F10 = 121 -VK_F11 = 122 -VK_F12 = 123 -VK_F13 = 124 -VK_F14 = 125 -VK_F15 = 126 -VK_F16 = 127 -VK_F17 = 128 -VK_F18 = 129 -VK_F19 = 130 -VK_F20 = 131 -VK_F21 = 132 -VK_F22 = 133 -VK_F23 = 134 -VK_F24 = 135 -VK_NUMLOCK = 144 -VK_SCROLL = 145 -VK_LSHIFT = 160 -VK_RSHIFT = 161 -VK_LCONTROL = 162 -VK_RCONTROL = 163 -VK_LMENU = 164 -VK_RMENU = 165 -VK_PROCESSKEY = 229 -VK_ATTN = 246 -VK_CRSEL = 247 -VK_EXSEL = 248 -VK_EREOF = 249 -VK_PLAY = 250 -VK_ZOOM = 251 -VK_NONAME = 252 -VK_PA1 = 253 -VK_OEM_CLEAR = 254 -# multi-media related "keys" -MOUSEEVENTF_XDOWN = 0x0080 -MOUSEEVENTF_XUP = 0x0100 -MOUSEEVENTF_WHEEL = 0x0800 -VK_XBUTTON1 = 0x05 -VK_XBUTTON2 = 0x06 -VK_VOLUME_MUTE = 0xAD -VK_VOLUME_DOWN = 0xAE -VK_VOLUME_UP = 0xAF -VK_MEDIA_NEXT_TRACK = 0xB0 -VK_MEDIA_PREV_TRACK = 0xB1 -VK_MEDIA_PLAY_PAUSE = 0xB3 -VK_BROWSER_BACK = 0xA6 -VK_BROWSER_FORWARD = 0xA7 -WH_MIN = -1 -WH_MSGFILTER = -1 -WH_JOURNALRECORD = 0 -WH_JOURNALPLAYBACK = 1 -WH_KEYBOARD = 2 -WH_GETMESSAGE = 
3 -WH_CALLWNDPROC = 4 -WH_CBT = 5 -WH_SYSMSGFILTER = 6 -WH_MOUSE = 7 -WH_HARDWARE = 8 -WH_DEBUG = 9 -WH_SHELL = 10 -WH_FOREGROUNDIDLE = 11 -WH_CALLWNDPROCRET = 12 -WH_KEYBOARD_LL = 13 -WH_MOUSE_LL = 14 -WH_MAX = 14 -WH_MINHOOK = WH_MIN -WH_MAXHOOK = WH_MAX -HC_ACTION = 0 -HC_GETNEXT = 1 -HC_SKIP = 2 -HC_NOREMOVE = 3 -HC_NOREM = HC_NOREMOVE -HC_SYSMODALON = 4 -HC_SYSMODALOFF = 5 -HCBT_MOVESIZE = 0 -HCBT_MINMAX = 1 -HCBT_QS = 2 -HCBT_CREATEWND = 3 -HCBT_DESTROYWND = 4 -HCBT_ACTIVATE = 5 -HCBT_CLICKSKIPPED = 6 -HCBT_KEYSKIPPED = 7 -HCBT_SYSCOMMAND = 8 -HCBT_SETFOCUS = 9 -MSGF_DIALOGBOX = 0 -MSGF_MESSAGEBOX = 1 -MSGF_MENU = 2 -# MSGF_MOVE = 3 -# MSGF_SIZE = 4 -MSGF_SCROLLBAR = 5 -MSGF_NEXTWINDOW = 6 -# MSGF_MAINLOOP = 8 -MSGF_MAX = 8 -MSGF_USER = 4096 -HSHELL_WINDOWCREATED = 1 -HSHELL_WINDOWDESTROYED = 2 -HSHELL_ACTIVATESHELLWINDOW = 3 -HSHELL_WINDOWACTIVATED = 4 -HSHELL_GETMINRECT = 5 -HSHELL_REDRAW = 6 -HSHELL_TASKMAN = 7 -HSHELL_LANGUAGE = 8 -HSHELL_ACCESSIBILITYSTATE = 11 -ACCESS_STICKYKEYS = 1 -ACCESS_FILTERKEYS = 2 -ACCESS_MOUSEKEYS = 3 -# winuser.h line 624 -LLKHF_EXTENDED = 1 -LLKHF_INJECTED = 16 -LLKHF_ALTDOWN = 32 -LLKHF_UP = 128 -LLKHF_LOWER_IL_INJECTED = 2 -LLMHF_INJECTED = 1 -LLMHF_LOWER_IL_INJECTED = 2 -# line 692 -HKL_PREV = 0 -HKL_NEXT = 1 -KLF_ACTIVATE = 1 -KLF_SUBSTITUTE_OK = 2 -KLF_UNLOADPREVIOUS = 4 -KLF_REORDER = 8 -KLF_REPLACELANG = 16 -KLF_NOTELLSHELL = 128 -KLF_SETFORPROCESS = 256 -KL_NAMELENGTH = 9 -DESKTOP_READOBJECTS = 1 -DESKTOP_CREATEWINDOW = 2 -DESKTOP_CREATEMENU = 4 -DESKTOP_HOOKCONTROL = 8 -DESKTOP_JOURNALRECORD = 16 -DESKTOP_JOURNALPLAYBACK = 32 -DESKTOP_ENUMERATE = 64 -DESKTOP_WRITEOBJECTS = 128 -DESKTOP_SWITCHDESKTOP = 256 -DF_ALLOWOTHERACCOUNTHOOK = 1 -WINSTA_ENUMDESKTOPS = 1 -WINSTA_READATTRIBUTES = 2 -WINSTA_ACCESSCLIPBOARD = 4 -WINSTA_CREATEDESKTOP = 8 -WINSTA_WRITEATTRIBUTES = 16 -WINSTA_ACCESSGLOBALATOMS = 32 -WINSTA_EXITWINDOWS = 64 -WINSTA_ENUMERATE = 256 -WINSTA_READSCREEN = 512 -WSF_VISIBLE = 1 -UOI_FLAGS = 1 -UOI_NAME = 2 
-UOI_TYPE = 3 -UOI_USER_SID = 4 -GWL_WNDPROC = -4 -GWL_HINSTANCE = -6 -GWL_HWNDPARENT = -8 -GWL_STYLE = -16 -GWL_EXSTYLE = -20 -GWL_USERDATA = -21 -GWL_ID = -12 -GCL_MENUNAME = -8 -GCL_HBRBACKGROUND = -10 -GCL_HCURSOR = -12 -GCL_HICON = -14 -GCL_HMODULE = -16 -GCL_CBWNDEXTRA = -18 -GCL_CBCLSEXTRA = -20 -GCL_WNDPROC = -24 -GCL_STYLE = -26 -GCW_ATOM = -32 -GCL_HICONSM = -34 -# line 1291 -WM_NULL = 0 -WM_CREATE = 1 -WM_DESTROY = 2 -WM_MOVE = 3 -WM_SIZE = 5 -WM_ACTIVATE = 6 -WA_INACTIVE = 0 -WA_ACTIVE = 1 -WA_CLICKACTIVE = 2 -WM_SETFOCUS = 7 -WM_KILLFOCUS = 8 -WM_ENABLE = 10 -WM_SETREDRAW = 11 -WM_SETTEXT = 12 -WM_GETTEXT = 13 -WM_GETTEXTLENGTH = 14 -WM_PAINT = 15 -WM_CLOSE = 16 -WM_QUERYENDSESSION = 17 -WM_QUIT = 18 -WM_QUERYOPEN = 19 -WM_ERASEBKGND = 20 -WM_SYSCOLORCHANGE = 21 -WM_ENDSESSION = 22 -WM_SHOWWINDOW = 24 -WM_WININICHANGE = 26 -WM_SETTINGCHANGE = WM_WININICHANGE -WM_DEVMODECHANGE = 27 -WM_ACTIVATEAPP = 28 -WM_FONTCHANGE = 29 -WM_TIMECHANGE = 30 -WM_CANCELMODE = 31 -WM_SETCURSOR = 32 -WM_MOUSEACTIVATE = 33 -WM_CHILDACTIVATE = 34 -WM_QUEUESYNC = 35 -WM_GETMINMAXINFO = 36 -WM_PAINTICON = 38 -WM_ICONERASEBKGND = 39 -WM_NEXTDLGCTL = 40 -WM_SPOOLERSTATUS = 42 -WM_DRAWITEM = 43 -WM_MEASUREITEM = 44 -WM_DELETEITEM = 45 -WM_VKEYTOITEM = 46 -WM_CHARTOITEM = 47 -WM_SETFONT = 48 -WM_GETFONT = 49 -WM_SETHOTKEY = 50 -WM_GETHOTKEY = 51 -WM_QUERYDRAGICON = 55 -WM_COMPAREITEM = 57 -WM_GETOBJECT = 61 -WM_COMPACTING = 65 -WM_COMMNOTIFY = 68 -WM_WINDOWPOSCHANGING = 70 -WM_WINDOWPOSCHANGED = 71 -WM_POWER = 72 -PWR_OK = 1 -PWR_FAIL = -1 -PWR_SUSPENDREQUEST = 1 -PWR_SUSPENDRESUME = 2 -PWR_CRITICALRESUME = 3 -WM_COPYDATA = 74 -WM_CANCELJOURNAL = 75 -WM_NOTIFY = 78 -WM_INPUTLANGCHANGEREQUEST = 80 -WM_INPUTLANGCHANGE = 81 -WM_TCARD = 82 -WM_HELP = 83 -WM_USERCHANGED = 84 -WM_NOTIFYFORMAT = 85 -NFR_ANSI = 1 -NFR_UNICODE = 2 -NF_QUERY = 3 -NF_REQUERY = 4 -WM_CONTEXTMENU = 123 -WM_STYLECHANGING = 124 -WM_STYLECHANGED = 125 -WM_DISPLAYCHANGE = 126 -WM_GETICON = 127 -WM_SETICON = 128 
-WM_NCCREATE = 129 -WM_NCDESTROY = 130 -WM_NCCALCSIZE = 131 -WM_NCHITTEST = 132 -WM_NCPAINT = 133 -WM_NCACTIVATE = 134 -WM_GETDLGCODE = 135 -WM_SYNCPAINT = 136 -WM_NCMOUSEMOVE = 160 -WM_NCLBUTTONDOWN = 161 -WM_NCLBUTTONUP = 162 -WM_NCLBUTTONDBLCLK = 163 -WM_NCRBUTTONDOWN = 164 -WM_NCRBUTTONUP = 165 -WM_NCRBUTTONDBLCLK = 166 -WM_NCMBUTTONDOWN = 167 -WM_NCMBUTTONUP = 168 -WM_NCMBUTTONDBLCLK = 169 -WM_KEYFIRST = 256 -WM_KEYDOWN = 256 -WM_KEYUP = 257 -WM_CHAR = 258 -WM_DEADCHAR = 259 -WM_SYSKEYDOWN = 260 -WM_SYSKEYUP = 261 -WM_SYSCHAR = 262 -WM_SYSDEADCHAR = 263 -WM_KEYLAST = 264 -WM_IME_STARTCOMPOSITION = 269 -WM_IME_ENDCOMPOSITION = 270 -WM_IME_COMPOSITION = 271 -WM_IME_KEYLAST = 271 -WM_INITDIALOG = 272 -WM_COMMAND = 273 -WM_SYSCOMMAND = 274 -WM_TIMER = 275 -WM_HSCROLL = 276 -WM_VSCROLL = 277 -WM_INITMENU = 278 -WM_INITMENUPOPUP = 279 -WM_MENUSELECT = 287 -WM_MENUCHAR = 288 -WM_ENTERIDLE = 289 -WM_MENURBUTTONUP = 290 -WM_MENUDRAG = 291 -WM_MENUGETOBJECT = 292 -WM_UNINITMENUPOPUP = 293 -WM_MENUCOMMAND = 294 -WM_CTLCOLORMSGBOX = 306 -WM_CTLCOLOREDIT = 307 -WM_CTLCOLORLISTBOX = 308 -WM_CTLCOLORBTN = 309 -WM_CTLCOLORDLG = 310 -WM_CTLCOLORSCROLLBAR = 311 -WM_CTLCOLORSTATIC = 312 -WM_MOUSEFIRST = 512 -WM_MOUSEMOVE = 512 -WM_LBUTTONDOWN = 513 -WM_LBUTTONUP = 514 -WM_LBUTTONDBLCLK = 515 -WM_RBUTTONDOWN = 516 -WM_RBUTTONUP = 517 -WM_RBUTTONDBLCLK = 518 -WM_MBUTTONDOWN = 519 -WM_MBUTTONUP = 520 -WM_MBUTTONDBLCLK = 521 -WM_MOUSEWHEEL = 522 -WM_MOUSELAST = 522 -WHEEL_DELTA = 120 # Value for rolling one detent -WHEEL_PAGESCROLL = -1 # Scroll one page -WM_PARENTNOTIFY = 528 -MENULOOP_WINDOW = 0 -MENULOOP_POPUP = 1 -WM_ENTERMENULOOP = 529 -WM_EXITMENULOOP = 530 -WM_NEXTMENU = 531 -WM_SIZING = 532 -WM_CAPTURECHANGED = 533 -WM_MOVING = 534 -WM_POWERBROADCAST = 536 -PBT_APMQUERYSUSPEND = 0 -PBT_APMQUERYSTANDBY = 1 -PBT_APMQUERYSUSPENDFAILED = 2 -PBT_APMQUERYSTANDBYFAILED = 3 -PBT_APMSUSPEND = 4 -PBT_APMSTANDBY = 5 -PBT_APMRESUMECRITICAL = 6 -PBT_APMRESUMESUSPEND = 7 
-PBT_APMRESUMESTANDBY = 8 -PBTF_APMRESUMEFROMFAILURE = 1 -PBT_APMBATTERYLOW = 9 -PBT_APMPOWERSTATUSCHANGE = 10 -PBT_APMOEMEVENT = 11 -PBT_APMRESUMEAUTOMATIC = 18 -WM_DEVICECHANGE = 537 -WM_MDICREATE = 544 -WM_MDIDESTROY = 545 -WM_MDIACTIVATE = 546 -WM_MDIRESTORE = 547 -WM_MDINEXT = 548 -WM_MDIMAXIMIZE = 549 -WM_MDITILE = 550 -WM_MDICASCADE = 551 -WM_MDIICONARRANGE = 552 -WM_MDIGETACTIVE = 553 -WM_MDISETMENU = 560 -WM_ENTERSIZEMOVE = 561 -WM_EXITSIZEMOVE = 562 -WM_DROPFILES = 563 -WM_MDIREFRESHMENU = 564 -WM_IME_SETCONTEXT = 641 -WM_IME_NOTIFY = 642 -WM_IME_CONTROL = 643 -WM_IME_COMPOSITIONFULL = 644 -WM_IME_SELECT = 645 -WM_IME_CHAR = 646 -WM_IME_REQUEST = 648 -WM_IME_KEYDOWN = 656 -WM_IME_KEYUP = 657 -WM_MOUSEHOVER = 673 -WM_MOUSELEAVE = 675 -WM_CUT = 768 -WM_COPY = 769 -WM_PASTE = 770 -WM_CLEAR = 771 -WM_UNDO = 772 -WM_RENDERFORMAT = 773 -WM_RENDERALLFORMATS = 774 -WM_DESTROYCLIPBOARD = 775 -WM_DRAWCLIPBOARD = 776 -WM_PAINTCLIPBOARD = 777 -WM_VSCROLLCLIPBOARD = 778 -WM_SIZECLIPBOARD = 779 -WM_ASKCBFORMATNAME = 780 -WM_CHANGECBCHAIN = 781 -WM_HSCROLLCLIPBOARD = 782 -WM_QUERYNEWPALETTE = 783 -WM_PALETTEISCHANGING = 784 -WM_PALETTECHANGED = 785 -WM_HOTKEY = 786 -WM_PRINT = 791 -WM_PRINTCLIENT = 792 -WM_HANDHELDFIRST = 856 -WM_HANDHELDLAST = 863 -WM_AFXFIRST = 864 -WM_AFXLAST = 895 -WM_PENWINFIRST = 896 -WM_PENWINLAST = 911 -WM_APP = 32768 -WMSZ_LEFT = 1 -WMSZ_RIGHT = 2 -WMSZ_TOP = 3 -WMSZ_TOPLEFT = 4 -WMSZ_TOPRIGHT = 5 -WMSZ_BOTTOM = 6 -WMSZ_BOTTOMLEFT = 7 -WMSZ_BOTTOMRIGHT = 8 -# ST_BEGINSWP = 0 -# ST_ENDSWP = 1 -HTERROR = -2 -HTTRANSPARENT = -1 -HTNOWHERE = 0 -HTCLIENT = 1 -HTCAPTION = 2 -HTSYSMENU = 3 -HTGROWBOX = 4 -HTSIZE = HTGROWBOX -HTMENU = 5 -HTHSCROLL = 6 -HTVSCROLL = 7 -HTMINBUTTON = 8 -HTMAXBUTTON = 9 -HTLEFT = 10 -HTRIGHT = 11 -HTTOP = 12 -HTTOPLEFT = 13 -HTTOPRIGHT = 14 -HTBOTTOM = 15 -HTBOTTOMLEFT = 16 -HTBOTTOMRIGHT = 17 -HTBORDER = 18 -HTREDUCE = HTMINBUTTON -HTZOOM = HTMAXBUTTON -HTSIZEFIRST = HTLEFT -HTSIZELAST = HTBOTTOMRIGHT -HTOBJECT = 19 
-HTCLOSE = 20 -HTHELP = 21 -SMTO_NORMAL = 0 -SMTO_BLOCK = 1 -SMTO_ABORTIFHUNG = 2 -SMTO_NOTIMEOUTIFNOTHUNG = 8 -MA_ACTIVATE = 1 -MA_ACTIVATEANDEAT = 2 -MA_NOACTIVATE = 3 -MA_NOACTIVATEANDEAT = 4 -ICON_SMALL = 0 -ICON_BIG = 1 -SIZE_RESTORED = 0 -SIZE_MINIMIZED = 1 -SIZE_MAXIMIZED = 2 -SIZE_MAXSHOW = 3 -SIZE_MAXHIDE = 4 -SIZENORMAL = SIZE_RESTORED -SIZEICONIC = SIZE_MINIMIZED -SIZEFULLSCREEN = SIZE_MAXIMIZED -SIZEZOOMSHOW = SIZE_MAXSHOW -SIZEZOOMHIDE = SIZE_MAXHIDE -WVR_ALIGNTOP = 16 -WVR_ALIGNLEFT = 32 -WVR_ALIGNBOTTOM = 64 -WVR_ALIGNRIGHT = 128 -WVR_HREDRAW = 256 -WVR_VREDRAW = 512 -WVR_REDRAW = WVR_HREDRAW | WVR_VREDRAW -WVR_VALIDRECTS = 1024 -MK_LBUTTON = 1 -MK_RBUTTON = 2 -MK_SHIFT = 4 -MK_CONTROL = 8 -MK_MBUTTON = 16 -TME_HOVER = 1 -TME_LEAVE = 2 -TME_QUERY = 1073741824 -TME_CANCEL = -2147483648 -HOVER_DEFAULT = -1 -WS_OVERLAPPED = 0 -WS_POPUP = -2147483648 -WS_CHILD = 1073741824 -WS_MINIMIZE = 536870912 -WS_VISIBLE = 268435456 -WS_DISABLED = 134217728 -WS_CLIPSIBLINGS = 67108864 -WS_CLIPCHILDREN = 33554432 -WS_MAXIMIZE = 16777216 -WS_CAPTION = 12582912 -WS_BORDER = 8388608 -WS_DLGFRAME = 4194304 -WS_VSCROLL = 2097152 -WS_HSCROLL = 1048576 -WS_SYSMENU = 524288 -WS_THICKFRAME = 262144 -WS_GROUP = 131072 -WS_TABSTOP = 65536 -WS_MINIMIZEBOX = 131072 -WS_MAXIMIZEBOX = 65536 -WS_TILED = WS_OVERLAPPED -WS_ICONIC = WS_MINIMIZE -WS_SIZEBOX = WS_THICKFRAME -WS_OVERLAPPEDWINDOW = ( - WS_OVERLAPPED - | WS_CAPTION - | WS_SYSMENU - | WS_THICKFRAME - | WS_MINIMIZEBOX - | WS_MAXIMIZEBOX -) -WS_POPUPWINDOW = WS_POPUP | WS_BORDER | WS_SYSMENU -WS_CHILDWINDOW = WS_CHILD -WS_TILEDWINDOW = WS_OVERLAPPEDWINDOW -WS_EX_DLGMODALFRAME = 1 -WS_EX_NOPARENTNOTIFY = 4 -WS_EX_TOPMOST = 8 -WS_EX_ACCEPTFILES = 16 -WS_EX_TRANSPARENT = 32 -WS_EX_MDICHILD = 64 -WS_EX_TOOLWINDOW = 128 -WS_EX_WINDOWEDGE = 256 -WS_EX_CLIENTEDGE = 512 -WS_EX_CONTEXTHELP = 1024 -WS_EX_RIGHT = 4096 -WS_EX_LEFT = 0 -WS_EX_RTLREADING = 8192 -WS_EX_LTRREADING = 0 -WS_EX_LEFTSCROLLBAR = 16384 -WS_EX_RIGHTSCROLLBAR = 0 
-WS_EX_CONTROLPARENT = 65536 -WS_EX_STATICEDGE = 131072 -WS_EX_APPWINDOW = 262144 -WS_EX_OVERLAPPEDWINDOW = WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE -WS_EX_PALETTEWINDOW = WS_EX_WINDOWEDGE | WS_EX_TOOLWINDOW | WS_EX_TOPMOST -WS_EX_LAYERED = 0x00080000 -WS_EX_NOINHERITLAYOUT = 0x00100000 -WS_EX_LAYOUTRTL = 0x00400000 -WS_EX_COMPOSITED = 0x02000000 -WS_EX_NOACTIVATE = 0x08000000 - -CS_VREDRAW = 1 -CS_HREDRAW = 2 -# CS_KEYCVTWINDOW = 0x0004 -CS_DBLCLKS = 8 -CS_OWNDC = 32 -CS_CLASSDC = 64 -CS_PARENTDC = 128 -# CS_NOKEYCVT = 0x0100 -CS_NOCLOSE = 512 -CS_SAVEBITS = 2048 -CS_BYTEALIGNCLIENT = 4096 -CS_BYTEALIGNWINDOW = 8192 -CS_GLOBALCLASS = 16384 -CS_IME = 65536 -PRF_CHECKVISIBLE = 1 -PRF_NONCLIENT = 2 -PRF_CLIENT = 4 -PRF_ERASEBKGND = 8 -PRF_CHILDREN = 16 -PRF_OWNED = 32 -BDR_RAISEDOUTER = 1 -BDR_SUNKENOUTER = 2 -BDR_RAISEDINNER = 4 -BDR_SUNKENINNER = 8 -BDR_OUTER = 3 -BDR_INNER = 12 -# BDR_RAISED = 0x0005 -# BDR_SUNKEN = 0x000a -EDGE_RAISED = BDR_RAISEDOUTER | BDR_RAISEDINNER -EDGE_SUNKEN = BDR_SUNKENOUTER | BDR_SUNKENINNER -EDGE_ETCHED = BDR_SUNKENOUTER | BDR_RAISEDINNER -EDGE_BUMP = BDR_RAISEDOUTER | BDR_SUNKENINNER - -# winuser.h line 2879 -ISMEX_NOSEND = 0 -ISMEX_SEND = 1 -ISMEX_NOTIFY = 2 -ISMEX_CALLBACK = 4 -ISMEX_REPLIED = 8 -CW_USEDEFAULT = -2147483648 -FLASHW_STOP = 0 -FLASHW_CAPTION = 1 -FLASHW_TRAY = 2 -FLASHW_ALL = FLASHW_CAPTION | FLASHW_TRAY -FLASHW_TIMER = 4 -FLASHW_TIMERNOFG = 12 - -# winuser.h line 7963 -DS_ABSALIGN = 1 -DS_SYSMODAL = 2 -DS_LOCALEDIT = 32 -DS_SETFONT = 64 -DS_MODALFRAME = 128 -DS_NOIDLEMSG = 256 -DS_SETFOREGROUND = 512 -DS_3DLOOK = 4 -DS_FIXEDSYS = 8 -DS_NOFAILCREATE = 16 -DS_CONTROL = 1024 -DS_CENTER = 2048 -DS_CENTERMOUSE = 4096 -DS_CONTEXTHELP = 8192 -DM_GETDEFID = WM_USER + 0 -DM_SETDEFID = WM_USER + 1 -DM_REPOSITION = WM_USER + 2 -# PSM_PAGEINFO = (WM_USER+100) -# PSM_SHEETINFO = (WM_USER+101) -# PSI_SETACTIVE = 0x0001 -# PSI_KILLACTIVE = 0x0002 -# PSI_APPLY = 0x0003 -# PSI_RESET = 0x0004 -# PSI_HASHELP = 0x0005 -# PSI_HELP = 0x0006 -# 
PSI_CHANGED = 0x0001 -# PSI_GUISTART = 0x0002 -# PSI_REBOOT = 0x0003 -# PSI_GETSIBLINGS = 0x0004 -DC_HASDEFID = 21323 -DLGC_WANTARROWS = 1 -DLGC_WANTTAB = 2 -DLGC_WANTALLKEYS = 4 -DLGC_WANTMESSAGE = 4 -DLGC_HASSETSEL = 8 -DLGC_DEFPUSHBUTTON = 16 -DLGC_UNDEFPUSHBUTTON = 32 -DLGC_RADIOBUTTON = 64 -DLGC_WANTCHARS = 128 -DLGC_STATIC = 256 -DLGC_BUTTON = 8192 -LB_CTLCODE = 0 -LB_OKAY = 0 -LB_ERR = -1 -LB_ERRSPACE = -2 -LBN_ERRSPACE = -2 -LBN_SELCHANGE = 1 -LBN_DBLCLK = 2 -LBN_SELCANCEL = 3 -LBN_SETFOCUS = 4 -LBN_KILLFOCUS = 5 -LB_ADDSTRING = 384 -LB_INSERTSTRING = 385 -LB_DELETESTRING = 386 -LB_SELITEMRANGEEX = 387 -LB_RESETCONTENT = 388 -LB_SETSEL = 389 -LB_SETCURSEL = 390 -LB_GETSEL = 391 -LB_GETCURSEL = 392 -LB_GETTEXT = 393 -LB_GETTEXTLEN = 394 -LB_GETCOUNT = 395 -LB_SELECTSTRING = 396 -LB_DIR = 397 -LB_GETTOPINDEX = 398 -LB_FINDSTRING = 399 -LB_GETSELCOUNT = 400 -LB_GETSELITEMS = 401 -LB_SETTABSTOPS = 402 -LB_GETHORIZONTALEXTENT = 403 -LB_SETHORIZONTALEXTENT = 404 -LB_SETCOLUMNWIDTH = 405 -LB_ADDFILE = 406 -LB_SETTOPINDEX = 407 -LB_GETITEMRECT = 408 -LB_GETITEMDATA = 409 -LB_SETITEMDATA = 410 -LB_SELITEMRANGE = 411 -LB_SETANCHORINDEX = 412 -LB_GETANCHORINDEX = 413 -LB_SETCARETINDEX = 414 -LB_GETCARETINDEX = 415 -LB_SETITEMHEIGHT = 416 -LB_GETITEMHEIGHT = 417 -LB_FINDSTRINGEXACT = 418 -LB_SETLOCALE = 421 -LB_GETLOCALE = 422 -LB_SETCOUNT = 423 -LB_INITSTORAGE = 424 -LB_ITEMFROMPOINT = 425 -LB_MSGMAX = 432 -LBS_NOTIFY = 1 -LBS_SORT = 2 -LBS_NOREDRAW = 4 -LBS_MULTIPLESEL = 8 -LBS_OWNERDRAWFIXED = 16 -LBS_OWNERDRAWVARIABLE = 32 -LBS_HASSTRINGS = 64 -LBS_USETABSTOPS = 128 -LBS_NOINTEGRALHEIGHT = 256 -LBS_MULTICOLUMN = 512 -LBS_WANTKEYBOARDINPUT = 1024 -LBS_EXTENDEDSEL = 2048 -LBS_DISABLENOSCROLL = 4096 -LBS_NODATA = 8192 -LBS_NOSEL = 16384 -LBS_STANDARD = LBS_NOTIFY | LBS_SORT | WS_VSCROLL | WS_BORDER -CB_OKAY = 0 -CB_ERR = -1 -CB_ERRSPACE = -2 -CBN_ERRSPACE = -1 -CBN_SELCHANGE = 1 -CBN_DBLCLK = 2 -CBN_SETFOCUS = 3 -CBN_KILLFOCUS = 4 -CBN_EDITCHANGE = 5 -CBN_EDITUPDATE = 
6 -CBN_DROPDOWN = 7 -CBN_CLOSEUP = 8 -CBN_SELENDOK = 9 -CBN_SELENDCANCEL = 10 -CBS_SIMPLE = 1 -CBS_DROPDOWN = 2 -CBS_DROPDOWNLIST = 3 -CBS_OWNERDRAWFIXED = 16 -CBS_OWNERDRAWVARIABLE = 32 -CBS_AUTOHSCROLL = 64 -CBS_OEMCONVERT = 128 -CBS_SORT = 256 -CBS_HASSTRINGS = 512 -CBS_NOINTEGRALHEIGHT = 1024 -CBS_DISABLENOSCROLL = 2048 -CBS_UPPERCASE = 8192 -CBS_LOWERCASE = 16384 -CB_GETEDITSEL = 320 -CB_LIMITTEXT = 321 -CB_SETEDITSEL = 322 -CB_ADDSTRING = 323 -CB_DELETESTRING = 324 -CB_DIR = 325 -CB_GETCOUNT = 326 -CB_GETCURSEL = 327 -CB_GETLBTEXT = 328 -CB_GETLBTEXTLEN = 329 -CB_INSERTSTRING = 330 -CB_RESETCONTENT = 331 -CB_FINDSTRING = 332 -CB_SELECTSTRING = 333 -CB_SETCURSEL = 334 -CB_SHOWDROPDOWN = 335 -CB_GETITEMDATA = 336 -CB_SETITEMDATA = 337 -CB_GETDROPPEDCONTROLRECT = 338 -CB_SETITEMHEIGHT = 339 -CB_GETITEMHEIGHT = 340 -CB_SETEXTENDEDUI = 341 -CB_GETEXTENDEDUI = 342 -CB_GETDROPPEDSTATE = 343 -CB_FINDSTRINGEXACT = 344 -CB_SETLOCALE = 345 -CB_GETLOCALE = 346 -CB_GETTOPINDEX = 347 -CB_SETTOPINDEX = 348 -CB_GETHORIZONTALEXTENT = 349 -CB_SETHORIZONTALEXTENT = 350 -CB_GETDROPPEDWIDTH = 351 -CB_SETDROPPEDWIDTH = 352 -CB_INITSTORAGE = 353 -CB_MSGMAX = 354 -SBS_HORZ = 0 -SBS_VERT = 1 -SBS_TOPALIGN = 2 -SBS_LEFTALIGN = 2 -SBS_BOTTOMALIGN = 4 -SBS_RIGHTALIGN = 4 -SBS_SIZEBOXTOPLEFTALIGN = 2 -SBS_SIZEBOXBOTTOMRIGHTALIGN = 4 -SBS_SIZEBOX = 8 -SBS_SIZEGRIP = 16 -SBM_SETPOS = 224 -SBM_GETPOS = 225 -SBM_SETRANGE = 226 -SBM_SETRANGEREDRAW = 230 -SBM_GETRANGE = 227 -SBM_ENABLE_ARROWS = 228 -SBM_SETSCROLLINFO = 233 -SBM_GETSCROLLINFO = 234 -SIF_RANGE = 1 -SIF_PAGE = 2 -SIF_POS = 4 -SIF_DISABLENOSCROLL = 8 -SIF_TRACKPOS = 16 -SIF_ALL = SIF_RANGE | SIF_PAGE | SIF_POS | SIF_TRACKPOS -MDIS_ALLCHILDSTYLES = 1 -MDITILE_VERTICAL = 0 -MDITILE_HORIZONTAL = 1 -MDITILE_SKIPDISABLED = 2 -MDITILE_ZORDER = 4 - -IMC_GETCANDIDATEPOS = 7 -IMC_SETCANDIDATEPOS = 8 -IMC_GETCOMPOSITIONFONT = 9 -IMC_SETCOMPOSITIONFONT = 10 -IMC_GETCOMPOSITIONWINDOW = 11 -IMC_SETCOMPOSITIONWINDOW = 12 -IMC_GETSTATUSWINDOWPOS 
= 15 -IMC_SETSTATUSWINDOWPOS = 16 -IMC_CLOSESTATUSWINDOW = 33 -IMC_OPENSTATUSWINDOW = 34 -# Generated by h2py from \msvc20\include\winnt.h -# hacked and split by mhammond. -DELETE = 65536 -READ_CONTROL = 131072 -WRITE_DAC = 262144 -WRITE_OWNER = 524288 -SYNCHRONIZE = 1048576 -STANDARD_RIGHTS_REQUIRED = 983040 -STANDARD_RIGHTS_READ = READ_CONTROL -STANDARD_RIGHTS_WRITE = READ_CONTROL -STANDARD_RIGHTS_EXECUTE = READ_CONTROL -STANDARD_RIGHTS_ALL = 2031616 -SPECIFIC_RIGHTS_ALL = 65535 -ACCESS_SYSTEM_SECURITY = 16777216 -MAXIMUM_ALLOWED = 33554432 -GENERIC_READ = -2147483648 -GENERIC_WRITE = 1073741824 -GENERIC_EXECUTE = 536870912 -GENERIC_ALL = 268435456 - -SERVICE_KERNEL_DRIVER = 1 -SERVICE_FILE_SYSTEM_DRIVER = 2 -SERVICE_ADAPTER = 4 -SERVICE_RECOGNIZER_DRIVER = 8 -SERVICE_DRIVER = ( - SERVICE_KERNEL_DRIVER | SERVICE_FILE_SYSTEM_DRIVER | SERVICE_RECOGNIZER_DRIVER -) -SERVICE_WIN32_OWN_PROCESS = 16 -SERVICE_WIN32_SHARE_PROCESS = 32 -SERVICE_WIN32 = SERVICE_WIN32_OWN_PROCESS | SERVICE_WIN32_SHARE_PROCESS -SERVICE_INTERACTIVE_PROCESS = 256 -SERVICE_TYPE_ALL = ( - SERVICE_WIN32 | SERVICE_ADAPTER | SERVICE_DRIVER | SERVICE_INTERACTIVE_PROCESS -) -SERVICE_BOOT_START = 0 -SERVICE_SYSTEM_START = 1 -SERVICE_AUTO_START = 2 -SERVICE_DEMAND_START = 3 -SERVICE_DISABLED = 4 -SERVICE_ERROR_IGNORE = 0 -SERVICE_ERROR_NORMAL = 1 -SERVICE_ERROR_SEVERE = 2 -SERVICE_ERROR_CRITICAL = 3 -TAPE_ERASE_SHORT = 0 -TAPE_ERASE_LONG = 1 -TAPE_LOAD = 0 -TAPE_UNLOAD = 1 -TAPE_TENSION = 2 -TAPE_LOCK = 3 -TAPE_UNLOCK = 4 -TAPE_FORMAT = 5 -TAPE_SETMARKS = 0 -TAPE_FILEMARKS = 1 -TAPE_SHORT_FILEMARKS = 2 -TAPE_LONG_FILEMARKS = 3 -TAPE_ABSOLUTE_POSITION = 0 -TAPE_LOGICAL_POSITION = 1 -TAPE_PSEUDO_LOGICAL_POSITION = 2 -TAPE_REWIND = 0 -TAPE_ABSOLUTE_BLOCK = 1 -TAPE_LOGICAL_BLOCK = 2 -TAPE_PSEUDO_LOGICAL_BLOCK = 3 -TAPE_SPACE_END_OF_DATA = 4 -TAPE_SPACE_RELATIVE_BLOCKS = 5 -TAPE_SPACE_FILEMARKS = 6 -TAPE_SPACE_SEQUENTIAL_FMKS = 7 -TAPE_SPACE_SETMARKS = 8 -TAPE_SPACE_SEQUENTIAL_SMKS = 9 -TAPE_DRIVE_FIXED = 1 
-TAPE_DRIVE_SELECT = 2 -TAPE_DRIVE_INITIATOR = 4 -TAPE_DRIVE_ERASE_SHORT = 16 -TAPE_DRIVE_ERASE_LONG = 32 -TAPE_DRIVE_ERASE_BOP_ONLY = 64 -TAPE_DRIVE_ERASE_IMMEDIATE = 128 -TAPE_DRIVE_TAPE_CAPACITY = 256 -TAPE_DRIVE_TAPE_REMAINING = 512 -TAPE_DRIVE_FIXED_BLOCK = 1024 -TAPE_DRIVE_VARIABLE_BLOCK = 2048 -TAPE_DRIVE_WRITE_PROTECT = 4096 -TAPE_DRIVE_EOT_WZ_SIZE = 8192 -TAPE_DRIVE_ECC = 65536 -TAPE_DRIVE_COMPRESSION = 131072 -TAPE_DRIVE_PADDING = 262144 -TAPE_DRIVE_REPORT_SMKS = 524288 -TAPE_DRIVE_GET_ABSOLUTE_BLK = 1048576 -TAPE_DRIVE_GET_LOGICAL_BLK = 2097152 -TAPE_DRIVE_SET_EOT_WZ_SIZE = 4194304 -TAPE_DRIVE_LOAD_UNLOAD = -2147483647 -TAPE_DRIVE_TENSION = -2147483646 -TAPE_DRIVE_LOCK_UNLOCK = -2147483644 -TAPE_DRIVE_REWIND_IMMEDIATE = -2147483640 -TAPE_DRIVE_SET_BLOCK_SIZE = -2147483632 -TAPE_DRIVE_LOAD_UNLD_IMMED = -2147483616 -TAPE_DRIVE_TENSION_IMMED = -2147483584 -TAPE_DRIVE_LOCK_UNLK_IMMED = -2147483520 -TAPE_DRIVE_SET_ECC = -2147483392 -TAPE_DRIVE_SET_COMPRESSION = -2147483136 -TAPE_DRIVE_SET_PADDING = -2147482624 -TAPE_DRIVE_SET_REPORT_SMKS = -2147481600 -TAPE_DRIVE_ABSOLUTE_BLK = -2147479552 -TAPE_DRIVE_ABS_BLK_IMMED = -2147475456 -TAPE_DRIVE_LOGICAL_BLK = -2147467264 -TAPE_DRIVE_LOG_BLK_IMMED = -2147450880 -TAPE_DRIVE_END_OF_DATA = -2147418112 -TAPE_DRIVE_RELATIVE_BLKS = -2147352576 -TAPE_DRIVE_FILEMARKS = -2147221504 -TAPE_DRIVE_SEQUENTIAL_FMKS = -2146959360 -TAPE_DRIVE_SETMARKS = -2146435072 -TAPE_DRIVE_SEQUENTIAL_SMKS = -2145386496 -TAPE_DRIVE_REVERSE_POSITION = -2143289344 -TAPE_DRIVE_SPACE_IMMEDIATE = -2139095040 -TAPE_DRIVE_WRITE_SETMARKS = -2130706432 -TAPE_DRIVE_WRITE_FILEMARKS = -2113929216 -TAPE_DRIVE_WRITE_SHORT_FMKS = -2080374784 -TAPE_DRIVE_WRITE_LONG_FMKS = -2013265920 -TAPE_DRIVE_WRITE_MARK_IMMED = -1879048192 -TAPE_DRIVE_FORMAT = -1610612736 -TAPE_DRIVE_FORMAT_IMMEDIATE = -1073741824 -TAPE_FIXED_PARTITIONS = 0 -TAPE_SELECT_PARTITIONS = 1 -TAPE_INITIATOR_PARTITIONS = 2 -# Generated by h2py from \msvc20\include\winnt.h -# hacked and split by 
mhammond. - -APPLICATION_ERROR_MASK = 536870912 -ERROR_SEVERITY_SUCCESS = 0 -ERROR_SEVERITY_INFORMATIONAL = 1073741824 -ERROR_SEVERITY_WARNING = -2147483648 -ERROR_SEVERITY_ERROR = -1073741824 -MINCHAR = 128 -MAXCHAR = 127 -MINSHORT = 32768 -MAXSHORT = 32767 -MINLONG = -2147483648 -MAXLONG = 2147483647 -MAXBYTE = 255 -MAXWORD = 65535 -MAXDWORD = -1 -LANG_NEUTRAL = 0 -LANG_BULGARIAN = 2 -LANG_CHINESE = 4 -LANG_CROATIAN = 26 -LANG_CZECH = 5 -LANG_DANISH = 6 -LANG_DUTCH = 19 -LANG_ENGLISH = 9 -LANG_FINNISH = 11 -LANG_FRENCH = 12 -LANG_GERMAN = 7 -LANG_GREEK = 8 -LANG_HUNGARIAN = 14 -LANG_ICELANDIC = 15 -LANG_ITALIAN = 16 -LANG_JAPANESE = 17 -LANG_KOREAN = 18 -LANG_NORWEGIAN = 20 -LANG_POLISH = 21 -LANG_PORTUGUESE = 22 -LANG_ROMANIAN = 24 -LANG_RUSSIAN = 25 -LANG_SLOVAK = 27 -LANG_SLOVENIAN = 36 -LANG_SPANISH = 10 -LANG_SWEDISH = 29 -LANG_TURKISH = 31 -SUBLANG_NEUTRAL = 0 -SUBLANG_DEFAULT = 1 -SUBLANG_SYS_DEFAULT = 2 -SUBLANG_CHINESE_TRADITIONAL = 1 -SUBLANG_CHINESE_SIMPLIFIED = 2 -SUBLANG_CHINESE_HONGKONG = 3 -SUBLANG_CHINESE_SINGAPORE = 4 -SUBLANG_DUTCH = 1 -SUBLANG_DUTCH_BELGIAN = 2 -SUBLANG_ENGLISH_US = 1 -SUBLANG_ENGLISH_UK = 2 -SUBLANG_ENGLISH_AUS = 3 -SUBLANG_ENGLISH_CAN = 4 -SUBLANG_ENGLISH_NZ = 5 -SUBLANG_ENGLISH_EIRE = 6 -SUBLANG_FRENCH = 1 -SUBLANG_FRENCH_BELGIAN = 2 -SUBLANG_FRENCH_CANADIAN = 3 -SUBLANG_FRENCH_SWISS = 4 -SUBLANG_GERMAN = 1 -SUBLANG_GERMAN_SWISS = 2 -SUBLANG_GERMAN_AUSTRIAN = 3 -SUBLANG_ITALIAN = 1 -SUBLANG_ITALIAN_SWISS = 2 -SUBLANG_NORWEGIAN_BOKMAL = 1 -SUBLANG_NORWEGIAN_NYNORSK = 2 -SUBLANG_PORTUGUESE = 2 -SUBLANG_PORTUGUESE_BRAZILIAN = 1 -SUBLANG_SPANISH = 1 -SUBLANG_SPANISH_MEXICAN = 2 -SUBLANG_SPANISH_MODERN = 3 -SORT_DEFAULT = 0 -SORT_JAPANESE_XJIS = 0 -SORT_JAPANESE_UNICODE = 1 -SORT_CHINESE_BIG5 = 0 -SORT_CHINESE_UNICODE = 1 -SORT_KOREAN_KSC = 0 -SORT_KOREAN_UNICODE = 1 - - -def PRIMARYLANGID(lgid): - return (lgid) & 1023 - - -def SUBLANGID(lgid): - return (lgid) >> 10 - - -NLS_VALID_LOCALE_MASK = 1048575 -CONTEXT_PORTABLE_32BIT = 
1048576 -CONTEXT_ALPHA = 131072 -CONTEXT_CONTROL = CONTEXT_ALPHA | 1 -CONTEXT_FLOATING_POINT = CONTEXT_ALPHA | 2 -CONTEXT_INTEGER = CONTEXT_ALPHA | 4 -CONTEXT_FULL = CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER -SIZE_OF_80387_REGISTERS = 80 -CONTEXT_FULL = CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER -CONTEXT_CONTROL = 1 -CONTEXT_FLOATING_POINT = 2 -CONTEXT_INTEGER = 4 -CONTEXT_FULL = CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER -PROCESS_TERMINATE = 1 -PROCESS_CREATE_THREAD = 2 -PROCESS_VM_OPERATION = 8 -PROCESS_VM_READ = 16 -PROCESS_VM_WRITE = 32 -PROCESS_DUP_HANDLE = 64 -PROCESS_CREATE_PROCESS = 128 -PROCESS_SET_QUOTA = 256 -PROCESS_SET_INFORMATION = 512 -PROCESS_QUERY_INFORMATION = 1024 -PROCESS_SUSPEND_RESUME = 2048 -PROCESS_QUERY_LIMITED_INFORMATION = 4096 -PROCESS_SET_LIMITED_INFORMATION = 8192 -PROCESS_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 4095 -THREAD_TERMINATE = 1 -THREAD_SUSPEND_RESUME = 2 -THREAD_GET_CONTEXT = 8 -THREAD_SET_CONTEXT = 16 -THREAD_SET_INFORMATION = 32 -THREAD_QUERY_INFORMATION = 64 -THREAD_SET_THREAD_TOKEN = 128 -THREAD_IMPERSONATE = 256 -THREAD_DIRECT_IMPERSONATION = 512 -THREAD_SET_LIMITED_INFORMATION = 1024 -THREAD_QUERY_LIMITED_INFORMATION = 2048 -THREAD_RESUME = 4096 -TLS_MINIMUM_AVAILABLE = 64 -EVENT_MODIFY_STATE = 2 -MUTANT_QUERY_STATE = 1 -SEMAPHORE_MODIFY_STATE = 2 -TIME_ZONE_ID_UNKNOWN = 0 -TIME_ZONE_ID_STANDARD = 1 -TIME_ZONE_ID_DAYLIGHT = 2 -PROCESSOR_INTEL_386 = 386 -PROCESSOR_INTEL_486 = 486 -PROCESSOR_INTEL_PENTIUM = 586 -PROCESSOR_INTEL_860 = 860 -PROCESSOR_MIPS_R2000 = 2000 -PROCESSOR_MIPS_R3000 = 3000 -PROCESSOR_MIPS_R4000 = 4000 -PROCESSOR_ALPHA_21064 = 21064 -PROCESSOR_PPC_601 = 601 -PROCESSOR_PPC_603 = 603 -PROCESSOR_PPC_604 = 604 -PROCESSOR_PPC_620 = 620 -SECTION_QUERY = 1 -SECTION_MAP_WRITE = 2 -SECTION_MAP_READ = 4 -SECTION_MAP_EXECUTE = 8 -SECTION_EXTEND_SIZE = 16 -PAGE_NOACCESS = 1 -PAGE_READONLY = 2 -PAGE_READWRITE = 4 -PAGE_WRITECOPY = 8 -PAGE_EXECUTE = 16 
-PAGE_EXECUTE_READ = 32 -PAGE_EXECUTE_READWRITE = 64 -PAGE_EXECUTE_WRITECOPY = 128 -PAGE_GUARD = 256 -PAGE_NOCACHE = 512 -MEM_COMMIT = 4096 -MEM_RESERVE = 8192 -MEM_DECOMMIT = 16384 -MEM_RELEASE = 32768 -MEM_FREE = 65536 -MEM_PRIVATE = 131072 -MEM_MAPPED = 262144 -MEM_TOP_DOWN = 1048576 - -# Generated by h2py from \msvc20\include\winnt.h -# hacked and split by mhammond. -SEC_FILE = 8388608 -SEC_IMAGE = 16777216 -SEC_RESERVE = 67108864 -SEC_COMMIT = 134217728 -SEC_NOCACHE = 268435456 -MEM_IMAGE = SEC_IMAGE -FILE_SHARE_READ = 1 -FILE_SHARE_WRITE = 2 -FILE_SHARE_DELETE = 4 -FILE_ATTRIBUTE_READONLY = 1 -FILE_ATTRIBUTE_HIDDEN = 2 -FILE_ATTRIBUTE_SYSTEM = 4 -FILE_ATTRIBUTE_DIRECTORY = 16 -FILE_ATTRIBUTE_ARCHIVE = 32 -FILE_ATTRIBUTE_DEVICE = 64 -FILE_ATTRIBUTE_NORMAL = 128 -FILE_ATTRIBUTE_TEMPORARY = 256 -FILE_ATTRIBUTE_SPARSE_FILE = 512 -FILE_ATTRIBUTE_REPARSE_POINT = 1024 -FILE_ATTRIBUTE_COMPRESSED = 2048 -FILE_ATTRIBUTE_OFFLINE = 4096 -FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192 -FILE_ATTRIBUTE_ENCRYPTED = 16384 -FILE_ATTRIBUTE_VIRTUAL = 65536 -# These FILE_ATTRIBUTE_* flags are apparently old definitions from Windows 95 -# and conflict with current values above - but they live on for b/w compat... 
-FILE_ATTRIBUTE_ATOMIC_WRITE = 512 -FILE_ATTRIBUTE_XACTION_WRITE = 1024 - -FILE_NOTIFY_CHANGE_FILE_NAME = 1 -FILE_NOTIFY_CHANGE_DIR_NAME = 2 -FILE_NOTIFY_CHANGE_ATTRIBUTES = 4 -FILE_NOTIFY_CHANGE_SIZE = 8 -FILE_NOTIFY_CHANGE_LAST_WRITE = 16 -FILE_NOTIFY_CHANGE_SECURITY = 256 -FILE_CASE_SENSITIVE_SEARCH = 1 -FILE_CASE_PRESERVED_NAMES = 2 -FILE_UNICODE_ON_DISK = 4 -FILE_PERSISTENT_ACLS = 8 -FILE_FILE_COMPRESSION = 16 -FILE_NAMED_STREAMS = 262144 -FILE_PERSISTENT_ACLS = 0x00000008 -FILE_READ_ONLY_VOLUME = 0x00080000 -FILE_SEQUENTIAL_WRITE_ONCE = 0x00100000 -FILE_SUPPORTS_ENCRYPTION = 0x00020000 -FILE_SUPPORTS_EXTENDED_ATTRIBUTES = 0x00800000 -FILE_SUPPORTS_HARD_LINKS = 0x00400000 -FILE_SUPPORTS_OBJECT_IDS = 0x00010000 -FILE_SUPPORTS_OPEN_BY_FILE_ID = 0x01000000 -FILE_SUPPORTS_REPARSE_POINTS = 0x00000080 -FILE_SUPPORTS_SPARSE_FILES = 0x00000040 -FILE_SUPPORTS_TRANSACTIONS = 0x00200000 -FILE_SUPPORTS_USN_JOURNAL = 0x02000000 -FILE_UNICODE_ON_DISK = 0x00000004 -FILE_VOLUME_IS_COMPRESSED = 0x00008000 -FILE_VOLUME_QUOTAS = 0x00000020 -FILE_VOLUME_IS_COMPRESSED = 32768 -IO_COMPLETION_MODIFY_STATE = 2 -DUPLICATE_CLOSE_SOURCE = 1 -DUPLICATE_SAME_ACCESS = 2 -SID_MAX_SUB_AUTHORITIES = 15 -SECURITY_NULL_RID = 0 -SECURITY_WORLD_RID = 0 -SECURITY_LOCAL_RID = 0x00000000 -SECURITY_CREATOR_OWNER_RID = 0 -SECURITY_CREATOR_GROUP_RID = 1 -SECURITY_DIALUP_RID = 1 -SECURITY_NETWORK_RID = 2 -SECURITY_BATCH_RID = 3 -SECURITY_INTERACTIVE_RID = 4 -SECURITY_SERVICE_RID = 6 -SECURITY_ANONYMOUS_LOGON_RID = 7 -SECURITY_LOGON_IDS_RID = 5 -SECURITY_LOGON_IDS_RID_COUNT = 3 -SECURITY_LOCAL_SYSTEM_RID = 18 -SECURITY_NT_NON_UNIQUE = 21 -SECURITY_BUILTIN_DOMAIN_RID = 32 -DOMAIN_USER_RID_ADMIN = 500 -DOMAIN_USER_RID_GUEST = 501 -DOMAIN_GROUP_RID_ADMINS = 512 -DOMAIN_GROUP_RID_USERS = 513 -DOMAIN_GROUP_RID_GUESTS = 514 -DOMAIN_ALIAS_RID_ADMINS = 544 -DOMAIN_ALIAS_RID_USERS = 545 -DOMAIN_ALIAS_RID_GUESTS = 546 -DOMAIN_ALIAS_RID_POWER_USERS = 547 -DOMAIN_ALIAS_RID_ACCOUNT_OPS = 548 
-DOMAIN_ALIAS_RID_SYSTEM_OPS = 549 -DOMAIN_ALIAS_RID_PRINT_OPS = 550 -DOMAIN_ALIAS_RID_BACKUP_OPS = 551 -DOMAIN_ALIAS_RID_REPLICATOR = 552 -SE_GROUP_MANDATORY = 1 -SE_GROUP_ENABLED_BY_DEFAULT = 2 -SE_GROUP_ENABLED = 4 -SE_GROUP_OWNER = 8 -SE_GROUP_LOGON_ID = -1073741824 -ACL_REVISION = 2 -ACL_REVISION1 = 1 -ACL_REVISION2 = 2 -ACCESS_ALLOWED_ACE_TYPE = 0 -ACCESS_DENIED_ACE_TYPE = 1 -SYSTEM_AUDIT_ACE_TYPE = 2 -SYSTEM_ALARM_ACE_TYPE = 3 -OBJECT_INHERIT_ACE = 1 -CONTAINER_INHERIT_ACE = 2 -NO_PROPAGATE_INHERIT_ACE = 4 -INHERIT_ONLY_ACE = 8 -VALID_INHERIT_FLAGS = 15 -SUCCESSFUL_ACCESS_ACE_FLAG = 64 -FAILED_ACCESS_ACE_FLAG = 128 -SECURITY_DESCRIPTOR_REVISION = 1 -SECURITY_DESCRIPTOR_REVISION1 = 1 -SECURITY_DESCRIPTOR_MIN_LENGTH = 20 -SE_OWNER_DEFAULTED = 1 -SE_GROUP_DEFAULTED = 2 -SE_DACL_PRESENT = 4 -SE_DACL_DEFAULTED = 8 -SE_SACL_PRESENT = 16 -SE_SACL_DEFAULTED = 32 -SE_SELF_RELATIVE = 32768 -SE_PRIVILEGE_ENABLED_BY_DEFAULT = 1 -SE_PRIVILEGE_ENABLED = 2 -SE_PRIVILEGE_USED_FOR_ACCESS = -2147483648 -PRIVILEGE_SET_ALL_NECESSARY = 1 -SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege" -SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege" -SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege" -SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege" -SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege" -SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege" -SE_TCB_NAME = "SeTcbPrivilege" -SE_SECURITY_NAME = "SeSecurityPrivilege" -SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege" -SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege" -SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege" -SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege" -SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege" -SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege" -SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege" -SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege" -SE_BACKUP_NAME = "SeBackupPrivilege" -SE_RESTORE_NAME = "SeRestorePrivilege" -SE_SHUTDOWN_NAME = "SeShutdownPrivilege" 
-SE_DEBUG_NAME = "SeDebugPrivilege" -SE_AUDIT_NAME = "SeAuditPrivilege" -SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege" -SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege" -SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege" - -TOKEN_ASSIGN_PRIMARY = 1 -TOKEN_DUPLICATE = 2 -TOKEN_IMPERSONATE = 4 -TOKEN_QUERY = 8 -TOKEN_QUERY_SOURCE = 16 -TOKEN_ADJUST_PRIVILEGES = 32 -TOKEN_ADJUST_GROUPS = 64 -TOKEN_ADJUST_DEFAULT = 128 -TOKEN_ADJUST_SESSIONID = 256 -TOKEN_ALL_ACCESS = ( - STANDARD_RIGHTS_REQUIRED - | TOKEN_ASSIGN_PRIMARY - | TOKEN_DUPLICATE - | TOKEN_IMPERSONATE - | TOKEN_QUERY - | TOKEN_QUERY_SOURCE - | TOKEN_ADJUST_PRIVILEGES - | TOKEN_ADJUST_GROUPS - | TOKEN_ADJUST_DEFAULT - | TOKEN_ADJUST_SESSIONID -) -TOKEN_READ = STANDARD_RIGHTS_READ | TOKEN_QUERY -TOKEN_WRITE = ( - STANDARD_RIGHTS_WRITE - | TOKEN_ADJUST_PRIVILEGES - | TOKEN_ADJUST_GROUPS - | TOKEN_ADJUST_DEFAULT -) -TOKEN_EXECUTE = STANDARD_RIGHTS_EXECUTE -TOKEN_SOURCE_LENGTH = 8 - -KEY_QUERY_VALUE = 1 -KEY_SET_VALUE = 2 -KEY_CREATE_SUB_KEY = 4 -KEY_ENUMERATE_SUB_KEYS = 8 -KEY_NOTIFY = 16 -KEY_CREATE_LINK = 32 -KEY_WOW64_32KEY = 512 -KEY_WOW64_64KEY = 256 -KEY_WOW64_RES = 768 -KEY_READ = ( - STANDARD_RIGHTS_READ | KEY_QUERY_VALUE | KEY_ENUMERATE_SUB_KEYS | KEY_NOTIFY -) & (~SYNCHRONIZE) -KEY_WRITE = (STANDARD_RIGHTS_WRITE | KEY_SET_VALUE | KEY_CREATE_SUB_KEY) & ( - ~SYNCHRONIZE -) -KEY_EXECUTE = (KEY_READ) & (~SYNCHRONIZE) -KEY_ALL_ACCESS = ( - STANDARD_RIGHTS_ALL - | KEY_QUERY_VALUE - | KEY_SET_VALUE - | KEY_CREATE_SUB_KEY - | KEY_ENUMERATE_SUB_KEYS - | KEY_NOTIFY - | KEY_CREATE_LINK -) & (~SYNCHRONIZE) -REG_NOTIFY_CHANGE_ATTRIBUTES = 2 -REG_NOTIFY_CHANGE_SECURITY = 8 -REG_RESOURCE_REQUIREMENTS_LIST = 10 -REG_NONE = 0 # No value type -REG_SZ = 1 # Unicode nul terminated string -REG_EXPAND_SZ = 2 # Unicode nul terminated string -# (with environment variable references) -REG_BINARY = 3 # Free form binary -REG_DWORD = 4 # 32-bit number -REG_DWORD_LITTLE_ENDIAN = 4 # 32-bit number (same as 
REG_DWORD) -REG_DWORD_BIG_ENDIAN = 5 # 32-bit number -REG_LINK = 6 # Symbolic Link (unicode) -REG_MULTI_SZ = 7 # Multiple Unicode strings -REG_RESOURCE_LIST = 8 # Resource list in the resource map -REG_FULL_RESOURCE_DESCRIPTOR = 9 # Resource list in the hardware description -REG_RESOURCE_REQUIREMENTS_LIST = 10 -REG_QWORD = 11 # 64-bit number -REG_QWORD_LITTLE_ENDIAN = 11 # 64-bit number (same as REG_QWORD) - - -# Generated by h2py from \msvc20\include\winnt.h -# hacked and split by mhammond. -# Included from string.h -_NLSCMPERROR = 2147483647 -NULL = 0 -HEAP_NO_SERIALIZE = 1 -HEAP_GROWABLE = 2 -HEAP_GENERATE_EXCEPTIONS = 4 -HEAP_ZERO_MEMORY = 8 -HEAP_REALLOC_IN_PLACE_ONLY = 16 -HEAP_TAIL_CHECKING_ENABLED = 32 -HEAP_FREE_CHECKING_ENABLED = 64 -HEAP_DISABLE_COALESCE_ON_FREE = 128 -IS_TEXT_UNICODE_ASCII16 = 1 -IS_TEXT_UNICODE_REVERSE_ASCII16 = 16 -IS_TEXT_UNICODE_STATISTICS = 2 -IS_TEXT_UNICODE_REVERSE_STATISTICS = 32 -IS_TEXT_UNICODE_CONTROLS = 4 -IS_TEXT_UNICODE_REVERSE_CONTROLS = 64 -IS_TEXT_UNICODE_SIGNATURE = 8 -IS_TEXT_UNICODE_REVERSE_SIGNATURE = 128 -IS_TEXT_UNICODE_ILLEGAL_CHARS = 256 -IS_TEXT_UNICODE_ODD_LENGTH = 512 -IS_TEXT_UNICODE_DBCS_LEADBYTE = 1024 -IS_TEXT_UNICODE_NULL_BYTES = 4096 -IS_TEXT_UNICODE_UNICODE_MASK = 15 -IS_TEXT_UNICODE_REVERSE_MASK = 240 -IS_TEXT_UNICODE_NOT_UNICODE_MASK = 3840 -IS_TEXT_UNICODE_NOT_ASCII_MASK = 61440 -COMPRESSION_FORMAT_NONE = 0 -COMPRESSION_FORMAT_DEFAULT = 1 -COMPRESSION_FORMAT_LZNT1 = 2 -COMPRESSION_ENGINE_STANDARD = 0 -COMPRESSION_ENGINE_MAXIMUM = 256 -MESSAGE_RESOURCE_UNICODE = 1 -RTL_CRITSECT_TYPE = 0 -RTL_RESOURCE_TYPE = 1 -DLL_PROCESS_ATTACH = 1 -DLL_THREAD_ATTACH = 2 -DLL_THREAD_DETACH = 3 -DLL_PROCESS_DETACH = 0 -EVENTLOG_SEQUENTIAL_READ = 0x0001 -EVENTLOG_SEEK_READ = 0x0002 -EVENTLOG_FORWARDS_READ = 0x0004 -EVENTLOG_BACKWARDS_READ = 0x0008 -EVENTLOG_SUCCESS = 0x0000 -EVENTLOG_ERROR_TYPE = 1 -EVENTLOG_WARNING_TYPE = 2 -EVENTLOG_INFORMATION_TYPE = 4 -EVENTLOG_AUDIT_SUCCESS = 8 -EVENTLOG_AUDIT_FAILURE = 16 
-EVENTLOG_START_PAIRED_EVENT = 1 -EVENTLOG_END_PAIRED_EVENT = 2 -EVENTLOG_END_ALL_PAIRED_EVENTS = 4 -EVENTLOG_PAIRED_EVENT_ACTIVE = 8 -EVENTLOG_PAIRED_EVENT_INACTIVE = 16 -# Generated by h2py from \msvc20\include\winnt.h -# hacked and split by mhammond. -OWNER_SECURITY_INFORMATION = 0x00000001 -GROUP_SECURITY_INFORMATION = 0x00000002 -DACL_SECURITY_INFORMATION = 0x00000004 -SACL_SECURITY_INFORMATION = 0x00000008 -IMAGE_SIZEOF_FILE_HEADER = 20 -IMAGE_FILE_MACHINE_UNKNOWN = 0 -IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16 -IMAGE_SIZEOF_ROM_OPTIONAL_HEADER = 56 -IMAGE_SIZEOF_STD_OPTIONAL_HEADER = 28 -IMAGE_SIZEOF_NT_OPTIONAL_HEADER = 224 -IMAGE_NT_OPTIONAL_HDR_MAGIC = 267 -IMAGE_ROM_OPTIONAL_HDR_MAGIC = 263 -IMAGE_SIZEOF_SHORT_NAME = 8 -IMAGE_SIZEOF_SECTION_HEADER = 40 -IMAGE_SIZEOF_SYMBOL = 18 -IMAGE_SYM_CLASS_NULL = 0 -IMAGE_SYM_CLASS_AUTOMATIC = 1 -IMAGE_SYM_CLASS_EXTERNAL = 2 -IMAGE_SYM_CLASS_STATIC = 3 -IMAGE_SYM_CLASS_REGISTER = 4 -IMAGE_SYM_CLASS_EXTERNAL_DEF = 5 -IMAGE_SYM_CLASS_LABEL = 6 -IMAGE_SYM_CLASS_UNDEFINED_LABEL = 7 -IMAGE_SYM_CLASS_MEMBER_OF_STRUCT = 8 -IMAGE_SYM_CLASS_ARGUMENT = 9 -IMAGE_SYM_CLASS_STRUCT_TAG = 10 -IMAGE_SYM_CLASS_MEMBER_OF_UNION = 11 -IMAGE_SYM_CLASS_UNION_TAG = 12 -IMAGE_SYM_CLASS_TYPE_DEFINITION = 13 -IMAGE_SYM_CLASS_UNDEFINED_STATIC = 14 -IMAGE_SYM_CLASS_ENUM_TAG = 15 -IMAGE_SYM_CLASS_MEMBER_OF_ENUM = 16 -IMAGE_SYM_CLASS_REGISTER_PARAM = 17 -IMAGE_SYM_CLASS_BIT_FIELD = 18 -IMAGE_SYM_CLASS_BLOCK = 100 -IMAGE_SYM_CLASS_FUNCTION = 101 -IMAGE_SYM_CLASS_END_OF_STRUCT = 102 -IMAGE_SYM_CLASS_FILE = 103 -IMAGE_SYM_CLASS_SECTION = 104 -IMAGE_SYM_CLASS_WEAK_EXTERNAL = 105 -N_BTMASK = 15 -N_TMASK = 48 -N_TMASK1 = 192 -N_TMASK2 = 240 -N_BTSHFT = 4 -N_TSHIFT = 2 -IMAGE_SIZEOF_AUX_SYMBOL = 18 -IMAGE_COMDAT_SELECT_NODUPLICATES = 1 -IMAGE_COMDAT_SELECT_ANY = 2 -IMAGE_COMDAT_SELECT_SAME_SIZE = 3 -IMAGE_COMDAT_SELECT_EXACT_MATCH = 4 -IMAGE_COMDAT_SELECT_ASSOCIATIVE = 5 -IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY = 1 -IMAGE_WEAK_EXTERN_SEARCH_LIBRARY = 2 
-IMAGE_WEAK_EXTERN_SEARCH_ALIAS = 3 -IMAGE_SIZEOF_RELOCATION = 10 -IMAGE_REL_I386_SECTION = 10 -IMAGE_REL_I386_SECREL = 11 -IMAGE_REL_MIPS_REFHALF = 1 -IMAGE_REL_MIPS_REFWORD = 2 -IMAGE_REL_MIPS_JMPADDR = 3 -IMAGE_REL_MIPS_REFHI = 4 -IMAGE_REL_MIPS_REFLO = 5 -IMAGE_REL_MIPS_GPREL = 6 -IMAGE_REL_MIPS_LITERAL = 7 -IMAGE_REL_MIPS_SECTION = 10 -IMAGE_REL_MIPS_SECREL = 11 -IMAGE_REL_MIPS_REFWORDNB = 34 -IMAGE_REL_MIPS_PAIR = 37 -IMAGE_REL_ALPHA_ABSOLUTE = 0 -IMAGE_REL_ALPHA_REFLONG = 1 -IMAGE_REL_ALPHA_REFQUAD = 2 -IMAGE_REL_ALPHA_GPREL32 = 3 -IMAGE_REL_ALPHA_LITERAL = 4 -IMAGE_REL_ALPHA_LITUSE = 5 -IMAGE_REL_ALPHA_GPDISP = 6 -IMAGE_REL_ALPHA_BRADDR = 7 -IMAGE_REL_ALPHA_HINT = 8 -IMAGE_REL_ALPHA_INLINE_REFLONG = 9 -IMAGE_REL_ALPHA_REFHI = 10 -IMAGE_REL_ALPHA_REFLO = 11 -IMAGE_REL_ALPHA_PAIR = 12 -IMAGE_REL_ALPHA_MATCH = 13 -IMAGE_REL_ALPHA_SECTION = 14 -IMAGE_REL_ALPHA_SECREL = 15 -IMAGE_REL_ALPHA_REFLONGNB = 16 -IMAGE_SIZEOF_BASE_RELOCATION = 8 -IMAGE_REL_BASED_ABSOLUTE = 0 -IMAGE_REL_BASED_HIGH = 1 -IMAGE_REL_BASED_LOW = 2 -IMAGE_REL_BASED_HIGHLOW = 3 -IMAGE_REL_BASED_HIGHADJ = 4 -IMAGE_REL_BASED_MIPS_JMPADDR = 5 -IMAGE_SIZEOF_LINENUMBER = 6 -IMAGE_ARCHIVE_START_SIZE = 8 -IMAGE_ARCHIVE_START = "!\n" -IMAGE_ARCHIVE_END = "`\n" -IMAGE_ARCHIVE_PAD = "\n" -IMAGE_ARCHIVE_LINKER_MEMBER = "/ " -IMAGE_ARCHIVE_LONGNAMES_MEMBER = "// " -IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR = 60 -IMAGE_ORDINAL_FLAG = -2147483648 - - -def IMAGE_SNAP_BY_ORDINAL(Ordinal): - return (Ordinal & IMAGE_ORDINAL_FLAG) != 0 - - -def IMAGE_ORDINAL(Ordinal): - return Ordinal & 65535 - - -IMAGE_RESOURCE_NAME_IS_STRING = -2147483648 -IMAGE_RESOURCE_DATA_IS_DIRECTORY = -2147483648 -IMAGE_DEBUG_TYPE_UNKNOWN = 0 -IMAGE_DEBUG_TYPE_COFF = 1 -IMAGE_DEBUG_TYPE_CODEVIEW = 2 -IMAGE_DEBUG_TYPE_FPO = 3 -IMAGE_DEBUG_TYPE_MISC = 4 -IMAGE_DEBUG_TYPE_EXCEPTION = 5 -IMAGE_DEBUG_TYPE_FIXUP = 6 -IMAGE_DEBUG_TYPE_OMAP_TO_SRC = 7 -IMAGE_DEBUG_TYPE_OMAP_FROM_SRC = 8 -FRAME_FPO = 0 -FRAME_TRAP = 1 -FRAME_TSS = 2 -SIZEOF_RFPO_DATA = 16 
-IMAGE_DEBUG_MISC_EXENAME = 1 -IMAGE_SEPARATE_DEBUG_SIGNATURE = 18756 -# Generated by h2py from \msvcnt\include\wingdi.h -# hacked and split manually by mhammond. -NEWFRAME = 1 -ABORTDOC = 2 -NEXTBAND = 3 -SETCOLORTABLE = 4 -GETCOLORTABLE = 5 -FLUSHOUTPUT = 6 -DRAFTMODE = 7 -QUERYESCSUPPORT = 8 -SETABORTPROC = 9 -STARTDOC = 10 -ENDDOC = 11 -GETPHYSPAGESIZE = 12 -GETPRINTINGOFFSET = 13 -GETSCALINGFACTOR = 14 -MFCOMMENT = 15 -GETPENWIDTH = 16 -SETCOPYCOUNT = 17 -SELECTPAPERSOURCE = 18 -DEVICEDATA = 19 -PASSTHROUGH = 19 -GETTECHNOLGY = 20 -GETTECHNOLOGY = 20 -SETLINECAP = 21 -SETLINEJOIN = 22 -SETMITERLIMIT = 23 -BANDINFO = 24 -DRAWPATTERNRECT = 25 -GETVECTORPENSIZE = 26 -GETVECTORBRUSHSIZE = 27 -ENABLEDUPLEX = 28 -GETSETPAPERBINS = 29 -GETSETPRINTORIENT = 30 -ENUMPAPERBINS = 31 -SETDIBSCALING = 32 -EPSPRINTING = 33 -ENUMPAPERMETRICS = 34 -GETSETPAPERMETRICS = 35 -POSTSCRIPT_DATA = 37 -POSTSCRIPT_IGNORE = 38 -MOUSETRAILS = 39 -GETDEVICEUNITS = 42 -GETEXTENDEDTEXTMETRICS = 256 -GETEXTENTTABLE = 257 -GETPAIRKERNTABLE = 258 -GETTRACKKERNTABLE = 259 -EXTTEXTOUT = 512 -GETFACENAME = 513 -DOWNLOADFACE = 514 -ENABLERELATIVEWIDTHS = 768 -ENABLEPAIRKERNING = 769 -SETKERNTRACK = 770 -SETALLJUSTVALUES = 771 -SETCHARSET = 772 -STRETCHBLT = 2048 -GETSETSCREENPARAMS = 3072 -BEGIN_PATH = 4096 -CLIP_TO_PATH = 4097 -END_PATH = 4098 -EXT_DEVICE_CAPS = 4099 -RESTORE_CTM = 4100 -SAVE_CTM = 4101 -SET_ARC_DIRECTION = 4102 -SET_BACKGROUND_COLOR = 4103 -SET_POLY_MODE = 4104 -SET_SCREEN_ANGLE = 4105 -SET_SPREAD = 4106 -TRANSFORM_CTM = 4107 -SET_CLIP_BOX = 4108 -SET_BOUNDS = 4109 -SET_MIRROR_MODE = 4110 -OPENCHANNEL = 4110 -DOWNLOADHEADER = 4111 -CLOSECHANNEL = 4112 -POSTSCRIPT_PASSTHROUGH = 4115 -ENCAPSULATED_POSTSCRIPT = 4116 -SP_NOTREPORTED = 16384 -SP_ERROR = -1 -SP_APPABORT = -2 -SP_USERABORT = -3 -SP_OUTOFDISK = -4 -SP_OUTOFMEMORY = -5 -PR_JOBSTATUS = 0 - -## GDI object types -OBJ_PEN = 1 -OBJ_BRUSH = 2 -OBJ_DC = 3 -OBJ_METADC = 4 -OBJ_PAL = 5 -OBJ_FONT = 6 -OBJ_BITMAP = 7 -OBJ_REGION = 
8 -OBJ_METAFILE = 9 -OBJ_MEMDC = 10 -OBJ_EXTPEN = 11 -OBJ_ENHMETADC = 12 -OBJ_ENHMETAFILE = 13 -OBJ_COLORSPACE = 14 - -MWT_IDENTITY = 1 -MWT_LEFTMULTIPLY = 2 -MWT_RIGHTMULTIPLY = 3 -MWT_MIN = MWT_IDENTITY -MWT_MAX = MWT_RIGHTMULTIPLY -BI_RGB = 0 -BI_RLE8 = 1 -BI_RLE4 = 2 -BI_BITFIELDS = 3 -TMPF_FIXED_PITCH = 1 -TMPF_VECTOR = 2 -TMPF_DEVICE = 8 -TMPF_TRUETYPE = 4 -NTM_REGULAR = 64 -NTM_BOLD = 32 -NTM_ITALIC = 1 -LF_FACESIZE = 32 -LF_FULLFACESIZE = 64 -OUT_DEFAULT_PRECIS = 0 -OUT_STRING_PRECIS = 1 -OUT_CHARACTER_PRECIS = 2 -OUT_STROKE_PRECIS = 3 -OUT_TT_PRECIS = 4 -OUT_DEVICE_PRECIS = 5 -OUT_RASTER_PRECIS = 6 -OUT_TT_ONLY_PRECIS = 7 -OUT_OUTLINE_PRECIS = 8 -CLIP_DEFAULT_PRECIS = 0 -CLIP_CHARACTER_PRECIS = 1 -CLIP_STROKE_PRECIS = 2 -CLIP_MASK = 15 -CLIP_LH_ANGLES = 1 << 4 -CLIP_TT_ALWAYS = 2 << 4 -CLIP_EMBEDDED = 8 << 4 -DEFAULT_QUALITY = 0 -DRAFT_QUALITY = 1 -PROOF_QUALITY = 2 -NONANTIALIASED_QUALITY = 3 -ANTIALIASED_QUALITY = 4 -CLEARTYPE_QUALITY = 5 -CLEARTYPE_NATURAL_QUALITY = 6 -DEFAULT_PITCH = 0 -FIXED_PITCH = 1 -VARIABLE_PITCH = 2 -ANSI_CHARSET = 0 -DEFAULT_CHARSET = 1 -SYMBOL_CHARSET = 2 -SHIFTJIS_CHARSET = 128 -HANGEUL_CHARSET = 129 -CHINESEBIG5_CHARSET = 136 -OEM_CHARSET = 255 -JOHAB_CHARSET = 130 -HEBREW_CHARSET = 177 -ARABIC_CHARSET = 178 -GREEK_CHARSET = 161 -TURKISH_CHARSET = 162 -VIETNAMESE_CHARSET = 163 -THAI_CHARSET = 222 -EASTEUROPE_CHARSET = 238 -RUSSIAN_CHARSET = 204 -MAC_CHARSET = 77 -BALTIC_CHARSET = 186 -FF_DONTCARE = 0 << 4 -FF_ROMAN = 1 << 4 -FF_SWISS = 2 << 4 -FF_MODERN = 3 << 4 -FF_SCRIPT = 4 << 4 -FF_DECORATIVE = 5 << 4 -FW_DONTCARE = 0 -FW_THIN = 100 -FW_EXTRALIGHT = 200 -FW_LIGHT = 300 -FW_NORMAL = 400 -FW_MEDIUM = 500 -FW_SEMIBOLD = 600 -FW_BOLD = 700 -FW_EXTRABOLD = 800 -FW_HEAVY = 900 -FW_ULTRALIGHT = FW_EXTRALIGHT -FW_REGULAR = FW_NORMAL -FW_DEMIBOLD = FW_SEMIBOLD -FW_ULTRABOLD = FW_EXTRABOLD -FW_BLACK = FW_HEAVY -# Generated by h2py from \msvcnt\include\wingdi.h -# hacked and split manually by mhammond. 
-BS_SOLID = 0 -BS_NULL = 1 -BS_HOLLOW = BS_NULL -BS_HATCHED = 2 -BS_PATTERN = 3 -BS_INDEXED = 4 -BS_DIBPATTERN = 5 -BS_DIBPATTERNPT = 6 -BS_PATTERN8X8 = 7 -BS_DIBPATTERN8X8 = 8 -HS_HORIZONTAL = 0 -HS_VERTICAL = 1 -HS_FDIAGONAL = 2 -HS_BDIAGONAL = 3 -HS_CROSS = 4 -HS_DIAGCROSS = 5 -HS_FDIAGONAL1 = 6 -HS_BDIAGONAL1 = 7 -HS_SOLID = 8 -HS_DENSE1 = 9 -HS_DENSE2 = 10 -HS_DENSE3 = 11 -HS_DENSE4 = 12 -HS_DENSE5 = 13 -HS_DENSE6 = 14 -HS_DENSE7 = 15 -HS_DENSE8 = 16 -HS_NOSHADE = 17 -HS_HALFTONE = 18 -HS_SOLIDCLR = 19 -HS_DITHEREDCLR = 20 -HS_SOLIDTEXTCLR = 21 -HS_DITHEREDTEXTCLR = 22 -HS_SOLIDBKCLR = 23 -HS_DITHEREDBKCLR = 24 -HS_API_MAX = 25 -PS_SOLID = 0 -PS_DASH = 1 -PS_DOT = 2 -PS_DASHDOT = 3 -PS_DASHDOTDOT = 4 -PS_NULL = 5 -PS_INSIDEFRAME = 6 -PS_USERSTYLE = 7 -PS_ALTERNATE = 8 -PS_STYLE_MASK = 15 -PS_ENDCAP_ROUND = 0 -PS_ENDCAP_SQUARE = 256 -PS_ENDCAP_FLAT = 512 -PS_ENDCAP_MASK = 3840 -PS_JOIN_ROUND = 0 -PS_JOIN_BEVEL = 4096 -PS_JOIN_MITER = 8192 -PS_JOIN_MASK = 61440 -PS_COSMETIC = 0 -PS_GEOMETRIC = 65536 -PS_TYPE_MASK = 983040 -AD_COUNTERCLOCKWISE = 1 -AD_CLOCKWISE = 2 -DRIVERVERSION = 0 -TECHNOLOGY = 2 -HORZSIZE = 4 -VERTSIZE = 6 -HORZRES = 8 -VERTRES = 10 -BITSPIXEL = 12 -PLANES = 14 -NUMBRUSHES = 16 -NUMPENS = 18 -NUMMARKERS = 20 -NUMFONTS = 22 -NUMCOLORS = 24 -PDEVICESIZE = 26 -CURVECAPS = 28 -LINECAPS = 30 -POLYGONALCAPS = 32 -TEXTCAPS = 34 -CLIPCAPS = 36 -RASTERCAPS = 38 -ASPECTX = 40 -ASPECTY = 42 -ASPECTXY = 44 -LOGPIXELSX = 88 -LOGPIXELSY = 90 -SIZEPALETTE = 104 -NUMRESERVED = 106 -COLORRES = 108 - -PHYSICALWIDTH = 110 -PHYSICALHEIGHT = 111 -PHYSICALOFFSETX = 112 -PHYSICALOFFSETY = 113 -SCALINGFACTORX = 114 -SCALINGFACTORY = 115 -VREFRESH = 116 -DESKTOPVERTRES = 117 -DESKTOPHORZRES = 118 -BLTALIGNMENT = 119 -SHADEBLENDCAPS = 120 -COLORMGMTCAPS = 121 - -DT_PLOTTER = 0 -DT_RASDISPLAY = 1 -DT_RASPRINTER = 2 -DT_RASCAMERA = 3 -DT_CHARSTREAM = 4 -DT_METAFILE = 5 -DT_DISPFILE = 6 -CC_NONE = 0 -CC_CIRCLES = 1 -CC_PIE = 2 -CC_CHORD = 4 -CC_ELLIPSES = 8 -CC_WIDE = 16 
-CC_STYLED = 32 -CC_WIDESTYLED = 64 -CC_INTERIORS = 128 -CC_ROUNDRECT = 256 -LC_NONE = 0 -LC_POLYLINE = 2 -LC_MARKER = 4 -LC_POLYMARKER = 8 -LC_WIDE = 16 -LC_STYLED = 32 -LC_WIDESTYLED = 64 -LC_INTERIORS = 128 -PC_NONE = 0 -PC_POLYGON = 1 -PC_RECTANGLE = 2 -PC_WINDPOLYGON = 4 -PC_TRAPEZOID = 4 -PC_SCANLINE = 8 -PC_WIDE = 16 -PC_STYLED = 32 -PC_WIDESTYLED = 64 -PC_INTERIORS = 128 -CP_NONE = 0 -CP_RECTANGLE = 1 -CP_REGION = 2 -TC_OP_CHARACTER = 1 -TC_OP_STROKE = 2 -TC_CP_STROKE = 4 -TC_CR_90 = 8 -TC_CR_ANY = 16 -TC_SF_X_YINDEP = 32 -TC_SA_DOUBLE = 64 -TC_SA_INTEGER = 128 -TC_SA_CONTIN = 256 -TC_EA_DOUBLE = 512 -TC_IA_ABLE = 1024 -TC_UA_ABLE = 2048 -TC_SO_ABLE = 4096 -TC_RA_ABLE = 8192 -TC_VA_ABLE = 16384 -TC_RESERVED = 32768 -TC_SCROLLBLT = 65536 -RC_BITBLT = 1 -RC_BANDING = 2 -RC_SCALING = 4 -RC_BITMAP64 = 8 -RC_GDI20_OUTPUT = 16 -RC_GDI20_STATE = 32 -RC_SAVEBITMAP = 64 -RC_DI_BITMAP = 128 -RC_PALETTE = 256 -RC_DIBTODEV = 512 -RC_BIGFONT = 1024 -RC_STRETCHBLT = 2048 -RC_FLOODFILL = 4096 -RC_STRETCHDIB = 8192 -RC_OP_DX_OUTPUT = 16384 -RC_DEVBITS = 32768 -DIB_RGB_COLORS = 0 -DIB_PAL_COLORS = 1 -DIB_PAL_INDICES = 2 -DIB_PAL_PHYSINDICES = 2 -DIB_PAL_LOGINDICES = 4 -SYSPAL_ERROR = 0 -SYSPAL_STATIC = 1 -SYSPAL_NOSTATIC = 2 -CBM_CREATEDIB = 2 -CBM_INIT = 4 -FLOODFILLBORDER = 0 -FLOODFILLSURFACE = 1 -CCHDEVICENAME = 32 -CCHFORMNAME = 32 -# Generated by h2py from \msvcnt\include\wingdi.h -# hacked and split manually by mhammond. 
- -# DEVMODE.dmFields -DM_SPECVERSION = 800 -DM_ORIENTATION = 1 -DM_PAPERSIZE = 2 -DM_PAPERLENGTH = 4 -DM_PAPERWIDTH = 8 -DM_SCALE = 16 -DM_POSITION = 32 -DM_NUP = 64 -DM_DISPLAYORIENTATION = 128 -DM_COPIES = 256 -DM_DEFAULTSOURCE = 512 -DM_PRINTQUALITY = 1024 -DM_COLOR = 2048 -DM_DUPLEX = 4096 -DM_YRESOLUTION = 8192 -DM_TTOPTION = 16384 -DM_COLLATE = 32768 -DM_FORMNAME = 65536 -DM_LOGPIXELS = 131072 -DM_BITSPERPEL = 262144 -DM_PELSWIDTH = 524288 -DM_PELSHEIGHT = 1048576 -DM_DISPLAYFLAGS = 2097152 -DM_DISPLAYFREQUENCY = 4194304 -DM_ICMMETHOD = 8388608 -DM_ICMINTENT = 16777216 -DM_MEDIATYPE = 33554432 -DM_DITHERTYPE = 67108864 -DM_PANNINGWIDTH = 134217728 -DM_PANNINGHEIGHT = 268435456 -DM_DISPLAYFIXEDOUTPUT = 536870912 - -# DEVMODE.dmOrientation -DMORIENT_PORTRAIT = 1 -DMORIENT_LANDSCAPE = 2 - -# DEVMODE.dmDisplayOrientation -DMDO_DEFAULT = 0 -DMDO_90 = 1 -DMDO_180 = 2 -DMDO_270 = 3 - -# DEVMODE.dmDisplayFixedOutput -DMDFO_DEFAULT = 0 -DMDFO_STRETCH = 1 -DMDFO_CENTER = 2 - -# DEVMODE.dmPaperSize -DMPAPER_LETTER = 1 -DMPAPER_LETTERSMALL = 2 -DMPAPER_TABLOID = 3 -DMPAPER_LEDGER = 4 -DMPAPER_LEGAL = 5 -DMPAPER_STATEMENT = 6 -DMPAPER_EXECUTIVE = 7 -DMPAPER_A3 = 8 -DMPAPER_A4 = 9 -DMPAPER_A4SMALL = 10 -DMPAPER_A5 = 11 -DMPAPER_B4 = 12 -DMPAPER_B5 = 13 -DMPAPER_FOLIO = 14 -DMPAPER_QUARTO = 15 -DMPAPER_10X14 = 16 -DMPAPER_11X17 = 17 -DMPAPER_NOTE = 18 -DMPAPER_ENV_9 = 19 -DMPAPER_ENV_10 = 20 -DMPAPER_ENV_11 = 21 -DMPAPER_ENV_12 = 22 -DMPAPER_ENV_14 = 23 -DMPAPER_CSHEET = 24 -DMPAPER_DSHEET = 25 -DMPAPER_ESHEET = 26 -DMPAPER_ENV_DL = 27 -DMPAPER_ENV_C5 = 28 -DMPAPER_ENV_C3 = 29 -DMPAPER_ENV_C4 = 30 -DMPAPER_ENV_C6 = 31 -DMPAPER_ENV_C65 = 32 -DMPAPER_ENV_B4 = 33 -DMPAPER_ENV_B5 = 34 -DMPAPER_ENV_B6 = 35 -DMPAPER_ENV_ITALY = 36 -DMPAPER_ENV_MONARCH = 37 -DMPAPER_ENV_PERSONAL = 38 -DMPAPER_FANFOLD_US = 39 -DMPAPER_FANFOLD_STD_GERMAN = 40 -DMPAPER_FANFOLD_LGL_GERMAN = 41 -DMPAPER_ISO_B4 = 42 -DMPAPER_JAPANESE_POSTCARD = 43 -DMPAPER_9X11 = 44 -DMPAPER_10X11 = 45 -DMPAPER_15X11 = 
46 -DMPAPER_ENV_INVITE = 47 -DMPAPER_RESERVED_48 = 48 -DMPAPER_RESERVED_49 = 49 -DMPAPER_LETTER_EXTRA = 50 -DMPAPER_LEGAL_EXTRA = 51 -DMPAPER_TABLOID_EXTRA = 52 -DMPAPER_A4_EXTRA = 53 -DMPAPER_LETTER_TRANSVERSE = 54 -DMPAPER_A4_TRANSVERSE = 55 -DMPAPER_LETTER_EXTRA_TRANSVERSE = 56 -DMPAPER_A_PLUS = 57 -DMPAPER_B_PLUS = 58 -DMPAPER_LETTER_PLUS = 59 -DMPAPER_A4_PLUS = 60 -DMPAPER_A5_TRANSVERSE = 61 -DMPAPER_B5_TRANSVERSE = 62 -DMPAPER_A3_EXTRA = 63 -DMPAPER_A5_EXTRA = 64 -DMPAPER_B5_EXTRA = 65 -DMPAPER_A2 = 66 -DMPAPER_A3_TRANSVERSE = 67 -DMPAPER_A3_EXTRA_TRANSVERSE = 68 -DMPAPER_DBL_JAPANESE_POSTCARD = 69 -DMPAPER_A6 = 70 -DMPAPER_JENV_KAKU2 = 71 -DMPAPER_JENV_KAKU3 = 72 -DMPAPER_JENV_CHOU3 = 73 -DMPAPER_JENV_CHOU4 = 74 -DMPAPER_LETTER_ROTATED = 75 -DMPAPER_A3_ROTATED = 76 -DMPAPER_A4_ROTATED = 77 -DMPAPER_A5_ROTATED = 78 -DMPAPER_B4_JIS_ROTATED = 79 -DMPAPER_B5_JIS_ROTATED = 80 -DMPAPER_JAPANESE_POSTCARD_ROTATED = 81 -DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED = 82 -DMPAPER_A6_ROTATED = 83 -DMPAPER_JENV_KAKU2_ROTATED = 84 -DMPAPER_JENV_KAKU3_ROTATED = 85 -DMPAPER_JENV_CHOU3_ROTATED = 86 -DMPAPER_JENV_CHOU4_ROTATED = 87 -DMPAPER_B6_JIS = 88 -DMPAPER_B6_JIS_ROTATED = 89 -DMPAPER_12X11 = 90 -DMPAPER_JENV_YOU4 = 91 -DMPAPER_JENV_YOU4_ROTATED = 92 -DMPAPER_P16K = 93 -DMPAPER_P32K = 94 -DMPAPER_P32KBIG = 95 -DMPAPER_PENV_1 = 96 -DMPAPER_PENV_2 = 97 -DMPAPER_PENV_3 = 98 -DMPAPER_PENV_4 = 99 -DMPAPER_PENV_5 = 100 -DMPAPER_PENV_6 = 101 -DMPAPER_PENV_7 = 102 -DMPAPER_PENV_8 = 103 -DMPAPER_PENV_9 = 104 -DMPAPER_PENV_10 = 105 -DMPAPER_P16K_ROTATED = 106 -DMPAPER_P32K_ROTATED = 107 -DMPAPER_P32KBIG_ROTATED = 108 -DMPAPER_PENV_1_ROTATED = 109 -DMPAPER_PENV_2_ROTATED = 110 -DMPAPER_PENV_3_ROTATED = 111 -DMPAPER_PENV_4_ROTATED = 112 -DMPAPER_PENV_5_ROTATED = 113 -DMPAPER_PENV_6_ROTATED = 114 -DMPAPER_PENV_7_ROTATED = 115 -DMPAPER_PENV_8_ROTATED = 116 -DMPAPER_PENV_9_ROTATED = 117 -DMPAPER_PENV_10_ROTATED = 118 -DMPAPER_LAST = DMPAPER_PENV_10_ROTATED -DMPAPER_USER = 256 - -# 
DEVMODE.dmDefaultSource -DMBIN_UPPER = 1 -DMBIN_ONLYONE = 1 -DMBIN_LOWER = 2 -DMBIN_MIDDLE = 3 -DMBIN_MANUAL = 4 -DMBIN_ENVELOPE = 5 -DMBIN_ENVMANUAL = 6 -DMBIN_AUTO = 7 -DMBIN_TRACTOR = 8 -DMBIN_SMALLFMT = 9 -DMBIN_LARGEFMT = 10 -DMBIN_LARGECAPACITY = 11 -DMBIN_CASSETTE = 14 -DMBIN_FORMSOURCE = 15 -DMBIN_LAST = DMBIN_FORMSOURCE -DMBIN_USER = 256 - -# DEVMODE.dmPrintQuality -DMRES_DRAFT = -1 -DMRES_LOW = -2 -DMRES_MEDIUM = -3 -DMRES_HIGH = -4 - -# DEVMODE.dmColor -DMCOLOR_MONOCHROME = 1 -DMCOLOR_COLOR = 2 - -# DEVMODE.dmDuplex -DMDUP_SIMPLEX = 1 -DMDUP_VERTICAL = 2 -DMDUP_HORIZONTAL = 3 - -# DEVMODE.dmTTOption -DMTT_BITMAP = 1 -DMTT_DOWNLOAD = 2 -DMTT_SUBDEV = 3 -DMTT_DOWNLOAD_OUTLINE = 4 - -# DEVMODE.dmCollate -DMCOLLATE_FALSE = 0 -DMCOLLATE_TRUE = 1 - -# DEVMODE.dmDisplayFlags -DM_GRAYSCALE = 1 -DM_INTERLACED = 2 - -# DEVMODE.dmICMMethod -DMICMMETHOD_NONE = 1 -DMICMMETHOD_SYSTEM = 2 -DMICMMETHOD_DRIVER = 3 -DMICMMETHOD_DEVICE = 4 -DMICMMETHOD_USER = 256 - -# DEVMODE.dmICMIntent -DMICM_SATURATE = 1 -DMICM_CONTRAST = 2 -DMICM_COLORIMETRIC = 3 -DMICM_ABS_COLORIMETRIC = 4 -DMICM_USER = 256 - -# DEVMODE.dmMediaType -DMMEDIA_STANDARD = 1 -DMMEDIA_TRANSPARENCY = 2 -DMMEDIA_GLOSSY = 3 -DMMEDIA_USER = 256 - -# DEVMODE.dmDitherType -DMDITHER_NONE = 1 -DMDITHER_COARSE = 2 -DMDITHER_FINE = 3 -DMDITHER_LINEART = 4 -DMDITHER_ERRORDIFFUSION = 5 -DMDITHER_RESERVED6 = 6 -DMDITHER_RESERVED7 = 7 -DMDITHER_RESERVED8 = 8 -DMDITHER_RESERVED9 = 9 -DMDITHER_GRAYSCALE = 10 -DMDITHER_USER = 256 - -# DEVMODE.dmNup -DMNUP_SYSTEM = 1 -DMNUP_ONEUP = 2 - -# used with ExtEscape -FEATURESETTING_NUP = 0 -FEATURESETTING_OUTPUT = 1 -FEATURESETTING_PSLEVEL = 2 -FEATURESETTING_CUSTPAPER = 3 -FEATURESETTING_MIRROR = 4 -FEATURESETTING_NEGATIVE = 5 -FEATURESETTING_PROTOCOL = 6 -FEATURESETTING_PRIVATE_BEGIN = 0x1000 -FEATURESETTING_PRIVATE_END = 0x1FFF - -RDH_RECTANGLES = 1 -GGO_METRICS = 0 -GGO_BITMAP = 1 -GGO_NATIVE = 2 -TT_POLYGON_TYPE = 24 -TT_PRIM_LINE = 1 -TT_PRIM_QSPLINE = 2 -TT_AVAILABLE = 1 
-TT_ENABLED = 2 -DM_UPDATE = 1 -DM_COPY = 2 -DM_PROMPT = 4 -DM_MODIFY = 8 -DM_IN_BUFFER = DM_MODIFY -DM_IN_PROMPT = DM_PROMPT -DM_OUT_BUFFER = DM_COPY -DM_OUT_DEFAULT = DM_UPDATE - -# DISPLAY_DEVICE.StateFlags -DISPLAY_DEVICE_ATTACHED_TO_DESKTOP = 1 -DISPLAY_DEVICE_MULTI_DRIVER = 2 -DISPLAY_DEVICE_PRIMARY_DEVICE = 4 -DISPLAY_DEVICE_MIRRORING_DRIVER = 8 -DISPLAY_DEVICE_VGA_COMPATIBLE = 16 -DISPLAY_DEVICE_REMOVABLE = 32 -DISPLAY_DEVICE_MODESPRUNED = 134217728 -DISPLAY_DEVICE_REMOTE = 67108864 -DISPLAY_DEVICE_DISCONNECT = 33554432 - -# DeviceCapabilities types -DC_FIELDS = 1 -DC_PAPERS = 2 -DC_PAPERSIZE = 3 -DC_MINEXTENT = 4 -DC_MAXEXTENT = 5 -DC_BINS = 6 -DC_DUPLEX = 7 -DC_SIZE = 8 -DC_EXTRA = 9 -DC_VERSION = 10 -DC_DRIVER = 11 -DC_BINNAMES = 12 -DC_ENUMRESOLUTIONS = 13 -DC_FILEDEPENDENCIES = 14 -DC_TRUETYPE = 15 -DC_PAPERNAMES = 16 -DC_ORIENTATION = 17 -DC_COPIES = 18 -DC_BINADJUST = 19 -DC_EMF_COMPLIANT = 20 -DC_DATATYPE_PRODUCED = 21 -DC_COLLATE = 22 -DC_MANUFACTURER = 23 -DC_MODEL = 24 -DC_PERSONALITY = 25 -DC_PRINTRATE = 26 -DC_PRINTRATEUNIT = 27 -DC_PRINTERMEM = 28 -DC_MEDIAREADY = 29 -DC_STAPLE = 30 -DC_PRINTRATEPPM = 31 -DC_COLORDEVICE = 32 -DC_NUP = 33 -DC_MEDIATYPENAMES = 34 -DC_MEDIATYPES = 35 - -PRINTRATEUNIT_PPM = 1 -PRINTRATEUNIT_CPS = 2 -PRINTRATEUNIT_LPM = 3 -PRINTRATEUNIT_IPM = 4 - -# TrueType constants -DCTT_BITMAP = 1 -DCTT_DOWNLOAD = 2 -DCTT_SUBDEV = 4 -DCTT_DOWNLOAD_OUTLINE = 8 - -DCBA_FACEUPNONE = 0 -DCBA_FACEUPCENTER = 1 -DCBA_FACEUPLEFT = 2 -DCBA_FACEUPRIGHT = 3 -DCBA_FACEDOWNNONE = 256 -DCBA_FACEDOWNCENTER = 257 -DCBA_FACEDOWNLEFT = 258 -DCBA_FACEDOWNRIGHT = 259 - -CA_NEGATIVE = 1 -CA_LOG_FILTER = 2 -ILLUMINANT_DEVICE_DEFAULT = 0 -ILLUMINANT_A = 1 -ILLUMINANT_B = 2 -ILLUMINANT_C = 3 -ILLUMINANT_D50 = 4 -ILLUMINANT_D55 = 5 -ILLUMINANT_D65 = 6 -ILLUMINANT_D75 = 7 -ILLUMINANT_F2 = 8 -ILLUMINANT_MAX_INDEX = ILLUMINANT_F2 -ILLUMINANT_TUNGSTEN = ILLUMINANT_A -ILLUMINANT_DAYLIGHT = ILLUMINANT_C -ILLUMINANT_FLUORESCENT = ILLUMINANT_F2 
-ILLUMINANT_NTSC = ILLUMINANT_C - -# Generated by h2py from \msvcnt\include\wingdi.h -# hacked and split manually by mhammond. -FONTMAPPER_MAX = 10 -ENHMETA_SIGNATURE = 1179469088 -ENHMETA_STOCK_OBJECT = -2147483648 -EMR_HEADER = 1 -EMR_POLYBEZIER = 2 -EMR_POLYGON = 3 -EMR_POLYLINE = 4 -EMR_POLYBEZIERTO = 5 -EMR_POLYLINETO = 6 -EMR_POLYPOLYLINE = 7 -EMR_POLYPOLYGON = 8 -EMR_SETWINDOWEXTEX = 9 -EMR_SETWINDOWORGEX = 10 -EMR_SETVIEWPORTEXTEX = 11 -EMR_SETVIEWPORTORGEX = 12 -EMR_SETBRUSHORGEX = 13 -EMR_EOF = 14 -EMR_SETPIXELV = 15 -EMR_SETMAPPERFLAGS = 16 -EMR_SETMAPMODE = 17 -EMR_SETBKMODE = 18 -EMR_SETPOLYFILLMODE = 19 -EMR_SETROP2 = 20 -EMR_SETSTRETCHBLTMODE = 21 -EMR_SETTEXTALIGN = 22 -EMR_SETCOLORADJUSTMENT = 23 -EMR_SETTEXTCOLOR = 24 -EMR_SETBKCOLOR = 25 -EMR_OFFSETCLIPRGN = 26 -EMR_MOVETOEX = 27 -EMR_SETMETARGN = 28 -EMR_EXCLUDECLIPRECT = 29 -EMR_INTERSECTCLIPRECT = 30 -EMR_SCALEVIEWPORTEXTEX = 31 -EMR_SCALEWINDOWEXTEX = 32 -EMR_SAVEDC = 33 -EMR_RESTOREDC = 34 -EMR_SETWORLDTRANSFORM = 35 -EMR_MODIFYWORLDTRANSFORM = 36 -EMR_SELECTOBJECT = 37 -EMR_CREATEPEN = 38 -EMR_CREATEBRUSHINDIRECT = 39 -EMR_DELETEOBJECT = 40 -EMR_ANGLEARC = 41 -EMR_ELLIPSE = 42 -EMR_RECTANGLE = 43 -EMR_ROUNDRECT = 44 -EMR_ARC = 45 -EMR_CHORD = 46 -EMR_PIE = 47 -EMR_SELECTPALETTE = 48 -EMR_CREATEPALETTE = 49 -EMR_SETPALETTEENTRIES = 50 -EMR_RESIZEPALETTE = 51 -EMR_REALIZEPALETTE = 52 -EMR_EXTFLOODFILL = 53 -EMR_LINETO = 54 -EMR_ARCTO = 55 -EMR_POLYDRAW = 56 -EMR_SETARCDIRECTION = 57 -EMR_SETMITERLIMIT = 58 -EMR_BEGINPATH = 59 -EMR_ENDPATH = 60 -EMR_CLOSEFIGURE = 61 -EMR_FILLPATH = 62 -EMR_STROKEANDFILLPATH = 63 -EMR_STROKEPATH = 64 -EMR_FLATTENPATH = 65 -EMR_WIDENPATH = 66 -EMR_SELECTCLIPPATH = 67 -EMR_ABORTPATH = 68 -EMR_GDICOMMENT = 70 -EMR_FILLRGN = 71 -EMR_FRAMERGN = 72 -EMR_INVERTRGN = 73 -EMR_PAINTRGN = 74 -EMR_EXTSELECTCLIPRGN = 75 -EMR_BITBLT = 76 -EMR_STRETCHBLT = 77 -EMR_MASKBLT = 78 -EMR_PLGBLT = 79 -EMR_SETDIBITSTODEVICE = 80 -EMR_STRETCHDIBITS = 81 -EMR_EXTCREATEFONTINDIRECTW = 
82 -EMR_EXTTEXTOUTA = 83 -EMR_EXTTEXTOUTW = 84 -EMR_POLYBEZIER16 = 85 -EMR_POLYGON16 = 86 -EMR_POLYLINE16 = 87 -EMR_POLYBEZIERTO16 = 88 -EMR_POLYLINETO16 = 89 -EMR_POLYPOLYLINE16 = 90 -EMR_POLYPOLYGON16 = 91 -EMR_POLYDRAW16 = 92 -EMR_CREATEMONOBRUSH = 93 -EMR_CREATEDIBPATTERNBRUSHPT = 94 -EMR_EXTCREATEPEN = 95 -EMR_POLYTEXTOUTA = 96 -EMR_POLYTEXTOUTW = 97 -EMR_MIN = 1 -EMR_MAX = 97 -# Generated by h2py from \msvcnt\include\wingdi.h -# hacked and split manually by mhammond. -PANOSE_COUNT = 10 -PAN_FAMILYTYPE_INDEX = 0 -PAN_SERIFSTYLE_INDEX = 1 -PAN_WEIGHT_INDEX = 2 -PAN_PROPORTION_INDEX = 3 -PAN_CONTRAST_INDEX = 4 -PAN_STROKEVARIATION_INDEX = 5 -PAN_ARMSTYLE_INDEX = 6 -PAN_LETTERFORM_INDEX = 7 -PAN_MIDLINE_INDEX = 8 -PAN_XHEIGHT_INDEX = 9 -PAN_CULTURE_LATIN = 0 -PAN_ANY = 0 -PAN_NO_FIT = 1 -PAN_FAMILY_TEXT_DISPLAY = 2 -PAN_FAMILY_SCRIPT = 3 -PAN_FAMILY_DECORATIVE = 4 -PAN_FAMILY_PICTORIAL = 5 -PAN_SERIF_COVE = 2 -PAN_SERIF_OBTUSE_COVE = 3 -PAN_SERIF_SQUARE_COVE = 4 -PAN_SERIF_OBTUSE_SQUARE_COVE = 5 -PAN_SERIF_SQUARE = 6 -PAN_SERIF_THIN = 7 -PAN_SERIF_BONE = 8 -PAN_SERIF_EXAGGERATED = 9 -PAN_SERIF_TRIANGLE = 10 -PAN_SERIF_NORMAL_SANS = 11 -PAN_SERIF_OBTUSE_SANS = 12 -PAN_SERIF_PERP_SANS = 13 -PAN_SERIF_FLARED = 14 -PAN_SERIF_ROUNDED = 15 -PAN_WEIGHT_VERY_LIGHT = 2 -PAN_WEIGHT_LIGHT = 3 -PAN_WEIGHT_THIN = 4 -PAN_WEIGHT_BOOK = 5 -PAN_WEIGHT_MEDIUM = 6 -PAN_WEIGHT_DEMI = 7 -PAN_WEIGHT_BOLD = 8 -PAN_WEIGHT_HEAVY = 9 -PAN_WEIGHT_BLACK = 10 -PAN_WEIGHT_NORD = 11 -PAN_PROP_OLD_STYLE = 2 -PAN_PROP_MODERN = 3 -PAN_PROP_EVEN_WIDTH = 4 -PAN_PROP_EXPANDED = 5 -PAN_PROP_CONDENSED = 6 -PAN_PROP_VERY_EXPANDED = 7 -PAN_PROP_VERY_CONDENSED = 8 -PAN_PROP_MONOSPACED = 9 -PAN_CONTRAST_NONE = 2 -PAN_CONTRAST_VERY_LOW = 3 -PAN_CONTRAST_LOW = 4 -PAN_CONTRAST_MEDIUM_LOW = 5 -PAN_CONTRAST_MEDIUM = 6 -PAN_CONTRAST_MEDIUM_HIGH = 7 -PAN_CONTRAST_HIGH = 8 -PAN_CONTRAST_VERY_HIGH = 9 -PAN_STROKE_GRADUAL_DIAG = 2 -PAN_STROKE_GRADUAL_TRAN = 3 -PAN_STROKE_GRADUAL_VERT = 4 -PAN_STROKE_GRADUAL_HORZ = 
5 -PAN_STROKE_RAPID_VERT = 6 -PAN_STROKE_RAPID_HORZ = 7 -PAN_STROKE_INSTANT_VERT = 8 -PAN_STRAIGHT_ARMS_HORZ = 2 -PAN_STRAIGHT_ARMS_WEDGE = 3 -PAN_STRAIGHT_ARMS_VERT = 4 -PAN_STRAIGHT_ARMS_SINGLE_SERIF = 5 -PAN_STRAIGHT_ARMS_DOUBLE_SERIF = 6 -PAN_BENT_ARMS_HORZ = 7 -PAN_BENT_ARMS_WEDGE = 8 -PAN_BENT_ARMS_VERT = 9 -PAN_BENT_ARMS_SINGLE_SERIF = 10 -PAN_BENT_ARMS_DOUBLE_SERIF = 11 -PAN_LETT_NORMAL_CONTACT = 2 -PAN_LETT_NORMAL_WEIGHTED = 3 -PAN_LETT_NORMAL_BOXED = 4 -PAN_LETT_NORMAL_FLATTENED = 5 -PAN_LETT_NORMAL_ROUNDED = 6 -PAN_LETT_NORMAL_OFF_CENTER = 7 -PAN_LETT_NORMAL_SQUARE = 8 -PAN_LETT_OBLIQUE_CONTACT = 9 -PAN_LETT_OBLIQUE_WEIGHTED = 10 -PAN_LETT_OBLIQUE_BOXED = 11 -PAN_LETT_OBLIQUE_FLATTENED = 12 -PAN_LETT_OBLIQUE_ROUNDED = 13 -PAN_LETT_OBLIQUE_OFF_CENTER = 14 -PAN_LETT_OBLIQUE_SQUARE = 15 -PAN_MIDLINE_STANDARD_TRIMMED = 2 -PAN_MIDLINE_STANDARD_POINTED = 3 -PAN_MIDLINE_STANDARD_SERIFED = 4 -PAN_MIDLINE_HIGH_TRIMMED = 5 -PAN_MIDLINE_HIGH_POINTED = 6 -PAN_MIDLINE_HIGH_SERIFED = 7 -PAN_MIDLINE_CONSTANT_TRIMMED = 8 -PAN_MIDLINE_CONSTANT_POINTED = 9 -PAN_MIDLINE_CONSTANT_SERIFED = 10 -PAN_MIDLINE_LOW_TRIMMED = 11 -PAN_MIDLINE_LOW_POINTED = 12 -PAN_MIDLINE_LOW_SERIFED = 13 -PAN_XHEIGHT_CONSTANT_SMALL = 2 -PAN_XHEIGHT_CONSTANT_STD = 3 -PAN_XHEIGHT_CONSTANT_LARGE = 4 -PAN_XHEIGHT_DUCKING_SMALL = 5 -PAN_XHEIGHT_DUCKING_STD = 6 -PAN_XHEIGHT_DUCKING_LARGE = 7 -ELF_VENDOR_SIZE = 4 -ELF_VERSION = 0 -ELF_CULTURE_LATIN = 0 -RASTER_FONTTYPE = 1 -DEVICE_FONTTYPE = 2 -TRUETYPE_FONTTYPE = 4 - - -def PALETTEINDEX(i): - return 16777216 | (i) - - -PC_RESERVED = 1 -PC_EXPLICIT = 2 -PC_NOCOLLAPSE = 4 - - -def GetRValue(rgb): - return rgb & 0xFF - - -def GetGValue(rgb): - return (rgb >> 8) & 0xFF - - -def GetBValue(rgb): - return (rgb >> 16) & 0xFF - - -TRANSPARENT = 1 -OPAQUE = 2 -BKMODE_LAST = 2 -GM_COMPATIBLE = 1 -GM_ADVANCED = 2 -GM_LAST = 2 -PT_CLOSEFIGURE = 1 -PT_LINETO = 2 -PT_BEZIERTO = 4 -PT_MOVETO = 6 -MM_TEXT = 1 -MM_LOMETRIC = 2 -MM_HIMETRIC = 3 -MM_LOENGLISH = 4 
-MM_HIENGLISH = 5 -MM_TWIPS = 6 -MM_ISOTROPIC = 7 -MM_ANISOTROPIC = 8 -MM_MIN = MM_TEXT -MM_MAX = MM_ANISOTROPIC -MM_MAX_FIXEDSCALE = MM_TWIPS -ABSOLUTE = 1 -RELATIVE = 2 -WHITE_BRUSH = 0 -LTGRAY_BRUSH = 1 -GRAY_BRUSH = 2 -DKGRAY_BRUSH = 3 -BLACK_BRUSH = 4 -NULL_BRUSH = 5 -HOLLOW_BRUSH = NULL_BRUSH -WHITE_PEN = 6 -BLACK_PEN = 7 -NULL_PEN = 8 -OEM_FIXED_FONT = 10 -ANSI_FIXED_FONT = 11 -ANSI_VAR_FONT = 12 -SYSTEM_FONT = 13 -DEVICE_DEFAULT_FONT = 14 -DEFAULT_PALETTE = 15 -SYSTEM_FIXED_FONT = 16 -STOCK_LAST = 16 -CLR_INVALID = -1 - -DC_BRUSH = 18 -DC_PEN = 19 - -# Exception/Status codes from winuser.h and winnt.h -STATUS_WAIT_0 = 0 -STATUS_ABANDONED_WAIT_0 = 128 -STATUS_USER_APC = 192 -STATUS_TIMEOUT = 258 -STATUS_PENDING = 259 -STATUS_SEGMENT_NOTIFICATION = 1073741829 -STATUS_GUARD_PAGE_VIOLATION = -2147483647 -STATUS_DATATYPE_MISALIGNMENT = -2147483646 -STATUS_BREAKPOINT = -2147483645 -STATUS_SINGLE_STEP = -2147483644 -STATUS_ACCESS_VIOLATION = -1073741819 -STATUS_IN_PAGE_ERROR = -1073741818 -STATUS_INVALID_HANDLE = -1073741816 -STATUS_NO_MEMORY = -1073741801 -STATUS_ILLEGAL_INSTRUCTION = -1073741795 -STATUS_NONCONTINUABLE_EXCEPTION = -1073741787 -STATUS_INVALID_DISPOSITION = -1073741786 -STATUS_ARRAY_BOUNDS_EXCEEDED = -1073741684 -STATUS_FLOAT_DENORMAL_OPERAND = -1073741683 -STATUS_FLOAT_DIVIDE_BY_ZERO = -1073741682 -STATUS_FLOAT_INEXACT_RESULT = -1073741681 -STATUS_FLOAT_INVALID_OPERATION = -1073741680 -STATUS_FLOAT_OVERFLOW = -1073741679 -STATUS_FLOAT_STACK_CHECK = -1073741678 -STATUS_FLOAT_UNDERFLOW = -1073741677 -STATUS_INTEGER_DIVIDE_BY_ZERO = -1073741676 -STATUS_INTEGER_OVERFLOW = -1073741675 -STATUS_PRIVILEGED_INSTRUCTION = -1073741674 -STATUS_STACK_OVERFLOW = -1073741571 -STATUS_CONTROL_C_EXIT = -1073741510 - - -WAIT_FAILED = -1 -WAIT_OBJECT_0 = STATUS_WAIT_0 + 0 - -WAIT_ABANDONED = STATUS_ABANDONED_WAIT_0 + 0 -WAIT_ABANDONED_0 = STATUS_ABANDONED_WAIT_0 + 0 - -WAIT_TIMEOUT = STATUS_TIMEOUT -WAIT_IO_COMPLETION = STATUS_USER_APC -STILL_ACTIVE = STATUS_PENDING 
-EXCEPTION_ACCESS_VIOLATION = STATUS_ACCESS_VIOLATION -EXCEPTION_DATATYPE_MISALIGNMENT = STATUS_DATATYPE_MISALIGNMENT -EXCEPTION_BREAKPOINT = STATUS_BREAKPOINT -EXCEPTION_SINGLE_STEP = STATUS_SINGLE_STEP -EXCEPTION_ARRAY_BOUNDS_EXCEEDED = STATUS_ARRAY_BOUNDS_EXCEEDED -EXCEPTION_FLT_DENORMAL_OPERAND = STATUS_FLOAT_DENORMAL_OPERAND -EXCEPTION_FLT_DIVIDE_BY_ZERO = STATUS_FLOAT_DIVIDE_BY_ZERO -EXCEPTION_FLT_INEXACT_RESULT = STATUS_FLOAT_INEXACT_RESULT -EXCEPTION_FLT_INVALID_OPERATION = STATUS_FLOAT_INVALID_OPERATION -EXCEPTION_FLT_OVERFLOW = STATUS_FLOAT_OVERFLOW -EXCEPTION_FLT_STACK_CHECK = STATUS_FLOAT_STACK_CHECK -EXCEPTION_FLT_UNDERFLOW = STATUS_FLOAT_UNDERFLOW -EXCEPTION_INT_DIVIDE_BY_ZERO = STATUS_INTEGER_DIVIDE_BY_ZERO -EXCEPTION_INT_OVERFLOW = STATUS_INTEGER_OVERFLOW -EXCEPTION_PRIV_INSTRUCTION = STATUS_PRIVILEGED_INSTRUCTION -EXCEPTION_IN_PAGE_ERROR = STATUS_IN_PAGE_ERROR -EXCEPTION_ILLEGAL_INSTRUCTION = STATUS_ILLEGAL_INSTRUCTION -EXCEPTION_NONCONTINUABLE_EXCEPTION = STATUS_NONCONTINUABLE_EXCEPTION -EXCEPTION_STACK_OVERFLOW = STATUS_STACK_OVERFLOW -EXCEPTION_INVALID_DISPOSITION = STATUS_INVALID_DISPOSITION -EXCEPTION_GUARD_PAGE = STATUS_GUARD_PAGE_VIOLATION -EXCEPTION_INVALID_HANDLE = STATUS_INVALID_HANDLE -CONTROL_C_EXIT = STATUS_CONTROL_C_EXIT - -# winuser.h line 8594 -# constants used with SystemParametersInfo -SPI_GETBEEP = 1 -SPI_SETBEEP = 2 -SPI_GETMOUSE = 3 -SPI_SETMOUSE = 4 -SPI_GETBORDER = 5 -SPI_SETBORDER = 6 -SPI_GETKEYBOARDSPEED = 10 -SPI_SETKEYBOARDSPEED = 11 -SPI_LANGDRIVER = 12 -SPI_ICONHORIZONTALSPACING = 13 -SPI_GETSCREENSAVETIMEOUT = 14 -SPI_SETSCREENSAVETIMEOUT = 15 -SPI_GETSCREENSAVEACTIVE = 16 -SPI_SETSCREENSAVEACTIVE = 17 -SPI_GETGRIDGRANULARITY = 18 -SPI_SETGRIDGRANULARITY = 19 -SPI_SETDESKWALLPAPER = 20 -SPI_SETDESKPATTERN = 21 -SPI_GETKEYBOARDDELAY = 22 -SPI_SETKEYBOARDDELAY = 23 -SPI_ICONVERTICALSPACING = 24 -SPI_GETICONTITLEWRAP = 25 -SPI_SETICONTITLEWRAP = 26 -SPI_GETMENUDROPALIGNMENT = 27 -SPI_SETMENUDROPALIGNMENT = 28 
-SPI_SETDOUBLECLKWIDTH = 29 -SPI_SETDOUBLECLKHEIGHT = 30 -SPI_GETICONTITLELOGFONT = 31 -SPI_SETDOUBLECLICKTIME = 32 -SPI_SETMOUSEBUTTONSWAP = 33 -SPI_SETICONTITLELOGFONT = 34 -SPI_GETFASTTASKSWITCH = 35 -SPI_SETFASTTASKSWITCH = 36 -SPI_SETDRAGFULLWINDOWS = 37 -SPI_GETDRAGFULLWINDOWS = 38 -SPI_GETNONCLIENTMETRICS = 41 -SPI_SETNONCLIENTMETRICS = 42 -SPI_GETMINIMIZEDMETRICS = 43 -SPI_SETMINIMIZEDMETRICS = 44 -SPI_GETICONMETRICS = 45 -SPI_SETICONMETRICS = 46 -SPI_SETWORKAREA = 47 -SPI_GETWORKAREA = 48 -SPI_SETPENWINDOWS = 49 -SPI_GETFILTERKEYS = 50 -SPI_SETFILTERKEYS = 51 -SPI_GETTOGGLEKEYS = 52 -SPI_SETTOGGLEKEYS = 53 -SPI_GETMOUSEKEYS = 54 -SPI_SETMOUSEKEYS = 55 -SPI_GETSHOWSOUNDS = 56 -SPI_SETSHOWSOUNDS = 57 -SPI_GETSTICKYKEYS = 58 -SPI_SETSTICKYKEYS = 59 -SPI_GETACCESSTIMEOUT = 60 -SPI_SETACCESSTIMEOUT = 61 -SPI_GETSERIALKEYS = 62 -SPI_SETSERIALKEYS = 63 -SPI_GETSOUNDSENTRY = 64 -SPI_SETSOUNDSENTRY = 65 -SPI_GETHIGHCONTRAST = 66 -SPI_SETHIGHCONTRAST = 67 -SPI_GETKEYBOARDPREF = 68 -SPI_SETKEYBOARDPREF = 69 -SPI_GETSCREENREADER = 70 -SPI_SETSCREENREADER = 71 -SPI_GETANIMATION = 72 -SPI_SETANIMATION = 73 -SPI_GETFONTSMOOTHING = 74 -SPI_SETFONTSMOOTHING = 75 -SPI_SETDRAGWIDTH = 76 -SPI_SETDRAGHEIGHT = 77 -SPI_SETHANDHELD = 78 -SPI_GETLOWPOWERTIMEOUT = 79 -SPI_GETPOWEROFFTIMEOUT = 80 -SPI_SETLOWPOWERTIMEOUT = 81 -SPI_SETPOWEROFFTIMEOUT = 82 -SPI_GETLOWPOWERACTIVE = 83 -SPI_GETPOWEROFFACTIVE = 84 -SPI_SETLOWPOWERACTIVE = 85 -SPI_SETPOWEROFFACTIVE = 86 -SPI_SETCURSORS = 87 -SPI_SETICONS = 88 -SPI_GETDEFAULTINPUTLANG = 89 -SPI_SETDEFAULTINPUTLANG = 90 -SPI_SETLANGTOGGLE = 91 -SPI_GETWINDOWSEXTENSION = 92 -SPI_SETMOUSETRAILS = 93 -SPI_GETMOUSETRAILS = 94 -SPI_GETSNAPTODEFBUTTON = 95 -SPI_SETSNAPTODEFBUTTON = 96 -SPI_SETSCREENSAVERRUNNING = 97 -SPI_SCREENSAVERRUNNING = SPI_SETSCREENSAVERRUNNING -SPI_GETMOUSEHOVERWIDTH = 98 -SPI_SETMOUSEHOVERWIDTH = 99 -SPI_GETMOUSEHOVERHEIGHT = 100 -SPI_SETMOUSEHOVERHEIGHT = 101 -SPI_GETMOUSEHOVERTIME = 102 -SPI_SETMOUSEHOVERTIME = 103 
-SPI_GETWHEELSCROLLLINES = 104 -SPI_SETWHEELSCROLLLINES = 105 -SPI_GETMENUSHOWDELAY = 106 -SPI_SETMENUSHOWDELAY = 107 - -SPI_GETSHOWIMEUI = 110 -SPI_SETSHOWIMEUI = 111 -SPI_GETMOUSESPEED = 112 -SPI_SETMOUSESPEED = 113 -SPI_GETSCREENSAVERRUNNING = 114 -SPI_GETDESKWALLPAPER = 115 - -SPI_GETACTIVEWINDOWTRACKING = 4096 -SPI_SETACTIVEWINDOWTRACKING = 4097 -SPI_GETMENUANIMATION = 4098 -SPI_SETMENUANIMATION = 4099 -SPI_GETCOMBOBOXANIMATION = 4100 -SPI_SETCOMBOBOXANIMATION = 4101 -SPI_GETLISTBOXSMOOTHSCROLLING = 4102 -SPI_SETLISTBOXSMOOTHSCROLLING = 4103 -SPI_GETGRADIENTCAPTIONS = 4104 -SPI_SETGRADIENTCAPTIONS = 4105 -SPI_GETKEYBOARDCUES = 4106 -SPI_SETKEYBOARDCUES = 4107 -SPI_GETMENUUNDERLINES = 4106 -SPI_SETMENUUNDERLINES = 4107 -SPI_GETACTIVEWNDTRKZORDER = 4108 -SPI_SETACTIVEWNDTRKZORDER = 4109 -SPI_GETHOTTRACKING = 4110 -SPI_SETHOTTRACKING = 4111 - -SPI_GETMENUFADE = 4114 -SPI_SETMENUFADE = 4115 -SPI_GETSELECTIONFADE = 4116 -SPI_SETSELECTIONFADE = 4117 -SPI_GETTOOLTIPANIMATION = 4118 -SPI_SETTOOLTIPANIMATION = 4119 -SPI_GETTOOLTIPFADE = 4120 -SPI_SETTOOLTIPFADE = 4121 -SPI_GETCURSORSHADOW = 4122 -SPI_SETCURSORSHADOW = 4123 -SPI_GETMOUSESONAR = 4124 -SPI_SETMOUSESONAR = 4125 -SPI_GETMOUSECLICKLOCK = 4126 -SPI_SETMOUSECLICKLOCK = 4127 -SPI_GETMOUSEVANISH = 4128 -SPI_SETMOUSEVANISH = 4129 -SPI_GETFLATMENU = 4130 -SPI_SETFLATMENU = 4131 -SPI_GETDROPSHADOW = 4132 -SPI_SETDROPSHADOW = 4133 -SPI_GETBLOCKSENDINPUTRESETS = 4134 -SPI_SETBLOCKSENDINPUTRESETS = 4135 -SPI_GETUIEFFECTS = 4158 -SPI_SETUIEFFECTS = 4159 - -SPI_GETFOREGROUNDLOCKTIMEOUT = 8192 -SPI_SETFOREGROUNDLOCKTIMEOUT = 8193 -SPI_GETACTIVEWNDTRKTIMEOUT = 8194 -SPI_SETACTIVEWNDTRKTIMEOUT = 8195 -SPI_GETFOREGROUNDFLASHCOUNT = 8196 -SPI_SETFOREGROUNDFLASHCOUNT = 8197 -SPI_GETCARETWIDTH = 8198 -SPI_SETCARETWIDTH = 8199 -SPI_GETMOUSECLICKLOCKTIME = 8200 -SPI_SETMOUSECLICKLOCKTIME = 8201 -SPI_GETFONTSMOOTHINGTYPE = 8202 -SPI_SETFONTSMOOTHINGTYPE = 8203 -SPI_GETFONTSMOOTHINGCONTRAST = 8204 -SPI_SETFONTSMOOTHINGCONTRAST = 
8205 -SPI_GETFOCUSBORDERWIDTH = 8206 -SPI_SETFOCUSBORDERWIDTH = 8207 -SPI_GETFOCUSBORDERHEIGHT = 8208 -SPI_SETFOCUSBORDERHEIGHT = 8209 -SPI_GETFONTSMOOTHINGORIENTATION = 8210 -SPI_SETFONTSMOOTHINGORIENTATION = 8211 - -# fWinIni flags for SystemParametersInfo -SPIF_UPDATEINIFILE = 1 -SPIF_SENDWININICHANGE = 2 -SPIF_SENDCHANGE = SPIF_SENDWININICHANGE - -# used with SystemParametersInfo and SPI_GETFONTSMOOTHINGTYPE/SPI_SETFONTSMOOTHINGTYPE -FE_FONTSMOOTHINGSTANDARD = 1 -FE_FONTSMOOTHINGCLEARTYPE = 2 -FE_FONTSMOOTHINGDOCKING = 32768 - -METRICS_USEDEFAULT = -1 -ARW_BOTTOMLEFT = 0 -ARW_BOTTOMRIGHT = 1 -ARW_TOPLEFT = 2 -ARW_TOPRIGHT = 3 -ARW_STARTMASK = 3 -ARW_STARTRIGHT = 1 -ARW_STARTTOP = 2 -ARW_LEFT = 0 -ARW_RIGHT = 0 -ARW_UP = 4 -ARW_DOWN = 4 -ARW_HIDE = 8 -# ARW_VALID = 0x000F -SERKF_SERIALKEYSON = 1 -SERKF_AVAILABLE = 2 -SERKF_INDICATOR = 4 -HCF_HIGHCONTRASTON = 1 -HCF_AVAILABLE = 2 -HCF_HOTKEYACTIVE = 4 -HCF_CONFIRMHOTKEY = 8 -HCF_HOTKEYSOUND = 16 -HCF_INDICATOR = 32 -HCF_HOTKEYAVAILABLE = 64 -CDS_UPDATEREGISTRY = 1 -CDS_TEST = 2 -CDS_FULLSCREEN = 4 -CDS_GLOBAL = 8 -CDS_SET_PRIMARY = 16 -CDS_RESET = 1073741824 -CDS_SETRECT = 536870912 -CDS_NORESET = 268435456 - -# return values from ChangeDisplaySettings and ChangeDisplaySettingsEx -DISP_CHANGE_SUCCESSFUL = 0 -DISP_CHANGE_RESTART = 1 -DISP_CHANGE_FAILED = -1 -DISP_CHANGE_BADMODE = -2 -DISP_CHANGE_NOTUPDATED = -3 -DISP_CHANGE_BADFLAGS = -4 -DISP_CHANGE_BADPARAM = -5 -DISP_CHANGE_BADDUALVIEW = -6 - -ENUM_CURRENT_SETTINGS = -1 -ENUM_REGISTRY_SETTINGS = -2 -FKF_FILTERKEYSON = 1 -FKF_AVAILABLE = 2 -FKF_HOTKEYACTIVE = 4 -FKF_CONFIRMHOTKEY = 8 -FKF_HOTKEYSOUND = 16 -FKF_INDICATOR = 32 -FKF_CLICKON = 64 -SKF_STICKYKEYSON = 1 -SKF_AVAILABLE = 2 -SKF_HOTKEYACTIVE = 4 -SKF_CONFIRMHOTKEY = 8 -SKF_HOTKEYSOUND = 16 -SKF_INDICATOR = 32 -SKF_AUDIBLEFEEDBACK = 64 -SKF_TRISTATE = 128 -SKF_TWOKEYSOFF = 256 -SKF_LALTLATCHED = 268435456 -SKF_LCTLLATCHED = 67108864 -SKF_LSHIFTLATCHED = 16777216 -SKF_RALTLATCHED = 536870912 
-SKF_RCTLLATCHED = 134217728 -SKF_RSHIFTLATCHED = 33554432 -SKF_LWINLATCHED = 1073741824 -SKF_RWINLATCHED = -2147483648 -SKF_LALTLOCKED = 1048576 -SKF_LCTLLOCKED = 262144 -SKF_LSHIFTLOCKED = 65536 -SKF_RALTLOCKED = 2097152 -SKF_RCTLLOCKED = 524288 -SKF_RSHIFTLOCKED = 131072 -SKF_LWINLOCKED = 4194304 -SKF_RWINLOCKED = 8388608 -MKF_MOUSEKEYSON = 1 -MKF_AVAILABLE = 2 -MKF_HOTKEYACTIVE = 4 -MKF_CONFIRMHOTKEY = 8 -MKF_HOTKEYSOUND = 16 -MKF_INDICATOR = 32 -MKF_MODIFIERS = 64 -MKF_REPLACENUMBERS = 128 -MKF_LEFTBUTTONSEL = 268435456 -MKF_RIGHTBUTTONSEL = 536870912 -MKF_LEFTBUTTONDOWN = 16777216 -MKF_RIGHTBUTTONDOWN = 33554432 -MKF_MOUSEMODE = -2147483648 -ATF_TIMEOUTON = 1 -ATF_ONOFFFEEDBACK = 2 -SSGF_NONE = 0 -SSGF_DISPLAY = 3 -SSTF_NONE = 0 -SSTF_CHARS = 1 -SSTF_BORDER = 2 -SSTF_DISPLAY = 3 -SSWF_NONE = 0 -SSWF_TITLE = 1 -SSWF_WINDOW = 2 -SSWF_DISPLAY = 3 -SSWF_CUSTOM = 4 -SSF_SOUNDSENTRYON = 1 -SSF_AVAILABLE = 2 -SSF_INDICATOR = 4 -TKF_TOGGLEKEYSON = 1 -TKF_AVAILABLE = 2 -TKF_HOTKEYACTIVE = 4 -TKF_CONFIRMHOTKEY = 8 -TKF_HOTKEYSOUND = 16 -TKF_INDICATOR = 32 -SLE_ERROR = 1 -SLE_MINORERROR = 2 -SLE_WARNING = 3 -MONITOR_DEFAULTTONULL = 0 -MONITOR_DEFAULTTOPRIMARY = 1 -MONITOR_DEFAULTTONEAREST = 2 -MONITORINFOF_PRIMARY = 1 -CCHDEVICENAME = 32 -CHILDID_SELF = 0 -INDEXID_OBJECT = 0 -INDEXID_CONTAINER = 0 -OBJID_WINDOW = 0 -OBJID_SYSMENU = -1 -OBJID_TITLEBAR = -2 -OBJID_MENU = -3 -OBJID_CLIENT = -4 -OBJID_VSCROLL = -5 -OBJID_HSCROLL = -6 -OBJID_SIZEGRIP = -7 -OBJID_CARET = -8 -OBJID_CURSOR = -9 -OBJID_ALERT = -10 -OBJID_SOUND = -11 -EVENT_MIN = 1 -EVENT_MAX = 2147483647 -EVENT_SYSTEM_SOUND = 1 -EVENT_SYSTEM_ALERT = 2 -EVENT_SYSTEM_FOREGROUND = 3 -EVENT_SYSTEM_MENUSTART = 4 -EVENT_SYSTEM_MENUEND = 5 -EVENT_SYSTEM_MENUPOPUPSTART = 6 -EVENT_SYSTEM_MENUPOPUPEND = 7 -EVENT_SYSTEM_CAPTURESTART = 8 -EVENT_SYSTEM_CAPTUREEND = 9 -EVENT_SYSTEM_MOVESIZESTART = 10 -EVENT_SYSTEM_MOVESIZEEND = 11 -EVENT_SYSTEM_CONTEXTHELPSTART = 12 -EVENT_SYSTEM_CONTEXTHELPEND = 13 
-EVENT_SYSTEM_DRAGDROPSTART = 14 -EVENT_SYSTEM_DRAGDROPEND = 15 -EVENT_SYSTEM_DIALOGSTART = 16 -EVENT_SYSTEM_DIALOGEND = 17 -EVENT_SYSTEM_SCROLLINGSTART = 18 -EVENT_SYSTEM_SCROLLINGEND = 19 -EVENT_SYSTEM_SWITCHSTART = 20 -EVENT_SYSTEM_SWITCHEND = 21 -EVENT_SYSTEM_MINIMIZESTART = 22 -EVENT_SYSTEM_MINIMIZEEND = 23 -EVENT_OBJECT_CREATE = 32768 -EVENT_OBJECT_DESTROY = 32769 -EVENT_OBJECT_SHOW = 32770 -EVENT_OBJECT_HIDE = 32771 -EVENT_OBJECT_REORDER = 32772 -EVENT_OBJECT_FOCUS = 32773 -EVENT_OBJECT_SELECTION = 32774 -EVENT_OBJECT_SELECTIONADD = 32775 -EVENT_OBJECT_SELECTIONREMOVE = 32776 -EVENT_OBJECT_SELECTIONWITHIN = 32777 -EVENT_OBJECT_STATECHANGE = 32778 -EVENT_OBJECT_LOCATIONCHANGE = 32779 -EVENT_OBJECT_NAMECHANGE = 32780 -EVENT_OBJECT_DESCRIPTIONCHANGE = 32781 -EVENT_OBJECT_VALUECHANGE = 32782 -EVENT_OBJECT_PARENTCHANGE = 32783 -EVENT_OBJECT_HELPCHANGE = 32784 -EVENT_OBJECT_DEFACTIONCHANGE = 32785 -EVENT_OBJECT_ACCELERATORCHANGE = 32786 -SOUND_SYSTEM_STARTUP = 1 -SOUND_SYSTEM_SHUTDOWN = 2 -SOUND_SYSTEM_BEEP = 3 -SOUND_SYSTEM_ERROR = 4 -SOUND_SYSTEM_QUESTION = 5 -SOUND_SYSTEM_WARNING = 6 -SOUND_SYSTEM_INFORMATION = 7 -SOUND_SYSTEM_MAXIMIZE = 8 -SOUND_SYSTEM_MINIMIZE = 9 -SOUND_SYSTEM_RESTOREUP = 10 -SOUND_SYSTEM_RESTOREDOWN = 11 -SOUND_SYSTEM_APPSTART = 12 -SOUND_SYSTEM_FAULT = 13 -SOUND_SYSTEM_APPEND = 14 -SOUND_SYSTEM_MENUCOMMAND = 15 -SOUND_SYSTEM_MENUPOPUP = 16 -CSOUND_SYSTEM = 16 -ALERT_SYSTEM_INFORMATIONAL = 1 -ALERT_SYSTEM_WARNING = 2 -ALERT_SYSTEM_ERROR = 3 -ALERT_SYSTEM_QUERY = 4 -ALERT_SYSTEM_CRITICAL = 5 -CALERT_SYSTEM = 6 -WINEVENT_OUTOFCONTEXT = 0 -WINEVENT_SKIPOWNTHREAD = 1 -WINEVENT_SKIPOWNPROCESS = 2 -WINEVENT_INCONTEXT = 4 -GUI_CARETBLINKING = 1 -GUI_INMOVESIZE = 2 -GUI_INMENUMODE = 4 -GUI_SYSTEMMENUMODE = 8 -GUI_POPUPMENUMODE = 16 -STATE_SYSTEM_UNAVAILABLE = 1 -STATE_SYSTEM_SELECTED = 2 -STATE_SYSTEM_FOCUSED = 4 -STATE_SYSTEM_PRESSED = 8 -STATE_SYSTEM_CHECKED = 16 -STATE_SYSTEM_MIXED = 32 -STATE_SYSTEM_READONLY = 64 -STATE_SYSTEM_HOTTRACKED = 128 
-STATE_SYSTEM_DEFAULT = 256 -STATE_SYSTEM_EXPANDED = 512 -STATE_SYSTEM_COLLAPSED = 1024 -STATE_SYSTEM_BUSY = 2048 -STATE_SYSTEM_FLOATING = 4096 -STATE_SYSTEM_MARQUEED = 8192 -STATE_SYSTEM_ANIMATED = 16384 -STATE_SYSTEM_INVISIBLE = 32768 -STATE_SYSTEM_OFFSCREEN = 65536 -STATE_SYSTEM_SIZEABLE = 131072 -STATE_SYSTEM_MOVEABLE = 262144 -STATE_SYSTEM_SELFVOICING = 524288 -STATE_SYSTEM_FOCUSABLE = 1048576 -STATE_SYSTEM_SELECTABLE = 2097152 -STATE_SYSTEM_LINKED = 4194304 -STATE_SYSTEM_TRAVERSED = 8388608 -STATE_SYSTEM_MULTISELECTABLE = 16777216 -STATE_SYSTEM_EXTSELECTABLE = 33554432 -STATE_SYSTEM_ALERT_LOW = 67108864 -STATE_SYSTEM_ALERT_MEDIUM = 134217728 -STATE_SYSTEM_ALERT_HIGH = 268435456 -STATE_SYSTEM_VALID = 536870911 -CCHILDREN_TITLEBAR = 5 -CCHILDREN_SCROLLBAR = 5 -CURSOR_SHOWING = 1 -WS_ACTIVECAPTION = 1 -GA_MIC = 1 -GA_PARENT = 1 -GA_ROOT = 2 -GA_ROOTOWNER = 3 -GA_MAC = 4 - -# winuser.h line 1979 -BF_LEFT = 1 -BF_TOP = 2 -BF_RIGHT = 4 -BF_BOTTOM = 8 -BF_TOPLEFT = BF_TOP | BF_LEFT -BF_TOPRIGHT = BF_TOP | BF_RIGHT -BF_BOTTOMLEFT = BF_BOTTOM | BF_LEFT -BF_BOTTOMRIGHT = BF_BOTTOM | BF_RIGHT -BF_RECT = BF_LEFT | BF_TOP | BF_RIGHT | BF_BOTTOM -BF_DIAGONAL = 16 -BF_DIAGONAL_ENDTOPRIGHT = BF_DIAGONAL | BF_TOP | BF_RIGHT -BF_DIAGONAL_ENDTOPLEFT = BF_DIAGONAL | BF_TOP | BF_LEFT -BF_DIAGONAL_ENDBOTTOMLEFT = BF_DIAGONAL | BF_BOTTOM | BF_LEFT -BF_DIAGONAL_ENDBOTTOMRIGHT = BF_DIAGONAL | BF_BOTTOM | BF_RIGHT -BF_MIDDLE = 2048 -BF_SOFT = 4096 -BF_ADJUST = 8192 -BF_FLAT = 16384 -BF_MONO = 32768 -DFC_CAPTION = 1 -DFC_MENU = 2 -DFC_SCROLL = 3 -DFC_BUTTON = 4 -DFC_POPUPMENU = 5 -DFCS_CAPTIONCLOSE = 0 -DFCS_CAPTIONMIN = 1 -DFCS_CAPTIONMAX = 2 -DFCS_CAPTIONRESTORE = 3 -DFCS_CAPTIONHELP = 4 -DFCS_MENUARROW = 0 -DFCS_MENUCHECK = 1 -DFCS_MENUBULLET = 2 -DFCS_MENUARROWRIGHT = 4 -DFCS_SCROLLUP = 0 -DFCS_SCROLLDOWN = 1 -DFCS_SCROLLLEFT = 2 -DFCS_SCROLLRIGHT = 3 -DFCS_SCROLLCOMBOBOX = 5 -DFCS_SCROLLSIZEGRIP = 8 -DFCS_SCROLLSIZEGRIPRIGHT = 16 -DFCS_BUTTONCHECK = 0 -DFCS_BUTTONRADIOIMAGE = 1 
-DFCS_BUTTONRADIOMASK = 2 -DFCS_BUTTONRADIO = 4 -DFCS_BUTTON3STATE = 8 -DFCS_BUTTONPUSH = 16 -DFCS_INACTIVE = 256 -DFCS_PUSHED = 512 -DFCS_CHECKED = 1024 -DFCS_TRANSPARENT = 2048 -DFCS_HOT = 4096 -DFCS_ADJUSTRECT = 8192 -DFCS_FLAT = 16384 -DFCS_MONO = 32768 -DC_ACTIVE = 1 -DC_SMALLCAP = 2 -DC_ICON = 4 -DC_TEXT = 8 -DC_INBUTTON = 16 -DC_GRADIENT = 32 -IDANI_OPEN = 1 -IDANI_CLOSE = 2 -IDANI_CAPTION = 3 -CF_TEXT = 1 -CF_BITMAP = 2 -CF_METAFILEPICT = 3 -CF_SYLK = 4 -CF_DIF = 5 -CF_TIFF = 6 -CF_OEMTEXT = 7 -CF_DIB = 8 -CF_PALETTE = 9 -CF_PENDATA = 10 -CF_RIFF = 11 -CF_WAVE = 12 -CF_UNICODETEXT = 13 -CF_ENHMETAFILE = 14 -CF_HDROP = 15 -CF_LOCALE = 16 -CF_DIBV5 = 17 -CF_MAX = 18 -CF_OWNERDISPLAY = 128 -CF_DSPTEXT = 129 -CF_DSPBITMAP = 130 -CF_DSPMETAFILEPICT = 131 -CF_DSPENHMETAFILE = 142 -CF_PRIVATEFIRST = 512 -CF_PRIVATELAST = 767 -CF_GDIOBJFIRST = 768 -CF_GDIOBJLAST = 1023 -FVIRTKEY = 1 -FNOINVERT = 2 -FSHIFT = 4 -FCONTROL = 8 -FALT = 16 -WPF_SETMINPOSITION = 1 -WPF_RESTORETOMAXIMIZED = 2 -ODT_MENU = 1 -ODT_LISTBOX = 2 -ODT_COMBOBOX = 3 -ODT_BUTTON = 4 -ODT_STATIC = 5 -ODA_DRAWENTIRE = 1 -ODA_SELECT = 2 -ODA_FOCUS = 4 -ODS_SELECTED = 1 -ODS_GRAYED = 2 -ODS_DISABLED = 4 -ODS_CHECKED = 8 -ODS_FOCUS = 16 -ODS_DEFAULT = 32 -ODS_COMBOBOXEDIT = 4096 -ODS_HOTLIGHT = 64 -ODS_INACTIVE = 128 -PM_NOREMOVE = 0 -PM_REMOVE = 1 -PM_NOYIELD = 2 -MOD_ALT = 1 -MOD_CONTROL = 2 -MOD_SHIFT = 4 -MOD_WIN = 8 -IDHOT_SNAPWINDOW = -1 -IDHOT_SNAPDESKTOP = -2 -# EW_RESTARTWINDOWS = 0x0042 -# EW_REBOOTSYSTEM = 0x0043 -# EW_EXITANDEXECAPP = 0x0044 -ENDSESSION_LOGOFF = -2147483648 -EWX_LOGOFF = 0 -EWX_SHUTDOWN = 1 -EWX_REBOOT = 2 -EWX_FORCE = 4 -EWX_POWEROFF = 8 -EWX_FORCEIFHUNG = 16 -BSM_ALLCOMPONENTS = 0 -BSM_VXDS = 1 -BSM_NETDRIVER = 2 -BSM_INSTALLABLEDRIVERS = 4 -BSM_APPLICATIONS = 8 -BSM_ALLDESKTOPS = 16 -BSF_QUERY = 1 -BSF_IGNORECURRENTTASK = 2 -BSF_FLUSHDISK = 4 -BSF_NOHANG = 8 -BSF_POSTMESSAGE = 16 -BSF_FORCEIFHUNG = 32 -BSF_NOTIMEOUTIFNOTHUNG = 64 -BROADCAST_QUERY_DENY = 1112363332 # Return 
this value to deny a query. - -DBWF_LPARAMPOINTER = 32768 - -# winuser.h line 3232 -SWP_NOSIZE = 1 -SWP_NOMOVE = 2 -SWP_NOZORDER = 4 -SWP_NOREDRAW = 8 -SWP_NOACTIVATE = 16 -SWP_FRAMECHANGED = 32 -SWP_SHOWWINDOW = 64 -SWP_HIDEWINDOW = 128 -SWP_NOCOPYBITS = 256 -SWP_NOOWNERZORDER = 512 -SWP_NOSENDCHANGING = 1024 -SWP_DRAWFRAME = SWP_FRAMECHANGED -SWP_NOREPOSITION = SWP_NOOWNERZORDER -SWP_DEFERERASE = 8192 -SWP_ASYNCWINDOWPOS = 16384 - -DLGWINDOWEXTRA = 30 -# winuser.h line 4249 -KEYEVENTF_EXTENDEDKEY = 1 -KEYEVENTF_KEYUP = 2 -# if(_WIN32_WINNT >= 0x0500) -KEYEVENTF_UNICODE = 4 -KEYEVENTF_SCANCODE = 8 -# endif /* _WIN32_WINNT >= 0x0500 */ -MOUSEEVENTF_MOVE = 1 -MOUSEEVENTF_LEFTDOWN = 2 -MOUSEEVENTF_LEFTUP = 4 -MOUSEEVENTF_RIGHTDOWN = 8 -MOUSEEVENTF_RIGHTUP = 16 -MOUSEEVENTF_MIDDLEDOWN = 32 -MOUSEEVENTF_MIDDLEUP = 64 -MOUSEEVENTF_XDOWN = 128 -MOUSEEVENTF_XUP = 256 -MOUSEEVENTF_WHEEL = 2048 -# if (_WIN32_WINNT >= 0x0600) -MOUSEEVENTF_HWHEEL = 4096 -# endif -# if(WINVER >= 0x0600) -MOUSEEVENTF_MOVE_NOCOALESCE = 8192 -# endif /* WINVER >= 0x0600 */ -MOUSEEVENTF_VIRTUALDESK = 16384 -MOUSEEVENTF_ABSOLUTE = 32768 -INPUT_MOUSE = 0 -INPUT_KEYBOARD = 1 -INPUT_HARDWARE = 2 -MWMO_WAITALL = 1 -MWMO_ALERTABLE = 2 -MWMO_INPUTAVAILABLE = 4 -QS_KEY = 1 -QS_MOUSEMOVE = 2 -QS_MOUSEBUTTON = 4 -QS_POSTMESSAGE = 8 -QS_TIMER = 16 -QS_PAINT = 32 -QS_SENDMESSAGE = 64 -QS_HOTKEY = 128 -QS_MOUSE = QS_MOUSEMOVE | QS_MOUSEBUTTON -QS_INPUT = QS_MOUSE | QS_KEY -QS_ALLEVENTS = QS_INPUT | QS_POSTMESSAGE | QS_TIMER | QS_PAINT | QS_HOTKEY -QS_ALLINPUT = ( - QS_INPUT | QS_POSTMESSAGE | QS_TIMER | QS_PAINT | QS_HOTKEY | QS_SENDMESSAGE -) - - -IMN_CLOSESTATUSWINDOW = 1 -IMN_OPENSTATUSWINDOW = 2 -IMN_CHANGECANDIDATE = 3 -IMN_CLOSECANDIDATE = 4 -IMN_OPENCANDIDATE = 5 -IMN_SETCONVERSIONMODE = 6 -IMN_SETSENTENCEMODE = 7 -IMN_SETOPENSTATUS = 8 -IMN_SETCANDIDATEPOS = 9 -IMN_SETCOMPOSITIONFONT = 10 -IMN_SETCOMPOSITIONWINDOW = 11 -IMN_SETSTATUSWINDOWPOS = 12 -IMN_GUIDELINE = 13 -IMN_PRIVATE = 14 - -# winuser.h 
line 8518 -HELP_CONTEXT = 1 -HELP_QUIT = 2 -HELP_INDEX = 3 -HELP_CONTENTS = 3 -HELP_HELPONHELP = 4 -HELP_SETINDEX = 5 -HELP_SETCONTENTS = 5 -HELP_CONTEXTPOPUP = 8 -HELP_FORCEFILE = 9 -HELP_KEY = 257 -HELP_COMMAND = 258 -HELP_PARTIALKEY = 261 -HELP_MULTIKEY = 513 -HELP_SETWINPOS = 515 -HELP_CONTEXTMENU = 10 -HELP_FINDER = 11 -HELP_WM_HELP = 12 -HELP_SETPOPUP_POS = 13 -HELP_TCARD = 32768 -HELP_TCARD_DATA = 16 -HELP_TCARD_OTHER_CALLER = 17 -IDH_NO_HELP = 28440 -IDH_MISSING_CONTEXT = 28441 # Control doesn't have matching help context -IDH_GENERIC_HELP_BUTTON = 28442 # Property sheet help button -IDH_OK = 28443 -IDH_CANCEL = 28444 -IDH_HELP = 28445 -GR_GDIOBJECTS = 0 # Count of GDI objects -GR_USEROBJECTS = 1 # Count of USER objects -# Generated by h2py from \msvcnt\include\wingdi.h -# manually added (missed by generation some how! -SRCCOPY = 13369376 # dest = source -SRCPAINT = 15597702 # dest = source OR dest -SRCAND = 8913094 # dest = source AND dest -SRCINVERT = 6684742 # dest = source XOR dest -SRCERASE = 4457256 # dest = source AND (NOT dest ) -NOTSRCCOPY = 3342344 # dest = (NOT source) -NOTSRCERASE = 1114278 # dest = (NOT src) AND (NOT dest) -MERGECOPY = 12583114 # dest = (source AND pattern) -MERGEPAINT = 12255782 # dest = (NOT source) OR dest -PATCOPY = 15728673 # dest = pattern -PATPAINT = 16452105 # dest = DPSnoo -PATINVERT = 5898313 # dest = pattern XOR dest -DSTINVERT = 5570569 # dest = (NOT dest) -BLACKNESS = 66 # dest = BLACK -WHITENESS = 16711778 # dest = WHITE - -# hacked and split manually by mhammond. 
-R2_BLACK = 1 -R2_NOTMERGEPEN = 2 -R2_MASKNOTPEN = 3 -R2_NOTCOPYPEN = 4 -R2_MASKPENNOT = 5 -R2_NOT = 6 -R2_XORPEN = 7 -R2_NOTMASKPEN = 8 -R2_MASKPEN = 9 -R2_NOTXORPEN = 10 -R2_NOP = 11 -R2_MERGENOTPEN = 12 -R2_COPYPEN = 13 -R2_MERGEPENNOT = 14 -R2_MERGEPEN = 15 -R2_WHITE = 16 -R2_LAST = 16 -GDI_ERROR = -1 -ERROR = 0 -NULLREGION = 1 -SIMPLEREGION = 2 -COMPLEXREGION = 3 -RGN_ERROR = ERROR -RGN_AND = 1 -RGN_OR = 2 -RGN_XOR = 3 -RGN_DIFF = 4 -RGN_COPY = 5 -RGN_MIN = RGN_AND -RGN_MAX = RGN_COPY - -## Stretching modes used with Get/SetStretchBltMode -BLACKONWHITE = 1 -WHITEONBLACK = 2 -COLORONCOLOR = 3 -HALFTONE = 4 -MAXSTRETCHBLTMODE = 4 -STRETCH_ANDSCANS = BLACKONWHITE -STRETCH_ORSCANS = WHITEONBLACK -STRETCH_DELETESCANS = COLORONCOLOR -STRETCH_HALFTONE = HALFTONE - -ALTERNATE = 1 -WINDING = 2 -POLYFILL_LAST = 2 - -## flags used with SetLayout -LAYOUT_RTL = 1 -LAYOUT_BTT = 2 -LAYOUT_VBH = 4 -LAYOUT_ORIENTATIONMASK = LAYOUT_RTL | LAYOUT_BTT | LAYOUT_VBH -LAYOUT_BITMAPORIENTATIONPRESERVED = 8 - -TA_NOUPDATECP = 0 -TA_UPDATECP = 1 -TA_LEFT = 0 -TA_RIGHT = 2 -TA_CENTER = 6 -TA_TOP = 0 -TA_BOTTOM = 8 -TA_BASELINE = 24 -TA_MASK = TA_BASELINE + TA_CENTER + TA_UPDATECP -VTA_BASELINE = TA_BASELINE -VTA_LEFT = TA_BOTTOM -VTA_RIGHT = TA_TOP -VTA_CENTER = TA_CENTER -VTA_BOTTOM = TA_RIGHT -VTA_TOP = TA_LEFT -ETO_GRAYED = 1 -ETO_OPAQUE = 2 -ETO_CLIPPED = 4 -ASPECT_FILTERING = 1 -DCB_RESET = 1 -DCB_ACCUMULATE = 2 -DCB_DIRTY = DCB_ACCUMULATE -DCB_SET = DCB_RESET | DCB_ACCUMULATE -DCB_ENABLE = 4 -DCB_DISABLE = 8 -META_SETBKCOLOR = 513 -META_SETBKMODE = 258 -META_SETMAPMODE = 259 -META_SETROP2 = 260 -META_SETRELABS = 261 -META_SETPOLYFILLMODE = 262 -META_SETSTRETCHBLTMODE = 263 -META_SETTEXTCHAREXTRA = 264 -META_SETTEXTCOLOR = 521 -META_SETTEXTJUSTIFICATION = 522 -META_SETWINDOWORG = 523 -META_SETWINDOWEXT = 524 -META_SETVIEWPORTORG = 525 -META_SETVIEWPORTEXT = 526 -META_OFFSETWINDOWORG = 527 -META_SCALEWINDOWEXT = 1040 -META_OFFSETVIEWPORTORG = 529 -META_SCALEVIEWPORTEXT = 1042 
-META_LINETO = 531 -META_MOVETO = 532 -META_EXCLUDECLIPRECT = 1045 -META_INTERSECTCLIPRECT = 1046 -META_ARC = 2071 -META_ELLIPSE = 1048 -META_FLOODFILL = 1049 -META_PIE = 2074 -META_RECTANGLE = 1051 -META_ROUNDRECT = 1564 -META_PATBLT = 1565 -META_SAVEDC = 30 -META_SETPIXEL = 1055 -META_OFFSETCLIPRGN = 544 -META_TEXTOUT = 1313 -META_BITBLT = 2338 -META_STRETCHBLT = 2851 -META_POLYGON = 804 -META_POLYLINE = 805 -META_ESCAPE = 1574 -META_RESTOREDC = 295 -META_FILLREGION = 552 -META_FRAMEREGION = 1065 -META_INVERTREGION = 298 -META_PAINTREGION = 299 -META_SELECTCLIPREGION = 300 -META_SELECTOBJECT = 301 -META_SETTEXTALIGN = 302 -META_CHORD = 2096 -META_SETMAPPERFLAGS = 561 -META_EXTTEXTOUT = 2610 -META_SETDIBTODEV = 3379 -META_SELECTPALETTE = 564 -META_REALIZEPALETTE = 53 -META_ANIMATEPALETTE = 1078 -META_SETPALENTRIES = 55 -META_POLYPOLYGON = 1336 -META_RESIZEPALETTE = 313 -META_DIBBITBLT = 2368 -META_DIBSTRETCHBLT = 2881 -META_DIBCREATEPATTERNBRUSH = 322 -META_STRETCHDIB = 3907 -META_EXTFLOODFILL = 1352 -META_DELETEOBJECT = 496 -META_CREATEPALETTE = 247 -META_CREATEPATTERNBRUSH = 505 -META_CREATEPENINDIRECT = 762 -META_CREATEFONTINDIRECT = 763 -META_CREATEBRUSHINDIRECT = 764 -META_CREATEREGION = 1791 -FILE_BEGIN = 0 -FILE_CURRENT = 1 -FILE_END = 2 -FILE_FLAG_WRITE_THROUGH = -2147483648 -FILE_FLAG_OVERLAPPED = 1073741824 -FILE_FLAG_NO_BUFFERING = 536870912 -FILE_FLAG_RANDOM_ACCESS = 268435456 -FILE_FLAG_SEQUENTIAL_SCAN = 134217728 -FILE_FLAG_DELETE_ON_CLOSE = 67108864 -FILE_FLAG_BACKUP_SEMANTICS = 33554432 -FILE_FLAG_POSIX_SEMANTICS = 16777216 -CREATE_NEW = 1 -CREATE_ALWAYS = 2 -OPEN_EXISTING = 3 -OPEN_ALWAYS = 4 -TRUNCATE_EXISTING = 5 -PIPE_ACCESS_INBOUND = 1 -PIPE_ACCESS_OUTBOUND = 2 -PIPE_ACCESS_DUPLEX = 3 -PIPE_CLIENT_END = 0 -PIPE_SERVER_END = 1 -PIPE_WAIT = 0 -PIPE_NOWAIT = 1 -PIPE_READMODE_BYTE = 0 -PIPE_READMODE_MESSAGE = 2 -PIPE_TYPE_BYTE = 0 -PIPE_TYPE_MESSAGE = 4 -PIPE_UNLIMITED_INSTANCES = 255 -SECURITY_CONTEXT_TRACKING = 262144 -SECURITY_EFFECTIVE_ONLY = 
524288 -SECURITY_SQOS_PRESENT = 1048576 -SECURITY_VALID_SQOS_FLAGS = 2031616 -DTR_CONTROL_DISABLE = 0 -DTR_CONTROL_ENABLE = 1 -DTR_CONTROL_HANDSHAKE = 2 -RTS_CONTROL_DISABLE = 0 -RTS_CONTROL_ENABLE = 1 -RTS_CONTROL_HANDSHAKE = 2 -RTS_CONTROL_TOGGLE = 3 -GMEM_FIXED = 0 -GMEM_MOVEABLE = 2 -GMEM_NOCOMPACT = 16 -GMEM_NODISCARD = 32 -GMEM_ZEROINIT = 64 -GMEM_MODIFY = 128 -GMEM_DISCARDABLE = 256 -GMEM_NOT_BANKED = 4096 -GMEM_SHARE = 8192 -GMEM_DDESHARE = 8192 -GMEM_NOTIFY = 16384 -GMEM_LOWER = GMEM_NOT_BANKED -GMEM_VALID_FLAGS = 32626 -GMEM_INVALID_HANDLE = 32768 -GHND = GMEM_MOVEABLE | GMEM_ZEROINIT -GPTR = GMEM_FIXED | GMEM_ZEROINIT -GMEM_DISCARDED = 16384 -GMEM_LOCKCOUNT = 255 -LMEM_FIXED = 0 -LMEM_MOVEABLE = 2 -LMEM_NOCOMPACT = 16 -LMEM_NODISCARD = 32 -LMEM_ZEROINIT = 64 -LMEM_MODIFY = 128 -LMEM_DISCARDABLE = 3840 -LMEM_VALID_FLAGS = 3954 -LMEM_INVALID_HANDLE = 32768 -LHND = LMEM_MOVEABLE | LMEM_ZEROINIT -LPTR = LMEM_FIXED | LMEM_ZEROINIT -NONZEROLHND = LMEM_MOVEABLE -NONZEROLPTR = LMEM_FIXED -LMEM_DISCARDED = 16384 -LMEM_LOCKCOUNT = 255 -DEBUG_PROCESS = 1 -DEBUG_ONLY_THIS_PROCESS = 2 -CREATE_SUSPENDED = 4 -DETACHED_PROCESS = 8 -CREATE_NEW_CONSOLE = 16 -NORMAL_PRIORITY_CLASS = 32 -IDLE_PRIORITY_CLASS = 64 -HIGH_PRIORITY_CLASS = 128 -REALTIME_PRIORITY_CLASS = 256 -CREATE_NEW_PROCESS_GROUP = 512 -CREATE_UNICODE_ENVIRONMENT = 1024 -CREATE_SEPARATE_WOW_VDM = 2048 -CREATE_SHARED_WOW_VDM = 4096 -CREATE_DEFAULT_ERROR_MODE = 67108864 -CREATE_NO_WINDOW = 134217728 -PROFILE_USER = 268435456 -PROFILE_KERNEL = 536870912 -PROFILE_SERVER = 1073741824 -THREAD_BASE_PRIORITY_LOWRT = 15 -THREAD_BASE_PRIORITY_MAX = 2 -THREAD_BASE_PRIORITY_MIN = -2 -THREAD_BASE_PRIORITY_IDLE = -15 -THREAD_PRIORITY_LOWEST = THREAD_BASE_PRIORITY_MIN -THREAD_PRIORITY_BELOW_NORMAL = THREAD_PRIORITY_LOWEST + 1 -THREAD_PRIORITY_HIGHEST = THREAD_BASE_PRIORITY_MAX -THREAD_PRIORITY_ABOVE_NORMAL = THREAD_PRIORITY_HIGHEST - 1 -THREAD_PRIORITY_ERROR_RETURN = MAXLONG -THREAD_PRIORITY_TIME_CRITICAL = 
THREAD_BASE_PRIORITY_LOWRT -THREAD_PRIORITY_IDLE = THREAD_BASE_PRIORITY_IDLE -THREAD_PRIORITY_NORMAL = 0 -THREAD_MODE_BACKGROUND_BEGIN = 0x00010000 -THREAD_MODE_BACKGROUND_END = 0x00020000 - -EXCEPTION_DEBUG_EVENT = 1 -CREATE_THREAD_DEBUG_EVENT = 2 -CREATE_PROCESS_DEBUG_EVENT = 3 -EXIT_THREAD_DEBUG_EVENT = 4 -EXIT_PROCESS_DEBUG_EVENT = 5 -LOAD_DLL_DEBUG_EVENT = 6 -UNLOAD_DLL_DEBUG_EVENT = 7 -OUTPUT_DEBUG_STRING_EVENT = 8 -RIP_EVENT = 9 -DRIVE_UNKNOWN = 0 -DRIVE_NO_ROOT_DIR = 1 -DRIVE_REMOVABLE = 2 -DRIVE_FIXED = 3 -DRIVE_REMOTE = 4 -DRIVE_CDROM = 5 -DRIVE_RAMDISK = 6 -FILE_TYPE_UNKNOWN = 0 -FILE_TYPE_DISK = 1 -FILE_TYPE_CHAR = 2 -FILE_TYPE_PIPE = 3 -FILE_TYPE_REMOTE = 32768 -NOPARITY = 0 -ODDPARITY = 1 -EVENPARITY = 2 -MARKPARITY = 3 -SPACEPARITY = 4 -ONESTOPBIT = 0 -ONE5STOPBITS = 1 -TWOSTOPBITS = 2 -CBR_110 = 110 -CBR_300 = 300 -CBR_600 = 600 -CBR_1200 = 1200 -CBR_2400 = 2400 -CBR_4800 = 4800 -CBR_9600 = 9600 -CBR_14400 = 14400 -CBR_19200 = 19200 -CBR_38400 = 38400 -CBR_56000 = 56000 -CBR_57600 = 57600 -CBR_115200 = 115200 -CBR_128000 = 128000 -CBR_256000 = 256000 -S_QUEUEEMPTY = 0 -S_THRESHOLD = 1 -S_ALLTHRESHOLD = 2 -S_NORMAL = 0 -S_LEGATO = 1 -S_STACCATO = 2 -NMPWAIT_WAIT_FOREVER = -1 -NMPWAIT_NOWAIT = 1 -NMPWAIT_USE_DEFAULT_WAIT = 0 -OF_READ = 0 -OF_WRITE = 1 -OF_READWRITE = 2 -OF_SHARE_COMPAT = 0 -OF_SHARE_EXCLUSIVE = 16 -OF_SHARE_DENY_WRITE = 32 -OF_SHARE_DENY_READ = 48 -OF_SHARE_DENY_NONE = 64 -OF_PARSE = 256 -OF_DELETE = 512 -OF_VERIFY = 1024 -OF_CANCEL = 2048 -OF_CREATE = 4096 -OF_PROMPT = 8192 -OF_EXIST = 16384 -OF_REOPEN = 32768 -OFS_MAXPATHNAME = 128 -MAXINTATOM = 49152 - -# winbase.h -PROCESS_HEAP_REGION = 1 -PROCESS_HEAP_UNCOMMITTED_RANGE = 2 -PROCESS_HEAP_ENTRY_BUSY = 4 -PROCESS_HEAP_ENTRY_MOVEABLE = 16 -PROCESS_HEAP_ENTRY_DDESHARE = 32 -SCS_32BIT_BINARY = 0 -SCS_DOS_BINARY = 1 -SCS_WOW_BINARY = 2 -SCS_PIF_BINARY = 3 -SCS_POSIX_BINARY = 4 -SCS_OS216_BINARY = 5 -SEM_FAILCRITICALERRORS = 1 -SEM_NOGPFAULTERRORBOX = 2 -SEM_NOALIGNMENTFAULTEXCEPT = 4 
-SEM_NOOPENFILEERRORBOX = 32768 -LOCKFILE_FAIL_IMMEDIATELY = 1 -LOCKFILE_EXCLUSIVE_LOCK = 2 -HANDLE_FLAG_INHERIT = 1 -HANDLE_FLAG_PROTECT_FROM_CLOSE = 2 -HINSTANCE_ERROR = 32 -GET_TAPE_MEDIA_INFORMATION = 0 -GET_TAPE_DRIVE_INFORMATION = 1 -SET_TAPE_MEDIA_INFORMATION = 0 -SET_TAPE_DRIVE_INFORMATION = 1 -FORMAT_MESSAGE_ALLOCATE_BUFFER = 256 -FORMAT_MESSAGE_IGNORE_INSERTS = 512 -FORMAT_MESSAGE_FROM_STRING = 1024 -FORMAT_MESSAGE_FROM_HMODULE = 2048 -FORMAT_MESSAGE_FROM_SYSTEM = 4096 -FORMAT_MESSAGE_ARGUMENT_ARRAY = 8192 -FORMAT_MESSAGE_MAX_WIDTH_MASK = 255 -BACKUP_INVALID = 0 -BACKUP_DATA = 1 -BACKUP_EA_DATA = 2 -BACKUP_SECURITY_DATA = 3 -BACKUP_ALTERNATE_DATA = 4 -BACKUP_LINK = 5 -BACKUP_PROPERTY_DATA = 6 -BACKUP_OBJECT_ID = 7 -BACKUP_REPARSE_DATA = 8 -BACKUP_SPARSE_BLOCK = 9 - -STREAM_NORMAL_ATTRIBUTE = 0 -STREAM_MODIFIED_WHEN_READ = 1 -STREAM_CONTAINS_SECURITY = 2 -STREAM_CONTAINS_PROPERTIES = 4 -STARTF_USESHOWWINDOW = 1 -STARTF_USESIZE = 2 -STARTF_USEPOSITION = 4 -STARTF_USECOUNTCHARS = 8 -STARTF_USEFILLATTRIBUTE = 16 -STARTF_FORCEONFEEDBACK = 64 -STARTF_FORCEOFFFEEDBACK = 128 -STARTF_USESTDHANDLES = 256 -STARTF_USEHOTKEY = 512 -SHUTDOWN_NORETRY = 1 -DONT_RESOLVE_DLL_REFERENCES = 1 -LOAD_LIBRARY_AS_DATAFILE = 2 -LOAD_WITH_ALTERED_SEARCH_PATH = 8 -DDD_RAW_TARGET_PATH = 1 -DDD_REMOVE_DEFINITION = 2 -DDD_EXACT_MATCH_ON_REMOVE = 4 -MOVEFILE_REPLACE_EXISTING = 1 -MOVEFILE_COPY_ALLOWED = 2 -MOVEFILE_DELAY_UNTIL_REBOOT = 4 -MAX_COMPUTERNAME_LENGTH = 15 -LOGON32_LOGON_INTERACTIVE = 2 -LOGON32_LOGON_NETWORK = 3 -LOGON32_LOGON_BATCH = 4 -LOGON32_LOGON_SERVICE = 5 -LOGON32_LOGON_UNLOCK = 7 -LOGON32_LOGON_NETWORK_CLEARTEXT = 8 -LOGON32_LOGON_NEW_CREDENTIALS = 9 -LOGON32_PROVIDER_DEFAULT = 0 -LOGON32_PROVIDER_WINNT35 = 1 -LOGON32_PROVIDER_WINNT40 = 2 -LOGON32_PROVIDER_WINNT50 = 3 -VER_PLATFORM_WIN32s = 0 -VER_PLATFORM_WIN32_WINDOWS = 1 -VER_PLATFORM_WIN32_NT = 2 -TC_NORMAL = 0 -TC_HARDERR = 1 -TC_GP_TRAP = 2 -TC_SIGNAL = 3 -AC_LINE_OFFLINE = 0 -AC_LINE_ONLINE = 1 
-AC_LINE_BACKUP_POWER = 2 -AC_LINE_UNKNOWN = 255 -BATTERY_FLAG_HIGH = 1 -BATTERY_FLAG_LOW = 2 -BATTERY_FLAG_CRITICAL = 4 -BATTERY_FLAG_CHARGING = 8 -BATTERY_FLAG_NO_BATTERY = 128 -BATTERY_FLAG_UNKNOWN = 255 -BATTERY_PERCENTAGE_UNKNOWN = 255 -BATTERY_LIFE_UNKNOWN = -1 - -# Generated by h2py from d:\msdev\include\richedit.h -cchTextLimitDefault = 32767 -WM_CONTEXTMENU = 123 -WM_PRINTCLIENT = 792 -EN_MSGFILTER = 1792 -EN_REQUESTRESIZE = 1793 -EN_SELCHANGE = 1794 -EN_DROPFILES = 1795 -EN_PROTECTED = 1796 -EN_CORRECTTEXT = 1797 -EN_STOPNOUNDO = 1798 -EN_IMECHANGE = 1799 -EN_SAVECLIPBOARD = 1800 -EN_OLEOPFAILED = 1801 -ENM_NONE = 0 -ENM_CHANGE = 1 -ENM_UPDATE = 2 -ENM_SCROLL = 4 -ENM_KEYEVENTS = 65536 -ENM_MOUSEEVENTS = 131072 -ENM_REQUESTRESIZE = 262144 -ENM_SELCHANGE = 524288 -ENM_DROPFILES = 1048576 -ENM_PROTECTED = 2097152 -ENM_CORRECTTEXT = 4194304 -ENM_IMECHANGE = 8388608 -ES_SAVESEL = 32768 -ES_SUNKEN = 16384 -ES_DISABLENOSCROLL = 8192 -ES_SELECTIONBAR = 16777216 -ES_EX_NOCALLOLEINIT = 16777216 -ES_VERTICAL = 4194304 -ES_NOIME = 524288 -ES_SELFIME = 262144 -ECO_AUTOWORDSELECTION = 1 -ECO_AUTOVSCROLL = 64 -ECO_AUTOHSCROLL = 128 -ECO_NOHIDESEL = 256 -ECO_READONLY = 2048 -ECO_WANTRETURN = 4096 -ECO_SAVESEL = 32768 -ECO_SELECTIONBAR = 16777216 -ECO_VERTICAL = 4194304 -ECOOP_SET = 1 -ECOOP_OR = 2 -ECOOP_AND = 3 -ECOOP_XOR = 4 -WB_CLASSIFY = 3 -WB_MOVEWORDLEFT = 4 -WB_MOVEWORDRIGHT = 5 -WB_LEFTBREAK = 6 -WB_RIGHTBREAK = 7 -WB_MOVEWORDPREV = 4 -WB_MOVEWORDNEXT = 5 -WB_PREVBREAK = 6 -WB_NEXTBREAK = 7 -PC_FOLLOWING = 1 -PC_LEADING = 2 -PC_OVERFLOW = 3 -PC_DELIMITER = 4 -WBF_WORDWRAP = 16 -WBF_WORDBREAK = 32 -WBF_OVERFLOW = 64 -WBF_LEVEL1 = 128 -WBF_LEVEL2 = 256 -WBF_CUSTOM = 512 -CFM_BOLD = 1 -CFM_ITALIC = 2 -CFM_UNDERLINE = 4 -CFM_STRIKEOUT = 8 -CFM_PROTECTED = 16 -CFM_SIZE = -2147483648 -CFM_COLOR = 1073741824 -CFM_FACE = 536870912 -CFM_OFFSET = 268435456 -CFM_CHARSET = 134217728 -CFE_BOLD = 1 -CFE_ITALIC = 2 -CFE_UNDERLINE = 4 -CFE_STRIKEOUT = 8 -CFE_PROTECTED = 16 
-CFE_AUTOCOLOR = 1073741824 -yHeightCharPtsMost = 1638 -SCF_SELECTION = 1 -SCF_WORD = 2 -SF_TEXT = 1 -SF_RTF = 2 -SF_RTFNOOBJS = 3 -SF_TEXTIZED = 4 -SFF_SELECTION = 32768 -SFF_PLAINRTF = 16384 -MAX_TAB_STOPS = 32 -lDefaultTab = 720 -PFM_STARTINDENT = 1 -PFM_RIGHTINDENT = 2 -PFM_OFFSET = 4 -PFM_ALIGNMENT = 8 -PFM_TABSTOPS = 16 -PFM_NUMBERING = 32 -PFM_OFFSETINDENT = -2147483648 -PFN_BULLET = 1 -PFA_LEFT = 1 -PFA_RIGHT = 2 -PFA_CENTER = 3 -WM_NOTIFY = 78 -SEL_EMPTY = 0 -SEL_TEXT = 1 -SEL_OBJECT = 2 -SEL_MULTICHAR = 4 -SEL_MULTIOBJECT = 8 -OLEOP_DOVERB = 1 -CF_RTF = "Rich Text Format" -CF_RTFNOOBJS = "Rich Text Format Without Objects" -CF_RETEXTOBJ = "RichEdit Text and Objects" - -# From wincon.h -RIGHT_ALT_PRESSED = 1 # the right alt key is pressed. -LEFT_ALT_PRESSED = 2 # the left alt key is pressed. -RIGHT_CTRL_PRESSED = 4 # the right ctrl key is pressed. -LEFT_CTRL_PRESSED = 8 # the left ctrl key is pressed. -SHIFT_PRESSED = 16 # the shift key is pressed. -NUMLOCK_ON = 32 # the numlock light is on. -SCROLLLOCK_ON = 64 # the scrolllock light is on. -CAPSLOCK_ON = 128 # the capslock light is on. -ENHANCED_KEY = 256 # the key is enhanced. -NLS_DBCSCHAR = 65536 # DBCS for JPN: SBCS/DBCS mode. -NLS_ALPHANUMERIC = 0 # DBCS for JPN: Alphanumeric mode. -NLS_KATAKANA = 131072 # DBCS for JPN: Katakana mode. -NLS_HIRAGANA = 262144 # DBCS for JPN: Hiragana mode. -NLS_ROMAN = 4194304 # DBCS for JPN: Roman/Noroman mode. -NLS_IME_CONVERSION = 8388608 # DBCS for JPN: IME conversion. -NLS_IME_DISABLE = 536870912 # DBCS for JPN: IME enable/disable. 
- -FROM_LEFT_1ST_BUTTON_PRESSED = 1 -RIGHTMOST_BUTTON_PRESSED = 2 -FROM_LEFT_2ND_BUTTON_PRESSED = 4 -FROM_LEFT_3RD_BUTTON_PRESSED = 8 -FROM_LEFT_4TH_BUTTON_PRESSED = 16 - -CTRL_C_EVENT = 0 -CTRL_BREAK_EVENT = 1 -CTRL_CLOSE_EVENT = 2 -CTRL_LOGOFF_EVENT = 5 -CTRL_SHUTDOWN_EVENT = 6 - -MOUSE_MOVED = 1 -DOUBLE_CLICK = 2 -MOUSE_WHEELED = 4 - -# property sheet window messages from prsht.h -PSM_SETCURSEL = WM_USER + 101 -PSM_REMOVEPAGE = WM_USER + 102 -PSM_ADDPAGE = WM_USER + 103 -PSM_CHANGED = WM_USER + 104 -PSM_RESTARTWINDOWS = WM_USER + 105 -PSM_REBOOTSYSTEM = WM_USER + 106 -PSM_CANCELTOCLOSE = WM_USER + 107 -PSM_QUERYSIBLINGS = WM_USER + 108 -PSM_UNCHANGED = WM_USER + 109 -PSM_APPLY = WM_USER + 110 -PSM_SETTITLEA = WM_USER + 111 -PSM_SETTITLEW = WM_USER + 120 -PSM_SETWIZBUTTONS = WM_USER + 112 -PSM_PRESSBUTTON = WM_USER + 113 -PSM_SETCURSELID = WM_USER + 114 -PSM_SETFINISHTEXTA = WM_USER + 115 -PSM_SETFINISHTEXTW = WM_USER + 121 -PSM_GETTABCONTROL = WM_USER + 116 -PSM_ISDIALOGMESSAGE = WM_USER + 117 -PSM_GETCURRENTPAGEHWND = WM_USER + 118 -PSM_INSERTPAGE = WM_USER + 119 -PSM_SETHEADERTITLEA = WM_USER + 125 -PSM_SETHEADERTITLEW = WM_USER + 126 -PSM_SETHEADERSUBTITLEA = WM_USER + 127 -PSM_SETHEADERSUBTITLEW = WM_USER + 128 -PSM_HWNDTOINDEX = WM_USER + 129 -PSM_INDEXTOHWND = WM_USER + 130 -PSM_PAGETOINDEX = WM_USER + 131 -PSM_INDEXTOPAGE = WM_USER + 132 -PSM_IDTOINDEX = WM_USER + 133 -PSM_INDEXTOID = WM_USER + 134 -PSM_GETRESULT = WM_USER + 135 -PSM_RECALCPAGESIZES = WM_USER + 136 - -# GetUserNameEx/GetComputerNameEx -NameUnknown = 0 -NameFullyQualifiedDN = 1 -NameSamCompatible = 2 -NameDisplay = 3 -NameUniqueId = 6 -NameCanonical = 7 -NameUserPrincipal = 8 -NameCanonicalEx = 9 -NameServicePrincipal = 10 -NameDnsDomain = 12 - -ComputerNameNetBIOS = 0 -ComputerNameDnsHostname = 1 -ComputerNameDnsDomain = 2 -ComputerNameDnsFullyQualified = 3 -ComputerNamePhysicalNetBIOS = 4 -ComputerNamePhysicalDnsHostname = 5 -ComputerNamePhysicalDnsDomain = 6 
-ComputerNamePhysicalDnsFullyQualified = 7 - -LWA_COLORKEY = 0x00000001 -LWA_ALPHA = 0x00000002 -ULW_COLORKEY = 0x00000001 -ULW_ALPHA = 0x00000002 -ULW_OPAQUE = 0x00000004 - -# WinDef.h -TRUE = 1 -FALSE = 0 -MAX_PATH = 260 -# WinGDI.h -AC_SRC_OVER = 0 -AC_SRC_ALPHA = 1 -GRADIENT_FILL_RECT_H = 0 -GRADIENT_FILL_RECT_V = 1 -GRADIENT_FILL_TRIANGLE = 2 -GRADIENT_FILL_OP_FLAG = 255 - -## flags used with Get/SetSystemFileCacheSize -MM_WORKING_SET_MAX_HARD_ENABLE = 1 -MM_WORKING_SET_MAX_HARD_DISABLE = 2 -MM_WORKING_SET_MIN_HARD_ENABLE = 4 -MM_WORKING_SET_MIN_HARD_DISABLE = 8 - -## Flags for GetFinalPathNameByHandle -VOLUME_NAME_DOS = 0 -VOLUME_NAME_GUID = 1 -VOLUME_NAME_NT = 2 -VOLUME_NAME_NONE = 4 -FILE_NAME_NORMALIZED = 0 -FILE_NAME_OPENED = 8 - -DEVICE_NOTIFY_WINDOW_HANDLE = 0x00000000 -DEVICE_NOTIFY_SERVICE_HANDLE = 0x00000001 - -# From Dbt.h -# Generated by h2py from Dbt.h -WM_DEVICECHANGE = 0x0219 -BSF_QUERY = 0x00000001 -BSF_IGNORECURRENTTASK = 0x00000002 -BSF_FLUSHDISK = 0x00000004 -BSF_NOHANG = 0x00000008 -BSF_POSTMESSAGE = 0x00000010 -BSF_FORCEIFHUNG = 0x00000020 -BSF_NOTIMEOUTIFNOTHUNG = 0x00000040 -BSF_MSGSRV32ISOK = -2147483648 -BSF_MSGSRV32ISOK_BIT = 31 -BSM_ALLCOMPONENTS = 0x00000000 -BSM_VXDS = 0x00000001 -BSM_NETDRIVER = 0x00000002 -BSM_INSTALLABLEDRIVERS = 0x00000004 -BSM_APPLICATIONS = 0x00000008 -DBT_APPYBEGIN = 0x0000 -DBT_APPYEND = 0x0001 -DBT_DEVNODES_CHANGED = 0x0007 -DBT_QUERYCHANGECONFIG = 0x0017 -DBT_CONFIGCHANGED = 0x0018 -DBT_CONFIGCHANGECANCELED = 0x0019 -DBT_MONITORCHANGE = 0x001B -DBT_SHELLLOGGEDON = 0x0020 -DBT_CONFIGMGAPI32 = 0x0022 -DBT_VXDINITCOMPLETE = 0x0023 -DBT_VOLLOCKQUERYLOCK = 0x8041 -DBT_VOLLOCKLOCKTAKEN = 0x8042 -DBT_VOLLOCKLOCKFAILED = 0x8043 -DBT_VOLLOCKQUERYUNLOCK = 0x8044 -DBT_VOLLOCKLOCKRELEASED = 0x8045 -DBT_VOLLOCKUNLOCKFAILED = 0x8046 -LOCKP_ALLOW_WRITES = 0x01 -LOCKP_FAIL_WRITES = 0x00 -LOCKP_FAIL_MEM_MAPPING = 0x02 -LOCKP_ALLOW_MEM_MAPPING = 0x00 -LOCKP_USER_MASK = 0x03 -LOCKP_LOCK_FOR_FORMAT = 0x04 -LOCKF_LOGICAL_LOCK 
= 0x00 -LOCKF_PHYSICAL_LOCK = 0x01 -DBT_NO_DISK_SPACE = 0x0047 -DBT_LOW_DISK_SPACE = 0x0048 -DBT_CONFIGMGPRIVATE = 0x7FFF -DBT_DEVICEARRIVAL = 0x8000 -DBT_DEVICEQUERYREMOVE = 0x8001 -DBT_DEVICEQUERYREMOVEFAILED = 0x8002 -DBT_DEVICEREMOVEPENDING = 0x8003 -DBT_DEVICEREMOVECOMPLETE = 0x8004 -DBT_DEVICETYPESPECIFIC = 0x8005 -DBT_CUSTOMEVENT = 0x8006 -DBT_DEVTYP_OEM = 0x00000000 -DBT_DEVTYP_DEVNODE = 0x00000001 -DBT_DEVTYP_VOLUME = 0x00000002 -DBT_DEVTYP_PORT = 0x00000003 -DBT_DEVTYP_NET = 0x00000004 -DBT_DEVTYP_DEVICEINTERFACE = 0x00000005 -DBT_DEVTYP_HANDLE = 0x00000006 -DBTF_MEDIA = 0x0001 -DBTF_NET = 0x0002 -DBTF_RESOURCE = 0x00000001 -DBTF_XPORT = 0x00000002 -DBTF_SLOWNET = 0x00000004 -DBT_VPOWERDAPI = 0x8100 -DBT_USERDEFINED = 0xFFFF diff --git a/lib/win32/lib/win32cryptcon.py b/lib/win32/lib/win32cryptcon.py deleted file mode 100644 index 8d71996a..00000000 --- a/lib/win32/lib/win32cryptcon.py +++ /dev/null @@ -1,1925 +0,0 @@ -# Generated by h2py from WinCrypt.h -def GET_ALG_CLASS(x): - return x & (7 << 13) - - -def GET_ALG_TYPE(x): - return x & (15 << 9) - - -def GET_ALG_SID(x): - return x & (511) - - -ALG_CLASS_ANY = 0 -ALG_CLASS_SIGNATURE = 1 << 13 -ALG_CLASS_MSG_ENCRYPT = 2 << 13 -ALG_CLASS_DATA_ENCRYPT = 3 << 13 -ALG_CLASS_HASH = 4 << 13 -ALG_CLASS_KEY_EXCHANGE = 5 << 13 -ALG_CLASS_ALL = 7 << 13 -ALG_TYPE_ANY = 0 -ALG_TYPE_DSS = 1 << 9 -ALG_TYPE_RSA = 2 << 9 -ALG_TYPE_BLOCK = 3 << 9 -ALG_TYPE_STREAM = 4 << 9 -ALG_TYPE_DH = 5 << 9 -ALG_TYPE_SECURECHANNEL = 6 << 9 -ALG_SID_ANY = 0 -ALG_SID_RSA_ANY = 0 -ALG_SID_RSA_PKCS = 1 -ALG_SID_RSA_MSATWORK = 2 -ALG_SID_RSA_ENTRUST = 3 -ALG_SID_RSA_PGP = 4 -ALG_SID_DSS_ANY = 0 -ALG_SID_DSS_PKCS = 1 -ALG_SID_DSS_DMS = 2 -ALG_SID_DES = 1 -ALG_SID_3DES = 3 -ALG_SID_DESX = 4 -ALG_SID_IDEA = 5 -ALG_SID_CAST = 6 -ALG_SID_SAFERSK64 = 7 -ALG_SID_SAFERSK128 = 8 -ALG_SID_3DES_112 = 9 -ALG_SID_CYLINK_MEK = 12 -ALG_SID_RC5 = 13 -ALG_SID_AES_128 = 14 -ALG_SID_AES_192 = 15 -ALG_SID_AES_256 = 16 -ALG_SID_AES = 17 -ALG_SID_SKIPJACK = 10 
-ALG_SID_TEK = 11 -CRYPT_MODE_CBCI = 6 -CRYPT_MODE_CFBP = 7 -CRYPT_MODE_OFBP = 8 -CRYPT_MODE_CBCOFM = 9 -CRYPT_MODE_CBCOFMI = 10 -ALG_SID_RC2 = 2 -ALG_SID_RC4 = 1 -ALG_SID_SEAL = 2 -ALG_SID_DH_SANDF = 1 -ALG_SID_DH_EPHEM = 2 -ALG_SID_AGREED_KEY_ANY = 3 -ALG_SID_KEA = 4 -ALG_SID_MD2 = 1 -ALG_SID_MD4 = 2 -ALG_SID_MD5 = 3 -ALG_SID_SHA = 4 -ALG_SID_SHA1 = 4 -ALG_SID_MAC = 5 -ALG_SID_RIPEMD = 6 -ALG_SID_RIPEMD160 = 7 -ALG_SID_SSL3SHAMD5 = 8 -ALG_SID_HMAC = 9 -ALG_SID_TLS1PRF = 10 -ALG_SID_HASH_REPLACE_OWF = 11 -ALG_SID_SHA_256 = 12 -ALG_SID_SHA_384 = 13 -ALG_SID_SHA_512 = 14 -ALG_SID_SSL3_MASTER = 1 -ALG_SID_SCHANNEL_MASTER_HASH = 2 -ALG_SID_SCHANNEL_MAC_KEY = 3 -ALG_SID_PCT1_MASTER = 4 -ALG_SID_SSL2_MASTER = 5 -ALG_SID_TLS1_MASTER = 6 -ALG_SID_SCHANNEL_ENC_KEY = 7 -ALG_SID_EXAMPLE = 80 -CALG_MD2 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD2 -CALG_MD4 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD4 -CALG_MD5 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD5 -CALG_SHA = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA -CALG_SHA1 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA1 -CALG_MAC = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MAC -CALG_RSA_SIGN = ALG_CLASS_SIGNATURE | ALG_TYPE_RSA | ALG_SID_RSA_ANY -CALG_DSS_SIGN = ALG_CLASS_SIGNATURE | ALG_TYPE_DSS | ALG_SID_DSS_ANY -CALG_NO_SIGN = ALG_CLASS_SIGNATURE | ALG_TYPE_ANY | ALG_SID_ANY -CALG_RSA_KEYX = ALG_CLASS_KEY_EXCHANGE | ALG_TYPE_RSA | ALG_SID_RSA_ANY -CALG_DES = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_DES -CALG_3DES_112 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_3DES_112 -CALG_3DES = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_3DES -CALG_DESX = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_DESX -CALG_RC2 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_RC2 -CALG_RC4 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_STREAM | ALG_SID_RC4 -CALG_SEAL = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_STREAM | ALG_SID_SEAL -CALG_DH_SF = ALG_CLASS_KEY_EXCHANGE | ALG_TYPE_DH | ALG_SID_DH_SANDF -CALG_DH_EPHEM = ALG_CLASS_KEY_EXCHANGE 
| ALG_TYPE_DH | ALG_SID_DH_EPHEM -CALG_AGREEDKEY_ANY = ALG_CLASS_KEY_EXCHANGE | ALG_TYPE_DH | ALG_SID_AGREED_KEY_ANY -CALG_KEA_KEYX = ALG_CLASS_KEY_EXCHANGE | ALG_TYPE_DH | ALG_SID_KEA -CALG_HUGHES_MD5 = ALG_CLASS_KEY_EXCHANGE | ALG_TYPE_ANY | ALG_SID_MD5 -CALG_SKIPJACK = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_SKIPJACK -CALG_TEK = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_TEK -CALG_CYLINK_MEK = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_CYLINK_MEK -CALG_SSL3_SHAMD5 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SSL3SHAMD5 -CALG_SSL3_MASTER = ALG_CLASS_MSG_ENCRYPT | ALG_TYPE_SECURECHANNEL | ALG_SID_SSL3_MASTER -CALG_SCHANNEL_MASTER_HASH = ( - ALG_CLASS_MSG_ENCRYPT | ALG_TYPE_SECURECHANNEL | ALG_SID_SCHANNEL_MASTER_HASH -) -CALG_SCHANNEL_MAC_KEY = ( - ALG_CLASS_MSG_ENCRYPT | ALG_TYPE_SECURECHANNEL | ALG_SID_SCHANNEL_MAC_KEY -) -CALG_SCHANNEL_ENC_KEY = ( - ALG_CLASS_MSG_ENCRYPT | ALG_TYPE_SECURECHANNEL | ALG_SID_SCHANNEL_ENC_KEY -) -CALG_PCT1_MASTER = ALG_CLASS_MSG_ENCRYPT | ALG_TYPE_SECURECHANNEL | ALG_SID_PCT1_MASTER -CALG_SSL2_MASTER = ALG_CLASS_MSG_ENCRYPT | ALG_TYPE_SECURECHANNEL | ALG_SID_SSL2_MASTER -CALG_TLS1_MASTER = ALG_CLASS_MSG_ENCRYPT | ALG_TYPE_SECURECHANNEL | ALG_SID_TLS1_MASTER -CALG_RC5 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_RC5 -CALG_HMAC = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HMAC -CALG_TLS1PRF = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_TLS1PRF -CALG_HASH_REPLACE_OWF = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HASH_REPLACE_OWF -CALG_AES_128 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_AES_128 -CALG_AES_192 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_AES_192 -CALG_AES_256 = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_AES_256 -CALG_AES = ALG_CLASS_DATA_ENCRYPT | ALG_TYPE_BLOCK | ALG_SID_AES -CALG_SHA_256 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_256 -CALG_SHA_384 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_384 -CALG_SHA_512 = ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_512 
-CRYPT_VERIFYCONTEXT = -268435456 -CRYPT_NEWKEYSET = 0x00000008 -CRYPT_DELETEKEYSET = 0x00000010 -CRYPT_MACHINE_KEYSET = 0x00000020 -CRYPT_SILENT = 0x00000040 -CRYPT_EXPORTABLE = 0x00000001 -CRYPT_USER_PROTECTED = 0x00000002 -CRYPT_CREATE_SALT = 0x00000004 -CRYPT_UPDATE_KEY = 0x00000008 -CRYPT_NO_SALT = 0x00000010 -CRYPT_PREGEN = 0x00000040 -CRYPT_RECIPIENT = 0x00000010 -CRYPT_INITIATOR = 0x00000040 -CRYPT_ONLINE = 0x00000080 -CRYPT_SF = 0x00000100 -CRYPT_CREATE_IV = 0x00000200 -CRYPT_KEK = 0x00000400 -CRYPT_DATA_KEY = 0x00000800 -CRYPT_VOLATILE = 0x00001000 -CRYPT_SGCKEY = 0x00002000 -CRYPT_ARCHIVABLE = 0x00004000 -RSA1024BIT_KEY = 0x04000000 -CRYPT_SERVER = 0x00000400 -KEY_LENGTH_MASK = -65536 -CRYPT_Y_ONLY = 0x00000001 -CRYPT_SSL2_FALLBACK = 0x00000002 -CRYPT_DESTROYKEY = 0x00000004 -CRYPT_OAEP = 0x00000040 -CRYPT_BLOB_VER3 = 0x00000080 -CRYPT_IPSEC_HMAC_KEY = 0x00000100 -CRYPT_DECRYPT_RSA_NO_PADDING_CHECK = 0x00000020 -CRYPT_SECRETDIGEST = 0x00000001 -CRYPT_OWF_REPL_LM_HASH = 0x00000001 -CRYPT_LITTLE_ENDIAN = 0x00000001 -CRYPT_NOHASHOID = 0x00000001 -CRYPT_TYPE2_FORMAT = 0x00000002 -CRYPT_X931_FORMAT = 0x00000004 -CRYPT_MACHINE_DEFAULT = 0x00000001 -CRYPT_USER_DEFAULT = 0x00000002 -CRYPT_DELETE_DEFAULT = 0x00000004 -SIMPLEBLOB = 0x1 -PUBLICKEYBLOB = 0x6 -PRIVATEKEYBLOB = 0x7 -PLAINTEXTKEYBLOB = 0x8 -OPAQUEKEYBLOB = 0x9 -PUBLICKEYBLOBEX = 0xA -SYMMETRICWRAPKEYBLOB = 0xB -AT_KEYEXCHANGE = 1 -AT_SIGNATURE = 2 -CRYPT_USERDATA = 1 -KP_IV = 1 -KP_SALT = 2 -KP_PADDING = 3 -KP_MODE = 4 -KP_MODE_BITS = 5 -KP_PERMISSIONS = 6 -KP_ALGID = 7 -KP_BLOCKLEN = 8 -KP_KEYLEN = 9 -KP_SALT_EX = 10 -KP_P = 11 -KP_G = 12 -KP_Q = 13 -KP_X = 14 -KP_Y = 15 -KP_RA = 16 -KP_RB = 17 -KP_INFO = 18 -KP_EFFECTIVE_KEYLEN = 19 -KP_SCHANNEL_ALG = 20 -KP_CLIENT_RANDOM = 21 -KP_SERVER_RANDOM = 22 -KP_RP = 23 -KP_PRECOMP_MD5 = 24 -KP_PRECOMP_SHA = 25 -KP_CERTIFICATE = 26 -KP_CLEAR_KEY = 27 -KP_PUB_EX_LEN = 28 -KP_PUB_EX_VAL = 29 -KP_KEYVAL = 30 -KP_ADMIN_PIN = 31 -KP_KEYEXCHANGE_PIN = 32 
-KP_SIGNATURE_PIN = 33 -KP_PREHASH = 34 -KP_ROUNDS = 35 -KP_OAEP_PARAMS = 36 -KP_CMS_KEY_INFO = 37 -KP_CMS_DH_KEY_INFO = 38 -KP_PUB_PARAMS = 39 -KP_VERIFY_PARAMS = 40 -KP_HIGHEST_VERSION = 41 -KP_GET_USE_COUNT = 42 -PKCS5_PADDING = 1 -RANDOM_PADDING = 2 -ZERO_PADDING = 3 -CRYPT_MODE_CBC = 1 -CRYPT_MODE_ECB = 2 -CRYPT_MODE_OFB = 3 -CRYPT_MODE_CFB = 4 -CRYPT_MODE_CTS = 5 -CRYPT_ENCRYPT = 0x0001 -CRYPT_DECRYPT = 0x0002 -CRYPT_EXPORT = 0x0004 -CRYPT_READ = 0x0008 -CRYPT_WRITE = 0x0010 -CRYPT_MAC = 0x0020 -CRYPT_EXPORT_KEY = 0x0040 -CRYPT_IMPORT_KEY = 0x0080 -CRYPT_ARCHIVE = 0x0100 -HP_ALGID = 0x0001 -HP_HASHVAL = 0x0002 -HP_HASHSIZE = 0x0004 -HP_HMAC_INFO = 0x0005 -HP_TLS1PRF_LABEL = 0x0006 -HP_TLS1PRF_SEED = 0x0007 - -CRYPT_FAILED = 0 -CRYPT_SUCCEED = 1 - - -def RCRYPT_SUCCEEDED(rt): - return (rt) == CRYPT_SUCCEED - - -def RCRYPT_FAILED(rt): - return (rt) == CRYPT_FAILED - - -PP_ENUMALGS = 1 -PP_ENUMCONTAINERS = 2 -PP_IMPTYPE = 3 -PP_NAME = 4 -PP_VERSION = 5 -PP_CONTAINER = 6 -PP_CHANGE_PASSWORD = 7 -PP_KEYSET_SEC_DESCR = 8 -PP_CERTCHAIN = 9 -PP_KEY_TYPE_SUBTYPE = 10 -PP_PROVTYPE = 16 -PP_KEYSTORAGE = 17 -PP_APPLI_CERT = 18 -PP_SYM_KEYSIZE = 19 -PP_SESSION_KEYSIZE = 20 -PP_UI_PROMPT = 21 -PP_ENUMALGS_EX = 22 -PP_ENUMMANDROOTS = 25 -PP_ENUMELECTROOTS = 26 -PP_KEYSET_TYPE = 27 -PP_ADMIN_PIN = 31 -PP_KEYEXCHANGE_PIN = 32 -PP_SIGNATURE_PIN = 33 -PP_SIG_KEYSIZE_INC = 34 -PP_KEYX_KEYSIZE_INC = 35 -PP_UNIQUE_CONTAINER = 36 -PP_SGC_INFO = 37 -PP_USE_HARDWARE_RNG = 38 -PP_KEYSPEC = 39 -PP_ENUMEX_SIGNING_PROT = 40 -PP_CRYPT_COUNT_KEY_USE = 41 -CRYPT_FIRST = 1 -CRYPT_NEXT = 2 -CRYPT_SGC_ENUM = 4 -CRYPT_IMPL_HARDWARE = 1 -CRYPT_IMPL_SOFTWARE = 2 -CRYPT_IMPL_MIXED = 3 -CRYPT_IMPL_UNKNOWN = 4 -CRYPT_IMPL_REMOVABLE = 8 -CRYPT_SEC_DESCR = 0x00000001 -CRYPT_PSTORE = 0x00000002 -CRYPT_UI_PROMPT = 0x00000004 -CRYPT_FLAG_PCT1 = 0x0001 -CRYPT_FLAG_SSL2 = 0x0002 -CRYPT_FLAG_SSL3 = 0x0004 -CRYPT_FLAG_TLS1 = 0x0008 -CRYPT_FLAG_IPSEC = 0x0010 -CRYPT_FLAG_SIGNING = 0x0020 -CRYPT_SGC = 0x0001 
-CRYPT_FASTSGC = 0x0002 -PP_CLIENT_HWND = 1 -PP_CONTEXT_INFO = 11 -PP_KEYEXCHANGE_KEYSIZE = 12 -PP_SIGNATURE_KEYSIZE = 13 -PP_KEYEXCHANGE_ALG = 14 -PP_SIGNATURE_ALG = 15 -PP_DELETEKEY = 24 -PROV_RSA_FULL = 1 -PROV_RSA_SIG = 2 -PROV_DSS = 3 -PROV_FORTEZZA = 4 -PROV_MS_EXCHANGE = 5 -PROV_SSL = 6 -PROV_RSA_SCHANNEL = 12 -PROV_DSS_DH = 13 -PROV_EC_ECDSA_SIG = 14 -PROV_EC_ECNRA_SIG = 15 -PROV_EC_ECDSA_FULL = 16 -PROV_EC_ECNRA_FULL = 17 -PROV_DH_SCHANNEL = 18 -PROV_SPYRUS_LYNKS = 20 -PROV_RNG = 21 -PROV_INTEL_SEC = 22 -PROV_REPLACE_OWF = 23 -PROV_RSA_AES = 24 -MS_DEF_PROV_A = "Microsoft Base Cryptographic Provider v1.0" -MS_DEF_PROV = MS_DEF_PROV_A -MS_ENHANCED_PROV_A = "Microsoft Enhanced Cryptographic Provider v1.0" -MS_ENHANCED_PROV = MS_ENHANCED_PROV_A -MS_STRONG_PROV_A = "Microsoft Strong Cryptographic Provider" -MS_STRONG_PROV = MS_STRONG_PROV_A -MS_DEF_RSA_SIG_PROV_A = "Microsoft RSA Signature Cryptographic Provider" -MS_DEF_RSA_SIG_PROV = MS_DEF_RSA_SIG_PROV_A -MS_DEF_RSA_SCHANNEL_PROV_A = "Microsoft RSA SChannel Cryptographic Provider" -MS_DEF_RSA_SCHANNEL_PROV = MS_DEF_RSA_SCHANNEL_PROV_A -MS_DEF_DSS_PROV_A = "Microsoft Base DSS Cryptographic Provider" -MS_DEF_DSS_PROV = MS_DEF_DSS_PROV_A -MS_DEF_DSS_DH_PROV_A = "Microsoft Base DSS and Diffie-Hellman Cryptographic Provider" -MS_DEF_DSS_DH_PROV = MS_DEF_DSS_DH_PROV_A -MS_ENH_DSS_DH_PROV_A = ( - "Microsoft Enhanced DSS and Diffie-Hellman Cryptographic Provider" -) -MS_ENH_DSS_DH_PROV = MS_ENH_DSS_DH_PROV_A -MS_DEF_DH_SCHANNEL_PROV_A = "Microsoft DH SChannel Cryptographic Provider" -MS_DEF_DH_SCHANNEL_PROV = MS_DEF_DH_SCHANNEL_PROV_A -MS_SCARD_PROV_A = "Microsoft Base Smart Card Crypto Provider" -MS_SCARD_PROV = MS_SCARD_PROV_A -MS_ENH_RSA_AES_PROV_A = "Microsoft Enhanced RSA and AES Cryptographic Provider" -MS_ENH_RSA_AES_PROV = MS_ENH_RSA_AES_PROV_A -MAXUIDLEN = 64 -EXPO_OFFLOAD_REG_VALUE = "ExpoOffload" -EXPO_OFFLOAD_FUNC_NAME = "OffloadModExpo" -szKEY_CRYPTOAPI_PRIVATE_KEY_OPTIONS = 
"Software\\Policies\\Microsoft\\Cryptography" -szFORCE_KEY_PROTECTION = "ForceKeyProtection" -dwFORCE_KEY_PROTECTION_DISABLED = 0x0 -dwFORCE_KEY_PROTECTION_USER_SELECT = 0x1 -dwFORCE_KEY_PROTECTION_HIGH = 0x2 -szKEY_CACHE_ENABLED = "CachePrivateKeys" -szKEY_CACHE_SECONDS = "PrivateKeyLifetimeSeconds" -CUR_BLOB_VERSION = 2 -SCHANNEL_MAC_KEY = 0x00000000 -SCHANNEL_ENC_KEY = 0x00000001 -INTERNATIONAL_USAGE = 0x00000001 -szOID_RSA = "1.2.840.113549" -szOID_PKCS = "1.2.840.113549.1" -szOID_RSA_HASH = "1.2.840.113549.2" -szOID_RSA_ENCRYPT = "1.2.840.113549.3" -szOID_PKCS_1 = "1.2.840.113549.1.1" -szOID_PKCS_2 = "1.2.840.113549.1.2" -szOID_PKCS_3 = "1.2.840.113549.1.3" -szOID_PKCS_4 = "1.2.840.113549.1.4" -szOID_PKCS_5 = "1.2.840.113549.1.5" -szOID_PKCS_6 = "1.2.840.113549.1.6" -szOID_PKCS_7 = "1.2.840.113549.1.7" -szOID_PKCS_8 = "1.2.840.113549.1.8" -szOID_PKCS_9 = "1.2.840.113549.1.9" -szOID_PKCS_10 = "1.2.840.113549.1.10" -szOID_PKCS_12 = "1.2.840.113549.1.12" -szOID_RSA_RSA = "1.2.840.113549.1.1.1" -szOID_RSA_MD2RSA = "1.2.840.113549.1.1.2" -szOID_RSA_MD4RSA = "1.2.840.113549.1.1.3" -szOID_RSA_MD5RSA = "1.2.840.113549.1.1.4" -szOID_RSA_SHA1RSA = "1.2.840.113549.1.1.5" -szOID_RSA_SETOAEP_RSA = "1.2.840.113549.1.1.6" -szOID_RSA_DH = "1.2.840.113549.1.3.1" -szOID_RSA_data = "1.2.840.113549.1.7.1" -szOID_RSA_signedData = "1.2.840.113549.1.7.2" -szOID_RSA_envelopedData = "1.2.840.113549.1.7.3" -szOID_RSA_signEnvData = "1.2.840.113549.1.7.4" -szOID_RSA_digestedData = "1.2.840.113549.1.7.5" -szOID_RSA_hashedData = "1.2.840.113549.1.7.5" -szOID_RSA_encryptedData = "1.2.840.113549.1.7.6" -szOID_RSA_emailAddr = "1.2.840.113549.1.9.1" -szOID_RSA_unstructName = "1.2.840.113549.1.9.2" -szOID_RSA_contentType = "1.2.840.113549.1.9.3" -szOID_RSA_messageDigest = "1.2.840.113549.1.9.4" -szOID_RSA_signingTime = "1.2.840.113549.1.9.5" -szOID_RSA_counterSign = "1.2.840.113549.1.9.6" -szOID_RSA_challengePwd = "1.2.840.113549.1.9.7" -szOID_RSA_unstructAddr = "1.2.840.113549.1.9.8" 
-szOID_RSA_extCertAttrs = "1.2.840.113549.1.9.9" -szOID_RSA_certExtensions = "1.2.840.113549.1.9.14" -szOID_RSA_SMIMECapabilities = "1.2.840.113549.1.9.15" -szOID_RSA_preferSignedData = "1.2.840.113549.1.9.15.1" -szOID_RSA_SMIMEalg = "1.2.840.113549.1.9.16.3" -szOID_RSA_SMIMEalgESDH = "1.2.840.113549.1.9.16.3.5" -szOID_RSA_SMIMEalgCMS3DESwrap = "1.2.840.113549.1.9.16.3.6" -szOID_RSA_SMIMEalgCMSRC2wrap = "1.2.840.113549.1.9.16.3.7" -szOID_RSA_MD2 = "1.2.840.113549.2.2" -szOID_RSA_MD4 = "1.2.840.113549.2.4" -szOID_RSA_MD5 = "1.2.840.113549.2.5" -szOID_RSA_RC2CBC = "1.2.840.113549.3.2" -szOID_RSA_RC4 = "1.2.840.113549.3.4" -szOID_RSA_DES_EDE3_CBC = "1.2.840.113549.3.7" -szOID_RSA_RC5_CBCPad = "1.2.840.113549.3.9" -szOID_ANSI_X942 = "1.2.840.10046" -szOID_ANSI_X942_DH = "1.2.840.10046.2.1" -szOID_X957 = "1.2.840.10040" -szOID_X957_DSA = "1.2.840.10040.4.1" -szOID_X957_SHA1DSA = "1.2.840.10040.4.3" -szOID_DS = "2.5" -szOID_DSALG = "2.5.8" -szOID_DSALG_CRPT = "2.5.8.1" -szOID_DSALG_HASH = "2.5.8.2" -szOID_DSALG_SIGN = "2.5.8.3" -szOID_DSALG_RSA = "2.5.8.1.1" -szOID_OIW = "1.3.14" -szOID_OIWSEC = "1.3.14.3.2" -szOID_OIWSEC_md4RSA = "1.3.14.3.2.2" -szOID_OIWSEC_md5RSA = "1.3.14.3.2.3" -szOID_OIWSEC_md4RSA2 = "1.3.14.3.2.4" -szOID_OIWSEC_desECB = "1.3.14.3.2.6" -szOID_OIWSEC_desCBC = "1.3.14.3.2.7" -szOID_OIWSEC_desOFB = "1.3.14.3.2.8" -szOID_OIWSEC_desCFB = "1.3.14.3.2.9" -szOID_OIWSEC_desMAC = "1.3.14.3.2.10" -szOID_OIWSEC_rsaSign = "1.3.14.3.2.11" -szOID_OIWSEC_dsa = "1.3.14.3.2.12" -szOID_OIWSEC_shaDSA = "1.3.14.3.2.13" -szOID_OIWSEC_mdc2RSA = "1.3.14.3.2.14" -szOID_OIWSEC_shaRSA = "1.3.14.3.2.15" -szOID_OIWSEC_dhCommMod = "1.3.14.3.2.16" -szOID_OIWSEC_desEDE = "1.3.14.3.2.17" -szOID_OIWSEC_sha = "1.3.14.3.2.18" -szOID_OIWSEC_mdc2 = "1.3.14.3.2.19" -szOID_OIWSEC_dsaComm = "1.3.14.3.2.20" -szOID_OIWSEC_dsaCommSHA = "1.3.14.3.2.21" -szOID_OIWSEC_rsaXchg = "1.3.14.3.2.22" -szOID_OIWSEC_keyHashSeal = "1.3.14.3.2.23" -szOID_OIWSEC_md2RSASign = "1.3.14.3.2.24" 
-szOID_OIWSEC_md5RSASign = "1.3.14.3.2.25" -szOID_OIWSEC_sha1 = "1.3.14.3.2.26" -szOID_OIWSEC_dsaSHA1 = "1.3.14.3.2.27" -szOID_OIWSEC_dsaCommSHA1 = "1.3.14.3.2.28" -szOID_OIWSEC_sha1RSASign = "1.3.14.3.2.29" -szOID_OIWDIR = "1.3.14.7.2" -szOID_OIWDIR_CRPT = "1.3.14.7.2.1" -szOID_OIWDIR_HASH = "1.3.14.7.2.2" -szOID_OIWDIR_SIGN = "1.3.14.7.2.3" -szOID_OIWDIR_md2 = "1.3.14.7.2.2.1" -szOID_OIWDIR_md2RSA = "1.3.14.7.2.3.1" -szOID_INFOSEC = "2.16.840.1.101.2.1" -szOID_INFOSEC_sdnsSignature = "2.16.840.1.101.2.1.1.1" -szOID_INFOSEC_mosaicSignature = "2.16.840.1.101.2.1.1.2" -szOID_INFOSEC_sdnsConfidentiality = "2.16.840.1.101.2.1.1.3" -szOID_INFOSEC_mosaicConfidentiality = "2.16.840.1.101.2.1.1.4" -szOID_INFOSEC_sdnsIntegrity = "2.16.840.1.101.2.1.1.5" -szOID_INFOSEC_mosaicIntegrity = "2.16.840.1.101.2.1.1.6" -szOID_INFOSEC_sdnsTokenProtection = "2.16.840.1.101.2.1.1.7" -szOID_INFOSEC_mosaicTokenProtection = "2.16.840.1.101.2.1.1.8" -szOID_INFOSEC_sdnsKeyManagement = "2.16.840.1.101.2.1.1.9" -szOID_INFOSEC_mosaicKeyManagement = "2.16.840.1.101.2.1.1.10" -szOID_INFOSEC_sdnsKMandSig = "2.16.840.1.101.2.1.1.11" -szOID_INFOSEC_mosaicKMandSig = "2.16.840.1.101.2.1.1.12" -szOID_INFOSEC_SuiteASignature = "2.16.840.1.101.2.1.1.13" -szOID_INFOSEC_SuiteAConfidentiality = "2.16.840.1.101.2.1.1.14" -szOID_INFOSEC_SuiteAIntegrity = "2.16.840.1.101.2.1.1.15" -szOID_INFOSEC_SuiteATokenProtection = "2.16.840.1.101.2.1.1.16" -szOID_INFOSEC_SuiteAKeyManagement = "2.16.840.1.101.2.1.1.17" -szOID_INFOSEC_SuiteAKMandSig = "2.16.840.1.101.2.1.1.18" -szOID_INFOSEC_mosaicUpdatedSig = "2.16.840.1.101.2.1.1.19" -szOID_INFOSEC_mosaicKMandUpdSig = "2.16.840.1.101.2.1.1.20" -szOID_INFOSEC_mosaicUpdatedInteg = "2.16.840.1.101.2.1.1.21" -szOID_COMMON_NAME = "2.5.4.3" -szOID_SUR_NAME = "2.5.4.4" -szOID_DEVICE_SERIAL_NUMBER = "2.5.4.5" -szOID_COUNTRY_NAME = "2.5.4.6" -szOID_LOCALITY_NAME = "2.5.4.7" -szOID_STATE_OR_PROVINCE_NAME = "2.5.4.8" -szOID_STREET_ADDRESS = "2.5.4.9" -szOID_ORGANIZATION_NAME = 
"2.5.4.10" -szOID_ORGANIZATIONAL_UNIT_NAME = "2.5.4.11" -szOID_TITLE = "2.5.4.12" -szOID_DESCRIPTION = "2.5.4.13" -szOID_SEARCH_GUIDE = "2.5.4.14" -szOID_BUSINESS_CATEGORY = "2.5.4.15" -szOID_POSTAL_ADDRESS = "2.5.4.16" -szOID_POSTAL_CODE = "2.5.4.17" -szOID_POST_OFFICE_BOX = "2.5.4.18" -szOID_PHYSICAL_DELIVERY_OFFICE_NAME = "2.5.4.19" -szOID_TELEPHONE_NUMBER = "2.5.4.20" -szOID_TELEX_NUMBER = "2.5.4.21" -szOID_TELETEXT_TERMINAL_IDENTIFIER = "2.5.4.22" -szOID_FACSIMILE_TELEPHONE_NUMBER = "2.5.4.23" -szOID_X21_ADDRESS = "2.5.4.24" -szOID_INTERNATIONAL_ISDN_NUMBER = "2.5.4.25" -szOID_REGISTERED_ADDRESS = "2.5.4.26" -szOID_DESTINATION_INDICATOR = "2.5.4.27" -szOID_PREFERRED_DELIVERY_METHOD = "2.5.4.28" -szOID_PRESENTATION_ADDRESS = "2.5.4.29" -szOID_SUPPORTED_APPLICATION_CONTEXT = "2.5.4.30" -szOID_MEMBER = "2.5.4.31" -szOID_OWNER = "2.5.4.32" -szOID_ROLE_OCCUPANT = "2.5.4.33" -szOID_SEE_ALSO = "2.5.4.34" -szOID_USER_PASSWORD = "2.5.4.35" -szOID_USER_CERTIFICATE = "2.5.4.36" -szOID_CA_CERTIFICATE = "2.5.4.37" -szOID_AUTHORITY_REVOCATION_LIST = "2.5.4.38" -szOID_CERTIFICATE_REVOCATION_LIST = "2.5.4.39" -szOID_CROSS_CERTIFICATE_PAIR = "2.5.4.40" -szOID_GIVEN_NAME = "2.5.4.42" -szOID_INITIALS = "2.5.4.43" -szOID_DN_QUALIFIER = "2.5.4.46" -szOID_DOMAIN_COMPONENT = "0.9.2342.19200300.100.1.25" -szOID_PKCS_12_FRIENDLY_NAME_ATTR = "1.2.840.113549.1.9.20" -szOID_PKCS_12_LOCAL_KEY_ID = "1.2.840.113549.1.9.21" -szOID_PKCS_12_KEY_PROVIDER_NAME_ATTR = "1.3.6.1.4.1.311.17.1" -szOID_LOCAL_MACHINE_KEYSET = "1.3.6.1.4.1.311.17.2" -szOID_KEYID_RDN = "1.3.6.1.4.1.311.10.7.1" -CERT_RDN_ANY_TYPE = 0 -CERT_RDN_ENCODED_BLOB = 1 -CERT_RDN_OCTET_STRING = 2 -CERT_RDN_NUMERIC_STRING = 3 -CERT_RDN_PRINTABLE_STRING = 4 -CERT_RDN_TELETEX_STRING = 5 -CERT_RDN_T61_STRING = 5 -CERT_RDN_VIDEOTEX_STRING = 6 -CERT_RDN_IA5_STRING = 7 -CERT_RDN_GRAPHIC_STRING = 8 -CERT_RDN_VISIBLE_STRING = 9 -CERT_RDN_ISO646_STRING = 9 -CERT_RDN_GENERAL_STRING = 10 -CERT_RDN_UNIVERSAL_STRING = 11 -CERT_RDN_INT4_STRING = 
11 -CERT_RDN_BMP_STRING = 12 -CERT_RDN_UNICODE_STRING = 12 -CERT_RDN_UTF8_STRING = 13 -CERT_RDN_TYPE_MASK = 0x000000FF -CERT_RDN_FLAGS_MASK = -16777216 -CERT_RDN_ENABLE_T61_UNICODE_FLAG = -2147483648 -CERT_RDN_ENABLE_UTF8_UNICODE_FLAG = 0x20000000 -CERT_RDN_DISABLE_CHECK_TYPE_FLAG = 0x40000000 -CERT_RDN_DISABLE_IE4_UTF8_FLAG = 0x01000000 -CERT_RSA_PUBLIC_KEY_OBJID = szOID_RSA_RSA -CERT_DEFAULT_OID_PUBLIC_KEY_SIGN = szOID_RSA_RSA -CERT_DEFAULT_OID_PUBLIC_KEY_XCHG = szOID_RSA_RSA -CERT_V1 = 0 -CERT_V2 = 1 -CERT_V3 = 2 -CERT_INFO_VERSION_FLAG = 1 -CERT_INFO_SERIAL_NUMBER_FLAG = 2 -CERT_INFO_SIGNATURE_ALGORITHM_FLAG = 3 -CERT_INFO_ISSUER_FLAG = 4 -CERT_INFO_NOT_BEFORE_FLAG = 5 -CERT_INFO_NOT_AFTER_FLAG = 6 -CERT_INFO_SUBJECT_FLAG = 7 -CERT_INFO_SUBJECT_PUBLIC_KEY_INFO_FLAG = 8 -CERT_INFO_ISSUER_UNIQUE_ID_FLAG = 9 -CERT_INFO_SUBJECT_UNIQUE_ID_FLAG = 10 -CERT_INFO_EXTENSION_FLAG = 11 -CRL_V1 = 0 -CRL_V2 = 1 -CERT_REQUEST_V1 = 0 -CERT_KEYGEN_REQUEST_V1 = 0 -CTL_V1 = 0 -CERT_ENCODING_TYPE_MASK = 0x0000FFFF -CMSG_ENCODING_TYPE_MASK = -65536 - - -def GET_CERT_ENCODING_TYPE(X): - return X & CERT_ENCODING_TYPE_MASK - - -def GET_CMSG_ENCODING_TYPE(X): - return X & CMSG_ENCODING_TYPE_MASK - - -CRYPT_ASN_ENCODING = 0x00000001 -CRYPT_NDR_ENCODING = 0x00000002 -X509_ASN_ENCODING = 0x00000001 -X509_NDR_ENCODING = 0x00000002 -PKCS_7_ASN_ENCODING = 0x00010000 -PKCS_7_NDR_ENCODING = 0x00020000 -CRYPT_FORMAT_STR_MULTI_LINE = 0x0001 -CRYPT_FORMAT_STR_NO_HEX = 0x0010 -CRYPT_FORMAT_SIMPLE = 0x0001 -CRYPT_FORMAT_X509 = 0x0002 -CRYPT_FORMAT_OID = 0x0004 -CRYPT_FORMAT_RDN_SEMICOLON = 0x0100 -CRYPT_FORMAT_RDN_CRLF = 0x0200 -CRYPT_FORMAT_RDN_UNQUOTE = 0x0400 -CRYPT_FORMAT_RDN_REVERSE = 0x0800 -CRYPT_FORMAT_COMMA = 0x1000 -CRYPT_FORMAT_SEMICOLON = CRYPT_FORMAT_RDN_SEMICOLON -CRYPT_FORMAT_CRLF = CRYPT_FORMAT_RDN_CRLF -CRYPT_ENCODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8 -CRYPT_ENCODE_ALLOC_FLAG = 0x8000 -CRYPT_UNICODE_NAME_ENCODE_ENABLE_T61_UNICODE_FLAG = CERT_RDN_ENABLE_T61_UNICODE_FLAG 
-CRYPT_UNICODE_NAME_ENCODE_ENABLE_UTF8_UNICODE_FLAG = CERT_RDN_ENABLE_UTF8_UNICODE_FLAG -CRYPT_UNICODE_NAME_ENCODE_DISABLE_CHECK_TYPE_FLAG = CERT_RDN_DISABLE_CHECK_TYPE_FLAG -CRYPT_SORTED_CTL_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x10000 -CRYPT_DECODE_NOCOPY_FLAG = 0x1 -CRYPT_DECODE_TO_BE_SIGNED_FLAG = 0x2 -CRYPT_DECODE_SHARE_OID_STRING_FLAG = 0x4 -CRYPT_DECODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8 -CRYPT_DECODE_ALLOC_FLAG = 0x8000 -CRYPT_UNICODE_NAME_DECODE_DISABLE_IE4_UTF8_FLAG = CERT_RDN_DISABLE_IE4_UTF8_FLAG - -CRYPT_ENCODE_DECODE_NONE = 0 -X509_CERT = 1 -X509_CERT_TO_BE_SIGNED = 2 -X509_CERT_CRL_TO_BE_SIGNED = 3 -X509_CERT_REQUEST_TO_BE_SIGNED = 4 -X509_EXTENSIONS = 5 -X509_NAME_VALUE = 6 -X509_NAME = 7 -X509_PUBLIC_KEY_INFO = 8 -X509_AUTHORITY_KEY_ID = 9 -X509_KEY_ATTRIBUTES = 10 -X509_KEY_USAGE_RESTRICTION = 11 -X509_ALTERNATE_NAME = 12 -X509_BASIC_CONSTRAINTS = 13 -X509_KEY_USAGE = 14 -X509_BASIC_CONSTRAINTS2 = 15 -X509_CERT_POLICIES = 16 -PKCS_UTC_TIME = 17 -PKCS_TIME_REQUEST = 18 -RSA_CSP_PUBLICKEYBLOB = 19 -X509_UNICODE_NAME = 20 -X509_KEYGEN_REQUEST_TO_BE_SIGNED = 21 -PKCS_ATTRIBUTE = 22 -PKCS_CONTENT_INFO_SEQUENCE_OF_ANY = 23 -X509_UNICODE_NAME_VALUE = 24 -X509_ANY_STRING = X509_NAME_VALUE -X509_UNICODE_ANY_STRING = X509_UNICODE_NAME_VALUE -X509_OCTET_STRING = 25 -X509_BITS = 26 -X509_INTEGER = 27 -X509_MULTI_BYTE_INTEGER = 28 -X509_ENUMERATED = 29 -X509_CHOICE_OF_TIME = 30 -X509_AUTHORITY_KEY_ID2 = 31 -X509_AUTHORITY_INFO_ACCESS = 32 -X509_SUBJECT_INFO_ACCESS = X509_AUTHORITY_INFO_ACCESS -X509_CRL_REASON_CODE = X509_ENUMERATED -PKCS_CONTENT_INFO = 33 -X509_SEQUENCE_OF_ANY = 34 -X509_CRL_DIST_POINTS = 35 -X509_ENHANCED_KEY_USAGE = 36 -PKCS_CTL = 37 -X509_MULTI_BYTE_UINT = 38 -X509_DSS_PUBLICKEY = X509_MULTI_BYTE_UINT -X509_DSS_PARAMETERS = 39 -X509_DSS_SIGNATURE = 40 -PKCS_RC2_CBC_PARAMETERS = 41 -PKCS_SMIME_CAPABILITIES = 42 -X509_QC_STATEMENTS_EXT = 42 -PKCS_RSA_PRIVATE_KEY = 43 -PKCS_PRIVATE_KEY_INFO = 44 -PKCS_ENCRYPTED_PRIVATE_KEY_INFO = 45 
-X509_PKIX_POLICY_QUALIFIER_USERNOTICE = 46 -X509_DH_PUBLICKEY = X509_MULTI_BYTE_UINT -X509_DH_PARAMETERS = 47 -PKCS_ATTRIBUTES = 48 -PKCS_SORTED_CTL = 49 -X509_ECC_SIGNATURE = 47 -X942_DH_PARAMETERS = 50 -X509_BITS_WITHOUT_TRAILING_ZEROES = 51 -X942_OTHER_INFO = 52 -X509_CERT_PAIR = 53 -X509_ISSUING_DIST_POINT = 54 -X509_NAME_CONSTRAINTS = 55 -X509_POLICY_MAPPINGS = 56 -X509_POLICY_CONSTRAINTS = 57 -X509_CROSS_CERT_DIST_POINTS = 58 -CMC_DATA = 59 -CMC_RESPONSE = 60 -CMC_STATUS = 61 -CMC_ADD_EXTENSIONS = 62 -CMC_ADD_ATTRIBUTES = 63 -X509_CERTIFICATE_TEMPLATE = 64 -OCSP_SIGNED_REQUEST = 65 -OCSP_REQUEST = 66 -OCSP_RESPONSE = 67 -OCSP_BASIC_SIGNED_RESPONSE = 68 -OCSP_BASIC_RESPONSE = 69 -X509_LOGOTYPE_EXT = 70 -X509_BIOMETRIC_EXT = 71 -CNG_RSA_PUBLIC_KEY_BLOB = 72 -X509_OBJECT_IDENTIFIER = 73 -X509_ALGORITHM_IDENTIFIER = 74 -PKCS_RSA_SSA_PSS_PARAMETERS = 75 -PKCS_RSAES_OAEP_PARAMETERS = 76 -ECC_CMS_SHARED_INFO = 77 -TIMESTAMP_REQUEST = 78 -TIMESTAMP_RESPONSE = 79 -TIMESTAMP_INFO = 80 -X509_CERT_BUNDLE = 81 -PKCS7_SIGNER_INFO = 500 -CMS_SIGNER_INFO = 501 - -szOID_AUTHORITY_KEY_IDENTIFIER = "2.5.29.1" -szOID_KEY_ATTRIBUTES = "2.5.29.2" -szOID_CERT_POLICIES_95 = "2.5.29.3" -szOID_KEY_USAGE_RESTRICTION = "2.5.29.4" -szOID_SUBJECT_ALT_NAME = "2.5.29.7" -szOID_ISSUER_ALT_NAME = "2.5.29.8" -szOID_BASIC_CONSTRAINTS = "2.5.29.10" -szOID_KEY_USAGE = "2.5.29.15" -szOID_PRIVATEKEY_USAGE_PERIOD = "2.5.29.16" -szOID_BASIC_CONSTRAINTS2 = "2.5.29.19" -szOID_CERT_POLICIES = "2.5.29.32" -szOID_ANY_CERT_POLICY = "2.5.29.32.0" -szOID_AUTHORITY_KEY_IDENTIFIER2 = "2.5.29.35" -szOID_SUBJECT_KEY_IDENTIFIER = "2.5.29.14" -szOID_SUBJECT_ALT_NAME2 = "2.5.29.17" -szOID_ISSUER_ALT_NAME2 = "2.5.29.18" -szOID_CRL_REASON_CODE = "2.5.29.21" -szOID_REASON_CODE_HOLD = "2.5.29.23" -szOID_CRL_DIST_POINTS = "2.5.29.31" -szOID_ENHANCED_KEY_USAGE = "2.5.29.37" -szOID_CRL_NUMBER = "2.5.29.20" -szOID_DELTA_CRL_INDICATOR = "2.5.29.27" -szOID_ISSUING_DIST_POINT = "2.5.29.28" -szOID_FRESHEST_CRL = "2.5.29.46" 
-szOID_NAME_CONSTRAINTS = "2.5.29.30" -szOID_POLICY_MAPPINGS = "2.5.29.33" -szOID_LEGACY_POLICY_MAPPINGS = "2.5.29.5" -szOID_POLICY_CONSTRAINTS = "2.5.29.36" -szOID_RENEWAL_CERTIFICATE = "1.3.6.1.4.1.311.13.1" -szOID_ENROLLMENT_NAME_VALUE_PAIR = "1.3.6.1.4.1.311.13.2.1" -szOID_ENROLLMENT_CSP_PROVIDER = "1.3.6.1.4.1.311.13.2.2" -szOID_OS_VERSION = "1.3.6.1.4.1.311.13.2.3" -szOID_ENROLLMENT_AGENT = "1.3.6.1.4.1.311.20.2.1" -szOID_PKIX = "1.3.6.1.5.5.7" -szOID_PKIX_PE = "1.3.6.1.5.5.7.1" -szOID_AUTHORITY_INFO_ACCESS = "1.3.6.1.5.5.7.1.1" -szOID_CERT_EXTENSIONS = "1.3.6.1.4.1.311.2.1.14" -szOID_NEXT_UPDATE_LOCATION = "1.3.6.1.4.1.311.10.2" -szOID_REMOVE_CERTIFICATE = "1.3.6.1.4.1.311.10.8.1" -szOID_CROSS_CERT_DIST_POINTS = "1.3.6.1.4.1.311.10.9.1" -szOID_CTL = "1.3.6.1.4.1.311.10.1" -szOID_SORTED_CTL = "1.3.6.1.4.1.311.10.1.1" -szOID_SERIALIZED = "1.3.6.1.4.1.311.10.3.3.1" -szOID_NT_PRINCIPAL_NAME = "1.3.6.1.4.1.311.20.2.3" -szOID_PRODUCT_UPDATE = "1.3.6.1.4.1.311.31.1" -szOID_ANY_APPLICATION_POLICY = "1.3.6.1.4.1.311.10.12.1" -szOID_AUTO_ENROLL_CTL_USAGE = "1.3.6.1.4.1.311.20.1" -szOID_ENROLL_CERTTYPE_EXTENSION = "1.3.6.1.4.1.311.20.2" -szOID_CERT_MANIFOLD = "1.3.6.1.4.1.311.20.3" -szOID_CERTSRV_CA_VERSION = "1.3.6.1.4.1.311.21.1" -szOID_CERTSRV_PREVIOUS_CERT_HASH = "1.3.6.1.4.1.311.21.2" -szOID_CRL_VIRTUAL_BASE = "1.3.6.1.4.1.311.21.3" -szOID_CRL_NEXT_PUBLISH = "1.3.6.1.4.1.311.21.4" -szOID_KP_CA_EXCHANGE = "1.3.6.1.4.1.311.21.5" -szOID_KP_KEY_RECOVERY_AGENT = "1.3.6.1.4.1.311.21.6" -szOID_CERTIFICATE_TEMPLATE = "1.3.6.1.4.1.311.21.7" -szOID_ENTERPRISE_OID_ROOT = "1.3.6.1.4.1.311.21.8" -szOID_RDN_DUMMY_SIGNER = "1.3.6.1.4.1.311.21.9" -szOID_APPLICATION_CERT_POLICIES = "1.3.6.1.4.1.311.21.10" -szOID_APPLICATION_POLICY_MAPPINGS = "1.3.6.1.4.1.311.21.11" -szOID_APPLICATION_POLICY_CONSTRAINTS = "1.3.6.1.4.1.311.21.12" -szOID_ARCHIVED_KEY_ATTR = "1.3.6.1.4.1.311.21.13" -szOID_CRL_SELF_CDP = "1.3.6.1.4.1.311.21.14" -szOID_REQUIRE_CERT_CHAIN_POLICY = "1.3.6.1.4.1.311.21.15" 
-szOID_ARCHIVED_KEY_CERT_HASH = "1.3.6.1.4.1.311.21.16" -szOID_ISSUED_CERT_HASH = "1.3.6.1.4.1.311.21.17" -szOID_DS_EMAIL_REPLICATION = "1.3.6.1.4.1.311.21.19" -szOID_REQUEST_CLIENT_INFO = "1.3.6.1.4.1.311.21.20" -szOID_ENCRYPTED_KEY_HASH = "1.3.6.1.4.1.311.21.21" -szOID_CERTSRV_CROSSCA_VERSION = "1.3.6.1.4.1.311.21.22" -szOID_NTDS_REPLICATION = "1.3.6.1.4.1.311.25.1" -szOID_SUBJECT_DIR_ATTRS = "2.5.29.9" -szOID_PKIX_KP = "1.3.6.1.5.5.7.3" -szOID_PKIX_KP_SERVER_AUTH = "1.3.6.1.5.5.7.3.1" -szOID_PKIX_KP_CLIENT_AUTH = "1.3.6.1.5.5.7.3.2" -szOID_PKIX_KP_CODE_SIGNING = "1.3.6.1.5.5.7.3.3" -szOID_PKIX_KP_EMAIL_PROTECTION = "1.3.6.1.5.5.7.3.4" -szOID_PKIX_KP_IPSEC_END_SYSTEM = "1.3.6.1.5.5.7.3.5" -szOID_PKIX_KP_IPSEC_TUNNEL = "1.3.6.1.5.5.7.3.6" -szOID_PKIX_KP_IPSEC_USER = "1.3.6.1.5.5.7.3.7" -szOID_PKIX_KP_TIMESTAMP_SIGNING = "1.3.6.1.5.5.7.3.8" -szOID_IPSEC_KP_IKE_INTERMEDIATE = "1.3.6.1.5.5.8.2.2" -szOID_KP_CTL_USAGE_SIGNING = "1.3.6.1.4.1.311.10.3.1" -szOID_KP_TIME_STAMP_SIGNING = "1.3.6.1.4.1.311.10.3.2" -szOID_SERVER_GATED_CRYPTO = "1.3.6.1.4.1.311.10.3.3" -szOID_SGC_NETSCAPE = "2.16.840.1.113730.4.1" -szOID_KP_EFS = "1.3.6.1.4.1.311.10.3.4" -szOID_EFS_RECOVERY = "1.3.6.1.4.1.311.10.3.4.1" -szOID_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.5" -szOID_NT5_CRYPTO = "1.3.6.1.4.1.311.10.3.6" -szOID_OEM_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.7" -szOID_EMBEDDED_NT_CRYPTO = "1.3.6.1.4.1.311.10.3.8" -szOID_ROOT_LIST_SIGNER = "1.3.6.1.4.1.311.10.3.9" -szOID_KP_QUALIFIED_SUBORDINATION = "1.3.6.1.4.1.311.10.3.10" -szOID_KP_KEY_RECOVERY = "1.3.6.1.4.1.311.10.3.11" -szOID_KP_DOCUMENT_SIGNING = "1.3.6.1.4.1.311.10.3.12" -szOID_KP_LIFETIME_SIGNING = "1.3.6.1.4.1.311.10.3.13" -szOID_KP_MOBILE_DEVICE_SOFTWARE = "1.3.6.1.4.1.311.10.3.14" -szOID_DRM = "1.3.6.1.4.1.311.10.5.1" -szOID_DRM_INDIVIDUALIZATION = "1.3.6.1.4.1.311.10.5.2" -szOID_LICENSES = "1.3.6.1.4.1.311.10.6.1" -szOID_LICENSE_SERVER = "1.3.6.1.4.1.311.10.6.2" -szOID_KP_SMARTCARD_LOGON = "1.3.6.1.4.1.311.20.2.2" 
-szOID_YESNO_TRUST_ATTR = "1.3.6.1.4.1.311.10.4.1" -szOID_PKIX_POLICY_QUALIFIER_CPS = "1.3.6.1.5.5.7.2.1" -szOID_PKIX_POLICY_QUALIFIER_USERNOTICE = "1.3.6.1.5.5.7.2.2" -szOID_CERT_POLICIES_95_QUALIFIER1 = "2.16.840.1.113733.1.7.1.1" -CERT_UNICODE_RDN_ERR_INDEX_MASK = 0x3FF -CERT_UNICODE_RDN_ERR_INDEX_SHIFT = 22 -CERT_UNICODE_ATTR_ERR_INDEX_MASK = 0x003F -CERT_UNICODE_ATTR_ERR_INDEX_SHIFT = 16 -CERT_UNICODE_VALUE_ERR_INDEX_MASK = 0x0000FFFF -CERT_UNICODE_VALUE_ERR_INDEX_SHIFT = 0 -CERT_DIGITAL_SIGNATURE_KEY_USAGE = 0x80 -CERT_NON_REPUDIATION_KEY_USAGE = 0x40 -CERT_KEY_ENCIPHERMENT_KEY_USAGE = 0x20 -CERT_DATA_ENCIPHERMENT_KEY_USAGE = 0x10 -CERT_KEY_AGREEMENT_KEY_USAGE = 0x08 -CERT_KEY_CERT_SIGN_KEY_USAGE = 0x04 -CERT_OFFLINE_CRL_SIGN_KEY_USAGE = 0x02 -CERT_CRL_SIGN_KEY_USAGE = 0x02 -CERT_ENCIPHER_ONLY_KEY_USAGE = 0x01 -CERT_DECIPHER_ONLY_KEY_USAGE = 0x80 -CERT_ALT_NAME_OTHER_NAME = 1 -CERT_ALT_NAME_RFC822_NAME = 2 -CERT_ALT_NAME_DNS_NAME = 3 -CERT_ALT_NAME_X400_ADDRESS = 4 -CERT_ALT_NAME_DIRECTORY_NAME = 5 -CERT_ALT_NAME_EDI_PARTY_NAME = 6 -CERT_ALT_NAME_URL = 7 -CERT_ALT_NAME_IP_ADDRESS = 8 -CERT_ALT_NAME_REGISTERED_ID = 9 -CERT_ALT_NAME_ENTRY_ERR_INDEX_MASK = 0xFF -CERT_ALT_NAME_ENTRY_ERR_INDEX_SHIFT = 16 -CERT_ALT_NAME_VALUE_ERR_INDEX_MASK = 0x0000FFFF -CERT_ALT_NAME_VALUE_ERR_INDEX_SHIFT = 0 -CERT_CA_SUBJECT_FLAG = 0x80 -CERT_END_ENTITY_SUBJECT_FLAG = 0x40 -szOID_PKIX_ACC_DESCR = "1.3.6.1.5.5.7.48" -szOID_PKIX_OCSP = "1.3.6.1.5.5.7.48.1" -szOID_PKIX_CA_ISSUERS = "1.3.6.1.5.5.7.48.2" -CRL_REASON_UNSPECIFIED = 0 -CRL_REASON_KEY_COMPROMISE = 1 -CRL_REASON_CA_COMPROMISE = 2 -CRL_REASON_AFFILIATION_CHANGED = 3 -CRL_REASON_SUPERSEDED = 4 -CRL_REASON_CESSATION_OF_OPERATION = 5 -CRL_REASON_CERTIFICATE_HOLD = 6 -CRL_REASON_REMOVE_FROM_CRL = 8 -CRL_DIST_POINT_NO_NAME = 0 -CRL_DIST_POINT_FULL_NAME = 1 -CRL_DIST_POINT_ISSUER_RDN_NAME = 2 -CRL_REASON_UNUSED_FLAG = 0x80 -CRL_REASON_KEY_COMPROMISE_FLAG = 0x40 -CRL_REASON_CA_COMPROMISE_FLAG = 0x20 
-CRL_REASON_AFFILIATION_CHANGED_FLAG = 0x10 -CRL_REASON_SUPERSEDED_FLAG = 0x08 -CRL_REASON_CESSATION_OF_OPERATION_FLAG = 0x04 -CRL_REASON_CERTIFICATE_HOLD_FLAG = 0x02 -CRL_DIST_POINT_ERR_INDEX_MASK = 0x7F -CRL_DIST_POINT_ERR_INDEX_SHIFT = 24 - -CRL_DIST_POINT_ERR_CRL_ISSUER_BIT = -2147483648 - -CROSS_CERT_DIST_POINT_ERR_INDEX_MASK = 0xFF -CROSS_CERT_DIST_POINT_ERR_INDEX_SHIFT = 24 - -CERT_EXCLUDED_SUBTREE_BIT = -2147483648 - -SORTED_CTL_EXT_FLAGS_OFFSET = 0 * 4 -SORTED_CTL_EXT_COUNT_OFFSET = 1 * 4 -SORTED_CTL_EXT_MAX_COLLISION_OFFSET = 2 * 4 -SORTED_CTL_EXT_HASH_BUCKET_OFFSET = 3 * 4 -SORTED_CTL_EXT_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x1 -CERT_DSS_R_LEN = 20 -CERT_DSS_S_LEN = 20 -CERT_DSS_SIGNATURE_LEN = CERT_DSS_R_LEN + CERT_DSS_S_LEN -CERT_MAX_ASN_ENCODED_DSS_SIGNATURE_LEN = 2 + 2 * (2 + 20 + 1) -CRYPT_X942_COUNTER_BYTE_LENGTH = 4 -CRYPT_X942_KEY_LENGTH_BYTE_LENGTH = 4 -CRYPT_X942_PUB_INFO_BYTE_LENGTH = 512 / 8 -CRYPT_RC2_40BIT_VERSION = 160 -CRYPT_RC2_56BIT_VERSION = 52 -CRYPT_RC2_64BIT_VERSION = 120 -CRYPT_RC2_128BIT_VERSION = 58 -szOID_VERISIGN_PRIVATE_6_9 = "2.16.840.1.113733.1.6.9" -szOID_VERISIGN_ONSITE_JURISDICTION_HASH = "2.16.840.1.113733.1.6.11" -szOID_VERISIGN_BITSTRING_6_13 = "2.16.840.1.113733.1.6.13" -szOID_VERISIGN_ISS_STRONG_CRYPTO = "2.16.840.1.113733.1.8.1" -szOID_NETSCAPE = "2.16.840.1.113730" -szOID_NETSCAPE_CERT_EXTENSION = "2.16.840.1.113730.1" -szOID_NETSCAPE_CERT_TYPE = "2.16.840.1.113730.1.1" -szOID_NETSCAPE_BASE_URL = "2.16.840.1.113730.1.2" -szOID_NETSCAPE_REVOCATION_URL = "2.16.840.1.113730.1.3" -szOID_NETSCAPE_CA_REVOCATION_URL = "2.16.840.1.113730.1.4" -szOID_NETSCAPE_CERT_RENEWAL_URL = "2.16.840.1.113730.1.7" -szOID_NETSCAPE_CA_POLICY_URL = "2.16.840.1.113730.1.8" -szOID_NETSCAPE_SSL_SERVER_NAME = "2.16.840.1.113730.1.12" -szOID_NETSCAPE_COMMENT = "2.16.840.1.113730.1.13" -szOID_NETSCAPE_DATA_TYPE = "2.16.840.1.113730.2" -szOID_NETSCAPE_CERT_SEQUENCE = "2.16.840.1.113730.2.5" -NETSCAPE_SSL_CLIENT_AUTH_CERT_TYPE = 0x80 
-NETSCAPE_SSL_SERVER_AUTH_CERT_TYPE = 0x40 -NETSCAPE_SMIME_CERT_TYPE = 0x20 -NETSCAPE_SIGN_CERT_TYPE = 0x10 -NETSCAPE_SSL_CA_CERT_TYPE = 0x04 -NETSCAPE_SMIME_CA_CERT_TYPE = 0x02 -NETSCAPE_SIGN_CA_CERT_TYPE = 0x01 -szOID_CT_PKI_DATA = "1.3.6.1.5.5.7.12.2" -szOID_CT_PKI_RESPONSE = "1.3.6.1.5.5.7.12.3" -szOID_PKIX_NO_SIGNATURE = "1.3.6.1.5.5.7.6.2" -szOID_CMC = "1.3.6.1.5.5.7.7" -szOID_CMC_STATUS_INFO = "1.3.6.1.5.5.7.7.1" -szOID_CMC_IDENTIFICATION = "1.3.6.1.5.5.7.7.2" -szOID_CMC_IDENTITY_PROOF = "1.3.6.1.5.5.7.7.3" -szOID_CMC_DATA_RETURN = "1.3.6.1.5.5.7.7.4" -szOID_CMC_TRANSACTION_ID = "1.3.6.1.5.5.7.7.5" -szOID_CMC_SENDER_NONCE = "1.3.6.1.5.5.7.7.6" -szOID_CMC_RECIPIENT_NONCE = "1.3.6.1.5.5.7.7.7" -szOID_CMC_ADD_EXTENSIONS = "1.3.6.1.5.5.7.7.8" -szOID_CMC_ENCRYPTED_POP = "1.3.6.1.5.5.7.7.9" -szOID_CMC_DECRYPTED_POP = "1.3.6.1.5.5.7.7.10" -szOID_CMC_LRA_POP_WITNESS = "1.3.6.1.5.5.7.7.11" -szOID_CMC_GET_CERT = "1.3.6.1.5.5.7.7.15" -szOID_CMC_GET_CRL = "1.3.6.1.5.5.7.7.16" -szOID_CMC_REVOKE_REQUEST = "1.3.6.1.5.5.7.7.17" -szOID_CMC_REG_INFO = "1.3.6.1.5.5.7.7.18" -szOID_CMC_RESPONSE_INFO = "1.3.6.1.5.5.7.7.19" -szOID_CMC_QUERY_PENDING = "1.3.6.1.5.5.7.7.21" -szOID_CMC_ID_POP_LINK_RANDOM = "1.3.6.1.5.5.7.7.22" -szOID_CMC_ID_POP_LINK_WITNESS = "1.3.6.1.5.5.7.7.23" -szOID_CMC_ID_CONFIRM_CERT_ACCEPTANCE = "1.3.6.1.5.5.7.7.24" -szOID_CMC_ADD_ATTRIBUTES = "1.3.6.1.4.1.311.10.10.1" -CMC_TAGGED_CERT_REQUEST_CHOICE = 1 -CMC_OTHER_INFO_NO_CHOICE = 0 -CMC_OTHER_INFO_FAIL_CHOICE = 1 -CMC_OTHER_INFO_PEND_CHOICE = 2 -CMC_STATUS_SUCCESS = 0 -CMC_STATUS_FAILED = 2 -CMC_STATUS_PENDING = 3 -CMC_STATUS_NO_SUPPORT = 4 -CMC_STATUS_CONFIRM_REQUIRED = 5 -CMC_FAIL_BAD_ALG = 0 -CMC_FAIL_BAD_MESSAGE_CHECK = 1 -CMC_FAIL_BAD_REQUEST = 2 -CMC_FAIL_BAD_TIME = 3 -CMC_FAIL_BAD_CERT_ID = 4 -CMC_FAIL_UNSUPORTED_EXT = 5 -CMC_FAIL_MUST_ARCHIVE_KEYS = 6 -CMC_FAIL_BAD_IDENTITY = 7 -CMC_FAIL_POP_REQUIRED = 8 -CMC_FAIL_POP_FAILED = 9 -CMC_FAIL_NO_KEY_REUSE = 10 -CMC_FAIL_INTERNAL_CA_ERROR = 11 
-CMC_FAIL_TRY_LATER = 12 -CRYPT_OID_ENCODE_OBJECT_FUNC = "CryptDllEncodeObject" -CRYPT_OID_DECODE_OBJECT_FUNC = "CryptDllDecodeObject" -CRYPT_OID_ENCODE_OBJECT_EX_FUNC = "CryptDllEncodeObjectEx" -CRYPT_OID_DECODE_OBJECT_EX_FUNC = "CryptDllDecodeObjectEx" -CRYPT_OID_CREATE_COM_OBJECT_FUNC = "CryptDllCreateCOMObject" -CRYPT_OID_VERIFY_REVOCATION_FUNC = "CertDllVerifyRevocation" -CRYPT_OID_VERIFY_CTL_USAGE_FUNC = "CertDllVerifyCTLUsage" -CRYPT_OID_FORMAT_OBJECT_FUNC = "CryptDllFormatObject" -CRYPT_OID_FIND_OID_INFO_FUNC = "CryptDllFindOIDInfo" -CRYPT_OID_FIND_LOCALIZED_NAME_FUNC = "CryptDllFindLocalizedName" - -CRYPT_OID_REGPATH = "Software\\Microsoft\\Cryptography\\OID" -CRYPT_OID_REG_ENCODING_TYPE_PREFIX = "EncodingType " -CRYPT_OID_REG_DLL_VALUE_NAME = "Dll" -CRYPT_OID_REG_FUNC_NAME_VALUE_NAME = "FuncName" -CRYPT_OID_REG_FUNC_NAME_VALUE_NAME_A = "FuncName" -CRYPT_OID_REG_FLAGS_VALUE_NAME = "CryptFlags" -CRYPT_DEFAULT_OID = "DEFAULT" -CRYPT_INSTALL_OID_FUNC_BEFORE_FLAG = 1 -CRYPT_GET_INSTALLED_OID_FUNC_FLAG = 0x1 -CRYPT_REGISTER_FIRST_INDEX = 0 -CRYPT_REGISTER_LAST_INDEX = -1 -CRYPT_MATCH_ANY_ENCODING_TYPE = -1 -CRYPT_HASH_ALG_OID_GROUP_ID = 1 -CRYPT_ENCRYPT_ALG_OID_GROUP_ID = 2 -CRYPT_PUBKEY_ALG_OID_GROUP_ID = 3 -CRYPT_SIGN_ALG_OID_GROUP_ID = 4 -CRYPT_RDN_ATTR_OID_GROUP_ID = 5 -CRYPT_EXT_OR_ATTR_OID_GROUP_ID = 6 -CRYPT_ENHKEY_USAGE_OID_GROUP_ID = 7 -CRYPT_POLICY_OID_GROUP_ID = 8 -CRYPT_TEMPLATE_OID_GROUP_ID = 9 -CRYPT_LAST_OID_GROUP_ID = 9 -CRYPT_FIRST_ALG_OID_GROUP_ID = CRYPT_HASH_ALG_OID_GROUP_ID -CRYPT_LAST_ALG_OID_GROUP_ID = CRYPT_SIGN_ALG_OID_GROUP_ID -CRYPT_OID_INHIBIT_SIGNATURE_FORMAT_FLAG = 0x1 -CRYPT_OID_USE_PUBKEY_PARA_FOR_PKCS7_FLAG = 0x2 -CRYPT_OID_NO_NULL_ALGORITHM_PARA_FLAG = 0x4 -CRYPT_OID_INFO_OID_KEY = 1 -CRYPT_OID_INFO_NAME_KEY = 2 -CRYPT_OID_INFO_ALGID_KEY = 3 -CRYPT_OID_INFO_SIGN_KEY = 4 -CRYPT_INSTALL_OID_INFO_BEFORE_FLAG = 1 -CRYPT_LOCALIZED_NAME_ENCODING_TYPE = 0 -CRYPT_LOCALIZED_NAME_OID = "LocalizedNames" -szOID_PKCS_7_DATA = 
"1.2.840.113549.1.7.1" -szOID_PKCS_7_SIGNED = "1.2.840.113549.1.7.2" -szOID_PKCS_7_ENVELOPED = "1.2.840.113549.1.7.3" -szOID_PKCS_7_SIGNEDANDENVELOPED = "1.2.840.113549.1.7.4" -szOID_PKCS_7_DIGESTED = "1.2.840.113549.1.7.5" -szOID_PKCS_7_ENCRYPTED = "1.2.840.113549.1.7.6" -szOID_PKCS_9_CONTENT_TYPE = "1.2.840.113549.1.9.3" -szOID_PKCS_9_MESSAGE_DIGEST = "1.2.840.113549.1.9.4" -CMSG_DATA = 1 -CMSG_SIGNED = 2 -CMSG_ENVELOPED = 3 -CMSG_SIGNED_AND_ENVELOPED = 4 -CMSG_HASHED = 5 -CMSG_ENCRYPTED = 6 - -CMSG_ALL_FLAGS = -1 -CMSG_DATA_FLAG = 1 << CMSG_DATA -CMSG_SIGNED_FLAG = 1 << CMSG_SIGNED -CMSG_ENVELOPED_FLAG = 1 << CMSG_ENVELOPED -CMSG_SIGNED_AND_ENVELOPED_FLAG = 1 << CMSG_SIGNED_AND_ENVELOPED -CMSG_HASHED_FLAG = 1 << CMSG_HASHED -CMSG_ENCRYPTED_FLAG = 1 << CMSG_ENCRYPTED -CERT_ID_ISSUER_SERIAL_NUMBER = 1 -CERT_ID_KEY_IDENTIFIER = 2 -CERT_ID_SHA1_HASH = 3 -CMSG_KEY_AGREE_EPHEMERAL_KEY_CHOICE = 1 -CMSG_KEY_AGREE_STATIC_KEY_CHOICE = 2 -CMSG_MAIL_LIST_HANDLE_KEY_CHOICE = 1 -CMSG_KEY_TRANS_RECIPIENT = 1 -CMSG_KEY_AGREE_RECIPIENT = 2 -CMSG_MAIL_LIST_RECIPIENT = 3 -CMSG_SP3_COMPATIBLE_ENCRYPT_FLAG = -2147483648 -CMSG_RC4_NO_SALT_FLAG = 0x40000000 -CMSG_INDEFINITE_LENGTH = -1 -CMSG_BARE_CONTENT_FLAG = 0x00000001 -CMSG_LENGTH_ONLY_FLAG = 0x00000002 -CMSG_DETACHED_FLAG = 0x00000004 -CMSG_AUTHENTICATED_ATTRIBUTES_FLAG = 0x00000008 -CMSG_CONTENTS_OCTETS_FLAG = 0x00000010 -CMSG_MAX_LENGTH_FLAG = 0x00000020 -CMSG_CMS_ENCAPSULATED_CONTENT_FLAG = 0x00000040 -CMSG_CRYPT_RELEASE_CONTEXT_FLAG = 0x00008000 -CMSG_TYPE_PARAM = 1 -CMSG_CONTENT_PARAM = 2 -CMSG_BARE_CONTENT_PARAM = 3 -CMSG_INNER_CONTENT_TYPE_PARAM = 4 -CMSG_SIGNER_COUNT_PARAM = 5 -CMSG_SIGNER_INFO_PARAM = 6 -CMSG_SIGNER_CERT_INFO_PARAM = 7 -CMSG_SIGNER_HASH_ALGORITHM_PARAM = 8 -CMSG_SIGNER_AUTH_ATTR_PARAM = 9 -CMSG_SIGNER_UNAUTH_ATTR_PARAM = 10 -CMSG_CERT_COUNT_PARAM = 11 -CMSG_CERT_PARAM = 12 -CMSG_CRL_COUNT_PARAM = 13 -CMSG_CRL_PARAM = 14 -CMSG_ENVELOPE_ALGORITHM_PARAM = 15 -CMSG_RECIPIENT_COUNT_PARAM = 17 
-CMSG_RECIPIENT_INDEX_PARAM = 18 -CMSG_RECIPIENT_INFO_PARAM = 19 -CMSG_HASH_ALGORITHM_PARAM = 20 -CMSG_HASH_DATA_PARAM = 21 -CMSG_COMPUTED_HASH_PARAM = 22 -CMSG_ENCRYPT_PARAM = 26 -CMSG_ENCRYPTED_DIGEST = 27 -CMSG_ENCODED_SIGNER = 28 -CMSG_ENCODED_MESSAGE = 29 -CMSG_VERSION_PARAM = 30 -CMSG_ATTR_CERT_COUNT_PARAM = 31 -CMSG_ATTR_CERT_PARAM = 32 -CMSG_CMS_RECIPIENT_COUNT_PARAM = 33 -CMSG_CMS_RECIPIENT_INDEX_PARAM = 34 -CMSG_CMS_RECIPIENT_ENCRYPTED_KEY_INDEX_PARAM = 35 -CMSG_CMS_RECIPIENT_INFO_PARAM = 36 -CMSG_UNPROTECTED_ATTR_PARAM = 37 -CMSG_SIGNER_CERT_ID_PARAM = 38 -CMSG_CMS_SIGNER_INFO_PARAM = 39 -CMSG_SIGNED_DATA_V1 = 1 -CMSG_SIGNED_DATA_V3 = 3 -CMSG_SIGNED_DATA_PKCS_1_5_VERSION = CMSG_SIGNED_DATA_V1 -CMSG_SIGNED_DATA_CMS_VERSION = CMSG_SIGNED_DATA_V3 -CMSG_SIGNER_INFO_V1 = 1 -CMSG_SIGNER_INFO_V3 = 3 -CMSG_SIGNER_INFO_PKCS_1_5_VERSION = CMSG_SIGNER_INFO_V1 -CMSG_SIGNER_INFO_CMS_VERSION = CMSG_SIGNER_INFO_V3 -CMSG_HASHED_DATA_V0 = 0 -CMSG_HASHED_DATA_V2 = 2 -CMSG_HASHED_DATA_PKCS_1_5_VERSION = CMSG_HASHED_DATA_V0 -CMSG_HASHED_DATA_CMS_VERSION = CMSG_HASHED_DATA_V2 -CMSG_ENVELOPED_DATA_V0 = 0 -CMSG_ENVELOPED_DATA_V2 = 2 -CMSG_ENVELOPED_DATA_PKCS_1_5_VERSION = CMSG_ENVELOPED_DATA_V0 -CMSG_ENVELOPED_DATA_CMS_VERSION = CMSG_ENVELOPED_DATA_V2 -CMSG_KEY_AGREE_ORIGINATOR_CERT = 1 -CMSG_KEY_AGREE_ORIGINATOR_PUBLIC_KEY = 2 -CMSG_ENVELOPED_RECIPIENT_V0 = 0 -CMSG_ENVELOPED_RECIPIENT_V2 = 2 -CMSG_ENVELOPED_RECIPIENT_V3 = 3 -CMSG_ENVELOPED_RECIPIENT_V4 = 4 -CMSG_KEY_TRANS_PKCS_1_5_VERSION = CMSG_ENVELOPED_RECIPIENT_V0 -CMSG_KEY_TRANS_CMS_VERSION = CMSG_ENVELOPED_RECIPIENT_V2 -CMSG_KEY_AGREE_VERSION = CMSG_ENVELOPED_RECIPIENT_V3 -CMSG_MAIL_LIST_VERSION = CMSG_ENVELOPED_RECIPIENT_V4 -CMSG_CTRL_VERIFY_SIGNATURE = 1 -CMSG_CTRL_DECRYPT = 2 -CMSG_CTRL_VERIFY_HASH = 5 -CMSG_CTRL_ADD_SIGNER = 6 -CMSG_CTRL_DEL_SIGNER = 7 -CMSG_CTRL_ADD_SIGNER_UNAUTH_ATTR = 8 -CMSG_CTRL_DEL_SIGNER_UNAUTH_ATTR = 9 -CMSG_CTRL_ADD_CERT = 10 -CMSG_CTRL_DEL_CERT = 11 -CMSG_CTRL_ADD_CRL = 12 
-CMSG_CTRL_DEL_CRL = 13 -CMSG_CTRL_ADD_ATTR_CERT = 14 -CMSG_CTRL_DEL_ATTR_CERT = 15 -CMSG_CTRL_KEY_TRANS_DECRYPT = 16 -CMSG_CTRL_KEY_AGREE_DECRYPT = 17 -CMSG_CTRL_MAIL_LIST_DECRYPT = 18 -CMSG_CTRL_VERIFY_SIGNATURE_EX = 19 -CMSG_CTRL_ADD_CMS_SIGNER_INFO = 20 -CMSG_VERIFY_SIGNER_PUBKEY = 1 -CMSG_VERIFY_SIGNER_CERT = 2 -CMSG_VERIFY_SIGNER_CHAIN = 3 -CMSG_VERIFY_SIGNER_NULL = 4 -CMSG_OID_GEN_ENCRYPT_KEY_FUNC = "CryptMsgDllGenEncryptKey" -CMSG_OID_EXPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllExportEncryptKey" -CMSG_OID_IMPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllImportEncryptKey" -CMSG_CONTENT_ENCRYPT_PAD_ENCODED_LEN_FLAG = 0x00000001 -CMSG_DEFAULT_INSTALLABLE_FUNC_OID = 1 -CMSG_CONTENT_ENCRYPT_FREE_PARA_FLAG = 0x00000001 -CMSG_CONTENT_ENCRYPT_RELEASE_CONTEXT_FLAG = 0x00008000 -CMSG_OID_GEN_CONTENT_ENCRYPT_KEY_FUNC = "CryptMsgDllGenContentEncryptKey" -CMSG_KEY_TRANS_ENCRYPT_FREE_PARA_FLAG = 0x00000001 -CMSG_OID_EXPORT_KEY_TRANS_FUNC = "CryptMsgDllExportKeyTrans" -CMSG_KEY_AGREE_ENCRYPT_FREE_PARA_FLAG = 0x00000001 -CMSG_KEY_AGREE_ENCRYPT_FREE_MATERIAL_FLAG = 0x00000002 -CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_ALG_FLAG = 0x00000004 -CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_PARA_FLAG = 0x00000008 -CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_BITS_FLAG = 0x00000010 -CMSG_OID_EXPORT_KEY_AGREE_FUNC = "CryptMsgDllExportKeyAgree" -CMSG_MAIL_LIST_ENCRYPT_FREE_PARA_FLAG = 0x00000001 -CMSG_OID_EXPORT_MAIL_LIST_FUNC = "CryptMsgDllExportMailList" -CMSG_OID_IMPORT_KEY_TRANS_FUNC = "CryptMsgDllImportKeyTrans" -CMSG_OID_IMPORT_KEY_AGREE_FUNC = "CryptMsgDllImportKeyAgree" -CMSG_OID_IMPORT_MAIL_LIST_FUNC = "CryptMsgDllImportMailList" - -# Certificate property id's used with CertGetCertificateContextProperty -CERT_KEY_PROV_HANDLE_PROP_ID = 1 -CERT_KEY_PROV_INFO_PROP_ID = 2 -CERT_SHA1_HASH_PROP_ID = 3 -CERT_MD5_HASH_PROP_ID = 4 -CERT_HASH_PROP_ID = CERT_SHA1_HASH_PROP_ID -CERT_KEY_CONTEXT_PROP_ID = 5 -CERT_KEY_SPEC_PROP_ID = 6 -CERT_IE30_RESERVED_PROP_ID = 7 -CERT_PUBKEY_HASH_RESERVED_PROP_ID = 8 -CERT_ENHKEY_USAGE_PROP_ID 
= 9 -CERT_CTL_USAGE_PROP_ID = CERT_ENHKEY_USAGE_PROP_ID -CERT_NEXT_UPDATE_LOCATION_PROP_ID = 10 -CERT_FRIENDLY_NAME_PROP_ID = 11 -CERT_PVK_FILE_PROP_ID = 12 -CERT_DESCRIPTION_PROP_ID = 13 -CERT_ACCESS_STATE_PROP_ID = 14 -CERT_SIGNATURE_HASH_PROP_ID = 15 -CERT_SMART_CARD_DATA_PROP_ID = 16 -CERT_EFS_PROP_ID = 17 -CERT_FORTEZZA_DATA_PROP_ID = 18 -CERT_ARCHIVED_PROP_ID = 19 -CERT_KEY_IDENTIFIER_PROP_ID = 20 -CERT_AUTO_ENROLL_PROP_ID = 21 -CERT_PUBKEY_ALG_PARA_PROP_ID = 22 -CERT_CROSS_CERT_DIST_POINTS_PROP_ID = 23 -CERT_ISSUER_PUBLIC_KEY_MD5_HASH_PROP_ID = 24 -CERT_SUBJECT_PUBLIC_KEY_MD5_HASH_PROP_ID = 25 -CERT_ENROLLMENT_PROP_ID = 26 -CERT_DATE_STAMP_PROP_ID = 27 -CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = 28 -CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = 29 -CERT_EXTENDED_ERROR_INFO_PROP_ID = 30 -CERT_RENEWAL_PROP_ID = 64 -CERT_ARCHIVED_KEY_HASH_PROP_ID = 65 -CERT_AUTO_ENROLL_RETRY_PROP_ID = 66 -CERT_AIA_URL_RETRIEVED_PROP_ID = 67 -CERT_AUTHORITY_INFO_ACCESS_PROP_ID = 68 -CERT_BACKED_UP_PROP_ID = 69 -CERT_OCSP_RESPONSE_PROP_ID = 70 -CERT_REQUEST_ORIGINATOR_PROP_ID = 71 -CERT_SOURCE_LOCATION_PROP_ID = 72 -CERT_SOURCE_URL_PROP_ID = 73 -CERT_NEW_KEY_PROP_ID = 74 -CERT_OCSP_CACHE_PREFIX_PROP_ID = 75 -CERT_SMART_CARD_ROOT_INFO_PROP_ID = 76 -CERT_NO_AUTO_EXPIRE_CHECK_PROP_ID = 77 -CERT_NCRYPT_KEY_HANDLE_PROP_ID = 78 -CERT_HCRYPTPROV_OR_NCRYPT_KEY_HANDLE_PROP_ID = 79 -CERT_SUBJECT_INFO_ACCESS_PROP_ID = 80 -CERT_CA_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID = 81 -CERT_CA_DISABLE_CRL_PROP_ID = 82 -CERT_ROOT_PROGRAM_CERT_POLICIES_PROP_ID = 83 -CERT_ROOT_PROGRAM_NAME_CONSTRAINTS_PROP_ID = 84 -CERT_SUBJECT_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID = 85 -CERT_SUBJECT_DISABLE_CRL_PROP_ID = 86 -CERT_CEP_PROP_ID = 87 -CERT_SIGN_HASH_CNG_ALG_PROP_ID = 89 -CERT_SCARD_PIN_ID_PROP_ID = 90 -CERT_SCARD_PIN_INFO_PROP_ID = 91 -CERT_FIRST_RESERVED_PROP_ID = 92 -CERT_LAST_RESERVED_PROP_ID = 0x00007FFF -CERT_FIRST_USER_PROP_ID = 0x00008000 -CERT_LAST_USER_PROP_ID = 0x0000FFFF - -szOID_CERT_PROP_ID_PREFIX = 
"1.3.6.1.4.1.311.10.11." -szOID_CERT_KEY_IDENTIFIER_PROP_ID = "1.3.6.1.4.1.311.10.11.20" -szOID_CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = "1.3.6.1.4.1.311.10.11.28" -szOID_CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = "1.3.6.1.4.1.311.10.11.29" -CERT_ACCESS_STATE_WRITE_PERSIST_FLAG = 0x1 -CERT_ACCESS_STATE_SYSTEM_STORE_FLAG = 0x2 -CERT_ACCESS_STATE_LM_SYSTEM_STORE_FLAG = 0x4 -CERT_SET_KEY_PROV_HANDLE_PROP_ID = 0x00000001 -CERT_SET_KEY_CONTEXT_PROP_ID = 0x00000001 -sz_CERT_STORE_PROV_MEMORY = "Memory" -sz_CERT_STORE_PROV_FILENAME_W = "File" -sz_CERT_STORE_PROV_FILENAME = sz_CERT_STORE_PROV_FILENAME_W -sz_CERT_STORE_PROV_SYSTEM_W = "System" -sz_CERT_STORE_PROV_SYSTEM = sz_CERT_STORE_PROV_SYSTEM_W -sz_CERT_STORE_PROV_PKCS7 = "PKCS7" -sz_CERT_STORE_PROV_SERIALIZED = "Serialized" -sz_CERT_STORE_PROV_COLLECTION = "Collection" -sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W = "SystemRegistry" -sz_CERT_STORE_PROV_SYSTEM_REGISTRY = sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W -sz_CERT_STORE_PROV_PHYSICAL_W = "Physical" -sz_CERT_STORE_PROV_PHYSICAL = sz_CERT_STORE_PROV_PHYSICAL_W -sz_CERT_STORE_PROV_SMART_CARD_W = "SmartCard" -sz_CERT_STORE_PROV_SMART_CARD = sz_CERT_STORE_PROV_SMART_CARD_W -sz_CERT_STORE_PROV_LDAP_W = "Ldap" -sz_CERT_STORE_PROV_LDAP = sz_CERT_STORE_PROV_LDAP_W -CERT_STORE_SIGNATURE_FLAG = 0x00000001 -CERT_STORE_TIME_VALIDITY_FLAG = 0x00000002 -CERT_STORE_REVOCATION_FLAG = 0x00000004 -CERT_STORE_NO_CRL_FLAG = 0x00010000 -CERT_STORE_NO_ISSUER_FLAG = 0x00020000 -CERT_STORE_BASE_CRL_FLAG = 0x00000100 -CERT_STORE_DELTA_CRL_FLAG = 0x00000200 -CERT_STORE_NO_CRYPT_RELEASE_FLAG = 0x00000001 -CERT_STORE_SET_LOCALIZED_NAME_FLAG = 0x00000002 -CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG = 0x00000004 -CERT_STORE_DELETE_FLAG = 0x00000010 -CERT_STORE_UNSAFE_PHYSICAL_FLAG = 0x00000020 -CERT_STORE_SHARE_STORE_FLAG = 0x00000040 -CERT_STORE_SHARE_CONTEXT_FLAG = 0x00000080 -CERT_STORE_MANIFOLD_FLAG = 0x00000100 -CERT_STORE_ENUM_ARCHIVED_FLAG = 0x00000200 -CERT_STORE_UPDATE_KEYID_FLAG = 0x00000400 
-CERT_STORE_BACKUP_RESTORE_FLAG = 0x00000800 -CERT_STORE_READONLY_FLAG = 0x00008000 -CERT_STORE_OPEN_EXISTING_FLAG = 0x00004000 -CERT_STORE_CREATE_NEW_FLAG = 0x00002000 -CERT_STORE_MAXIMUM_ALLOWED_FLAG = 0x00001000 -CERT_SYSTEM_STORE_MASK = -65536 -CERT_SYSTEM_STORE_RELOCATE_FLAG = -2147483648 -CERT_SYSTEM_STORE_UNPROTECTED_FLAG = 0x40000000 -CERT_SYSTEM_STORE_LOCATION_MASK = 0x00FF0000 -CERT_SYSTEM_STORE_LOCATION_SHIFT = 16 -CERT_SYSTEM_STORE_CURRENT_USER_ID = 1 -CERT_SYSTEM_STORE_LOCAL_MACHINE_ID = 2 -CERT_SYSTEM_STORE_CURRENT_SERVICE_ID = 4 -CERT_SYSTEM_STORE_SERVICES_ID = 5 -CERT_SYSTEM_STORE_USERS_ID = 6 -CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID = 7 -CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID = 8 -CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID = 9 -CERT_SYSTEM_STORE_CURRENT_USER = ( - CERT_SYSTEM_STORE_CURRENT_USER_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -) -CERT_SYSTEM_STORE_LOCAL_MACHINE = ( - CERT_SYSTEM_STORE_LOCAL_MACHINE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -) -CERT_SYSTEM_STORE_CURRENT_SERVICE = ( - CERT_SYSTEM_STORE_CURRENT_SERVICE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -) -CERT_SYSTEM_STORE_SERVICES = ( - CERT_SYSTEM_STORE_SERVICES_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -) -CERT_SYSTEM_STORE_USERS = CERT_SYSTEM_STORE_USERS_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY = ( - CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -) -CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY = ( - CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -) -CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE = ( - CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT -) -CERT_PROT_ROOT_DISABLE_CURRENT_USER_FLAG = 0x1 -CERT_PROT_ROOT_INHIBIT_ADD_AT_INIT_FLAG = 0x2 -CERT_PROT_ROOT_INHIBIT_PURGE_LM_FLAG = 0x4 -CERT_PROT_ROOT_DISABLE_LM_AUTH_FLAG = 0x8 -CERT_PROT_ROOT_ONLY_LM_GPT_FLAG = 0x8 -CERT_PROT_ROOT_DISABLE_NT_AUTH_REQUIRED_FLAG = 0x10 
-CERT_PROT_ROOT_DISABLE_NOT_DEFINED_NAME_CONSTRAINT_FLAG = 0x20 -CERT_TRUST_PUB_ALLOW_TRUST_MASK = 0x00000003 -CERT_TRUST_PUB_ALLOW_END_USER_TRUST = 0x00000000 -CERT_TRUST_PUB_ALLOW_MACHINE_ADMIN_TRUST = 0x00000001 -CERT_TRUST_PUB_ALLOW_ENTERPRISE_ADMIN_TRUST = 0x00000002 -CERT_TRUST_PUB_CHECK_PUBLISHER_REV_FLAG = 0x00000100 -CERT_TRUST_PUB_CHECK_TIMESTAMP_REV_FLAG = 0x00000200 - -CERT_AUTH_ROOT_AUTO_UPDATE_LOCAL_MACHINE_REGPATH = ( - r"Software\Microsoft\SystemCertificates\AuthRoot\AutoUpdate" -) -CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_UNTRUSTED_ROOT_LOGGING_FLAG = 0x1 -CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_PARTIAL_CHAIN_LOGGING_FLAG = 0x2 -CERT_AUTH_ROOT_AUTO_UPDATE_ROOT_DIR_URL_VALUE_NAME = "RootDirUrl" -CERT_AUTH_ROOT_AUTO_UPDATE_SYNC_DELTA_TIME_VALUE_NAME = "SyncDeltaTime" -CERT_AUTH_ROOT_AUTO_UPDATE_FLAGS_VALUE_NAME = "Flags" -CERT_AUTH_ROOT_CTL_FILENAME = "authroot.stl" -CERT_AUTH_ROOT_CTL_FILENAME_A = "authroot.stl" -CERT_AUTH_ROOT_CAB_FILENAME = "authrootstl.cab" -CERT_AUTH_ROOT_SEQ_FILENAME = "authrootseq.txt" -CERT_AUTH_ROOT_CERT_EXT = ".crt" - -CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH = ( - r"Software\Policies\Microsoft\SystemCertificates" -) -CERT_EFSBLOB_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + r"\EFS" -CERT_EFSBLOB_VALUE_NAME = "EFSBlob" -CERT_PROT_ROOT_FLAGS_REGPATH = ( - CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + r"\Root\ProtectedRoots" -) -CERT_PROT_ROOT_FLAGS_VALUE_NAME = "Flags" -CERT_TRUST_PUB_SAFER_GROUP_POLICY_REGPATH = ( - CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + r"\TrustedPublisher\Safer" -) -CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH = r"Software\Microsoft\SystemCertificates" -CERT_TRUST_PUB_SAFER_LOCAL_MACHINE_REGPATH = ( - CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH + r"\TrustedPublisher\Safer" -) -CERT_TRUST_PUB_AUTHENTICODE_FLAGS_VALUE_NAME = "AuthenticodeFlags" -CERT_OCM_SUBCOMPONENTS_LOCAL_MACHINE_REGPATH = ( - r"SOFTWARE\Microsoft\Windows\CurrentVersion\Setup\OC Manager\Subcomponents" -) -CERT_OCM_SUBCOMPONENTS_ROOT_AUTO_UPDATE_VALUE_NAME = 
r"RootAutoUpdate" -CERT_DISABLE_ROOT_AUTO_UPDATE_REGPATH = ( - CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + r"\AuthRoot" -) -CERT_DISABLE_ROOT_AUTO_UPDATE_VALUE_NAME = "DisableRootAutoUpdate" -CERT_AUTH_ROOT_AUTO_UPDATE_LOCAL_MACHINE_REGPATH = ( - CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH + r"\AuthRoot\AutoUpdate" -) - -CERT_REGISTRY_STORE_REMOTE_FLAG = 0x10000 -CERT_REGISTRY_STORE_SERIALIZED_FLAG = 0x20000 -CERT_REGISTRY_STORE_CLIENT_GPT_FLAG = -2147483648 -CERT_REGISTRY_STORE_LM_GPT_FLAG = 0x01000000 -CERT_REGISTRY_STORE_ROAMING_FLAG = 0x40000 -CERT_REGISTRY_STORE_MY_IE_DIRTY_FLAG = 0x80000 -CERT_IE_DIRTY_FLAGS_REGPATH = r"Software\Microsoft\Cryptography\IEDirtyFlags" - -CERT_FILE_STORE_COMMIT_ENABLE_FLAG = 0x10000 -CERT_LDAP_STORE_SIGN_FLAG = 0x10000 -CERT_LDAP_STORE_AREC_EXCLUSIVE_FLAG = 0x20000 -CERT_LDAP_STORE_OPENED_FLAG = 0x40000 -CERT_LDAP_STORE_UNBIND_FLAG = 0x80000 -CRYPT_OID_OPEN_STORE_PROV_FUNC = "CertDllOpenStoreProv" - -CERT_STORE_PROV_EXTERNAL_FLAG = 0x1 -CERT_STORE_PROV_DELETED_FLAG = 0x2 -CERT_STORE_PROV_NO_PERSIST_FLAG = 0x4 -CERT_STORE_PROV_SYSTEM_STORE_FLAG = 0x8 -CERT_STORE_PROV_LM_SYSTEM_STORE_FLAG = 0x10 -CERT_STORE_PROV_CLOSE_FUNC = 0 -CERT_STORE_PROV_READ_CERT_FUNC = 1 -CERT_STORE_PROV_WRITE_CERT_FUNC = 2 -CERT_STORE_PROV_DELETE_CERT_FUNC = 3 -CERT_STORE_PROV_SET_CERT_PROPERTY_FUNC = 4 -CERT_STORE_PROV_READ_CRL_FUNC = 5 -CERT_STORE_PROV_WRITE_CRL_FUNC = 6 -CERT_STORE_PROV_DELETE_CRL_FUNC = 7 -CERT_STORE_PROV_SET_CRL_PROPERTY_FUNC = 8 -CERT_STORE_PROV_READ_CTL_FUNC = 9 -CERT_STORE_PROV_WRITE_CTL_FUNC = 10 -CERT_STORE_PROV_DELETE_CTL_FUNC = 11 -CERT_STORE_PROV_SET_CTL_PROPERTY_FUNC = 12 -CERT_STORE_PROV_CONTROL_FUNC = 13 -CERT_STORE_PROV_FIND_CERT_FUNC = 14 -CERT_STORE_PROV_FREE_FIND_CERT_FUNC = 15 -CERT_STORE_PROV_GET_CERT_PROPERTY_FUNC = 16 -CERT_STORE_PROV_FIND_CRL_FUNC = 17 -CERT_STORE_PROV_FREE_FIND_CRL_FUNC = 18 -CERT_STORE_PROV_GET_CRL_PROPERTY_FUNC = 19 -CERT_STORE_PROV_FIND_CTL_FUNC = 20 -CERT_STORE_PROV_FREE_FIND_CTL_FUNC = 21 
-CERT_STORE_PROV_GET_CTL_PROPERTY_FUNC = 22 -CERT_STORE_PROV_WRITE_ADD_FLAG = 0x1 -CERT_STORE_SAVE_AS_STORE = 1 -CERT_STORE_SAVE_AS_PKCS7 = 2 -CERT_STORE_SAVE_TO_FILE = 1 -CERT_STORE_SAVE_TO_MEMORY = 2 -CERT_STORE_SAVE_TO_FILENAME_A = 3 -CERT_STORE_SAVE_TO_FILENAME_W = 4 -CERT_STORE_SAVE_TO_FILENAME = CERT_STORE_SAVE_TO_FILENAME_W -CERT_CLOSE_STORE_FORCE_FLAG = 0x00000001 -CERT_CLOSE_STORE_CHECK_FLAG = 0x00000002 -CERT_COMPARE_MASK = 0xFFFF -CERT_COMPARE_SHIFT = 16 -CERT_COMPARE_ANY = 0 -CERT_COMPARE_SHA1_HASH = 1 -CERT_COMPARE_NAME = 2 -CERT_COMPARE_ATTR = 3 -CERT_COMPARE_MD5_HASH = 4 -CERT_COMPARE_PROPERTY = 5 -CERT_COMPARE_PUBLIC_KEY = 6 -CERT_COMPARE_HASH = CERT_COMPARE_SHA1_HASH -CERT_COMPARE_NAME_STR_A = 7 -CERT_COMPARE_NAME_STR_W = 8 -CERT_COMPARE_KEY_SPEC = 9 -CERT_COMPARE_ENHKEY_USAGE = 10 -CERT_COMPARE_CTL_USAGE = CERT_COMPARE_ENHKEY_USAGE -CERT_COMPARE_SUBJECT_CERT = 11 -CERT_COMPARE_ISSUER_OF = 12 -CERT_COMPARE_EXISTING = 13 -CERT_COMPARE_SIGNATURE_HASH = 14 -CERT_COMPARE_KEY_IDENTIFIER = 15 -CERT_COMPARE_CERT_ID = 16 -CERT_COMPARE_CROSS_CERT_DIST_POINTS = 17 -CERT_COMPARE_PUBKEY_MD5_HASH = 18 -CERT_FIND_ANY = CERT_COMPARE_ANY << CERT_COMPARE_SHIFT -CERT_FIND_SHA1_HASH = CERT_COMPARE_SHA1_HASH << CERT_COMPARE_SHIFT -CERT_FIND_MD5_HASH = CERT_COMPARE_MD5_HASH << CERT_COMPARE_SHIFT -CERT_FIND_SIGNATURE_HASH = CERT_COMPARE_SIGNATURE_HASH << CERT_COMPARE_SHIFT -CERT_FIND_KEY_IDENTIFIER = CERT_COMPARE_KEY_IDENTIFIER << CERT_COMPARE_SHIFT -CERT_FIND_HASH = CERT_FIND_SHA1_HASH -CERT_FIND_PROPERTY = CERT_COMPARE_PROPERTY << CERT_COMPARE_SHIFT -CERT_FIND_PUBLIC_KEY = CERT_COMPARE_PUBLIC_KEY << CERT_COMPARE_SHIFT -CERT_FIND_SUBJECT_NAME = ( - CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | CERT_INFO_SUBJECT_FLAG -) -CERT_FIND_SUBJECT_ATTR = ( - CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | CERT_INFO_SUBJECT_FLAG -) -CERT_FIND_ISSUER_NAME = CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | CERT_INFO_ISSUER_FLAG -CERT_FIND_ISSUER_ATTR = CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | 
CERT_INFO_ISSUER_FLAG -CERT_FIND_SUBJECT_STR_A = ( - CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | CERT_INFO_SUBJECT_FLAG -) -CERT_FIND_SUBJECT_STR_W = ( - CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | CERT_INFO_SUBJECT_FLAG -) -CERT_FIND_SUBJECT_STR = CERT_FIND_SUBJECT_STR_W -CERT_FIND_ISSUER_STR_A = ( - CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | CERT_INFO_ISSUER_FLAG -) -CERT_FIND_ISSUER_STR_W = ( - CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | CERT_INFO_ISSUER_FLAG -) -CERT_FIND_ISSUER_STR = CERT_FIND_ISSUER_STR_W -CERT_FIND_KEY_SPEC = CERT_COMPARE_KEY_SPEC << CERT_COMPARE_SHIFT -CERT_FIND_ENHKEY_USAGE = CERT_COMPARE_ENHKEY_USAGE << CERT_COMPARE_SHIFT -CERT_FIND_CTL_USAGE = CERT_FIND_ENHKEY_USAGE -CERT_FIND_SUBJECT_CERT = CERT_COMPARE_SUBJECT_CERT << CERT_COMPARE_SHIFT -CERT_FIND_ISSUER_OF = CERT_COMPARE_ISSUER_OF << CERT_COMPARE_SHIFT -CERT_FIND_EXISTING = CERT_COMPARE_EXISTING << CERT_COMPARE_SHIFT -CERT_FIND_CERT_ID = CERT_COMPARE_CERT_ID << CERT_COMPARE_SHIFT -CERT_FIND_CROSS_CERT_DIST_POINTS = ( - CERT_COMPARE_CROSS_CERT_DIST_POINTS << CERT_COMPARE_SHIFT -) -CERT_FIND_PUBKEY_MD5_HASH = CERT_COMPARE_PUBKEY_MD5_HASH << CERT_COMPARE_SHIFT -CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG = 0x1 -CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG = 0x2 -CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG = 0x4 -CERT_FIND_NO_ENHKEY_USAGE_FLAG = 0x8 -CERT_FIND_OR_ENHKEY_USAGE_FLAG = 0x10 -CERT_FIND_VALID_ENHKEY_USAGE_FLAG = 0x20 -CERT_FIND_OPTIONAL_CTL_USAGE_FLAG = CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG -CERT_FIND_EXT_ONLY_CTL_USAGE_FLAG = CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG -CERT_FIND_PROP_ONLY_CTL_USAGE_FLAG = CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG -CERT_FIND_NO_CTL_USAGE_FLAG = CERT_FIND_NO_ENHKEY_USAGE_FLAG -CERT_FIND_OR_CTL_USAGE_FLAG = CERT_FIND_OR_ENHKEY_USAGE_FLAG -CERT_FIND_VALID_CTL_USAGE_FLAG = CERT_FIND_VALID_ENHKEY_USAGE_FLAG -CERT_SET_PROPERTY_IGNORE_PERSIST_ERROR_FLAG = -2147483648 -CERT_SET_PROPERTY_INHIBIT_PERSIST_FLAG = 0x40000000 -CTL_ENTRY_FROM_PROP_CHAIN_FLAG = 0x1 
-CRL_FIND_ANY = 0 -CRL_FIND_ISSUED_BY = 1 -CRL_FIND_EXISTING = 2 -CRL_FIND_ISSUED_FOR = 3 -CRL_FIND_ISSUED_BY_AKI_FLAG = 0x1 -CRL_FIND_ISSUED_BY_SIGNATURE_FLAG = 0x2 -CRL_FIND_ISSUED_BY_DELTA_FLAG = 0x4 -CRL_FIND_ISSUED_BY_BASE_FLAG = 0x8 -CERT_STORE_ADD_NEW = 1 -CERT_STORE_ADD_USE_EXISTING = 2 -CERT_STORE_ADD_REPLACE_EXISTING = 3 -CERT_STORE_ADD_ALWAYS = 4 -CERT_STORE_ADD_REPLACE_EXISTING_INHERIT_PROPERTIES = 5 -CERT_STORE_ADD_NEWER = 6 -CERT_STORE_ADD_NEWER_INHERIT_PROPERTIES = 7 -CERT_STORE_CERTIFICATE_CONTEXT = 1 -CERT_STORE_CRL_CONTEXT = 2 -CERT_STORE_CTL_CONTEXT = 3 - -CERT_STORE_ALL_CONTEXT_FLAG = -1 -CERT_STORE_CERTIFICATE_CONTEXT_FLAG = 1 << CERT_STORE_CERTIFICATE_CONTEXT -CERT_STORE_CRL_CONTEXT_FLAG = 1 << CERT_STORE_CRL_CONTEXT -CERT_STORE_CTL_CONTEXT_FLAG = 1 << CERT_STORE_CTL_CONTEXT -CTL_ANY_SUBJECT_TYPE = 1 -CTL_CERT_SUBJECT_TYPE = 2 -CTL_FIND_ANY = 0 -CTL_FIND_SHA1_HASH = 1 -CTL_FIND_MD5_HASH = 2 -CTL_FIND_USAGE = 3 -CTL_FIND_SUBJECT = 4 -CTL_FIND_EXISTING = 5 -CTL_FIND_NO_LIST_ID_CBDATA = -1 -CTL_FIND_SAME_USAGE_FLAG = 0x1 -CERT_STORE_CTRL_RESYNC = 1 -CERT_STORE_CTRL_NOTIFY_CHANGE = 2 -CERT_STORE_CTRL_COMMIT = 3 -CERT_STORE_CTRL_AUTO_RESYNC = 4 -CERT_STORE_CTRL_CANCEL_NOTIFY = 5 -CERT_STORE_CTRL_INHIBIT_DUPLICATE_HANDLE_FLAG = 0x1 -CERT_STORE_CTRL_COMMIT_FORCE_FLAG = 0x1 -CERT_STORE_CTRL_COMMIT_CLEAR_FLAG = 0x2 -CERT_STORE_LOCALIZED_NAME_PROP_ID = 0x1000 -CERT_CREATE_CONTEXT_NOCOPY_FLAG = 0x1 -CERT_CREATE_CONTEXT_SORTED_FLAG = 0x2 -CERT_CREATE_CONTEXT_NO_HCRYPTMSG_FLAG = 0x4 -CERT_CREATE_CONTEXT_NO_ENTRY_FLAG = 0x8 - -CERT_PHYSICAL_STORE_ADD_ENABLE_FLAG = 0x1 -CERT_PHYSICAL_STORE_OPEN_DISABLE_FLAG = 0x2 -CERT_PHYSICAL_STORE_REMOTE_OPEN_DISABLE_FLAG = 0x4 -CERT_PHYSICAL_STORE_INSERT_COMPUTER_NAME_ENABLE_FLAG = 0x8 -CERT_PHYSICAL_STORE_PREDEFINED_ENUM_FLAG = 0x1 - -# Names of physical cert stores -CERT_PHYSICAL_STORE_DEFAULT_NAME = ".Default" -CERT_PHYSICAL_STORE_GROUP_POLICY_NAME = ".GroupPolicy" -CERT_PHYSICAL_STORE_LOCAL_MACHINE_NAME = 
".LocalMachine" -CERT_PHYSICAL_STORE_DS_USER_CERTIFICATE_NAME = ".UserCertificate" -CERT_PHYSICAL_STORE_LOCAL_MACHINE_GROUP_POLICY_NAME = ".LocalMachineGroupPolicy" -CERT_PHYSICAL_STORE_ENTERPRISE_NAME = ".Enterprise" -CERT_PHYSICAL_STORE_AUTH_ROOT_NAME = ".AuthRoot" -CERT_PHYSICAL_STORE_SMART_CARD_NAME = ".SmartCard" - -CRYPT_OID_OPEN_SYSTEM_STORE_PROV_FUNC = "CertDllOpenSystemStoreProv" -CRYPT_OID_REGISTER_SYSTEM_STORE_FUNC = "CertDllRegisterSystemStore" -CRYPT_OID_UNREGISTER_SYSTEM_STORE_FUNC = "CertDllUnregisterSystemStore" -CRYPT_OID_ENUM_SYSTEM_STORE_FUNC = "CertDllEnumSystemStore" -CRYPT_OID_REGISTER_PHYSICAL_STORE_FUNC = "CertDllRegisterPhysicalStore" -CRYPT_OID_UNREGISTER_PHYSICAL_STORE_FUNC = "CertDllUnregisterPhysicalStore" -CRYPT_OID_ENUM_PHYSICAL_STORE_FUNC = "CertDllEnumPhysicalStore" -CRYPT_OID_SYSTEM_STORE_LOCATION_VALUE_NAME = "SystemStoreLocation" - -CMSG_TRUSTED_SIGNER_FLAG = 0x1 -CMSG_SIGNER_ONLY_FLAG = 0x2 -CMSG_USE_SIGNER_INDEX_FLAG = 0x4 -CMSG_CMS_ENCAPSULATED_CTL_FLAG = 0x00008000 -CMSG_ENCODE_SORTED_CTL_FLAG = 0x1 -CMSG_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x2 -CERT_VERIFY_INHIBIT_CTL_UPDATE_FLAG = 0x1 -CERT_VERIFY_TRUSTED_SIGNERS_FLAG = 0x2 -CERT_VERIFY_NO_TIME_CHECK_FLAG = 0x4 -CERT_VERIFY_ALLOW_MORE_USAGE_FLAG = 0x8 -CERT_VERIFY_UPDATED_CTL_FLAG = 0x1 -CERT_CONTEXT_REVOCATION_TYPE = 1 -CERT_VERIFY_REV_CHAIN_FLAG = 0x00000001 -CERT_VERIFY_CACHE_ONLY_BASED_REVOCATION = 0x00000002 -CERT_VERIFY_REV_ACCUMULATIVE_TIMEOUT_FLAG = 0x00000004 -CERT_UNICODE_IS_RDN_ATTRS_FLAG = 0x1 -CERT_CASE_INSENSITIVE_IS_RDN_ATTRS_FLAG = 0x2 -CRYPT_VERIFY_CERT_SIGN_SUBJECT_BLOB = 1 -CRYPT_VERIFY_CERT_SIGN_SUBJECT_CERT = 2 -CRYPT_VERIFY_CERT_SIGN_SUBJECT_CRL = 3 -CRYPT_VERIFY_CERT_SIGN_ISSUER_PUBKEY = 1 -CRYPT_VERIFY_CERT_SIGN_ISSUER_CERT = 2 -CRYPT_VERIFY_CERT_SIGN_ISSUER_CHAIN = 3 -CRYPT_VERIFY_CERT_SIGN_ISSUER_NULL = 4 -CRYPT_DEFAULT_CONTEXT_AUTO_RELEASE_FLAG = 0x00000001 -CRYPT_DEFAULT_CONTEXT_PROCESS_FLAG = 0x00000002 -CRYPT_DEFAULT_CONTEXT_CERT_SIGN_OID = 
1 -CRYPT_DEFAULT_CONTEXT_MULTI_CERT_SIGN_OID = 2 -CRYPT_OID_EXPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllExportPublicKeyInfoEx" -CRYPT_OID_IMPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllImportPublicKeyInfoEx" -CRYPT_ACQUIRE_CACHE_FLAG = 0x00000001 -CRYPT_ACQUIRE_USE_PROV_INFO_FLAG = 0x00000002 -CRYPT_ACQUIRE_COMPARE_KEY_FLAG = 0x00000004 -CRYPT_ACQUIRE_SILENT_FLAG = 0x00000040 -CRYPT_FIND_USER_KEYSET_FLAG = 0x00000001 -CRYPT_FIND_MACHINE_KEYSET_FLAG = 0x00000002 -CRYPT_FIND_SILENT_KEYSET_FLAG = 0x00000040 -CRYPT_OID_IMPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllImportPrivateKeyInfoEx" -CRYPT_OID_EXPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllExportPrivateKeyInfoEx" -CRYPT_DELETE_KEYSET = CRYPT_DELETEKEYSET -CERT_SIMPLE_NAME_STR = 1 -CERT_OID_NAME_STR = 2 -CERT_X500_NAME_STR = 3 -CERT_NAME_STR_SEMICOLON_FLAG = 0x40000000 -CERT_NAME_STR_NO_PLUS_FLAG = 0x20000000 -CERT_NAME_STR_NO_QUOTING_FLAG = 0x10000000 -CERT_NAME_STR_CRLF_FLAG = 0x08000000 -CERT_NAME_STR_COMMA_FLAG = 0x04000000 -CERT_NAME_STR_REVERSE_FLAG = 0x02000000 -CERT_NAME_STR_DISABLE_IE4_UTF8_FLAG = 0x00010000 -CERT_NAME_STR_ENABLE_T61_UNICODE_FLAG = 0x00020000 -CERT_NAME_STR_ENABLE_UTF8_UNICODE_FLAG = 0x00040000 -CERT_NAME_EMAIL_TYPE = 1 -CERT_NAME_RDN_TYPE = 2 -CERT_NAME_ATTR_TYPE = 3 -CERT_NAME_SIMPLE_DISPLAY_TYPE = 4 -CERT_NAME_FRIENDLY_DISPLAY_TYPE = 5 -CERT_NAME_DNS_TYPE = 6 -CERT_NAME_URL_TYPE = 7 -CERT_NAME_UPN_TYPE = 8 -CERT_NAME_ISSUER_FLAG = 0x1 -CERT_NAME_DISABLE_IE4_UTF8_FLAG = 0x00010000 -CRYPT_MESSAGE_BARE_CONTENT_OUT_FLAG = 0x00000001 -CRYPT_MESSAGE_ENCAPSULATED_CONTENT_OUT_FLAG = 0x00000002 -CRYPT_MESSAGE_KEYID_SIGNER_FLAG = 0x00000004 -CRYPT_MESSAGE_SILENT_KEYSET_FLAG = 0x00000040 -CRYPT_MESSAGE_KEYID_RECIPIENT_FLAG = 0x4 -CERT_QUERY_OBJECT_FILE = 0x00000001 -CERT_QUERY_OBJECT_BLOB = 0x00000002 -CERT_QUERY_CONTENT_CERT = 1 -CERT_QUERY_CONTENT_CTL = 2 -CERT_QUERY_CONTENT_CRL = 3 -CERT_QUERY_CONTENT_SERIALIZED_STORE = 4 -CERT_QUERY_CONTENT_SERIALIZED_CERT = 5 -CERT_QUERY_CONTENT_SERIALIZED_CTL = 6 
-CERT_QUERY_CONTENT_SERIALIZED_CRL = 7 -CERT_QUERY_CONTENT_PKCS7_SIGNED = 8 -CERT_QUERY_CONTENT_PKCS7_UNSIGNED = 9 -CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED = 10 -CERT_QUERY_CONTENT_PKCS10 = 11 -CERT_QUERY_CONTENT_PFX = 12 -CERT_QUERY_CONTENT_CERT_PAIR = 13 -CERT_QUERY_CONTENT_FLAG_CERT = 1 << CERT_QUERY_CONTENT_CERT -CERT_QUERY_CONTENT_FLAG_CTL = 1 << CERT_QUERY_CONTENT_CTL -CERT_QUERY_CONTENT_FLAG_CRL = 1 << CERT_QUERY_CONTENT_CRL -CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE = 1 << CERT_QUERY_CONTENT_SERIALIZED_STORE -CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT = 1 << CERT_QUERY_CONTENT_SERIALIZED_CERT -CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL = 1 << CERT_QUERY_CONTENT_SERIALIZED_CTL -CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL = 1 << CERT_QUERY_CONTENT_SERIALIZED_CRL -CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED = 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED -CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED = 1 << CERT_QUERY_CONTENT_PKCS7_UNSIGNED -CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED = 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED -CERT_QUERY_CONTENT_FLAG_PKCS10 = 1 << CERT_QUERY_CONTENT_PKCS10 -CERT_QUERY_CONTENT_FLAG_PFX = 1 << CERT_QUERY_CONTENT_PFX -CERT_QUERY_CONTENT_FLAG_CERT_PAIR = 1 << CERT_QUERY_CONTENT_CERT_PAIR -CERT_QUERY_CONTENT_FLAG_ALL = ( - CERT_QUERY_CONTENT_FLAG_CERT - | CERT_QUERY_CONTENT_FLAG_CTL - | CERT_QUERY_CONTENT_FLAG_CRL - | CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE - | CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT - | CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL - | CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL - | CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED - | CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED - | CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED - | CERT_QUERY_CONTENT_FLAG_PKCS10 - | CERT_QUERY_CONTENT_FLAG_PFX - | CERT_QUERY_CONTENT_FLAG_CERT_PAIR -) -CERT_QUERY_FORMAT_BINARY = 1 -CERT_QUERY_FORMAT_BASE64_ENCODED = 2 -CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED = 3 -CERT_QUERY_FORMAT_FLAG_BINARY = 1 << CERT_QUERY_FORMAT_BINARY -CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED = 1 << 
CERT_QUERY_FORMAT_BASE64_ENCODED -CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED = ( - 1 << CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED -) -CERT_QUERY_FORMAT_FLAG_ALL = ( - CERT_QUERY_FORMAT_FLAG_BINARY - | CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED - | CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED -) - -CREDENTIAL_OID_PASSWORD_CREDENTIALS_A = 1 -CREDENTIAL_OID_PASSWORD_CREDENTIALS_W = 2 -CREDENTIAL_OID_PASSWORD_CREDENTIALS = CREDENTIAL_OID_PASSWORD_CREDENTIALS_W - -SCHEME_OID_RETRIEVE_ENCODED_OBJECT_FUNC = "SchemeDllRetrieveEncodedObject" -SCHEME_OID_RETRIEVE_ENCODED_OBJECTW_FUNC = "SchemeDllRetrieveEncodedObjectW" -CONTEXT_OID_CREATE_OBJECT_CONTEXT_FUNC = "ContextDllCreateObjectContext" -CONTEXT_OID_CERTIFICATE = 1 -CONTEXT_OID_CRL = 2 -CONTEXT_OID_CTL = 3 -CONTEXT_OID_PKCS7 = 4 -CONTEXT_OID_CAPI2_ANY = 5 -CONTEXT_OID_OCSP_RESP = 6 - -CRYPT_RETRIEVE_MULTIPLE_OBJECTS = 0x00000001 -CRYPT_CACHE_ONLY_RETRIEVAL = 0x00000002 -CRYPT_WIRE_ONLY_RETRIEVAL = 0x00000004 -CRYPT_DONT_CACHE_RESULT = 0x00000008 -CRYPT_ASYNC_RETRIEVAL = 0x00000010 -CRYPT_STICKY_CACHE_RETRIEVAL = 0x00001000 -CRYPT_LDAP_SCOPE_BASE_ONLY_RETRIEVAL = 0x00002000 -CRYPT_OFFLINE_CHECK_RETRIEVAL = 0x00004000 -CRYPT_LDAP_INSERT_ENTRY_ATTRIBUTE = 0x00008000 -CRYPT_LDAP_SIGN_RETRIEVAL = 0x00010000 -CRYPT_NO_AUTH_RETRIEVAL = 0x00020000 -CRYPT_LDAP_AREC_EXCLUSIVE_RETRIEVAL = 0x00040000 -CRYPT_AIA_RETRIEVAL = 0x00080000 -CRYPT_VERIFY_CONTEXT_SIGNATURE = 0x00000020 -CRYPT_VERIFY_DATA_HASH = 0x00000040 -CRYPT_KEEP_TIME_VALID = 0x00000080 -CRYPT_DONT_VERIFY_SIGNATURE = 0x00000100 -CRYPT_DONT_CHECK_TIME_VALIDITY = 0x00000200 -CRYPT_CHECK_FRESHNESS_TIME_VALIDITY = 0x00000400 -CRYPT_ACCUMULATIVE_TIMEOUT = 0x00000800 -CRYPT_PARAM_ASYNC_RETRIEVAL_COMPLETION = 1 -CRYPT_PARAM_CANCEL_ASYNC_RETRIEVAL = 2 -CRYPT_GET_URL_FROM_PROPERTY = 0x00000001 -CRYPT_GET_URL_FROM_EXTENSION = 0x00000002 -CRYPT_GET_URL_FROM_UNAUTH_ATTRIBUTE = 0x00000004 -CRYPT_GET_URL_FROM_AUTH_ATTRIBUTE = 0x00000008 -URL_OID_GET_OBJECT_URL_FUNC = "UrlDllGetObjectUrl" 
-TIME_VALID_OID_GET_OBJECT_FUNC = "TimeValidDllGetObject" -TIME_VALID_OID_FLUSH_OBJECT_FUNC = "TimeValidDllFlushObject" - -TIME_VALID_OID_GET_CTL = 1 -TIME_VALID_OID_GET_CRL = 2 -TIME_VALID_OID_GET_CRL_FROM_CERT = 3 -TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CERT = 4 -TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CRL = 5 - -TIME_VALID_OID_FLUSH_CTL = 1 -TIME_VALID_OID_FLUSH_CRL = 2 -TIME_VALID_OID_FLUSH_CRL_FROM_CERT = 3 -TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CERT = 4 -TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CRL = 5 - -CRYPTPROTECT_PROMPT_ON_UNPROTECT = 0x1 -CRYPTPROTECT_PROMPT_ON_PROTECT = 0x2 -CRYPTPROTECT_PROMPT_RESERVED = 0x04 -CRYPTPROTECT_PROMPT_STRONG = 0x08 -CRYPTPROTECT_PROMPT_REQUIRE_STRONG = 0x10 -CRYPTPROTECT_UI_FORBIDDEN = 0x1 -CRYPTPROTECT_LOCAL_MACHINE = 0x4 -CRYPTPROTECT_CRED_SYNC = 0x8 -CRYPTPROTECT_AUDIT = 0x10 -CRYPTPROTECT_NO_RECOVERY = 0x20 -CRYPTPROTECT_VERIFY_PROTECTION = 0x40 -CRYPTPROTECT_CRED_REGENERATE = 0x80 -CRYPTPROTECT_FIRST_RESERVED_FLAGVAL = 0x0FFFFFFF -CRYPTPROTECT_LAST_RESERVED_FLAGVAL = -1 -CRYPTPROTECTMEMORY_BLOCK_SIZE = 16 -CRYPTPROTECTMEMORY_SAME_PROCESS = 0x00 -CRYPTPROTECTMEMORY_CROSS_PROCESS = 0x01 -CRYPTPROTECTMEMORY_SAME_LOGON = 0x02 -CERT_CREATE_SELFSIGN_NO_SIGN = 1 -CERT_CREATE_SELFSIGN_NO_KEY_INFO = 2 -CRYPT_KEYID_MACHINE_FLAG = 0x00000020 -CRYPT_KEYID_ALLOC_FLAG = 0x00008000 -CRYPT_KEYID_DELETE_FLAG = 0x00000010 -CRYPT_KEYID_SET_NEW_FLAG = 0x00002000 -CERT_CHAIN_MAX_AIA_URL_COUNT_IN_CERT_DEFAULT = 5 -CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_COUNT_PER_CHAIN_DEFAULT = 10 -CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_BYTE_COUNT_DEFAULT = 100000 -CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_CERT_COUNT_DEFAULT = 10 -CERT_CHAIN_CACHE_END_CERT = 0x00000001 -CERT_CHAIN_THREAD_STORE_SYNC = 0x00000002 -CERT_CHAIN_CACHE_ONLY_URL_RETRIEVAL = 0x00000004 -CERT_CHAIN_USE_LOCAL_MACHINE_STORE = 0x00000008 -CERT_CHAIN_ENABLE_CACHE_AUTO_UPDATE = 0x00000010 -CERT_CHAIN_ENABLE_SHARE_STORE = 0x00000020 -CERT_TRUST_NO_ERROR = 0x00000000 -CERT_TRUST_IS_NOT_TIME_VALID = 0x00000001 
-CERT_TRUST_IS_NOT_TIME_NESTED = 0x00000002 -CERT_TRUST_IS_REVOKED = 0x00000004 -CERT_TRUST_IS_NOT_SIGNATURE_VALID = 0x00000008 -CERT_TRUST_IS_NOT_VALID_FOR_USAGE = 0x00000010 -CERT_TRUST_IS_UNTRUSTED_ROOT = 0x00000020 -CERT_TRUST_REVOCATION_STATUS_UNKNOWN = 0x00000040 -CERT_TRUST_IS_CYCLIC = 0x00000080 -CERT_TRUST_INVALID_EXTENSION = 0x00000100 -CERT_TRUST_INVALID_POLICY_CONSTRAINTS = 0x00000200 -CERT_TRUST_INVALID_BASIC_CONSTRAINTS = 0x00000400 -CERT_TRUST_INVALID_NAME_CONSTRAINTS = 0x00000800 -CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT = 0x00001000 -CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT = 0x00002000 -CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT = 0x00004000 -CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT = 0x00008000 -CERT_TRUST_IS_OFFLINE_REVOCATION = 0x01000000 -CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY = 0x02000000 -CERT_TRUST_IS_PARTIAL_CHAIN = 0x00010000 -CERT_TRUST_CTL_IS_NOT_TIME_VALID = 0x00020000 -CERT_TRUST_CTL_IS_NOT_SIGNATURE_VALID = 0x00040000 -CERT_TRUST_CTL_IS_NOT_VALID_FOR_USAGE = 0x00080000 -CERT_TRUST_HAS_EXACT_MATCH_ISSUER = 0x00000001 -CERT_TRUST_HAS_KEY_MATCH_ISSUER = 0x00000002 -CERT_TRUST_HAS_NAME_MATCH_ISSUER = 0x00000004 -CERT_TRUST_IS_SELF_SIGNED = 0x00000008 -CERT_TRUST_HAS_PREFERRED_ISSUER = 0x00000100 -CERT_TRUST_HAS_ISSUANCE_CHAIN_POLICY = 0x00000200 -CERT_TRUST_HAS_VALID_NAME_CONSTRAINTS = 0x00000400 -CERT_TRUST_IS_COMPLEX_CHAIN = 0x00010000 -USAGE_MATCH_TYPE_AND = 0x00000000 -USAGE_MATCH_TYPE_OR = 0x00000001 -CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000 -CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000 -CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT = 0x40000000 -CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY = -2147483648 -CERT_CHAIN_REVOCATION_ACCUMULATIVE_TIMEOUT = 0x08000000 -CERT_CHAIN_DISABLE_PASS1_QUALITY_FILTERING = 0x00000040 -CERT_CHAIN_RETURN_LOWER_QUALITY_CONTEXTS = 0x00000080 -CERT_CHAIN_DISABLE_AUTH_ROOT_AUTO_UPDATE = 0x00000100 -CERT_CHAIN_TIMESTAMP_TIME = 0x00000200 -REVOCATION_OID_CRL_REVOCATION = 1 
-CERT_CHAIN_FIND_BY_ISSUER = 1 -CERT_CHAIN_FIND_BY_ISSUER_COMPARE_KEY_FLAG = 0x0001 -CERT_CHAIN_FIND_BY_ISSUER_COMPLEX_CHAIN_FLAG = 0x0002 -CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_URL_FLAG = 0x0004 -CERT_CHAIN_FIND_BY_ISSUER_LOCAL_MACHINE_FLAG = 0x0008 -CERT_CHAIN_FIND_BY_ISSUER_NO_KEY_FLAG = 0x4000 -CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_FLAG = 0x8000 -CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG = 0x00000001 -CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG = 0x00000002 -CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG = 0x00000004 -CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008 -CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = ( - CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG - | CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG - | CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG -) -CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010 -CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020 -CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040 -CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080 -CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG = 0x00000100 -CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG = 0x00000200 -CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG = 0x00000400 -CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG = 0x00000800 -CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = ( - CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG - | CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG - | CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG - | CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG -) -CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000 -CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000 -CRYPT_OID_VERIFY_CERTIFICATE_CHAIN_POLICY_FUNC = "CertDllVerifyCertificateChainPolicy" -AUTHTYPE_CLIENT = 1 -AUTHTYPE_SERVER = 2 -BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_CA_FLAG = -2147483648 -BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_END_ENTITY_FLAG = 0x40000000 -MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG = 0x00010000 -CRYPT_STRING_BASE64HEADER = 
0x00000000 -CRYPT_STRING_BASE64 = 0x00000001 -CRYPT_STRING_BINARY = 0x00000002 -CRYPT_STRING_BASE64REQUESTHEADER = 0x00000003 -CRYPT_STRING_HEX = 0x00000004 -CRYPT_STRING_HEXASCII = 0x00000005 -CRYPT_STRING_BASE64_ANY = 0x00000006 -CRYPT_STRING_ANY = 0x00000007 -CRYPT_STRING_HEX_ANY = 0x00000008 -CRYPT_STRING_BASE64X509CRLHEADER = 0x00000009 -CRYPT_STRING_HEXADDR = 0x0000000A -CRYPT_STRING_HEXASCIIADDR = 0x0000000B -CRYPT_STRING_NOCR = -2147483648 -CRYPT_USER_KEYSET = 0x00001000 -PKCS12_IMPORT_RESERVED_MASK = -65536 -REPORT_NO_PRIVATE_KEY = 0x0001 -REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY = 0x0002 -EXPORT_PRIVATE_KEYS = 0x0004 -PKCS12_EXPORT_RESERVED_MASK = -65536 - -# Certificate store provider types used with CertOpenStore -CERT_STORE_PROV_MSG = 1 -CERT_STORE_PROV_MEMORY = 2 -CERT_STORE_PROV_FILE = 3 -CERT_STORE_PROV_REG = 4 -CERT_STORE_PROV_PKCS7 = 5 -CERT_STORE_PROV_SERIALIZED = 6 -CERT_STORE_PROV_FILENAME = 8 -CERT_STORE_PROV_SYSTEM = 10 -CERT_STORE_PROV_COLLECTION = 11 -CERT_STORE_PROV_SYSTEM_REGISTRY = 13 -CERT_STORE_PROV_PHYSICAL = 14 -CERT_STORE_PROV_SMART_CARD = 15 -CERT_STORE_PROV_LDAP = 16 - -URL_OID_CERTIFICATE_ISSUER = 1 -URL_OID_CERTIFICATE_CRL_DIST_POINT = 2 -URL_OID_CTL_ISSUER = 3 -URL_OID_CTL_NEXT_UPDATE = 4 -URL_OID_CRL_ISSUER = 5 -URL_OID_CERTIFICATE_FRESHEST_CRL = 6 -URL_OID_CRL_FRESHEST_CRL = 7 -URL_OID_CROSS_CERT_DIST_POINT = 8 -URL_OID_CERTIFICATE_OCSP = 9 -URL_OID_CERTIFICATE_OCSP_AND_CRL_DIST_POINT = 10 -URL_OID_CERTIFICATE_CRL_DIST_POINT_AND_OCSP = 11 -URL_OID_CROSS_CERT_SUBJECT_INFO_ACCESS = 12 -URL_OID_CERTIFICATE_ONLY_OCSP = 13 diff --git a/lib/win32/lib/win32evtlogutil.py b/lib/win32/lib/win32evtlogutil.py deleted file mode 100644 index 78e231c9..00000000 --- a/lib/win32/lib/win32evtlogutil.py +++ /dev/null @@ -1,229 +0,0 @@ -"""Event Log Utilities - helper for win32evtlog.pyd -""" - -import win32api -import win32con -import win32evtlog -import winerror - -error = win32api.error # The error the evtlog module raises. 
- -langid = win32api.MAKELANGID(win32con.LANG_NEUTRAL, win32con.SUBLANG_NEUTRAL) - - -def AddSourceToRegistry( - appName, - msgDLL=None, - eventLogType="Application", - eventLogFlags=None, - categoryDLL=None, - categoryCount=0, -): - """Add a source of messages to the event log. - - Allows Python program to register a custom source of messages in the - registry. You must also provide the DLL name that has the message table, so the - full message text appears in the event log. - - Note that the win32evtlog.pyd file has a number of string entries with just "%1" - built in, so many Python programs can simply use this DLL. Disadvantages are that - you do not get language translation, and the full text is stored in the event log, - blowing the size of the log up. - """ - - # When an application uses the RegisterEventSource or OpenEventLog - # function to get a handle of an event log, the event logging service - # searches for the specified source name in the registry. You can add a - # new source name to the registry by opening a new registry subkey - # under the Application key and adding registry values to the new - # subkey. - - if msgDLL is None: - msgDLL = win32evtlog.__file__ - # Create a new key for our application - hkey = win32api.RegCreateKey( - win32con.HKEY_LOCAL_MACHINE, - "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" - % (eventLogType, appName), - ) - - # Add the Event-ID message-file name to the subkey. - win32api.RegSetValueEx( - hkey, - "EventMessageFile", # value name \ - 0, # reserved \ - win32con.REG_EXPAND_SZ, # value type \ - msgDLL, - ) - - # Set the supported types flags and add it to the subkey. 
- if eventLogFlags is None: - eventLogFlags = ( - win32evtlog.EVENTLOG_ERROR_TYPE - | win32evtlog.EVENTLOG_WARNING_TYPE - | win32evtlog.EVENTLOG_INFORMATION_TYPE - ) - win32api.RegSetValueEx( - hkey, # subkey handle \ - "TypesSupported", # value name \ - 0, # reserved \ - win32con.REG_DWORD, # value type \ - eventLogFlags, - ) - - if categoryCount > 0: - # Optionally, you can specify a message file that contains the categories - if categoryDLL is None: - categoryDLL = win32evtlog.__file__ - win32api.RegSetValueEx( - hkey, # subkey handle \ - "CategoryMessageFile", # value name \ - 0, # reserved \ - win32con.REG_EXPAND_SZ, # value type \ - categoryDLL, - ) - - win32api.RegSetValueEx( - hkey, # subkey handle \ - "CategoryCount", # value name \ - 0, # reserved \ - win32con.REG_DWORD, # value type \ - categoryCount, - ) - win32api.RegCloseKey(hkey) - - -def RemoveSourceFromRegistry(appName, eventLogType="Application"): - """Removes a source of messages from the event log.""" - - # Delete our key - try: - win32api.RegDeleteKey( - win32con.HKEY_LOCAL_MACHINE, - "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" - % (eventLogType, appName), - ) - except win32api.error as exc: - if exc.winerror != winerror.ERROR_FILE_NOT_FOUND: - raise - - -def ReportEvent( - appName, - eventID, - eventCategory=0, - eventType=win32evtlog.EVENTLOG_ERROR_TYPE, - strings=None, - data=None, - sid=None, -): - """Report an event for a previously added event source.""" - # Get a handle to the Application event log - hAppLog = win32evtlog.RegisterEventSource(None, appName) - - # Now report the event, which will add this event to the event log */ - win32evtlog.ReportEvent( - hAppLog, # event-log handle \ - eventType, - eventCategory, - eventID, - sid, - strings, - data, - ) - - win32evtlog.DeregisterEventSource(hAppLog) - - -def FormatMessage(eventLogRecord, logType="Application"): - """Given a tuple from ReadEventLog, and optionally where the event - record came from, load the message, and 
process message inserts. - - Note that this function may raise win32api.error. See also the - function SafeFormatMessage which will return None if the message can - not be processed. - """ - - # From the event log source name, we know the name of the registry - # key to look under for the name of the message DLL that contains - # the messages we need to extract with FormatMessage. So first get - # the event log source name... - keyName = "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" % ( - logType, - eventLogRecord.SourceName, - ) - - # Now open this key and get the EventMessageFile value, which is - # the name of the message DLL. - handle = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, keyName) - try: - dllNames = win32api.RegQueryValueEx(handle, "EventMessageFile")[0].split(";") - # Win2k etc appear to allow multiple DLL names - data = None - for dllName in dllNames: - try: - # Expand environment variable strings in the message DLL path name, - # in case any are there. - dllName = win32api.ExpandEnvironmentStrings(dllName) - - dllHandle = win32api.LoadLibraryEx( - dllName, 0, win32con.LOAD_LIBRARY_AS_DATAFILE - ) - try: - data = win32api.FormatMessageW( - win32con.FORMAT_MESSAGE_FROM_HMODULE, - dllHandle, - eventLogRecord.EventID, - langid, - eventLogRecord.StringInserts, - ) - finally: - win32api.FreeLibrary(dllHandle) - except win32api.error: - pass # Not in this DLL - try the next - if data is not None: - break - finally: - win32api.RegCloseKey(handle) - return data or "" # Don't want "None" ever being returned. - - -def SafeFormatMessage(eventLogRecord, logType=None): - """As for FormatMessage, except returns an error message if - the message can not be processed. 
- """ - if logType is None: - logType = "Application" - try: - return FormatMessage(eventLogRecord, logType) - except win32api.error: - if eventLogRecord.StringInserts is None: - desc = "" - else: - desc = ", ".join(eventLogRecord.StringInserts) - return ( - "" - % ( - winerror.HRESULT_CODE(eventLogRecord.EventID), - eventLogRecord.SourceName, - desc, - ) - ) - - -def FeedEventLogRecords( - feeder, machineName=None, logName="Application", readFlags=None -): - if readFlags is None: - readFlags = ( - win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ - ) - h = win32evtlog.OpenEventLog(machineName, logName) - try: - while 1: - objects = win32evtlog.ReadEventLog(h, readFlags, 0) - if not objects: - break - map(lambda item, feeder=feeder: feeder(*(item,)), objects) - finally: - win32evtlog.CloseEventLog(h) diff --git a/lib/win32/lib/win32gui_struct.py b/lib/win32/lib/win32gui_struct.py deleted file mode 100644 index 86623ae3..00000000 --- a/lib/win32/lib/win32gui_struct.py +++ /dev/null @@ -1,999 +0,0 @@ -# This is a work in progress - see Demos/win32gui_menu.py - -# win32gui_struct.py - helpers for working with various win32gui structures. -# As win32gui is "light-weight", it does not define objects for all possible -# win32 structures - in general, "buffer" objects are passed around - it is -# the callers responsibility to pack the buffer in the correct format. -# -# This module defines some helpers for the commonly used structures. -# -# In general, each structure has 3 functions: -# -# buffer, extras = PackSTRUCTURE(items, ...) -# item, ... = UnpackSTRUCTURE(buffer) -# buffer, extras = EmtpySTRUCTURE(...) -# -# 'extras' is always items that must be held along with the buffer, as the -# buffer refers to these object's memory. -# For structures that support a 'mask', this mask is hidden from the user - if -# 'None' is passed, the mask flag will not be set, or on return, None will -# be returned for the value if the mask is not set. 
-# -# NOTE: I considered making these structures look like real classes, and -# support 'attributes' etc - however, ctypes already has a good structure -# mechanism - I think it makes more sense to support ctype structures -# at the win32gui level, then there will be no need for this module at all. -# XXX - the above makes sense in terms of what is built and passed to -# win32gui (ie, the Pack* functions) - but doesn't make as much sense for -# the Unpack* functions, where the aim is user convenience. - -import array -import struct -import sys - -import commctrl -import pywintypes -import win32con -import win32gui - -is64bit = "64 bit" in sys.version - -try: - from collections import namedtuple - - def _MakeResult(names_str, values): - names = names_str.split() - nt = namedtuple(names[0], names[1:]) - return nt(*values) - -except ImportError: - # no namedtuple support - just return the values as a normal tuple. - def _MakeResult(names_str, values): - return values - - -_nmhdr_fmt = "PPi" -if is64bit: - # When the item past the NMHDR gets aligned (eg, when it is a struct) - # we need this many bytes padding. - _nmhdr_align_padding = "xxxx" -else: - _nmhdr_align_padding = "" - -# Encode a string suitable for passing in a win32gui related structure -# If win32gui is built with UNICODE defined (ie, py3k), then functions -# like InsertMenuItem are actually calling InsertMenuItemW etc, so all -# strings will need to be unicode. -if win32gui.UNICODE: - - def _make_text_buffer(text): - # XXX - at this stage win32gui.UNICODE is only True in py3k, - # and in py3k is makes sense to reject bytes. - if not isinstance(text, str): - raise TypeError("MENUITEMINFO text must be unicode") - data = (text + "\0").encode("utf-16le") - return array.array("b", data) - -else: - - def _make_text_buffer(text): - if isinstance(text, str): - text = text.encode("mbcs") - return array.array("b", text + "\0") - - -# make an 'empty' buffer, ready for filling with cch characters. 
-def _make_empty_text_buffer(cch): - return _make_text_buffer("\0" * cch) - - -if sys.version_info < (3, 0): - - def _make_memory(ob): - return str(buffer(ob)) - - def _make_bytes(sval): - return sval - -else: - - def _make_memory(ob): - return bytes(memoryview(ob)) - - def _make_bytes(sval): - return sval.encode("ascii") - - -# Generic WM_NOTIFY unpacking -def UnpackWMNOTIFY(lparam): - format = "PPi" - buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) - return _MakeResult("WMNOTIFY hwndFrom idFrom code", struct.unpack(format, buf)) - - -def UnpackNMITEMACTIVATE(lparam): - format = _nmhdr_fmt + _nmhdr_align_padding - if is64bit: - # the struct module doesn't handle this correctly as some of the items - # are actually structs in structs, which get individually aligned. - format = format + "iiiiiiixxxxP" - else: - format = format + "iiiiiiiP" - buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam) - return _MakeResult( - "NMITEMACTIVATE hwndFrom idFrom code iItem iSubItem uNewState uOldState uChanged actionx actiony lParam", - struct.unpack(format, buf), - ) - - -# MENUITEMINFO struct -# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/WinUI/WindowsUserInterface/Resources/Menus/MenuReference/MenuStructures/MENUITEMINFO.asp -# We use the struct module to pack and unpack strings as MENUITEMINFO -# structures. We also have special handling for the 'fMask' item in that -# structure to avoid the caller needing to explicitly check validity -# (None is used if the mask excludes/should exclude the value) -_menuiteminfo_fmt = "5i5PiP" - - -def PackMENUITEMINFO( - fType=None, - fState=None, - wID=None, - hSubMenu=None, - hbmpChecked=None, - hbmpUnchecked=None, - dwItemData=None, - text=None, - hbmpItem=None, - dwTypeData=None, -): - # 'extras' are objects the caller must keep a reference to (as their - # memory is used) for the lifetime of the INFO item. - extras = [] - # ack - dwItemData and dwTypeData were confused for a while... 
- assert ( - dwItemData is None or dwTypeData is None - ), "sorry - these were confused - you probably want dwItemData" - # if we are a long way past 209, then we can nuke the above... - if dwTypeData is not None: - import warnings - - warnings.warn("PackMENUITEMINFO: please use dwItemData instead of dwTypeData") - if dwItemData is None: - dwItemData = dwTypeData or 0 - - fMask = 0 - if fType is None: - fType = 0 - else: - fMask |= win32con.MIIM_FTYPE - if fState is None: - fState = 0 - else: - fMask |= win32con.MIIM_STATE - if wID is None: - wID = 0 - else: - fMask |= win32con.MIIM_ID - if hSubMenu is None: - hSubMenu = 0 - else: - fMask |= win32con.MIIM_SUBMENU - if hbmpChecked is None: - assert hbmpUnchecked is None, "neither or both checkmark bmps must be given" - hbmpChecked = hbmpUnchecked = 0 - else: - assert hbmpUnchecked is not None, "neither or both checkmark bmps must be given" - fMask |= win32con.MIIM_CHECKMARKS - if dwItemData is None: - dwItemData = 0 - else: - fMask |= win32con.MIIM_DATA - if hbmpItem is None: - hbmpItem = 0 - else: - fMask |= win32con.MIIM_BITMAP - if text is not None: - fMask |= win32con.MIIM_STRING - str_buf = _make_text_buffer(text) - cch = len(text) - # We are taking address of strbuf - it must not die until windows - # has finished with our structure. - lptext = str_buf.buffer_info()[0] - extras.append(str_buf) - else: - lptext = 0 - cch = 0 - # Create the struct. - # 'P' format does not accept PyHANDLE's ! 
- item = struct.pack( - _menuiteminfo_fmt, - struct.calcsize(_menuiteminfo_fmt), # cbSize - fMask, - fType, - fState, - wID, - int(hSubMenu), - int(hbmpChecked), - int(hbmpUnchecked), - dwItemData, - lptext, - cch, - int(hbmpItem), - ) - # Now copy the string to a writable buffer, so that the result - # could be passed to a 'Get' function - return array.array("b", item), extras - - -def UnpackMENUITEMINFO(s): - ( - cb, - fMask, - fType, - fState, - wID, - hSubMenu, - hbmpChecked, - hbmpUnchecked, - dwItemData, - lptext, - cch, - hbmpItem, - ) = struct.unpack(_menuiteminfo_fmt, s) - assert cb == len(s) - if fMask & win32con.MIIM_FTYPE == 0: - fType = None - if fMask & win32con.MIIM_STATE == 0: - fState = None - if fMask & win32con.MIIM_ID == 0: - wID = None - if fMask & win32con.MIIM_SUBMENU == 0: - hSubMenu = None - if fMask & win32con.MIIM_CHECKMARKS == 0: - hbmpChecked = hbmpUnchecked = None - if fMask & win32con.MIIM_DATA == 0: - dwItemData = None - if fMask & win32con.MIIM_BITMAP == 0: - hbmpItem = None - if fMask & win32con.MIIM_STRING: - text = win32gui.PyGetString(lptext, cch) - else: - text = None - return _MakeResult( - "MENUITEMINFO fType fState wID hSubMenu hbmpChecked " - "hbmpUnchecked dwItemData text hbmpItem", - ( - fType, - fState, - wID, - hSubMenu, - hbmpChecked, - hbmpUnchecked, - dwItemData, - text, - hbmpItem, - ), - ) - - -def EmptyMENUITEMINFO(mask=None, text_buf_size=512): - # text_buf_size is number of *characters* - not necessarily no of bytes. - extra = [] - if mask is None: - mask = ( - win32con.MIIM_BITMAP - | win32con.MIIM_CHECKMARKS - | win32con.MIIM_DATA - | win32con.MIIM_FTYPE - | win32con.MIIM_ID - | win32con.MIIM_STATE - | win32con.MIIM_STRING - | win32con.MIIM_SUBMENU - ) - # Note: No MIIM_TYPE - this screws win2k/98. 
- - if mask & win32con.MIIM_STRING: - text_buffer = _make_empty_text_buffer(text_buf_size) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - else: - text_addr = text_buf_size = 0 - - # Now copy the string to a writable buffer, so that the result - # could be passed to a 'Get' function - buf = struct.pack( - _menuiteminfo_fmt, - struct.calcsize(_menuiteminfo_fmt), # cbSize - mask, - 0, # fType, - 0, # fState, - 0, # wID, - 0, # hSubMenu, - 0, # hbmpChecked, - 0, # hbmpUnchecked, - 0, # dwItemData, - text_addr, - text_buf_size, - 0, # hbmpItem - ) - return array.array("b", buf), extra - - -# MENUINFO struct -_menuinfo_fmt = "iiiiPiP" - - -def PackMENUINFO( - dwStyle=None, - cyMax=None, - hbrBack=None, - dwContextHelpID=None, - dwMenuData=None, - fMask=0, -): - if dwStyle is None: - dwStyle = 0 - else: - fMask |= win32con.MIM_STYLE - if cyMax is None: - cyMax = 0 - else: - fMask |= win32con.MIM_MAXHEIGHT - if hbrBack is None: - hbrBack = 0 - else: - fMask |= win32con.MIM_BACKGROUND - if dwContextHelpID is None: - dwContextHelpID = 0 - else: - fMask |= win32con.MIM_HELPID - if dwMenuData is None: - dwMenuData = 0 - else: - fMask |= win32con.MIM_MENUDATA - # Create the struct. 
- item = struct.pack( - _menuinfo_fmt, - struct.calcsize(_menuinfo_fmt), # cbSize - fMask, - dwStyle, - cyMax, - hbrBack, - dwContextHelpID, - dwMenuData, - ) - return array.array("b", item) - - -def UnpackMENUINFO(s): - (cb, fMask, dwStyle, cyMax, hbrBack, dwContextHelpID, dwMenuData) = struct.unpack( - _menuinfo_fmt, s - ) - assert cb == len(s) - if fMask & win32con.MIM_STYLE == 0: - dwStyle = None - if fMask & win32con.MIM_MAXHEIGHT == 0: - cyMax = None - if fMask & win32con.MIM_BACKGROUND == 0: - hbrBack = None - if fMask & win32con.MIM_HELPID == 0: - dwContextHelpID = None - if fMask & win32con.MIM_MENUDATA == 0: - dwMenuData = None - return _MakeResult( - "MENUINFO dwStyle cyMax hbrBack dwContextHelpID dwMenuData", - (dwStyle, cyMax, hbrBack, dwContextHelpID, dwMenuData), - ) - - -def EmptyMENUINFO(mask=None): - if mask is None: - mask = ( - win32con.MIM_STYLE - | win32con.MIM_MAXHEIGHT - | win32con.MIM_BACKGROUND - | win32con.MIM_HELPID - | win32con.MIM_MENUDATA - ) - - buf = struct.pack( - _menuinfo_fmt, - struct.calcsize(_menuinfo_fmt), # cbSize - mask, - 0, # dwStyle - 0, # cyMax - 0, # hbrBack, - 0, # dwContextHelpID, - 0, # dwMenuData, - ) - return array.array("b", buf) - - -########################################################################## -# -# Tree View structure support - TVITEM, TVINSERTSTRUCT and TVDISPINFO -# -########################################################################## - -# XXX - Note that the following implementation of TreeView structures is ripped -# XXX - from the SpamBayes project. It may not quite work correctly yet - I -# XXX - intend checking them later - but having them is better than not at all! - -_tvitem_fmt = "iPiiPiiiiP" - - -# Helpers for the ugly win32 structure packing/unpacking -# XXX - Note that functions using _GetMaskAndVal run 3x faster if they are -# 'inlined' into the function - see PackLVITEM. 
If the profiler points at -# _GetMaskAndVal(), you should nuke it (patches welcome once they have been -# tested) -def _GetMaskAndVal(val, default, mask, flag): - if val is None: - return mask, default - else: - if flag is not None: - mask |= flag - return mask, val - - -def PackTVINSERTSTRUCT(parent, insertAfter, tvitem): - tvitem_buf, extra = PackTVITEM(*tvitem) - tvitem_buf = tvitem_buf.tobytes() - format = "PP%ds" % len(tvitem_buf) - return struct.pack(format, parent, insertAfter, tvitem_buf), extra - - -def PackTVITEM(hitem, state, stateMask, text, image, selimage, citems, param): - extra = [] # objects we must keep references to - mask = 0 - mask, hitem = _GetMaskAndVal(hitem, 0, mask, commctrl.TVIF_HANDLE) - mask, state = _GetMaskAndVal(state, 0, mask, commctrl.TVIF_STATE) - if not mask & commctrl.TVIF_STATE: - stateMask = 0 - mask, text = _GetMaskAndVal(text, None, mask, commctrl.TVIF_TEXT) - mask, image = _GetMaskAndVal(image, 0, mask, commctrl.TVIF_IMAGE) - mask, selimage = _GetMaskAndVal(selimage, 0, mask, commctrl.TVIF_SELECTEDIMAGE) - mask, citems = _GetMaskAndVal(citems, 0, mask, commctrl.TVIF_CHILDREN) - mask, param = _GetMaskAndVal(param, 0, mask, commctrl.TVIF_PARAM) - if text is None: - text_addr = text_len = 0 - else: - text_buffer = _make_text_buffer(text) - text_len = len(text) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - buf = struct.pack( - _tvitem_fmt, - mask, - hitem, - state, - stateMask, - text_addr, - text_len, # text - image, - selimage, - citems, - param, - ) - return array.array("b", buf), extra - - -# Make a new buffer suitable for querying hitem's attributes. 
-def EmptyTVITEM(hitem, mask=None, text_buf_size=512): - extra = [] # objects we must keep references to - if mask is None: - mask = ( - commctrl.TVIF_HANDLE - | commctrl.TVIF_STATE - | commctrl.TVIF_TEXT - | commctrl.TVIF_IMAGE - | commctrl.TVIF_SELECTEDIMAGE - | commctrl.TVIF_CHILDREN - | commctrl.TVIF_PARAM - ) - if mask & commctrl.TVIF_TEXT: - text_buffer = _make_empty_text_buffer(text_buf_size) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - else: - text_addr = text_buf_size = 0 - buf = struct.pack( - _tvitem_fmt, mask, hitem, 0, 0, text_addr, text_buf_size, 0, 0, 0, 0 # text - ) - return array.array("b", buf), extra - - -def UnpackTVITEM(buffer): - ( - item_mask, - item_hItem, - item_state, - item_stateMask, - item_textptr, - item_cchText, - item_image, - item_selimage, - item_cChildren, - item_param, - ) = struct.unpack(_tvitem_fmt, buffer) - # ensure only items listed by the mask are valid (except we assume the - # handle is always valid - some notifications (eg, TVN_ENDLABELEDIT) set a - # mask that doesn't include the handle, but the docs explicity say it is.) 
- if not (item_mask & commctrl.TVIF_TEXT): - item_textptr = item_cchText = None - if not (item_mask & commctrl.TVIF_CHILDREN): - item_cChildren = None - if not (item_mask & commctrl.TVIF_IMAGE): - item_image = None - if not (item_mask & commctrl.TVIF_PARAM): - item_param = None - if not (item_mask & commctrl.TVIF_SELECTEDIMAGE): - item_selimage = None - if not (item_mask & commctrl.TVIF_STATE): - item_state = item_stateMask = None - - if item_textptr: - text = win32gui.PyGetString(item_textptr) - else: - text = None - return _MakeResult( - "TVITEM item_hItem item_state item_stateMask " - "text item_image item_selimage item_cChildren item_param", - ( - item_hItem, - item_state, - item_stateMask, - text, - item_image, - item_selimage, - item_cChildren, - item_param, - ), - ) - - -# Unpack the lparm from a "TVNOTIFY" message -def UnpackTVNOTIFY(lparam): - item_size = struct.calcsize(_tvitem_fmt) - format = _nmhdr_fmt + _nmhdr_align_padding - if is64bit: - format = format + "ixxxx" - else: - format = format + "i" - format = format + "%ds%ds" % (item_size, item_size) - buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) - hwndFrom, id, code, action, buf_old, buf_new = struct.unpack(format, buf) - item_old = UnpackTVITEM(buf_old) - item_new = UnpackTVITEM(buf_new) - return _MakeResult( - "TVNOTIFY hwndFrom id code action item_old item_new", - (hwndFrom, id, code, action, item_old, item_new), - ) - - -def UnpackTVDISPINFO(lparam): - item_size = struct.calcsize(_tvitem_fmt) - format = "PPi%ds" % (item_size,) - buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) - hwndFrom, id, code, buf_item = struct.unpack(format, buf) - item = UnpackTVITEM(buf_item) - return _MakeResult("TVDISPINFO hwndFrom id code item", (hwndFrom, id, code, item)) - - -# -# List view items -_lvitem_fmt = "iiiiiPiiPi" - - -def PackLVITEM( - item=None, - subItem=None, - state=None, - stateMask=None, - text=None, - image=None, - param=None, - indent=None, -): - extra = [] # objects we must 
keep references to - mask = 0 - # _GetMaskAndVal adds quite a bit of overhead to this function. - if item is None: - item = 0 # No mask for item - if subItem is None: - subItem = 0 # No mask for sibItem - if state is None: - state = 0 - stateMask = 0 - else: - mask |= commctrl.LVIF_STATE - if stateMask is None: - stateMask = state - - if image is None: - image = 0 - else: - mask |= commctrl.LVIF_IMAGE - if param is None: - param = 0 - else: - mask |= commctrl.LVIF_PARAM - if indent is None: - indent = 0 - else: - mask |= commctrl.LVIF_INDENT - - if text is None: - text_addr = text_len = 0 - else: - mask |= commctrl.LVIF_TEXT - text_buffer = _make_text_buffer(text) - text_len = len(text) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - buf = struct.pack( - _lvitem_fmt, - mask, - item, - subItem, - state, - stateMask, - text_addr, - text_len, # text - image, - param, - indent, - ) - return array.array("b", buf), extra - - -def UnpackLVITEM(buffer): - ( - item_mask, - item_item, - item_subItem, - item_state, - item_stateMask, - item_textptr, - item_cchText, - item_image, - item_param, - item_indent, - ) = struct.unpack(_lvitem_fmt, buffer) - # ensure only items listed by the mask are valid - if not (item_mask & commctrl.LVIF_TEXT): - item_textptr = item_cchText = None - if not (item_mask & commctrl.LVIF_IMAGE): - item_image = None - if not (item_mask & commctrl.LVIF_PARAM): - item_param = None - if not (item_mask & commctrl.LVIF_INDENT): - item_indent = None - if not (item_mask & commctrl.LVIF_STATE): - item_state = item_stateMask = None - - if item_textptr: - text = win32gui.PyGetString(item_textptr) - else: - text = None - return _MakeResult( - "LVITEM item_item item_subItem item_state " - "item_stateMask text item_image item_param item_indent", - ( - item_item, - item_subItem, - item_state, - item_stateMask, - text, - item_image, - item_param, - item_indent, - ), - ) - - -# Unpack an "LVNOTIFY" message -def UnpackLVDISPINFO(lparam): - 
item_size = struct.calcsize(_lvitem_fmt) - format = _nmhdr_fmt + _nmhdr_align_padding + ("%ds" % (item_size,)) - buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) - hwndFrom, id, code, buf_item = struct.unpack(format, buf) - item = UnpackLVITEM(buf_item) - return _MakeResult("LVDISPINFO hwndFrom id code item", (hwndFrom, id, code, item)) - - -def UnpackLVNOTIFY(lparam): - format = _nmhdr_fmt + _nmhdr_align_padding + "7i" - if is64bit: - format = format + "xxxx" # point needs padding. - format = format + "P" - buf = win32gui.PyGetMemory(lparam, struct.calcsize(format)) - ( - hwndFrom, - id, - code, - item, - subitem, - newstate, - oldstate, - changed, - pt_x, - pt_y, - lparam, - ) = struct.unpack(format, buf) - return _MakeResult( - "UnpackLVNOTIFY hwndFrom id code item subitem " - "newstate oldstate changed pt lparam", - ( - hwndFrom, - id, - code, - item, - subitem, - newstate, - oldstate, - changed, - (pt_x, pt_y), - lparam, - ), - ) - - -# Make a new buffer suitable for querying an items attributes. 
-def EmptyLVITEM(item, subitem, mask=None, text_buf_size=512): - extra = [] # objects we must keep references to - if mask is None: - mask = ( - commctrl.LVIF_IMAGE - | commctrl.LVIF_INDENT - | commctrl.LVIF_TEXT - | commctrl.LVIF_PARAM - | commctrl.LVIF_STATE - ) - if mask & commctrl.LVIF_TEXT: - text_buffer = _make_empty_text_buffer(text_buf_size) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - else: - text_addr = text_buf_size = 0 - buf = struct.pack( - _lvitem_fmt, - mask, - item, - subitem, - 0, - 0, - text_addr, - text_buf_size, # text - 0, - 0, - 0, - ) - return array.array("b", buf), extra - - -# List view column structure -_lvcolumn_fmt = "iiiPiiii" - - -def PackLVCOLUMN(fmt=None, cx=None, text=None, subItem=None, image=None, order=None): - extra = [] # objects we must keep references to - mask = 0 - mask, fmt = _GetMaskAndVal(fmt, 0, mask, commctrl.LVCF_FMT) - mask, cx = _GetMaskAndVal(cx, 0, mask, commctrl.LVCF_WIDTH) - mask, text = _GetMaskAndVal(text, None, mask, commctrl.LVCF_TEXT) - mask, subItem = _GetMaskAndVal(subItem, 0, mask, commctrl.LVCF_SUBITEM) - mask, image = _GetMaskAndVal(image, 0, mask, commctrl.LVCF_IMAGE) - mask, order = _GetMaskAndVal(order, 0, mask, commctrl.LVCF_ORDER) - if text is None: - text_addr = text_len = 0 - else: - text_buffer = _make_text_buffer(text) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - text_len = len(text) - buf = struct.pack( - _lvcolumn_fmt, mask, fmt, cx, text_addr, text_len, subItem, image, order # text - ) - return array.array("b", buf), extra - - -def UnpackLVCOLUMN(lparam): - mask, fmt, cx, text_addr, text_size, subItem, image, order = struct.unpack( - _lvcolumn_fmt, lparam - ) - # ensure only items listed by the mask are valid - if not (mask & commctrl.LVCF_FMT): - fmt = None - if not (mask & commctrl.LVCF_WIDTH): - cx = None - if not (mask & commctrl.LVCF_TEXT): - text_addr = text_size = None - if not (mask & commctrl.LVCF_SUBITEM): - subItem = None - 
if not (mask & commctrl.LVCF_IMAGE): - image = None - if not (mask & commctrl.LVCF_ORDER): - order = None - if text_addr: - text = win32gui.PyGetString(text_addr) - else: - text = None - return _MakeResult( - "LVCOLUMN fmt cx text subItem image order", - (fmt, cx, text, subItem, image, order), - ) - - -# Make a new buffer suitable for querying an items attributes. -def EmptyLVCOLUMN(mask=None, text_buf_size=512): - extra = [] # objects we must keep references to - if mask is None: - mask = ( - commctrl.LVCF_FMT - | commctrl.LVCF_WIDTH - | commctrl.LVCF_TEXT - | commctrl.LVCF_SUBITEM - | commctrl.LVCF_IMAGE - | commctrl.LVCF_ORDER - ) - if mask & commctrl.LVCF_TEXT: - text_buffer = _make_empty_text_buffer(text_buf_size) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - else: - text_addr = text_buf_size = 0 - buf = struct.pack( - _lvcolumn_fmt, mask, 0, 0, text_addr, text_buf_size, 0, 0, 0 # text - ) - return array.array("b", buf), extra - - -# List view hit-test. -def PackLVHITTEST(pt): - format = "iiiii" - buf = struct.pack(format, pt[0], pt[1], 0, 0, 0) - return array.array("b", buf), None - - -def UnpackLVHITTEST(buf): - format = "iiiii" - x, y, flags, item, subitem = struct.unpack(format, buf) - return _MakeResult( - "LVHITTEST pt flags item subitem", ((x, y), flags, item, subitem) - ) - - -def PackHDITEM( - cxy=None, text=None, hbm=None, fmt=None, param=None, image=None, order=None -): - extra = [] # objects we must keep references to - mask = 0 - mask, cxy = _GetMaskAndVal(cxy, 0, mask, commctrl.HDI_HEIGHT) - mask, text = _GetMaskAndVal(text, None, mask, commctrl.LVCF_TEXT) - mask, hbm = _GetMaskAndVal(hbm, 0, mask, commctrl.HDI_BITMAP) - mask, fmt = _GetMaskAndVal(fmt, 0, mask, commctrl.HDI_FORMAT) - mask, param = _GetMaskAndVal(param, 0, mask, commctrl.HDI_LPARAM) - mask, image = _GetMaskAndVal(image, 0, mask, commctrl.HDI_IMAGE) - mask, order = _GetMaskAndVal(order, 0, mask, commctrl.HDI_ORDER) - - if text is None: - text_addr = 
text_len = 0 - else: - text_buffer = _make_text_buffer(text) - extra.append(text_buffer) - text_addr, _ = text_buffer.buffer_info() - text_len = len(text) - - format = "iiPPiiPiiii" - buf = struct.pack( - format, mask, cxy, text_addr, hbm, text_len, fmt, param, image, order, 0, 0 - ) - return array.array("b", buf), extra - - -# Device notification stuff - - -# Generic function for packing a DEV_BROADCAST_* structure - generally used -# by the other PackDEV_BROADCAST_* functions in this module. -def PackDEV_BROADCAST(devicetype, rest_fmt, rest_data, extra_data=_make_bytes("")): - # It seems a requirement is 4 byte alignment, even for the 'BYTE data[1]' - # field (eg, that would make DEV_BROADCAST_HANDLE 41 bytes, but we must - # be 44. - extra_data += _make_bytes("\0" * (4 - len(extra_data) % 4)) - format = "iii" + rest_fmt - full_size = struct.calcsize(format) + len(extra_data) - data = (full_size, devicetype, 0) + rest_data - return struct.pack(format, *data) + extra_data - - -def PackDEV_BROADCAST_HANDLE( - handle, - hdevnotify=0, - guid=_make_bytes("\0" * 16), - name_offset=0, - data=_make_bytes("\0"), -): - return PackDEV_BROADCAST( - win32con.DBT_DEVTYP_HANDLE, - "PP16sl", - (int(handle), int(hdevnotify), _make_memory(guid), name_offset), - data, - ) - - -def PackDEV_BROADCAST_VOLUME(unitmask, flags): - return PackDEV_BROADCAST(win32con.DBT_DEVTYP_VOLUME, "II", (unitmask, flags)) - - -def PackDEV_BROADCAST_DEVICEINTERFACE(classguid, name=""): - if win32gui.UNICODE: - # This really means "is py3k?" - so not accepting bytes is OK - if not isinstance(name, str): - raise TypeError("Must provide unicode for the name") - name = name.encode("utf-16le") - else: - # py2k was passed a unicode object - encode as mbcs. - if isinstance(name, str): - name = name.encode("mbcs") - - # 16 bytes for the IID followed by \0 term'd string. - rest_fmt = "16s%ds" % len(name) - # _make_memory(iid) hoops necessary to get the raw IID bytes. 
- rest_data = (_make_memory(pywintypes.IID(classguid)), name) - return PackDEV_BROADCAST(win32con.DBT_DEVTYP_DEVICEINTERFACE, rest_fmt, rest_data) - - -# An object returned by UnpackDEV_BROADCAST. -class DEV_BROADCAST_INFO: - def __init__(self, devicetype, **kw): - self.devicetype = devicetype - self.__dict__.update(kw) - - def __str__(self): - return "DEV_BROADCAST_INFO:" + str(self.__dict__) - - -# Support for unpacking the 'lparam' -def UnpackDEV_BROADCAST(lparam): - if lparam == 0: - return None - hdr_format = "iii" - hdr_size = struct.calcsize(hdr_format) - hdr_buf = win32gui.PyGetMemory(lparam, hdr_size) - size, devtype, reserved = struct.unpack("iii", hdr_buf) - # Due to x64 alignment issues, we need to use the full format string over - # the entire buffer. ie, on x64: - # calcsize('iiiP') != calcsize('iii')+calcsize('P') - buf = win32gui.PyGetMemory(lparam, size) - - extra = x = {} - if devtype == win32con.DBT_DEVTYP_HANDLE: - # 2 handles, a GUID, a LONG and possibly an array following... 
- fmt = hdr_format + "PP16sl" - ( - _, - _, - _, - x["handle"], - x["hdevnotify"], - guid_bytes, - x["nameoffset"], - ) = struct.unpack(fmt, buf[: struct.calcsize(fmt)]) - x["eventguid"] = pywintypes.IID(guid_bytes, True) - elif devtype == win32con.DBT_DEVTYP_DEVICEINTERFACE: - fmt = hdr_format + "16s" - _, _, _, guid_bytes = struct.unpack(fmt, buf[: struct.calcsize(fmt)]) - x["classguid"] = pywintypes.IID(guid_bytes, True) - x["name"] = win32gui.PyGetString(lparam + struct.calcsize(fmt)) - elif devtype == win32con.DBT_DEVTYP_VOLUME: - # int mask and flags - fmt = hdr_format + "II" - _, _, _, x["unitmask"], x["flags"] = struct.unpack( - fmt, buf[: struct.calcsize(fmt)] - ) - else: - raise NotImplementedError("unknown device type %d" % (devtype,)) - return DEV_BROADCAST_INFO(devtype, **extra) diff --git a/lib/win32/lib/win32inetcon.py b/lib/win32/lib/win32inetcon.py deleted file mode 100644 index b6e8e672..00000000 --- a/lib/win32/lib/win32inetcon.py +++ /dev/null @@ -1,1086 +0,0 @@ -# Generated by h2py from \mssdk\include\WinInet.h - -INTERNET_INVALID_PORT_NUMBER = 0 -INTERNET_DEFAULT_PORT = 0 -INTERNET_DEFAULT_FTP_PORT = 21 -INTERNET_DEFAULT_GOPHER_PORT = 70 -INTERNET_DEFAULT_HTTP_PORT = 80 -INTERNET_DEFAULT_HTTPS_PORT = 443 -INTERNET_DEFAULT_SOCKS_PORT = 1080 -INTERNET_MAX_HOST_NAME_LENGTH = 256 -INTERNET_MAX_USER_NAME_LENGTH = 128 -INTERNET_MAX_PASSWORD_LENGTH = 128 -INTERNET_MAX_PORT_NUMBER_LENGTH = 5 -INTERNET_MAX_PORT_NUMBER_VALUE = 65535 -INTERNET_MAX_PATH_LENGTH = 2048 -INTERNET_MAX_SCHEME_LENGTH = 32 -INTERNET_KEEP_ALIVE_ENABLED = 1 -INTERNET_KEEP_ALIVE_DISABLED = 0 -INTERNET_REQFLAG_FROM_CACHE = 0x00000001 -INTERNET_REQFLAG_ASYNC = 0x00000002 -INTERNET_REQFLAG_VIA_PROXY = 0x00000004 -INTERNET_REQFLAG_NO_HEADERS = 0x00000008 -INTERNET_REQFLAG_PASSIVE = 0x00000010 -INTERNET_REQFLAG_CACHE_WRITE_DISABLED = 0x00000040 -INTERNET_REQFLAG_NET_TIMEOUT = 0x00000080 -INTERNET_FLAG_RELOAD = -2147483648 -INTERNET_FLAG_RAW_DATA = 0x40000000 
-INTERNET_FLAG_EXISTING_CONNECT = 0x20000000 -INTERNET_FLAG_ASYNC = 0x10000000 -INTERNET_FLAG_PASSIVE = 0x08000000 -INTERNET_FLAG_NO_CACHE_WRITE = 0x04000000 -INTERNET_FLAG_DONT_CACHE = INTERNET_FLAG_NO_CACHE_WRITE -INTERNET_FLAG_MAKE_PERSISTENT = 0x02000000 -INTERNET_FLAG_FROM_CACHE = 0x01000000 -INTERNET_FLAG_OFFLINE = INTERNET_FLAG_FROM_CACHE -INTERNET_FLAG_SECURE = 0x00800000 -INTERNET_FLAG_KEEP_CONNECTION = 0x00400000 -INTERNET_FLAG_NO_AUTO_REDIRECT = 0x00200000 -INTERNET_FLAG_READ_PREFETCH = 0x00100000 -INTERNET_FLAG_NO_COOKIES = 0x00080000 -INTERNET_FLAG_NO_AUTH = 0x00040000 -INTERNET_FLAG_RESTRICTED_ZONE = 0x00020000 -INTERNET_FLAG_CACHE_IF_NET_FAIL = 0x00010000 -INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP = 0x00008000 -INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS = 0x00004000 -INTERNET_FLAG_IGNORE_CERT_DATE_INVALID = 0x00002000 -INTERNET_FLAG_IGNORE_CERT_CN_INVALID = 0x00001000 -INTERNET_FLAG_RESYNCHRONIZE = 0x00000800 -INTERNET_FLAG_HYPERLINK = 0x00000400 -INTERNET_FLAG_NO_UI = 0x00000200 -INTERNET_FLAG_PRAGMA_NOCACHE = 0x00000100 -INTERNET_FLAG_CACHE_ASYNC = 0x00000080 -INTERNET_FLAG_FORMS_SUBMIT = 0x00000040 -INTERNET_FLAG_FWD_BACK = 0x00000020 -INTERNET_FLAG_NEED_FILE = 0x00000010 -INTERNET_FLAG_MUST_CACHE_REQUEST = INTERNET_FLAG_NEED_FILE -SECURITY_INTERNET_MASK = ( - INTERNET_FLAG_IGNORE_CERT_CN_INVALID - | INTERNET_FLAG_IGNORE_CERT_DATE_INVALID - | INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS - | INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP -) -INTERNET_ERROR_MASK_INSERT_CDROM = 0x1 -INTERNET_ERROR_MASK_COMBINED_SEC_CERT = 0x2 -INTERNET_ERROR_MASK_NEED_MSN_SSPI_PKG = 0x4 -INTERNET_ERROR_MASK_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = 0x8 -WININET_API_FLAG_ASYNC = 0x00000001 -WININET_API_FLAG_SYNC = 0x00000004 -WININET_API_FLAG_USE_CONTEXT = 0x00000008 -INTERNET_NO_CALLBACK = 0 -IDSI_FLAG_KEEP_ALIVE = 0x00000001 -IDSI_FLAG_SECURE = 0x00000002 -IDSI_FLAG_PROXY = 0x00000004 -IDSI_FLAG_TUNNEL = 0x00000008 -INTERNET_PER_CONN_FLAGS = 1 -INTERNET_PER_CONN_PROXY_SERVER = 2 
-INTERNET_PER_CONN_PROXY_BYPASS = 3 -INTERNET_PER_CONN_AUTOCONFIG_URL = 4 -INTERNET_PER_CONN_AUTODISCOVERY_FLAGS = 5 -INTERNET_PER_CONN_AUTOCONFIG_SECONDARY_URL = 6 -INTERNET_PER_CONN_AUTOCONFIG_RELOAD_DELAY_MINS = 7 -INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_TIME = 8 -INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_URL = 9 -PROXY_TYPE_DIRECT = 0x00000001 -PROXY_TYPE_PROXY = 0x00000002 -PROXY_TYPE_AUTO_PROXY_URL = 0x00000004 -PROXY_TYPE_AUTO_DETECT = 0x00000008 -AUTO_PROXY_FLAG_USER_SET = 0x00000001 -AUTO_PROXY_FLAG_ALWAYS_DETECT = 0x00000002 -AUTO_PROXY_FLAG_DETECTION_RUN = 0x00000004 -AUTO_PROXY_FLAG_MIGRATED = 0x00000008 -AUTO_PROXY_FLAG_DONT_CACHE_PROXY_RESULT = 0x00000010 -AUTO_PROXY_FLAG_CACHE_INIT_RUN = 0x00000020 -AUTO_PROXY_FLAG_DETECTION_SUSPECT = 0x00000040 -ISO_FORCE_DISCONNECTED = 0x00000001 -INTERNET_RFC1123_FORMAT = 0 -INTERNET_RFC1123_BUFSIZE = 30 -ICU_ESCAPE = -2147483648 -ICU_ESCAPE_AUTHORITY = 0x00002000 -ICU_REJECT_USERPWD = 0x00004000 -ICU_USERNAME = 0x40000000 -ICU_NO_ENCODE = 0x20000000 -ICU_DECODE = 0x10000000 -ICU_NO_META = 0x08000000 -ICU_ENCODE_SPACES_ONLY = 0x04000000 -ICU_BROWSER_MODE = 0x02000000 -ICU_ENCODE_PERCENT = 0x00001000 -INTERNET_OPEN_TYPE_PRECONFIG = 0 -INTERNET_OPEN_TYPE_DIRECT = 1 -INTERNET_OPEN_TYPE_PROXY = 3 -INTERNET_OPEN_TYPE_PRECONFIG_WITH_NO_AUTOPROXY = 4 -PRE_CONFIG_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PRECONFIG -LOCAL_INTERNET_ACCESS = INTERNET_OPEN_TYPE_DIRECT -CERN_PROXY_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PROXY -INTERNET_SERVICE_FTP = 1 -INTERNET_SERVICE_GOPHER = 2 -INTERNET_SERVICE_HTTP = 3 -IRF_ASYNC = WININET_API_FLAG_ASYNC -IRF_SYNC = WININET_API_FLAG_SYNC -IRF_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT -IRF_NO_WAIT = 0x00000008 -ISO_GLOBAL = 0x00000001 -ISO_REGISTRY = 0x00000002 -ISO_VALID_FLAGS = ISO_GLOBAL | ISO_REGISTRY -INTERNET_OPTION_CALLBACK = 1 -INTERNET_OPTION_CONNECT_TIMEOUT = 2 -INTERNET_OPTION_CONNECT_RETRIES = 3 -INTERNET_OPTION_CONNECT_BACKOFF = 4 -INTERNET_OPTION_SEND_TIMEOUT = 5 
-INTERNET_OPTION_CONTROL_SEND_TIMEOUT = INTERNET_OPTION_SEND_TIMEOUT -INTERNET_OPTION_RECEIVE_TIMEOUT = 6 -INTERNET_OPTION_CONTROL_RECEIVE_TIMEOUT = INTERNET_OPTION_RECEIVE_TIMEOUT -INTERNET_OPTION_DATA_SEND_TIMEOUT = 7 -INTERNET_OPTION_DATA_RECEIVE_TIMEOUT = 8 -INTERNET_OPTION_HANDLE_TYPE = 9 -INTERNET_OPTION_LISTEN_TIMEOUT = 11 -INTERNET_OPTION_READ_BUFFER_SIZE = 12 -INTERNET_OPTION_WRITE_BUFFER_SIZE = 13 -INTERNET_OPTION_ASYNC_ID = 15 -INTERNET_OPTION_ASYNC_PRIORITY = 16 -INTERNET_OPTION_PARENT_HANDLE = 21 -INTERNET_OPTION_KEEP_CONNECTION = 22 -INTERNET_OPTION_REQUEST_FLAGS = 23 -INTERNET_OPTION_EXTENDED_ERROR = 24 -INTERNET_OPTION_OFFLINE_MODE = 26 -INTERNET_OPTION_CACHE_STREAM_HANDLE = 27 -INTERNET_OPTION_USERNAME = 28 -INTERNET_OPTION_PASSWORD = 29 -INTERNET_OPTION_ASYNC = 30 -INTERNET_OPTION_SECURITY_FLAGS = 31 -INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT = 32 -INTERNET_OPTION_DATAFILE_NAME = 33 -INTERNET_OPTION_URL = 34 -INTERNET_OPTION_SECURITY_CERTIFICATE = 35 -INTERNET_OPTION_SECURITY_KEY_BITNESS = 36 -INTERNET_OPTION_REFRESH = 37 -INTERNET_OPTION_PROXY = 38 -INTERNET_OPTION_SETTINGS_CHANGED = 39 -INTERNET_OPTION_VERSION = 40 -INTERNET_OPTION_USER_AGENT = 41 -INTERNET_OPTION_END_BROWSER_SESSION = 42 -INTERNET_OPTION_PROXY_USERNAME = 43 -INTERNET_OPTION_PROXY_PASSWORD = 44 -INTERNET_OPTION_CONTEXT_VALUE = 45 -INTERNET_OPTION_CONNECT_LIMIT = 46 -INTERNET_OPTION_SECURITY_SELECT_CLIENT_CERT = 47 -INTERNET_OPTION_POLICY = 48 -INTERNET_OPTION_DISCONNECTED_TIMEOUT = 49 -INTERNET_OPTION_CONNECTED_STATE = 50 -INTERNET_OPTION_IDLE_STATE = 51 -INTERNET_OPTION_OFFLINE_SEMANTICS = 52 -INTERNET_OPTION_SECONDARY_CACHE_KEY = 53 -INTERNET_OPTION_CALLBACK_FILTER = 54 -INTERNET_OPTION_CONNECT_TIME = 55 -INTERNET_OPTION_SEND_THROUGHPUT = 56 -INTERNET_OPTION_RECEIVE_THROUGHPUT = 57 -INTERNET_OPTION_REQUEST_PRIORITY = 58 -INTERNET_OPTION_HTTP_VERSION = 59 -INTERNET_OPTION_RESET_URLCACHE_SESSION = 60 -INTERNET_OPTION_ERROR_MASK = 62 -INTERNET_OPTION_FROM_CACHE_TIMEOUT = 63 
-INTERNET_OPTION_BYPASS_EDITED_ENTRY = 64 -INTERNET_OPTION_DIAGNOSTIC_SOCKET_INFO = 67 -INTERNET_OPTION_CODEPAGE = 68 -INTERNET_OPTION_CACHE_TIMESTAMPS = 69 -INTERNET_OPTION_DISABLE_AUTODIAL = 70 -INTERNET_OPTION_MAX_CONNS_PER_SERVER = 73 -INTERNET_OPTION_MAX_CONNS_PER_1_0_SERVER = 74 -INTERNET_OPTION_PER_CONNECTION_OPTION = 75 -INTERNET_OPTION_DIGEST_AUTH_UNLOAD = 76 -INTERNET_OPTION_IGNORE_OFFLINE = 77 -INTERNET_OPTION_IDENTITY = 78 -INTERNET_OPTION_REMOVE_IDENTITY = 79 -INTERNET_OPTION_ALTER_IDENTITY = 80 -INTERNET_OPTION_SUPPRESS_BEHAVIOR = 81 -INTERNET_OPTION_AUTODIAL_MODE = 82 -INTERNET_OPTION_AUTODIAL_CONNECTION = 83 -INTERNET_OPTION_CLIENT_CERT_CONTEXT = 84 -INTERNET_OPTION_AUTH_FLAGS = 85 -INTERNET_OPTION_COOKIES_3RD_PARTY = 86 -INTERNET_OPTION_DISABLE_PASSPORT_AUTH = 87 -INTERNET_OPTION_SEND_UTF8_SERVERNAME_TO_PROXY = 88 -INTERNET_OPTION_EXEMPT_CONNECTION_LIMIT = 89 -INTERNET_OPTION_ENABLE_PASSPORT_AUTH = 90 -INTERNET_OPTION_HIBERNATE_INACTIVE_WORKER_THREADS = 91 -INTERNET_OPTION_ACTIVATE_WORKER_THREADS = 92 -INTERNET_OPTION_RESTORE_WORKER_THREAD_DEFAULTS = 93 -INTERNET_OPTION_SOCKET_SEND_BUFFER_LENGTH = 94 -INTERNET_OPTION_PROXY_SETTINGS_CHANGED = 95 -INTERNET_FIRST_OPTION = INTERNET_OPTION_CALLBACK -INTERNET_LAST_OPTION = INTERNET_OPTION_PROXY_SETTINGS_CHANGED -INTERNET_PRIORITY_FOREGROUND = 1000 -INTERNET_HANDLE_TYPE_INTERNET = 1 -INTERNET_HANDLE_TYPE_CONNECT_FTP = 2 -INTERNET_HANDLE_TYPE_CONNECT_GOPHER = 3 -INTERNET_HANDLE_TYPE_CONNECT_HTTP = 4 -INTERNET_HANDLE_TYPE_FTP_FIND = 5 -INTERNET_HANDLE_TYPE_FTP_FIND_HTML = 6 -INTERNET_HANDLE_TYPE_FTP_FILE = 7 -INTERNET_HANDLE_TYPE_FTP_FILE_HTML = 8 -INTERNET_HANDLE_TYPE_GOPHER_FIND = 9 -INTERNET_HANDLE_TYPE_GOPHER_FIND_HTML = 10 -INTERNET_HANDLE_TYPE_GOPHER_FILE = 11 -INTERNET_HANDLE_TYPE_GOPHER_FILE_HTML = 12 -INTERNET_HANDLE_TYPE_HTTP_REQUEST = 13 -INTERNET_HANDLE_TYPE_FILE_REQUEST = 14 -AUTH_FLAG_DISABLE_NEGOTIATE = 0x00000001 -AUTH_FLAG_ENABLE_NEGOTIATE = 0x00000002 -SECURITY_FLAG_SECURE = 0x00000001 
-SECURITY_FLAG_STRENGTH_WEAK = 0x10000000 -SECURITY_FLAG_STRENGTH_MEDIUM = 0x40000000 -SECURITY_FLAG_STRENGTH_STRONG = 0x20000000 -SECURITY_FLAG_UNKNOWNBIT = -2147483648 -SECURITY_FLAG_FORTEZZA = 0x08000000 -SECURITY_FLAG_NORMALBITNESS = SECURITY_FLAG_STRENGTH_WEAK -SECURITY_FLAG_SSL = 0x00000002 -SECURITY_FLAG_SSL3 = 0x00000004 -SECURITY_FLAG_PCT = 0x00000008 -SECURITY_FLAG_PCT4 = 0x00000010 -SECURITY_FLAG_IETFSSL4 = 0x00000020 -SECURITY_FLAG_40BIT = SECURITY_FLAG_STRENGTH_WEAK -SECURITY_FLAG_128BIT = SECURITY_FLAG_STRENGTH_STRONG -SECURITY_FLAG_56BIT = SECURITY_FLAG_STRENGTH_MEDIUM -SECURITY_FLAG_IGNORE_REVOCATION = 0x00000080 -SECURITY_FLAG_IGNORE_UNKNOWN_CA = 0x00000100 -SECURITY_FLAG_IGNORE_WRONG_USAGE = 0x00000200 -SECURITY_FLAG_IGNORE_CERT_CN_INVALID = INTERNET_FLAG_IGNORE_CERT_CN_INVALID -SECURITY_FLAG_IGNORE_CERT_DATE_INVALID = INTERNET_FLAG_IGNORE_CERT_DATE_INVALID -SECURITY_FLAG_IGNORE_CERT_WRONG_USAGE = 0x00000200 -SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTPS = INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS -SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTP = INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP -SECURITY_SET_MASK = ( - SECURITY_FLAG_IGNORE_REVOCATION - | SECURITY_FLAG_IGNORE_UNKNOWN_CA - | SECURITY_FLAG_IGNORE_CERT_CN_INVALID - | SECURITY_FLAG_IGNORE_CERT_DATE_INVALID - | SECURITY_FLAG_IGNORE_WRONG_USAGE -) -AUTODIAL_MODE_NEVER = 1 -AUTODIAL_MODE_ALWAYS = 2 -AUTODIAL_MODE_NO_NETWORK_PRESENT = 4 -INTERNET_STATUS_RESOLVING_NAME = 10 -INTERNET_STATUS_NAME_RESOLVED = 11 -INTERNET_STATUS_CONNECTING_TO_SERVER = 20 -INTERNET_STATUS_CONNECTED_TO_SERVER = 21 -INTERNET_STATUS_SENDING_REQUEST = 30 -INTERNET_STATUS_REQUEST_SENT = 31 -INTERNET_STATUS_RECEIVING_RESPONSE = 40 -INTERNET_STATUS_RESPONSE_RECEIVED = 41 -INTERNET_STATUS_CTL_RESPONSE_RECEIVED = 42 -INTERNET_STATUS_PREFETCH = 43 -INTERNET_STATUS_CLOSING_CONNECTION = 50 -INTERNET_STATUS_CONNECTION_CLOSED = 51 -INTERNET_STATUS_HANDLE_CREATED = 60 -INTERNET_STATUS_HANDLE_CLOSING = 70 -INTERNET_STATUS_DETECTING_PROXY = 80 
-INTERNET_STATUS_REQUEST_COMPLETE = 100 -INTERNET_STATUS_REDIRECT = 110 -INTERNET_STATUS_INTERMEDIATE_RESPONSE = 120 -INTERNET_STATUS_USER_INPUT_REQUIRED = 140 -INTERNET_STATUS_STATE_CHANGE = 200 -INTERNET_STATUS_COOKIE_SENT = 320 -INTERNET_STATUS_COOKIE_RECEIVED = 321 -INTERNET_STATUS_PRIVACY_IMPACTED = 324 -INTERNET_STATUS_P3P_HEADER = 325 -INTERNET_STATUS_P3P_POLICYREF = 326 -INTERNET_STATUS_COOKIE_HISTORY = 327 -INTERNET_STATE_CONNECTED = 0x00000001 -INTERNET_STATE_DISCONNECTED = 0x00000002 -INTERNET_STATE_DISCONNECTED_BY_USER = 0x00000010 -INTERNET_STATE_IDLE = 0x00000100 -INTERNET_STATE_BUSY = 0x00000200 -FTP_TRANSFER_TYPE_UNKNOWN = 0x00000000 -FTP_TRANSFER_TYPE_ASCII = 0x00000001 -FTP_TRANSFER_TYPE_BINARY = 0x00000002 -FTP_TRANSFER_TYPE_MASK = FTP_TRANSFER_TYPE_ASCII | FTP_TRANSFER_TYPE_BINARY -MAX_GOPHER_DISPLAY_TEXT = 128 -MAX_GOPHER_SELECTOR_TEXT = 256 -MAX_GOPHER_HOST_NAME = INTERNET_MAX_HOST_NAME_LENGTH -MAX_GOPHER_LOCATOR_LENGTH = ( - 1 - + MAX_GOPHER_DISPLAY_TEXT - + 1 - + MAX_GOPHER_SELECTOR_TEXT - + 1 - + MAX_GOPHER_HOST_NAME - + 1 - + INTERNET_MAX_PORT_NUMBER_LENGTH - + 1 - + 1 - + 2 -) -GOPHER_TYPE_TEXT_FILE = 0x00000001 -GOPHER_TYPE_DIRECTORY = 0x00000002 -GOPHER_TYPE_CSO = 0x00000004 -GOPHER_TYPE_ERROR = 0x00000008 -GOPHER_TYPE_MAC_BINHEX = 0x00000010 -GOPHER_TYPE_DOS_ARCHIVE = 0x00000020 -GOPHER_TYPE_UNIX_UUENCODED = 0x00000040 -GOPHER_TYPE_INDEX_SERVER = 0x00000080 -GOPHER_TYPE_TELNET = 0x00000100 -GOPHER_TYPE_BINARY = 0x00000200 -GOPHER_TYPE_REDUNDANT = 0x00000400 -GOPHER_TYPE_TN3270 = 0x00000800 -GOPHER_TYPE_GIF = 0x00001000 -GOPHER_TYPE_IMAGE = 0x00002000 -GOPHER_TYPE_BITMAP = 0x00004000 -GOPHER_TYPE_MOVIE = 0x00008000 -GOPHER_TYPE_SOUND = 0x00010000 -GOPHER_TYPE_HTML = 0x00020000 -GOPHER_TYPE_PDF = 0x00040000 -GOPHER_TYPE_CALENDAR = 0x00080000 -GOPHER_TYPE_INLINE = 0x00100000 -GOPHER_TYPE_UNKNOWN = 0x20000000 -GOPHER_TYPE_ASK = 0x40000000 -GOPHER_TYPE_GOPHER_PLUS = -2147483648 -GOPHER_TYPE_FILE_MASK = ( - GOPHER_TYPE_TEXT_FILE - | 
GOPHER_TYPE_MAC_BINHEX - | GOPHER_TYPE_DOS_ARCHIVE - | GOPHER_TYPE_UNIX_UUENCODED - | GOPHER_TYPE_BINARY - | GOPHER_TYPE_GIF - | GOPHER_TYPE_IMAGE - | GOPHER_TYPE_BITMAP - | GOPHER_TYPE_MOVIE - | GOPHER_TYPE_SOUND - | GOPHER_TYPE_HTML - | GOPHER_TYPE_PDF - | GOPHER_TYPE_CALENDAR - | GOPHER_TYPE_INLINE -) -MAX_GOPHER_CATEGORY_NAME = 128 -MAX_GOPHER_ATTRIBUTE_NAME = 128 -MIN_GOPHER_ATTRIBUTE_LENGTH = 256 -GOPHER_ATTRIBUTE_ID_BASE = -1412641792 -GOPHER_CATEGORY_ID_ALL = GOPHER_ATTRIBUTE_ID_BASE + 1 -GOPHER_CATEGORY_ID_INFO = GOPHER_ATTRIBUTE_ID_BASE + 2 -GOPHER_CATEGORY_ID_ADMIN = GOPHER_ATTRIBUTE_ID_BASE + 3 -GOPHER_CATEGORY_ID_VIEWS = GOPHER_ATTRIBUTE_ID_BASE + 4 -GOPHER_CATEGORY_ID_ABSTRACT = GOPHER_ATTRIBUTE_ID_BASE + 5 -GOPHER_CATEGORY_ID_VERONICA = GOPHER_ATTRIBUTE_ID_BASE + 6 -GOPHER_CATEGORY_ID_ASK = GOPHER_ATTRIBUTE_ID_BASE + 7 -GOPHER_CATEGORY_ID_UNKNOWN = GOPHER_ATTRIBUTE_ID_BASE + 8 -GOPHER_ATTRIBUTE_ID_ALL = GOPHER_ATTRIBUTE_ID_BASE + 9 -GOPHER_ATTRIBUTE_ID_ADMIN = GOPHER_ATTRIBUTE_ID_BASE + 10 -GOPHER_ATTRIBUTE_ID_MOD_DATE = GOPHER_ATTRIBUTE_ID_BASE + 11 -GOPHER_ATTRIBUTE_ID_TTL = GOPHER_ATTRIBUTE_ID_BASE + 12 -GOPHER_ATTRIBUTE_ID_SCORE = GOPHER_ATTRIBUTE_ID_BASE + 13 -GOPHER_ATTRIBUTE_ID_RANGE = GOPHER_ATTRIBUTE_ID_BASE + 14 -GOPHER_ATTRIBUTE_ID_SITE = GOPHER_ATTRIBUTE_ID_BASE + 15 -GOPHER_ATTRIBUTE_ID_ORG = GOPHER_ATTRIBUTE_ID_BASE + 16 -GOPHER_ATTRIBUTE_ID_LOCATION = GOPHER_ATTRIBUTE_ID_BASE + 17 -GOPHER_ATTRIBUTE_ID_GEOG = GOPHER_ATTRIBUTE_ID_BASE + 18 -GOPHER_ATTRIBUTE_ID_TIMEZONE = GOPHER_ATTRIBUTE_ID_BASE + 19 -GOPHER_ATTRIBUTE_ID_PROVIDER = GOPHER_ATTRIBUTE_ID_BASE + 20 -GOPHER_ATTRIBUTE_ID_VERSION = GOPHER_ATTRIBUTE_ID_BASE + 21 -GOPHER_ATTRIBUTE_ID_ABSTRACT = GOPHER_ATTRIBUTE_ID_BASE + 22 -GOPHER_ATTRIBUTE_ID_VIEW = GOPHER_ATTRIBUTE_ID_BASE + 23 -GOPHER_ATTRIBUTE_ID_TREEWALK = GOPHER_ATTRIBUTE_ID_BASE + 24 -GOPHER_ATTRIBUTE_ID_UNKNOWN = GOPHER_ATTRIBUTE_ID_BASE + 25 -HTTP_MAJOR_VERSION = 1 -HTTP_MINOR_VERSION = 0 -HTTP_VERSIONA = "HTTP/1.0" 
-HTTP_VERSION = HTTP_VERSIONA -HTTP_QUERY_MIME_VERSION = 0 -HTTP_QUERY_CONTENT_TYPE = 1 -HTTP_QUERY_CONTENT_TRANSFER_ENCODING = 2 -HTTP_QUERY_CONTENT_ID = 3 -HTTP_QUERY_CONTENT_DESCRIPTION = 4 -HTTP_QUERY_CONTENT_LENGTH = 5 -HTTP_QUERY_CONTENT_LANGUAGE = 6 -HTTP_QUERY_ALLOW = 7 -HTTP_QUERY_PUBLIC = 8 -HTTP_QUERY_DATE = 9 -HTTP_QUERY_EXPIRES = 10 -HTTP_QUERY_LAST_MODIFIED = 11 -HTTP_QUERY_MESSAGE_ID = 12 -HTTP_QUERY_URI = 13 -HTTP_QUERY_DERIVED_FROM = 14 -HTTP_QUERY_COST = 15 -HTTP_QUERY_LINK = 16 -HTTP_QUERY_PRAGMA = 17 -HTTP_QUERY_VERSION = 18 -HTTP_QUERY_STATUS_CODE = 19 -HTTP_QUERY_STATUS_TEXT = 20 -HTTP_QUERY_RAW_HEADERS = 21 -HTTP_QUERY_RAW_HEADERS_CRLF = 22 -HTTP_QUERY_CONNECTION = 23 -HTTP_QUERY_ACCEPT = 24 -HTTP_QUERY_ACCEPT_CHARSET = 25 -HTTP_QUERY_ACCEPT_ENCODING = 26 -HTTP_QUERY_ACCEPT_LANGUAGE = 27 -HTTP_QUERY_AUTHORIZATION = 28 -HTTP_QUERY_CONTENT_ENCODING = 29 -HTTP_QUERY_FORWARDED = 30 -HTTP_QUERY_FROM = 31 -HTTP_QUERY_IF_MODIFIED_SINCE = 32 -HTTP_QUERY_LOCATION = 33 -HTTP_QUERY_ORIG_URI = 34 -HTTP_QUERY_REFERER = 35 -HTTP_QUERY_RETRY_AFTER = 36 -HTTP_QUERY_SERVER = 37 -HTTP_QUERY_TITLE = 38 -HTTP_QUERY_USER_AGENT = 39 -HTTP_QUERY_WWW_AUTHENTICATE = 40 -HTTP_QUERY_PROXY_AUTHENTICATE = 41 -HTTP_QUERY_ACCEPT_RANGES = 42 -HTTP_QUERY_SET_COOKIE = 43 -HTTP_QUERY_COOKIE = 44 -HTTP_QUERY_REQUEST_METHOD = 45 -HTTP_QUERY_REFRESH = 46 -HTTP_QUERY_CONTENT_DISPOSITION = 47 -HTTP_QUERY_AGE = 48 -HTTP_QUERY_CACHE_CONTROL = 49 -HTTP_QUERY_CONTENT_BASE = 50 -HTTP_QUERY_CONTENT_LOCATION = 51 -HTTP_QUERY_CONTENT_MD5 = 52 -HTTP_QUERY_CONTENT_RANGE = 53 -HTTP_QUERY_ETAG = 54 -HTTP_QUERY_HOST = 55 -HTTP_QUERY_IF_MATCH = 56 -HTTP_QUERY_IF_NONE_MATCH = 57 -HTTP_QUERY_IF_RANGE = 58 -HTTP_QUERY_IF_UNMODIFIED_SINCE = 59 -HTTP_QUERY_MAX_FORWARDS = 60 -HTTP_QUERY_PROXY_AUTHORIZATION = 61 -HTTP_QUERY_RANGE = 62 -HTTP_QUERY_TRANSFER_ENCODING = 63 -HTTP_QUERY_UPGRADE = 64 -HTTP_QUERY_VARY = 65 -HTTP_QUERY_VIA = 66 -HTTP_QUERY_WARNING = 67 -HTTP_QUERY_EXPECT = 68 
-HTTP_QUERY_PROXY_CONNECTION = 69 -HTTP_QUERY_UNLESS_MODIFIED_SINCE = 70 -HTTP_QUERY_ECHO_REQUEST = 71 -HTTP_QUERY_ECHO_REPLY = 72 -HTTP_QUERY_ECHO_HEADERS = 73 -HTTP_QUERY_ECHO_HEADERS_CRLF = 74 -HTTP_QUERY_PROXY_SUPPORT = 75 -HTTP_QUERY_AUTHENTICATION_INFO = 76 -HTTP_QUERY_PASSPORT_URLS = 77 -HTTP_QUERY_PASSPORT_CONFIG = 78 -HTTP_QUERY_MAX = 78 -HTTP_QUERY_CUSTOM = 65535 -HTTP_QUERY_FLAG_REQUEST_HEADERS = -2147483648 -HTTP_QUERY_FLAG_SYSTEMTIME = 0x40000000 -HTTP_QUERY_FLAG_NUMBER = 0x20000000 -HTTP_QUERY_FLAG_COALESCE = 0x10000000 -HTTP_QUERY_MODIFIER_FLAGS_MASK = ( - HTTP_QUERY_FLAG_REQUEST_HEADERS - | HTTP_QUERY_FLAG_SYSTEMTIME - | HTTP_QUERY_FLAG_NUMBER - | HTTP_QUERY_FLAG_COALESCE -) -HTTP_QUERY_HEADER_MASK = ~HTTP_QUERY_MODIFIER_FLAGS_MASK -HTTP_STATUS_CONTINUE = 100 -HTTP_STATUS_SWITCH_PROTOCOLS = 101 -HTTP_STATUS_OK = 200 -HTTP_STATUS_CREATED = 201 -HTTP_STATUS_ACCEPTED = 202 -HTTP_STATUS_PARTIAL = 203 -HTTP_STATUS_NO_CONTENT = 204 -HTTP_STATUS_RESET_CONTENT = 205 -HTTP_STATUS_PARTIAL_CONTENT = 206 -HTTP_STATUS_WEBDAV_MULTI_STATUS = 207 -HTTP_STATUS_AMBIGUOUS = 300 -HTTP_STATUS_MOVED = 301 -HTTP_STATUS_REDIRECT = 302 -HTTP_STATUS_REDIRECT_METHOD = 303 -HTTP_STATUS_NOT_MODIFIED = 304 -HTTP_STATUS_USE_PROXY = 305 -HTTP_STATUS_REDIRECT_KEEP_VERB = 307 -HTTP_STATUS_BAD_REQUEST = 400 -HTTP_STATUS_DENIED = 401 -HTTP_STATUS_PAYMENT_REQ = 402 -HTTP_STATUS_FORBIDDEN = 403 -HTTP_STATUS_NOT_FOUND = 404 -HTTP_STATUS_BAD_METHOD = 405 -HTTP_STATUS_NONE_ACCEPTABLE = 406 -HTTP_STATUS_PROXY_AUTH_REQ = 407 -HTTP_STATUS_REQUEST_TIMEOUT = 408 -HTTP_STATUS_CONFLICT = 409 -HTTP_STATUS_GONE = 410 -HTTP_STATUS_LENGTH_REQUIRED = 411 -HTTP_STATUS_PRECOND_FAILED = 412 -HTTP_STATUS_REQUEST_TOO_LARGE = 413 -HTTP_STATUS_URI_TOO_LONG = 414 -HTTP_STATUS_UNSUPPORTED_MEDIA = 415 -HTTP_STATUS_RETRY_WITH = 449 -HTTP_STATUS_SERVER_ERROR = 500 -HTTP_STATUS_NOT_SUPPORTED = 501 -HTTP_STATUS_BAD_GATEWAY = 502 -HTTP_STATUS_SERVICE_UNAVAIL = 503 -HTTP_STATUS_GATEWAY_TIMEOUT = 504 
-HTTP_STATUS_VERSION_NOT_SUP = 505 -HTTP_STATUS_FIRST = HTTP_STATUS_CONTINUE -HTTP_STATUS_LAST = HTTP_STATUS_VERSION_NOT_SUP -HTTP_ADDREQ_INDEX_MASK = 0x0000FFFF -HTTP_ADDREQ_FLAGS_MASK = -65536 -HTTP_ADDREQ_FLAG_ADD_IF_NEW = 0x10000000 -HTTP_ADDREQ_FLAG_ADD = 0x20000000 -HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA = 0x40000000 -HTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON = 0x01000000 -HTTP_ADDREQ_FLAG_COALESCE = HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA -HTTP_ADDREQ_FLAG_REPLACE = -2147483648 -HSR_ASYNC = WININET_API_FLAG_ASYNC -HSR_SYNC = WININET_API_FLAG_SYNC -HSR_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT -HSR_INITIATE = 0x00000008 -HSR_DOWNLOAD = 0x00000010 -HSR_CHUNKED = 0x00000020 -INTERNET_COOKIE_IS_SECURE = 0x01 -INTERNET_COOKIE_IS_SESSION = 0x02 -INTERNET_COOKIE_THIRD_PARTY = 0x10 -INTERNET_COOKIE_PROMPT_REQUIRED = 0x20 -INTERNET_COOKIE_EVALUATE_P3P = 0x40 -INTERNET_COOKIE_APPLY_P3P = 0x80 -INTERNET_COOKIE_P3P_ENABLED = 0x100 -INTERNET_COOKIE_IS_RESTRICTED = 0x200 -INTERNET_COOKIE_IE6 = 0x400 -INTERNET_COOKIE_IS_LEGACY = 0x800 -FLAG_ICC_FORCE_CONNECTION = 0x00000001 -FLAGS_ERROR_UI_FILTER_FOR_ERRORS = 0x01 -FLAGS_ERROR_UI_FLAGS_CHANGE_OPTIONS = 0x02 -FLAGS_ERROR_UI_FLAGS_GENERATE_DATA = 0x04 -FLAGS_ERROR_UI_FLAGS_NO_UI = 0x08 -FLAGS_ERROR_UI_SERIALIZE_DIALOGS = 0x10 -INTERNET_ERROR_BASE = 12000 -ERROR_INTERNET_OUT_OF_HANDLES = INTERNET_ERROR_BASE + 1 -ERROR_INTERNET_TIMEOUT = INTERNET_ERROR_BASE + 2 -ERROR_INTERNET_EXTENDED_ERROR = INTERNET_ERROR_BASE + 3 -ERROR_INTERNET_INTERNAL_ERROR = INTERNET_ERROR_BASE + 4 -ERROR_INTERNET_INVALID_URL = INTERNET_ERROR_BASE + 5 -ERROR_INTERNET_UNRECOGNIZED_SCHEME = INTERNET_ERROR_BASE + 6 -ERROR_INTERNET_NAME_NOT_RESOLVED = INTERNET_ERROR_BASE + 7 -ERROR_INTERNET_PROTOCOL_NOT_FOUND = INTERNET_ERROR_BASE + 8 -ERROR_INTERNET_INVALID_OPTION = INTERNET_ERROR_BASE + 9 -ERROR_INTERNET_BAD_OPTION_LENGTH = INTERNET_ERROR_BASE + 10 -ERROR_INTERNET_OPTION_NOT_SETTABLE = INTERNET_ERROR_BASE + 11 -ERROR_INTERNET_SHUTDOWN = INTERNET_ERROR_BASE + 
12 -ERROR_INTERNET_INCORRECT_USER_NAME = INTERNET_ERROR_BASE + 13 -ERROR_INTERNET_INCORRECT_PASSWORD = INTERNET_ERROR_BASE + 14 -ERROR_INTERNET_LOGIN_FAILURE = INTERNET_ERROR_BASE + 15 -ERROR_INTERNET_INVALID_OPERATION = INTERNET_ERROR_BASE + 16 -ERROR_INTERNET_OPERATION_CANCELLED = INTERNET_ERROR_BASE + 17 -ERROR_INTERNET_INCORRECT_HANDLE_TYPE = INTERNET_ERROR_BASE + 18 -ERROR_INTERNET_INCORRECT_HANDLE_STATE = INTERNET_ERROR_BASE + 19 -ERROR_INTERNET_NOT_PROXY_REQUEST = INTERNET_ERROR_BASE + 20 -ERROR_INTERNET_REGISTRY_VALUE_NOT_FOUND = INTERNET_ERROR_BASE + 21 -ERROR_INTERNET_BAD_REGISTRY_PARAMETER = INTERNET_ERROR_BASE + 22 -ERROR_INTERNET_NO_DIRECT_ACCESS = INTERNET_ERROR_BASE + 23 -ERROR_INTERNET_NO_CONTEXT = INTERNET_ERROR_BASE + 24 -ERROR_INTERNET_NO_CALLBACK = INTERNET_ERROR_BASE + 25 -ERROR_INTERNET_REQUEST_PENDING = INTERNET_ERROR_BASE + 26 -ERROR_INTERNET_INCORRECT_FORMAT = INTERNET_ERROR_BASE + 27 -ERROR_INTERNET_ITEM_NOT_FOUND = INTERNET_ERROR_BASE + 28 -ERROR_INTERNET_CANNOT_CONNECT = INTERNET_ERROR_BASE + 29 -ERROR_INTERNET_CONNECTION_ABORTED = INTERNET_ERROR_BASE + 30 -ERROR_INTERNET_CONNECTION_RESET = INTERNET_ERROR_BASE + 31 -ERROR_INTERNET_FORCE_RETRY = INTERNET_ERROR_BASE + 32 -ERROR_INTERNET_INVALID_PROXY_REQUEST = INTERNET_ERROR_BASE + 33 -ERROR_INTERNET_NEED_UI = INTERNET_ERROR_BASE + 34 -ERROR_INTERNET_HANDLE_EXISTS = INTERNET_ERROR_BASE + 36 -ERROR_INTERNET_SEC_CERT_DATE_INVALID = INTERNET_ERROR_BASE + 37 -ERROR_INTERNET_SEC_CERT_CN_INVALID = INTERNET_ERROR_BASE + 38 -ERROR_INTERNET_HTTP_TO_HTTPS_ON_REDIR = INTERNET_ERROR_BASE + 39 -ERROR_INTERNET_HTTPS_TO_HTTP_ON_REDIR = INTERNET_ERROR_BASE + 40 -ERROR_INTERNET_MIXED_SECURITY = INTERNET_ERROR_BASE + 41 -ERROR_INTERNET_CHG_POST_IS_NON_SECURE = INTERNET_ERROR_BASE + 42 -ERROR_INTERNET_POST_IS_NON_SECURE = INTERNET_ERROR_BASE + 43 -ERROR_INTERNET_CLIENT_AUTH_CERT_NEEDED = INTERNET_ERROR_BASE + 44 -ERROR_INTERNET_INVALID_CA = INTERNET_ERROR_BASE + 45 -ERROR_INTERNET_CLIENT_AUTH_NOT_SETUP = 
INTERNET_ERROR_BASE + 46 -ERROR_INTERNET_ASYNC_THREAD_FAILED = INTERNET_ERROR_BASE + 47 -ERROR_INTERNET_REDIRECT_SCHEME_CHANGE = INTERNET_ERROR_BASE + 48 -ERROR_INTERNET_DIALOG_PENDING = INTERNET_ERROR_BASE + 49 -ERROR_INTERNET_RETRY_DIALOG = INTERNET_ERROR_BASE + 50 -ERROR_INTERNET_HTTPS_HTTP_SUBMIT_REDIR = INTERNET_ERROR_BASE + 52 -ERROR_INTERNET_INSERT_CDROM = INTERNET_ERROR_BASE + 53 -ERROR_INTERNET_FORTEZZA_LOGIN_NEEDED = INTERNET_ERROR_BASE + 54 -ERROR_INTERNET_SEC_CERT_ERRORS = INTERNET_ERROR_BASE + 55 -ERROR_INTERNET_SEC_CERT_NO_REV = INTERNET_ERROR_BASE + 56 -ERROR_INTERNET_SEC_CERT_REV_FAILED = INTERNET_ERROR_BASE + 57 -ERROR_FTP_TRANSFER_IN_PROGRESS = INTERNET_ERROR_BASE + 110 -ERROR_FTP_DROPPED = INTERNET_ERROR_BASE + 111 -ERROR_FTP_NO_PASSIVE_MODE = INTERNET_ERROR_BASE + 112 -ERROR_GOPHER_PROTOCOL_ERROR = INTERNET_ERROR_BASE + 130 -ERROR_GOPHER_NOT_FILE = INTERNET_ERROR_BASE + 131 -ERROR_GOPHER_DATA_ERROR = INTERNET_ERROR_BASE + 132 -ERROR_GOPHER_END_OF_DATA = INTERNET_ERROR_BASE + 133 -ERROR_GOPHER_INVALID_LOCATOR = INTERNET_ERROR_BASE + 134 -ERROR_GOPHER_INCORRECT_LOCATOR_TYPE = INTERNET_ERROR_BASE + 135 -ERROR_GOPHER_NOT_GOPHER_PLUS = INTERNET_ERROR_BASE + 136 -ERROR_GOPHER_ATTRIBUTE_NOT_FOUND = INTERNET_ERROR_BASE + 137 -ERROR_GOPHER_UNKNOWN_LOCATOR = INTERNET_ERROR_BASE + 138 -ERROR_HTTP_HEADER_NOT_FOUND = INTERNET_ERROR_BASE + 150 -ERROR_HTTP_DOWNLEVEL_SERVER = INTERNET_ERROR_BASE + 151 -ERROR_HTTP_INVALID_SERVER_RESPONSE = INTERNET_ERROR_BASE + 152 -ERROR_HTTP_INVALID_HEADER = INTERNET_ERROR_BASE + 153 -ERROR_HTTP_INVALID_QUERY_REQUEST = INTERNET_ERROR_BASE + 154 -ERROR_HTTP_HEADER_ALREADY_EXISTS = INTERNET_ERROR_BASE + 155 -ERROR_HTTP_REDIRECT_FAILED = INTERNET_ERROR_BASE + 156 -ERROR_HTTP_NOT_REDIRECTED = INTERNET_ERROR_BASE + 160 -ERROR_HTTP_COOKIE_NEEDS_CONFIRMATION = INTERNET_ERROR_BASE + 161 -ERROR_HTTP_COOKIE_DECLINED = INTERNET_ERROR_BASE + 162 -ERROR_HTTP_REDIRECT_NEEDS_CONFIRMATION = INTERNET_ERROR_BASE + 168 
-ERROR_INTERNET_SECURITY_CHANNEL_ERROR = INTERNET_ERROR_BASE + 157 -ERROR_INTERNET_UNABLE_TO_CACHE_FILE = INTERNET_ERROR_BASE + 158 -ERROR_INTERNET_TCPIP_NOT_INSTALLED = INTERNET_ERROR_BASE + 159 -ERROR_INTERNET_DISCONNECTED = INTERNET_ERROR_BASE + 163 -ERROR_INTERNET_SERVER_UNREACHABLE = INTERNET_ERROR_BASE + 164 -ERROR_INTERNET_PROXY_SERVER_UNREACHABLE = INTERNET_ERROR_BASE + 165 -ERROR_INTERNET_BAD_AUTO_PROXY_SCRIPT = INTERNET_ERROR_BASE + 166 -ERROR_INTERNET_UNABLE_TO_DOWNLOAD_SCRIPT = INTERNET_ERROR_BASE + 167 -ERROR_INTERNET_SEC_INVALID_CERT = INTERNET_ERROR_BASE + 169 -ERROR_INTERNET_SEC_CERT_REVOKED = INTERNET_ERROR_BASE + 170 -ERROR_INTERNET_FAILED_DUETOSECURITYCHECK = INTERNET_ERROR_BASE + 171 -ERROR_INTERNET_NOT_INITIALIZED = INTERNET_ERROR_BASE + 172 -ERROR_INTERNET_NEED_MSN_SSPI_PKG = INTERNET_ERROR_BASE + 173 -ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = INTERNET_ERROR_BASE + 174 -INTERNET_ERROR_LAST = ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY -NORMAL_CACHE_ENTRY = 0x00000001 -STICKY_CACHE_ENTRY = 0x00000004 -EDITED_CACHE_ENTRY = 0x00000008 -TRACK_OFFLINE_CACHE_ENTRY = 0x00000010 -TRACK_ONLINE_CACHE_ENTRY = 0x00000020 -SPARSE_CACHE_ENTRY = 0x00010000 -COOKIE_CACHE_ENTRY = 0x00100000 -URLHISTORY_CACHE_ENTRY = 0x00200000 -URLCACHE_FIND_DEFAULT_FILTER = ( - NORMAL_CACHE_ENTRY - | COOKIE_CACHE_ENTRY - | URLHISTORY_CACHE_ENTRY - | TRACK_OFFLINE_CACHE_ENTRY - | TRACK_ONLINE_CACHE_ENTRY - | STICKY_CACHE_ENTRY -) -CACHEGROUP_ATTRIBUTE_GET_ALL = -1 -CACHEGROUP_ATTRIBUTE_BASIC = 0x00000001 -CACHEGROUP_ATTRIBUTE_FLAG = 0x00000002 -CACHEGROUP_ATTRIBUTE_TYPE = 0x00000004 -CACHEGROUP_ATTRIBUTE_QUOTA = 0x00000008 -CACHEGROUP_ATTRIBUTE_GROUPNAME = 0x00000010 -CACHEGROUP_ATTRIBUTE_STORAGE = 0x00000020 -CACHEGROUP_FLAG_NONPURGEABLE = 0x00000001 -CACHEGROUP_FLAG_GIDONLY = 0x00000004 -CACHEGROUP_FLAG_FLUSHURL_ONDELETE = 0x00000002 -CACHEGROUP_SEARCH_ALL = 0x00000000 -CACHEGROUP_SEARCH_BYURL = 0x00000001 -CACHEGROUP_TYPE_INVALID = 0x00000001 
-CACHEGROUP_READWRITE_MASK = ( - CACHEGROUP_ATTRIBUTE_TYPE - | CACHEGROUP_ATTRIBUTE_QUOTA - | CACHEGROUP_ATTRIBUTE_GROUPNAME - | CACHEGROUP_ATTRIBUTE_STORAGE -) -GROUPNAME_MAX_LENGTH = 120 -GROUP_OWNER_STORAGE_SIZE = 4 -CACHE_ENTRY_ATTRIBUTE_FC = 0x00000004 -CACHE_ENTRY_HITRATE_FC = 0x00000010 -CACHE_ENTRY_MODTIME_FC = 0x00000040 -CACHE_ENTRY_EXPTIME_FC = 0x00000080 -CACHE_ENTRY_ACCTIME_FC = 0x00000100 -CACHE_ENTRY_SYNCTIME_FC = 0x00000200 -CACHE_ENTRY_HEADERINFO_FC = 0x00000400 -CACHE_ENTRY_EXEMPT_DELTA_FC = 0x00000800 -INTERNET_CACHE_GROUP_ADD = 0 -INTERNET_CACHE_GROUP_REMOVE = 1 -INTERNET_DIAL_FORCE_PROMPT = 0x2000 -INTERNET_DIAL_SHOW_OFFLINE = 0x4000 -INTERNET_DIAL_UNATTENDED = 0x8000 -INTERENT_GOONLINE_REFRESH = 0x00000001 -INTERENT_GOONLINE_MASK = 0x00000001 -INTERNET_AUTODIAL_FORCE_ONLINE = 1 -INTERNET_AUTODIAL_FORCE_UNATTENDED = 2 -INTERNET_AUTODIAL_FAILIFSECURITYCHECK = 4 -INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT = 8 -INTERNET_AUTODIAL_FLAGS_MASK = ( - INTERNET_AUTODIAL_FORCE_ONLINE - | INTERNET_AUTODIAL_FORCE_UNATTENDED - | INTERNET_AUTODIAL_FAILIFSECURITYCHECK - | INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT -) -PROXY_AUTO_DETECT_TYPE_DHCP = 1 -PROXY_AUTO_DETECT_TYPE_DNS_A = 2 -INTERNET_CONNECTION_MODEM = 0x01 -INTERNET_CONNECTION_LAN = 0x02 -INTERNET_CONNECTION_PROXY = 0x04 -INTERNET_CONNECTION_MODEM_BUSY = 0x08 -INTERNET_RAS_INSTALLED = 0x10 -INTERNET_CONNECTION_OFFLINE = 0x20 -INTERNET_CONNECTION_CONFIGURED = 0x40 -INTERNET_CUSTOMDIAL_CONNECT = 0 -INTERNET_CUSTOMDIAL_UNATTENDED = 1 -INTERNET_CUSTOMDIAL_DISCONNECT = 2 -INTERNET_CUSTOMDIAL_SHOWOFFLINE = 4 -INTERNET_CUSTOMDIAL_SAFE_FOR_UNATTENDED = 1 -INTERNET_CUSTOMDIAL_WILL_SUPPLY_STATE = 2 -INTERNET_CUSTOMDIAL_CAN_HANGUP = 4 -INTERNET_DIALSTATE_DISCONNECTED = 1 -INTERNET_IDENTITY_FLAG_PRIVATE_CACHE = 0x01 -INTERNET_IDENTITY_FLAG_SHARED_CACHE = 0x02 -INTERNET_IDENTITY_FLAG_CLEAR_DATA = 0x04 -INTERNET_IDENTITY_FLAG_CLEAR_COOKIES = 0x08 -INTERNET_IDENTITY_FLAG_CLEAR_HISTORY = 0x10 
-INTERNET_IDENTITY_FLAG_CLEAR_CONTENT = 0x20 -INTERNET_SUPPRESS_RESET_ALL = 0x00 -INTERNET_SUPPRESS_COOKIE_POLICY = 0x01 -INTERNET_SUPPRESS_COOKIE_POLICY_RESET = 0x02 -PRIVACY_TEMPLATE_NO_COOKIES = 0 -PRIVACY_TEMPLATE_HIGH = 1 -PRIVACY_TEMPLATE_MEDIUM_HIGH = 2 -PRIVACY_TEMPLATE_MEDIUM = 3 -PRIVACY_TEMPLATE_MEDIUM_LOW = 4 -PRIVACY_TEMPLATE_LOW = 5 -PRIVACY_TEMPLATE_CUSTOM = 100 -PRIVACY_TEMPLATE_ADVANCED = 101 -PRIVACY_TEMPLATE_MAX = PRIVACY_TEMPLATE_LOW -PRIVACY_TYPE_FIRST_PARTY = 0 -PRIVACY_TYPE_THIRD_PARTY = 1 - -# Generated by h2py from winhttp.h -WINHTTP_FLAG_ASYNC = 0x10000000 -WINHTTP_FLAG_SECURE = 0x00800000 -WINHTTP_FLAG_ESCAPE_PERCENT = 0x00000004 -WINHTTP_FLAG_NULL_CODEPAGE = 0x00000008 -WINHTTP_FLAG_BYPASS_PROXY_CACHE = 0x00000100 -WINHTTP_FLAG_REFRESH = WINHTTP_FLAG_BYPASS_PROXY_CACHE -WINHTTP_FLAG_ESCAPE_DISABLE = 0x00000040 -WINHTTP_FLAG_ESCAPE_DISABLE_QUERY = 0x00000080 -INTERNET_SCHEME_HTTP = 1 -INTERNET_SCHEME_HTTPS = 2 -WINHTTP_AUTOPROXY_AUTO_DETECT = 0x00000001 -WINHTTP_AUTOPROXY_CONFIG_URL = 0x00000002 -WINHTTP_AUTOPROXY_RUN_INPROCESS = 0x00010000 -WINHTTP_AUTOPROXY_RUN_OUTPROCESS_ONLY = 0x00020000 -WINHTTP_AUTO_DETECT_TYPE_DHCP = 0x00000001 -WINHTTP_AUTO_DETECT_TYPE_DNS_A = 0x00000002 -WINHTTP_TIME_FORMAT_BUFSIZE = 62 -WINHTTP_ACCESS_TYPE_DEFAULT_PROXY = 0 -WINHTTP_ACCESS_TYPE_NO_PROXY = 1 -WINHTTP_ACCESS_TYPE_NAMED_PROXY = 3 -WINHTTP_OPTION_CALLBACK = 1 -WINHTTP_OPTION_RESOLVE_TIMEOUT = 2 -WINHTTP_OPTION_CONNECT_TIMEOUT = 3 -WINHTTP_OPTION_CONNECT_RETRIES = 4 -WINHTTP_OPTION_SEND_TIMEOUT = 5 -WINHTTP_OPTION_RECEIVE_TIMEOUT = 6 -WINHTTP_OPTION_RECEIVE_RESPONSE_TIMEOUT = 7 -WINHTTP_OPTION_HANDLE_TYPE = 9 -WINHTTP_OPTION_READ_BUFFER_SIZE = 12 -WINHTTP_OPTION_WRITE_BUFFER_SIZE = 13 -WINHTTP_OPTION_PARENT_HANDLE = 21 -WINHTTP_OPTION_EXTENDED_ERROR = 24 -WINHTTP_OPTION_SECURITY_FLAGS = 31 -WINHTTP_OPTION_SECURITY_CERTIFICATE_STRUCT = 32 -WINHTTP_OPTION_URL = 34 -WINHTTP_OPTION_SECURITY_KEY_BITNESS = 36 -WINHTTP_OPTION_PROXY = 38 
-WINHTTP_OPTION_USER_AGENT = 41 -WINHTTP_OPTION_CONTEXT_VALUE = 45 -WINHTTP_OPTION_CLIENT_CERT_CONTEXT = 47 -WINHTTP_OPTION_REQUEST_PRIORITY = 58 -WINHTTP_OPTION_HTTP_VERSION = 59 -WINHTTP_OPTION_DISABLE_FEATURE = 63 -WINHTTP_OPTION_CODEPAGE = 68 -WINHTTP_OPTION_MAX_CONNS_PER_SERVER = 73 -WINHTTP_OPTION_MAX_CONNS_PER_1_0_SERVER = 74 -WINHTTP_OPTION_AUTOLOGON_POLICY = 77 -WINHTTP_OPTION_SERVER_CERT_CONTEXT = 78 -WINHTTP_OPTION_ENABLE_FEATURE = 79 -WINHTTP_OPTION_WORKER_THREAD_COUNT = 80 -WINHTTP_OPTION_PASSPORT_COBRANDING_TEXT = 81 -WINHTTP_OPTION_PASSPORT_COBRANDING_URL = 82 -WINHTTP_OPTION_CONFIGURE_PASSPORT_AUTH = 83 -WINHTTP_OPTION_SECURE_PROTOCOLS = 84 -WINHTTP_OPTION_ENABLETRACING = 85 -WINHTTP_OPTION_PASSPORT_SIGN_OUT = 86 -WINHTTP_OPTION_PASSPORT_RETURN_URL = 87 -WINHTTP_OPTION_REDIRECT_POLICY = 88 -WINHTTP_OPTION_MAX_HTTP_AUTOMATIC_REDIRECTS = 89 -WINHTTP_OPTION_MAX_HTTP_STATUS_CONTINUE = 90 -WINHTTP_OPTION_MAX_RESPONSE_HEADER_SIZE = 91 -WINHTTP_OPTION_MAX_RESPONSE_DRAIN_SIZE = 92 -WINHTTP_OPTION_CONNECTION_INFO = 93 -WINHTTP_OPTION_CLIENT_CERT_ISSUER_LIST = 94 -WINHTTP_OPTION_SPN = 96 -WINHTTP_OPTION_GLOBAL_PROXY_CREDS = 97 -WINHTTP_OPTION_GLOBAL_SERVER_CREDS = 98 -WINHTTP_OPTION_UNLOAD_NOTIFY_EVENT = 99 -WINHTTP_OPTION_REJECT_USERPWD_IN_URL = 100 -WINHTTP_OPTION_USE_GLOBAL_SERVER_CREDENTIALS = 101 -WINHTTP_LAST_OPTION = WINHTTP_OPTION_USE_GLOBAL_SERVER_CREDENTIALS -WINHTTP_OPTION_USERNAME = 0x1000 -WINHTTP_OPTION_PASSWORD = 0x1001 -WINHTTP_OPTION_PROXY_USERNAME = 0x1002 -WINHTTP_OPTION_PROXY_PASSWORD = 0x1003 -WINHTTP_CONNS_PER_SERVER_UNLIMITED = -1 -WINHTTP_AUTOLOGON_SECURITY_LEVEL_MEDIUM = 0 -WINHTTP_AUTOLOGON_SECURITY_LEVEL_LOW = 1 -WINHTTP_AUTOLOGON_SECURITY_LEVEL_HIGH = 2 -WINHTTP_AUTOLOGON_SECURITY_LEVEL_DEFAULT = WINHTTP_AUTOLOGON_SECURITY_LEVEL_MEDIUM -WINHTTP_OPTION_REDIRECT_POLICY_NEVER = 0 -WINHTTP_OPTION_REDIRECT_POLICY_DISALLOW_HTTPS_TO_HTTP = 1 -WINHTTP_OPTION_REDIRECT_POLICY_ALWAYS = 2 -WINHTTP_OPTION_REDIRECT_POLICY_LAST = 
WINHTTP_OPTION_REDIRECT_POLICY_ALWAYS -WINHTTP_OPTION_REDIRECT_POLICY_DEFAULT = ( - WINHTTP_OPTION_REDIRECT_POLICY_DISALLOW_HTTPS_TO_HTTP -) -WINHTTP_DISABLE_PASSPORT_AUTH = 0x00000000 -WINHTTP_ENABLE_PASSPORT_AUTH = 0x10000000 -WINHTTP_DISABLE_PASSPORT_KEYRING = 0x20000000 -WINHTTP_ENABLE_PASSPORT_KEYRING = 0x40000000 -WINHTTP_DISABLE_COOKIES = 0x00000001 -WINHTTP_DISABLE_REDIRECTS = 0x00000002 -WINHTTP_DISABLE_AUTHENTICATION = 0x00000004 -WINHTTP_DISABLE_KEEP_ALIVE = 0x00000008 -WINHTTP_ENABLE_SSL_REVOCATION = 0x00000001 -WINHTTP_ENABLE_SSL_REVERT_IMPERSONATION = 0x00000002 -WINHTTP_DISABLE_SPN_SERVER_PORT = 0x00000000 -WINHTTP_ENABLE_SPN_SERVER_PORT = 0x00000001 -WINHTTP_OPTION_SPN_MASK = WINHTTP_ENABLE_SPN_SERVER_PORT -WINHTTP_HANDLE_TYPE_SESSION = 1 -WINHTTP_HANDLE_TYPE_CONNECT = 2 -WINHTTP_HANDLE_TYPE_REQUEST = 3 -WINHTTP_AUTH_SCHEME_BASIC = 0x00000001 -WINHTTP_AUTH_SCHEME_NTLM = 0x00000002 -WINHTTP_AUTH_SCHEME_PASSPORT = 0x00000004 -WINHTTP_AUTH_SCHEME_DIGEST = 0x00000008 -WINHTTP_AUTH_SCHEME_NEGOTIATE = 0x00000010 -WINHTTP_AUTH_TARGET_SERVER = 0x00000000 -WINHTTP_AUTH_TARGET_PROXY = 0x00000001 -WINHTTP_CALLBACK_STATUS_FLAG_CERT_REV_FAILED = 0x00000001 -WINHTTP_CALLBACK_STATUS_FLAG_INVALID_CERT = 0x00000002 -WINHTTP_CALLBACK_STATUS_FLAG_CERT_REVOKED = 0x00000004 -WINHTTP_CALLBACK_STATUS_FLAG_INVALID_CA = 0x00000008 -WINHTTP_CALLBACK_STATUS_FLAG_CERT_CN_INVALID = 0x00000010 -WINHTTP_CALLBACK_STATUS_FLAG_CERT_DATE_INVALID = 0x00000020 -WINHTTP_CALLBACK_STATUS_FLAG_CERT_WRONG_USAGE = 0x00000040 -WINHTTP_CALLBACK_STATUS_FLAG_SECURITY_CHANNEL_ERROR = -2147483648 -WINHTTP_FLAG_SECURE_PROTOCOL_SSL2 = 0x00000008 -WINHTTP_FLAG_SECURE_PROTOCOL_SSL3 = 0x00000020 -WINHTTP_FLAG_SECURE_PROTOCOL_TLS1 = 0x00000080 -WINHTTP_FLAG_SECURE_PROTOCOL_ALL = ( - WINHTTP_FLAG_SECURE_PROTOCOL_SSL2 - | WINHTTP_FLAG_SECURE_PROTOCOL_SSL3 - | WINHTTP_FLAG_SECURE_PROTOCOL_TLS1 -) -WINHTTP_CALLBACK_STATUS_RESOLVING_NAME = 0x00000001 -WINHTTP_CALLBACK_STATUS_NAME_RESOLVED = 0x00000002 
-WINHTTP_CALLBACK_STATUS_CONNECTING_TO_SERVER = 0x00000004 -WINHTTP_CALLBACK_STATUS_CONNECTED_TO_SERVER = 0x00000008 -WINHTTP_CALLBACK_STATUS_SENDING_REQUEST = 0x00000010 -WINHTTP_CALLBACK_STATUS_REQUEST_SENT = 0x00000020 -WINHTTP_CALLBACK_STATUS_RECEIVING_RESPONSE = 0x00000040 -WINHTTP_CALLBACK_STATUS_RESPONSE_RECEIVED = 0x00000080 -WINHTTP_CALLBACK_STATUS_CLOSING_CONNECTION = 0x00000100 -WINHTTP_CALLBACK_STATUS_CONNECTION_CLOSED = 0x00000200 -WINHTTP_CALLBACK_STATUS_HANDLE_CREATED = 0x00000400 -WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING = 0x00000800 -WINHTTP_CALLBACK_STATUS_DETECTING_PROXY = 0x00001000 -WINHTTP_CALLBACK_STATUS_REDIRECT = 0x00004000 -WINHTTP_CALLBACK_STATUS_INTERMEDIATE_RESPONSE = 0x00008000 -WINHTTP_CALLBACK_STATUS_SECURE_FAILURE = 0x00010000 -WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE = 0x00020000 -WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE = 0x00040000 -WINHTTP_CALLBACK_STATUS_READ_COMPLETE = 0x00080000 -WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE = 0x00100000 -WINHTTP_CALLBACK_STATUS_REQUEST_ERROR = 0x00200000 -WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE = 0x00400000 -API_RECEIVE_RESPONSE = 1 -API_QUERY_DATA_AVAILABLE = 2 -API_READ_DATA = 3 -API_WRITE_DATA = 4 -API_SEND_REQUEST = 5 -WINHTTP_CALLBACK_FLAG_RESOLVE_NAME = ( - WINHTTP_CALLBACK_STATUS_RESOLVING_NAME | WINHTTP_CALLBACK_STATUS_NAME_RESOLVED -) -WINHTTP_CALLBACK_FLAG_CONNECT_TO_SERVER = ( - WINHTTP_CALLBACK_STATUS_CONNECTING_TO_SERVER - | WINHTTP_CALLBACK_STATUS_CONNECTED_TO_SERVER -) -WINHTTP_CALLBACK_FLAG_SEND_REQUEST = ( - WINHTTP_CALLBACK_STATUS_SENDING_REQUEST | WINHTTP_CALLBACK_STATUS_REQUEST_SENT -) -WINHTTP_CALLBACK_FLAG_RECEIVE_RESPONSE = ( - WINHTTP_CALLBACK_STATUS_RECEIVING_RESPONSE - | WINHTTP_CALLBACK_STATUS_RESPONSE_RECEIVED -) -WINHTTP_CALLBACK_FLAG_CLOSE_CONNECTION = ( - WINHTTP_CALLBACK_STATUS_CLOSING_CONNECTION - | WINHTTP_CALLBACK_STATUS_CONNECTION_CLOSED -) -WINHTTP_CALLBACK_FLAG_HANDLES = ( - WINHTTP_CALLBACK_STATUS_HANDLE_CREATED | WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING -) 
-WINHTTP_CALLBACK_FLAG_DETECTING_PROXY = WINHTTP_CALLBACK_STATUS_DETECTING_PROXY -WINHTTP_CALLBACK_FLAG_REDIRECT = WINHTTP_CALLBACK_STATUS_REDIRECT -WINHTTP_CALLBACK_FLAG_INTERMEDIATE_RESPONSE = ( - WINHTTP_CALLBACK_STATUS_INTERMEDIATE_RESPONSE -) -WINHTTP_CALLBACK_FLAG_SECURE_FAILURE = WINHTTP_CALLBACK_STATUS_SECURE_FAILURE -WINHTTP_CALLBACK_FLAG_SENDREQUEST_COMPLETE = ( - WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE -) -WINHTTP_CALLBACK_FLAG_HEADERS_AVAILABLE = WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE -WINHTTP_CALLBACK_FLAG_DATA_AVAILABLE = WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE -WINHTTP_CALLBACK_FLAG_READ_COMPLETE = WINHTTP_CALLBACK_STATUS_READ_COMPLETE -WINHTTP_CALLBACK_FLAG_WRITE_COMPLETE = WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE -WINHTTP_CALLBACK_FLAG_REQUEST_ERROR = WINHTTP_CALLBACK_STATUS_REQUEST_ERROR -WINHTTP_CALLBACK_FLAG_ALL_COMPLETIONS = ( - WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE - | WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE - | WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE - | WINHTTP_CALLBACK_STATUS_READ_COMPLETE - | WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE - | WINHTTP_CALLBACK_STATUS_REQUEST_ERROR -) -WINHTTP_CALLBACK_FLAG_ALL_NOTIFICATIONS = -1 -WINHTTP_QUERY_MIME_VERSION = 0 -WINHTTP_QUERY_CONTENT_TYPE = 1 -WINHTTP_QUERY_CONTENT_TRANSFER_ENCODING = 2 -WINHTTP_QUERY_CONTENT_ID = 3 -WINHTTP_QUERY_CONTENT_DESCRIPTION = 4 -WINHTTP_QUERY_CONTENT_LENGTH = 5 -WINHTTP_QUERY_CONTENT_LANGUAGE = 6 -WINHTTP_QUERY_ALLOW = 7 -WINHTTP_QUERY_PUBLIC = 8 -WINHTTP_QUERY_DATE = 9 -WINHTTP_QUERY_EXPIRES = 10 -WINHTTP_QUERY_LAST_MODIFIED = 11 -WINHTTP_QUERY_MESSAGE_ID = 12 -WINHTTP_QUERY_URI = 13 -WINHTTP_QUERY_DERIVED_FROM = 14 -WINHTTP_QUERY_COST = 15 -WINHTTP_QUERY_LINK = 16 -WINHTTP_QUERY_PRAGMA = 17 -WINHTTP_QUERY_VERSION = 18 -WINHTTP_QUERY_STATUS_CODE = 19 -WINHTTP_QUERY_STATUS_TEXT = 20 -WINHTTP_QUERY_RAW_HEADERS = 21 -WINHTTP_QUERY_RAW_HEADERS_CRLF = 22 -WINHTTP_QUERY_CONNECTION = 23 -WINHTTP_QUERY_ACCEPT = 24 -WINHTTP_QUERY_ACCEPT_CHARSET = 25 
-WINHTTP_QUERY_ACCEPT_ENCODING = 26 -WINHTTP_QUERY_ACCEPT_LANGUAGE = 27 -WINHTTP_QUERY_AUTHORIZATION = 28 -WINHTTP_QUERY_CONTENT_ENCODING = 29 -WINHTTP_QUERY_FORWARDED = 30 -WINHTTP_QUERY_FROM = 31 -WINHTTP_QUERY_IF_MODIFIED_SINCE = 32 -WINHTTP_QUERY_LOCATION = 33 -WINHTTP_QUERY_ORIG_URI = 34 -WINHTTP_QUERY_REFERER = 35 -WINHTTP_QUERY_RETRY_AFTER = 36 -WINHTTP_QUERY_SERVER = 37 -WINHTTP_QUERY_TITLE = 38 -WINHTTP_QUERY_USER_AGENT = 39 -WINHTTP_QUERY_WWW_AUTHENTICATE = 40 -WINHTTP_QUERY_PROXY_AUTHENTICATE = 41 -WINHTTP_QUERY_ACCEPT_RANGES = 42 -WINHTTP_QUERY_SET_COOKIE = 43 -WINHTTP_QUERY_COOKIE = 44 -WINHTTP_QUERY_REQUEST_METHOD = 45 -WINHTTP_QUERY_REFRESH = 46 -WINHTTP_QUERY_CONTENT_DISPOSITION = 47 -WINHTTP_QUERY_AGE = 48 -WINHTTP_QUERY_CACHE_CONTROL = 49 -WINHTTP_QUERY_CONTENT_BASE = 50 -WINHTTP_QUERY_CONTENT_LOCATION = 51 -WINHTTP_QUERY_CONTENT_MD5 = 52 -WINHTTP_QUERY_CONTENT_RANGE = 53 -WINHTTP_QUERY_ETAG = 54 -WINHTTP_QUERY_HOST = 55 -WINHTTP_QUERY_IF_MATCH = 56 -WINHTTP_QUERY_IF_NONE_MATCH = 57 -WINHTTP_QUERY_IF_RANGE = 58 -WINHTTP_QUERY_IF_UNMODIFIED_SINCE = 59 -WINHTTP_QUERY_MAX_FORWARDS = 60 -WINHTTP_QUERY_PROXY_AUTHORIZATION = 61 -WINHTTP_QUERY_RANGE = 62 -WINHTTP_QUERY_TRANSFER_ENCODING = 63 -WINHTTP_QUERY_UPGRADE = 64 -WINHTTP_QUERY_VARY = 65 -WINHTTP_QUERY_VIA = 66 -WINHTTP_QUERY_WARNING = 67 -WINHTTP_QUERY_EXPECT = 68 -WINHTTP_QUERY_PROXY_CONNECTION = 69 -WINHTTP_QUERY_UNLESS_MODIFIED_SINCE = 70 -WINHTTP_QUERY_PROXY_SUPPORT = 75 -WINHTTP_QUERY_AUTHENTICATION_INFO = 76 -WINHTTP_QUERY_PASSPORT_URLS = 77 -WINHTTP_QUERY_PASSPORT_CONFIG = 78 -WINHTTP_QUERY_MAX = 78 -WINHTTP_QUERY_CUSTOM = 65535 -WINHTTP_QUERY_FLAG_REQUEST_HEADERS = -2147483648 -WINHTTP_QUERY_FLAG_SYSTEMTIME = 0x40000000 -WINHTTP_QUERY_FLAG_NUMBER = 0x20000000 -WINHTTP_ADDREQ_INDEX_MASK = 0x0000FFFF -WINHTTP_ADDREQ_FLAGS_MASK = -65536 -WINHTTP_ADDREQ_FLAG_ADD_IF_NEW = 0x10000000 -WINHTTP_ADDREQ_FLAG_ADD = 0x20000000 -WINHTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA = 0x40000000 
-WINHTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON = 0x01000000 -WINHTTP_ADDREQ_FLAG_COALESCE = WINHTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA -WINHTTP_ADDREQ_FLAG_REPLACE = -2147483648 -WINHTTP_IGNORE_REQUEST_TOTAL_LENGTH = 0 -WINHTTP_ERROR_BASE = 12000 -ERROR_WINHTTP_OUT_OF_HANDLES = WINHTTP_ERROR_BASE + 1 -ERROR_WINHTTP_TIMEOUT = WINHTTP_ERROR_BASE + 2 -ERROR_WINHTTP_INTERNAL_ERROR = WINHTTP_ERROR_BASE + 4 -ERROR_WINHTTP_INVALID_URL = WINHTTP_ERROR_BASE + 5 -ERROR_WINHTTP_UNRECOGNIZED_SCHEME = WINHTTP_ERROR_BASE + 6 -ERROR_WINHTTP_NAME_NOT_RESOLVED = WINHTTP_ERROR_BASE + 7 -ERROR_WINHTTP_INVALID_OPTION = WINHTTP_ERROR_BASE + 9 -ERROR_WINHTTP_OPTION_NOT_SETTABLE = WINHTTP_ERROR_BASE + 11 -ERROR_WINHTTP_SHUTDOWN = WINHTTP_ERROR_BASE + 12 -ERROR_WINHTTP_LOGIN_FAILURE = WINHTTP_ERROR_BASE + 15 -ERROR_WINHTTP_OPERATION_CANCELLED = WINHTTP_ERROR_BASE + 17 -ERROR_WINHTTP_INCORRECT_HANDLE_TYPE = WINHTTP_ERROR_BASE + 18 -ERROR_WINHTTP_INCORRECT_HANDLE_STATE = WINHTTP_ERROR_BASE + 19 -ERROR_WINHTTP_CANNOT_CONNECT = WINHTTP_ERROR_BASE + 29 -ERROR_WINHTTP_CONNECTION_ERROR = WINHTTP_ERROR_BASE + 30 -ERROR_WINHTTP_RESEND_REQUEST = WINHTTP_ERROR_BASE + 32 -ERROR_WINHTTP_CLIENT_AUTH_CERT_NEEDED = WINHTTP_ERROR_BASE + 44 -ERROR_WINHTTP_CANNOT_CALL_BEFORE_OPEN = WINHTTP_ERROR_BASE + 100 -ERROR_WINHTTP_CANNOT_CALL_BEFORE_SEND = WINHTTP_ERROR_BASE + 101 -ERROR_WINHTTP_CANNOT_CALL_AFTER_SEND = WINHTTP_ERROR_BASE + 102 -ERROR_WINHTTP_CANNOT_CALL_AFTER_OPEN = WINHTTP_ERROR_BASE + 103 -ERROR_WINHTTP_HEADER_NOT_FOUND = WINHTTP_ERROR_BASE + 150 -ERROR_WINHTTP_INVALID_SERVER_RESPONSE = WINHTTP_ERROR_BASE + 152 -ERROR_WINHTTP_INVALID_HEADER = WINHTTP_ERROR_BASE + 153 -ERROR_WINHTTP_INVALID_QUERY_REQUEST = WINHTTP_ERROR_BASE + 154 -ERROR_WINHTTP_HEADER_ALREADY_EXISTS = WINHTTP_ERROR_BASE + 155 -ERROR_WINHTTP_REDIRECT_FAILED = WINHTTP_ERROR_BASE + 156 -ERROR_WINHTTP_AUTO_PROXY_SERVICE_ERROR = WINHTTP_ERROR_BASE + 178 -ERROR_WINHTTP_BAD_AUTO_PROXY_SCRIPT = WINHTTP_ERROR_BASE + 166 
-ERROR_WINHTTP_UNABLE_TO_DOWNLOAD_SCRIPT = WINHTTP_ERROR_BASE + 167 -ERROR_WINHTTP_NOT_INITIALIZED = WINHTTP_ERROR_BASE + 172 -ERROR_WINHTTP_SECURE_FAILURE = WINHTTP_ERROR_BASE + 175 -ERROR_WINHTTP_SECURE_CERT_DATE_INVALID = WINHTTP_ERROR_BASE + 37 -ERROR_WINHTTP_SECURE_CERT_CN_INVALID = WINHTTP_ERROR_BASE + 38 -ERROR_WINHTTP_SECURE_INVALID_CA = WINHTTP_ERROR_BASE + 45 -ERROR_WINHTTP_SECURE_CERT_REV_FAILED = WINHTTP_ERROR_BASE + 57 -ERROR_WINHTTP_SECURE_CHANNEL_ERROR = WINHTTP_ERROR_BASE + 157 -ERROR_WINHTTP_SECURE_INVALID_CERT = WINHTTP_ERROR_BASE + 169 -ERROR_WINHTTP_SECURE_CERT_REVOKED = WINHTTP_ERROR_BASE + 170 -ERROR_WINHTTP_SECURE_CERT_WRONG_USAGE = WINHTTP_ERROR_BASE + 179 -ERROR_WINHTTP_AUTODETECTION_FAILED = WINHTTP_ERROR_BASE + 180 -ERROR_WINHTTP_HEADER_COUNT_EXCEEDED = WINHTTP_ERROR_BASE + 181 -ERROR_WINHTTP_HEADER_SIZE_OVERFLOW = WINHTTP_ERROR_BASE + 182 -ERROR_WINHTTP_CHUNKED_ENCODING_HEADER_SIZE_OVERFLOW = WINHTTP_ERROR_BASE + 183 -ERROR_WINHTTP_RESPONSE_DRAIN_OVERFLOW = WINHTTP_ERROR_BASE + 184 -ERROR_WINHTTP_CLIENT_CERT_NO_PRIVATE_KEY = WINHTTP_ERROR_BASE + 185 -ERROR_WINHTTP_CLIENT_CERT_NO_ACCESS_PRIVATE_KEY = WINHTTP_ERROR_BASE + 186 -WINHTTP_ERROR_LAST = WINHTTP_ERROR_BASE + 186 - -WINHTTP_NO_PROXY_NAME = None -WINHTTP_NO_PROXY_BYPASS = None -WINHTTP_NO_REFERER = None -WINHTTP_DEFAULT_ACCEPT_TYPES = None -WINHTTP_NO_ADDITIONAL_HEADERS = None -WINHTTP_NO_REQUEST_DATA = None diff --git a/lib/win32/lib/win32netcon.py b/lib/win32/lib/win32netcon.py deleted file mode 100644 index 63e97a14..00000000 --- a/lib/win32/lib/win32netcon.py +++ /dev/null @@ -1,627 +0,0 @@ -# Generated by h2py from lmaccess.h - -# Included from lmcons.h -CNLEN = 15 -LM20_CNLEN = 15 -DNLEN = CNLEN -LM20_DNLEN = LM20_CNLEN -UNCLEN = CNLEN + 2 -LM20_UNCLEN = LM20_CNLEN + 2 -NNLEN = 80 -LM20_NNLEN = 12 -RMLEN = UNCLEN + 1 + NNLEN -LM20_RMLEN = LM20_UNCLEN + 1 + LM20_NNLEN -SNLEN = 80 -LM20_SNLEN = 15 -STXTLEN = 256 -LM20_STXTLEN = 63 -PATHLEN = 256 -LM20_PATHLEN = 256 -DEVLEN = 80 
-LM20_DEVLEN = 8 -EVLEN = 16 -UNLEN = 256 -LM20_UNLEN = 20 -GNLEN = UNLEN -LM20_GNLEN = LM20_UNLEN -PWLEN = 256 -LM20_PWLEN = 14 -SHPWLEN = 8 -CLTYPE_LEN = 12 -MAXCOMMENTSZ = 256 -LM20_MAXCOMMENTSZ = 48 -QNLEN = NNLEN -LM20_QNLEN = LM20_NNLEN -ALERTSZ = 128 -NETBIOS_NAME_LEN = 16 -CRYPT_KEY_LEN = 7 -CRYPT_TXT_LEN = 8 -ENCRYPTED_PWLEN = 16 -SESSION_PWLEN = 24 -SESSION_CRYPT_KLEN = 21 -PARMNUM_ALL = 0 -PARM_ERROR_NONE = 0 -PARMNUM_BASE_INFOLEVEL = 1000 -NULL = 0 -PLATFORM_ID_DOS = 300 -PLATFORM_ID_OS2 = 400 -PLATFORM_ID_NT = 500 -PLATFORM_ID_OSF = 600 -PLATFORM_ID_VMS = 700 -MAX_LANMAN_MESSAGE_ID = 5799 -UF_SCRIPT = 1 -UF_ACCOUNTDISABLE = 2 -UF_HOMEDIR_REQUIRED = 8 -UF_LOCKOUT = 16 -UF_PASSWD_NOTREQD = 32 -UF_PASSWD_CANT_CHANGE = 64 -UF_TEMP_DUPLICATE_ACCOUNT = 256 -UF_NORMAL_ACCOUNT = 512 -UF_INTERDOMAIN_TRUST_ACCOUNT = 2048 -UF_WORKSTATION_TRUST_ACCOUNT = 4096 -UF_SERVER_TRUST_ACCOUNT = 8192 -UF_MACHINE_ACCOUNT_MASK = ( - UF_INTERDOMAIN_TRUST_ACCOUNT - | UF_WORKSTATION_TRUST_ACCOUNT - | UF_SERVER_TRUST_ACCOUNT -) -UF_ACCOUNT_TYPE_MASK = ( - UF_TEMP_DUPLICATE_ACCOUNT - | UF_NORMAL_ACCOUNT - | UF_INTERDOMAIN_TRUST_ACCOUNT - | UF_WORKSTATION_TRUST_ACCOUNT - | UF_SERVER_TRUST_ACCOUNT -) -UF_DONT_EXPIRE_PASSWD = 65536 -UF_MNS_LOGON_ACCOUNT = 131072 -UF_SETTABLE_BITS = ( - UF_SCRIPT - | UF_ACCOUNTDISABLE - | UF_LOCKOUT - | UF_HOMEDIR_REQUIRED - | UF_PASSWD_NOTREQD - | UF_PASSWD_CANT_CHANGE - | UF_ACCOUNT_TYPE_MASK - | UF_DONT_EXPIRE_PASSWD - | UF_MNS_LOGON_ACCOUNT -) -FILTER_TEMP_DUPLICATE_ACCOUNT = 1 -FILTER_NORMAL_ACCOUNT = 2 -FILTER_INTERDOMAIN_TRUST_ACCOUNT = 8 -FILTER_WORKSTATION_TRUST_ACCOUNT = 16 -FILTER_SERVER_TRUST_ACCOUNT = 32 -LG_INCLUDE_INDIRECT = 1 -AF_OP_PRINT = 1 -AF_OP_COMM = 2 -AF_OP_SERVER = 4 -AF_OP_ACCOUNTS = 8 -AF_SETTABLE_BITS = AF_OP_PRINT | AF_OP_COMM | AF_OP_SERVER | AF_OP_ACCOUNTS -UAS_ROLE_STANDALONE = 0 -UAS_ROLE_MEMBER = 1 -UAS_ROLE_BACKUP = 2 -UAS_ROLE_PRIMARY = 3 -USER_NAME_PARMNUM = 1 -USER_PASSWORD_PARMNUM = 3 -USER_PASSWORD_AGE_PARMNUM = 
4 -USER_PRIV_PARMNUM = 5 -USER_HOME_DIR_PARMNUM = 6 -USER_COMMENT_PARMNUM = 7 -USER_FLAGS_PARMNUM = 8 -USER_SCRIPT_PATH_PARMNUM = 9 -USER_AUTH_FLAGS_PARMNUM = 10 -USER_FULL_NAME_PARMNUM = 11 -USER_USR_COMMENT_PARMNUM = 12 -USER_PARMS_PARMNUM = 13 -USER_WORKSTATIONS_PARMNUM = 14 -USER_LAST_LOGON_PARMNUM = 15 -USER_LAST_LOGOFF_PARMNUM = 16 -USER_ACCT_EXPIRES_PARMNUM = 17 -USER_MAX_STORAGE_PARMNUM = 18 -USER_UNITS_PER_WEEK_PARMNUM = 19 -USER_LOGON_HOURS_PARMNUM = 20 -USER_PAD_PW_COUNT_PARMNUM = 21 -USER_NUM_LOGONS_PARMNUM = 22 -USER_LOGON_SERVER_PARMNUM = 23 -USER_COUNTRY_CODE_PARMNUM = 24 -USER_CODE_PAGE_PARMNUM = 25 -USER_PRIMARY_GROUP_PARMNUM = 51 -USER_PROFILE = 52 -USER_PROFILE_PARMNUM = 52 -USER_HOME_DIR_DRIVE_PARMNUM = 53 -USER_NAME_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_NAME_PARMNUM -USER_PASSWORD_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_PASSWORD_PARMNUM -USER_PASSWORD_AGE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_PASSWORD_AGE_PARMNUM -USER_PRIV_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_PRIV_PARMNUM -USER_HOME_DIR_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_HOME_DIR_PARMNUM -USER_COMMENT_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_COMMENT_PARMNUM -USER_FLAGS_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_FLAGS_PARMNUM -USER_SCRIPT_PATH_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_SCRIPT_PATH_PARMNUM -USER_AUTH_FLAGS_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_AUTH_FLAGS_PARMNUM -USER_FULL_NAME_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_FULL_NAME_PARMNUM -USER_USR_COMMENT_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_USR_COMMENT_PARMNUM -USER_PARMS_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_PARMS_PARMNUM -USER_WORKSTATIONS_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_WORKSTATIONS_PARMNUM -USER_LAST_LOGON_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_LAST_LOGON_PARMNUM -USER_LAST_LOGOFF_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_LAST_LOGOFF_PARMNUM -USER_ACCT_EXPIRES_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_ACCT_EXPIRES_PARMNUM -USER_MAX_STORAGE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + 
USER_MAX_STORAGE_PARMNUM -USER_UNITS_PER_WEEK_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_UNITS_PER_WEEK_PARMNUM -USER_LOGON_HOURS_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_LOGON_HOURS_PARMNUM -USER_PAD_PW_COUNT_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_PAD_PW_COUNT_PARMNUM -USER_NUM_LOGONS_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_NUM_LOGONS_PARMNUM -USER_LOGON_SERVER_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_LOGON_SERVER_PARMNUM -USER_COUNTRY_CODE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_COUNTRY_CODE_PARMNUM -USER_CODE_PAGE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_CODE_PAGE_PARMNUM -USER_PRIMARY_GROUP_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_PRIMARY_GROUP_PARMNUM -USER_HOME_DIR_DRIVE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + USER_HOME_DIR_DRIVE_PARMNUM -NULL_USERSETINFO_PASSWD = " " -UNITS_PER_DAY = 24 -UNITS_PER_WEEK = UNITS_PER_DAY * 7 -USER_PRIV_MASK = 3 -USER_PRIV_GUEST = 0 -USER_PRIV_USER = 1 -USER_PRIV_ADMIN = 2 -MAX_PASSWD_LEN = PWLEN -DEF_MIN_PWLEN = 6 -DEF_PWUNIQUENESS = 5 -DEF_MAX_PWHIST = 8 -DEF_MAX_BADPW = 0 -VALIDATED_LOGON = 0 -PASSWORD_EXPIRED = 2 -NON_VALIDATED_LOGON = 3 -VALID_LOGOFF = 1 -MODALS_MIN_PASSWD_LEN_PARMNUM = 1 -MODALS_MAX_PASSWD_AGE_PARMNUM = 2 -MODALS_MIN_PASSWD_AGE_PARMNUM = 3 -MODALS_FORCE_LOGOFF_PARMNUM = 4 -MODALS_PASSWD_HIST_LEN_PARMNUM = 5 -MODALS_ROLE_PARMNUM = 6 -MODALS_PRIMARY_PARMNUM = 7 -MODALS_DOMAIN_NAME_PARMNUM = 8 -MODALS_DOMAIN_ID_PARMNUM = 9 -MODALS_LOCKOUT_DURATION_PARMNUM = 10 -MODALS_LOCKOUT_OBSERVATION_WINDOW_PARMNUM = 11 -MODALS_LOCKOUT_THRESHOLD_PARMNUM = 12 -MODALS_MIN_PASSWD_LEN_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_MIN_PASSWD_LEN_PARMNUM -MODALS_MAX_PASSWD_AGE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_MAX_PASSWD_AGE_PARMNUM -MODALS_MIN_PASSWD_AGE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_MIN_PASSWD_AGE_PARMNUM -MODALS_FORCE_LOGOFF_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_FORCE_LOGOFF_PARMNUM -MODALS_PASSWD_HIST_LEN_INFOLEVEL = ( - PARMNUM_BASE_INFOLEVEL + MODALS_PASSWD_HIST_LEN_PARMNUM -) 
-MODALS_ROLE_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_ROLE_PARMNUM -MODALS_PRIMARY_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_PRIMARY_PARMNUM -MODALS_DOMAIN_NAME_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_DOMAIN_NAME_PARMNUM -MODALS_DOMAIN_ID_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + MODALS_DOMAIN_ID_PARMNUM -GROUPIDMASK = 32768 -GROUP_ALL_PARMNUM = 0 -GROUP_NAME_PARMNUM = 1 -GROUP_COMMENT_PARMNUM = 2 -GROUP_ATTRIBUTES_PARMNUM = 3 -GROUP_ALL_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + GROUP_ALL_PARMNUM -GROUP_NAME_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + GROUP_NAME_PARMNUM -GROUP_COMMENT_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + GROUP_COMMENT_PARMNUM -GROUP_ATTRIBUTES_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + GROUP_ATTRIBUTES_PARMNUM -LOCALGROUP_NAME_PARMNUM = 1 -LOCALGROUP_COMMENT_PARMNUM = 2 -MAXPERMENTRIES = 64 -ACCESS_NONE = 0 -ACCESS_READ = 1 -ACCESS_WRITE = 2 -ACCESS_CREATE = 4 -ACCESS_EXEC = 8 -ACCESS_DELETE = 16 -ACCESS_ATRIB = 32 -ACCESS_PERM = 64 -ACCESS_GROUP = 32768 -ACCESS_AUDIT = 1 -ACCESS_SUCCESS_OPEN = 16 -ACCESS_SUCCESS_WRITE = 32 -ACCESS_SUCCESS_DELETE = 64 -ACCESS_SUCCESS_ACL = 128 -ACCESS_SUCCESS_MASK = 240 -ACCESS_FAIL_OPEN = 256 -ACCESS_FAIL_WRITE = 512 -ACCESS_FAIL_DELETE = 1024 -ACCESS_FAIL_ACL = 2048 -ACCESS_FAIL_MASK = 3840 -ACCESS_FAIL_SHIFT = 4 -ACCESS_RESOURCE_NAME_PARMNUM = 1 -ACCESS_ATTR_PARMNUM = 2 -ACCESS_COUNT_PARMNUM = 3 -ACCESS_ACCESS_LIST_PARMNUM = 4 -ACCESS_RESOURCE_NAME_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + ACCESS_RESOURCE_NAME_PARMNUM -ACCESS_ATTR_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + ACCESS_ATTR_PARMNUM -ACCESS_COUNT_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + ACCESS_COUNT_PARMNUM -ACCESS_ACCESS_LIST_INFOLEVEL = PARMNUM_BASE_INFOLEVEL + ACCESS_ACCESS_LIST_PARMNUM -ACCESS_LETTERS = "RWCXDAP " -NETLOGON_CONTROL_QUERY = 1 -NETLOGON_CONTROL_REPLICATE = 2 -NETLOGON_CONTROL_SYNCHRONIZE = 3 -NETLOGON_CONTROL_PDC_REPLICATE = 4 -NETLOGON_CONTROL_REDISCOVER = 5 -NETLOGON_CONTROL_TC_QUERY = 6 -NETLOGON_CONTROL_TRANSPORT_NOTIFY = 7 -NETLOGON_CONTROL_FIND_USER = 8 
-NETLOGON_CONTROL_UNLOAD_NETLOGON_DLL = 65531 -NETLOGON_CONTROL_BACKUP_CHANGE_LOG = 65532 -NETLOGON_CONTROL_TRUNCATE_LOG = 65533 -NETLOGON_CONTROL_SET_DBFLAG = 65534 -NETLOGON_CONTROL_BREAKPOINT = 65535 -NETLOGON_REPLICATION_NEEDED = 1 -NETLOGON_REPLICATION_IN_PROGRESS = 2 -NETLOGON_FULL_SYNC_REPLICATION = 4 -NETLOGON_REDO_NEEDED = 8 - -###################### -# Manual stuff - -TEXT = lambda x: x - -MAX_PREFERRED_LENGTH = -1 -PARM_ERROR_UNKNOWN = -1 -MESSAGE_FILENAME = TEXT("NETMSG") -OS2MSG_FILENAME = TEXT("BASE") -HELP_MSG_FILENAME = TEXT("NETH") -BACKUP_MSG_FILENAME = TEXT("BAK.MSG") -TIMEQ_FOREVER = -1 -USER_MAXSTORAGE_UNLIMITED = -1 -USER_NO_LOGOFF = -1 -DEF_MAX_PWAGE = TIMEQ_FOREVER -DEF_MIN_PWAGE = 0 -DEF_FORCE_LOGOFF = -1 -ONE_DAY = 1 * 24 * 3600 -GROUP_SPECIALGRP_USERS = "USERS" -GROUP_SPECIALGRP_ADMINS = "ADMINS" -GROUP_SPECIALGRP_GUESTS = "GUESTS" -GROUP_SPECIALGRP_LOCAL = "LOCAL" -ACCESS_ALL = ( - ACCESS_READ - | ACCESS_WRITE - | ACCESS_CREATE - | ACCESS_EXEC - | ACCESS_DELETE - | ACCESS_ATRIB - | ACCESS_PERM -) - -# From lmserver.h -SV_PLATFORM_ID_OS2 = 400 -SV_PLATFORM_ID_NT = 500 -MAJOR_VERSION_MASK = 15 -SV_TYPE_WORKSTATION = 1 -SV_TYPE_SERVER = 2 -SV_TYPE_SQLSERVER = 4 -SV_TYPE_DOMAIN_CTRL = 8 -SV_TYPE_DOMAIN_BAKCTRL = 16 -SV_TYPE_TIME_SOURCE = 32 -SV_TYPE_AFP = 64 -SV_TYPE_NOVELL = 128 -SV_TYPE_DOMAIN_MEMBER = 256 -SV_TYPE_PRINTQ_SERVER = 512 -SV_TYPE_DIALIN_SERVER = 1024 -SV_TYPE_XENIX_SERVER = 2048 -SV_TYPE_SERVER_UNIX = SV_TYPE_XENIX_SERVER -SV_TYPE_NT = 4096 -SV_TYPE_WFW = 8192 -SV_TYPE_SERVER_MFPN = 16384 -SV_TYPE_SERVER_NT = 32768 -SV_TYPE_POTENTIAL_BROWSER = 65536 -SV_TYPE_BACKUP_BROWSER = 131072 -SV_TYPE_MASTER_BROWSER = 262144 -SV_TYPE_DOMAIN_MASTER = 524288 -SV_TYPE_SERVER_OSF = 1048576 -SV_TYPE_SERVER_VMS = 2097152 -SV_TYPE_WINDOWS = 4194304 -SV_TYPE_DFS = 8388608 -SV_TYPE_CLUSTER_NT = 16777216 -SV_TYPE_DCE = 268435456 -SV_TYPE_ALTERNATE_XPORT = 536870912 -SV_TYPE_LOCAL_LIST_ONLY = 1073741824 -SV_TYPE_DOMAIN_ENUM = -2147483648 
-SV_TYPE_ALL = -1 -SV_NODISC = -1 -SV_USERSECURITY = 1 -SV_SHARESECURITY = 0 -SV_HIDDEN = 1 -SV_VISIBLE = 0 -SV_PLATFORM_ID_PARMNUM = 101 -SV_NAME_PARMNUM = 102 -SV_VERSION_MAJOR_PARMNUM = 103 -SV_VERSION_MINOR_PARMNUM = 104 -SV_TYPE_PARMNUM = 105 -SV_COMMENT_PARMNUM = 5 -SV_USERS_PARMNUM = 107 -SV_DISC_PARMNUM = 10 -SV_HIDDEN_PARMNUM = 16 -SV_ANNOUNCE_PARMNUM = 17 -SV_ANNDELTA_PARMNUM = 18 -SV_USERPATH_PARMNUM = 112 -SV_ULIST_MTIME_PARMNUM = 401 -SV_GLIST_MTIME_PARMNUM = 402 -SV_ALIST_MTIME_PARMNUM = 403 -SV_ALERTS_PARMNUM = 11 -SV_SECURITY_PARMNUM = 405 -SV_NUMADMIN_PARMNUM = 406 -SV_LANMASK_PARMNUM = 407 -SV_GUESTACC_PARMNUM = 408 -SV_CHDEVQ_PARMNUM = 410 -SV_CHDEVJOBS_PARMNUM = 411 -SV_CONNECTIONS_PARMNUM = 412 -SV_SHARES_PARMNUM = 413 -SV_OPENFILES_PARMNUM = 414 -SV_SESSREQS_PARMNUM = 417 -SV_ACTIVELOCKS_PARMNUM = 419 -SV_NUMREQBUF_PARMNUM = 420 -SV_NUMBIGBUF_PARMNUM = 422 -SV_NUMFILETASKS_PARMNUM = 423 -SV_ALERTSCHED_PARMNUM = 37 -SV_ERRORALERT_PARMNUM = 38 -SV_LOGONALERT_PARMNUM = 39 -SV_ACCESSALERT_PARMNUM = 40 -SV_DISKALERT_PARMNUM = 41 -SV_NETIOALERT_PARMNUM = 42 -SV_MAXAUDITSZ_PARMNUM = 43 -SV_SRVHEURISTICS_PARMNUM = 431 -SV_SESSOPENS_PARMNUM = 501 -SV_SESSVCS_PARMNUM = 502 -SV_OPENSEARCH_PARMNUM = 503 -SV_SIZREQBUF_PARMNUM = 504 -SV_INITWORKITEMS_PARMNUM = 505 -SV_MAXWORKITEMS_PARMNUM = 506 -SV_RAWWORKITEMS_PARMNUM = 507 -SV_IRPSTACKSIZE_PARMNUM = 508 -SV_MAXRAWBUFLEN_PARMNUM = 509 -SV_SESSUSERS_PARMNUM = 510 -SV_SESSCONNS_PARMNUM = 511 -SV_MAXNONPAGEDMEMORYUSAGE_PARMNUM = 512 -SV_MAXPAGEDMEMORYUSAGE_PARMNUM = 513 -SV_ENABLESOFTCOMPAT_PARMNUM = 514 -SV_ENABLEFORCEDLOGOFF_PARMNUM = 515 -SV_TIMESOURCE_PARMNUM = 516 -SV_ACCEPTDOWNLEVELAPIS_PARMNUM = 517 -SV_LMANNOUNCE_PARMNUM = 518 -SV_DOMAIN_PARMNUM = 519 -SV_MAXCOPYREADLEN_PARMNUM = 520 -SV_MAXCOPYWRITELEN_PARMNUM = 521 -SV_MINKEEPSEARCH_PARMNUM = 522 -SV_MAXKEEPSEARCH_PARMNUM = 523 -SV_MINKEEPCOMPLSEARCH_PARMNUM = 524 -SV_MAXKEEPCOMPLSEARCH_PARMNUM = 525 -SV_THREADCOUNTADD_PARMNUM = 526 
-SV_NUMBLOCKTHREADS_PARMNUM = 527 -SV_SCAVTIMEOUT_PARMNUM = 528 -SV_MINRCVQUEUE_PARMNUM = 529 -SV_MINFREEWORKITEMS_PARMNUM = 530 -SV_XACTMEMSIZE_PARMNUM = 531 -SV_THREADPRIORITY_PARMNUM = 532 -SV_MAXMPXCT_PARMNUM = 533 -SV_OPLOCKBREAKWAIT_PARMNUM = 534 -SV_OPLOCKBREAKRESPONSEWAIT_PARMNUM = 535 -SV_ENABLEOPLOCKS_PARMNUM = 536 -SV_ENABLEOPLOCKFORCECLOSE_PARMNUM = 537 -SV_ENABLEFCBOPENS_PARMNUM = 538 -SV_ENABLERAW_PARMNUM = 539 -SV_ENABLESHAREDNETDRIVES_PARMNUM = 540 -SV_MINFREECONNECTIONS_PARMNUM = 541 -SV_MAXFREECONNECTIONS_PARMNUM = 542 -SV_INITSESSTABLE_PARMNUM = 543 -SV_INITCONNTABLE_PARMNUM = 544 -SV_INITFILETABLE_PARMNUM = 545 -SV_INITSEARCHTABLE_PARMNUM = 546 -SV_ALERTSCHEDULE_PARMNUM = 547 -SV_ERRORTHRESHOLD_PARMNUM = 548 -SV_NETWORKERRORTHRESHOLD_PARMNUM = 549 -SV_DISKSPACETHRESHOLD_PARMNUM = 550 -SV_MAXLINKDELAY_PARMNUM = 552 -SV_MINLINKTHROUGHPUT_PARMNUM = 553 -SV_LINKINFOVALIDTIME_PARMNUM = 554 -SV_SCAVQOSINFOUPDATETIME_PARMNUM = 555 -SV_MAXWORKITEMIDLETIME_PARMNUM = 556 -SV_MAXRAWWORKITEMS_PARMNUM = 557 -SV_PRODUCTTYPE_PARMNUM = 560 -SV_SERVERSIZE_PARMNUM = 561 -SV_CONNECTIONLESSAUTODISC_PARMNUM = 562 -SV_SHARINGVIOLATIONRETRIES_PARMNUM = 563 -SV_SHARINGVIOLATIONDELAY_PARMNUM = 564 -SV_MAXGLOBALOPENSEARCH_PARMNUM = 565 -SV_REMOVEDUPLICATESEARCHES_PARMNUM = 566 -SV_LOCKVIOLATIONRETRIES_PARMNUM = 567 -SV_LOCKVIOLATIONOFFSET_PARMNUM = 568 -SV_LOCKVIOLATIONDELAY_PARMNUM = 569 -SV_MDLREADSWITCHOVER_PARMNUM = 570 -SV_CACHEDOPENLIMIT_PARMNUM = 571 -SV_CRITICALTHREADS_PARMNUM = 572 -SV_RESTRICTNULLSESSACCESS_PARMNUM = 573 -SV_ENABLEWFW311DIRECTIPX_PARMNUM = 574 -SV_OTHERQUEUEAFFINITY_PARMNUM = 575 -SV_QUEUESAMPLESECS_PARMNUM = 576 -SV_BALANCECOUNT_PARMNUM = 577 -SV_PREFERREDAFFINITY_PARMNUM = 578 -SV_MAXFREERFCBS_PARMNUM = 579 -SV_MAXFREEMFCBS_PARMNUM = 580 -SV_MAXFREELFCBS_PARMNUM = 581 -SV_MAXFREEPAGEDPOOLCHUNKS_PARMNUM = 582 -SV_MINPAGEDPOOLCHUNKSIZE_PARMNUM = 583 -SV_MAXPAGEDPOOLCHUNKSIZE_PARMNUM = 584 -SV_SENDSFROMPREFERREDPROCESSOR_PARMNUM = 585 
-SV_MAXTHREADSPERQUEUE_PARMNUM = 586 -SV_CACHEDDIRECTORYLIMIT_PARMNUM = 587 -SV_MAXCOPYLENGTH_PARMNUM = 588 -SV_ENABLEBULKTRANSFER_PARMNUM = 589 -SV_ENABLECOMPRESSION_PARMNUM = 590 -SV_AUTOSHAREWKS_PARMNUM = 591 -SV_AUTOSHARESERVER_PARMNUM = 592 -SV_ENABLESECURITYSIGNATURE_PARMNUM = 593 -SV_REQUIRESECURITYSIGNATURE_PARMNUM = 594 -SV_MINCLIENTBUFFERSIZE_PARMNUM = 595 -SV_CONNECTIONNOSESSIONSTIMEOUT_PARMNUM = 596 -SVI1_NUM_ELEMENTS = 5 -SVI2_NUM_ELEMENTS = 40 -SVI3_NUM_ELEMENTS = 44 -SW_AUTOPROF_LOAD_MASK = 1 -SW_AUTOPROF_SAVE_MASK = 2 -SV_MAX_SRV_HEUR_LEN = 32 -SV_USERS_PER_LICENSE = 5 -SVTI2_REMAP_PIPE_NAMES = 2 - -# Generated by h2py from lmshare.h -SHARE_NETNAME_PARMNUM = 1 -SHARE_TYPE_PARMNUM = 3 -SHARE_REMARK_PARMNUM = 4 -SHARE_PERMISSIONS_PARMNUM = 5 -SHARE_MAX_USES_PARMNUM = 6 -SHARE_CURRENT_USES_PARMNUM = 7 -SHARE_PATH_PARMNUM = 8 -SHARE_PASSWD_PARMNUM = 9 -SHARE_FILE_SD_PARMNUM = 501 -SHI1_NUM_ELEMENTS = 4 -SHI2_NUM_ELEMENTS = 10 -STYPE_DISKTREE = 0 -STYPE_PRINTQ = 1 -STYPE_DEVICE = 2 -STYPE_IPC = 3 -STYPE_SPECIAL = -2147483648 -SHI1005_FLAGS_DFS = 1 -SHI1005_FLAGS_DFS_ROOT = 2 -COW_PERMACHINE = 4 -COW_PERUSER = 8 -CSC_CACHEABLE = 16 -CSC_NOFLOWOPS = 32 -CSC_AUTO_INWARD = 64 -CSC_AUTO_OUTWARD = 128 -SHI1005_VALID_FLAGS_SET = ( - CSC_CACHEABLE - | CSC_NOFLOWOPS - | CSC_AUTO_INWARD - | CSC_AUTO_OUTWARD - | COW_PERMACHINE - | COW_PERUSER -) -SHI1007_VALID_FLAGS_SET = SHI1005_VALID_FLAGS_SET -SESS_GUEST = 1 -SESS_NOENCRYPTION = 2 -SESI1_NUM_ELEMENTS = 8 -SESI2_NUM_ELEMENTS = 9 -PERM_FILE_READ = 1 -PERM_FILE_WRITE = 2 -PERM_FILE_CREATE = 4 - -# Generated by h2py from d:\mssdk\include\winnetwk.h -WNNC_NET_MSNET = 65536 -WNNC_NET_LANMAN = 131072 -WNNC_NET_NETWARE = 196608 -WNNC_NET_VINES = 262144 -WNNC_NET_10NET = 327680 -WNNC_NET_LOCUS = 393216 -WNNC_NET_SUN_PC_NFS = 458752 -WNNC_NET_LANSTEP = 524288 -WNNC_NET_9TILES = 589824 -WNNC_NET_LANTASTIC = 655360 -WNNC_NET_AS400 = 720896 -WNNC_NET_FTP_NFS = 786432 -WNNC_NET_PATHWORKS = 851968 -WNNC_NET_LIFENET = 917504 
-WNNC_NET_POWERLAN = 983040 -WNNC_NET_BWNFS = 1048576 -WNNC_NET_COGENT = 1114112 -WNNC_NET_FARALLON = 1179648 -WNNC_NET_APPLETALK = 1245184 -WNNC_NET_INTERGRAPH = 1310720 -WNNC_NET_SYMFONET = 1376256 -WNNC_NET_CLEARCASE = 1441792 -WNNC_NET_FRONTIER = 1507328 -WNNC_NET_BMC = 1572864 -WNNC_NET_DCE = 1638400 -WNNC_NET_DECORB = 2097152 -WNNC_NET_PROTSTOR = 2162688 -WNNC_NET_FJ_REDIR = 2228224 -WNNC_NET_DISTINCT = 2293760 -WNNC_NET_TWINS = 2359296 -WNNC_NET_RDR2SAMPLE = 2424832 -RESOURCE_CONNECTED = 1 -RESOURCE_GLOBALNET = 2 -RESOURCE_REMEMBERED = 3 -RESOURCE_RECENT = 4 -RESOURCE_CONTEXT = 5 -RESOURCETYPE_ANY = 0 -RESOURCETYPE_DISK = 1 -RESOURCETYPE_PRINT = 2 -RESOURCETYPE_RESERVED = 8 -RESOURCETYPE_UNKNOWN = -1 -RESOURCEUSAGE_CONNECTABLE = 1 -RESOURCEUSAGE_CONTAINER = 2 -RESOURCEUSAGE_NOLOCALDEVICE = 4 -RESOURCEUSAGE_SIBLING = 8 -RESOURCEUSAGE_ATTACHED = 16 -RESOURCEUSAGE_ALL = ( - RESOURCEUSAGE_CONNECTABLE | RESOURCEUSAGE_CONTAINER | RESOURCEUSAGE_ATTACHED -) -RESOURCEUSAGE_RESERVED = -2147483648 -RESOURCEDISPLAYTYPE_GENERIC = 0 -RESOURCEDISPLAYTYPE_DOMAIN = 1 -RESOURCEDISPLAYTYPE_SERVER = 2 -RESOURCEDISPLAYTYPE_SHARE = 3 -RESOURCEDISPLAYTYPE_FILE = 4 -RESOURCEDISPLAYTYPE_GROUP = 5 -RESOURCEDISPLAYTYPE_NETWORK = 6 -RESOURCEDISPLAYTYPE_ROOT = 7 -RESOURCEDISPLAYTYPE_SHAREADMIN = 8 -RESOURCEDISPLAYTYPE_DIRECTORY = 9 -RESOURCEDISPLAYTYPE_TREE = 10 -RESOURCEDISPLAYTYPE_NDSCONTAINER = 11 -NETPROPERTY_PERSISTENT = 1 -CONNECT_UPDATE_PROFILE = 1 -CONNECT_UPDATE_RECENT = 2 -CONNECT_TEMPORARY = 4 -CONNECT_INTERACTIVE = 8 -CONNECT_PROMPT = 16 -CONNECT_NEED_DRIVE = 32 -CONNECT_REFCOUNT = 64 -CONNECT_REDIRECT = 128 -CONNECT_LOCALDRIVE = 256 -CONNECT_CURRENT_MEDIA = 512 -CONNECT_DEFERRED = 1024 -CONNECT_RESERVED = -16777216 -CONNDLG_RO_PATH = 1 -CONNDLG_CONN_POINT = 2 -CONNDLG_USE_MRU = 4 -CONNDLG_HIDE_BOX = 8 -CONNDLG_PERSIST = 16 -CONNDLG_NOT_PERSIST = 32 -DISC_UPDATE_PROFILE = 1 -DISC_NO_FORCE = 64 -UNIVERSAL_NAME_INFO_LEVEL = 1 -REMOTE_NAME_INFO_LEVEL = 2 -WNFMT_MULTILINE = 1 
-WNFMT_ABBREVIATED = 2 -WNFMT_INENUM = 16 -WNFMT_CONNECTION = 32 -NETINFO_DLL16 = 1 -NETINFO_DISKRED = 4 -NETINFO_PRINTERRED = 8 -RP_LOGON = 1 -RP_INIFILE = 2 -PP_DISPLAYERRORS = 1 -WNCON_FORNETCARD = 1 -WNCON_NOTROUTED = 2 -WNCON_SLOWLINK = 4 -WNCON_DYNAMIC = 8 - -## NETSETUP_NAME_TYPE, used with NetValidateName -NetSetupUnknown = 0 -NetSetupMachine = 1 -NetSetupWorkgroup = 2 -NetSetupDomain = 3 -NetSetupNonExistentDomain = 4 -NetSetupDnsMachine = 5 - -## NETSETUP_JOIN_STATUS, use with NetGetJoinInformation -NetSetupUnknownStatus = 0 -NetSetupUnjoined = 1 -NetSetupWorkgroupName = 2 -NetSetupDomainName = 3 - -NetValidateAuthentication = 1 -NetValidatePasswordChange = 2 -NetValidatePasswordReset = 3 diff --git a/lib/win32/lib/win32pdhquery.py b/lib/win32/lib/win32pdhquery.py deleted file mode 100644 index a48db08a..00000000 --- a/lib/win32/lib/win32pdhquery.py +++ /dev/null @@ -1,571 +0,0 @@ -""" -Performance Data Helper (PDH) Query Classes - -Wrapper classes for end-users and high-level access to the PDH query -mechanisms. PDH is a win32-specific mechanism for accessing the -performance data made available by the system. The Python for Windows -PDH module does not implement the "Registry" interface, implementing -the more straightforward Query-based mechanism. - -The basic idea of a PDH Query is an object which can query the system -about the status of any number of "counters." The counters are paths -to a particular piece of performance data. For instance, the path -'\\Memory\\Available Bytes' describes just about exactly what it says -it does, the amount of free memory on the default computer expressed -in Bytes. These paths can be considerably more complex than this, -but part of the point of this wrapper module is to hide that -complexity from the end-user/programmer. - -EXAMPLE: A more complex Path - '\\\\RAISTLIN\\PhysicalDisk(_Total)\\Avg. 
Disk Bytes/Read' - Raistlin --> Computer Name - PhysicalDisk --> Object Name - _Total --> The particular Instance (in this case, all instances, i.e. all drives) - Avg. Disk Bytes/Read --> The piece of data being monitored. - -EXAMPLE: Collecting Data with a Query - As an example, the following code implements a logger which allows the - user to choose what counters they would like to log, and logs those - counters for 30 seconds, at two-second intervals. - - query = Query() - query.addcounterbybrowsing() - query.collectdatafor(30,2) - - The data is now stored in a list of lists as: - query.curresults - - The counters(paths) which were used to collect the data are: - query.curpaths - - You can use the win32pdh.ParseCounterPath(path) utility function - to turn the paths into more easily read values for your task, or - write the data to a file, or do whatever you want with it. - -OTHER NOTABLE METHODS: - query.collectdatawhile(period) # start a logging thread for collecting data - query.collectdatawhile_stop() # signal the logging thread to stop logging - query.collectdata() # run the query only once - query.addperfcounter(object, counter, machine=None) # add a standard performance counter - query.addinstcounter(object, counter,machine=None,objtype = 'Process',volatile=1,format = win32pdh.PDH_FMT_LONG) # add a possibly volatile counter - -### Known bugs and limitations ### -Due to a problem with threading under the PythonWin interpreter, there -will be no data logged if the PythonWin window is not the foreground -application. Workaround: scripts using threading should be run in the -python.exe interpreter. - -The volatile-counter handlers are possibly buggy, they haven't been -tested to any extent. The wrapper Query makes it safe to pass invalid -paths (a -1 will be returned, or the Query will be totally ignored, -depending on the missing element), so you should be able to work around -the error by including all possible paths and filtering out the -1's. 
- -There is no way I know of to stop a thread which is currently sleeping, -so you have to wait until the thread in collectdatawhile is activated -again. This might become a problem in situations where the collection -period is multiple minutes (or hours, or whatever). - -Should make the win32pdh.ParseCounter function available to the Query -classes as a method or something similar, so that it can be accessed -by programmes that have just picked up an instance from somewhere. - -Should explicitly mention where QueryErrors can be raised, and create a -full test set to see if there are any uncaught win32api.error's still -hanging around. - -When using the python.exe interpreter, the addcounterbybrowsing- -generated browser window is often hidden behind other windows. No known -workaround other than Alt-tabing to reach the browser window. - -### Other References ### -The win32pdhutil module (which should be in the %pythonroot%/win32/lib -directory) provides quick-and-dirty utilities for one-off access to -variables from the PDH. Almost everything in that module can be done -with a Query object, but it provides task-oriented functions for a -number of common one-off tasks. - -If you can access the MS Developers Network Library, you can find -information about the PDH API as MS describes it. For a background article, -try: -http://msdn.microsoft.com/library/en-us/dnperfmo/html/msdn_pdhlib.asp - -The reference guide for the PDH API was last spotted at: -http://msdn.microsoft.com/library/en-us/perfmon/base/using_the_pdh_interface.asp - - -In general the Python version of the API is just a wrapper around the -Query-based version of this API (as far as I can see), so you can learn what -you need to from there. From what I understand, the MSDN Online -resources are available for the price of signing up for them. I can't -guarantee how long that's supposed to last. (Or anything for that -matter). 
-http://premium.microsoft.com/isapi/devonly/prodinfo/msdnprod/msdnlib.idc?theURL=/msdn/library/sdkdoc/perfdata_4982.htm - -The eventual plan is for my (Mike Fletcher's) Starship account to include -a section on NT Administration, and the Query is the first project -in this plan. There should be an article describing the creation of -a simple logger there, but the example above is 90% of the work of -that project, so don't sweat it if you don't find anything there. -(currently the account hasn't been set up). -http://starship.skyport.net/crew/mcfletch/ - -If you need to contact me immediately, (why I can't imagine), you can -email me at mcfletch@golden.net, or just post your question to the -Python newsgroup with a catchy subject line. -news:comp.lang.python - -### Other Stuff ### -The Query classes are by Mike Fletcher, with the working code -being corruptions of Mark Hammonds win32pdhutil module. - -Use at your own risk, no warranties, no guarantees, no assurances, -if you use it, you accept the risk of using it, etceteras. - -""" -# Feb 12, 98 - MH added "rawaddcounter" so caller can get exception details. - -import _thread -import copy -import time - -import win32api -import win32pdh - - -class BaseQuery: - """ - Provides wrapped access to the Performance Data Helper query - objects, generally you should use the child class Query - unless you have need of doing weird things :) - - This class supports two major working paradigms. In the first, - you open the query, and run it as many times as you need, closing - the query when you're done with it. This is suitable for static - queries (ones where processes being monitored don't disappear). - - In the second, you allow the query to be opened each time and - closed afterward. This causes the base query object to be - destroyed after each call. Suitable for dynamic queries (ones - which watch processes which might be closed while watching.) 
- """ - - def __init__(self, paths=None): - """ - The PDH Query object is initialised with a single, optional - list argument, that must be properly formatted PDH Counter - paths. Generally this list will only be provided by the class - when it is being unpickled (removed from storage). Normal - use is to call the class with no arguments and use the various - addcounter functions (particularly, for end user's, the use of - addcounterbybrowsing is the most common approach) You might - want to provide the list directly if you want to hard-code the - elements with which your query deals (and thereby avoid the - overhead of unpickling the class). - """ - self.counters = [] - if paths: - self.paths = paths - else: - self.paths = [] - self._base = None - self.active = 0 - self.curpaths = [] - - def addcounterbybrowsing( - self, flags=win32pdh.PERF_DETAIL_WIZARD, windowtitle="Python Browser" - ): - """ - Adds possibly multiple paths to the paths attribute of the query, - does this by calling the standard counter browsing dialogue. Within - this dialogue, find the counter you want to log, and click: Add, - repeat for every path you want to log, then click on close. The - paths are appended to the non-volatile paths list for this class, - subclasses may create a function which parses the paths and decides - (via heuristics) whether to add the path to the volatile or non-volatile - path list. - e.g.: - query.addcounter() - """ - win32pdh.BrowseCounters(None, 0, self.paths.append, flags, windowtitle) - - def rawaddcounter(self, object, counter, instance=None, inum=-1, machine=None): - """ - Adds a single counter path, without catching any exceptions. - - See addcounter for details. - """ - path = win32pdh.MakeCounterPath( - (machine, object, instance, None, inum, counter) - ) - self.paths.append(path) - - def addcounter(self, object, counter, instance=None, inum=-1, machine=None): - """ - Adds a single counter path to the paths attribute. 
Normally - this will be called by a child class' speciality functions, - rather than being called directly by the user. (Though it isn't - hard to call manually, since almost everything is given a default) - This method is only functional when the query is closed (or hasn't - yet been opened). This is to prevent conflict in multi-threaded - query applications). - e.g.: - query.addcounter('Memory','Available Bytes') - """ - if not self.active: - try: - self.rawaddcounter(object, counter, instance, inum, machine) - return 0 - except win32api.error: - return -1 - else: - return -1 - - def open(self): - """ - Build the base query object for this wrapper, - then add all of the counters required for the query. - Raise a QueryError if we can't complete the functions. - If we are already open, then do nothing. - """ - if not self.active: # to prevent having multiple open queries - # curpaths are made accessible here because of the possibility of volatile paths - # which may be dynamically altered by subclasses. - self.curpaths = copy.copy(self.paths) - try: - base = win32pdh.OpenQuery() - for path in self.paths: - try: - self.counters.append(win32pdh.AddCounter(base, path)) - except win32api.error: # we passed a bad path - self.counters.append(0) - pass - self._base = base - self.active = 1 - return 0 # open succeeded - except: # if we encounter any errors, kill the Query - try: - self.killbase(base) - except NameError: # failed in creating query - pass - self.active = 0 - self.curpaths = [] - raise QueryError(self) - return 1 # already open - - def killbase(self, base=None): - """ - ### This is not a public method - Mission critical function to kill the win32pdh objects held - by this object. User's should generally use the close method - instead of this method, in case a sub-class has overridden - close to provide some special functionality. 
- """ - # Kill Pythonic references to the objects in this object's namespace - self._base = None - counters = self.counters - self.counters = [] - # we don't kill the curpaths for convenience, this allows the - # user to close a query and still access the last paths - self.active = 0 - # Now call the delete functions on all of the objects - try: - map(win32pdh.RemoveCounter, counters) - except: - pass - try: - win32pdh.CloseQuery(base) - except: - pass - del counters - del base - - def close(self): - """ - Makes certain that the underlying query object has been closed, - and that all counters have been removed from it. This is - important for reference counting. - You should only need to call close if you have previously called - open. The collectdata methods all can handle opening and - closing the query. Calling close multiple times is acceptable. - """ - try: - self.killbase(self._base) - except AttributeError: - self.killbase() - - __del__ = close - - def collectdata(self, format=win32pdh.PDH_FMT_LONG): - """ - Returns the formatted current values for the Query - """ - if self._base: # we are currently open, don't change this - return self.collectdataslave(format) - else: # need to open and then close the _base, should be used by one-offs and elements tracking application instances - self.open() # will raise QueryError if couldn't open the query - temp = self.collectdataslave(format) - self.close() # will always close - return temp - - def collectdataslave(self, format=win32pdh.PDH_FMT_LONG): - """ - ### Not a public method - Called only when the Query is known to be open, runs over - the whole set of counters, appending results to the temp, - returns the values as a list. 
- """ - try: - win32pdh.CollectQueryData(self._base) - temp = [] - for counter in self.counters: - ok = 0 - try: - if counter: - temp.append( - win32pdh.GetFormattedCounterValue(counter, format)[1] - ) - ok = 1 - except win32api.error: - pass - if not ok: - temp.append(-1) # a better way to signal failure??? - return temp - except ( - win32api.error - ): # will happen if, for instance, no counters are part of the query and we attempt to collect data for it. - return [-1] * len(self.counters) - - # pickle functions - def __getinitargs__(self): - """ - ### Not a public method - """ - return (self.paths,) - - -class Query(BaseQuery): - """ - Performance Data Helper(PDH) Query object: - - Provides a wrapper around the native PDH query object which - allows for query reuse, query storage, and general maintenance - functions (adding counter paths in various ways being the most - obvious ones). - """ - - def __init__(self, *args, **namedargs): - """ - The PDH Query object is initialised with a single, optional - list argument, that must be properly formatted PDH Counter - paths. Generally this list will only be provided by the class - when it is being unpickled (removed from storage). Normal - use is to call the class with no arguments and use the various - addcounter functions (particularly, for end user's, the use of - addcounterbybrowsing is the most common approach) You might - want to provide the list directly if you want to hard-code the - elements with which your query deals (and thereby avoid the - overhead of unpickling the class). - """ - self.volatilecounters = [] - BaseQuery.__init__(*(self,) + args, **namedargs) - - def addperfcounter(self, object, counter, machine=None): - """ - A "Performance Counter" is a stable, known, common counter, - such as Memory, or Processor. The use of addperfcounter by - end-users is deprecated, since the use of - addcounterbybrowsing is considerably more flexible and general. 
- It is provided here to allow the easy development of scripts - which need to access variables so common we know them by name - (such as Memory|Available Bytes), and to provide symmetry with - the add inst counter method. - usage: - query.addperfcounter('Memory', 'Available Bytes') - It is just as easy to access addcounter directly, the following - has an identicle effect. - query.addcounter('Memory', 'Available Bytes') - """ - BaseQuery.addcounter(self, object=object, counter=counter, machine=machine) - - def addinstcounter( - self, - object, - counter, - machine=None, - objtype="Process", - volatile=1, - format=win32pdh.PDH_FMT_LONG, - ): - """ - The purpose of using an instcounter is to track particular - instances of a counter object (e.g. a single processor, a single - running copy of a process). For instance, to track all python.exe - instances, you would need merely to ask: - query.addinstcounter('python','Virtual Bytes') - You can find the names of the objects and their available counters - by doing an addcounterbybrowsing() call on a query object (or by - looking in performance monitor's add dialog.) - - Beyond merely rearranging the call arguments to make more sense, - if the volatile flag is true, the instcounters also recalculate - the paths of the available instances on every call to open the - query. - """ - if volatile: - self.volatilecounters.append((object, counter, machine, objtype, format)) - else: - self.paths[len(self.paths) :] = self.getinstpaths( - object, counter, machine, objtype, format - ) - - def getinstpaths( - self, - object, - counter, - machine=None, - objtype="Process", - format=win32pdh.PDH_FMT_LONG, - ): - """ - ### Not an end-user function - Calculate the paths for an instance object. Should alter - to allow processing for lists of object-counter pairs. - """ - items, instances = win32pdh.EnumObjectItems(None, None, objtype, -1) - # find out how many instances of this element we have... 
- instances.sort() - try: - cur = instances.index(object) - except ValueError: - return [] # no instances of this object - temp = [object] - try: - while instances[cur + 1] == object: - temp.append(object) - cur = cur + 1 - except IndexError: # if we went over the end - pass - paths = [] - for ind in range(len(temp)): - # can this raise an error? - paths.append( - win32pdh.MakeCounterPath( - (machine, "Process", object, None, ind, counter) - ) - ) - return paths # should also return the number of elements for naming purposes - - def open(self, *args, **namedargs): - """ - Explicitly open a query: - When you are needing to make multiple calls to the same query, - it is most efficient to open the query, run all of the calls, - then close the query, instead of having the collectdata method - automatically open and close the query each time it runs. - There are currently no arguments to open. - """ - # do all the normal opening stuff, self._base is now the query object - BaseQuery.open(*(self,) + args, **namedargs) - # should rewrite getinstpaths to take a single tuple - paths = [] - for tup in self.volatilecounters: - paths[len(paths) :] = self.getinstpaths(*tup) - for path in paths: - try: - self.counters.append(win32pdh.AddCounter(self._base, path)) - self.curpaths.append( - path - ) # if we fail on the line above, this path won't be in the table or the counters - except win32api.error: - pass # again, what to do with a malformed path??? - - def collectdatafor(self, totalperiod, period=1): - """ - Non-threaded collection of performance data: - This method allows you to specify the total period for which you would - like to run the Query, and the time interval between individual - runs. The collected data is stored in query.curresults at the - _end_ of the run. The pathnames for the query are stored in - query.curpaths. 
- e.g.: - query.collectdatafor(30,2) - Will collect data for 30seconds at 2 second intervals - """ - tempresults = [] - try: - self.open() - for ind in range(totalperiod / period): - tempresults.append(self.collectdata()) - time.sleep(period) - self.curresults = tempresults - finally: - self.close() - - def collectdatawhile(self, period=1): - """ - Threaded collection of performance data: - This method sets up a simple semaphor system for signalling - when you would like to start and stop a threaded data collection - method. The collection runs every period seconds until the - semaphor attribute is set to a non-true value (which normally - should be done by calling query.collectdatawhile_stop() .) - e.g.: - query.collectdatawhile(2) - # starts the query running, returns control to the caller immediately - # is collecting data every two seconds. - # do whatever you want to do while the thread runs, then call: - query.collectdatawhile_stop() - # when you want to deal with the data. It is generally a good idea - # to sleep for period seconds yourself, since the query will not copy - # the required data until the next iteration: - time.sleep(2) - # now you can access the data from the attributes of the query - query.curresults - query.curpaths - """ - self.collectdatawhile_active = 1 - _thread.start_new_thread(self.collectdatawhile_slave, (period,)) - - def collectdatawhile_stop(self): - """ - Signals the collectdatawhile slave thread to stop collecting data - on the next logging iteration. - """ - self.collectdatawhile_active = 0 - - def collectdatawhile_slave(self, period): - """ - ### Not a public function - Does the threaded work of collecting the data and storing it - in an attribute of the class. - """ - tempresults = [] - try: - self.open() # also sets active, so can't be changed. 
- while self.collectdatawhile_active: - tempresults.append(self.collectdata()) - time.sleep(period) - self.curresults = tempresults - finally: - self.close() - - # pickle functions - def __getinitargs__(self): - return (self.paths,) - - def __getstate__(self): - return self.volatilecounters - - def __setstate__(self, volatilecounters): - self.volatilecounters = volatilecounters - - -class QueryError: - def __init__(self, query): - self.query = query - - def __repr__(self): - return "" % repr(self.query) - - __str__ = __repr__ diff --git a/lib/win32/lib/win32pdhutil.py b/lib/win32/lib/win32pdhutil.py deleted file mode 100644 index 7795dd90..00000000 --- a/lib/win32/lib/win32pdhutil.py +++ /dev/null @@ -1,210 +0,0 @@ -"""Utilities for the win32 Performance Data Helper module - -Example: - To get a single bit of data: - >>> import win32pdhutil - >>> win32pdhutil.GetPerformanceAttributes("Memory", "Available Bytes") - 6053888 - >>> win32pdhutil.FindPerformanceAttributesByName("python", counter="Virtual Bytes") - [22278144] - - First example returns data which is not associated with any specific instance. - - The second example reads data for a specific instance - hence the list return - - it would return one result for each instance of Python running. - - In general, it can be tricky finding exactly the "name" of the data you wish to query. - Although you can use (None,None,(eg)"Memory", -1) to do this, - the easiest way is often to simply use PerfMon to find out the names. -""" - -import time - -import win32pdh - -error = win32pdh.error - -# Handle some localization issues. 
-# see http://support.microsoft.com/default.aspx?scid=http://support.microsoft.com:80/support/kb/articles/Q287/1/59.asp&NoWebContent=1 -# Build a map of english_counter_name: counter_id -counter_english_map = {} - - -def find_pdh_counter_localized_name(english_name, machine_name=None): - if not counter_english_map: - import win32api - import win32con - - counter_reg_value = win32api.RegQueryValueEx( - win32con.HKEY_PERFORMANCE_DATA, "Counter 009" - ) - counter_list = counter_reg_value[0] - for i in range(0, len(counter_list) - 1, 2): - try: - counter_id = int(counter_list[i]) - except ValueError: - continue - counter_english_map[counter_list[i + 1].lower()] = counter_id - return win32pdh.LookupPerfNameByIndex( - machine_name, counter_english_map[english_name.lower()] - ) - - -def GetPerformanceAttributes( - object, counter, instance=None, inum=-1, format=win32pdh.PDH_FMT_LONG, machine=None -): - # NOTE: Many counters require 2 samples to give accurate results, - # including "% Processor Time" (as by definition, at any instant, a - # thread's CPU usage is either 0 or 100). To read counters like this, - # you should copy this function, but keep the counter open, and call - # CollectQueryData() each time you need to know. 
- # See http://support.microsoft.com/default.aspx?scid=kb;EN-US;q262938 - # and http://msdn.microsoft.com/library/en-us/dnperfmo/html/perfmonpt2.asp - # My older explanation for this was that the "AddCounter" process forced - # the CPU to 100%, but the above makes more sense :) - path = win32pdh.MakeCounterPath((machine, object, instance, None, inum, counter)) - hq = win32pdh.OpenQuery() - try: - hc = win32pdh.AddCounter(hq, path) - try: - win32pdh.CollectQueryData(hq) - type, val = win32pdh.GetFormattedCounterValue(hc, format) - return val - finally: - win32pdh.RemoveCounter(hc) - finally: - win32pdh.CloseQuery(hq) - - -def FindPerformanceAttributesByName( - instanceName, - object=None, - counter=None, - format=win32pdh.PDH_FMT_LONG, - machine=None, - bRefresh=0, -): - """Find performance attributes by (case insensitive) instance name. - - Given a process name, return a list with the requested attributes. - Most useful for returning a tuple of PIDs given a process name. - """ - if object is None: - object = find_pdh_counter_localized_name("Process", machine) - if counter is None: - counter = find_pdh_counter_localized_name("ID Process", machine) - if bRefresh: # PDH docs say this is how you do a refresh. - win32pdh.EnumObjects(None, machine, 0, 1) - instanceName = instanceName.lower() - items, instances = win32pdh.EnumObjectItems(None, None, object, -1) - # Track multiple instances. 
- instance_dict = {} - for instance in instances: - try: - instance_dict[instance] = instance_dict[instance] + 1 - except KeyError: - instance_dict[instance] = 0 - - ret = [] - for instance, max_instances in instance_dict.items(): - for inum in range(max_instances + 1): - if instance.lower() == instanceName: - ret.append( - GetPerformanceAttributes( - object, counter, instance, inum, format, machine - ) - ) - return ret - - -def ShowAllProcesses(): - object = find_pdh_counter_localized_name("Process") - items, instances = win32pdh.EnumObjectItems( - None, None, object, win32pdh.PERF_DETAIL_WIZARD - ) - # Need to track multiple instances of the same name. - instance_dict = {} - for instance in instances: - try: - instance_dict[instance] = instance_dict[instance] + 1 - except KeyError: - instance_dict[instance] = 0 - - # Bit of a hack to get useful info. - items = [find_pdh_counter_localized_name("ID Process")] + items[:5] - print("Process Name", ",".join(items)) - for instance, max_instances in instance_dict.items(): - for inum in range(max_instances + 1): - hq = win32pdh.OpenQuery() - hcs = [] - for item in items: - path = win32pdh.MakeCounterPath( - (None, object, instance, None, inum, item) - ) - hcs.append(win32pdh.AddCounter(hq, path)) - win32pdh.CollectQueryData(hq) - # as per http://support.microsoft.com/default.aspx?scid=kb;EN-US;q262938, some "%" based - # counters need two collections - time.sleep(0.01) - win32pdh.CollectQueryData(hq) - print("%-15s\t" % (instance[:15]), end=" ") - for hc in hcs: - type, val = win32pdh.GetFormattedCounterValue(hc, win32pdh.PDH_FMT_LONG) - print("%5d" % (val), end=" ") - win32pdh.RemoveCounter(hc) - print() - win32pdh.CloseQuery(hq) - - -# NOTE: This BrowseCallback doesn't seem to work on Vista for markh. -# XXX - look at why!? -# Some counters on Vista require elevation, and callback would previously -# clear exceptions without printing them. 
-def BrowseCallBackDemo(counters): - ## BrowseCounters can now return multiple counter paths - for counter in counters: - ( - machine, - object, - instance, - parentInstance, - index, - counterName, - ) = win32pdh.ParseCounterPath(counter) - - result = GetPerformanceAttributes( - object, counterName, instance, index, win32pdh.PDH_FMT_DOUBLE, machine - ) - print("Value of '%s' is" % counter, result) - print( - "Added '%s' on object '%s' (machine %s), instance %s(%d)-parent of %s" - % (counterName, object, machine, instance, index, parentInstance) - ) - return 0 - - -def browse( - callback=BrowseCallBackDemo, - title="Python Browser", - level=win32pdh.PERF_DETAIL_WIZARD, -): - win32pdh.BrowseCounters(None, 0, callback, level, title, ReturnMultiple=True) - - -if __name__ == "__main__": - ShowAllProcesses() - # Show how to get a couple of attributes by name. - counter = find_pdh_counter_localized_name("Virtual Bytes") - print( - "Virtual Bytes = ", FindPerformanceAttributesByName("python", counter=counter) - ) - print( - "Available Bytes = ", - GetPerformanceAttributes( - find_pdh_counter_localized_name("Memory"), - find_pdh_counter_localized_name("Available Bytes"), - ), - ) - # And a browser. - print("Browsing for counters...") - browse() diff --git a/lib/win32/lib/win32rcparser.py b/lib/win32/lib/win32rcparser.py deleted file mode 100644 index effe1429..00000000 --- a/lib/win32/lib/win32rcparser.py +++ /dev/null @@ -1,677 +0,0 @@ -# Windows dialog .RC file parser, by Adam Walker. - -# This module was adapted from the spambayes project, and is Copyright -# 2003/2004 The Python Software Foundation and is covered by the Python -# Software Foundation license. -""" -This is a parser for Windows .rc files, which are text files which define -dialogs and other Windows UI resources. 
-""" -__author__ = "Adam Walker" -__version__ = "0.11" - -import os -import pprint -import shlex -import stat -import sys - -import commctrl -import win32con - -_controlMap = { - "DEFPUSHBUTTON": 0x80, - "PUSHBUTTON": 0x80, - "Button": 0x80, - "GROUPBOX": 0x80, - "Static": 0x82, - "CTEXT": 0x82, - "RTEXT": 0x82, - "LTEXT": 0x82, - "LISTBOX": 0x83, - "SCROLLBAR": 0x84, - "COMBOBOX": 0x85, - "EDITTEXT": 0x81, - "ICON": 0x82, - "RICHEDIT": "RichEdit20A", -} - -# These are "default styles" for certain controls - ie, Visual Studio assumes -# the styles will be applied, and emits a "NOT {STYLE_NAME}" if it is to be -# disabled. These defaults have been determined by experimentation, so may -# not be completely accurate (most notably, some styles and/or control-types -# may be missing. -_addDefaults = { - "EDITTEXT": win32con.WS_BORDER | win32con.WS_TABSTOP, - "GROUPBOX": win32con.BS_GROUPBOX, - "LTEXT": win32con.SS_LEFT, - "DEFPUSHBUTTON": win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP, - "PUSHBUTTON": win32con.WS_TABSTOP, - "CTEXT": win32con.SS_CENTER, - "RTEXT": win32con.SS_RIGHT, - "ICON": win32con.SS_ICON, - "LISTBOX": win32con.LBS_NOTIFY, -} - -defaultControlStyle = win32con.WS_CHILD | win32con.WS_VISIBLE -defaultControlStyleEx = 0 - - -class DialogDef: - name = "" - id = 0 - style = 0 - styleEx = None - caption = "" - font = "MS Sans Serif" - fontSize = 8 - x = 0 - y = 0 - w = 0 - h = 0 - template = None - - def __init__(self, n, i): - self.name = n - self.id = i - self.styles = [] - self.stylesEx = [] - self.controls = [] - # print "dialog def for ",self.name, self.id - - def createDialogTemplate(self): - t = None - self.template = [ - [ - self.caption, - (self.x, self.y, self.w, self.h), - self.style, - self.styleEx, - (self.fontSize, self.font), - ] - ] - # Add the controls - for control in self.controls: - self.template.append(control.createDialogTemplate()) - return self.template - - -class ControlDef: - id = "" - controlType = "" - subType = "" - idNum = 0 - 
style = defaultControlStyle - styleEx = defaultControlStyleEx - label = "" - x = 0 - y = 0 - w = 0 - h = 0 - - def __init__(self): - self.styles = [] - self.stylesEx = [] - - def toString(self): - s = ( - "" - ) - return s - - def createDialogTemplate(self): - ct = self.controlType - if "CONTROL" == ct: - ct = self.subType - if ct in _controlMap: - ct = _controlMap[ct] - t = [ - ct, - self.label, - self.idNum, - (self.x, self.y, self.w, self.h), - self.style, - self.styleEx, - ] - # print t - return t - - -class StringDef: - def __init__(self, id, idNum, value): - self.id = id - self.idNum = idNum - self.value = value - - def __repr__(self): - return "StringDef(%r, %r, %r)" % (self.id, self.idNum, self.value) - - -class RCParser: - next_id = 1001 - dialogs = {} - _dialogs = {} - debugEnabled = False - token = "" - - def __init__(self): - self.ungot = False - self.ids = {"IDC_STATIC": -1} - self.names = {-1: "IDC_STATIC"} - self.bitmaps = {} - self.stringTable = {} - self.icons = {} - - def debug(self, *args): - if self.debugEnabled: - print(args) - - def getToken(self): - if self.ungot: - self.ungot = False - self.debug("getToken returns (ungot):", self.token) - return self.token - self.token = self.lex.get_token() - self.debug("getToken returns:", self.token) - if self.token == "": - self.token = None - return self.token - - def ungetToken(self): - self.ungot = True - - def getCheckToken(self, expected): - tok = self.getToken() - assert tok == expected, "Expected token '%s', but got token '%s'!" % ( - expected, - tok, - ) - return tok - - def getCommaToken(self): - return self.getCheckToken(",") - - # Return the *current* token as a number, only consuming a token - # if it is the negative-sign. - def currentNumberToken(self): - mult = 1 - if self.token == "-": - mult = -1 - self.getToken() - return int(self.token) * mult - - # Return the *current* token as a string literal (ie, self.token will be a - # quote. 
consumes all tokens until the end of the string - def currentQuotedString(self): - # Handle quoted strings - pity shlex doesn't handle it. - assert self.token.startswith('"'), self.token - bits = [self.token] - while 1: - tok = self.getToken() - if not tok.startswith('"'): - self.ungetToken() - break - bits.append(tok) - sval = "".join(bits)[1:-1] # Remove end quotes. - # Fixup quotes in the body, and all (some?) quoted characters back - # to their raw value. - for i, o in ('""', '"'), ("\\r", "\r"), ("\\n", "\n"), ("\\t", "\t"): - sval = sval.replace(i, o) - return sval - - def load(self, rcstream): - """ - RCParser.loadDialogs(rcFileName) -> None - Load the dialog information into the parser. Dialog Definations can then be accessed - using the "dialogs" dictionary member (name->DialogDef). The "ids" member contains the dictionary of id->name. - The "names" member contains the dictionary of name->id - """ - self.open(rcstream) - self.getToken() - while self.token != None: - self.parse() - self.getToken() - - def open(self, rcstream): - self.lex = shlex.shlex(rcstream) - self.lex.commenters = "//#" - - def parseH(self, file): - lex = shlex.shlex(file) - lex.commenters = "//" - token = " " - while token is not None: - token = lex.get_token() - if token == "" or token is None: - token = None - else: - if token == "define": - n = lex.get_token() - i = int(lex.get_token()) - self.ids[n] = i - if i in self.names: - # Dupe ID really isn't a problem - most consumers - # want to go from name->id, and this is OK. - # It means you can't go from id->name though. 
- pass - # ignore AppStudio special ones - # if not n.startswith("_APS_"): - # print "Duplicate id",i,"for",n,"is", self.names[i] - else: - self.names[i] = n - if self.next_id <= i: - self.next_id = i + 1 - - def parse(self): - noid_parsers = { - "STRINGTABLE": self.parse_stringtable, - } - - id_parsers = { - "DIALOG": self.parse_dialog, - "DIALOGEX": self.parse_dialog, - # "TEXTINCLUDE": self.parse_textinclude, - "BITMAP": self.parse_bitmap, - "ICON": self.parse_icon, - } - deep = 0 - base_token = self.token - rp = noid_parsers.get(base_token) - if rp is not None: - rp() - else: - # Not something we parse that isn't prefixed by an ID - # See if it is an ID prefixed item - if it is, our token - # is the resource ID. - resource_id = self.token - self.getToken() - if self.token is None: - return - - if "BEGIN" == self.token: - # A 'BEGIN' for a structure we don't understand - skip to the - # matching 'END' - deep = 1 - while deep != 0 and self.token is not None: - self.getToken() - self.debug("Zooming over", self.token) - if "BEGIN" == self.token: - deep += 1 - elif "END" == self.token: - deep -= 1 - else: - rp = id_parsers.get(self.token) - if rp is not None: - self.debug("Dispatching '%s'" % (self.token,)) - rp(resource_id) - else: - # We don't know what the resource type is, but we - # have already consumed the next, which can cause problems, - # so push it back. 
- self.debug("Skipping top-level '%s'" % base_token) - self.ungetToken() - - def addId(self, id_name): - if id_name in self.ids: - id = self.ids[id_name] - else: - # IDOK, IDCANCEL etc are special - if a real resource has this value - for n in ["IDOK", "IDCANCEL", "IDYES", "IDNO", "IDABORT"]: - if id_name == n: - v = getattr(win32con, n) - self.ids[n] = v - self.names[v] = n - return v - id = self.next_id - self.next_id += 1 - self.ids[id_name] = id - self.names[id] = id_name - return id - - def lang(self): - while ( - self.token[0:4] == "LANG" - or self.token[0:7] == "SUBLANG" - or self.token == "," - ): - self.getToken() - - def parse_textinclude(self, res_id): - while self.getToken() != "BEGIN": - pass - while 1: - if self.token == "END": - break - s = self.getToken() - - def parse_stringtable(self): - while self.getToken() != "BEGIN": - pass - while 1: - self.getToken() - if self.token == "END": - break - sid = self.token - self.getToken() - sd = StringDef(sid, self.addId(sid), self.currentQuotedString()) - self.stringTable[sid] = sd - - def parse_bitmap(self, name): - return self.parse_bitmap_or_icon(name, self.bitmaps) - - def parse_icon(self, name): - return self.parse_bitmap_or_icon(name, self.icons) - - def parse_bitmap_or_icon(self, name, dic): - self.getToken() - while not self.token.startswith('"'): - self.getToken() - bmf = self.token[1:-1] # quotes - dic[name] = bmf - - def parse_dialog(self, name): - dlg = DialogDef(name, self.addId(name)) - assert len(dlg.controls) == 0 - self._dialogs[name] = dlg - extras = [] - self.getToken() - while not self.token.isdigit(): - self.debug("extra", self.token) - extras.append(self.token) - self.getToken() - dlg.x = int(self.token) - self.getCommaToken() - self.getToken() # number - dlg.y = int(self.token) - self.getCommaToken() - self.getToken() # number - dlg.w = int(self.token) - self.getCommaToken() - self.getToken() # number - dlg.h = int(self.token) - self.getToken() - while not (self.token == None or 
self.token == "" or self.token == "END"): - if self.token == "STYLE": - self.dialogStyle(dlg) - elif self.token == "EXSTYLE": - self.dialogExStyle(dlg) - elif self.token == "CAPTION": - self.dialogCaption(dlg) - elif self.token == "FONT": - self.dialogFont(dlg) - elif self.token == "BEGIN": - self.controls(dlg) - else: - break - self.dialogs[name] = dlg.createDialogTemplate() - - def dialogStyle(self, dlg): - dlg.style, dlg.styles = self.styles([], win32con.DS_SETFONT) - - def dialogExStyle(self, dlg): - self.getToken() - dlg.styleEx, dlg.stylesEx = self.styles([], 0) - - def styles(self, defaults, defaultStyle): - list = defaults - style = defaultStyle - - if "STYLE" == self.token: - self.getToken() - i = 0 - Not = False - while ( - (i % 2 == 1 and ("|" == self.token or "NOT" == self.token)) or (i % 2 == 0) - ) and not self.token == None: - Not = False - if "NOT" == self.token: - Not = True - self.getToken() - i += 1 - if self.token != "|": - if self.token in win32con.__dict__: - value = getattr(win32con, self.token) - else: - if self.token in commctrl.__dict__: - value = getattr(commctrl, self.token) - else: - value = 0 - if Not: - list.append("NOT " + self.token) - self.debug("styles add Not", self.token, value) - style &= ~value - else: - list.append(self.token) - self.debug("styles add", self.token, value) - style |= value - self.getToken() - self.debug("style is ", style) - - return style, list - - def dialogCaption(self, dlg): - if "CAPTION" == self.token: - self.getToken() - self.token = self.token[1:-1] - self.debug("Caption is:", self.token) - dlg.caption = self.token - self.getToken() - - def dialogFont(self, dlg): - if "FONT" == self.token: - self.getToken() - dlg.fontSize = int(self.token) - self.getCommaToken() - self.getToken() # Font name - dlg.font = self.token[1:-1] # it's quoted - self.getToken() - while "BEGIN" != self.token: - self.getToken() - - def controls(self, dlg): - if self.token == "BEGIN": - self.getToken() - # All controls look 
vaguely like: - # TYPE [text, ] Control_id, l, t, r, b [, style] - # .rc parser documents all control types as: - # CHECKBOX, COMBOBOX, CONTROL, CTEXT, DEFPUSHBUTTON, EDITTEXT, GROUPBOX, - # ICON, LISTBOX, LTEXT, PUSHBUTTON, RADIOBUTTON, RTEXT, SCROLLBAR - without_text = ["EDITTEXT", "COMBOBOX", "LISTBOX", "SCROLLBAR"] - while self.token != "END": - control = ControlDef() - control.controlType = self.token - self.getToken() - if control.controlType not in without_text: - if self.token[0:1] == '"': - control.label = self.currentQuotedString() - # Some funny controls, like icons and picture controls use - # the "window text" as extra resource ID (ie, the ID of the - # icon itself). This may be either a literal, or an ID string. - elif self.token == "-" or self.token.isdigit(): - control.label = str(self.currentNumberToken()) - else: - # An ID - use the numeric equiv. - control.label = str(self.addId(self.token)) - self.getCommaToken() - self.getToken() - # Control IDs may be "names" or literal ints - if self.token == "-" or self.token.isdigit(): - control.id = self.currentNumberToken() - control.idNum = control.id - else: - # name of an ID - control.id = self.token - control.idNum = self.addId(control.id) - self.getCommaToken() - - if control.controlType == "CONTROL": - self.getToken() - control.subType = self.token[1:-1] - thisDefaultStyle = defaultControlStyle | _addDefaults.get( - control.subType, 0 - ) - # Styles - self.getCommaToken() - self.getToken() - control.style, control.styles = self.styles([], thisDefaultStyle) - else: - thisDefaultStyle = defaultControlStyle | _addDefaults.get( - control.controlType, 0 - ) - # incase no style is specified. 
- control.style = thisDefaultStyle - # Rect - control.x = int(self.getToken()) - self.getCommaToken() - control.y = int(self.getToken()) - self.getCommaToken() - control.w = int(self.getToken()) - self.getCommaToken() - self.getToken() - control.h = int(self.token) - self.getToken() - if self.token == ",": - self.getToken() - control.style, control.styles = self.styles([], thisDefaultStyle) - if self.token == ",": - self.getToken() - control.styleEx, control.stylesEx = self.styles( - [], defaultControlStyleEx - ) - # print control.toString() - dlg.controls.append(control) - - -def ParseStreams(rc_file, h_file): - rcp = RCParser() - if h_file: - rcp.parseH(h_file) - try: - rcp.load(rc_file) - except: - lex = getattr(rcp, "lex", None) - if lex: - print("ERROR parsing dialogs at line", lex.lineno) - print("Next 10 tokens are:") - for i in range(10): - print(lex.get_token(), end=" ") - print() - raise - return rcp - - -def Parse(rc_name, h_name=None): - if h_name: - h_file = open(h_name, "r") - else: - # See if same basename as the .rc - h_name = rc_name[:-2] + "h" - try: - h_file = open(h_name, "r") - except IOError: - # See if MSVC default of 'resource.h' in the same dir. - h_name = os.path.join(os.path.dirname(rc_name), "resource.h") - try: - h_file = open(h_name, "r") - except IOError: - # .h files are optional anyway - h_file = None - rc_file = open(rc_name, "r") - try: - return ParseStreams(rc_file, h_file) - finally: - if h_file is not None: - h_file.close() - rc_file.close() - return rcp - - -def GenerateFrozenResource(rc_name, output_name, h_name=None): - """Converts an .rc windows resource source file into a python source file - with the same basic public interface as the rest of this module. - Particularly useful for py2exe or other 'freeze' type solutions, - where a frozen .py file can be used inplace of a real .rc file. 
- """ - rcp = Parse(rc_name, h_name) - in_stat = os.stat(rc_name) - - out = open(output_name, "wt") - out.write("#%s\n" % output_name) - out.write("#This is a generated file. Please edit %s instead.\n" % rc_name) - out.write("__version__=%r\n" % __version__) - out.write( - "_rc_size_=%d\n_rc_mtime_=%d\n" - % (in_stat[stat.ST_SIZE], in_stat[stat.ST_MTIME]) - ) - - out.write("class StringDef:\n") - out.write("\tdef __init__(self, id, idNum, value):\n") - out.write("\t\tself.id = id\n") - out.write("\t\tself.idNum = idNum\n") - out.write("\t\tself.value = value\n") - out.write("\tdef __repr__(self):\n") - out.write( - '\t\treturn "StringDef(%r, %r, %r)" % (self.id, self.idNum, self.value)\n' - ) - - out.write("class FakeParser:\n") - - for name in "dialogs", "ids", "names", "bitmaps", "icons", "stringTable": - out.write("\t%s = \\\n" % (name,)) - pprint.pprint(getattr(rcp, name), out) - out.write("\n") - - out.write("def Parse(s):\n") - out.write("\treturn FakeParser()\n") - out.close() - - -if __name__ == "__main__": - if len(sys.argv) <= 1: - print(__doc__) - print() - print("See test_win32rcparser.py, and the win32rcparser directory (both") - print("in the test suite) for an example of this module's usage.") - else: - import pprint - - filename = sys.argv[1] - if "-v" in sys.argv: - RCParser.debugEnabled = 1 - print("Dumping all resources in '%s'" % filename) - resources = Parse(filename) - for id, ddef in resources.dialogs.items(): - print("Dialog %s (%d controls)" % (id, len(ddef))) - pprint.pprint(ddef) - print() - for id, sdef in resources.stringTable.items(): - print("String %s=%r" % (id, sdef.value)) - print() - for id, sdef in resources.bitmaps.items(): - print("Bitmap %s=%r" % (id, sdef)) - print() - for id, sdef in resources.icons.items(): - print("Icon %s=%r" % (id, sdef)) - print() diff --git a/lib/win32/lib/win32serviceutil.py b/lib/win32/lib/win32serviceutil.py deleted file mode 100644 index fb187530..00000000 --- a/lib/win32/lib/win32serviceutil.py 
+++ /dev/null @@ -1,1071 +0,0 @@ -# General purpose service utilities, both for standard Python scripts, -# and for for Python programs which run as services... -# -# Note that most utility functions here will raise win32api.error's -# (which is win32service.error, pywintypes.error, etc) -# when things go wrong - eg, not enough permissions to hit the -# registry etc. - -import importlib -import os -import sys -import warnings - -import pywintypes -import win32api -import win32con -import win32service -import winerror - -_d = "_d" if "_d.pyd" in importlib.machinery.EXTENSION_SUFFIXES else "" -error = RuntimeError - - -# Returns the full path to an executable for hosting a Python service - typically -# 'pythonservice.exe' -# * If you pass a param and it exists as a file, you'll get the abs path back -# * Otherwise we'll use the param instead of 'pythonservice.exe', and we will -# look for it. -def LocatePythonServiceExe(exe=None): - if not exe and hasattr(sys, "frozen"): - # If py2exe etc calls this with no exe, default is current exe, - # and all setup is their problem :) - return sys.executable - - if exe and os.path.isfile(exe): - return win32api.GetFullPathName(exe) - - # We are confused if we aren't now looking for our default. But if that - # exists as specified we assume it's good. - exe = f"pythonservice{_d}.exe" - if os.path.isfile(exe): - return win32api.GetFullPathName(exe) - - # Now we are searching for the .exe - # We are going to want it here. - correct = os.path.join(sys.exec_prefix, exe) - # Even if that file already exists, we copy the one installed by pywin32 - # in-case it was upgraded. 
- # pywin32 installed it next to win32service.pyd (but we can't run it from there) - maybe = os.path.join(os.path.dirname(win32service.__file__), exe) - if os.path.exists(maybe): - print(f"copying host exe '{maybe}' -> '{correct}'") - win32api.CopyFile(maybe, correct) - - if not os.path.exists(correct): - raise error(f"Can't find '{correct}'") - - # If pywintypes.dll isn't next to us, or at least next to pythonXX.dll, - # there's a good chance the service will not run. That's usually copied by - # `pywin32_postinstall`, but putting it next to the python DLL seems - # reasonable. - # (Unlike the .exe above, we don't unconditionally copy this, and possibly - # copy it to a different place. Doesn't seem a good reason for that!?) - python_dll = win32api.GetModuleFileName(sys.dllhandle) - pyw = f"pywintypes{sys.version_info[0]}{sys.version_info[1]}{_d}.dll" - correct_pyw = os.path.join(os.path.dirname(python_dll), pyw) - - if not os.path.exists(correct_pyw): - print(f"copying helper dll '{pywintypes.__file__}' -> '{correct_pyw}'") - win32api.CopyFile(pywintypes.__file__, correct_pyw) - - return correct - - -def _GetServiceShortName(longName): - # looks up a services name - # from the display name - # Thanks to Andy McKay for this code. 
- access = ( - win32con.KEY_READ | win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE - ) - hkey = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services", 0, access - ) - num = win32api.RegQueryInfoKey(hkey)[0] - longName = longName.lower() - # loop through number of subkeys - for x in range(0, num): - # find service name, open subkey - svc = win32api.RegEnumKey(hkey, x) - skey = win32api.RegOpenKey(hkey, svc, 0, access) - try: - # find display name - thisName = str(win32api.RegQueryValueEx(skey, "DisplayName")[0]) - if thisName.lower() == longName: - return svc - except win32api.error: - # in case there is no key called DisplayName - pass - return None - - -# Open a service given either it's long or short name. -def SmartOpenService(hscm, name, access): - try: - return win32service.OpenService(hscm, name, access) - except win32api.error as details: - if details.winerror not in [ - winerror.ERROR_SERVICE_DOES_NOT_EXIST, - winerror.ERROR_INVALID_NAME, - ]: - raise - name = win32service.GetServiceKeyName(hscm, name) - return win32service.OpenService(hscm, name, access) - - -def LocateSpecificServiceExe(serviceName): - # Return the .exe name of any service. - hkey = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, - "SYSTEM\\CurrentControlSet\\Services\\%s" % (serviceName), - 0, - win32con.KEY_ALL_ACCESS, - ) - try: - return win32api.RegQueryValueEx(hkey, "ImagePath")[0] - finally: - hkey.Close() - - -def InstallPerfmonForService(serviceName, iniName, dllName=None): - # If no DLL name, look it up in the INI file name - if not dllName: # May be empty string! - dllName = win32api.GetProfileVal("Python", "dll", "", iniName) - # Still not found - look for the standard one in the same dir as win32service.pyd - if not dllName: - try: - tryName = os.path.join( - os.path.split(win32service.__file__)[0], "perfmondata.dll" - ) - if os.path.isfile(tryName): - dllName = tryName - except AttributeError: - # Frozen app? 
- anyway, can't find it! - pass - if not dllName: - raise ValueError("The name of the performance DLL must be available") - dllName = win32api.GetFullPathName(dllName) - # Now setup all the required "Performance" entries. - hkey = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, - "SYSTEM\\CurrentControlSet\\Services\\%s" % (serviceName), - 0, - win32con.KEY_ALL_ACCESS, - ) - try: - subKey = win32api.RegCreateKey(hkey, "Performance") - try: - win32api.RegSetValueEx(subKey, "Library", 0, win32con.REG_SZ, dllName) - win32api.RegSetValueEx( - subKey, "Open", 0, win32con.REG_SZ, "OpenPerformanceData" - ) - win32api.RegSetValueEx( - subKey, "Close", 0, win32con.REG_SZ, "ClosePerformanceData" - ) - win32api.RegSetValueEx( - subKey, "Collect", 0, win32con.REG_SZ, "CollectPerformanceData" - ) - finally: - win32api.RegCloseKey(subKey) - finally: - win32api.RegCloseKey(hkey) - # Now do the "Lodctr" thang... - - try: - import perfmon - - path, fname = os.path.split(iniName) - oldPath = os.getcwd() - if path: - os.chdir(path) - try: - perfmon.LoadPerfCounterTextStrings("python.exe " + fname) - finally: - os.chdir(oldPath) - except win32api.error as details: - print("The service was installed OK, but the performance monitor") - print("data could not be loaded.", details) - - -def _GetCommandLine(exeName, exeArgs): - if exeArgs is not None: - return exeName + " " + exeArgs - else: - return exeName - - -def InstallService( - pythonClassString, - serviceName, - displayName, - startType=None, - errorControl=None, - bRunInteractive=0, - serviceDeps=None, - userName=None, - password=None, - exeName=None, - perfMonIni=None, - perfMonDll=None, - exeArgs=None, - description=None, - delayedstart=None, -): - # Handle the default arguments. 
- if startType is None: - startType = win32service.SERVICE_DEMAND_START - serviceType = win32service.SERVICE_WIN32_OWN_PROCESS - if bRunInteractive: - serviceType = serviceType | win32service.SERVICE_INTERACTIVE_PROCESS - if errorControl is None: - errorControl = win32service.SERVICE_ERROR_NORMAL - - exeName = '"%s"' % LocatePythonServiceExe(exeName) - commandLine = _GetCommandLine(exeName, exeArgs) - hscm = win32service.OpenSCManager(None, None, win32service.SC_MANAGER_ALL_ACCESS) - try: - hs = win32service.CreateService( - hscm, - serviceName, - displayName, - win32service.SERVICE_ALL_ACCESS, # desired access - serviceType, # service type - startType, - errorControl, # error control type - commandLine, - None, - 0, - serviceDeps, - userName, - password, - ) - if description is not None: - try: - win32service.ChangeServiceConfig2( - hs, win32service.SERVICE_CONFIG_DESCRIPTION, description - ) - except NotImplementedError: - pass ## ChangeServiceConfig2 and description do not exist on NT - if delayedstart is not None: - try: - win32service.ChangeServiceConfig2( - hs, - win32service.SERVICE_CONFIG_DELAYED_AUTO_START_INFO, - delayedstart, - ) - except (win32service.error, NotImplementedError): - ## delayed start only exists on Vista and later - warn only when trying to set delayed to True - warnings.warn("Delayed Start not available on this system") - win32service.CloseServiceHandle(hs) - finally: - win32service.CloseServiceHandle(hscm) - InstallPythonClassString(pythonClassString, serviceName) - # If I have performance monitor info to install, do that. 
- if perfMonIni is not None: - InstallPerfmonForService(serviceName, perfMonIni, perfMonDll) - - -def ChangeServiceConfig( - pythonClassString, - serviceName, - startType=None, - errorControl=None, - bRunInteractive=0, - serviceDeps=None, - userName=None, - password=None, - exeName=None, - displayName=None, - perfMonIni=None, - perfMonDll=None, - exeArgs=None, - description=None, - delayedstart=None, -): - # Before doing anything, remove any perfmon counters. - try: - import perfmon - - perfmon.UnloadPerfCounterTextStrings("python.exe " + serviceName) - except (ImportError, win32api.error): - pass - - # The EXE location may have changed - exeName = '"%s"' % LocatePythonServiceExe(exeName) - - # Handle the default arguments. - if startType is None: - startType = win32service.SERVICE_NO_CHANGE - if errorControl is None: - errorControl = win32service.SERVICE_NO_CHANGE - - hscm = win32service.OpenSCManager(None, None, win32service.SC_MANAGER_ALL_ACCESS) - serviceType = win32service.SERVICE_WIN32_OWN_PROCESS - if bRunInteractive: - serviceType = serviceType | win32service.SERVICE_INTERACTIVE_PROCESS - commandLine = _GetCommandLine(exeName, exeArgs) - try: - hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) - try: - win32service.ChangeServiceConfig( - hs, - serviceType, # service type - startType, - errorControl, # error control type - commandLine, - None, - 0, - serviceDeps, - userName, - password, - displayName, - ) - if description is not None: - try: - win32service.ChangeServiceConfig2( - hs, win32service.SERVICE_CONFIG_DESCRIPTION, description - ) - except NotImplementedError: - pass ## ChangeServiceConfig2 and description do not exist on NT - if delayedstart is not None: - try: - win32service.ChangeServiceConfig2( - hs, - win32service.SERVICE_CONFIG_DELAYED_AUTO_START_INFO, - delayedstart, - ) - except (win32service.error, NotImplementedError): - ## Delayed start only exists on Vista and later. 
On Nt, will raise NotImplementedError since ChangeServiceConfig2 - ## doensn't exist. On Win2k and XP, will fail with ERROR_INVALID_LEVEL - ## Warn only if trying to set delayed to True - if delayedstart: - warnings.warn("Delayed Start not available on this system") - finally: - win32service.CloseServiceHandle(hs) - finally: - win32service.CloseServiceHandle(hscm) - InstallPythonClassString(pythonClassString, serviceName) - # If I have performance monitor info to install, do that. - if perfMonIni is not None: - InstallPerfmonForService(serviceName, perfMonIni, perfMonDll) - - -def InstallPythonClassString(pythonClassString, serviceName): - # Now setup our Python specific entries. - if pythonClassString: - key = win32api.RegCreateKey( - win32con.HKEY_LOCAL_MACHINE, - "System\\CurrentControlSet\\Services\\%s\\PythonClass" % serviceName, - ) - try: - win32api.RegSetValue(key, None, win32con.REG_SZ, pythonClassString) - finally: - win32api.RegCloseKey(key) - - -# Utility functions for Services, to allow persistant properties. -def SetServiceCustomOption(serviceName, option, value): - try: - serviceName = serviceName._svc_name_ - except AttributeError: - pass - key = win32api.RegCreateKey( - win32con.HKEY_LOCAL_MACHINE, - "System\\CurrentControlSet\\Services\\%s\\Parameters" % serviceName, - ) - try: - if type(value) == type(0): - win32api.RegSetValueEx(key, option, 0, win32con.REG_DWORD, value) - else: - win32api.RegSetValueEx(key, option, 0, win32con.REG_SZ, value) - finally: - win32api.RegCloseKey(key) - - -def GetServiceCustomOption(serviceName, option, defaultValue=None): - # First param may also be a service class/instance. 
- # This allows services to pass "self" - try: - serviceName = serviceName._svc_name_ - except AttributeError: - pass - key = win32api.RegCreateKey( - win32con.HKEY_LOCAL_MACHINE, - "System\\CurrentControlSet\\Services\\%s\\Parameters" % serviceName, - ) - try: - try: - return win32api.RegQueryValueEx(key, option)[0] - except win32api.error: # No value. - return defaultValue - finally: - win32api.RegCloseKey(key) - - -def RemoveService(serviceName): - try: - import perfmon - - perfmon.UnloadPerfCounterTextStrings("python.exe " + serviceName) - except (ImportError, win32api.error): - pass - - hscm = win32service.OpenSCManager(None, None, win32service.SC_MANAGER_ALL_ACCESS) - try: - hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) - win32service.DeleteService(hs) - win32service.CloseServiceHandle(hs) - finally: - win32service.CloseServiceHandle(hscm) - - import win32evtlogutil - - try: - win32evtlogutil.RemoveSourceFromRegistry(serviceName) - except win32api.error: - pass - - -def ControlService(serviceName, code, machine=None): - hscm = win32service.OpenSCManager(machine, None, win32service.SC_MANAGER_ALL_ACCESS) - try: - hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) - try: - status = win32service.ControlService(hs, code) - finally: - win32service.CloseServiceHandle(hs) - finally: - win32service.CloseServiceHandle(hscm) - return status - - -def __FindSvcDeps(findName): - if type(findName) is pywintypes.UnicodeType: - findName = str(findName) - dict = {} - k = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services" - ) - num = 0 - while 1: - try: - svc = win32api.RegEnumKey(k, num) - except win32api.error: - break - num = num + 1 - sk = win32api.RegOpenKey(k, svc) - try: - deps, typ = win32api.RegQueryValueEx(sk, "DependOnService") - except win32api.error: - deps = () - for dep in deps: - dep = dep.lower() - dep_on = dict.get(dep, []) - dep_on.append(svc) - dict[dep] = dep_on - - 
return __ResolveDeps(findName, dict) - - -def __ResolveDeps(findName, dict): - items = dict.get(findName.lower(), []) - retList = [] - for svc in items: - retList.insert(0, svc) - retList = __ResolveDeps(svc, dict) + retList - return retList - - -def WaitForServiceStatus(serviceName, status, waitSecs, machine=None): - """Waits for the service to return the specified status. You - should have already requested the service to enter that state""" - for i in range(waitSecs * 4): - now_status = QueryServiceStatus(serviceName, machine)[1] - if now_status == status: - break - win32api.Sleep(250) - else: - raise pywintypes.error( - winerror.ERROR_SERVICE_REQUEST_TIMEOUT, - "QueryServiceStatus", - win32api.FormatMessage(winerror.ERROR_SERVICE_REQUEST_TIMEOUT)[:-2], - ) - - -def __StopServiceWithTimeout(hs, waitSecs=30): - try: - status = win32service.ControlService(hs, win32service.SERVICE_CONTROL_STOP) - except pywintypes.error as exc: - if exc.winerror != winerror.ERROR_SERVICE_NOT_ACTIVE: - raise - for i in range(waitSecs): - status = win32service.QueryServiceStatus(hs) - if status[1] == win32service.SERVICE_STOPPED: - break - win32api.Sleep(1000) - else: - raise pywintypes.error( - winerror.ERROR_SERVICE_REQUEST_TIMEOUT, - "ControlService", - win32api.FormatMessage(winerror.ERROR_SERVICE_REQUEST_TIMEOUT)[:-2], - ) - - -def StopServiceWithDeps(serviceName, machine=None, waitSecs=30): - # Stop a service recursively looking for dependant services - hscm = win32service.OpenSCManager(machine, None, win32service.SC_MANAGER_ALL_ACCESS) - try: - deps = __FindSvcDeps(serviceName) - for dep in deps: - hs = win32service.OpenService(hscm, dep, win32service.SERVICE_ALL_ACCESS) - try: - __StopServiceWithTimeout(hs, waitSecs) - finally: - win32service.CloseServiceHandle(hs) - # Now my service! 
- hs = win32service.OpenService( - hscm, serviceName, win32service.SERVICE_ALL_ACCESS - ) - try: - __StopServiceWithTimeout(hs, waitSecs) - finally: - win32service.CloseServiceHandle(hs) - - finally: - win32service.CloseServiceHandle(hscm) - - -def StopService(serviceName, machine=None): - return ControlService(serviceName, win32service.SERVICE_CONTROL_STOP, machine) - - -def StartService(serviceName, args=None, machine=None): - hscm = win32service.OpenSCManager(machine, None, win32service.SC_MANAGER_ALL_ACCESS) - try: - hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS) - try: - win32service.StartService(hs, args) - finally: - win32service.CloseServiceHandle(hs) - finally: - win32service.CloseServiceHandle(hscm) - - -def RestartService(serviceName, args=None, waitSeconds=30, machine=None): - "Stop the service, and then start it again (with some tolerance for allowing it to stop.)" - try: - StopService(serviceName, machine) - except pywintypes.error as exc: - # Allow only "service not running" error - if exc.winerror != winerror.ERROR_SERVICE_NOT_ACTIVE: - raise - # Give it a few goes, as the service may take time to stop - for i in range(waitSeconds): - try: - StartService(serviceName, args, machine) - break - except pywintypes.error as exc: - if exc.winerror != winerror.ERROR_SERVICE_ALREADY_RUNNING: - raise - win32api.Sleep(1000) - else: - print("Gave up waiting for the old service to stop!") - - -def _DebugCtrlHandler(evt): - if evt in (win32con.CTRL_C_EVENT, win32con.CTRL_BREAK_EVENT): - assert g_debugService - print("Stopping debug service.") - g_debugService.SvcStop() - return True - return False - - -def DebugService(cls, argv=[]): - # Run a service in "debug" mode. Re-implements what pythonservice.exe - # does when it sees a "-debug" param. 
- # Currently only used by "frozen" (ie, py2exe) programs (but later may - # end up being used for all services should we ever remove - # pythonservice.exe) - import servicemanager - - global g_debugService - - print("Debugging service %s - press Ctrl+C to stop." % (cls._svc_name_,)) - servicemanager.Debugging(True) - servicemanager.PrepareToHostSingle(cls) - g_debugService = cls(argv) - # Setup a ctrl+c handler to simulate a "stop" - win32api.SetConsoleCtrlHandler(_DebugCtrlHandler, True) - try: - g_debugService.SvcRun() - finally: - win32api.SetConsoleCtrlHandler(_DebugCtrlHandler, False) - servicemanager.Debugging(False) - g_debugService = None - - -def GetServiceClassString(cls, argv=None): - if argv is None: - argv = sys.argv - import pickle - - modName = pickle.whichmodule(cls, cls.__name__) - if modName == "__main__": - try: - fname = win32api.GetFullPathName(argv[0]) - path = os.path.split(fname)[0] - # Eaaaahhhh - sometimes this will be a short filename, which causes - # problems with 1.5.1 and the silly filename case rule. - filelist = win32api.FindFiles(fname) - # win32api.FindFiles will not detect files in a zip or exe. If list is empty, - # skip the test and hope the file really exists. - if len(filelist) != 0: - # Get the long name - fname = os.path.join(path, filelist[0][8]) - except win32api.error: - raise error( - "Could not resolve the path name '%s' to a full path" % (argv[0]) - ) - modName = os.path.splitext(fname)[0] - return modName + "." 
+ cls.__name__ - - -def QueryServiceStatus(serviceName, machine=None): - hscm = win32service.OpenSCManager(machine, None, win32service.SC_MANAGER_CONNECT) - try: - hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_QUERY_STATUS) - try: - status = win32service.QueryServiceStatus(hs) - finally: - win32service.CloseServiceHandle(hs) - finally: - win32service.CloseServiceHandle(hscm) - return status - - -def usage(): - try: - fname = os.path.split(sys.argv[0])[1] - except: - fname = sys.argv[0] - print( - "Usage: '%s [options] install|update|remove|start [...]|stop|restart [...]|debug [...]'" - % fname - ) - print("Options for 'install' and 'update' commands only:") - print(" --username domain\\username : The Username the service is to run under") - print(" --password password : The password for the username") - print( - " --startup [manual|auto|disabled|delayed] : How the service starts, default = manual" - ) - print(" --interactive : Allow the service to interact with the desktop.") - print( - " --perfmonini file: .ini file to use for registering performance monitor data" - ) - print(" --perfmondll file: .dll file to use when querying the service for") - print(" performance data, default = perfmondata.dll") - print("Options for 'start' and 'stop' commands only:") - print(" --wait seconds: Wait for the service to actually start or stop.") - print(" If you specify --wait with the 'stop' option, the service") - print(" and all dependent services will be stopped, each waiting") - print(" the specified period.") - sys.exit(1) - - -def HandleCommandLine( - cls, - serviceClassString=None, - argv=None, - customInstallOptions="", - customOptionHandler=None, -): - """Utility function allowing services to process the command line. - - Allows standard commands such as 'start', 'stop', 'debug', 'install' etc. - - Install supports 'standard' command line options prefixed with '--', such as - --username, --password, etc. 
In addition, - the function allows custom command line options to be handled by the calling function. - """ - err = 0 - - if argv is None: - argv = sys.argv - - if len(argv) <= 1: - usage() - - serviceName = cls._svc_name_ - serviceDisplayName = cls._svc_display_name_ - if serviceClassString is None: - serviceClassString = GetServiceClassString(cls) - - # Pull apart the command line - import getopt - - try: - opts, args = getopt.getopt( - argv[1:], - customInstallOptions, - [ - "password=", - "username=", - "startup=", - "perfmonini=", - "perfmondll=", - "interactive", - "wait=", - ], - ) - except getopt.error as details: - print(details) - usage() - userName = None - password = None - perfMonIni = perfMonDll = None - startup = None - delayedstart = None - interactive = None - waitSecs = 0 - for opt, val in opts: - if opt == "--username": - userName = val - elif opt == "--password": - password = val - elif opt == "--perfmonini": - perfMonIni = val - elif opt == "--perfmondll": - perfMonDll = val - elif opt == "--interactive": - interactive = 1 - elif opt == "--startup": - map = { - "manual": win32service.SERVICE_DEMAND_START, - "auto": win32service.SERVICE_AUTO_START, - "delayed": win32service.SERVICE_AUTO_START, ## ChangeServiceConfig2 called later - "disabled": win32service.SERVICE_DISABLED, - } - try: - startup = map[val.lower()] - except KeyError: - print("'%s' is not a valid startup option" % val) - if val.lower() == "delayed": - delayedstart = True - elif val.lower() == "auto": - delayedstart = False - ## else no change - elif opt == "--wait": - try: - waitSecs = int(val) - except ValueError: - print("--wait must specify an integer number of seconds.") - usage() - - arg = args[0] - knownArg = 0 - # First we process all arguments which pass additional args on - if arg == "start": - knownArg = 1 - print("Starting service %s" % (serviceName)) - try: - StartService(serviceName, args[1:]) - if waitSecs: - WaitForServiceStatus( - serviceName, 
win32service.SERVICE_RUNNING, waitSecs - ) - except win32service.error as exc: - print("Error starting service: %s" % exc.strerror) - err = exc.winerror - - elif arg == "restart": - knownArg = 1 - print("Restarting service %s" % (serviceName)) - RestartService(serviceName, args[1:]) - if waitSecs: - WaitForServiceStatus(serviceName, win32service.SERVICE_RUNNING, waitSecs) - - elif arg == "debug": - knownArg = 1 - if not hasattr(sys, "frozen"): - # non-frozen services use pythonservice.exe which handles a - # -debug option - svcArgs = " ".join(args[1:]) - try: - exeName = LocateSpecificServiceExe(serviceName) - except win32api.error as exc: - if exc.winerror == winerror.ERROR_FILE_NOT_FOUND: - print("The service does not appear to be installed.") - print("Please install the service before debugging it.") - sys.exit(1) - raise - try: - os.system("%s -debug %s %s" % (exeName, serviceName, svcArgs)) - # ^C is used to kill the debug service. Sometimes Python also gets - # interrupted - ignore it... - except KeyboardInterrupt: - pass - else: - # py2exe services don't use pythonservice - so we simulate - # debugging here. - DebugService(cls, args) - - if not knownArg and len(args) != 1: - usage() # the rest of the cmds don't take addn args - - if arg == "install": - knownArg = 1 - try: - serviceDeps = cls._svc_deps_ - except AttributeError: - serviceDeps = None - try: - exeName = cls._exe_name_ - except AttributeError: - exeName = None # Default to PythonService.exe - try: - exeArgs = cls._exe_args_ - except AttributeError: - exeArgs = None - try: - description = cls._svc_description_ - except AttributeError: - description = None - print("Installing service %s" % (serviceName,)) - # Note that we install the service before calling the custom option - # handler, so if the custom handler fails, we have an installed service (from NT's POV) - # but is unlikely to work, as the Python code controlling it failed. 
Therefore - # we remove the service if the first bit works, but the second doesnt! - try: - InstallService( - serviceClassString, - serviceName, - serviceDisplayName, - serviceDeps=serviceDeps, - startType=startup, - bRunInteractive=interactive, - userName=userName, - password=password, - exeName=exeName, - perfMonIni=perfMonIni, - perfMonDll=perfMonDll, - exeArgs=exeArgs, - description=description, - delayedstart=delayedstart, - ) - if customOptionHandler: - customOptionHandler(*(opts,)) - print("Service installed") - except win32service.error as exc: - if exc.winerror == winerror.ERROR_SERVICE_EXISTS: - arg = "update" # Fall through to the "update" param! - else: - print( - "Error installing service: %s (%d)" % (exc.strerror, exc.winerror) - ) - err = exc.winerror - except ValueError as msg: # Can be raised by custom option handler. - print("Error installing service: %s" % str(msg)) - err = -1 - # xxx - maybe I should remove after _any_ failed install - however, - # xxx - it may be useful to help debug to leave the service as it failed. - # xxx - We really _must_ remove as per the comments above... - # As we failed here, remove the service, so the next installation - # attempt works. 
- try: - RemoveService(serviceName) - except win32api.error: - print("Warning - could not remove the partially installed service.") - - if arg == "update": - knownArg = 1 - try: - serviceDeps = cls._svc_deps_ - except AttributeError: - serviceDeps = None - try: - exeName = cls._exe_name_ - except AttributeError: - exeName = None # Default to PythonService.exe - try: - exeArgs = cls._exe_args_ - except AttributeError: - exeArgs = None - try: - description = cls._svc_description_ - except AttributeError: - description = None - print("Changing service configuration") - try: - ChangeServiceConfig( - serviceClassString, - serviceName, - serviceDeps=serviceDeps, - startType=startup, - bRunInteractive=interactive, - userName=userName, - password=password, - exeName=exeName, - displayName=serviceDisplayName, - perfMonIni=perfMonIni, - perfMonDll=perfMonDll, - exeArgs=exeArgs, - description=description, - delayedstart=delayedstart, - ) - if customOptionHandler: - customOptionHandler(*(opts,)) - print("Service updated") - except win32service.error as exc: - print( - "Error changing service configuration: %s (%d)" - % (exc.strerror, exc.winerror) - ) - err = exc.winerror - - elif arg == "remove": - knownArg = 1 - print("Removing service %s" % (serviceName)) - try: - RemoveService(serviceName) - print("Service removed") - except win32service.error as exc: - print("Error removing service: %s (%d)" % (exc.strerror, exc.winerror)) - err = exc.winerror - elif arg == "stop": - knownArg = 1 - print("Stopping service %s" % (serviceName)) - try: - if waitSecs: - StopServiceWithDeps(serviceName, waitSecs=waitSecs) - else: - StopService(serviceName) - except win32service.error as exc: - print("Error stopping service: %s (%d)" % (exc.strerror, exc.winerror)) - err = exc.winerror - if not knownArg: - err = -1 - print("Unknown command - '%s'" % arg) - usage() - return err - - -# -# Useful base class to build services from. 
-# -class ServiceFramework: - # Required Attributes: - # _svc_name_ = The service name - # _svc_display_name_ = The service display name - - # Optional Attributes: - _svc_deps_ = None # sequence of service names on which this depends - _exe_name_ = None # Default to PythonService.exe - _exe_args_ = None # Default to no arguments - _svc_description_ = ( - None # Only exists on Windows 2000 or later, ignored on windows NT - ) - - def __init__(self, args): - import servicemanager - - self.ssh = servicemanager.RegisterServiceCtrlHandler( - args[0], self.ServiceCtrlHandlerEx, True - ) - servicemanager.SetEventSourceName(self._svc_name_) - self.checkPoint = 0 - - def GetAcceptedControls(self): - # Setup the service controls we accept based on our attributes. Note - # that if you need to handle controls via SvcOther[Ex](), you must - # override this. - accepted = 0 - if hasattr(self, "SvcStop"): - accepted = accepted | win32service.SERVICE_ACCEPT_STOP - if hasattr(self, "SvcPause") and hasattr(self, "SvcContinue"): - accepted = accepted | win32service.SERVICE_ACCEPT_PAUSE_CONTINUE - if hasattr(self, "SvcShutdown"): - accepted = accepted | win32service.SERVICE_ACCEPT_SHUTDOWN - return accepted - - def ReportServiceStatus( - self, serviceStatus, waitHint=5000, win32ExitCode=0, svcExitCode=0 - ): - if self.ssh is None: # Debugging! 
- return - if serviceStatus == win32service.SERVICE_START_PENDING: - accepted = 0 - else: - accepted = self.GetAcceptedControls() - - if serviceStatus in [ - win32service.SERVICE_RUNNING, - win32service.SERVICE_STOPPED, - ]: - checkPoint = 0 - else: - self.checkPoint = self.checkPoint + 1 - checkPoint = self.checkPoint - - # Now report the status to the control manager - status = ( - win32service.SERVICE_WIN32_OWN_PROCESS, - serviceStatus, - accepted, # dwControlsAccepted, - win32ExitCode, # dwWin32ExitCode; - svcExitCode, # dwServiceSpecificExitCode; - checkPoint, # dwCheckPoint; - waitHint, - ) - win32service.SetServiceStatus(self.ssh, status) - - def SvcInterrogate(self): - # Assume we are running, and everyone is happy. - self.ReportServiceStatus(win32service.SERVICE_RUNNING) - - def SvcOther(self, control): - try: - print("Unknown control status - %d" % control) - except IOError: - # services may not have a valid stdout! - pass - - def ServiceCtrlHandler(self, control): - return self.ServiceCtrlHandlerEx(control, 0, None) - - # The 'Ex' functions, which take additional params - def SvcOtherEx(self, control, event_type, data): - # The default here is to call self.SvcOther as that is the old behaviour. - # If you want to take advantage of the extra data, override this method - return self.SvcOther(control) - - def ServiceCtrlHandlerEx(self, control, event_type, data): - if control == win32service.SERVICE_CONTROL_STOP: - return self.SvcStop() - elif control == win32service.SERVICE_CONTROL_PAUSE: - return self.SvcPause() - elif control == win32service.SERVICE_CONTROL_CONTINUE: - return self.SvcContinue() - elif control == win32service.SERVICE_CONTROL_INTERROGATE: - return self.SvcInterrogate() - elif control == win32service.SERVICE_CONTROL_SHUTDOWN: - return self.SvcShutdown() - else: - return self.SvcOtherEx(control, event_type, data) - - def SvcRun(self): - # This is the entry point the C framework calls when the Service is - # started. 
Your Service class should implement SvcDoRun(). - # Or you can override this method for more control over the Service - # statuses reported to the SCM. - - # If this method raises an exception, the C framework will detect this - # and report a SERVICE_STOPPED status with a non-zero error code. - - self.ReportServiceStatus(win32service.SERVICE_RUNNING) - self.SvcDoRun() - # Once SvcDoRun terminates, the service has stopped. - # We tell the SCM the service is still stopping - the C framework - # will automatically tell the SCM it has stopped when this returns. - self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) diff --git a/lib/win32/lib/win32timezone.py b/lib/win32/lib/win32timezone.py deleted file mode 100644 index 569ea9b9..00000000 --- a/lib/win32/lib/win32timezone.py +++ /dev/null @@ -1,1023 +0,0 @@ -# -*- coding: UTF-8 -*- - -""" -win32timezone: - Module for handling datetime.tzinfo time zones using the windows -registry for time zone information. The time zone names are dependent -on the registry entries defined by the operating system. - - This module may be tested using the doctest module. - - Written by Jason R. Coombs (jaraco@jaraco.com). - Copyright © 2003-2012. - All Rights Reserved. - - This module is licenced for use in Mark Hammond's pywin32 -library under the same terms as the pywin32 library. - - To use this time zone module with the datetime module, simply pass -the TimeZoneInfo object to the datetime constructor. For example, - ->>> import win32timezone, datetime ->>> assert 'Mountain Standard Time' in win32timezone.TimeZoneInfo.get_sorted_time_zone_names() ->>> MST = win32timezone.TimeZoneInfo('Mountain Standard Time') ->>> now = datetime.datetime.now(MST) - - The now object is now a time-zone aware object, and daylight savings- -aware methods may be called on it. 
- ->>> now.utcoffset() in (datetime.timedelta(-1, 61200), datetime.timedelta(-1, 64800)) -True - -(note that the result of utcoffset call will be different based on when now was -generated, unless standard time is always used) - ->>> now = datetime.datetime.now(TimeZoneInfo('Mountain Standard Time', True)) ->>> now.utcoffset() -datetime.timedelta(days=-1, seconds=61200) - ->>> aug2 = datetime.datetime(2003, 8, 2, tzinfo = MST) ->>> tuple(aug2.utctimetuple()) -(2003, 8, 2, 6, 0, 0, 5, 214, 0) ->>> nov2 = datetime.datetime(2003, 11, 25, tzinfo = MST) ->>> tuple(nov2.utctimetuple()) -(2003, 11, 25, 7, 0, 0, 1, 329, 0) - -To convert from one timezone to another, just use the astimezone method. - ->>> aug2.isoformat() -'2003-08-02T00:00:00-06:00' ->>> aug2est = aug2.astimezone(win32timezone.TimeZoneInfo('Eastern Standard Time')) ->>> aug2est.isoformat() -'2003-08-02T02:00:00-04:00' - -calling the displayName member will return the display name as set in the -registry. - ->>> est = win32timezone.TimeZoneInfo('Eastern Standard Time') ->>> str(est.displayName) -'(UTC-05:00) Eastern Time (US & Canada)' - ->>> gmt = win32timezone.TimeZoneInfo('GMT Standard Time', True) ->>> str(gmt.displayName) -'(UTC+00:00) Dublin, Edinburgh, Lisbon, London' - -To get the complete list of available time zone keys, ->>> zones = win32timezone.TimeZoneInfo.get_all_time_zones() - -If you want to get them in an order that's sorted longitudinally ->>> zones = win32timezone.TimeZoneInfo.get_sorted_time_zones() - -TimeZoneInfo now supports being pickled and comparison ->>> import pickle ->>> tz = win32timezone.TimeZoneInfo('China Standard Time') ->>> tz == pickle.loads(pickle.dumps(tz)) -True - -It's possible to construct a TimeZoneInfo from a TimeZoneDescription -including the currently-defined zone. 
->>> tz = win32timezone.TimeZoneInfo(TimeZoneDefinition.current()) ->>> tz == pickle.loads(pickle.dumps(tz)) -True - ->>> aest = win32timezone.TimeZoneInfo('AUS Eastern Standard Time') ->>> est = win32timezone.TimeZoneInfo('E. Australia Standard Time') ->>> dt = datetime.datetime(2006, 11, 11, 1, 0, 0, tzinfo = aest) ->>> estdt = dt.astimezone(est) ->>> estdt.strftime('%Y-%m-%d %H:%M:%S') -'2006-11-11 00:00:00' - ->>> dt = datetime.datetime(2007, 1, 12, 1, 0, 0, tzinfo = aest) ->>> estdt = dt.astimezone(est) ->>> estdt.strftime('%Y-%m-%d %H:%M:%S') -'2007-01-12 00:00:00' - ->>> dt = datetime.datetime(2007, 6, 13, 1, 0, 0, tzinfo = aest) ->>> estdt = dt.astimezone(est) ->>> estdt.strftime('%Y-%m-%d %H:%M:%S') -'2007-06-13 01:00:00' - -Microsoft now has a patch for handling time zones in 2007 (see -http://support.microsoft.com/gp/cp_dst) - -As a result, patched systems will give an incorrect result for -dates prior to the designated year except for Vista and its -successors, which have dynamic time zone support. ->>> nov2_pre_change = datetime.datetime(2003, 11, 2, tzinfo = MST) ->>> old_response = (2003, 11, 2, 7, 0, 0, 6, 306, 0) ->>> incorrect_patch_response = (2003, 11, 2, 6, 0, 0, 6, 306, 0) ->>> pre_response = nov2_pre_change.utctimetuple() ->>> pre_response in (old_response, incorrect_patch_response) -True - -Furthermore, unpatched systems pre-Vista will give an incorrect -result for dates after 2007. ->>> nov2_post_change = datetime.datetime(2007, 11, 2, tzinfo = MST) ->>> incorrect_unpatched_response = (2007, 11, 2, 7, 0, 0, 4, 306, 0) ->>> new_response = (2007, 11, 2, 6, 0, 0, 4, 306, 0) ->>> post_response = nov2_post_change.utctimetuple() ->>> post_response in (new_response, incorrect_unpatched_response) -True - - -There is a function you can call to get some capabilities of the time -zone data. 
->>> caps = GetTZCapabilities() ->>> isinstance(caps, dict) -True ->>> 'MissingTZPatch' in caps -True ->>> 'DynamicTZSupport' in caps -True - ->>> both_dates_correct = (pre_response == old_response and post_response == new_response) ->>> old_dates_wrong = (pre_response == incorrect_patch_response) ->>> new_dates_wrong = (post_response == incorrect_unpatched_response) - ->>> caps['DynamicTZSupport'] == both_dates_correct -True - ->>> (not caps['DynamicTZSupport'] and caps['MissingTZPatch']) == new_dates_wrong -True - ->>> (not caps['DynamicTZSupport'] and not caps['MissingTZPatch']) == old_dates_wrong -True - -This test helps ensure language support for unicode characters ->>> x = TIME_ZONE_INFORMATION(0, u'français') - - -Test conversion from one time zone to another at a DST boundary -=============================================================== - ->>> tz_hi = TimeZoneInfo('Hawaiian Standard Time') ->>> tz_pac = TimeZoneInfo('Pacific Standard Time') ->>> time_before = datetime.datetime(2011, 11, 5, 15, 59, 59, tzinfo=tz_hi) ->>> tz_hi.utcoffset(time_before) -datetime.timedelta(days=-1, seconds=50400) ->>> tz_hi.dst(time_before) -datetime.timedelta(0) - -Hawaii doesn't need dynamic TZ info ->>> getattr(tz_hi, 'dynamicInfo', None) - -Here's a time that gave some trouble as reported in #3523104 -because one minute later, the equivalent UTC time changes from DST -in the U.S. ->>> dt_hi = datetime.datetime(2011, 11, 5, 15, 59, 59, 0, tzinfo=tz_hi) ->>> dt_hi.timetuple() -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=15, tm_min=59, tm_sec=59, tm_wday=5, tm_yday=309, tm_isdst=0) ->>> dt_hi.utctimetuple() -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=1, tm_min=59, tm_sec=59, tm_wday=6, tm_yday=310, tm_isdst=0) - -Convert the time to pacific time. 
->>> dt_pac = dt_hi.astimezone(tz_pac) ->>> dt_pac.timetuple() -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=18, tm_min=59, tm_sec=59, tm_wday=5, tm_yday=309, tm_isdst=1) - -Notice that the UTC time is almost 2am. ->>> dt_pac.utctimetuple() -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=1, tm_min=59, tm_sec=59, tm_wday=6, tm_yday=310, tm_isdst=0) - -Now do the same tests one minute later in Hawaii. ->>> time_after = datetime.datetime(2011, 11, 5, 16, 0, 0, 0, tzinfo=tz_hi) ->>> tz_hi.utcoffset(time_after) -datetime.timedelta(days=-1, seconds=50400) ->>> tz_hi.dst(time_before) -datetime.timedelta(0) - ->>> dt_hi = datetime.datetime(2011, 11, 5, 16, 0, 0, 0, tzinfo=tz_hi) ->>> print(dt_hi.timetuple()) -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=16, tm_min=0, tm_sec=0, tm_wday=5, tm_yday=309, tm_isdst=0) ->>> print(dt_hi.utctimetuple()) -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=2, tm_min=0, tm_sec=0, tm_wday=6, tm_yday=310, tm_isdst=0) - -According to the docs, this is what astimezone does. ->>> utc = (dt_hi - dt_hi.utcoffset()).replace(tzinfo=tz_pac) ->>> utc -datetime.datetime(2011, 11, 6, 2, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) ->>> tz_pac.fromutc(utc) == dt_hi.astimezone(tz_pac) -True ->>> tz_pac.fromutc(utc) -datetime.datetime(2011, 11, 5, 19, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) - -Make sure the converted time is correct. 
->>> dt_pac = dt_hi.astimezone(tz_pac) ->>> dt_pac.timetuple() -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=5, tm_hour=19, tm_min=0, tm_sec=0, tm_wday=5, tm_yday=309, tm_isdst=1) ->>> dt_pac.utctimetuple() -time.struct_time(tm_year=2011, tm_mon=11, tm_mday=6, tm_hour=2, tm_min=0, tm_sec=0, tm_wday=6, tm_yday=310, tm_isdst=0) - -Check some internal methods ->>> tz_pac._getStandardBias(datetime.datetime(2011, 1, 1)) -datetime.timedelta(seconds=28800) ->>> tz_pac._getDaylightBias(datetime.datetime(2011, 1, 1)) -datetime.timedelta(seconds=25200) - -Test the offsets ->>> offset = tz_pac.utcoffset(datetime.datetime(2011, 11, 6, 2, 0)) ->>> offset == datetime.timedelta(hours=-8) -True ->>> dst_offset = tz_pac.dst(datetime.datetime(2011, 11, 6, 2, 0) + offset) ->>> dst_offset == datetime.timedelta(hours=1) -True ->>> (offset + dst_offset) == datetime.timedelta(hours=-7) -True - - -Test offsets that occur right at the DST changeover ->>> datetime.datetime.utcfromtimestamp(1320570000).replace( -... tzinfo=TimeZoneInfo.utc()).astimezone(tz_pac) -datetime.datetime(2011, 11, 6, 1, 0, tzinfo=TimeZoneInfo('Pacific Standard Time')) - -""" -__author__ = "Jason R. Coombs " - -import datetime -import logging -import operator -import re -import struct -import winreg -from itertools import count - -import win32api - -log = logging.getLogger(__file__) - - -# A couple of objects for working with objects as if they were native C-type -# structures. -class _SimpleStruct(object): - _fields_ = None # must be overridden by subclasses - - def __init__(self, *args, **kw): - for i, (name, typ) in enumerate(self._fields_): - def_arg = None - if i < len(args): - def_arg = args[i] - if name in kw: - def_arg = kw[name] - if def_arg is not None: - if not isinstance(def_arg, tuple): - def_arg = (def_arg,) - else: - def_arg = () - if len(def_arg) == 1 and isinstance(def_arg[0], typ): - # already an object of this type. - # XXX - should copy.copy??? 
- def_val = def_arg[0] - else: - def_val = typ(*def_arg) - setattr(self, name, def_val) - - def field_names(self): - return [f[0] for f in self._fields_] - - def __eq__(self, other): - if not hasattr(other, "_fields_"): - return False - if self._fields_ != other._fields_: - return False - for name, _ in self._fields_: - if getattr(self, name) != getattr(other, name): - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - -class SYSTEMTIME(_SimpleStruct): - _fields_ = [ - ("year", int), - ("month", int), - ("day_of_week", int), - ("day", int), - ("hour", int), - ("minute", int), - ("second", int), - ("millisecond", int), - ] - - -class TIME_ZONE_INFORMATION(_SimpleStruct): - _fields_ = [ - ("bias", int), - ("standard_name", str), - ("standard_start", SYSTEMTIME), - ("standard_bias", int), - ("daylight_name", str), - ("daylight_start", SYSTEMTIME), - ("daylight_bias", int), - ] - - -class DYNAMIC_TIME_ZONE_INFORMATION(_SimpleStruct): - _fields_ = TIME_ZONE_INFORMATION._fields_ + [ - ("key_name", str), - ("dynamic_daylight_time_disabled", bool), - ] - - -class TimeZoneDefinition(DYNAMIC_TIME_ZONE_INFORMATION): - """ - A time zone definition class based on the win32 - DYNAMIC_TIME_ZONE_INFORMATION structure. - - Describes a bias against UTC (bias), and two dates at which a separate - additional bias applies (standard_bias and daylight_bias). 
- """ - - def __init__(self, *args, **kwargs): - """ - Try to construct a TimeZoneDefinition from - a) [DYNAMIC_]TIME_ZONE_INFORMATION args - b) another TimeZoneDefinition - c) a byte structure (using _from_bytes) - """ - try: - super(TimeZoneDefinition, self).__init__(*args, **kwargs) - return - except (TypeError, ValueError): - pass - - try: - self.__init_from_other(*args, **kwargs) - return - except TypeError: - pass - - try: - self.__init_from_bytes(*args, **kwargs) - return - except TypeError: - pass - - raise TypeError("Invalid arguments for %s" % self.__class__) - - def __init_from_bytes( - self, - bytes, - standard_name="", - daylight_name="", - key_name="", - daylight_disabled=False, - ): - format = "3l8h8h" - components = struct.unpack(format, bytes) - bias, standard_bias, daylight_bias = components[:3] - standard_start = SYSTEMTIME(*components[3:11]) - daylight_start = SYSTEMTIME(*components[11:19]) - super(TimeZoneDefinition, self).__init__( - bias, - standard_name, - standard_start, - standard_bias, - daylight_name, - daylight_start, - daylight_bias, - key_name, - daylight_disabled, - ) - - def __init_from_other(self, other): - if not isinstance(other, TIME_ZONE_INFORMATION): - raise TypeError("Not a TIME_ZONE_INFORMATION") - for name in other.field_names(): - # explicitly get the value from the underlying structure - value = super(TimeZoneDefinition, other).__getattribute__(other, name) - setattr(self, name, value) - # consider instead of the loop above just copying the memory directly - # size = max(ctypes.sizeof(DYNAMIC_TIME_ZONE_INFO), ctypes.sizeof(other)) - # ctypes.memmove(ctypes.addressof(self), other, size) - - def __getattribute__(self, attr): - value = super(TimeZoneDefinition, self).__getattribute__(attr) - if "bias" in attr: - value = datetime.timedelta(minutes=value) - return value - - @classmethod - def current(class_): - "Windows Platform SDK GetTimeZoneInformation" - code, tzi = win32api.GetTimeZoneInformation(True) - return code, 
class_(*tzi) - - def set(self): - tzi = tuple(getattr(self, n) for n, t in self._fields_) - win32api.SetTimeZoneInformation(tzi) - - def copy(self): - # XXX - this is no longer a copy! - return self.__class__(self) - - def locate_daylight_start(self, year): - return self._locate_day(year, self.daylight_start) - - def locate_standard_start(self, year): - return self._locate_day(year, self.standard_start) - - @staticmethod - def _locate_day(year, cutoff): - """ - Takes a SYSTEMTIME object, such as retrieved from a TIME_ZONE_INFORMATION - structure or call to GetTimeZoneInformation and interprets it based on the given - year to identify the actual day. - - This method is necessary because the SYSTEMTIME structure refers to a day by its - day of the week and week of the month (e.g. 4th saturday in March). - - >>> SATURDAY = 6 - >>> MARCH = 3 - >>> st = SYSTEMTIME(2000, MARCH, SATURDAY, 4, 0, 0, 0, 0) - - # according to my calendar, the 4th Saturday in March in 2009 was the 28th - >>> expected_date = datetime.datetime(2009, 3, 28) - >>> TimeZoneDefinition._locate_day(2009, st) == expected_date - True - """ - # MS stores Sunday as 0, Python datetime stores Monday as zero - target_weekday = (cutoff.day_of_week + 6) % 7 - # For SYSTEMTIMEs relating to time zone inforamtion, cutoff.day - # is the week of the month - week_of_month = cutoff.day - # so the following is the first day of that week - day = (week_of_month - 1) * 7 + 1 - result = datetime.datetime( - year, - cutoff.month, - day, - cutoff.hour, - cutoff.minute, - cutoff.second, - cutoff.millisecond, - ) - # now the result is the correct week, but not necessarily the correct day of the week - days_to_go = (target_weekday - result.weekday()) % 7 - result += datetime.timedelta(days_to_go) - # if we selected a day in the month following the target month, - # move back a week or two. 
- # This is necessary because Microsoft defines the fifth week in a month - # to be the last week in a month and adding the time delta might have - # pushed the result into the next month. - while result.month == cutoff.month + 1: - result -= datetime.timedelta(weeks=1) - return result - - -class TimeZoneInfo(datetime.tzinfo): - """ - Main class for handling Windows time zones. - Usage: - TimeZoneInfo(" - print("%s %s %10d %s" % (date_str, attr_string, info[5], rel_name)) - total_size = total_size + info[5] - print(" " * 14 + "%3d files, %10d bytes" % (len(files), total_size)) - - -def run(args): - """run program [args] - Starts the specified program on the remote device. - """ - prog_args = [] - for arg in args: - if " " in arg: - prog_args.append('"' + arg + '"') - else: - prog_args.append(arg) - prog_args = string.join(prog_args, " ") - wincerapi.CeCreateProcess(prog_args, "", None, None, 0, 0, None, "", None) - - -def delete(args): - """delete file, ... - Delete one or more remote files - """ - for arg in args: - try: - wincerapi.CeDeleteFile(arg) - print("Deleted: %s" % arg) - except win32api.error as details: - print_error(details, "Error deleting '%s'" % arg) - - -def DumpCommands(): - print("%-10s - %s" % ("Command", "Description")) - print("%-10s - %s" % ("-------", "-----------")) - for name, item in list(globals().items()): - if type(item) == type(DumpCommands): - doc = getattr(item, "__doc__", "") - if doc: - lines = string.split(doc, "\n") - print("%-10s - %s" % (name, lines[0])) - for line in lines[1:]: - if line: - print(" " * 8, line) - - -def main(): - if len(sys.argv) < 2: - print("You must specify a command!") - DumpCommands() - return - command = sys.argv[1] - fn = globals().get(command) - if fn is None: - print("Unknown command:", command) - DumpCommands() - return - - wincerapi.CeRapiInit() - try: - verinfo = wincerapi.CeGetVersionEx() - print( - "Connected to device, CE version %d.%d %s" - % (verinfo[0], verinfo[1], verinfo[4]) - ) - try: - 
fn(sys.argv[2:]) - except InvalidUsage as msg: - print("Invalid syntax -", msg) - print(fn.__doc__) - - finally: - try: - wincerapi.CeRapiUninit() - except win32api.error as details: - print_error(details, "Error disconnecting") - - -if __name__ == "__main__": - main() diff --git a/lib/win32/scripts/killProcName.py b/lib/win32/scripts/killProcName.py deleted file mode 100644 index c9b399ff..00000000 --- a/lib/win32/scripts/killProcName.py +++ /dev/null @@ -1,62 +0,0 @@ -# Kills a process by process name -# -# Uses the Performance Data Helper to locate the PID, then kills it. -# Will only kill the process if there is only one process of that name -# (eg, attempting to kill "Python.exe" will only work if there is only -# one Python.exe running. (Note that the current process does not -# count - ie, if Python.exe is hosting this script, you can still kill -# another Python.exe (as long as there is only one other Python.exe) - -# Really just a demo for the win32pdh(util) module, which allows you -# to get all sorts of information about a running process and many -# other aspects of your system. - -import sys - -import win32api -import win32con -import win32pdhutil - - -def killProcName(procname): - # Change suggested by Dan Knierim, who found that this performed a - # "refresh", allowing us to kill processes created since this was run - # for the first time. - try: - win32pdhutil.GetPerformanceAttributes("Process", "ID Process", procname) - except: - pass - - pids = win32pdhutil.FindPerformanceAttributesByName(procname) - - # If _my_ pid in there, remove it! 
- try: - pids.remove(win32api.GetCurrentProcessId()) - except ValueError: - pass - - if len(pids) == 0: - result = "Can't find %s" % procname - elif len(pids) > 1: - result = "Found too many %s's - pids=`%s`" % (procname, pids) - else: - handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0, pids[0]) - win32api.TerminateProcess(handle, 0) - win32api.CloseHandle(handle) - result = "" - - return result - - -if __name__ == "__main__": - if len(sys.argv) > 1: - for procname in sys.argv[1:]: - result = killProcName(procname) - if result: - print(result) - print("Dumping all processes...") - win32pdhutil.ShowAllProcesses() - else: - print("Killed %s" % procname) - else: - print("Usage: killProcName.py procname ...") diff --git a/lib/win32/scripts/rasutil.py b/lib/win32/scripts/rasutil.py deleted file mode 100644 index e5a8922d..00000000 --- a/lib/win32/scripts/rasutil.py +++ /dev/null @@ -1,97 +0,0 @@ -# A demo of using the RAS API from Python -import sys - -import win32ras - - -# The error raised if we can not -class ConnectionError(Exception): - pass - - -def Connect(rasEntryName, numRetries=5): - """Make a connection to the specified RAS entry. - - Returns a tuple of (bool, handle) on success. - - bool is 1 if a new connection was established, or 0 is a connection already existed. - - handle is a RAS HANDLE that can be passed to Disconnect() to end the connection. - - Raises a ConnectionError if the connection could not be established. 
- """ - assert numRetries > 0 - for info in win32ras.EnumConnections(): - if info[1].lower() == rasEntryName.lower(): - print("Already connected to", rasEntryName) - return 0, info[0] - - dial_params, have_pw = win32ras.GetEntryDialParams(None, rasEntryName) - if not have_pw: - print("Error: The password is not saved for this connection") - print( - "Please connect manually selecting the 'save password' option and try again" - ) - sys.exit(1) - - print("Connecting to", rasEntryName, "...") - retryCount = numRetries - while retryCount > 0: - rasHandle, errCode = win32ras.Dial(None, None, dial_params, None) - if win32ras.IsHandleValid(rasHandle): - bValid = 1 - break - print("Retrying...") - win32api.Sleep(5000) - retryCount = retryCount - 1 - - if errCode: - raise ConnectionError(errCode, win32ras.GetErrorString(errCode)) - return 1, rasHandle - - -def Disconnect(handle): - if type(handle) == type(""): # have they passed a connection name? - for info in win32ras.EnumConnections(): - if info[1].lower() == handle.lower(): - handle = info[0] - break - else: - raise ConnectionError(0, "Not connected to entry '%s'" % handle) - - win32ras.HangUp(handle) - - -usage = """rasutil.py - Utilities for using RAS - -Usage: - rasutil [-r retryCount] [-c rasname] [-d rasname] - - -r retryCount - Number of times to retry the RAS connection - -c rasname - Connect to the phonebook entry specified by rasname - -d rasname - Disconnect from the phonebook entry specified by rasname -""" - - -def Usage(why): - print(why) - print(usage) - sys.exit(1) - - -if __name__ == "__main__": - import getopt - - try: - opts, args = getopt.getopt(sys.argv[1:], "r:c:d:") - except getopt.error as why: - Usage(why) - retries = 5 - if len(args) != 0: - Usage("Invalid argument") - - for opt, val in opts: - if opt == "-c": - Connect(val, retries) - if opt == "-d": - Disconnect(val) - if opt == "-r": - retries = int(val) diff --git a/lib/win32/scripts/regsetup.py b/lib/win32/scripts/regsetup.py deleted file 
mode 100644 index 885698e1..00000000 --- a/lib/win32/scripts/regsetup.py +++ /dev/null @@ -1,612 +0,0 @@ -# A tool to setup the Python registry. - - -class error(Exception): - pass - - -import sys # at least we can count on this! - - -def FileExists(fname): - """Check if a file exists. Returns true or false.""" - import os - - try: - os.stat(fname) - return 1 - except os.error as details: - return 0 - - -def IsPackageDir(path, packageName, knownFileName): - """Given a path, a ni package name, and possibly a known file name in - the root of the package, see if this path is good. - """ - import os - - if knownFileName is None: - knownFileName = "." - return FileExists(os.path.join(os.path.join(path, packageName), knownFileName)) - - -def IsDebug(): - """Return "_d" if we're running a debug version. - - This is to be used within DLL names when locating them. - """ - import importlib.machinery - - return "_d" if "_d.pyd" in importlib.machinery.EXTENSION_SUFFIXES else "" - - -def FindPackagePath(packageName, knownFileName, searchPaths): - """Find a package. - - Given a ni style package name, check the package is registered. - - First place looked is the registry for an existing entry. Then - the searchPaths are searched. - """ - import os - - import regutil - - pathLook = regutil.GetRegisteredNamedPath(packageName) - if pathLook and IsPackageDir(pathLook, packageName, knownFileName): - return pathLook, None # The currently registered one is good. - # Search down the search paths. 
- for pathLook in searchPaths: - if IsPackageDir(pathLook, packageName, knownFileName): - # Found it - ret = os.path.abspath(pathLook) - return ret, ret - raise error("The package %s can not be located" % packageName) - - -def FindHelpPath(helpFile, helpDesc, searchPaths): - # See if the current registry entry is OK - import os - - import win32api - import win32con - - try: - key = win32api.RegOpenKey( - win32con.HKEY_LOCAL_MACHINE, - "Software\\Microsoft\\Windows\\Help", - 0, - win32con.KEY_ALL_ACCESS, - ) - try: - try: - path = win32api.RegQueryValueEx(key, helpDesc)[0] - if FileExists(os.path.join(path, helpFile)): - return os.path.abspath(path) - except win32api.error: - pass # no registry entry. - finally: - key.Close() - except win32api.error: - pass - for pathLook in searchPaths: - if FileExists(os.path.join(pathLook, helpFile)): - return os.path.abspath(pathLook) - pathLook = os.path.join(pathLook, "Help") - if FileExists(os.path.join(pathLook, helpFile)): - return os.path.abspath(pathLook) - raise error("The help file %s can not be located" % helpFile) - - -def FindAppPath(appName, knownFileName, searchPaths): - """Find an application. - - First place looked is the registry for an existing entry. Then - the searchPaths are searched. - """ - # Look in the first path. - import os - - import regutil - - regPath = regutil.GetRegisteredNamedPath(appName) - if regPath: - pathLook = regPath.split(";")[0] - if regPath and FileExists(os.path.join(pathLook, knownFileName)): - return None # The currently registered one is good. - # Search down the search paths. - for pathLook in searchPaths: - if FileExists(os.path.join(pathLook, knownFileName)): - # Found it - return os.path.abspath(pathLook) - raise error( - "The file %s can not be located for application %s" % (knownFileName, appName) - ) - - -def FindPythonExe(exeAlias, possibleRealNames, searchPaths): - """Find an exe. 
- - Returns the full path to the .exe, and a boolean indicating if the current - registered entry is OK. We don't trust the already registered version even - if it exists - it may be wrong (ie, for a different Python version) - """ - import os - import sys - - import regutil - import win32api - - if possibleRealNames is None: - possibleRealNames = exeAlias - # Look first in Python's home. - found = os.path.join(sys.prefix, possibleRealNames) - if not FileExists(found): # for developers - if "64 bit" in sys.version: - found = os.path.join(sys.prefix, "PCBuild", "amd64", possibleRealNames) - else: - found = os.path.join(sys.prefix, "PCBuild", possibleRealNames) - if not FileExists(found): - found = LocateFileName(possibleRealNames, searchPaths) - - registered_ok = 0 - try: - registered = win32api.RegQueryValue( - regutil.GetRootKey(), regutil.GetAppPathsKey() + "\\" + exeAlias - ) - registered_ok = found == registered - except win32api.error: - pass - return found, registered_ok - - -def QuotedFileName(fname): - """Given a filename, return a quoted version if necessary""" - - import regutil - - try: - fname.index(" ") # Other chars forcing quote? - return '"%s"' % fname - except ValueError: - # No space in name. - return fname - - -def LocateFileName(fileNamesString, searchPaths): - """Locate a file name, anywhere on the search path. - - If the file can not be located, prompt the user to find it for us - (using a common OpenFile dialog) - - Raises KeyboardInterrupt if the user cancels. 
- """ - import os - - import regutil - - fileNames = fileNamesString.split(";") - for path in searchPaths: - for fileName in fileNames: - try: - retPath = os.path.join(path, fileName) - os.stat(retPath) - break - except os.error: - retPath = None - if retPath: - break - else: - fileName = fileNames[0] - try: - import win32con - import win32ui - except ImportError: - raise error( - "Need to locate the file %s, but the win32ui module is not available\nPlease run the program again, passing as a parameter the path to this file." - % fileName - ) - # Display a common dialog to locate the file. - flags = win32con.OFN_FILEMUSTEXIST - ext = os.path.splitext(fileName)[1] - filter = "Files of requested type (*%s)|*%s||" % (ext, ext) - dlg = win32ui.CreateFileDialog(1, None, fileName, flags, filter, None) - dlg.SetOFNTitle("Locate " + fileName) - if dlg.DoModal() != win32con.IDOK: - raise KeyboardInterrupt("User cancelled the process") - retPath = dlg.GetPathName() - return os.path.abspath(retPath) - - -def LocatePath(fileName, searchPaths): - """Like LocateFileName, but returns a directory only.""" - import os - - return os.path.abspath(os.path.split(LocateFileName(fileName, searchPaths))[0]) - - -def LocateOptionalPath(fileName, searchPaths): - """Like LocatePath, but returns None if the user cancels.""" - try: - return LocatePath(fileName, searchPaths) - except KeyboardInterrupt: - return None - - -def LocateOptionalFileName(fileName, searchPaths=None): - """Like LocateFileName, but returns None if the user cancels.""" - try: - return LocateFileName(fileName, searchPaths) - except KeyboardInterrupt: - return None - - -def LocatePythonCore(searchPaths): - """Locate and validate the core Python directories. Returns a list - of paths that should be used as the core (ie, un-named) portion of - the Python path. 
- """ - import os - - import regutil - - currentPath = regutil.GetRegisteredNamedPath(None) - if currentPath: - presearchPaths = currentPath.split(";") - else: - presearchPaths = [os.path.abspath(".")] - libPath = None - for path in presearchPaths: - if FileExists(os.path.join(path, "os.py")): - libPath = path - break - if libPath is None and searchPaths is not None: - libPath = LocatePath("os.py", searchPaths) - if libPath is None: - raise error("The core Python library could not be located.") - - corePath = None - suffix = IsDebug() - for path in presearchPaths: - if FileExists(os.path.join(path, "unicodedata%s.pyd" % suffix)): - corePath = path - break - if corePath is None and searchPaths is not None: - corePath = LocatePath("unicodedata%s.pyd" % suffix, searchPaths) - if corePath is None: - raise error("The core Python path could not be located.") - - installPath = os.path.abspath(os.path.join(libPath, "..")) - return installPath, [libPath, corePath] - - -def FindRegisterPackage(packageName, knownFile, searchPaths, registryAppName=None): - """Find and Register a package. - - Assumes the core registry setup correctly. - - In addition, if the location located by the package is already - in the **core** path, then an entry is registered, but no path. - (no other paths are checked, as the application whose path was used - may later be uninstalled. 
This should not happen with the core) - """ - - import regutil - - if not packageName: - raise error("A package name must be supplied") - corePaths = regutil.GetRegisteredNamedPath(None).split(";") - if not searchPaths: - searchPaths = corePaths - registryAppName = registryAppName or packageName - try: - pathLook, pathAdd = FindPackagePath(packageName, knownFile, searchPaths) - if pathAdd is not None: - if pathAdd in corePaths: - pathAdd = "" - regutil.RegisterNamedPath(registryAppName, pathAdd) - return pathLook - except error as details: - print( - "*** The %s package could not be registered - %s" % (packageName, details) - ) - print( - "*** Please ensure you have passed the correct paths on the command line." - ) - print( - "*** - For packages, you should pass a path to the packages parent directory," - ) - print("*** - and not the package directory itself...") - - -def FindRegisterApp(appName, knownFiles, searchPaths): - """Find and Register a package. - - Assumes the core registry setup correctly. - - """ - - import regutil - - if type(knownFiles) == type(""): - knownFiles = [knownFiles] - paths = [] - try: - for knownFile in knownFiles: - pathLook = FindAppPath(appName, knownFile, searchPaths) - if pathLook: - paths.append(pathLook) - except error as details: - print("*** ", details) - return - - regutil.RegisterNamedPath(appName, ";".join(paths)) - - -def FindRegisterPythonExe(exeAlias, searchPaths, actualFileNames=None): - """Find and Register a Python exe (not necessarily *the* python.exe) - - Assumes the core registry setup correctly. 
- """ - - import regutil - - fname, ok = FindPythonExe(exeAlias, actualFileNames, searchPaths) - if not ok: - regutil.RegisterPythonExe(fname, exeAlias) - return fname - - -def FindRegisterHelpFile(helpFile, searchPaths, helpDesc=None): - import regutil - - try: - pathLook = FindHelpPath(helpFile, helpDesc, searchPaths) - except error as details: - print("*** ", details) - return - # print "%s found at %s" % (helpFile, pathLook) - regutil.RegisterHelpFile(helpFile, pathLook, helpDesc) - - -def SetupCore(searchPaths): - """Setup the core Python information in the registry. - - This function makes no assumptions about the current state of sys.path. - - After this function has completed, you should have access to the standard - Python library, and the standard Win32 extensions - """ - - import sys - - for path in searchPaths: - sys.path.append(path) - - import os - - import regutil - import win32api - import win32con - - installPath, corePaths = LocatePythonCore(searchPaths) - # Register the core Pythonpath. - print(corePaths) - regutil.RegisterNamedPath(None, ";".join(corePaths)) - - # Register the install path. - hKey = win32api.RegCreateKey(regutil.GetRootKey(), regutil.BuildDefaultPythonKey()) - try: - # Core Paths. - win32api.RegSetValue(hKey, "InstallPath", win32con.REG_SZ, installPath) - finally: - win32api.RegCloseKey(hKey) - - # Register the win32 core paths. - win32paths = ( - os.path.abspath(os.path.split(win32api.__file__)[0]) - + ";" - + os.path.abspath( - os.path.split(LocateFileName("win32con.py;win32con.pyc", sys.path))[0] - ) - ) - - # Python has builtin support for finding a "DLLs" directory, but - # not a PCBuild. 
Having it in the core paths means it is ignored when - # an EXE not in the Python dir is hosting us - so we add it as a named - # value - check = os.path.join(sys.prefix, "PCBuild") - if "64 bit" in sys.version: - check = os.path.join(check, "amd64") - if os.path.isdir(check): - regutil.RegisterNamedPath("PCBuild", check) - - -def RegisterShellInfo(searchPaths): - """Registers key parts of the Python installation with the Windows Shell. - - Assumes a valid, minimal Python installation exists - (ie, SetupCore() has been previously successfully run) - """ - import regutil - import win32con - - suffix = IsDebug() - # Set up a pointer to the .exe's - exePath = FindRegisterPythonExe("Python%s.exe" % suffix, searchPaths) - regutil.SetRegistryDefaultValue(".py", "Python.File", win32con.HKEY_CLASSES_ROOT) - regutil.RegisterShellCommand("Open", QuotedFileName(exePath) + ' "%1" %*', "&Run") - regutil.SetRegistryDefaultValue( - "Python.File\\DefaultIcon", "%s,0" % exePath, win32con.HKEY_CLASSES_ROOT - ) - - FindRegisterHelpFile("Python.hlp", searchPaths, "Main Python Documentation") - FindRegisterHelpFile("ActivePython.chm", searchPaths, "Main Python Documentation") - - # We consider the win32 core, as it contains all the win32 api type - # stuff we need. - - -# FindRegisterApp("win32", ["win32con.pyc", "win32api%s.pyd" % suffix], searchPaths) - -usage = ( - """\ -regsetup.py - Setup/maintain the registry for Python apps. - -Run without options, (but possibly search paths) to repair a totally broken -python registry setup. This should allow other options to work. - -Usage: %s [options ...] paths ... --p packageName -- Find and register a package. Looks in the paths for - a sub-directory with the name of the package, and - adds a path entry for the package. --a appName -- Unconditionally add an application name to the path. - A new path entry is create with the app name, and the - paths specified are added to the registry. --c -- Add the specified paths to the core Pythonpath. 
- If a path appears on the core path, and a package also - needs that same path, the package will not bother - registering it. Therefore, By adding paths to the - core path, you can avoid packages re-registering the same path. --m filename -- Find and register the specific file name as a module. - Do not include a path on the filename! ---shell -- Register everything with the Win95/NT shell. ---upackage name -- Unregister the package ---uapp name -- Unregister the app (identical to --upackage) ---umodule name -- Unregister the module - ---description -- Print a description of the usage. ---examples -- Print examples of usage. -""" - % sys.argv[0] -) - -description = """\ -If no options are processed, the program attempts to validate and set -the standard Python path to the point where the standard library is -available. This can be handy if you move Python to a new drive/sub-directory, -in which case most of the options would fail (as they need at least string.py, -os.py etc to function.) -Running without options should repair Python well enough to run with -the other options. - -paths are search paths that the program will use to seek out a file. -For example, when registering the core Python, you may wish to -provide paths to non-standard places to look for the Python help files, -library files, etc. - -See also the "regcheck.py" utility which will check and dump the contents -of the registry. -""" - -examples = """\ -Examples: -"regsetup c:\\wierd\\spot\\1 c:\\wierd\\spot\\2" -Attempts to setup the core Python. Looks in some standard places, -as well as the 2 wierd spots to locate the core Python files (eg, Python.exe, -python14.dll, the standard library and Win32 Extensions. - -"regsetup -a myappname . 
.\subdir" -Registers a new Pythonpath entry named myappname, with "C:\\I\\AM\\HERE" and -"C:\\I\\AM\\HERE\subdir" added to the path (ie, all args are converted to -absolute paths) - -"regsetup -c c:\\my\\python\\files" -Unconditionally add "c:\\my\\python\\files" to the 'core' Python path. - -"regsetup -m some.pyd \\windows\\system" -Register the module some.pyd in \\windows\\system as a registered -module. This will allow some.pyd to be imported, even though the -windows system directory is not (usually!) on the Python Path. - -"regsetup --umodule some" -Unregister the module "some". This means normal import rules then apply -for that module. -""" - -if __name__ == "__main__": - if len(sys.argv) > 1 and sys.argv[1] in ["/?", "-?", "-help", "-h"]: - print(usage) - elif len(sys.argv) == 1 or not sys.argv[1][0] in ["/", "-"]: - # No args, or useful args. - searchPath = sys.path[:] - for arg in sys.argv[1:]: - searchPath.append(arg) - # Good chance we are being run from the "regsetup.py" directory. - # Typically this will be "\somewhere\win32\Scripts" and the - # "somewhere" and "..\Lib" should also be searched. - searchPath.append("..\\Build") - searchPath.append("..\\Lib") - searchPath.append("..") - searchPath.append("..\\..") - - # for developers: - # also search somewhere\lib, ..\build, and ..\..\build - searchPath.append("..\\..\\lib") - searchPath.append("..\\build") - if "64 bit" in sys.version: - searchPath.append("..\\..\\pcbuild\\amd64") - else: - searchPath.append("..\\..\\pcbuild") - - print("Attempting to setup/repair the Python core") - - SetupCore(searchPath) - RegisterShellInfo(searchPath) - FindRegisterHelpFile("PyWin32.chm", searchPath, "Pythonwin Reference") - # Check the registry. 
- print("Registration complete - checking the registry...") - import regcheck - - regcheck.CheckRegistry() - else: - searchPaths = [] - import getopt - - opts, args = getopt.getopt( - sys.argv[1:], - "p:a:m:c", - ["shell", "upackage=", "uapp=", "umodule=", "description", "examples"], - ) - for arg in args: - searchPaths.append(arg) - for o, a in opts: - if o == "--description": - print(description) - if o == "--examples": - print(examples) - if o == "--shell": - print("Registering the Python core.") - RegisterShellInfo(searchPaths) - if o == "-p": - print("Registering package", a) - FindRegisterPackage(a, None, searchPaths) - if o in ["--upackage", "--uapp"]: - import regutil - - print("Unregistering application/package", a) - regutil.UnregisterNamedPath(a) - if o == "-a": - import regutil - - path = ";".join(searchPaths) - print("Registering application", a, "to path", path) - regutil.RegisterNamedPath(a, path) - if o == "-c": - if not len(searchPaths): - raise error("-c option must provide at least one additional path") - import regutil - import win32api - - currentPaths = regutil.GetRegisteredNamedPath(None).split(";") - oldLen = len(currentPaths) - for newPath in searchPaths: - if newPath not in currentPaths: - currentPaths.append(newPath) - if len(currentPaths) != oldLen: - print( - "Registering %d new core paths" % (len(currentPaths) - oldLen) - ) - regutil.RegisterNamedPath(None, ";".join(currentPaths)) - else: - print("All specified paths are already registered.") diff --git a/lib/win32/scripts/setup_d.py b/lib/win32/scripts/setup_d.py deleted file mode 100644 index babd8359..00000000 --- a/lib/win32/scripts/setup_d.py +++ /dev/null @@ -1,110 +0,0 @@ -# Install and register pythonxx_d.dll, pywintypesxx_d.dll and pythoncomxx_d.dll -# -# Assumes the _d files can be found in the same directory as this script -# or in the cwd. 
- -import os -import shutil -import sys -import winreg - -import win32api - - -def usage_and_die(rc): - print() - print("This script is designed to copy and register the Python debug") - print("binaries. It looks for pythonxx_d.dll, pythoncomxx_d.dll etc,") - print("and installs them to work correctly with Python debug builds.") - print() - print("You will generally find this script in the. zip file that") - print("included these _d files. Please run this script from") - print("that directory") - sys.exit(rc) - - -if win32api.__file__.find("_d") > 0: - print("This scripts appears to be running a DEBUG version of Python.") - print("Please run it using a normal release build (python.exe)") - usage_and_die(1) - -try: - import pythoncom -except ImportError as details: - print("Could not import the release version of pythoncom") - print("The error details are: %s" % (details,)) - print("Please correct this error and rerun the script") - usage_and_die(2) - -try: - import pywintypes -except ImportError as details: - print("Could not import the release version of pywintypes") - print("The error details are: %s" % (details,)) - print("Please correct this error and rerun the script") - usage_and_die(2) - - -def _docopy(src, dest): - orig_src = src - if not os.path.isfile(src): - src = os.path.join(os.path.split(sys.argv[0])[0], src) - print( - "Can not find %s or %s to copy" - % (os.path.abspath(orig_src), os.path.abspath(src)) - ) - return 0 - try: - shutil.copy(src, dest) - print("Copied %s -> %s" % (src, dest)) - return 1 - except: - print("Error copying '%s' -> '%s'" % (src, dest)) - print(str(sys.exc_info[1])) - usage_and_die(3) - - -def _doregister(mod_name, dll_name): - assert os.path.isfile(dll_name), "Shouldn't get here if the file doesn't exist!" 
- try: - key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, - "Software\\Python\\PythonCore\\%s\\Modules\\%s" % (sys.winver, mod_name), - ) - except winreg.error: - try: - key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, - "Software\\Python\\PythonCore\\%s\\Modules\\%s" - % (sys.winver, mod_name), - ) - except winreg.error: - print( - "Could not find the existing '%s' module registered in the registry" - % (mod_name,) - ) - usage_and_die(4) - # Create the debug key. - sub_key = winreg.CreateKey(key, "Debug") - winreg.SetValue(sub_key, None, winreg.REG_SZ, dll_name) - print("Registered '%s' in the registry" % (dll_name,)) - - -def _domodule(mod_name, release_mod_filename): - path, fname = os.path.split(release_mod_filename) - base, ext = os.path.splitext(fname) - new_fname = base + "_d" + ext - if _docopy(new_fname, path): - _doregister(mod_name, os.path.abspath(os.path.join(path, new_fname))) - - -# First the main Python DLL. -path, fname = path, fname = os.path.split(win32api.GetModuleFileName(sys.dllhandle)) -base, ext = os.path.splitext(fname) -_docopy(base + "_d" + ext, path) - -# Then pythoncom and pywintypes. -_domodule("pythoncom", pythoncom.__file__) -_domodule("pywintypes", pywintypes.__file__) - -print("System _d files were setup.") diff --git a/lib/win32/test/handles.py b/lib/win32/test/handles.py deleted file mode 100644 index ad607353..00000000 --- a/lib/win32/test/handles.py +++ /dev/null @@ -1,171 +0,0 @@ -import sys -import unittest - -import pywintypes -import win32api - - -# A class that will never die vie refcounting, but will die via GC. -class Cycle: - def __init__(self, handle): - self.cycle = self - self.handle = handle - - -class PyHandleTestCase(unittest.TestCase): - def testCleanup1(self): - # We used to clobber all outstanding exceptions. 
- def f1(invalidate): - import win32event - - h = win32event.CreateEvent(None, 0, 0, None) - if invalidate: - win32api.CloseHandle(int(h)) - 1 / 0 - # If we invalidated, then the object destruction code will attempt - # to close an invalid handle. We don't wan't an exception in - # this case - - def f2(invalidate): - """This function should throw an IOError.""" - try: - f1(invalidate) - except ZeroDivisionError as exc: - raise IOError("raise 2") - - self.assertRaises(IOError, f2, False) - # Now do it again, but so the auto object destruction - # actually fails. - self.assertRaises(IOError, f2, True) - - def testCleanup2(self): - # Cause an exception during object destruction. - # The worst this does is cause an ".XXX undetected error (why=3)" - # So avoiding that is the goal - import win32event - - h = win32event.CreateEvent(None, 0, 0, None) - # Close the handle underneath the object. - win32api.CloseHandle(int(h)) - # Object destructor runs with the implicit close failing - h = None - - def testCleanup3(self): - # And again with a class - no __del__ - import win32event - - class Test: - def __init__(self): - self.h = win32event.CreateEvent(None, 0, 0, None) - win32api.CloseHandle(int(self.h)) - - t = Test() - t = None - - def testCleanupGood(self): - # And check that normal error semantics *do* work. - import win32event - - h = win32event.CreateEvent(None, 0, 0, None) - win32api.CloseHandle(int(h)) - self.assertRaises(win32api.error, h.Close) - # A following Close is documented as working - h.Close() - - def testInvalid(self): - h = pywintypes.HANDLE(-2) - try: - h.Close() - # Ideally, we'd: - # self.assertRaises(win32api.error, h.Close) - # and everywhere markh has tried, that would pass - but not on - # github automation, where the .Close apparently works fine. - # (same for -1. Using 0 appears to work fine everywhere) - # There still seems value in testing it though, so we just accept - # either working or failing. 
- except win32api.error: - pass - - def testOtherHandle(self): - h = pywintypes.HANDLE(1) - h2 = pywintypes.HANDLE(h) - self.assertEqual(h, h2) - # but the above doesn't really test everything - we want a way to - # pass the handle directly into PyWinLong_AsVoidPtr. One way to - # to that is to abuse win32api.GetProcAddress() - the 2nd param - # is passed to PyWinLong_AsVoidPtr() if its not a string. - # passing a handle value of '1' should work - there is something - # at that ordinal - win32api.GetProcAddress(sys.dllhandle, h) - - def testHandleInDict(self): - h = pywintypes.HANDLE(1) - d = dict(foo=h) - self.assertEqual(d["foo"], h) - - def testHandleInDictThenInt(self): - h = pywintypes.HANDLE(1) - d = dict(foo=h) - self.assertEqual(d["foo"], 1) - - def testHandleCompareNone(self): - h = pywintypes.HANDLE(1) - self.assertNotEqual(h, None) - self.assertNotEqual(None, h) - # ensure we use both __eq__ and __ne__ ops - self.assertFalse(h == None) - self.assertTrue(h != None) - - def testHandleCompareInt(self): - h = pywintypes.HANDLE(1) - self.assertNotEqual(h, 0) - self.assertEqual(h, 1) - # ensure we use both __eq__ and __ne__ ops - self.assertTrue(h == 1) - self.assertTrue(1 == h) - self.assertFalse(h != 1) - self.assertFalse(1 != h) - self.assertFalse(h == 0) - self.assertFalse(0 == h) - self.assertTrue(h != 0) - self.assertTrue(0 != h) - - def testHandleNonZero(self): - h = pywintypes.HANDLE(0) - self.assertFalse(h) - - h = pywintypes.HANDLE(1) - self.assertTrue(h) - - def testLong(self): - # sys.maxint+1 should always be a 'valid' handle, treated as an - # unsigned int, even though it is a long. Although pywin32 should not - # directly create such longs, using struct.unpack() with a P format - # may well return them. 
eg: - # >>> struct.unpack("P", struct.pack("P", -1)) - # (4294967295L,) - try: - big = sys.maxsize - except AttributeError: - big = sys.maxint - pywintypes.HANDLE(big + 1) - - def testGC(self): - # This used to provoke: - # Fatal Python error: unexpected exception during garbage collection - def make(): - h = pywintypes.HANDLE(-2) - c = Cycle(h) - - import gc - - make() - gc.collect() - - def testTypes(self): - self.assertRaises(TypeError, pywintypes.HANDLE, "foo") - self.assertRaises(TypeError, pywintypes.HANDLE, ()) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_clipboard.py b/lib/win32/test/test_clipboard.py deleted file mode 100644 index 843435b6..00000000 --- a/lib/win32/test/test_clipboard.py +++ /dev/null @@ -1,127 +0,0 @@ -# General test module for win32api - please add some :) -import array -import os -import sys -import unittest - -import pywintypes -import win32con -import win32gui -from pywin32_testutil import str2bytes -from win32clipboard import * - -custom_format_name = "PythonClipboardTestFormat" - - -class CrashingTestCase(unittest.TestCase): - def test_722082(self): - class crasher(object): - pass - - obj = crasher() - OpenClipboard() - try: - EmptyClipboard() - # This used to crash - now correctly raises type error. 
- self.assertRaises(TypeError, SetClipboardData, 0, obj) - finally: - CloseClipboard() - - -class TestBitmap(unittest.TestCase): - def setUp(self): - self.bmp_handle = None - try: - this_file = __file__ - except NameError: - this_file = sys.argv[0] - this_dir = os.path.dirname(this_file) - self.bmp_name = os.path.join( - os.path.abspath(this_dir), "..", "Demos", "images", "smiley.bmp" - ) - self.assertTrue(os.path.isfile(self.bmp_name), self.bmp_name) - flags = win32con.LR_DEFAULTSIZE | win32con.LR_LOADFROMFILE - self.bmp_handle = win32gui.LoadImage( - 0, self.bmp_name, win32con.IMAGE_BITMAP, 0, 0, flags - ) - self.assertTrue(self.bmp_handle, "Failed to get a bitmap handle") - - def tearDown(self): - if self.bmp_handle: - win32gui.DeleteObject(self.bmp_handle) - - def test_bitmap_roundtrip(self): - OpenClipboard() - try: - SetClipboardData(win32con.CF_BITMAP, self.bmp_handle) - got_handle = GetClipboardDataHandle(win32con.CF_BITMAP) - self.assertEqual(got_handle, self.bmp_handle) - finally: - CloseClipboard() - - -class TestStrings(unittest.TestCase): - def setUp(self): - OpenClipboard() - - def tearDown(self): - CloseClipboard() - - def test_unicode(self): - val = "test-\a9har" - SetClipboardData(win32con.CF_UNICODETEXT, val) - self.assertEqual(GetClipboardData(win32con.CF_UNICODETEXT), val) - - def test_unicode_text(self): - val = "test-val" - SetClipboardText(val) - # GetClipboardData doesn't to auto string conversions - so on py3k, - # CF_TEXT returns bytes. 
- expected = str2bytes(val) - self.assertEqual(GetClipboardData(win32con.CF_TEXT), expected) - SetClipboardText(val, win32con.CF_UNICODETEXT) - self.assertEqual(GetClipboardData(win32con.CF_UNICODETEXT), val) - - def test_string(self): - val = str2bytes("test") - SetClipboardData(win32con.CF_TEXT, val) - self.assertEqual(GetClipboardData(win32con.CF_TEXT), val) - - -class TestGlobalMemory(unittest.TestCase): - def setUp(self): - OpenClipboard() - - def tearDown(self): - CloseClipboard() - - def test_mem(self): - val = str2bytes("test") - expected = str2bytes("test\0") - SetClipboardData(win32con.CF_TEXT, val) - # Get the raw data - this will include the '\0' - raw_data = GetGlobalMemory(GetClipboardDataHandle(win32con.CF_TEXT)) - self.assertEqual(expected, raw_data) - - def test_bad_mem(self): - self.assertRaises(pywintypes.error, GetGlobalMemory, 0) - self.assertRaises(pywintypes.error, GetGlobalMemory, -1) - if sys.getwindowsversion()[0] <= 5: - # For some reason, the value '1' dies from a 64bit process, but - # "works" (ie, gives the correct exception) from a 32bit process. - # just silently skip this value on Vista. 
- self.assertRaises(pywintypes.error, GetGlobalMemory, 1) - - def test_custom_mem(self): - test_data = str2bytes("hello\x00\xff") - test_buffer = array.array("b", test_data) - cf = RegisterClipboardFormat(custom_format_name) - self.assertEqual(custom_format_name, GetClipboardFormatName(cf)) - SetClipboardData(cf, test_buffer) - hglobal = GetClipboardDataHandle(cf) - data = GetGlobalMemory(hglobal) - self.assertEqual(data, test_data) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_exceptions.py b/lib/win32/test/test_exceptions.py deleted file mode 100644 index 33101107..00000000 --- a/lib/win32/test/test_exceptions.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Test pywin32's error semantics""" -import sys -import unittest - -import pythoncom -import pywintypes -import win32api -import win32file -import winerror - - -class TestBase(unittest.TestCase): - def _testExceptionIndex(self, exc, index, expected): - # check the exception itself can be indexed if not py3k - if sys.version_info < (3,): - self.assertEqual(exc[index], expected) - # and that exception.args can is the same. 
- self.assertEqual(exc.args[index], expected) - - -class TestAPISimple(TestBase): - def _getInvalidHandleException(self): - try: - win32api.CloseHandle(1) - except win32api.error as exc: - return exc - self.fail("Didn't get invalid-handle exception.") - - def testSimple(self): - self.assertRaises(pywintypes.error, win32api.CloseHandle, 1) - - def testErrnoIndex(self): - exc = self._getInvalidHandleException() - self._testExceptionIndex(exc, 0, winerror.ERROR_INVALID_HANDLE) - - def testFuncIndex(self): - exc = self._getInvalidHandleException() - self._testExceptionIndex(exc, 1, "CloseHandle") - - def testMessageIndex(self): - exc = self._getInvalidHandleException() - expected = win32api.FormatMessage(winerror.ERROR_INVALID_HANDLE).rstrip() - self._testExceptionIndex(exc, 2, expected) - - def testUnpack(self): - try: - win32api.CloseHandle(1) - self.fail("expected exception!") - except win32api.error as exc: - self.assertEqual(exc.winerror, winerror.ERROR_INVALID_HANDLE) - self.assertEqual(exc.funcname, "CloseHandle") - expected_msg = win32api.FormatMessage( - winerror.ERROR_INVALID_HANDLE - ).rstrip() - self.assertEqual(exc.strerror, expected_msg) - - def testAsStr(self): - exc = self._getInvalidHandleException() - err_msg = win32api.FormatMessage(winerror.ERROR_INVALID_HANDLE).rstrip() - # early on the result actually *was* a tuple - it must always look like one - err_tuple = (winerror.ERROR_INVALID_HANDLE, "CloseHandle", err_msg) - self.assertEqual(str(exc), str(err_tuple)) - - def testAsTuple(self): - exc = self._getInvalidHandleException() - err_msg = win32api.FormatMessage(winerror.ERROR_INVALID_HANDLE).rstrip() - # early on the result actually *was* a tuple - it must be able to be one - err_tuple = (winerror.ERROR_INVALID_HANDLE, "CloseHandle", err_msg) - if sys.version_info < (3,): - self.assertEqual(tuple(exc), err_tuple) - else: - self.assertEqual(exc.args, err_tuple) - - def testClassName(self): - exc = self._getInvalidHandleException() - # The error 
class has always been named 'error'. That's not ideal :( - self.assertEqual(exc.__class__.__name__, "error") - - def testIdentity(self): - exc = self._getInvalidHandleException() - self.assertTrue(exc.__class__ is pywintypes.error) - - def testBaseClass(self): - self.assertEqual(pywintypes.error.__bases__, (Exception,)) - - def testAttributes(self): - exc = self._getInvalidHandleException() - err_msg = win32api.FormatMessage(winerror.ERROR_INVALID_HANDLE).rstrip() - self.assertEqual(exc.winerror, winerror.ERROR_INVALID_HANDLE) - self.assertEqual(exc.strerror, err_msg) - self.assertEqual(exc.funcname, "CloseHandle") - - # some tests for 'insane' args. - def testStrangeArgsNone(self): - try: - raise pywintypes.error() - self.fail("Expected exception") - except pywintypes.error as exc: - self.assertEqual(exc.args, ()) - self.assertEqual(exc.winerror, None) - self.assertEqual(exc.funcname, None) - self.assertEqual(exc.strerror, None) - - def testStrangeArgsNotEnough(self): - try: - raise pywintypes.error("foo") - self.fail("Expected exception") - except pywintypes.error as exc: - assert exc.args[0] == "foo" - # 'winerror' always args[0] - self.assertEqual(exc.winerror, "foo") - self.assertEqual(exc.funcname, None) - self.assertEqual(exc.strerror, None) - - def testStrangeArgsTooMany(self): - try: - raise pywintypes.error("foo", "bar", "you", "never", "kn", 0) - self.fail("Expected exception") - except pywintypes.error as exc: - self.assertEqual(exc.args[0], "foo") - self.assertEqual(exc.args[-1], 0) - self.assertEqual(exc.winerror, "foo") - self.assertEqual(exc.funcname, "bar") - self.assertEqual(exc.strerror, "you") - - -class TestCOMSimple(TestBase): - def _getException(self): - try: - pythoncom.StgOpenStorage("foo", None, 0) - except pythoncom.com_error as exc: - return exc - self.fail("Didn't get storage exception.") - - def testIs(self): - self.assertTrue(pythoncom.com_error is pywintypes.com_error) - - def testSimple(self): - 
self.assertRaises(pythoncom.com_error, pythoncom.StgOpenStorage, "foo", None, 0) - - def testErrnoIndex(self): - exc = self._getException() - self._testExceptionIndex(exc, 0, winerror.STG_E_INVALIDFLAG) - - def testMessageIndex(self): - exc = self._getException() - expected = win32api.FormatMessage(winerror.STG_E_INVALIDFLAG).rstrip() - self._testExceptionIndex(exc, 1, expected) - - def testAsStr(self): - exc = self._getException() - err_msg = win32api.FormatMessage(winerror.STG_E_INVALIDFLAG).rstrip() - # early on the result actually *was* a tuple - it must always look like one - err_tuple = (winerror.STG_E_INVALIDFLAG, err_msg, None, None) - self.assertEqual(str(exc), str(err_tuple)) - - def testAsTuple(self): - exc = self._getException() - err_msg = win32api.FormatMessage(winerror.STG_E_INVALIDFLAG).rstrip() - # early on the result actually *was* a tuple - it must be able to be one - err_tuple = (winerror.STG_E_INVALIDFLAG, err_msg, None, None) - if sys.version_info < (3,): - self.assertEqual(tuple(exc), err_tuple) - else: - self.assertEqual(exc.args, err_tuple) - - def testClassName(self): - exc = self._getException() - self.assertEqual(exc.__class__.__name__, "com_error") - - def testIdentity(self): - exc = self._getException() - self.assertTrue(exc.__class__ is pywintypes.com_error) - - def testBaseClass(self): - exc = self._getException() - self.assertEqual(pywintypes.com_error.__bases__, (Exception,)) - - def testAttributes(self): - exc = self._getException() - err_msg = win32api.FormatMessage(winerror.STG_E_INVALIDFLAG).rstrip() - self.assertEqual(exc.hresult, winerror.STG_E_INVALIDFLAG) - self.assertEqual(exc.strerror, err_msg) - self.assertEqual(exc.argerror, None) - self.assertEqual(exc.excepinfo, None) - - def testStrangeArgsNone(self): - try: - raise pywintypes.com_error() - self.fail("Expected exception") - except pywintypes.com_error as exc: - self.assertEqual(exc.args, ()) - self.assertEqual(exc.hresult, None) - self.assertEqual(exc.strerror, None) 
- self.assertEqual(exc.argerror, None) - self.assertEqual(exc.excepinfo, None) - - def testStrangeArgsNotEnough(self): - try: - raise pywintypes.com_error("foo") - self.fail("Expected exception") - except pywintypes.com_error as exc: - self.assertEqual(exc.args[0], "foo") - self.assertEqual(exc.hresult, "foo") - self.assertEqual(exc.strerror, None) - self.assertEqual(exc.excepinfo, None) - self.assertEqual(exc.argerror, None) - - def testStrangeArgsTooMany(self): - try: - raise pywintypes.com_error("foo", "bar", "you", "never", "kn", 0) - self.fail("Expected exception") - except pywintypes.com_error as exc: - self.assertEqual(exc.args[0], "foo") - self.assertEqual(exc.args[-1], 0) - self.assertEqual(exc.hresult, "foo") - self.assertEqual(exc.strerror, "bar") - self.assertEqual(exc.excepinfo, "you") - self.assertEqual(exc.argerror, "never") - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_odbc.py b/lib/win32/test/test_odbc.py deleted file mode 100644 index ef2b4516..00000000 --- a/lib/win32/test/test_odbc.py +++ /dev/null @@ -1,267 +0,0 @@ -# odbc test suite kindly contributed by Frank Millman. -import os -import sys -import tempfile -import unittest - -import odbc -import pythoncom -from pywin32_testutil import TestSkipped, str2bytes, str2memory -from win32com.client import constants - -# We use the DAO ODBC driver -from win32com.client.gencache import EnsureDispatch - - -class TestStuff(unittest.TestCase): - def setUp(self): - self.tablename = "pywin32test_users" - self.db_filename = None - self.conn = self.cur = None - try: - # Test any database if a connection string is supplied... - conn_str = os.environ["TEST_ODBC_CONNECTION_STRING"] - except KeyError: - # Create a local MSAccess DB for testing. - self.db_filename = tempfile.NamedTemporaryFile().name + ".mdb" - - # Create a brand-new database - what is the story with these? 
- for suffix in (".36", ".35", ".30"): - try: - dbe = EnsureDispatch("DAO.DBEngine" + suffix) - break - except pythoncom.com_error: - pass - else: - raise TestSkipped("Can't find a DB engine") - - workspace = dbe.Workspaces(0) - - newdb = workspace.CreateDatabase( - self.db_filename, constants.dbLangGeneral, constants.dbEncrypt - ) - - newdb.Close() - - conn_str = "Driver={Microsoft Access Driver (*.mdb)};dbq=%s;Uid=;Pwd=;" % ( - self.db_filename, - ) - ## print 'Connection string:', conn_str - self.conn = odbc.odbc(conn_str) - # And we expect a 'users' table for these tests. - self.cur = self.conn.cursor() - ## self.cur.setoutputsize(1000) - try: - self.cur.execute("""drop table %s""" % self.tablename) - except (odbc.error, odbc.progError): - pass - - ## This needs to be adjusted for sql server syntax for unicode fields - ## - memo -> TEXT - ## - varchar -> nvarchar - self.assertEqual( - self.cur.execute( - """create table %s ( - userid varchar(25), - username varchar(25), - bitfield bit, - intfield integer, - floatfield float, - datefield datetime, - rawfield varbinary(100), - longtextfield memo, - longbinaryfield image - )""" - % self.tablename - ), - -1, - ) - - def tearDown(self): - if self.cur is not None: - try: - self.cur.execute("""drop table %s""" % self.tablename) - except (odbc.error, odbc.progError) as why: - print("Failed to delete test table %s" % self.tablename, why) - - self.cur.close() - self.cur = None - if self.conn is not None: - self.conn.close() - self.conn = None - if self.db_filename is not None: - try: - os.unlink(self.db_filename) - except OSError: - pass - - def test_insert_select(self, userid="Frank", username="Frank Millman"): - self.assertEqual( - self.cur.execute( - "insert into %s (userid, username) \ - values (?,?)" - % self.tablename, - [userid, username], - ), - 1, - ) - self.assertEqual( - self.cur.execute( - "select * from %s \ - where userid = ?" 
- % self.tablename, - [userid.lower()], - ), - 0, - ) - self.assertEqual( - self.cur.execute( - "select * from %s \ - where username = ?" - % self.tablename, - [username.lower()], - ), - 0, - ) - - def test_insert_select_unicode(self, userid="Frank", username="Frank Millman"): - self.assertEqual( - self.cur.execute( - "insert into %s (userid, username)\ - values (?,?)" - % self.tablename, - [userid, username], - ), - 1, - ) - self.assertEqual( - self.cur.execute( - "select * from %s \ - where userid = ?" - % self.tablename, - [userid.lower()], - ), - 0, - ) - self.assertEqual( - self.cur.execute( - "select * from %s \ - where username = ?" - % self.tablename, - [username.lower()], - ), - 0, - ) - - def test_insert_select_unicode_ext(self): - userid = "t-\xe0\xf2" - username = "test-\xe0\xf2 name" - self.test_insert_select_unicode(userid, username) - - def _test_val(self, fieldName, value): - for x in range(100): - self.cur.execute("delete from %s where userid='Frank'" % self.tablename) - self.assertEqual( - self.cur.execute( - "insert into %s (userid, %s) values (?,?)" - % (self.tablename, fieldName), - ["Frank", value], - ), - 1, - ) - self.cur.execute( - "select %s from %s where userid = ?" 
% (fieldName, self.tablename), - ["Frank"], - ) - rows = self.cur.fetchmany() - self.assertEqual(1, len(rows)) - row = rows[0] - self.assertEqual(row[0], value) - - def testBit(self): - self._test_val("bitfield", 1) - self._test_val("bitfield", 0) - - def testInt(self): - self._test_val("intfield", 1) - self._test_val("intfield", 0) - try: - big = sys.maxsize - except AttributeError: - big = sys.maxint - self._test_val("intfield", big) - - def testFloat(self): - self._test_val("floatfield", 1.01) - self._test_val("floatfield", 0) - - def testVarchar( - self, - ): - self._test_val("username", "foo") - - def testLongVarchar(self): - """Test a long text field in excess of internal cursor data size (65536)""" - self._test_val("longtextfield", "abc" * 70000) - - def testLongBinary(self): - """Test a long raw field in excess of internal cursor data size (65536)""" - self._test_val("longbinaryfield", str2memory("\0\1\2" * 70000)) - - def testRaw(self): - ## Test binary data - self._test_val("rawfield", str2memory("\1\2\3\4\0\5\6\7\8")) - - def test_widechar(self): - """Test a unicode character that would be mangled if bound as plain character. - For example, previously the below was returned as ascii 'a' - """ - self._test_val("username", "\u0101") - - def testDates(self): - import datetime - - for v in ((1900, 12, 25, 23, 39, 59),): - d = datetime.datetime(*v) - self._test_val("datefield", d) - - def test_set_nonzero_length(self): - self.assertEqual( - self.cur.execute( - "insert into %s (userid,username) " "values (?,?)" % self.tablename, - ["Frank", "Frank Millman"], - ), - 1, - ) - self.assertEqual( - self.cur.execute("update %s set username = ?" 
% self.tablename, ["Frank"]), - 1, - ) - self.assertEqual(self.cur.execute("select * from %s" % self.tablename), 0) - self.assertEqual(len(self.cur.fetchone()[1]), 5) - - def test_set_zero_length(self): - self.assertEqual( - self.cur.execute( - "insert into %s (userid,username) " "values (?,?)" % self.tablename, - [str2bytes("Frank"), ""], - ), - 1, - ) - self.assertEqual(self.cur.execute("select * from %s" % self.tablename), 0) - self.assertEqual(len(self.cur.fetchone()[1]), 0) - - def test_set_zero_length_unicode(self): - self.assertEqual( - self.cur.execute( - "insert into %s (userid,username) " "values (?,?)" % self.tablename, - ["Frank", ""], - ), - 1, - ) - self.assertEqual(self.cur.execute("select * from %s" % self.tablename), 0) - self.assertEqual(len(self.cur.fetchone()[1]), 0) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_pywintypes.py b/lib/win32/test/test_pywintypes.py deleted file mode 100644 index bbd9d882..00000000 --- a/lib/win32/test/test_pywintypes.py +++ /dev/null @@ -1,116 +0,0 @@ -import datetime -import operator -import sys -import time -import unittest - -import pywintypes -from pywin32_testutil import ob2memory, str2bytes - - -class TestCase(unittest.TestCase): - def testPyTimeFormat(self): - struct_current = time.localtime() - pytime_current = pywintypes.Time(struct_current) - # try and test all the standard parts of the format - # Note we used to include '%Z' testing, but that was pretty useless as - # it always returned the local timezone. - format_strings = "%a %A %b %B %c %d %H %I %j %m %M %p %S %U %w %W %x %X %y %Y" - for fmt in format_strings.split(): - v1 = pytime_current.Format(fmt) - v2 = time.strftime(fmt, struct_current) - self.assertEqual(v1, v2, "format %s failed - %r != %r" % (fmt, v1, v2)) - - def testPyTimePrint(self): - # This used to crash with an invalid, or too early time. - # We don't really want to check that it does cause a ValueError - # (as hopefully this wont be true forever). 
So either working, or - # ValueError is OK. - try: - t = pywintypes.Time(-2) - t.Format() - except ValueError: - return - - def testTimeInDict(self): - d = {} - d["t1"] = pywintypes.Time(1) - self.assertEqual(d["t1"], pywintypes.Time(1)) - - def testPyTimeCompare(self): - t1 = pywintypes.Time(100) - t1_2 = pywintypes.Time(100) - t2 = pywintypes.Time(101) - - self.assertEqual(t1, t1_2) - self.assertTrue(t1 <= t1_2) - self.assertTrue(t1_2 >= t1) - - self.assertNotEqual(t1, t2) - self.assertTrue(t1 < t2) - self.assertTrue(t2 > t1) - - def testPyTimeCompareOther(self): - t1 = pywintypes.Time(100) - t2 = None - self.assertNotEqual(t1, t2) - - def testTimeTuple(self): - now = datetime.datetime.now() # has usec... - # timetuple() lost usec - pt must be <=... - pt = pywintypes.Time(now.timetuple()) - # *sob* - only if we have a datetime object can we compare like this. - if isinstance(pt, datetime.datetime): - self.assertTrue(pt <= now) - - def testTimeTuplems(self): - now = datetime.datetime.now() # has usec... - tt = now.timetuple() + (now.microsecond // 1000,) - pt = pywintypes.Time(tt) - # we can't compare if using the old type, as it loses all sub-second res. - if isinstance(pt, datetime.datetime): - # but even with datetime, we lose sub-millisecond. 
- expectedDelta = datetime.timedelta(milliseconds=1) - self.assertTrue(-expectedDelta < (now - pt) < expectedDelta) - - def testPyTimeFromTime(self): - t1 = pywintypes.Time(time.time()) - self.assertTrue(pywintypes.Time(t1) is t1) - - def testPyTimeTooLarge(self): - MAX_TIMESTAMP = 0x7FFFFFFFFFFFFFFF # used by some API function to mean "never" - ts = pywintypes.TimeStamp(MAX_TIMESTAMP) - self.assertEqual(ts, datetime.datetime.max) - - def testGUID(self): - s = "{00020400-0000-0000-C000-000000000046}" - iid = pywintypes.IID(s) - iid2 = pywintypes.IID(ob2memory(iid), True) - self.assertEqual(iid, iid2) - self.assertRaises( - ValueError, pywintypes.IID, str2bytes("00"), True - ) # too short - self.assertRaises(TypeError, pywintypes.IID, 0, True) # no buffer - - def testGUIDRichCmp(self): - s = "{00020400-0000-0000-C000-000000000046}" - iid = pywintypes.IID(s) - self.assertFalse(s == None) - self.assertFalse(None == s) - self.assertTrue(s != None) - self.assertTrue(None != s) - if sys.version_info > (3, 0): - self.assertRaises(TypeError, operator.gt, None, s) - self.assertRaises(TypeError, operator.gt, s, None) - self.assertRaises(TypeError, operator.lt, None, s) - self.assertRaises(TypeError, operator.lt, s, None) - - def testGUIDInDict(self): - s = "{00020400-0000-0000-C000-000000000046}" - iid = pywintypes.IID(s) - d = dict(item=iid) - self.assertEqual(d["item"], iid) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_security.py b/lib/win32/test/test_security.py deleted file mode 100644 index 42a82a13..00000000 --- a/lib/win32/test/test_security.py +++ /dev/null @@ -1,166 +0,0 @@ -# Tests for the win32security module. 
-import unittest - -import ntsecuritycon -import pywintypes -import win32api -import win32con -import win32security -import winerror -from pywin32_testutil import TestSkipped, ob2memory, testmain - - -class SecurityTests(unittest.TestCase): - def setUp(self): - self.pwr_sid = win32security.LookupAccountName("", "Power Users")[0] - try: - self.admin_sid = win32security.LookupAccountName("", "Administrator")[0] - except pywintypes.error as exc: - # in automation we see: - # pywintypes.error: (1332, 'LookupAccountName', 'No mapping between account names and security IDs was done.') - if exc.winerror != winerror.ERROR_NONE_MAPPED: - raise - self.admin_sid = None - - def tearDown(self): - pass - - def testEqual(self): - if self.admin_sid is None: - raise TestSkipped("No 'Administrator' account is available") - self.assertEqual( - win32security.LookupAccountName("", "Administrator")[0], - win32security.LookupAccountName("", "Administrator")[0], - ) - - def testNESID(self): - self.assertTrue(self.pwr_sid == self.pwr_sid) - if self.admin_sid: - self.assertTrue(self.pwr_sid != self.admin_sid) - - def testNEOther(self): - self.assertTrue(self.pwr_sid != None) - self.assertTrue(None != self.pwr_sid) - self.assertFalse(self.pwr_sid == None) - self.assertFalse(None == self.pwr_sid) - self.assertNotEqual(None, self.pwr_sid) - - def testSIDInDict(self): - d = dict(foo=self.pwr_sid) - self.assertEqual(d["foo"], self.pwr_sid) - - def testBuffer(self): - if self.admin_sid is None: - raise TestSkipped("No 'Administrator' account is available") - self.assertEqual( - ob2memory(win32security.LookupAccountName("", "Administrator")[0]), - ob2memory(win32security.LookupAccountName("", "Administrator")[0]), - ) - - def testMemory(self): - pwr_sid = self.pwr_sid - admin_sid = self.admin_sid - sd1 = win32security.SECURITY_DESCRIPTOR() - sd2 = win32security.SECURITY_DESCRIPTOR() - sd3 = win32security.SECURITY_DESCRIPTOR() - dacl = win32security.ACL() - dacl.AddAccessAllowedAce( - 
win32security.ACL_REVISION, win32con.GENERIC_READ, pwr_sid - ) - if admin_sid is not None: - dacl.AddAccessAllowedAce( - win32security.ACL_REVISION, win32con.GENERIC_ALL, admin_sid - ) - sd4 = win32security.SECURITY_DESCRIPTOR() - sacl = win32security.ACL() - if admin_sid is not None: - sacl.AddAuditAccessAce( - win32security.ACL_REVISION, win32con.DELETE, admin_sid, 1, 1 - ) - sacl.AddAuditAccessAce( - win32security.ACL_REVISION, win32con.GENERIC_ALL, pwr_sid, 1, 1 - ) - for x in range(0, 200000): - if admin_sid is not None: - sd1.SetSecurityDescriptorOwner(admin_sid, 0) - sd2.SetSecurityDescriptorGroup(pwr_sid, 0) - sd3.SetSecurityDescriptorDacl(1, dacl, 0) - sd4.SetSecurityDescriptorSacl(1, sacl, 0) - - -class DomainTests(unittest.TestCase): - def setUp(self): - self.ds_handle = None - try: - # saving the handle means the other test itself should bind faster. - self.ds_handle = win32security.DsBind() - except win32security.error as exc: - if exc.winerror != winerror.ERROR_NO_SUCH_DOMAIN: - raise - raise TestSkipped(exc) - - def tearDown(self): - if self.ds_handle is not None: - self.ds_handle.close() - - -class TestDS(DomainTests): - def testDsGetDcName(self): - # Not sure what we can actually test here! At least calling it - # does something :) - win32security.DsGetDcName() - - def testDsListServerInfo(self): - # again, not checking much, just exercising the code. 
- h = win32security.DsBind() - for status, ignore, site in win32security.DsListSites(h): - for status, ignore, server in win32security.DsListServersInSite(h, site): - info = win32security.DsListInfoForServer(h, server) - for status, ignore, domain in win32security.DsListDomainsInSite(h, site): - pass - - def testDsCrackNames(self): - h = win32security.DsBind() - fmt_offered = ntsecuritycon.DS_FQDN_1779_NAME - name = win32api.GetUserNameEx(fmt_offered) - result = win32security.DsCrackNames(h, 0, fmt_offered, fmt_offered, (name,)) - self.assertEqual(name, result[0][2]) - - def testDsCrackNamesSyntax(self): - # Do a syntax check only - that allows us to avoid binding. - # But must use DS_CANONICAL_NAME (or _EX) - expected = win32api.GetUserNameEx(win32api.NameCanonical) - fmt_offered = ntsecuritycon.DS_FQDN_1779_NAME - name = win32api.GetUserNameEx(fmt_offered) - result = win32security.DsCrackNames( - None, - ntsecuritycon.DS_NAME_FLAG_SYNTACTICAL_ONLY, - fmt_offered, - ntsecuritycon.DS_CANONICAL_NAME, - (name,), - ) - self.assertEqual(expected, result[0][2]) - - -class TestTranslate(DomainTests): - def _testTranslate(self, fmt_from, fmt_to): - name = win32api.GetUserNameEx(fmt_from) - expected = win32api.GetUserNameEx(fmt_to) - got = win32security.TranslateName(name, fmt_from, fmt_to) - self.assertEqual(got, expected) - - def testTranslate1(self): - self._testTranslate(win32api.NameFullyQualifiedDN, win32api.NameSamCompatible) - - def testTranslate2(self): - self._testTranslate(win32api.NameSamCompatible, win32api.NameFullyQualifiedDN) - - def testTranslate3(self): - self._testTranslate(win32api.NameFullyQualifiedDN, win32api.NameUniqueId) - - def testTranslate4(self): - self._testTranslate(win32api.NameUniqueId, win32api.NameFullyQualifiedDN) - - -if __name__ == "__main__": - testmain() diff --git a/lib/win32/test/test_sspi.py b/lib/win32/test/test_sspi.py deleted file mode 100644 index 5d349d4d..00000000 --- a/lib/win32/test/test_sspi.py +++ /dev/null @@ -1,231 
+0,0 @@ -# Some tests of the win32security sspi functions. -# Stolen from Roger's original test_sspi.c, a version of which is in "Demos" -# See also the other SSPI demos. -import re -import unittest - -import sspi -import sspicon -import win32api -import win32security -from pywin32_testutil import TestSkipped, str2bytes, testmain - - -# It is quite likely that the Kerberos tests will fail due to not being -# installed. The NTLM tests do *not* get the same behaviour as they should -# always be there. -def applyHandlingSkips(func, *args): - try: - return func(*args) - except win32api.error as exc: - if exc.winerror in [ - sspicon.SEC_E_NO_CREDENTIALS, - sspicon.SEC_E_NO_AUTHENTICATING_AUTHORITY, - ]: - raise TestSkipped(exc) - raise - - -class TestSSPI(unittest.TestCase): - def assertRaisesHRESULT(self, hr, func, *args): - try: - return func(*args) - raise RuntimeError("expecting %s failure" % (hr,)) - except win32security.error as exc: - self.assertEqual(exc.winerror, hr) - - def _doAuth(self, pkg_name): - sspiclient = sspi.ClientAuth(pkg_name, targetspn=win32api.GetUserName()) - sspiserver = sspi.ServerAuth(pkg_name) - - sec_buffer = None - err = 1 - while err != 0: - err, sec_buffer = sspiclient.authorize(sec_buffer) - err, sec_buffer = sspiserver.authorize(sec_buffer) - return sspiclient, sspiserver - - def _doTestImpersonate(self, pkg_name): - # Just for the sake of code exercising! 
- sspiclient, sspiserver = self._doAuth(pkg_name) - sspiserver.ctxt.ImpersonateSecurityContext() - sspiserver.ctxt.RevertSecurityContext() - - def testImpersonateKerberos(self): - applyHandlingSkips(self._doTestImpersonate, "Kerberos") - - def testImpersonateNTLM(self): - self._doTestImpersonate("NTLM") - - def _doTestEncrypt(self, pkg_name): - sspiclient, sspiserver = self._doAuth(pkg_name) - - pkg_size_info = sspiclient.ctxt.QueryContextAttributes( - sspicon.SECPKG_ATTR_SIZES - ) - msg = str2bytes("some data to be encrypted ......") - - trailersize = pkg_size_info["SecurityTrailer"] - encbuf = win32security.PySecBufferDescType() - encbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) - encbuf.append( - win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN) - ) - encbuf[0].Buffer = msg - sspiclient.ctxt.EncryptMessage(0, encbuf, 1) - sspiserver.ctxt.DecryptMessage(encbuf, 1) - self.assertEqual(msg, encbuf[0].Buffer) - # and test the higher-level functions - data_in = str2bytes("hello") - data, sig = sspiclient.encrypt(data_in) - self.assertEqual(sspiserver.decrypt(data, sig), data_in) - - data, sig = sspiserver.encrypt(data_in) - self.assertEqual(sspiclient.decrypt(data, sig), data_in) - - def _doTestEncryptStream(self, pkg_name): - # Test out the SSPI/GSSAPI interop wrapping examples at - # https://docs.microsoft.com/en-us/windows/win32/secauthn/sspi-kerberos-interoperability-with-gssapi - - sspiclient, sspiserver = self._doAuth(pkg_name) - - pkg_size_info = sspiclient.ctxt.QueryContextAttributes( - sspicon.SECPKG_ATTR_SIZES - ) - msg = str2bytes("some data to be encrypted ......") - - trailersize = pkg_size_info["SecurityTrailer"] - blocksize = pkg_size_info["BlockSize"] - encbuf = win32security.PySecBufferDescType() - encbuf.append( - win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN) - ) - encbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) - encbuf.append( - 
win32security.PySecBufferType(blocksize, sspicon.SECBUFFER_PADDING) - ) - encbuf[1].Buffer = msg - sspiclient.ctxt.EncryptMessage(0, encbuf, 1) - - encmsg = encbuf[0].Buffer + encbuf[1].Buffer + encbuf[2].Buffer - decbuf = win32security.PySecBufferDescType() - decbuf.append( - win32security.PySecBufferType(len(encmsg), sspicon.SECBUFFER_STREAM) - ) - decbuf.append(win32security.PySecBufferType(0, sspicon.SECBUFFER_DATA)) - decbuf[0].Buffer = encmsg - - sspiserver.ctxt.DecryptMessage(decbuf, 1) - self.assertEqual(msg, decbuf[1].Buffer) - - def testEncryptNTLM(self): - self._doTestEncrypt("NTLM") - - def testEncryptStreamNTLM(self): - self._doTestEncryptStream("NTLM") - - def testEncryptKerberos(self): - applyHandlingSkips(self._doTestEncrypt, "Kerberos") - - def testEncryptStreamKerberos(self): - applyHandlingSkips(self._doTestEncryptStream, "Kerberos") - - def _doTestSign(self, pkg_name): - sspiclient, sspiserver = self._doAuth(pkg_name) - - pkg_size_info = sspiclient.ctxt.QueryContextAttributes( - sspicon.SECPKG_ATTR_SIZES - ) - msg = str2bytes("some data to be encrypted ......") - - sigsize = pkg_size_info["MaxSignature"] - sigbuf = win32security.PySecBufferDescType() - sigbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA)) - sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN)) - sigbuf[0].Buffer = msg - sspiclient.ctxt.MakeSignature(0, sigbuf, 0) - sspiserver.ctxt.VerifySignature(sigbuf, 0) - # and test the higher-level functions - sspiclient.next_seq_num = 1 - sspiserver.next_seq_num = 1 - data = str2bytes("hello") - key = sspiclient.sign(data) - sspiserver.verify(data, key) - key = sspiclient.sign(data) - self.assertRaisesHRESULT( - sspicon.SEC_E_MESSAGE_ALTERED, sspiserver.verify, data + data, key - ) - - # and the other way - key = sspiserver.sign(data) - sspiclient.verify(data, key) - key = sspiserver.sign(data) - self.assertRaisesHRESULT( - sspicon.SEC_E_MESSAGE_ALTERED, sspiclient.verify, data + data, key 
- ) - - def testSignNTLM(self): - self._doTestSign("NTLM") - - def testSignKerberos(self): - applyHandlingSkips(self._doTestSign, "Kerberos") - - def _testSequenceSign(self): - # Only Kerberos supports sequence detection. - sspiclient, sspiserver = self._doAuth("Kerberos") - key = sspiclient.sign(b"hello") - sspiclient.sign(b"hello") - self.assertRaisesHRESULT( - sspicon.SEC_E_OUT_OF_SEQUENCE, sspiserver.verify, b"hello", key - ) - - def testSequenceSign(self): - applyHandlingSkips(self._testSequenceSign) - - def _testSequenceEncrypt(self): - # Only Kerberos supports sequence detection. - sspiclient, sspiserver = self._doAuth("Kerberos") - blob, key = sspiclient.encrypt(b"hello") - blob, key = sspiclient.encrypt(b"hello") - self.assertRaisesHRESULT( - sspicon.SEC_E_OUT_OF_SEQUENCE, sspiserver.decrypt, blob, key - ) - - def testSequenceEncrypt(self): - applyHandlingSkips(self._testSequenceEncrypt) - - def testSecBufferRepr(self): - desc = win32security.PySecBufferDescType() - assert re.match( - "PySecBufferDesc\(ulVersion: 0 \| cBuffers: 0 \| pBuffers: 0x[\da-fA-F]{8,16}\)", - repr(desc), - ) - - buffer1 = win32security.PySecBufferType(0, sspicon.SECBUFFER_TOKEN) - assert re.match( - "PySecBuffer\(cbBuffer: 0 \| BufferType: 2 \| pvBuffer: 0x[\da-fA-F]{8,16}\)", - repr(buffer1), - ) - "PySecBuffer(cbBuffer: 0 | BufferType: 2 | pvBuffer: 0x000001B8CC6D8020)" - desc.append(buffer1) - - assert re.match( - "PySecBufferDesc\(ulVersion: 0 \| cBuffers: 1 \| pBuffers: 0x[\da-fA-F]{8,16}\)", - repr(desc), - ) - - buffer2 = win32security.PySecBufferType(4, sspicon.SECBUFFER_DATA) - assert re.match( - "PySecBuffer\(cbBuffer: 4 \| BufferType: 1 \| pvBuffer: 0x[\da-fA-F]{8,16}\)", - repr(buffer2), - ) - desc.append(buffer2) - - assert re.match( - "PySecBufferDesc\(ulVersion: 0 \| cBuffers: 2 \| pBuffers: 0x[\da-fA-F]{8,16}\)", - repr(desc), - ) - - -if __name__ == "__main__": - testmain() diff --git a/lib/win32/test/test_win32api.py b/lib/win32/test/test_win32api.py deleted file 
mode 100644 index 58ac1597..00000000 --- a/lib/win32/test/test_win32api.py +++ /dev/null @@ -1,268 +0,0 @@ -# General test module for win32api - please add some :) - -import datetime -import os -import sys -import tempfile -import unittest - -import win32api -import win32con -import win32event -import winerror -from pywin32_testutil import TestSkipped, str2bytes - - -class CurrentUserTestCase(unittest.TestCase): - def testGetCurrentUser(self): - domain = win32api.GetDomainName() - if domain == "NT AUTHORITY": - # Running as a service account, so the comparison will fail - raise TestSkipped("running as service account") - name = "%s\\%s" % (domain, win32api.GetUserName()) - self.assertEqual(name, win32api.GetUserNameEx(win32api.NameSamCompatible)) - - -class TestTime(unittest.TestCase): - def testTimezone(self): - # GetTimeZoneInformation - rc, tzinfo = win32api.GetTimeZoneInformation() - if rc == win32con.TIME_ZONE_ID_DAYLIGHT: - tz_str = tzinfo[4] - tz_time = tzinfo[5] - else: - tz_str = tzinfo[1] - tz_time = tzinfo[2] - # for the sake of code exercise but don't output - tz_str.encode() - if not isinstance(tz_time, datetime.datetime) and not isinstance( - tz_time, tuple - ): - tz_time.Format() - - def TestDateFormat(self): - DATE_LONGDATE = 2 - date_flags = DATE_LONGDATE - win32api.GetDateFormat(0, date_flags, None) - win32api.GetDateFormat(0, date_flags, 0) - win32api.GetDateFormat(0, date_flags, datetime.datetime.now()) - win32api.GetDateFormat(0, date_flags, time.time()) - - def TestTimeFormat(self): - win32api.GetTimeFormat(0, 0, None) - win32api.GetTimeFormat(0, 0, 0) - win32api.GetTimeFormat(0, 0, datetime.datetime.now()) - win32api.GetTimeFormat(0, 0, time.time()) - - -class Registry(unittest.TestCase): - key_name = r"PythonTestHarness\Whatever" - - def test1(self): - # This used to leave a stale exception behind. 
- def reg_operation(): - hkey = win32api.RegCreateKey(win32con.HKEY_CURRENT_USER, self.key_name) - x = 3 / 0 # or a statement like: raise 'error' - - # do the test - try: - try: - try: - reg_operation() - except: - 1 / 0 # Force exception - finally: - win32api.RegDeleteKey(win32con.HKEY_CURRENT_USER, self.key_name) - except ZeroDivisionError: - pass - - def testValues(self): - key_name = r"PythonTestHarness\win32api" - ## tuples containing value name, value type, data - values = ( - (None, win32con.REG_SZ, "This is default unnamed value"), - ("REG_SZ", win32con.REG_SZ, "REG_SZ text data"), - ("REG_EXPAND_SZ", win32con.REG_EXPAND_SZ, "%systemdir%"), - ## REG_MULTI_SZ value needs to be a list since strings are returned as a list - ( - "REG_MULTI_SZ", - win32con.REG_MULTI_SZ, - ["string 1", "string 2", "string 3", "string 4"], - ), - ("REG_MULTI_SZ_empty", win32con.REG_MULTI_SZ, []), - ("REG_DWORD", win32con.REG_DWORD, 666), - ("REG_QWORD_INT", win32con.REG_QWORD, 99), - ("REG_QWORD", win32con.REG_QWORD, 2**33), - ( - "REG_BINARY", - win32con.REG_BINARY, - str2bytes("\x00\x01\x02\x03\x04\x05\x06\x07\x08\x01\x00"), - ), - ) - - hkey = win32api.RegCreateKey(win32con.HKEY_CURRENT_USER, key_name) - for value_name, reg_type, data in values: - win32api.RegSetValueEx(hkey, value_name, None, reg_type, data) - - for value_name, orig_type, orig_data in values: - data, typ = win32api.RegQueryValueEx(hkey, value_name) - self.assertEqual(typ, orig_type) - self.assertEqual(data, orig_data) - - def testNotifyChange(self): - def change(): - hkey = win32api.RegCreateKey(win32con.HKEY_CURRENT_USER, self.key_name) - try: - win32api.RegSetValue(hkey, None, win32con.REG_SZ, "foo") - finally: - win32api.RegDeleteKey(win32con.HKEY_CURRENT_USER, self.key_name) - - evt = win32event.CreateEvent(None, 0, 0, None) - ## REG_NOTIFY_CHANGE_LAST_SET - values - ## REG_CHANGE_NOTIFY_NAME - keys - ## REG_NOTIFY_CHANGE_SECURITY - security descriptor - ## REG_NOTIFY_CHANGE_ATTRIBUTES - 
win32api.RegNotifyChangeKeyValue( - win32con.HKEY_CURRENT_USER, - 1, - win32api.REG_NOTIFY_CHANGE_LAST_SET, - evt, - True, - ) - ret_code = win32event.WaitForSingleObject(evt, 0) - # Should be no change. - self.assertTrue(ret_code == win32con.WAIT_TIMEOUT) - change() - # Our event should now be in a signalled state. - ret_code = win32event.WaitForSingleObject(evt, 0) - self.assertTrue(ret_code == win32con.WAIT_OBJECT_0) - - -class FileNames(unittest.TestCase): - def testShortLongPathNames(self): - try: - me = __file__ - except NameError: - me = sys.argv[0] - fname = os.path.abspath(me).lower() - short_name = win32api.GetShortPathName(fname).lower() - long_name = win32api.GetLongPathName(short_name).lower() - self.assertTrue( - long_name == fname, - "Expected long name ('%s') to be original name ('%s')" % (long_name, fname), - ) - self.assertEqual(long_name, win32api.GetLongPathNameW(short_name).lower()) - long_name = win32api.GetLongPathNameW(short_name).lower() - self.assertTrue( - type(long_name) == str, - "GetLongPathNameW returned type '%s'" % (type(long_name),), - ) - self.assertTrue( - long_name == fname, - "Expected long name ('%s') to be original name ('%s')" % (long_name, fname), - ) - - def testShortUnicodeNames(self): - try: - me = __file__ - except NameError: - me = sys.argv[0] - fname = os.path.abspath(me).lower() - # passing unicode should cause GetShortPathNameW to be called. 
- short_name = win32api.GetShortPathName(str(fname)).lower() - self.assertTrue(isinstance(short_name, str)) - long_name = win32api.GetLongPathName(short_name).lower() - self.assertTrue( - long_name == fname, - "Expected long name ('%s') to be original name ('%s')" % (long_name, fname), - ) - self.assertEqual(long_name, win32api.GetLongPathNameW(short_name).lower()) - long_name = win32api.GetLongPathNameW(short_name).lower() - self.assertTrue( - type(long_name) == str, - "GetLongPathNameW returned type '%s'" % (type(long_name),), - ) - self.assertTrue( - long_name == fname, - "Expected long name ('%s') to be original name ('%s')" % (long_name, fname), - ) - - def testLongLongPathNames(self): - # We need filename where the FQN is > 256 - simplest way is to create a - # 250 character directory in the cwd (except - cwd may be on a drive - # not supporting \\\\?\\ (eg, network share) - so use temp. - import win32file - - basename = "a" * 250 - # but we need to ensure we use the 'long' version of the - # temp dir for later comparison. - long_temp_dir = win32api.GetLongPathNameW(tempfile.gettempdir()) - fname = "\\\\?\\" + os.path.join(long_temp_dir, basename) - try: - win32file.CreateDirectoryW(fname, None) - except win32api.error as details: - if details.winerror != winerror.ERROR_ALREADY_EXISTS: - raise - try: - # GetFileAttributes automatically calls GetFileAttributesW when - # passed unicode - try: - attr = win32api.GetFileAttributes(fname) - except win32api.error as details: - if details.winerror != winerror.ERROR_FILENAME_EXCED_RANGE: - raise - - attr = win32api.GetFileAttributes(str(fname)) - self.assertTrue(attr & win32con.FILE_ATTRIBUTE_DIRECTORY, attr) - - long_name = win32api.GetLongPathNameW(fname) - self.assertEqual(long_name.lower(), fname.lower()) - finally: - win32file.RemoveDirectory(fname) - - -class FormatMessage(unittest.TestCase): - def test_FromString(self): - msg = "Hello %1, how are you %2?" 
- inserts = ["Mark", "today"] - result = win32api.FormatMessage( - win32con.FORMAT_MESSAGE_FROM_STRING, - msg, # source - 0, # ID - 0, # LangID - inserts, - ) - self.assertEqual(result, "Hello Mark, how are you today?") - - -class Misc(unittest.TestCase): - def test_last_error(self): - for x in (0, 1, -1, winerror.TRUST_E_PROVIDER_UNKNOWN): - win32api.SetLastError(x) - self.assertEqual(x, win32api.GetLastError()) - - def testVkKeyScan(self): - # hopefully ' ' doesn't depend on the locale! - self.assertEqual(win32api.VkKeyScan(" "), 32) - - def testVkKeyScanEx(self): - # hopefully ' ' doesn't depend on the locale! - self.assertEqual(win32api.VkKeyScanEx(" ", 0), 32) - - def testGetSystemPowerStatus(self): - # Dummy - sps = win32api.GetSystemPowerStatus() - self.assertIsInstance(sps, dict) - test_keys = ( - "ACLineStatus", - "BatteryFlag", - "BatteryLifePercent", - "SystemStatusFlag", - "BatteryLifeTime", - "BatteryFullLifeTime", - ) - self.assertEqual(set(test_keys), set(sps.keys())) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32crypt.py b/lib/win32/test/test_win32crypt.py deleted file mode 100644 index 4d1fa712..00000000 --- a/lib/win32/test/test_win32crypt.py +++ /dev/null @@ -1,129 +0,0 @@ -# Test module for win32crypt - -import contextlib -import unittest -from typing import Any, Iterator - -import win32crypt -from pywin32_testutil import TestSkipped, find_test_fixture, testmain -from win32cryptcon import * - - -class Crypt(unittest.TestCase): - def testSimple(self): - data = b"My test data" - entropy = None - desc = "My description" - flags = 0 - ps = None - blob = win32crypt.CryptProtectData(data, desc, entropy, None, ps, flags) - got_desc, got_data = win32crypt.CryptUnprotectData( - blob, entropy, None, ps, flags - ) - self.assertEqual(data, got_data) - self.assertEqual(desc, got_desc) - - def testEntropy(self): - data = b"My test data" - entropy = b"My test entropy" - desc = "My description" - flags = 0 - ps = None - 
blob = win32crypt.CryptProtectData(data, desc, entropy, None, ps, flags) - got_desc, got_data = win32crypt.CryptUnprotectData( - blob, entropy, None, ps, flags - ) - self.assertEqual(data, got_data) - self.assertEqual(desc, got_desc) - - -# via https://github.com/mhammond/pywin32/issues/1859 -_LOCAL_MACHINE = "LocalMachine" -_CURRENT_USER = "CurrentUser" - - -@contextlib.contextmanager -def open_windows_certstore(store_name: str, store_location: str) -> Iterator[Any]: - """Open a windows certificate store - - :param store_name: store name - :param store_location: store location - :return: handle to cert store - """ - handle = None - try: - handle = win32crypt.CertOpenStore( - CERT_STORE_PROV_SYSTEM, - 0, - None, - CERT_SYSTEM_STORE_LOCAL_MACHINE - if store_location == _LOCAL_MACHINE - else CERT_SYSTEM_STORE_CURRENT_USER, - store_name, - ) - yield handle - finally: - if handle is not None: - handle.CertCloseStore() - - -class TestCerts(unittest.TestCase): - def readCertFile(self, file_name): - with open(find_test_fixture(file_name), "rb") as f: - buf = bytearray(f.read()) - return win32crypt.CryptQueryObject( - CERT_QUERY_OBJECT_BLOB, - buf, - CERT_QUERY_CONTENT_FLAG_CERT, - CERT_QUERY_FORMAT_FLAG_ALL, - 0, - ) - - def testReadCertFiles(self): - # readCertFile has Python read the file and load it as a blob. - # win32crypt can read the file directly - let's check that works too - # (ideally we'd compare the 2 approaches etc, but the objects don't support - # equality checks etc, so this will do for now.) - # No need to do this for different filenames! 
- filename = "win32crypt_testcert_base64.cer" - cert = win32crypt.CryptQueryObject( - CERT_QUERY_OBJECT_FILE, - find_test_fixture(filename), - CERT_QUERY_CONTENT_FLAG_CERT, - CERT_QUERY_FORMAT_FLAG_ALL, - 0, - ) - self.assertEqual(cert["FormatType"], CERT_QUERY_FORMAT_BASE64_ENCODED) - self.assertEqual(cert["ContentType"], CERT_QUERY_CONTENT_CERT) - - def checkCertFile(self, filename, expected_format): - cert = self.readCertFile(filename) - self.assertEqual(cert["FormatType"], expected_format) - self.assertEqual(cert["ContentType"], CERT_QUERY_CONTENT_CERT) - - with open_windows_certstore(_CURRENT_USER, "Temp") as store: - context = store.CertAddCertificateContextToStore( - cert["Context"], CERT_STORE_ADD_REPLACE_EXISTING - ) - # Getting 2 certs here - main thing is we get 1! - self.assertTrue(len(store.CertEnumCertificatesInStore())) - self.assertFalse(len(store.CertEnumCTLsInStore())) - context.CertFreeCertificateContext() - try: - context.CertFreeCertificateContext() - except ValueError: - pass - else: - raise RuntimeError("should not be able to close the context twice") - - def testCertBase64(self): - self.checkCertFile( - "win32crypt_testcert_base64.cer", CERT_QUERY_FORMAT_BASE64_ENCODED - ) - - def testCertBinary(self): - self.checkCertFile("win32crypt_testcert_bin.cer", CERT_QUERY_FORMAT_BINARY) - - -if __name__ == "__main__": - testmain() diff --git a/lib/win32/test/test_win32event.py b/lib/win32/test/test_win32event.py deleted file mode 100644 index 9e6efa3f..00000000 --- a/lib/win32/test/test_win32event.py +++ /dev/null @@ -1,119 +0,0 @@ -import unittest - -import pywintypes -import win32event - - -class TestWaitableTimer(unittest.TestCase): - def testWaitableFire(self): - h = win32event.CreateWaitableTimer(None, 0, None) - dt = -160 # 160 ns. 
- win32event.SetWaitableTimer(h, dt, 0, None, None, 0) - rc = win32event.WaitForSingleObject(h, 1000) - self.assertEqual(rc, win32event.WAIT_OBJECT_0) - - def testCreateWaitableTimerEx(self): - h = win32event.CreateWaitableTimerEx( - None, - None, - win32event.CREATE_WAITABLE_TIMER_HIGH_RESOLUTION, - win32event.TIMER_ALL_ACCESS, - ) - dt = -160 # 160 ns. - win32event.SetWaitableTimer(h, dt, 0, None, None, 0) - rc = win32event.WaitForSingleObject(h, 1000) - self.assertEqual(rc, win32event.WAIT_OBJECT_0) - - def testWaitableTrigger(self): - h = win32event.CreateWaitableTimer(None, 0, None) - # for the sake of this, pass a long that doesn't fit in an int. - dt = -2000000000 - win32event.SetWaitableTimer(h, dt, 0, None, None, 0) - rc = win32event.WaitForSingleObject(h, 10) # 10 ms. - self.assertEqual(rc, win32event.WAIT_TIMEOUT) - - def testWaitableError(self): - h = win32event.CreateWaitableTimer(None, 0, None) - h.close() - self.assertRaises( - pywintypes.error, win32event.SetWaitableTimer, h, -42, 0, None, None, 0 - ) - - -class TestWaitFunctions(unittest.TestCase): - def testMsgWaitForMultipleObjects(self): - # this function used to segfault when called with an empty list - res = win32event.MsgWaitForMultipleObjects([], 0, 0, 0) - self.assertEqual(res, win32event.WAIT_TIMEOUT) - - def testMsgWaitForMultipleObjects2(self): - # test with non-empty list - event = win32event.CreateEvent(None, 0, 0, None) - res = win32event.MsgWaitForMultipleObjects([event], 0, 0, 0) - self.assertEqual(res, win32event.WAIT_TIMEOUT) - - def testMsgWaitForMultipleObjectsEx(self): - # this function used to segfault when called with an empty list - res = win32event.MsgWaitForMultipleObjectsEx([], 0, 0, 0) - self.assertEqual(res, win32event.WAIT_TIMEOUT) - - def testMsgWaitForMultipleObjectsEx2(self): - # test with non-empty list - event = win32event.CreateEvent(None, 0, 0, None) - res = win32event.MsgWaitForMultipleObjectsEx([event], 0, 0, 0) - self.assertEqual(res, win32event.WAIT_TIMEOUT) 
- - -class TestEvent(unittest.TestCase): - def assertSignaled(self, event): - self.assertEqual( - win32event.WaitForSingleObject(event, 0), win32event.WAIT_OBJECT_0 - ) - - def assertNotSignaled(self, event): - self.assertEqual( - win32event.WaitForSingleObject(event, 0), win32event.WAIT_TIMEOUT - ) - - def testCreateEvent(self): - event = win32event.CreateEvent(None, False, False, None) - self.assertNotSignaled(event) - event = win32event.CreateEvent(None, False, True, None) - self.assertSignaled(event) - self.assertNotSignaled(event) - event = win32event.CreateEvent(None, True, True, None) - self.assertSignaled(event) - self.assertSignaled(event) - - def testSetEvent(self): - event = win32event.CreateEvent(None, True, False, None) - self.assertNotSignaled(event) - res = win32event.SetEvent(event) - self.assertEqual(res, None) - self.assertSignaled(event) - event.close() - self.assertRaises(pywintypes.error, win32event.SetEvent, event) - - def testResetEvent(self): - event = win32event.CreateEvent(None, True, True, None) - self.assertSignaled(event) - res = win32event.ResetEvent(event) - self.assertEqual(res, None) - self.assertNotSignaled(event) - event.close() - self.assertRaises(pywintypes.error, win32event.ResetEvent, event) - - -class TestMutex(unittest.TestCase): - def testReleaseMutex(self): - mutex = win32event.CreateMutex(None, True, None) - res = win32event.ReleaseMutex(mutex) - self.assertEqual(res, None) - res = win32event.WaitForSingleObject(mutex, 0) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - mutex.close() - self.assertRaises(pywintypes.error, win32event.ReleaseMutex, mutex) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32file.py b/lib/win32/test/test_win32file.py deleted file mode 100644 index da998032..00000000 --- a/lib/win32/test/test_win32file.py +++ /dev/null @@ -1,1085 +0,0 @@ -import datetime -import os -import random -import shutil -import socket -import tempfile -import threading -import time 
-import unittest - -import ntsecuritycon -import pywintypes -import win32api -import win32con -import win32event -import win32file -import win32pipe -import win32timezone -import winerror -from pywin32_testutil import TestSkipped, str2bytes, testmain - -try: - set -except NameError: - from sets import Set as set - - -class TestReadBuffer(unittest.TestCase): - def testLen(self): - buffer = win32file.AllocateReadBuffer(1) - self.assertEqual(len(buffer), 1) - - def testSimpleIndex(self): - buffer = win32file.AllocateReadBuffer(1) - buffer[0] = 0xFF - self.assertEqual(buffer[0], 0xFF) - - def testSimpleSlice(self): - buffer = win32file.AllocateReadBuffer(2) - val = str2bytes("\0\0") - buffer[:2] = val - self.assertEqual(buffer[0:2], val) - - -class TestSimpleOps(unittest.TestCase): - def testSimpleFiles(self): - fd, filename = tempfile.mkstemp() - os.close(fd) - os.unlink(filename) - handle = win32file.CreateFile( - filename, win32file.GENERIC_WRITE, 0, None, win32con.CREATE_NEW, 0, None - ) - test_data = str2bytes("Hello\0there") - try: - win32file.WriteFile(handle, test_data) - handle.Close() - # Try and open for read - handle = win32file.CreateFile( - filename, - win32file.GENERIC_READ, - 0, - None, - win32con.OPEN_EXISTING, - 0, - None, - ) - rc, data = win32file.ReadFile(handle, 1024) - self.assertEqual(data, test_data) - finally: - handle.Close() - try: - os.unlink(filename) - except os.error: - pass - - # A simple test using normal read/write operations. - def testMoreFiles(self): - # Create a file in the %TEMP% directory. - testName = os.path.join(win32api.GetTempPath(), "win32filetest.dat") - desiredAccess = win32file.GENERIC_READ | win32file.GENERIC_WRITE - # Set a flag to delete the file automatically when it is closed. - fileFlags = win32file.FILE_FLAG_DELETE_ON_CLOSE - h = win32file.CreateFile( - testName, - desiredAccess, - win32file.FILE_SHARE_READ, - None, - win32file.CREATE_ALWAYS, - fileFlags, - 0, - ) - - # Write a known number of bytes to the file. 
- data = str2bytes("z") * 1025 - - win32file.WriteFile(h, data) - - self.assertTrue( - win32file.GetFileSize(h) == len(data), - "WARNING: Written file does not have the same size as the length of the data in it!", - ) - - # Ensure we can read the data back. - win32file.SetFilePointer(h, 0, win32file.FILE_BEGIN) - hr, read_data = win32file.ReadFile( - h, len(data) + 10 - ) # + 10 to get anything extra - self.assertTrue(hr == 0, "Readfile returned %d" % hr) - - self.assertTrue(read_data == data, "Read data is not what we wrote!") - - # Now truncate the file at 1/2 its existing size. - newSize = len(data) // 2 - win32file.SetFilePointer(h, newSize, win32file.FILE_BEGIN) - win32file.SetEndOfFile(h) - self.assertEqual(win32file.GetFileSize(h), newSize) - - # GetFileAttributesEx/GetFileAttributesExW tests. - self.assertEqual( - win32file.GetFileAttributesEx(testName), - win32file.GetFileAttributesExW(testName), - ) - - attr, ct, at, wt, size = win32file.GetFileAttributesEx(testName) - self.assertTrue( - size == newSize, - "Expected GetFileAttributesEx to return the same size as GetFileSize()", - ) - self.assertTrue( - attr == win32file.GetFileAttributes(testName), - "Expected GetFileAttributesEx to return the same attributes as GetFileAttributes", - ) - - h = None # Close the file by removing the last reference to the handle! - - self.assertTrue( - not os.path.isfile(testName), "After closing the file, it still exists!" - ) - - def testFilePointer(self): - # via [ 979270 ] SetFilePointer fails with negative offset - - # Create a file in the %TEMP% directory. 
- filename = os.path.join(win32api.GetTempPath(), "win32filetest.dat") - - f = win32file.CreateFile( - filename, - win32file.GENERIC_READ | win32file.GENERIC_WRITE, - 0, - None, - win32file.CREATE_ALWAYS, - win32file.FILE_ATTRIBUTE_NORMAL, - 0, - ) - try: - # Write some data - data = str2bytes("Some data") - (res, written) = win32file.WriteFile(f, data) - - self.assertFalse(res) - self.assertEqual(written, len(data)) - - # Move at the beginning and read the data - win32file.SetFilePointer(f, 0, win32file.FILE_BEGIN) - (res, s) = win32file.ReadFile(f, len(data)) - - self.assertFalse(res) - self.assertEqual(s, data) - - # Move at the end and read the data - win32file.SetFilePointer(f, -len(data), win32file.FILE_END) - (res, s) = win32file.ReadFile(f, len(data)) - - self.assertFalse(res) - self.assertEqual(s, data) - finally: - f.Close() - os.unlink(filename) - - def testFileTimesTimezones(self): - filename = tempfile.mktemp("-testFileTimes") - # now() is always returning a timestamp with microseconds but the - # file APIs all have zero microseconds, so some comparisons fail. 
- now_utc = win32timezone.utcnow().replace(microsecond=0) - now_local = now_utc.astimezone(win32timezone.TimeZoneInfo.local()) - h = win32file.CreateFile( - filename, - win32file.GENERIC_READ | win32file.GENERIC_WRITE, - 0, - None, - win32file.CREATE_ALWAYS, - 0, - 0, - ) - try: - win32file.SetFileTime(h, now_utc, now_utc, now_utc) - ct, at, wt = win32file.GetFileTime(h) - self.assertEqual(now_local, ct) - self.assertEqual(now_local, at) - self.assertEqual(now_local, wt) - # and the reverse - set local, check against utc - win32file.SetFileTime(h, now_local, now_local, now_local) - ct, at, wt = win32file.GetFileTime(h) - self.assertEqual(now_utc, ct) - self.assertEqual(now_utc, at) - self.assertEqual(now_utc, wt) - finally: - h.close() - os.unlink(filename) - - def testFileTimes(self): - from win32timezone import TimeZoneInfo - - # now() is always returning a timestamp with microseconds but the - # file APIs all have zero microseconds, so some comparisons fail. - now = datetime.datetime.now(tz=TimeZoneInfo.utc()).replace(microsecond=0) - nowish = now + datetime.timedelta(seconds=1) - later = now + datetime.timedelta(seconds=120) - - filename = tempfile.mktemp("-testFileTimes") - # Windows docs the 'last time' isn't valid until the last write - # handle is closed - so create the file, then re-open it to check. - open(filename, "w").close() - f = win32file.CreateFile( - filename, - win32file.GENERIC_READ | win32file.GENERIC_WRITE, - 0, - None, - win32con.OPEN_EXISTING, - 0, - None, - ) - try: - ct, at, wt = win32file.GetFileTime(f) - self.assertTrue( - ct >= now, - "File was created in the past - now=%s, created=%s" % (now, ct), - ) - self.assertTrue(now <= ct <= nowish, (now, ct)) - self.assertTrue( - wt >= now, - "File was written-to in the past now=%s, written=%s" % (now, wt), - ) - self.assertTrue(now <= wt <= nowish, (now, wt)) - - # Now set the times. - win32file.SetFileTime(f, later, later, later, UTCTimes=True) - # Get them back. 
- ct, at, wt = win32file.GetFileTime(f) - # XXX - the builtin PyTime type appears to be out by a dst offset. - # just ignore that type here... - self.assertEqual(ct, later) - self.assertEqual(at, later) - self.assertEqual(wt, later) - - finally: - f.Close() - os.unlink(filename) - - -class TestGetFileInfoByHandleEx(unittest.TestCase): - __handle = __filename = None - - def setUp(self): - fd, self.__filename = tempfile.mkstemp() - os.close(fd) - - def tearDown(self): - if self.__handle is not None: - self.__handle.Close() - if self.__filename is not None: - try: - os.unlink(self.__filename) - except OSError: - pass - self.__handle = self.__filename = None - - def testFileBasicInfo(self): - attr = win32file.GetFileAttributes(self.__filename) - f = win32file.CreateFile( - self.__filename, - win32file.GENERIC_READ, - 0, - None, - win32con.OPEN_EXISTING, - 0, - None, - ) - self.__handle = f - ct, at, wt = win32file.GetFileTime(f) - - # bug #752: this throws ERROR_BAD_LENGTH (24) in x86 binaries of build 221 - basic_info = win32file.GetFileInformationByHandleEx(f, win32file.FileBasicInfo) - - self.assertEqual(ct, basic_info["CreationTime"]) - self.assertEqual(at, basic_info["LastAccessTime"]) - self.assertEqual(wt, basic_info["LastWriteTime"]) - self.assertEqual(attr, basic_info["FileAttributes"]) - - -class TestOverlapped(unittest.TestCase): - def testSimpleOverlapped(self): - # Create a file in the %TEMP% directory. - import win32event - - testName = os.path.join(win32api.GetTempPath(), "win32filetest.dat") - desiredAccess = win32file.GENERIC_WRITE - overlapped = pywintypes.OVERLAPPED() - evt = win32event.CreateEvent(None, 0, 0, None) - overlapped.hEvent = evt - # Create the file and write shit-loads of data to it. 
- h = win32file.CreateFile( - testName, desiredAccess, 0, None, win32file.CREATE_ALWAYS, 0, 0 - ) - chunk_data = str2bytes("z") * 0x8000 - num_loops = 512 - expected_size = num_loops * len(chunk_data) - for i in range(num_loops): - win32file.WriteFile(h, chunk_data, overlapped) - win32event.WaitForSingleObject(overlapped.hEvent, win32event.INFINITE) - overlapped.Offset = overlapped.Offset + len(chunk_data) - h.Close() - # Now read the data back overlapped - overlapped = pywintypes.OVERLAPPED() - evt = win32event.CreateEvent(None, 0, 0, None) - overlapped.hEvent = evt - desiredAccess = win32file.GENERIC_READ - h = win32file.CreateFile( - testName, desiredAccess, 0, None, win32file.OPEN_EXISTING, 0, 0 - ) - buffer = win32file.AllocateReadBuffer(0xFFFF) - while 1: - try: - hr, data = win32file.ReadFile(h, buffer, overlapped) - win32event.WaitForSingleObject(overlapped.hEvent, win32event.INFINITE) - overlapped.Offset = overlapped.Offset + len(data) - if not data is buffer: - self.fail( - "Unexpected result from ReadFile - should be the same buffer we passed it" - ) - except win32api.error: - break - h.Close() - - def testCompletionPortsMultiple(self): - # Mainly checking that we can "associate" an existing handle. This - # failed in build 203. - ioport = win32file.CreateIoCompletionPort( - win32file.INVALID_HANDLE_VALUE, 0, 0, 0 - ) - socks = [] - for PORT in range(9123, 9125): - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind(("", PORT)) - sock.listen(1) - socks.append(sock) - new = win32file.CreateIoCompletionPort(sock.fileno(), ioport, PORT, 0) - assert new is ioport - for s in socks: - s.close() - hv = int(ioport) - ioport = new = None - # The handle itself should be closed now (unless we leak references!) - # Check that. 
- try: - win32file.CloseHandle(hv) - raise RuntimeError("Expected close to fail!") - except win32file.error as details: - self.assertEqual(details.winerror, winerror.ERROR_INVALID_HANDLE) - - def testCompletionPortsQueued(self): - class Foo: - pass - - io_req_port = win32file.CreateIoCompletionPort(-1, None, 0, 0) - overlapped = pywintypes.OVERLAPPED() - overlapped.object = Foo() - win32file.PostQueuedCompletionStatus(io_req_port, 0, 99, overlapped) - errCode, bytes, key, overlapped = win32file.GetQueuedCompletionStatus( - io_req_port, win32event.INFINITE - ) - self.assertEqual(errCode, 0) - self.assertTrue(isinstance(overlapped.object, Foo)) - - def _IOCPServerThread(self, handle, port, drop_overlapped_reference): - overlapped = pywintypes.OVERLAPPED() - win32pipe.ConnectNamedPipe(handle, overlapped) - if drop_overlapped_reference: - # Be naughty - the overlapped object is now dead, but - # GetQueuedCompletionStatus will still find it. Our check of - # reference counting should catch that error. - overlapped = None - # even if we fail, be sure to close the handle; prevents hangs - # on Vista 64... - try: - self.assertRaises( - RuntimeError, win32file.GetQueuedCompletionStatus, port, -1 - ) - finally: - handle.Close() - return - - result = win32file.GetQueuedCompletionStatus(port, -1) - ol2 = result[-1] - self.assertTrue(ol2 is overlapped) - data = win32file.ReadFile(handle, 512)[1] - win32file.WriteFile(handle, data) - - def testCompletionPortsNonQueued(self, test_overlapped_death=0): - # In 204 we had a reference count bug when OVERLAPPED objects were - # associated with a completion port other than via - # PostQueuedCompletionStatus. This test is based on the reproduction - # reported with that bug. - # Create the pipe. 
- BUFSIZE = 512 - pipe_name = r"\\.\pipe\pywin32_test_pipe" - handle = win32pipe.CreateNamedPipe( - pipe_name, - win32pipe.PIPE_ACCESS_DUPLEX | win32file.FILE_FLAG_OVERLAPPED, - win32pipe.PIPE_TYPE_MESSAGE - | win32pipe.PIPE_READMODE_MESSAGE - | win32pipe.PIPE_WAIT, - 1, - BUFSIZE, - BUFSIZE, - win32pipe.NMPWAIT_WAIT_FOREVER, - None, - ) - # Create an IOCP and associate it with the handle. - port = win32file.CreateIoCompletionPort(-1, 0, 0, 0) - win32file.CreateIoCompletionPort(handle, port, 1, 0) - - t = threading.Thread( - target=self._IOCPServerThread, args=(handle, port, test_overlapped_death) - ) - t.setDaemon(True) # avoid hanging entire test suite on failure. - t.start() - try: - time.sleep(0.1) # let thread do its thing. - try: - win32pipe.CallNamedPipe( - r"\\.\pipe\pywin32_test_pipe", str2bytes("Hello there"), BUFSIZE, 0 - ) - except win32pipe.error: - # Testing for overlapped death causes this - if not test_overlapped_death: - raise - finally: - if not test_overlapped_death: - handle.Close() - t.join(3) - self.assertFalse(t.is_alive(), "thread didn't finish") - - def testCompletionPortsNonQueuedBadReference(self): - self.testCompletionPortsNonQueued(True) - - def testHashable(self): - overlapped = pywintypes.OVERLAPPED() - d = {} - d[overlapped] = "hello" - self.assertEqual(d[overlapped], "hello") - - def testComparable(self): - overlapped = pywintypes.OVERLAPPED() - self.assertEqual(overlapped, overlapped) - # ensure we explicitly test the operators. - self.assertTrue(overlapped == overlapped) - self.assertFalse(overlapped != overlapped) - - def testComparable2(self): - # 2 overlapped objects compare equal if their contents are the same. - overlapped1 = pywintypes.OVERLAPPED() - overlapped2 = pywintypes.OVERLAPPED() - self.assertEqual(overlapped1, overlapped2) - # ensure we explicitly test the operators. 
- self.assertTrue(overlapped1 == overlapped2) - self.assertFalse(overlapped1 != overlapped2) - # now change something in one of them - should no longer be equal. - overlapped1.hEvent = 1 - self.assertNotEqual(overlapped1, overlapped2) - # ensure we explicitly test the operators. - self.assertFalse(overlapped1 == overlapped2) - self.assertTrue(overlapped1 != overlapped2) - - -class TestSocketExtensions(unittest.TestCase): - def acceptWorker(self, port, running_event, stopped_event): - listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - listener.bind(("", port)) - listener.listen(200) - - # create accept socket - accepter = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - # An overlapped - overlapped = pywintypes.OVERLAPPED() - overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) - # accept the connection. - # We used to allow strings etc to be passed here, and they would be - # modified! Obviously this is evil :) - buffer = " " * 1024 # EVIL - SHOULD NOT BE ALLOWED. - self.assertRaises( - TypeError, win32file.AcceptEx, listener, accepter, buffer, overlapped - ) - - # This is the correct way to allocate the buffer... - buffer = win32file.AllocateReadBuffer(1024) - rc = win32file.AcceptEx(listener, accepter, buffer, overlapped) - self.assertEqual(rc, winerror.ERROR_IO_PENDING) - # Set the event to say we are all ready - running_event.set() - # and wait for the connection. - rc = win32event.WaitForSingleObject(overlapped.hEvent, 2000) - if rc == win32event.WAIT_TIMEOUT: - self.fail("timed out waiting for a connection") - nbytes = win32file.GetOverlappedResult(listener.fileno(), overlapped, False) - # fam, loc, rem = win32file.GetAcceptExSockaddrs(accepter, buffer) - accepter.send(buffer[:nbytes]) - # NOT set in a finally - this means *successfully* stopped! 
- stopped_event.set() - - def testAcceptEx(self): - port = 4680 - running = threading.Event() - stopped = threading.Event() - t = threading.Thread(target=self.acceptWorker, args=(port, running, stopped)) - t.start() - running.wait(2) - if not running.isSet(): - self.fail("AcceptEx Worker thread failed to start") - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect(("127.0.0.1", port)) - win32file.WSASend(s, str2bytes("hello"), None) - overlapped = pywintypes.OVERLAPPED() - overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) - # Like above - WSARecv used to allow strings as the receive buffer!! - buffer = " " * 10 - self.assertRaises(TypeError, win32file.WSARecv, s, buffer, overlapped) - # This one should work :) - buffer = win32file.AllocateReadBuffer(10) - win32file.WSARecv(s, buffer, overlapped) - nbytes = win32file.GetOverlappedResult(s.fileno(), overlapped, True) - got = buffer[:nbytes] - self.assertEqual(got, str2bytes("hello")) - # thread should have stopped - stopped.wait(2) - if not stopped.isSet(): - self.fail("AcceptEx Worker thread failed to successfully stop") - - -class TestFindFiles(unittest.TestCase): - def testIter(self): - dir = os.path.join(os.getcwd(), "*") - files = win32file.FindFilesW(dir) - set1 = set() - set1.update(files) - set2 = set() - for file in win32file.FindFilesIterator(dir): - set2.add(file) - assert len(set2) > 5, "This directory has less than 5 files!?" 
- self.assertEqual(set1, set2) - - def testBadDir(self): - dir = os.path.join(os.getcwd(), "a dir that doesnt exist", "*") - self.assertRaises(win32file.error, win32file.FindFilesIterator, dir) - - def testEmptySpec(self): - spec = os.path.join(os.getcwd(), "*.foo_bar") - num = 0 - for i in win32file.FindFilesIterator(spec): - num += 1 - self.assertEqual(0, num) - - def testEmptyDir(self): - test_path = os.path.join(win32api.GetTempPath(), "win32file_test_directory") - try: - # Note: previously used shutil.rmtree, but when looking for - # reference count leaks, that function showed leaks! os.rmdir - # doesn't have that problem. - os.rmdir(test_path) - except os.error: - pass - os.mkdir(test_path) - try: - num = 0 - for i in win32file.FindFilesIterator(os.path.join(test_path, "*")): - num += 1 - # Expecting "." and ".." only - self.assertEqual(2, num) - finally: - os.rmdir(test_path) - - -class TestDirectoryChanges(unittest.TestCase): - num_test_dirs = 1 - - def setUp(self): - self.watcher_threads = [] - self.watcher_thread_changes = [] - self.dir_names = [] - self.dir_handles = [] - for i in range(self.num_test_dirs): - td = tempfile.mktemp("-test-directory-changes-%d" % i) - os.mkdir(td) - self.dir_names.append(td) - hdir = win32file.CreateFile( - td, - ntsecuritycon.FILE_LIST_DIRECTORY, - win32con.FILE_SHARE_READ, - None, # security desc - win32con.OPEN_EXISTING, - win32con.FILE_FLAG_BACKUP_SEMANTICS | win32con.FILE_FLAG_OVERLAPPED, - None, - ) - self.dir_handles.append(hdir) - - changes = [] - t = threading.Thread( - target=self._watcherThreadOverlapped, args=(td, hdir, changes) - ) - t.start() - self.watcher_threads.append(t) - self.watcher_thread_changes.append(changes) - - def _watcherThread(self, dn, dh, changes): - # A synchronous version: - # XXX - not used - I was having a whole lot of problems trying to - # get this to work. Specifically: - # * ReadDirectoryChangesW without an OVERLAPPED blocks infinitely. 
- # * If another thread attempts to close the handle while - # ReadDirectoryChangesW is waiting on it, the ::CloseHandle() method - # blocks (which has nothing to do with the GIL - it is correctly - # managed) - # Which ends up with no way to kill the thread! - flags = win32con.FILE_NOTIFY_CHANGE_FILE_NAME - while 1: - try: - print("waiting", dh) - changes = win32file.ReadDirectoryChangesW( - dh, 8192, False, flags # sub-tree - ) - print("got", changes) - except: - raise - changes.extend(changes) - - def _watcherThreadOverlapped(self, dn, dh, changes): - flags = win32con.FILE_NOTIFY_CHANGE_FILE_NAME - buf = win32file.AllocateReadBuffer(8192) - overlapped = pywintypes.OVERLAPPED() - overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) - while 1: - win32file.ReadDirectoryChangesW( - dh, buf, False, flags, overlapped # sub-tree - ) - # Wait for our event, or for 5 seconds. - rc = win32event.WaitForSingleObject(overlapped.hEvent, 5000) - if rc == win32event.WAIT_OBJECT_0: - # got some data! Must use GetOverlappedResult to find out - # how much is valid! 0 generally means the handle has - # been closed. Blocking is OK here, as the event has - # already been set. - nbytes = win32file.GetOverlappedResult(dh, overlapped, True) - if nbytes: - bits = win32file.FILE_NOTIFY_INFORMATION(buf, nbytes) - changes.extend(bits) - else: - # This is "normal" exit - our 'tearDown' closes the - # handle. - # print "looks like dir handle was closed!" - return - else: - print("ERROR: Watcher thread timed-out!") - return # kill the thread! - - def tearDown(self): - # be careful about raising errors at teardown! - for h in self.dir_handles: - # See comments in _watcherThread above - this appears to - # deadlock if a synchronous ReadDirectoryChangesW is waiting... 
- # (No such problems with an asynch ReadDirectoryChangesW) - h.Close() - for dn in self.dir_names: - try: - shutil.rmtree(dn) - except OSError: - print("FAILED to remove directory", dn) - - for t in self.watcher_threads: - # closing dir handle should have killed threads! - t.join(5) - if t.is_alive(): - print("FAILED to wait for thread termination") - - def stablize(self): - time.sleep(0.5) - - def testSimple(self): - self.stablize() - for dn in self.dir_names: - fn = os.path.join(dn, "test_file") - open(fn, "w").close() - - self.stablize() - changes = self.watcher_thread_changes[0] - self.assertEqual(changes, [(1, "test_file")]) - - def testSmall(self): - self.stablize() - for dn in self.dir_names: - fn = os.path.join(dn, "x") - open(fn, "w").close() - - self.stablize() - changes = self.watcher_thread_changes[0] - self.assertEqual(changes, [(1, "x")]) - - -class TestEncrypt(unittest.TestCase): - def testEncrypt(self): - fname = tempfile.mktemp("win32file_test") - f = open(fname, "wb") - f.write(str2bytes("hello")) - f.close() - f = None - try: - try: - win32file.EncryptFile(fname) - except win32file.error as details: - if details.winerror != winerror.ERROR_ACCESS_DENIED: - raise - print("It appears this is not NTFS - cant encrypt/decrypt") - win32file.DecryptFile(fname) - finally: - if f is not None: - f.close() - os.unlink(fname) - - -class TestConnect(unittest.TestCase): - def connect_thread_runner(self, expect_payload, giveup_event): - # As Windows 2000 doesn't do ConnectEx, we need to use a non-blocking - # accept, as our test connection may never come. May as well use - # AcceptEx for this... - listener = socket.socket() - self.addr = ("localhost", random.randint(10000, 64000)) - listener.bind(self.addr) - listener.listen(1) - - # create accept socket - accepter = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - # An overlapped - overlapped = pywintypes.OVERLAPPED() - overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) - # accept the connection. 
- if expect_payload: - buf_size = 1024 - else: - # when we don't expect data we must be careful to only pass the - # exact number of bytes for the endpoint data... - buf_size = win32file.CalculateSocketEndPointSize(listener) - - buffer = win32file.AllocateReadBuffer(buf_size) - win32file.AcceptEx(listener, accepter, buffer, overlapped) - # wait for the connection or our test to fail. - events = giveup_event, overlapped.hEvent - rc = win32event.WaitForMultipleObjects(events, False, 2000) - if rc == win32event.WAIT_TIMEOUT: - self.fail("timed out waiting for a connection") - if rc == win32event.WAIT_OBJECT_0: - # Our main thread running the test failed and will never connect. - return - # must be a connection. - nbytes = win32file.GetOverlappedResult(listener.fileno(), overlapped, False) - if expect_payload: - self.request = buffer[:nbytes] - accepter.send(str2bytes("some expected response")) - - def test_connect_with_payload(self): - giveup_event = win32event.CreateEvent(None, 0, 0, None) - t = threading.Thread( - target=self.connect_thread_runner, args=(True, giveup_event) - ) - t.start() - time.sleep(0.1) - s2 = socket.socket() - ol = pywintypes.OVERLAPPED() - s2.bind(("0.0.0.0", 0)) # connectex requires the socket be bound beforehand - try: - win32file.ConnectEx(s2, self.addr, ol, str2bytes("some expected request")) - except win32file.error as exc: - win32event.SetEvent(giveup_event) - if exc.winerror == 10022: # WSAEINVAL - raise TestSkipped("ConnectEx is not available on this platform") - raise # some error error we don't expect. 
- # We occasionally see ERROR_CONNECTION_REFUSED in automation - try: - win32file.GetOverlappedResult(s2.fileno(), ol, 1) - except win32file.error as exc: - win32event.SetEvent(giveup_event) - if exc.winerror == winerror.ERROR_CONNECTION_REFUSED: - raise TestSkipped("Assuming ERROR_CONNECTION_REFUSED is transient") - raise - ol = pywintypes.OVERLAPPED() - buff = win32file.AllocateReadBuffer(1024) - win32file.WSARecv(s2, buff, ol, 0) - length = win32file.GetOverlappedResult(s2.fileno(), ol, 1) - self.response = buff[:length] - self.assertEqual(self.response, str2bytes("some expected response")) - self.assertEqual(self.request, str2bytes("some expected request")) - t.join(5) - self.assertFalse(t.is_alive(), "worker thread didn't terminate") - - def test_connect_without_payload(self): - giveup_event = win32event.CreateEvent(None, 0, 0, None) - t = threading.Thread( - target=self.connect_thread_runner, args=(False, giveup_event) - ) - t.start() - time.sleep(0.1) - s2 = socket.socket() - ol = pywintypes.OVERLAPPED() - s2.bind(("0.0.0.0", 0)) # connectex requires the socket be bound beforehand - try: - win32file.ConnectEx(s2, self.addr, ol) - except win32file.error as exc: - win32event.SetEvent(giveup_event) - if exc.winerror == 10022: # WSAEINVAL - raise TestSkipped("ConnectEx is not available on this platform") - raise # some error error we don't expect. 
- # We occasionally see ERROR_CONNECTION_REFUSED in automation - try: - win32file.GetOverlappedResult(s2.fileno(), ol, 1) - except win32file.error as exc: - win32event.SetEvent(giveup_event) - if exc.winerror == winerror.ERROR_CONNECTION_REFUSED: - raise TestSkipped("Assuming ERROR_CONNECTION_REFUSED is transient") - raise - - ol = pywintypes.OVERLAPPED() - buff = win32file.AllocateReadBuffer(1024) - win32file.WSARecv(s2, buff, ol, 0) - length = win32file.GetOverlappedResult(s2.fileno(), ol, 1) - self.response = buff[:length] - self.assertEqual(self.response, str2bytes("some expected response")) - t.join(5) - self.assertFalse(t.is_alive(), "worker thread didn't terminate") - - -class TestTransmit(unittest.TestCase): - def test_transmit(self): - import binascii - - bytes = os.urandom(1024 * 1024) - val = binascii.hexlify(bytes) - val_length = len(val) - f = tempfile.TemporaryFile() - f.write(val) - - def runner(): - s1 = socket.socket() - # binding fails occasionally on github CI with: - # OSError: [WinError 10013] An attempt was made to access a socket in a way forbidden by its access permissions - # which probably just means the random port is already in use, so - # let that happen a few times. 
- for i in range(5): - self.addr = ("localhost", random.randint(10000, 64000)) - try: - s1.bind(self.addr) - break - except os.error as exc: - if exc.winerror != 10013: - raise - print("Failed to use port", self.addr, "trying another random one") - else: - raise RuntimeError("Failed to find an available port to bind to.") - s1.listen(1) - cli, addr = s1.accept() - buf = 1 - self.request = [] - while buf: - buf = cli.recv(1024 * 100) - self.request.append(buf) - - th = threading.Thread(target=runner) - th.start() - time.sleep(0.5) - s2 = socket.socket() - s2.connect(self.addr) - - length = 0 - aaa = str2bytes("[AAA]") - bbb = str2bytes("[BBB]") - ccc = str2bytes("[CCC]") - ddd = str2bytes("[DDD]") - empty = str2bytes("") - ol = pywintypes.OVERLAPPED() - f.seek(0) - win32file.TransmitFile( - s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0 - ) - length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) - - ol = pywintypes.OVERLAPPED() - f.seek(0) - win32file.TransmitFile( - s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, aaa, bbb - ) - length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) - - ol = pywintypes.OVERLAPPED() - f.seek(0) - win32file.TransmitFile( - s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, empty, empty - ) - length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) - - ol = pywintypes.OVERLAPPED() - f.seek(0) - win32file.TransmitFile( - s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, None, ccc - ) - length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) - - ol = pywintypes.OVERLAPPED() - f.seek(0) - win32file.TransmitFile( - s2, win32file._get_osfhandle(f.fileno()), val_length, 0, ol, 0, ddd - ) - length += win32file.GetOverlappedResult(s2.fileno(), ol, 1) - - s2.close() - th.join() - buf = str2bytes("").join(self.request) - self.assertEqual(length, len(buf)) - expected = val + aaa + val + bbb + val + val + ccc + ddd + val - self.assertEqual(type(expected), type(buf)) - 
self.assertEqual(expected, buf) - - -class TestWSAEnumNetworkEvents(unittest.TestCase): - def test_basics(self): - s = socket.socket() - e = win32event.CreateEvent(None, 1, 0, None) - win32file.WSAEventSelect(s, e, 0) - self.assertEqual(win32file.WSAEnumNetworkEvents(s), {}) - self.assertEqual(win32file.WSAEnumNetworkEvents(s, e), {}) - self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, s, e, 3) - self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, s, "spam") - self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, "spam", e) - self.assertRaises(TypeError, win32file.WSAEnumNetworkEvents, "spam") - f = open("NUL") - h = win32file._get_osfhandle(f.fileno()) - self.assertRaises(win32file.error, win32file.WSAEnumNetworkEvents, h) - self.assertRaises(win32file.error, win32file.WSAEnumNetworkEvents, s, h) - try: - win32file.WSAEnumNetworkEvents(h) - except win32file.error as e: - self.assertEqual(e.winerror, win32file.WSAENOTSOCK) - try: - win32file.WSAEnumNetworkEvents(s, h) - except win32file.error as e: - # According to the docs it would seem reasonable that - # this would fail with WSAEINVAL, but it doesn't. - self.assertEqual(e.winerror, win32file.WSAENOTSOCK) - - def test_functional(self): - # This is not really a unit test, but it does exercise the code - # quite well and can serve as an example of WSAEventSelect and - # WSAEnumNetworkEvents usage. 
- port = socket.socket() - port.setblocking(0) - port_event = win32event.CreateEvent(None, 0, 0, None) - win32file.WSAEventSelect( - port, port_event, win32file.FD_ACCEPT | win32file.FD_CLOSE - ) - port.bind(("127.0.0.1", 0)) - port.listen(10) - - client = socket.socket() - client.setblocking(0) - client_event = win32event.CreateEvent(None, 0, 0, None) - win32file.WSAEventSelect( - client, - client_event, - win32file.FD_CONNECT - | win32file.FD_READ - | win32file.FD_WRITE - | win32file.FD_CLOSE, - ) - err = client.connect_ex(port.getsockname()) - self.assertEqual(err, win32file.WSAEWOULDBLOCK) - - res = win32event.WaitForSingleObject(port_event, 1000) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - events = win32file.WSAEnumNetworkEvents(port, port_event) - self.assertEqual(events, {win32file.FD_ACCEPT: 0}) - - server, addr = port.accept() - server.setblocking(0) - server_event = win32event.CreateEvent(None, 1, 0, None) - win32file.WSAEventSelect( - server, - server_event, - win32file.FD_READ | win32file.FD_WRITE | win32file.FD_CLOSE, - ) - res = win32event.WaitForSingleObject(server_event, 1000) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - events = win32file.WSAEnumNetworkEvents(server, server_event) - self.assertEqual(events, {win32file.FD_WRITE: 0}) - - res = win32event.WaitForSingleObject(client_event, 1000) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - events = win32file.WSAEnumNetworkEvents(client, client_event) - self.assertEqual(events, {win32file.FD_CONNECT: 0, win32file.FD_WRITE: 0}) - sent = 0 - data = str2bytes("x") * 16 * 1024 - while sent < 16 * 1024 * 1024: - try: - sent += client.send(data) - except socket.error as e: - if e.args[0] == win32file.WSAEINTR: - continue - elif e.args[0] in (win32file.WSAEWOULDBLOCK, win32file.WSAENOBUFS): - break - else: - raise - else: - self.fail("could not find socket buffer limit") - - events = win32file.WSAEnumNetworkEvents(client) - self.assertEqual(events, {}) - - res = 
win32event.WaitForSingleObject(server_event, 1000) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - events = win32file.WSAEnumNetworkEvents(server, server_event) - self.assertEqual(events, {win32file.FD_READ: 0}) - - received = 0 - while received < sent: - try: - received += len(server.recv(16 * 1024)) - except socket.error as e: - if e.args[0] in [win32file.WSAEINTR, win32file.WSAEWOULDBLOCK]: - continue - else: - raise - - self.assertEqual(received, sent) - events = win32file.WSAEnumNetworkEvents(server) - self.assertEqual(events, {}) - - res = win32event.WaitForSingleObject(client_event, 1000) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - events = win32file.WSAEnumNetworkEvents(client, client_event) - self.assertEqual(events, {win32file.FD_WRITE: 0}) - - client.shutdown(socket.SHUT_WR) - res = win32event.WaitForSingleObject(server_event, 1000) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - # strange timing issues... - for i in range(5): - events = win32file.WSAEnumNetworkEvents(server, server_event) - if events: - break - win32api.Sleep(100) - else: - raise AssertionError("failed to get events") - self.assertEqual(events, {win32file.FD_CLOSE: 0}) - events = win32file.WSAEnumNetworkEvents(client) - self.assertEqual(events, {}) - - server.close() - res = win32event.WaitForSingleObject(client_event, 1000) - self.assertEqual(res, win32event.WAIT_OBJECT_0) - events = win32file.WSAEnumNetworkEvents(client, client_event) - self.assertEqual(events, {win32file.FD_CLOSE: 0}) - - client.close() - events = win32file.WSAEnumNetworkEvents(port) - self.assertEqual(events, {}) - - -if __name__ == "__main__": - testmain() diff --git a/lib/win32/test/test_win32gui.py b/lib/win32/test/test_win32gui.py deleted file mode 100644 index ad96bd43..00000000 --- a/lib/win32/test/test_win32gui.py +++ /dev/null @@ -1,65 +0,0 @@ -# tests for win32gui -import array -import operator -import unittest - -import pywin32_testutil -import win32gui - - -class 
TestPyGetString(unittest.TestCase): - def test_get_string(self): - # test invalid addresses cause a ValueError rather than crash! - self.assertRaises(ValueError, win32gui.PyGetString, 0) - self.assertRaises(ValueError, win32gui.PyGetString, 1) - self.assertRaises(ValueError, win32gui.PyGetString, 1, 1) - - -class TestPyGetMemory(unittest.TestCase): - def test_ob(self): - # Check the PyGetMemory result and a bytes string can be compared - test_data = b"\0\1\2\3\4\5\6" - c = array.array("b", test_data) - addr, buflen = c.buffer_info() - got = win32gui.PyGetMemory(addr, buflen) - self.assertEqual(len(got), len(test_data)) - self.assertEqual(bytes(got), test_data) - - def test_memory_index(self): - # Check we can index into the buffer object returned by PyGetMemory - test_data = b"\0\1\2\3\4\5\6" - c = array.array("b", test_data) - addr, buflen = c.buffer_info() - got = win32gui.PyGetMemory(addr, buflen) - self.assertEqual(got[0], 0) - - def test_memory_slice(self): - # Check we can slice the buffer object returned by PyGetMemory - test_data = b"\0\1\2\3\4\5\6" - c = array.array("b", test_data) - addr, buflen = c.buffer_info() - got = win32gui.PyGetMemory(addr, buflen) - self.assertEqual(list(got[0:3]), [0, 1, 2]) - - def test_real_view(self): - # Do the PyGetMemory, then change the original memory, then ensure - # the initial object we fetched sees the new value. - test_data = b"\0\1\2\3\4\5\6" - c = array.array("b", test_data) - addr, buflen = c.buffer_info() - got = win32gui.PyGetMemory(addr, buflen) - self.assertEqual(got[0], 0) - c[0] = 1 - self.assertEqual(got[0], 1) - - def test_memory_not_writable(self): - # Check the buffer object fetched by PyGetMemory isn't writable. 
- test_data = b"\0\1\2\3\4\5\6" - c = array.array("b", test_data) - addr, buflen = c.buffer_info() - got = win32gui.PyGetMemory(addr, buflen) - self.assertRaises(TypeError, operator.setitem, got, 0, 1) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32guistruct.py b/lib/win32/test/test_win32guistruct.py deleted file mode 100644 index 063cea3a..00000000 --- a/lib/win32/test/test_win32guistruct.py +++ /dev/null @@ -1,330 +0,0 @@ -import array -import unittest - -import pythoncom -import win32con -import win32gui -import win32gui_struct - - -class TestBase(unittest.TestCase): - def assertDictEquals(self, d, **kw): - checked = dict() - for n, v in kw.items(): - self.assertEqual(v, d[n], "'%s' doesn't match: %r != %r" % (n, v, d[n])) - checked[n] = True - checked_keys = list(checked.keys()) - passed_keys = list(kw.keys()) - checked_keys.sort() - passed_keys.sort() - self.assertEqual(checked_keys, passed_keys) - - -class TestMenuItemInfo(TestBase): - def _testPackUnpack(self, text): - vals = dict( - fType=win32con.MFT_MENUBARBREAK, - fState=win32con.MFS_CHECKED, - wID=123, - hSubMenu=1234, - hbmpChecked=12345, - hbmpUnchecked=123456, - dwItemData=1234567, - text=text, - hbmpItem=321, - ) - mii, extras = win32gui_struct.PackMENUITEMINFO(**vals) - ( - fType, - fState, - wID, - hSubMenu, - hbmpChecked, - hbmpUnchecked, - dwItemData, - text, - hbmpItem, - ) = win32gui_struct.UnpackMENUITEMINFO(mii) - self.assertDictEquals( - vals, - fType=fType, - fState=fState, - wID=wID, - hSubMenu=hSubMenu, - hbmpChecked=hbmpChecked, - hbmpUnchecked=hbmpUnchecked, - dwItemData=dwItemData, - text=text, - hbmpItem=hbmpItem, - ) - - def testPackUnpack(self): - self._testPackUnpack("Hello") - - def testPackUnpackNone(self): - self._testPackUnpack(None) - - def testEmptyMenuItemInfo(self): - mii, extra = win32gui_struct.EmptyMENUITEMINFO() - ( - fType, - fState, - wID, - hSubMenu, - hbmpChecked, - hbmpUnchecked, - dwItemData, - text, - hbmpItem, - ) = 
win32gui_struct.UnpackMENUITEMINFO(mii) - self.assertEqual(fType, 0) - self.assertEqual(fState, 0) - self.assertEqual(wID, 0) - self.assertEqual(hSubMenu, 0) - self.assertEqual(hbmpChecked, 0) - self.assertEqual(hbmpUnchecked, 0) - self.assertEqual(dwItemData, 0) - self.assertEqual(hbmpItem, 0) - # it's not clear if UnpackMENUITEMINFO() should ignore cch, instead - # assuming it is a buffer size rather than 'current length' - but it - # never has (and this gives us every \0 in the string), and actually - # helps us test the unicode/str semantics. - self.assertEqual(text, "\0" * len(text)) - - -class TestMenuInfo(TestBase): - def testPackUnpack(self): - vals = dict(dwStyle=1, cyMax=2, hbrBack=3, dwContextHelpID=4, dwMenuData=5) - - mi = win32gui_struct.PackMENUINFO(**vals) - ( - dwStyle, - cyMax, - hbrBack, - dwContextHelpID, - dwMenuData, - ) = win32gui_struct.UnpackMENUINFO(mi) - - self.assertDictEquals( - vals, - dwStyle=dwStyle, - cyMax=cyMax, - hbrBack=hbrBack, - dwContextHelpID=dwContextHelpID, - dwMenuData=dwMenuData, - ) - - def testEmptyMenuItemInfo(self): - mi = win32gui_struct.EmptyMENUINFO() - ( - dwStyle, - cyMax, - hbrBack, - dwContextHelpID, - dwMenuData, - ) = win32gui_struct.UnpackMENUINFO(mi) - self.assertEqual(dwStyle, 0) - self.assertEqual(cyMax, 0) - self.assertEqual(hbrBack, 0) - self.assertEqual(dwContextHelpID, 0) - self.assertEqual(dwMenuData, 0) - - -class TestTreeViewItem(TestBase): - def _testPackUnpack(self, text): - vals = dict( - hitem=1, - state=2, - stateMask=3, - text=text, - image=4, - selimage=5, - citems=6, - param=7, - ) - - ti, extra = win32gui_struct.PackTVITEM(**vals) - ( - hitem, - state, - stateMask, - text, - image, - selimage, - citems, - param, - ) = win32gui_struct.UnpackTVITEM(ti) - - self.assertDictEquals( - vals, - hitem=hitem, - state=state, - stateMask=stateMask, - text=text, - image=image, - selimage=selimage, - citems=citems, - param=param, - ) - - def testPackUnpack(self): - self._testPackUnpack("Hello") - - def 
testPackUnpackNone(self): - self._testPackUnpack(None) - - def testEmpty(self): - ti, extras = win32gui_struct.EmptyTVITEM(0) - ( - hitem, - state, - stateMask, - text, - image, - selimage, - citems, - param, - ) = win32gui_struct.UnpackTVITEM(ti) - self.assertEqual(hitem, 0) - self.assertEqual(state, 0) - self.assertEqual(stateMask, 0) - self.assertEqual(text, "") - self.assertEqual(image, 0) - self.assertEqual(selimage, 0) - self.assertEqual(citems, 0) - self.assertEqual(param, 0) - - -class TestListViewItem(TestBase): - def _testPackUnpack(self, text): - vals = dict( - item=None, - subItem=None, - state=1, - stateMask=2, - text=text, - image=3, - param=4, - indent=5, - ) - - ti, extra = win32gui_struct.PackLVITEM(**vals) - ( - item, - subItem, - state, - stateMask, - text, - image, - param, - indent, - ) = win32gui_struct.UnpackLVITEM(ti) - - # patch expected values. - vals["item"] = 0 - vals["subItem"] = 0 - self.assertDictEquals( - vals, - item=item, - subItem=subItem, - state=state, - stateMask=stateMask, - text=text, - image=image, - param=param, - indent=indent, - ) - - def testPackUnpack(self): - self._testPackUnpack("Hello") - - def testPackUnpackNone(self): - self._testPackUnpack(None) - - def testEmpty(self): - ti, extras = win32gui_struct.EmptyLVITEM(1, 2) - ( - item, - subItem, - state, - stateMask, - text, - image, - param, - indent, - ) = win32gui_struct.UnpackLVITEM(ti) - self.assertEqual(item, 1) - self.assertEqual(subItem, 2) - self.assertEqual(state, 0) - self.assertEqual(stateMask, 0) - self.assertEqual(text, "") - self.assertEqual(image, 0) - self.assertEqual(param, 0) - self.assertEqual(indent, 0) - - -class TestLVColumn(TestBase): - def _testPackUnpack(self, text): - vals = dict(fmt=1, cx=2, text=text, subItem=3, image=4, order=5) - - ti, extra = win32gui_struct.PackLVCOLUMN(**vals) - fmt, cx, text, subItem, image, order = win32gui_struct.UnpackLVCOLUMN(ti) - - self.assertDictEquals( - vals, fmt=fmt, cx=cx, text=text, subItem=subItem, 
image=image, order=order - ) - - def testPackUnpack(self): - self._testPackUnpack("Hello") - - def testPackUnpackNone(self): - self._testPackUnpack(None) - - def testEmpty(self): - ti, extras = win32gui_struct.EmptyLVCOLUMN() - fmt, cx, text, subItem, image, order = win32gui_struct.UnpackLVCOLUMN(ti) - self.assertEqual(fmt, 0) - self.assertEqual(cx, 0) - self.assertEqual(text, "") - self.assertEqual(subItem, 0) - self.assertEqual(image, 0) - self.assertEqual(order, 0) - - -class TestDEV_BROADCAST_HANDLE(TestBase): - def testPackUnpack(self): - s = win32gui_struct.PackDEV_BROADCAST_HANDLE(123) - c = array.array("b", s) - got = win32gui_struct.UnpackDEV_BROADCAST(c.buffer_info()[0]) - self.assertEqual(got.handle, 123) - - def testGUID(self): - s = win32gui_struct.PackDEV_BROADCAST_HANDLE(123, guid=pythoncom.IID_IUnknown) - c = array.array("b", s) - got = win32gui_struct.UnpackDEV_BROADCAST(c.buffer_info()[0]) - self.assertEqual(got.handle, 123) - self.assertEqual(got.eventguid, pythoncom.IID_IUnknown) - - -class TestDEV_BROADCAST_DEVICEINTERFACE(TestBase): - def testPackUnpack(self): - s = win32gui_struct.PackDEV_BROADCAST_DEVICEINTERFACE( - pythoncom.IID_IUnknown, "hello" - ) - c = array.array("b", s) - got = win32gui_struct.UnpackDEV_BROADCAST(c.buffer_info()[0]) - self.assertEqual(got.classguid, pythoncom.IID_IUnknown) - self.assertEqual(got.name, "hello") - - -class TestDEV_BROADCAST_VOLUME(TestBase): - def testPackUnpack(self): - s = win32gui_struct.PackDEV_BROADCAST_VOLUME(123, 456) - c = array.array("b", s) - got = win32gui_struct.UnpackDEV_BROADCAST(c.buffer_info()[0]) - self.assertEqual(got.unitmask, 123) - self.assertEqual(got.flags, 456) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32inet.py b/lib/win32/test/test_win32inet.py deleted file mode 100644 index 044ef8e6..00000000 --- a/lib/win32/test/test_win32inet.py +++ /dev/null @@ -1,93 +0,0 @@ -import unittest - -import winerror -from pywin32_testutil import 
str2bytes # py3k-friendly helper -from pywin32_testutil import TestSkipped, testmain -from win32inet import * -from win32inetcon import * - - -class CookieTests(unittest.TestCase): - def testCookies(self): - data = "TestData=Test" - InternetSetCookie("http://www.python.org", None, data) - got = InternetGetCookie("http://www.python.org", None) - # handle that there might already be cookies for the domain. - bits = map(lambda x: x.strip(), got.split(";")) - self.assertTrue(data in bits) - - def testCookiesEmpty(self): - try: - InternetGetCookie("http://site-with-no-cookie.python.org", None) - self.fail("expected win32 exception") - except error as exc: - self.assertEqual(exc.winerror, winerror.ERROR_NO_MORE_ITEMS) - - -class UrlTests(unittest.TestCase): - def testSimpleCanonicalize(self): - ret = InternetCanonicalizeUrl("foo bar") - self.assertEqual(ret, "foo%20bar") - - def testLongCanonicalize(self): - # a 4k URL causes the underlying API to request a bigger buffer" - big = "x" * 2048 - ret = InternetCanonicalizeUrl(big + " " + big) - self.assertEqual(ret, big + "%20" + big) - - -class TestNetwork(unittest.TestCase): - def setUp(self): - self.hi = InternetOpen("test", INTERNET_OPEN_TYPE_DIRECT, None, None, 0) - - def tearDown(self): - self.hi.Close() - - def testPythonDotOrg(self): - hdl = InternetOpenUrl( - self.hi, "http://www.python.org", None, INTERNET_FLAG_EXISTING_CONNECT - ) - chunks = [] - while 1: - chunk = InternetReadFile(hdl, 1024) - if not chunk: - break - chunks.append(chunk) - data = str2bytes("").join(chunks) - assert data.find(str2bytes("Python")) > 0, repr( - data - ) # This must appear somewhere on the main page! - - def testFtpCommand(self): - # ftp.python.org doesn't exist. ftp.gnu.org is what Python's urllib - # test code uses. - # (As of 2020 it doesn't! Unsurprisingly, it's difficult to find a good - # test server. 
This test sometimes works, but often doesn't - so handle - # failure here as a "skip") - try: - hcon = InternetConnect( - self.hi, - "ftp.gnu.org", - INTERNET_INVALID_PORT_NUMBER, - None, - None, # username/password - INTERNET_SERVICE_FTP, - 0, - 0, - ) - try: - hftp = FtpCommand(hcon, True, FTP_TRANSFER_TYPE_ASCII, "NLST", 0) - try: - print("Connected - response info is", InternetGetLastResponseInfo()) - got = InternetReadFile(hftp, 2048) - print("Read", len(got), "bytes") - finally: - hftp.Close() - finally: - hcon.Close() - except error as e: - raise TestSkipped(e) - - -if __name__ == "__main__": - testmain() diff --git a/lib/win32/test/test_win32net.py b/lib/win32/test/test_win32net.py deleted file mode 100644 index 93071963..00000000 --- a/lib/win32/test/test_win32net.py +++ /dev/null @@ -1,24 +0,0 @@ -import unittest - -import win32net -import win32netcon - - -class TestCase(unittest.TestCase): - def testGroupsGoodResume(self, server=None): - res = 0 - level = 0 # setting it to 1 will provide more detailed info - while True: - (user_list, total, res) = win32net.NetGroupEnum(server, level, res) - for i in user_list: - pass - if not res: - break - - def testGroupsBadResume(self, server=None): - res = 1 # Can't pass this first time round. - self.assertRaises(win32net.error, win32net.NetGroupEnum, server, 0, res) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32pipe.py b/lib/win32/test/test_win32pipe.py deleted file mode 100644 index cbe647dd..00000000 --- a/lib/win32/test/test_win32pipe.py +++ /dev/null @@ -1,153 +0,0 @@ -import threading -import time -import unittest - -import pywintypes -import win32con -import win32event -import win32file -import win32pipe -import winerror -from pywin32_testutil import str2bytes # py3k-friendly helper - - -class PipeTests(unittest.TestCase): - pipename = "\\\\.\\pipe\\python_test_pipe" - - def _serverThread(self, pipe_handle, event, wait_time): - # just do one connection and terminate. 
- hr = win32pipe.ConnectNamedPipe(pipe_handle) - self.assertTrue( - hr in (0, winerror.ERROR_PIPE_CONNECTED), "Got error code 0x%x" % (hr,) - ) - hr, got = win32file.ReadFile(pipe_handle, 100) - self.assertEqual(got, str2bytes("foo\0bar")) - time.sleep(wait_time) - win32file.WriteFile(pipe_handle, str2bytes("bar\0foo")) - pipe_handle.Close() - event.set() - - def startPipeServer(self, event, wait_time=0): - openMode = win32pipe.PIPE_ACCESS_DUPLEX - pipeMode = win32pipe.PIPE_TYPE_MESSAGE | win32pipe.PIPE_WAIT - - sa = pywintypes.SECURITY_ATTRIBUTES() - sa.SetSecurityDescriptorDacl(1, None, 0) - - pipe_handle = win32pipe.CreateNamedPipe( - self.pipename, - openMode, - pipeMode, - win32pipe.PIPE_UNLIMITED_INSTANCES, - 0, - 0, - 2000, - sa, - ) - - threading.Thread( - target=self._serverThread, args=(pipe_handle, event, wait_time) - ).start() - - def testCallNamedPipe(self): - event = threading.Event() - self.startPipeServer(event) - - got = win32pipe.CallNamedPipe( - self.pipename, str2bytes("foo\0bar"), 1024, win32pipe.NMPWAIT_WAIT_FOREVER - ) - self.assertEqual(got, str2bytes("bar\0foo")) - event.wait(5) - self.assertTrue(event.isSet(), "Pipe server thread didn't terminate") - - def testTransactNamedPipeBlocking(self): - event = threading.Event() - self.startPipeServer(event) - open_mode = win32con.GENERIC_READ | win32con.GENERIC_WRITE - - hpipe = win32file.CreateFile( - self.pipename, - open_mode, - 0, # no sharing - None, # default security - win32con.OPEN_EXISTING, - 0, # win32con.FILE_FLAG_OVERLAPPED, - None, - ) - - # set to message mode. 
- win32pipe.SetNamedPipeHandleState( - hpipe, win32pipe.PIPE_READMODE_MESSAGE, None, None - ) - - hr, got = win32pipe.TransactNamedPipe(hpipe, str2bytes("foo\0bar"), 1024, None) - self.assertEqual(got, str2bytes("bar\0foo")) - event.wait(5) - self.assertTrue(event.isSet(), "Pipe server thread didn't terminate") - - def testTransactNamedPipeBlockingBuffer(self): - # Like testTransactNamedPipeBlocking, but a pre-allocated buffer is - # passed (not really that useful, but it exercises the code path) - event = threading.Event() - self.startPipeServer(event) - open_mode = win32con.GENERIC_READ | win32con.GENERIC_WRITE - - hpipe = win32file.CreateFile( - self.pipename, - open_mode, - 0, # no sharing - None, # default security - win32con.OPEN_EXISTING, - 0, # win32con.FILE_FLAG_OVERLAPPED, - None, - ) - - # set to message mode. - win32pipe.SetNamedPipeHandleState( - hpipe, win32pipe.PIPE_READMODE_MESSAGE, None, None - ) - - buffer = win32file.AllocateReadBuffer(1024) - hr, got = win32pipe.TransactNamedPipe( - hpipe, str2bytes("foo\0bar"), buffer, None - ) - self.assertEqual(got, str2bytes("bar\0foo")) - event.wait(5) - self.assertTrue(event.isSet(), "Pipe server thread didn't terminate") - - def testTransactNamedPipeAsync(self): - event = threading.Event() - overlapped = pywintypes.OVERLAPPED() - overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) - self.startPipeServer(event, 0.5) - open_mode = win32con.GENERIC_READ | win32con.GENERIC_WRITE - - hpipe = win32file.CreateFile( - self.pipename, - open_mode, - 0, # no sharing - None, # default security - win32con.OPEN_EXISTING, - win32con.FILE_FLAG_OVERLAPPED, - None, - ) - - # set to message mode. 
- win32pipe.SetNamedPipeHandleState( - hpipe, win32pipe.PIPE_READMODE_MESSAGE, None, None - ) - - buffer = win32file.AllocateReadBuffer(1024) - hr, got = win32pipe.TransactNamedPipe( - hpipe, str2bytes("foo\0bar"), buffer, overlapped - ) - self.assertEqual(hr, winerror.ERROR_IO_PENDING) - nbytes = win32file.GetOverlappedResult(hpipe, overlapped, True) - got = buffer[:nbytes] - self.assertEqual(got, str2bytes("bar\0foo")) - event.wait(5) - self.assertTrue(event.isSet(), "Pipe server thread didn't terminate") - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32print.py b/lib/win32/test/test_win32print.py deleted file mode 100644 index 71fd2c03..00000000 --- a/lib/win32/test/test_win32print.py +++ /dev/null @@ -1,24 +0,0 @@ -# Tests (scarce) for win32print module - -import unittest - -import win32print as wprn - - -class Win32PrintTestCase(unittest.TestCase): - def setUp(self): - self.printer_idx = 0 - self.printer_levels_all = list(range(1, 10)) - self.local_printers = wprn.EnumPrinters(wprn.PRINTER_ENUM_LOCAL, None, 1) - - def test_printer_levels_read_dummy(self): - if not self.local_printers: - print("Test didn't run (no local printers)!") - return - ph = wprn.OpenPrinter(self.local_printers[self.printer_idx][2]) - for level in self.printer_levels_all: - wprn.GetPrinter(ph, level) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32profile.py b/lib/win32/test/test_win32profile.py deleted file mode 100644 index 84dc83bc..00000000 --- a/lib/win32/test/test_win32profile.py +++ /dev/null @@ -1,18 +0,0 @@ -"""Test win32profile""" -import os -import unittest - -import win32profile - - -class Tester(unittest.TestCase): - def test_environment(self): - os.environ["FOO"] = "bar=baz" - env = win32profile.GetEnvironmentStrings() - assert "FOO" in env - assert env["FOO"] == "bar=baz" - assert os.environ["FOO"] == "bar=baz" - - -if __name__ == "__main__": - unittest.main() diff --git 
a/lib/win32/test/test_win32rcparser.py b/lib/win32/test/test_win32rcparser.py deleted file mode 100644 index 6eae6884..00000000 --- a/lib/win32/test/test_win32rcparser.py +++ /dev/null @@ -1,70 +0,0 @@ -import os -import tempfile -import unittest - -import win32con -import win32rcparser - - -class TestParser(unittest.TestCase): - def setUp(self): - rc_file = os.path.join(os.path.dirname(__file__), "win32rcparser", "test.rc") - self.resources = win32rcparser.Parse(rc_file) - - def testStrings(self): - for sid, expected in ( - ("IDS_TEST_STRING4", "Test 'single quoted' string"), - ("IDS_TEST_STRING1", 'Test "quoted" string'), - ("IDS_TEST_STRING3", 'String with single " quote'), - ("IDS_TEST_STRING2", "Test string"), - ): - got = self.resources.stringTable[sid].value - self.assertEqual(got, expected) - - def testStandardIds(self): - for idc in "IDOK IDCANCEL".split(): - correct = getattr(win32con, idc) - self.assertEqual(self.resources.names[correct], idc) - self.assertEqual(self.resources.ids[idc], correct) - - def testTabStop(self): - d = self.resources.dialogs["IDD_TEST_DIALOG2"] - tabstop_names = ["IDC_EDIT1", "IDOK"] # should have WS_TABSTOP - tabstop_ids = [self.resources.ids[name] for name in tabstop_names] - notabstop_names = ["IDC_EDIT2"] # should have WS_TABSTOP - notabstop_ids = [self.resources.ids[name] for name in notabstop_names] - num_ok = 0 - for cdef in d[1:]: # skip dlgdef - # print cdef - cid = cdef[2] - style = cdef[-2] - styleex = cdef[-1] - if cid in tabstop_ids: - self.assertEqual(style & win32con.WS_TABSTOP, win32con.WS_TABSTOP) - num_ok += 1 - elif cid in notabstop_ids: - self.assertEqual(style & win32con.WS_TABSTOP, 0) - num_ok += 1 - self.assertEqual(num_ok, len(tabstop_ids) + len(notabstop_ids)) - - -class TestGenerated(TestParser): - def setUp(self): - # don't call base! 
- rc_file = os.path.join(os.path.dirname(__file__), "win32rcparser", "test.rc") - py_file = tempfile.mktemp("test_win32rcparser.py") - try: - win32rcparser.GenerateFrozenResource(rc_file, py_file) - py_source = open(py_file).read() - finally: - if os.path.isfile(py_file): - os.unlink(py_file) - - # poor-man's import :) - globs = {} - exec(py_source, globs, globs) - self.resources = globs["FakeParser"]() - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32timezone.py b/lib/win32/test/test_win32timezone.py deleted file mode 100644 index 6dba51ed..00000000 --- a/lib/win32/test/test_win32timezone.py +++ /dev/null @@ -1,27 +0,0 @@ -# Test module for win32timezone - -import doctest -import sys -import unittest - -import win32timezone - - -class Win32TimeZoneTest(unittest.TestCase): - def testWin32TZ(self): - # On 3.7 and later, the repr() for datetime objects changed to use kwargs - eg, - # eg, `datetime.timedelta(0, 10800)` is now `datetime.timedelta(seconds=10800)`. - # So we just skip the tests on 3.5 and 3.6 - if sys.version_info < (3, 7): - from pywin32_testutil import TestSkipped - - raise TestSkipped( - "The repr() for datetime objects makes this test fail in 3.5 and 3.6" - ) - - failed, total = doctest.testmod(win32timezone, verbose=False) - self.assertFalse(failed) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/test_win32trace.py b/lib/win32/test/test_win32trace.py deleted file mode 100644 index 8b241cbb..00000000 --- a/lib/win32/test/test_win32trace.py +++ /dev/null @@ -1,369 +0,0 @@ -import os -import sys -import threading -import time -import unittest - -import win32trace -from pywin32_testutil import TestSkipped - -if __name__ == "__main__": - this_file = sys.argv[0] -else: - this_file = __file__ - - -def SkipIfCI(): - # This test often fails in CI, probably when it is being run multiple times - # (ie, for different Python versions) - # Github actions always have a `CI` variable. 
- if "CI" in os.environ: - raise TestSkipped("We skip this test on CI") - - -def CheckNoOtherReaders(): - win32trace.write("Hi") - time.sleep(0.05) - if win32trace.read() != "Hi": - # Reset everything so following tests still fail with this error! - win32trace.TermRead() - win32trace.TermWrite() - raise RuntimeError( - "An existing win32trace reader appears to be " - "running - please stop this process and try again" - ) - - -class TestInitOps(unittest.TestCase): - def setUp(self): - SkipIfCI() - # clear old data - win32trace.InitRead() - win32trace.read() - win32trace.TermRead() - - def tearDown(self): - try: - win32trace.TermRead() - except win32trace.error: - pass - try: - win32trace.TermWrite() - except win32trace.error: - pass - - def testInitTermRead(self): - self.assertRaises(win32trace.error, win32trace.read) - win32trace.InitRead() - result = win32trace.read() - self.assertEqual(result, "") - win32trace.TermRead() - self.assertRaises(win32trace.error, win32trace.read) - - win32trace.InitRead() - self.assertRaises(win32trace.error, win32trace.InitRead) - win32trace.InitWrite() - self.assertRaises(win32trace.error, win32trace.InitWrite) - win32trace.TermWrite() - win32trace.TermRead() - - def testInitTermWrite(self): - self.assertRaises(win32trace.error, win32trace.write, "Hei") - win32trace.InitWrite() - win32trace.write("Johan Galtung") - win32trace.TermWrite() - self.assertRaises(win32trace.error, win32trace.write, "Hei") - - def testTermSematics(self): - win32trace.InitWrite() - win32trace.write("Ta da") - - # if we both Write and Read are terminated at the same time, - # we lose the data as the win32 object is closed. 
Note that - # if another writer is running, we do *not* lose the data - so - # test for either the correct data or an empty string - win32trace.TermWrite() - win32trace.InitRead() - self.assertTrue(win32trace.read() in ("Ta da", "")) - win32trace.TermRead() - - # we keep the data because we init read before terminating write - win32trace.InitWrite() - win32trace.write("Ta da") - win32trace.InitRead() - win32trace.TermWrite() - self.assertEqual("Ta da", win32trace.read()) - win32trace.TermRead() - - -class BasicSetupTearDown(unittest.TestCase): - def setUp(self): - SkipIfCI() - win32trace.InitRead() - # If any other writers are running (even if not actively writing), - # terminating the module will *not* close the handle, meaning old data - # will remain. This can cause other tests to fail. - win32trace.read() - win32trace.InitWrite() - - def tearDown(self): - win32trace.TermWrite() - win32trace.TermRead() - - -class TestModuleOps(BasicSetupTearDown): - def testRoundTrip(self): - win32trace.write("Syver Enstad") - syverEnstad = win32trace.read() - self.assertEqual("Syver Enstad", syverEnstad) - - def testRoundTripUnicode(self): - win32trace.write("\xa9opyright Syver Enstad") - syverEnstad = win32trace.read() - # str objects are always returned in py2k (latin-1 encoding was used - # on unicode objects) - self.assertEqual("\xa9opyright Syver Enstad", syverEnstad) - - def testBlockingRead(self): - win32trace.write("Syver Enstad") - self.assertEqual("Syver Enstad", win32trace.blockingread()) - - def testBlockingReadUnicode(self): - win32trace.write("\xa9opyright Syver Enstad") - # str objects are always returned in py2k (latin-1 encoding was used - # on unicode objects) - self.assertEqual("\xa9opyright Syver Enstad", win32trace.blockingread()) - - def testFlush(self): - win32trace.flush() - - -class TestTraceObjectOps(BasicSetupTearDown): - def testInit(self): - win32trace.TermRead() - win32trace.TermWrite() - traceObject = win32trace.GetTracer() - 
self.assertRaises(win32trace.error, traceObject.read) - self.assertRaises(win32trace.error, traceObject.write, "") - win32trace.InitRead() - win32trace.InitWrite() - self.assertEqual("", traceObject.read()) - traceObject.write("Syver") - - def testFlush(self): - traceObject = win32trace.GetTracer() - traceObject.flush() - - def testIsatty(self): - tracer = win32trace.GetTracer() - assert tracer.isatty() == False - - def testRoundTrip(self): - traceObject = win32trace.GetTracer() - traceObject.write("Syver Enstad") - self.assertEqual("Syver Enstad", traceObject.read()) - - -class WriterThread(threading.Thread): - def run(self): - self.writeCount = 0 - for each in range(self.BucketCount): - win32trace.write(str(each)) - self.writeCount = self.BucketCount - - def verifyWritten(self): - return self.writeCount == self.BucketCount - - -class TestMultipleThreadsWriting(unittest.TestCase): - # FullBucket is the thread count - FullBucket = 50 - BucketCount = 9 # buckets must be a single digit number (ie. less than 10) - - def setUp(self): - SkipIfCI() - WriterThread.BucketCount = self.BucketCount - win32trace.InitRead() - win32trace.read() # clear any old data. 
- win32trace.InitWrite() - CheckNoOtherReaders() - self.threads = [WriterThread() for each in range(self.FullBucket)] - self.buckets = list(range(self.BucketCount)) - for each in self.buckets: - self.buckets[each] = 0 - - def tearDown(self): - win32trace.TermRead() - win32trace.TermWrite() - - def areBucketsFull(self): - bucketsAreFull = True - for each in self.buckets: - assert each <= self.FullBucket, each - if each != self.FullBucket: - bucketsAreFull = False - break - return bucketsAreFull - - def read(self): - while 1: - readString = win32trace.blockingread() - for ch in readString: - integer = int(ch) - count = self.buckets[integer] - assert count != -1 - self.buckets[integer] = count + 1 - if self.buckets[integer] == self.FullBucket: - if self.areBucketsFull(): - return - - def testThreads(self): - for each in self.threads: - each.start() - self.read() - for each in self.threads: - each.join() - for each in self.threads: - assert each.verifyWritten() - assert self.areBucketsFull() - - -class TestHugeChunks(unittest.TestCase): - # BiggestChunk is the size where we stop stressing the writer - BiggestChunk = 2**16 # 256k should do it. - - def setUp(self): - SkipIfCI() - win32trace.InitRead() - win32trace.read() # clear any old data - win32trace.InitWrite() - - def testHugeChunks(self): - data = "*" * 1023 + "\n" - while len(data) <= self.BiggestChunk: - win32trace.write(data) - data = data + data - # If we made it here, we passed. 
- - def tearDown(self): - win32trace.TermRead() - win32trace.TermWrite() - - -import win32event -import win32process - - -class TraceWriteProcess: - def __init__(self, threadCount): - self.exitCode = -1 - self.threadCount = threadCount - - def start(self): - procHandle, threadHandle, procId, threadId = win32process.CreateProcess( - None, # appName - 'python.exe "%s" /run_test_process %s %s' - % (this_file, self.BucketCount, self.threadCount), - None, # process security - None, # thread security - 0, # inherit handles - win32process.NORMAL_PRIORITY_CLASS, - None, # new environment - None, # Current directory - win32process.STARTUPINFO(), # startup info - ) - self.processHandle = procHandle - - def join(self): - win32event.WaitForSingleObject(self.processHandle, win32event.INFINITE) - self.exitCode = win32process.GetExitCodeProcess(self.processHandle) - - def verifyWritten(self): - return self.exitCode == 0 - - -class TestOutofProcess(unittest.TestCase): - BucketCount = 9 - FullBucket = 50 - - def setUp(self): - SkipIfCI() - win32trace.InitRead() - TraceWriteProcess.BucketCount = self.BucketCount - self.setUpWriters() - self.buckets = list(range(self.BucketCount)) - for each in self.buckets: - self.buckets[each] = 0 - - def tearDown(self): - win32trace.TermRead() - - def setUpWriters(self): - self.processes = [] - # 5 processes, quot threads in each process - quot, remainder = divmod(self.FullBucket, 5) - for each in range(5): - self.processes.append(TraceWriteProcess(quot)) - if remainder: - self.processes.append(TraceWriteProcess(remainder)) - - def areBucketsFull(self): - bucketsAreFull = True - for each in self.buckets: - assert each <= self.FullBucket, each - if each != self.FullBucket: - bucketsAreFull = False - break - return bucketsAreFull - - def read(self): - while 1: - readString = win32trace.blockingread() - for ch in readString: - integer = int(ch) - count = self.buckets[integer] - assert count != -1 - self.buckets[integer] = count + 1 - if 
self.buckets[integer] == self.FullBucket: - if self.areBucketsFull(): - return - - def testProcesses(self): - for each in self.processes: - each.start() - self.read() - for each in self.processes: - each.join() - for each in self.processes: - assert each.verifyWritten() - assert self.areBucketsFull() - - -def _RunAsTestProcess(): - # Run as an external process by the main tests. - WriterThread.BucketCount = int(sys.argv[2]) - threadCount = int(sys.argv[3]) - threads = [WriterThread() for each in range(threadCount)] - win32trace.InitWrite() - for t in threads: - t.start() - for t in threads: - t.join() - for t in threads: - if not t.verifyWritten(): - sys.exit(-1) - - -if __name__ == "__main__": - if sys.argv[1:2] == ["/run_test_process"]: - _RunAsTestProcess() - sys.exit(0) - # If some other win32traceutil reader is running, these tests fail - # badly (as the other reader sometimes sees the output!) - win32trace.InitRead() - win32trace.InitWrite() - CheckNoOtherReaders() - # reset state so test env is back to normal - win32trace.TermRead() - win32trace.TermWrite() - unittest.main() diff --git a/lib/win32/test/test_win32wnet.py b/lib/win32/test/test_win32wnet.py deleted file mode 100644 index 04ad8a4d..00000000 --- a/lib/win32/test/test_win32wnet.py +++ /dev/null @@ -1,175 +0,0 @@ -import unittest - -import netbios -import win32api -import win32wnet -from pywin32_testutil import str2bytes - -RESOURCE_CONNECTED = 0x00000001 -RESOURCE_GLOBALNET = 0x00000002 -RESOURCE_REMEMBERED = 0x00000003 -RESOURCE_RECENT = 0x00000004 -RESOURCE_CONTEXT = 0x00000005 -RESOURCETYPE_ANY = 0x00000000 -RESOURCETYPE_DISK = 0x00000001 -RESOURCETYPE_PRINT = 0x00000002 -RESOURCETYPE_RESERVED = 0x00000008 -RESOURCETYPE_UNKNOWN = 0xFFFFFFFF -RESOURCEUSAGE_CONNECTABLE = 0x00000001 -RESOURCEUSAGE_CONTAINER = 0x00000002 -RESOURCEDISPLAYTYPE_GENERIC = 0x00000000 -RESOURCEDISPLAYTYPE_DOMAIN = 0x00000001 -RESOURCEDISPLAYTYPE_SERVER = 0x00000002 -RESOURCEDISPLAYTYPE_SHARE = 0x00000003 - - 
-NETRESOURCE_attributes = [ - ("dwScope", int), - ("dwType", int), - ("dwDisplayType", int), - ("dwUsage", int), - ("lpLocalName", str), - ("lpRemoteName", str), - ("lpComment", str), - ("lpProvider", str), -] - -NCB_attributes = [ - ("Command", int), - ("Retcode", int), - ("Lsn", int), - ("Num", int), - # ("Bufflen", int), - read-only - ("Callname", str), - ("Name", str), - ("Rto", int), - ("Sto", int), - ("Lana_num", int), - ("Cmd_cplt", int), - ("Event", int), - ("Post", int), -] - - -class TestCase(unittest.TestCase): - def testGetUser(self): - self.assertEqual(win32api.GetUserName(), win32wnet.WNetGetUser()) - - def _checkItemAttributes(self, item, attrs): - for attr, typ in attrs: - val = getattr(item, attr) - if typ is int: - self.assertTrue( - type(val) in (int,), "Attr %r has value %r" % (attr, val) - ) - new_val = val + 1 - elif typ is str: - if val is not None: - # on py2k, must be string or unicode. py3k must be string or bytes. - self.assertTrue( - type(val) in (str, str), "Attr %r has value %r" % (attr, val) - ) - new_val = val + " new value" - else: - new_val = "new value" - else: - self.fail("Don't know what %s is" % (typ,)) - # set the attribute just to make sure we can. 
- setattr(item, attr, new_val) - - def testNETRESOURCE(self): - nr = win32wnet.NETRESOURCE() - self._checkItemAttributes(nr, NETRESOURCE_attributes) - - def testWNetEnumResource(self): - handle = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET, RESOURCETYPE_ANY, 0, None) - try: - while 1: - items = win32wnet.WNetEnumResource(handle, 0) - if len(items) == 0: - break - for item in items: - self._checkItemAttributes(item, NETRESOURCE_attributes) - finally: - handle.Close() - - def testNCB(self): - ncb = win32wnet.NCB() - self._checkItemAttributes(ncb, NCB_attributes) - - def testNetbios(self): - # taken from the demo code in netbios.py - ncb = win32wnet.NCB() - ncb.Command = netbios.NCBENUM - la_enum = netbios.LANA_ENUM() - ncb.Buffer = la_enum - rc = win32wnet.Netbios(ncb) - self.assertEqual(rc, 0) - for i in range(la_enum.length): - ncb.Reset() - ncb.Command = netbios.NCBRESET - ncb.Lana_num = netbios.byte_to_int(la_enum.lana[i]) - rc = Netbios(ncb) - self.assertEqual(rc, 0) - ncb.Reset() - ncb.Command = netbios.NCBASTAT - ncb.Lana_num = byte_to_int(la_enum.lana[i]) - ncb.Callname = str2bytes("* ") # ensure bytes on py2x and 3k - adapter = netbios.ADAPTER_STATUS() - ncb.Buffer = adapter - Netbios(ncb) - # expect 6 bytes in the mac address. 
- self.assertTrue(len(adapter.adapter_address), 6) - - def iterConnectableShares(self): - nr = win32wnet.NETRESOURCE() - nr.dwScope = RESOURCE_GLOBALNET - nr.dwUsage = RESOURCEUSAGE_CONTAINER - nr.lpRemoteName = "\\\\" + win32api.GetComputerName() - - handle = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET, RESOURCETYPE_ANY, 0, nr) - while 1: - items = win32wnet.WNetEnumResource(handle, 0) - if len(items) == 0: - break - for item in items: - if item.dwDisplayType == RESOURCEDISPLAYTYPE_SHARE: - yield item - - def findUnusedDriveLetter(self): - existing = [ - x[0].lower() for x in win32api.GetLogicalDriveStrings().split("\0") if x - ] - handle = win32wnet.WNetOpenEnum(RESOURCE_REMEMBERED, RESOURCETYPE_DISK, 0, None) - try: - while 1: - items = win32wnet.WNetEnumResource(handle, 0) - if len(items) == 0: - break - xtra = [i.lpLocalName[0].lower() for i in items if i.lpLocalName] - existing.extend(xtra) - finally: - handle.Close() - for maybe in "defghijklmnopqrstuvwxyz": - if maybe not in existing: - return maybe - self.fail("All drive mappings are taken?") - - def testAddConnection(self): - localName = self.findUnusedDriveLetter() + ":" - for share in self.iterConnectableShares(): - share.lpLocalName = localName - win32wnet.WNetAddConnection2(share) - win32wnet.WNetCancelConnection2(localName, 0, 0) - break - - def testAddConnectionOld(self): - localName = self.findUnusedDriveLetter() + ":" - for share in self.iterConnectableShares(): - win32wnet.WNetAddConnection2(share.dwType, localName, share.lpRemoteName) - win32wnet.WNetCancelConnection2(localName, 0, 0) - break - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32/test/testall.py b/lib/win32/test/testall.py deleted file mode 100644 index d3df2630..00000000 --- a/lib/win32/test/testall.py +++ /dev/null @@ -1,230 +0,0 @@ -import os -import re -import sys -import traceback -import unittest - -import pywin32_testutil - -# A list of demos that depend on user-interface of *any* kind. 
Tests listed -# here are not suitable for unattended testing. -ui_demos = """GetSaveFileName print_desktop win32cred_demo win32gui_demo - win32gui_dialog win32gui_menu win32gui_taskbar - win32rcparser_demo winprocess win32console_demo - win32clipboard_bitmapdemo - win32gui_devicenotify - NetValidatePasswordPolicy""".split() -# Other demos known as 'bad' (or at least highly unlikely to work) -# cerapi: no CE module is built (CE via pywin32 appears dead) -# desktopmanager: hangs (well, hangs for 60secs or so...) -# EvtSubscribe_*: must be run together: -# SystemParametersInfo: a couple of the params cause markh to hang, and there's -# no great reason to adjust (twice!) all those system settings! -bad_demos = """cerapi desktopmanager win32comport_demo - EvtSubscribe_pull EvtSubscribe_push - SystemParametersInfo - """.split() - -argvs = { - "rastest": ("-l",), -} - -no_user_interaction = True - -# re to pull apart an exception line into the exception type and the args. -re_exception = re.compile("([a-zA-Z0-9_.]*): (.*)$") - - -def find_exception_in_output(data): - have_traceback = False - for line in data.splitlines(): - line = line.decode("ascii") # not sure what the correct encoding is... - if line.startswith("Traceback ("): - have_traceback = True - continue - if line.startswith(" "): - continue - if have_traceback: - # first line not starting with a space since the traceback. - # must be the exception! - m = re_exception.match(line) - if m: - exc_type, args = m.groups() - # get hacky - get the *real* exception object from the name. - bits = exc_type.split(".", 1) - if len(bits) > 1: - mod = __import__(bits[0]) - exc = getattr(mod, bits[1]) - else: - # probably builtin - exc = eval(bits[0]) - else: - # hrm - probably just an exception with no args - try: - exc = eval(line.strip()) - args = "()" - except: - return None - # try and turn the args into real args. 
- try: - args = eval(args) - except: - pass - if not isinstance(args, tuple): - args = (args,) - # try and instantiate the exception. - try: - ret = exc(*args) - except: - ret = None - return ret - # apparently not - keep looking... - have_traceback = False - - -class TestRunner: - def __init__(self, argv): - self.argv = argv - self.__name__ = "Test Runner for cmdline {}".format(argv) - - def __call__(self): - import subprocess - - p = subprocess.Popen( - self.argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - output, _ = p.communicate() - rc = p.returncode - - if rc: - base = os.path.basename(self.argv[1]) - # See if we can detect and reconstruct an exception in the output. - reconstituted = find_exception_in_output(output) - if reconstituted is not None: - raise reconstituted - raise AssertionError( - "%s failed with exit code %s. Output is:\n%s" % (base, rc, output) - ) - - -def get_demo_tests(): - import win32api - - ret = [] - demo_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "Demos")) - assert os.path.isdir(demo_dir), demo_dir - for name in os.listdir(demo_dir): - base, ext = os.path.splitext(name) - if base in ui_demos and no_user_interaction: - continue - # Skip any other files than .py and bad tests in any case - if ext != ".py" or base in bad_demos: - continue - argv = (sys.executable, os.path.join(demo_dir, base + ".py")) + argvs.get( - base, () - ) - ret.append( - unittest.FunctionTestCase( - TestRunner(argv), description="win32/demos/" + name - ) - ) - return ret - - -def import_all(): - # Some hacks for import order - dde depends on win32ui - try: - import win32ui - except ImportError: - pass # 'what-ev-a....' 
- - import win32api - - dir = os.path.dirname(win32api.__file__) - num = 0 - is_debug = os.path.basename(win32api.__file__).endswith("_d") - for name in os.listdir(dir): - base, ext = os.path.splitext(name) - # handle `modname.cp310-win_amd64.pyd` etc - base = base.split(".")[0] - if ( - (ext == ".pyd") - and name != "_winxptheme.pyd" - and ( - is_debug - and base.endswith("_d") - or not is_debug - and not base.endswith("_d") - ) - ): - try: - __import__(base) - except: - print("FAILED to import", name) - raise - num += 1 - - -def suite(): - # Loop over all .py files here, except me :) - try: - me = __file__ - except NameError: - me = sys.argv[0] - me = os.path.abspath(me) - files = os.listdir(os.path.dirname(me)) - suite = unittest.TestSuite() - suite.addTest(unittest.FunctionTestCase(import_all)) - for file in files: - base, ext = os.path.splitext(file) - if ext == ".py" and os.path.basename(me) != file: - try: - mod = __import__(base) - except: - print("FAILED to import test module %r" % base) - traceback.print_exc() - continue - if hasattr(mod, "suite"): - test = mod.suite() - else: - test = unittest.defaultTestLoader.loadTestsFromModule(mod) - suite.addTest(test) - for test in get_demo_tests(): - suite.addTest(test) - return suite - - -class CustomLoader(pywin32_testutil.TestLoader): - def loadTestsFromModule(self, module): - return self.fixupTestsForLeakTests(suite()) - - -if __name__ == "__main__": - import argparse - - parser = argparse.ArgumentParser(description="Test runner for PyWin32/win32") - parser.add_argument( - "-no-user-interaction", - default=False, - action="store_true", - help="(This is now the default - use `-user-interaction` to include them)", - ) - - parser.add_argument( - "-user-interaction", - action="store_true", - help="Include tests which require user interaction", - ) - - parsed_args, remains = parser.parse_known_args() - - if parsed_args.no_user_interaction: - print( - "Note: -no-user-interaction is now the default, run with 
`-user-interaction` to include them." - ) - - no_user_interaction = not parsed_args.user_interaction - - sys.argv = [sys.argv[0]] + remains - - pywin32_testutil.testmain(testLoader=CustomLoader()) diff --git a/lib/win32/test/win32rcparser/python.bmp b/lib/win32/test/win32rcparser/python.bmp deleted file mode 100644 index f6747697..00000000 Binary files a/lib/win32/test/win32rcparser/python.bmp and /dev/null differ diff --git a/lib/win32/test/win32rcparser/python.ico b/lib/win32/test/win32rcparser/python.ico deleted file mode 100644 index f714eea4..00000000 Binary files a/lib/win32/test/win32rcparser/python.ico and /dev/null differ diff --git a/lib/win32/test/win32rcparser/test.h b/lib/win32/test/win32rcparser/test.h deleted file mode 100644 index 0d19cefe..00000000 --- a/lib/win32/test/win32rcparser/test.h +++ /dev/null @@ -1,46 +0,0 @@ -//{{NO_DEPENDENCIES}} -// Microsoft Developer Studio generated include file. -// Used by test.rc -// -#define IDS_TEST_STRING1 51 -#define IDS_TEST_STRING2 52 -#define IDS_TEST_STRING3 53 -#define IDS_TEST_STRING4 54 -#define IDS_TEST_STRING5 55 -#define IDS_TEST_STRING6 56 -#define IDS_TEST_STRING7 57 -#define IDD_TEST_DIALOG1 101 -#define IDD_TEST_DIALOG2 102 -#define IDB_PYTHON 103 -#define IDI_PYTHON 105 -#define IDD_TEST_DIALOG3 105 -#define IDC_EDIT1 1000 -#define IDC_CHECK1 1001 -#define IDC_EDIT2 1001 -#define IDC_COMBO1 1002 -#define IDC_SPIN1 1003 -#define IDC_PROGRESS1 1004 -#define IDC_SLIDER1 1005 -#define IDC_LIST1 1006 -#define IDC_TREE1 1007 -#define IDC_TAB1 1008 -#define IDC_ANIMATE1 1009 -#define IDC_RICHEDIT1 1010 -#define IDC_DATETIMEPICKER1 1011 -#define IDC_MONTHCALENDAR1 1012 -#define IDC_SCROLLBAR1 1013 -#define IDC_SCROLLBAR2 1014 -#define IDC_LIST2 1015 -#define IDC_HELLO 1016 -#define IDC_HELLO2 1017 - -// Next default values for new objects -// -#ifdef APSTUDIO_INVOKED -#ifndef APSTUDIO_READONLY_SYMBOLS -#define _APS_NEXT_RESOURCE_VALUE 107 -#define _APS_NEXT_COMMAND_VALUE 40002 -#define 
_APS_NEXT_CONTROL_VALUE 1018 -#define _APS_NEXT_SYMED_VALUE 101 -#endif -#endif diff --git a/lib/win32/test/win32rcparser/test.rc b/lib/win32/test/win32rcparser/test.rc deleted file mode 100644 index 9e4c7080..00000000 --- a/lib/win32/test/win32rcparser/test.rc +++ /dev/null @@ -1,216 +0,0 @@ -//Microsoft Developer Studio generated resource script. -// -#include "test.h" - -#define APSTUDIO_READONLY_SYMBOLS -///////////////////////////////////////////////////////////////////////////// -// -// Generated from the TEXTINCLUDE 2 resource. -// -#include "afxres.h" - -///////////////////////////////////////////////////////////////////////////// -#undef APSTUDIO_READONLY_SYMBOLS - -///////////////////////////////////////////////////////////////////////////// -// English (Australia) resources - -#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENA) -#ifdef _WIN32 -LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_AUS -#pragma code_page(1252) -#endif //_WIN32 - -#ifdef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// TEXTINCLUDE -// - -1 TEXTINCLUDE DISCARDABLE -BEGIN - "test.h\0" -END - -2 TEXTINCLUDE DISCARDABLE -BEGIN - "#include ""afxres.h""\r\n" - "\0" -END - -3 TEXTINCLUDE DISCARDABLE -BEGIN - "\r\n" - "\0" -END - -#endif // APSTUDIO_INVOKED - - -///////////////////////////////////////////////////////////////////////////// -// -// Dialog -// - -IDD_TEST_DIALOG1 DIALOG DISCARDABLE 0, 0, 186, 95 -STYLE DS_MODALFRAME | WS_POPUP | WS_CAPTION | WS_SYSMENU -CAPTION "Test Dialog" -FONT 8, "MS Sans Serif" -BEGIN - DEFPUSHBUTTON "OK",IDOK,129,7,50,14 - PUSHBUTTON "Cancel",IDCANCEL,129,24,50,14 - ICON IDI_PYTHON,IDC_STATIC,142,47,21,20 - LTEXT "An icon",IDC_STATIC,140,70,34,9 -END - -IDD_TEST_DIALOG2 DIALOG DISCARDABLE 0, 0, 186, 95 -STYLE DS_MODALFRAME | WS_POPUP | WS_CAPTION | WS_SYSMENU -CAPTION "Test Dialog" -FONT 8, "MS Sans Serif" -BEGIN - DEFPUSHBUTTON "OK",IDOK,129,7,50,14 - PUSHBUTTON "Cancel",IDCANCEL,129,24,50,14,NOT 
WS_TABSTOP - CONTROL 103,IDC_STATIC,"Static",SS_BITMAP,139,49,32,32 - LTEXT "A bitmap",IDC_STATIC,135,72,34,9 - EDITTEXT IDC_EDIT1,59,7,59,14,ES_AUTOHSCROLL - EDITTEXT IDC_EDIT2,59,31,60,15,ES_AUTOHSCROLL | NOT WS_TABSTOP - LTEXT "Tabstop",IDC_STATIC,7,9,43,10 - LTEXT "Not Tabstop",IDC_STATIC,7,33,43,10 -END - -IDD_TEST_DIALOG3 DIALOGEX 0, 0, 232, 310 -STYLE DS_MODALFRAME | WS_POPUP | WS_CAPTION | WS_SYSMENU -CAPTION "Dialog" -FONT 8, "MS Sans Serif", 0, 0, 0x1 -BEGIN - GROUPBOX "Frame",IDC_STATIC,7,7,218,41 - LTEXT "Left Static",IDC_STATIC,16,17,73,11 - EDITTEXT IDC_EDIT1,103,15,112,12,ES_AUTOHSCROLL - LTEXT "Right Static",IDC_STATIC,16,30,73,11,0,WS_EX_RIGHT - CONTROL "",IDC_RICHEDIT1,"RICHEDIT",ES_AUTOHSCROLL | WS_BORDER | - WS_TABSTOP,103,31,113,14 - CONTROL "Check1",IDC_CHECK1,"Button",BS_AUTOCHECKBOX | - WS_TABSTOP,7,52,68,12 - COMBOBOX IDC_COMBO1,85,52,82,35,CBS_DROPDOWNLIST | CBS_SORT | - WS_VSCROLL | WS_TABSTOP - CONTROL "Spin1",IDC_SPIN1,"msctls_updown32",UDS_ARROWKEYS,7,71, - 14,22 - CONTROL "Progress1",IDC_PROGRESS1,"msctls_progress32",WS_BORDER, - 39,72,153,13 - SCROLLBAR IDC_SCROLLBAR2,207,55,13,57,SBS_VERT - CONTROL "Slider1",IDC_SLIDER1,"msctls_trackbar32",TBS_BOTH | - TBS_NOTICKS | WS_TABSTOP,35,91,159,7 - SCROLLBAR IDC_SCROLLBAR1,37,102,155,11 - CONTROL "Tab1",IDC_TAB1,"SysTabControl32",0x0,7,120,217,43 - CONTROL "Animate1",IDC_ANIMATE1,"SysAnimate32",WS_BORDER | - WS_TABSTOP,7,171,46,42 - CONTROL "List1",IDC_LIST1,"SysListView32",WS_BORDER | WS_TABSTOP, - 63,171,53,43 - CONTROL "Tree1",IDC_TREE1,"SysTreeView32",WS_BORDER | WS_TABSTOP, - 126,171,50,43 - CONTROL "MonthCalendar1",IDC_MONTHCALENDAR1,"SysMonthCal32", - MCS_NOTODAY | WS_TABSTOP,7,219,140,84 - CONTROL "DateTimePicker1",IDC_DATETIMEPICKER1,"SysDateTimePick32", - DTS_RIGHTALIGN | WS_TABSTOP,174,221,51,15 - DEFPUSHBUTTON "OK",IDOK,175,289,50,14 - PUSHBUTTON "Hello",IDC_HELLO,175,271,50,14 - PUSHBUTTON "Hello",IDC_HELLO2,175,240,50,26,BS_ICON - LISTBOX IDC_LIST2,184,171,40,45,LBS_SORT | 
LBS_NOINTEGRALHEIGHT | - WS_VSCROLL | WS_TABSTOP -END - - -///////////////////////////////////////////////////////////////////////////// -// -// DESIGNINFO -// - -#ifdef APSTUDIO_INVOKED -GUIDELINES DESIGNINFO DISCARDABLE -BEGIN - IDD_TEST_DIALOG1, DIALOG - BEGIN - LEFTMARGIN, 7 - RIGHTMARGIN, 179 - TOPMARGIN, 7 - BOTTOMMARGIN, 88 - END - - IDD_TEST_DIALOG2, DIALOG - BEGIN - LEFTMARGIN, 7 - RIGHTMARGIN, 179 - TOPMARGIN, 7 - BOTTOMMARGIN, 88 - END - - IDD_TEST_DIALOG3, DIALOG - BEGIN - LEFTMARGIN, 7 - RIGHTMARGIN, 225 - TOPMARGIN, 7 - BOTTOMMARGIN, 303 - END -END -#endif // APSTUDIO_INVOKED - - -///////////////////////////////////////////////////////////////////////////// -// -// Icon -// - -// Icon with lowest ID value placed first to ensure application icon -// remains consistent on all systems. -IDI_PYTHON ICON DISCARDABLE "python.ico" - -///////////////////////////////////////////////////////////////////////////// -// -// Bitmap -// - -IDB_PYTHON BITMAP DISCARDABLE "python.bmp" - -///////////////////////////////////////////////////////////////////////////// -// -// Dialog Info -// - -IDD_TEST_DIALOG3 DLGINIT -BEGIN - IDC_COMBO1, 0x403, 6, 0 -0x7449, 0x6d65, 0x0031, - IDC_COMBO1, 0x403, 6, 0 -0x7449, 0x6d65, 0x0032, - 0 -END - - -///////////////////////////////////////////////////////////////////////////// -// -// String Table -// - -STRINGTABLE DISCARDABLE -BEGIN - IDS_TEST_STRING1 "Test ""quoted"" string" - IDS_TEST_STRING2 "Test string" - IDS_TEST_STRING3 "String with single "" quote" - IDS_TEST_STRING4 "Test 'single quoted' string" -END - -#endif // English (Australia) resources -///////////////////////////////////////////////////////////////////////////// - - - -#ifndef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// Generated from the TEXTINCLUDE 3 resource. 
-// - - -///////////////////////////////////////////////////////////////////////////// -#endif // not APSTUDIO_INVOKED - diff --git a/lib/win32com/HTML/GeneratedSupport.html b/lib/win32com/HTML/GeneratedSupport.html deleted file mode 100644 index b6fa50f7..00000000 --- a/lib/win32com/HTML/GeneratedSupport.html +++ /dev/null @@ -1,104 +0,0 @@ - - - - -Generated Python COM Support - - - - - - -

Python and COM - Blowing the others away

-

Generated Python COM Support

-

This file describes how the Python COM extensions support "generated files". The information contained here is for expert Python users, and people who need to take advantage of the advanced features of the support. More general information is available in the Quick Start to Client Side COM documentation.

-

Introduction

-

Generated Python COM support means that a .py file exists behind a particular COM object. This .py file is created by a generation process from a COM type library.

-

This documentation talks about the process of the creation of the .py files.

-

Design Goals

-

The main design goal is that the Python programmer need not know much about the type library they wish to work with. They need not know the name of a specific Python module to use a type library. COM uses an IID, version and LCID to identify a type library. Therefore, the Python programmer only need know this information to obtain a Python module.

-

How to generate support files

-

Support files can be generated either "off-line" by the makepy utility, or in custom Python code.

-

Using makepy is in many ways far simpler - you simply pick the type library and you are ready to go! The Quick Start to Client Side COM documentation describes this process.

-

Often however, you will want to use code to ensure the type library has been processed. This document describes that process.

-

Usage

-

The win32com.client.gencache module implements all functionality. As described above, if you wish to generate support from code, you need to know the IID, version and LCID of the type library.

-

The following functions are defined. The best examples of their usage is probably in the Pythonwin OCX Demos, and the COM Test Suite (particularly testMSOffice.py)

-

Note that the gencache.py file supports being run from the command line, and provides some utilities for managing the cache. Run the file to see usage options.

-

Using makepy to help with the runtime generation

-

makepy supports a "-i" option, to print information about a type library. When you select a type library, makepy will print out 2 lines of code that you cant paste into your application. This will then allow your module to generate the makepy .py file at runtime, but will only take you a few seconds!

-

win32com.client.gencache functions

-

def MakeModuleForTypelib(typelibCLSID, lcid, major, minor, progressInstance = None):

-

Generate support for a type library.

-

Given the IID, LCID and version information for a type library, generate and import the necessary support files.

-

Returns

-

The Python module. No exceptions are caught.

-

Params

-

typelibCLSID
-IID of the type library.

-

major
-Integer major version.

-

minor
-Integer minor version.

-

lcid
-Integer LCID for the library.

-

progressInstance
-A class instance to use as the progress indicator, or None to use the default GUI one. 

-

def EnsureModule(typelibCLSID, lcid, major, minor, progressInstance = None):

-

Ensure Python support is loaded for a type library, generating if necessary.

-

Given the IID, LCID and version information for a type library, check and if necessary generate, then import the necessary support files.

-

Returns:

-

The Python module. No exceptions are caught during the generate process.

-

Params

-

typelibCLSID
-IID of the type library.

-

major
-Integer major version.

-

minor
-Integer minor version.

-

lcid
-Integer LCID for the library.

-

progressInstance
-A class instance to use as the progress indicator, or None to use the default GUI one. 

-

 

-

def GetClassForProgID(progid):

-

Get a Python class for a Program ID

-

Given a Program ID, return a Python class which wraps the COM object

-

Returns

-

The Python class, or None if no module is available.

-

Params

-

progid
-
A COM ProgramID or IID (eg, "Word.Application")

-

 

-

def GetModuleForProgID(progid):

-

Get a Python module for a Program ID

-

Given a Program ID, return a Python module which contains the class which wraps the COM object.

-

Returns

-

The Python module, or None if no module is available.

-

Params:

-

progid
-
A COM ProgramID or IID (eg, "Word.Application")

-

 

-

def GetModuleForCLSID(clsid):

-

Get a Python module for a CLSID

-

Given a CLSID, return a Python module which contains the class which wraps the COM object.

-

Returns

-

The Python module, or None if no module is available.

-

Params

-

progid
-
A COM CLSID (ie, not the description)

-

 

-

def GetModuleForTypelib(typelibCLSID, lcid, major, minor):

-

Get a Python module for a type library ID

-

Returns

-

An imported Python module, else None

-

Params:

-

typelibCLSID
-IID of the type library.

-

major
-Integer major version.

-

minor
-Integer minor version

-

lcid
-Integer LCID for the library.

- diff --git a/lib/win32com/HTML/PythonCOM.html b/lib/win32com/HTML/PythonCOM.html deleted file mode 100644 index ed48a897..00000000 --- a/lib/win32com/HTML/PythonCOM.html +++ /dev/null @@ -1,90 +0,0 @@ - - - - -Untitled - - - - -

Python and COM - Blowing the others away

-

Python and COM - Implementation Details

-

Introduction

-

This document describes the technical implementation of the COM support in Python. It is primarily concerned with the underlying C++ interface to COM, although general Python issues are touched.

-

This document is targeted at people who wish to maintain/enhance the standard COM support (typically by writing extension modules). For information on using Python and COM from a Python programmers perspective, please see the documentation index.

-

General COM Support.

-

COM support in Python can be broken into 2 general areas - C++ support, and Python support. C++ support exists in the core PythonCOM module (plus any PythonCOM extension modules). Python support exists in the .py files that accompany the core module.

-

Naming Conventions

-

The naming conventions used by Python code will be:

- -
    -
  • The Python "New Import" (ni) module will be used, allowing packages, or nested modules.
  • -
  • The package name will be "win32com".
  • -
  • The core module name will be "pythoncom" (ie, "win32com.pythoncom")
- -

The rest of the naming conventions are yet to be worked out.

-

Core COM support.

-

This section is involved with the core C++ support in "pythoncom".

-

The organisation of PythonCOM support falls into 3 discrete areas.

-

COM Client Support

-

This is the ability to manipulate other COM objects via their exposed interface. This includes use of IDispatch (eg using Python to start Microsoft Word, open a file, and print it.) but also all client side IUnknown derived objects fall into this category, including ITypeLib and IConnectionPoint support.

-

COM Server Support

-

This is ability for Python to create COM Servers, which can be manipulated by another COM client. This includes server side IDispatch (eg, Visual Basic starting a Python interpreter, and asking it to evaluate some code) but also all supported server side IUnknown derived classes.

-

Python/COM type and value conversion

-

This is internal code used by the above areas to managed the conversion to and from Python/COM types and values. This includes code to convert an arbitrary Python object into a COM variant, manages return types, and a few other helpers.

-

COM Structures and Python Types

-

OLE supports many C level structures for the COM API, which must be mapped to Python.

-

VARIANT

-

Variants are never exposed as such to Python programs. The internal framework always converts all variants to and from Python types. In some cases, type descriptions may be used, which force specific mappings, although in general the automatic conversion works fine.

-

TYPEDESC

-

A tuple, containing the elements of the C union. This union will be correctly decoded by the support code.

-

ELEMDESC

-

A tuple of TYPEDESC and PARAMDESC objects.

-

FUNCDESC

-

A funcdesc is a large and unwieldy tuple. Documentation to be supplied.

-

IID/CLSID

-

A native IID in Python is a special type, defined in pythoncom. Whenever a CLSID/IID is required, typically either an object, a tuple of type "iii(iiiiiiii)" or string can be used.

-

Helper functions are available to convert to and from IID/CLSID and strings.

-

COM Framework

-

Both client and server side support have a specific framework in place to assist in supporting the widest possible set of interfaces. The framework allows external extension DLLs to be written, which extend the interfaces available to the Python user.

-

This allows the core PythonCOM module to support a wide set of common interfaces, and other extensions to support anything obscure.

-

Client Framework

-

QueryInterface and Types

-

When the only support required by Python is IDispatch, everything is simple - every object returned from QueryInterface is a PyIDispatch object. But this does not extend to other types, such as ITypeLib, IConnectionPoint etc., which are required for full COM support.

-

For example, consider the following C++ psuedo-code:

-

IConnectionPoint *conPt;
-someIDispatch->QueryInterface(IID_IConnectionPoint, (void **)&conPt);
-// Note the IID_ and type of the * could be anything!

-

This cast, and knowledge of a specific IID_* to type must be simulated in Python.

-

Python/COM will therefore maintain a map of UID's to Python type objects. Whenever QueryInterface is called, Python will lookup this map, to determine if the object type is supported. If the object is supported, then an object of that type will be returned. If the object is not supported, then a PyIUnknown object will be returned.

-

Note that PyIDispatch will be supported by the core engine. Therefore:

-

>>> disp=someobj.QueryInterface(win32com.IID_Dispatch)

-

will return a PyIDispatch object, whereas

-

>>> unk=someobj.QueryInterface(SomeUnknownIID) # returns PyIUnknown
->>> disp=unk.QueryInterface(win32com.IID_Dispatch)
->>> unk.Release() # Clean up now, rather than waiting for unk death.

-

Is needed to convert to an IDispatch object.

-

Core Support

-

The core COM support module will support the IUnknown, IDispatch, ITypeInfo, ITypeLib and IConnectionPointContainer and IConnectionPoint interfaces. This implies the core COM module supports 6 different OLE client object types, mapped to the 6 IID_*'s representing the objects. (The IConnection* objects allow for Python to repsond to COM events)

-

A psuedo-inheritance scheme is used. The Python types are all derived from the Python IUnknown type (PyIUnknown). Therefore all IUnknown methods are automatically available to all types, just as it should be. The PyIUnknown type manages all object reference counts and destruction.

-

Extensibility

-

To provide the above functionality, a Python map is provided, which maps from a GUID to a Python type object.

-

The advantage of this scheme is an external extension modules can hook into the core support. For example, imagine the following code:

-

>>> import myextracom # external .pyd supporting some interface.
-# myextracom.pyd will do the equivilent of

-

# pythoncom.mapSupportedTypes(myextracom.IID_Extra, myextracom.ExtraType)
->>> someobj.QueryInterface(myextracom.IID_Extra)

-

Would correctly return an object defined in the extension module.

-

Server Framework

-

General Framework

-

A server framework has been put in place which provides the following features:

-

All Interfaces provide VTBL support - this means that the Servers exposed by Python are callable from C++ and other compiled languages.

-

Supports full "inproc" servers. This means that no external .EXE is needed making Python COM servers available in almost all cases.

-

An extensible model which allows for extension modules to provide server support for interfaces defined in that module. A map is provided which maps from a GUID to a function pointer which creates the interface.

-

Python and Variant Types Conversion

-

In general, Python and COM are both "type-less". COM is type-less via the VARIANT object, which supports many types, and Python is type-less due to its object model.

-

There are a number of areas where Python and OLE clash.

-

Parameters and conversions.

-

For simple calls, there are 2 helpers available which will convert to and from PyObjects and VARIANTS. The call to convert a Python object to a VARIANT is simple in that it returns a VARIANT of the most appropriate type for the Python object - ie, the type of the Python object determines the resulting VARIANT type.

-

There are also more complex conversion routines available, wrapped in a C++ helper class. Typically, these helpers are used whenever a specific variant type is known (eg, when an ITypeInfo is available for the object being used). In this case, all efforts are made to convert the Python type to the requested variant type - ie, in this situation, the VARIANT type determines how the Python object is coerced. In addition, this code supports the use of "ByRef" and pointer paramaters, providing and freeing any buffers necessary for the call.

- diff --git a/lib/win32com/HTML/QuickStartClientCom.html b/lib/win32com/HTML/QuickStartClientCom.html deleted file mode 100644 index f3a02742..00000000 --- a/lib/win32com/HTML/QuickStartClientCom.html +++ /dev/null @@ -1,82 +0,0 @@ - - - - -Quick Start to Client side COM and Python - - - - -

Quick Start to Client side COM and Python

-

Introduction

-

This documents how to quickly start using COM from Python. It is not a thorough discussion of the COM system, or of the concepts introduced by COM.

-

Other good information on COM can be found in various conference tutorials - please see the collection of Mark's conference tutorials

-

For information on implementing COM objects using Python, please see a Quick Start to Server side COM and Python

-

In this document we discuss the following topics:

- - - -

Quick Start

-

To use a COM object from Python

-

import win32com.client
-o = win32com.client.Dispatch("Object.Name")
-o.Method()
-o.property = "New Value"
-print o.property

-

Example

-

o = win32com.client.Dispatch("Excel.Application")
-o.Visible = 1
-o.Workbooks.Add() # for office 97 – 95 a bit different!
-o.Cells(1,1).Value = "Hello"

-

And we will see the word "Hello" appear in the top cell.

-

How do I know which methods and properties are available?

-

Good question. This is hard! You need to use the documentation with the products, or possibly a COM browser. Note however that COM browsers typically rely on these objects registering themselves in certain ways, and many objects to not do this. You are just expected to know.

-

The Python COM browser

-

PythonCOM comes with a basic COM browser that may show you the information you need. Note that this package requires Pythonwin (ie, the MFC GUI environment) to be installed for this to work.

-

There are far better COM browsers available - I tend to use the one that comes with MSVC, or this one!

-

To run the browser, simply select it from the Pythonwin Tools menu, or double-click on the file win32com\client\combrowse.py

-

Static Dispatch (or Type Safe) objects

-

In the above examples, if we printed the 'repr(o)' object above, it would have resulted in

-

<COMObject Excel.Application>

-

This reflects that the object is a generic COM object that Python has no special knowledge of (other than the name you used to create it!). This is known as a "dynamic dispatch" object, as all knowledge is built dynamically. The win32com package also has the concept of static dispatch objects, which gives Python up-front knowledge about the objects that it is working with (including arguments, argument types, etc)

-

In a nutshell, Static Dispatch involves the generation of a .py file that contains support for the specific object. For more overview information, please see the documentation references above.

-

The generation and management of the .py files is somewhat automatic, and involves one of 2 steps:

- -
    -
  • Using makepy.py to select a COM library. This process is very similar to Visual Basic, where you select from a list of all objects installed on your system, and once selected the objects are magically useable.
- -

or

- -
    -
  • Use explicit code to check for, and possibly generate, support at run-time. This is very powerful, as it allows the developer to avoid ensuring the user has selected the appropriate type library. This option is extremely powerful for OCX users, as it allows Python code to sub-class an OCX control, but the actual sub-class can be generated at run-time. Use makepy.py with a -i option to see how to include this support in your Python code.
- -

The win32com.client.gencache module manages these generated files. This module has some documentation of its own, but you probably don't need to know the gory details!

-

How do I get at the generated module?

-

You will notice that the generated file name is long and cryptic - obviously not designed for humans to work with! So how do you get at the module object for the generated code?

-

Hopefully, the answer is you shouldn't need to. All generated file support is generally available directly via win32com.client.Dispatch and win32com.client.constants. But should you ever really need the Python module object, the win32com.client.gencache module has functions specifically for this. The functions GetModuleForCLSID and GetModuleForProgID both return Python module objects that you can use in your code. See the docstrings in the gencache code for more details.

-

To generate Python Sources supporting a COM object

-

Example using Microsoft Office 97.

-

Either:

- -
    -
  • Run 'win32com\client\makepy.py' (eg, run it from the command window, or double-click on it) and a list will be presented. Select the Type Library 'Microsoft Word 8.0 Object Library'
  • -
  • From a command prompt, run the command 'makepy.py "Microsoft Word 8.0 Object Library"' (include the double quotes). This simply avoids the selection process.
  • -
  • If you desire, you can also use explicit code to generate it just before you need to use it at runtime. Run 'makepy.py -i "Microsoft Word 8.0 Object Library"' (include the double quotes) to see how to do this.
- -

And that is it! Nothing more needed. No special import statements needed! Now, you simply need say

-

>>> import win32com.client

-

>>> w=win32com.client.Dispatch("Word.Application")

-

>>> w.Visible=1

-

>>> w

-

<win32com.gen_py.Microsoft Word 8.0 Object Library._Application>

-

Note that now Python knows the explicit type of the object.

-

Using COM Constants

-

Makepy automatically installs all generated constants from a type library in an object called win32com.clients.constants. You do not need to do anything special to make these constants work, other than create the object itself (ie, in the example above, the constants relating to Word would automatically be available after the w=win32com.client.Dispatch("Word.Application") statement.

-

For example, immediately after executing the code above, you could execute the following:

-

>>> w.WindowState = win32com.client.constants.wdWindowStateMinimize

-

and Word will Minimize.

- diff --git a/lib/win32com/HTML/QuickStartServerCom.html b/lib/win32com/HTML/QuickStartServerCom.html deleted file mode 100644 index b956daaf..00000000 --- a/lib/win32com/HTML/QuickStartServerCom.html +++ /dev/null @@ -1,195 +0,0 @@ - - - - -Quick Start to Server Side COM and Python - - - - - - -

Quick Start to Server side COM and Python

-

Introduction

-

This documents how to quickly start implementing COM objects in Python. It is not a thorough discussion of the COM system, or of the concepts introduced by COM.

-

For more details information on Python and COM, please see the COM Tutorial given by Greg Stein and Mark Hammond at SPAM 6 (HTML format) or download the same tutorial in PowerPoint format.

-

For information on using external COM objects from Python, please see a Quick Start to Client side COM and Python.

-

In this document we discuss the core functionality, registering the server, testing the class, debugging the class, exception handling and server policies (phew!)

-

Implement the core functionality

-

Implement a stand-alone Python class with your functionality

-

class HelloWorld:

- - -

def __init__(self):

- - -

self.softspace = 1

-

self.noCalls = 0

-
- -

def Hello(self, who):

- - -

self.noCalls = self.noCalls + 1

-

# insert "softspace" number of spaces

-

return "Hello" + " " * self.softspace + who

-
-
-
- -

This is obviously a very simple server. In particular, custom error handling would be needed for a production class server. In addition, there are some contrived properties just for demonstration purposes.

-

Make Unicode concessions

-

At this stage, Python and Unicode don’t really work well together. All strings which come from COM will actually be Unicode objects rather than string objects.

-

To make this code work in a COM environment, the last line of the "Hello" method must become:

- - - - -

return "Hello" + " " * self.softspace + str(who)

-
-
-
- -

Note the conversion of the "who" to "str(who)". This forces the Unicode object into a native Python string object.

-

For details on how to debug COM Servers to find this sort of error, please see debugging the class

-

Annotate the class with win32com specific attributes

-

This is not a complete list of names, simply a list of properties used by this sample.

- - - - - - - - - - - - - -
-

Property Name

-

Description

-

_public_methods_

-

List of all method names exposed to remote COM clients

-

_public_attrs_

-

List of all attribute names exposed to remote COM clients

-

_readonly_attrs_

-

List of all attributes which can be accessed, but not set.

- -

We change the class header to become:

-

class HelloWorld:

- - -

_public_methods_ = ['Hello']

-

_public_attrs_ = ['softspace', 'noCalls']

-

_readonly_attrs_ = ['noCalls']

-

def __init__(self):

-

[Same from here…]

-
- -

Registering and assigning a CLSID for the object

-

COM requires that all objects use a unique CLSID and be registered under a "user friendly" name. This documents the process.

-

Generating the CLSID

-

Microsoft Visual C++ comes with various tools for generating CLSID's, which are quite suitable. Alternatively, the pythoncom module exports the function CreateGuid() to generate these identifiers.

-

>>> import pythoncom
->>> print pythoncom.CreateGuid()
-{7CC9F362-486D-11D1-BB48-0000E838A65F}

-

Obviously the GUID that you get will be different than that displayed here.

-

Preparing for registration of the Class

-

The win32com package allows yet more annotations to be applied to a class, allowing registration to be effected with 2 lines in your source file. The registration annotations used by this sample are:

- - - - - - - - - - - - - - - - - - - -
-

Property Name

-

Description

-

_reg_clsid_

-

The CLSID of the COM object

-

_reg_progid_

-

The "program ID", or Name, of the COM Server. This is the name the user usually uses to instantiate the object

-

_reg_desc_

-

Optional: The description of the COM Server. Used primarily for COM browsers. If not specified, the _reg_progid_ is used as the description.

-

_reg_class_spec_

-

Optional: A string which represents how Python can create the class instance. The string is of format
-[package.subpackage.]module.class

-

The portion up to the class name must be valid for Python to "import", and the class portion must be a valid attribute in the specified class.

-

This is optional from build 124 of Pythoncom., and has been removed from this sample.

-

_reg_remove_keys_

-

Optional: A list of tuples of extra registry keys to be removed when uninstalling the server. Each tuple is of format ("key", root), where key is a string, and root is one of the win32con.HKEY_* constants (this item is optional, defaulting to HKEY_CLASSES_ROOT)

- -

Note there are quite a few other keys available. Also note that these annotations are not required - they just make registration simple. Helper functions in the module win32com.server.register allow you to explicitly specify each of these attributes without attaching them to the class.

-

The header of our class now becomes:

-

class HelloWorld:

- - -

_reg_clsid_ = "{7CC9F362-486D-11D1-BB48-0000E838A65F}"

-

_reg_desc_ = "Python Test COM Server"

-

_reg_progid_ = "Python.TestServer"

-

_public_methods_ = ['Hello']

-

[same from here]

-
- -

Registering the Class

-

The idiom that most Python COM Servers use is that they register themselves when run as a script (ie, when executed from the command line.) Thus the standard "if __name__=='__main___':" technique works well.

-

win32com.server.register contains a number of helper functions. The easiest to use is "UseCommandLine".

-

Registration becomes as simple as:

-

if __name__=='__main__':
- # ni only for 1.4!
- import ni, win32com.server.register
- win32com.server.register.UseCommandLine(HelloWorld)

-

Running the script will register our test server.

-

Testing our Class

-

For the purposes of this demonstration, we will test the class using Visual Basic. This code should run under any version of Visual Basic, including VBA found in Microsoft Office. Any COM compliant package could be used alternatively. VB has been used just to prove there is no "smoke and mirrors. For information on how to test the server using Python, please see the Quick Start to Client side COM documentation.

-

This is not a tutorial in VB. The code is just presented! Run it, and it will work!

-

Debugging the COM Server

-

When things go wrong in COM Servers, there is often nowhere useful for the Python traceback to go, even if such a traceback is generated.

-

Rather than discuss how it works, I will just present the procedure to debug your server:

-

To register a debug version of your class, run the script (as above) but pass in a "--debug" parameter. Eg, for the server above, use the command line "testcomserver.py --debug".

-

To see the debug output generated (and any print statements you may choose to add!) you can simply select the "Remote Debug Trace Collector" from the Pythonwin Tools menu, or run the script "win32traceutil.py" from Windows Explorer or a Command Prompt.

-

Exception Handling

-

Servers need to be able to provide exception information to their client. In some cases, it may be a simple return code (such as E_NOTIMPL), but often it can contain much richer information, describing the error in detail, and even a help file and topic where more information can be found.

-

We use Python class based exceptions to provide this information. The COM framework will examine the exception, and look for certain known attributes. These attributes will be copied across to the COM exception, and passed back to the client.

-

The following attributes are supported, and correspond to the equivalent entry in the COM Exception structure:
-scode, code, description, source, helpfile and helpcontext

-

To make working with exceptions easier, there is a helper module "win32com.server.exception.py", which defines a single class. An example of its usage would be:

-

raise COMException(desc="Must be a string",scode=winerror.E_INVALIDARG,helpfile="myhelp.hlp",...)

-

(Note the COMException class supports (and translates) "desc" as a shortcut for "description", but the framework requires "description")

-

Server Policies

-

This is information about how it all hangs together. The casual COM author need not know this.

-

Whenever a Python Server needs to be created, the C++ framework first instantiates a "policy" object. This "policy" object is the gatekeeper for the COM Server - it is responsible for creating the underlying Python object that is the server (ie, your object), and also for translating the underlying COM requests for the object.

-

This policy object handles all of the underlying COM functionality. For example, COM requires all methods and properties to have unique numeric ID's associated with them. The policy object manages the creation of these ID's for the underlying Python methods and attributes. Similarly, when the client wishes to call a method with ID 123, the policy object translates this back to the actual method, and makes the call.

-

It should be noted that the operation of the "policy" object could be dictated by the Python object - the policy object has many defaults, but the actual Python class can always dictate its operation.

-

Default Policy attributes

-

The default policy object has a few special attributes that define how the object is exposed to COM. The example above shows the _public_methods_ attribute, but this section describes all such attributes in detail.

-
_public_methods_
-

Required list of strings, containing the names of all methods to be exposed to COM. It is possible this will be enhanced in the future (eg, possibly '*' will be recognised to say all methods, or some other ideas…)

-
_public_attrs_
-

Optional list of strings containing all attribute names to be exposed, both for reading and writing. The attribute names must be valid instance variables.

-
_readonly_attrs_
-

Optional list of strings defining the name of attributes exposed read-only.

-
_com_interfaces_
-

Optional list of IIDs exposed by this object. If this attribute is missing, IID_IDispatch is assumed (ie, if not supplied, the COM object will be created as a normal Automation object).

-

and actual instance attributes:

-

_dynamic_ : optional method

-

_value_ : optional attribute

-

_query_interface_ : optional method

-

_NewEnum : optional method

-

_Evaluate : optional method

- diff --git a/lib/win32com/HTML/docindex.html b/lib/win32com/HTML/docindex.html deleted file mode 100644 index fff74de8..00000000 --- a/lib/win32com/HTML/docindex.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - -win32com Documentation Index - - - - -

Python and COM - Blowing the others away

-

PythonCOM Documentation Index

-

The following documentation is available

-

A Quick Start to Client Side COM (including makepy)

-

A Quick Start to Server Side COM

-

Information on generated Python files (ie, what makepy generates)

-

An advanced VARIANT object which can give more control over parameter types

-

A brief description of the win32com package structure

-

Python COM Implementation documentation

-

Misc stuff I don’t know where to put anywhere else

-

ActiveX Scripting

-

ActiveX Scripting Demos

- diff --git a/lib/win32com/HTML/image/BTN_HomePage.gif b/lib/win32com/HTML/image/BTN_HomePage.gif deleted file mode 100644 index 5767fa0f..00000000 Binary files a/lib/win32com/HTML/image/BTN_HomePage.gif and /dev/null differ diff --git a/lib/win32com/HTML/image/BTN_ManualTop.gif b/lib/win32com/HTML/image/BTN_ManualTop.gif deleted file mode 100644 index 4200880a..00000000 Binary files a/lib/win32com/HTML/image/BTN_ManualTop.gif and /dev/null differ diff --git a/lib/win32com/HTML/image/BTN_NextPage.gif b/lib/win32com/HTML/image/BTN_NextPage.gif deleted file mode 100644 index 3d055fea..00000000 Binary files a/lib/win32com/HTML/image/BTN_NextPage.gif and /dev/null differ diff --git a/lib/win32com/HTML/image/BTN_PrevPage.gif b/lib/win32com/HTML/image/BTN_PrevPage.gif deleted file mode 100644 index 955eae6c..00000000 Binary files a/lib/win32com/HTML/image/BTN_PrevPage.gif and /dev/null differ diff --git a/lib/win32com/HTML/image/blank.gif b/lib/win32com/HTML/image/blank.gif deleted file mode 100644 index a58ecfec..00000000 Binary files a/lib/win32com/HTML/image/blank.gif and /dev/null differ diff --git a/lib/win32com/HTML/image/pycom_blowing.gif b/lib/win32com/HTML/image/pycom_blowing.gif deleted file mode 100644 index 0d65a292..00000000 Binary files a/lib/win32com/HTML/image/pycom_blowing.gif and /dev/null differ diff --git a/lib/win32com/HTML/image/pythoncom.gif b/lib/win32com/HTML/image/pythoncom.gif deleted file mode 100644 index 6b5020a1..00000000 Binary files a/lib/win32com/HTML/image/pythoncom.gif and /dev/null differ diff --git a/lib/win32com/HTML/image/www_icon.gif b/lib/win32com/HTML/image/www_icon.gif deleted file mode 100644 index 4239d2c3..00000000 Binary files a/lib/win32com/HTML/image/www_icon.gif and /dev/null differ diff --git a/lib/win32com/HTML/index.html b/lib/win32com/HTML/index.html deleted file mode 100644 index 2ab0176a..00000000 --- a/lib/win32com/HTML/index.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - -win32com - - - - - -

-

-

Python and COM

-

Introduction

-

Python has an excellent interface to COM (also known variously as OLE2, ActiveX, etc).

-

The Python COM package can be used to interface to almost any COM program (such as the MS-Office suite), write servers that can be hosted by any COM client (such as Visual Basic or C++), and has even been used to provide the core ActiveX Scripting Support.

- - - -
- - -

Documentation

-

Preliminary Active Scripting and Debugging documentation is available.

-

2 Quick-Start guides have been provided, which also contain other links. See the Quick Start for Client side COM and the Quick Start for Server side COM

-

-
- - diff --git a/lib/win32com/HTML/misc.html b/lib/win32com/HTML/misc.html deleted file mode 100644 index 4374d221..00000000 --- a/lib/win32com/HTML/misc.html +++ /dev/null @@ -1,18 +0,0 @@ - - - - -Misc win32com Stuff - - - - - - -

Misc stuff I don’t know where to put anywhere else

-

Client Side Dispatch

-

Using win32com.client.Dispatch automatically invokes all the win32com client side "smarts", including automatic usage of generated .py files etc.

-

If you wish to avoid that, and use truly "dynamic" objects (ie, there is generated .py support available, but you wish to avoid it), you can use win32com.client.dynamic.Dispatch

-

_print_details_() method
-If win32com.client.dynamic.Dispatch is used, the objects have a _print_details_() method available, which prints all relevant knowledge about an object (for example, all methods and properties). For objects that do not expose runtime type information, _print_details_ may not list anything.

- diff --git a/lib/win32com/HTML/package.html b/lib/win32com/HTML/package.html deleted file mode 100644 index e98f0875..00000000 --- a/lib/win32com/HTML/package.html +++ /dev/null @@ -1,37 +0,0 @@ - - - - -The win32com package - - - - -

Python and COM - Blowing the others away

-

The win32com package

-

This document describes the win32com package in general terms.

-

The COM support can be thought of as existing in 2 main portions - the C++ support code (the core PythonCOM module), and helper code, implemented in Python. The total package is known as "win32com".

-

The win32com support is stand-alone. It does not require Pythonwin.

-

The win32com package

-

To facilitate an orderly framework, the Python "ni" module has been used, and the entire package is known as "win32com". As is normal for such packages, win32com itself does not provide any functionality. Some of the modules are described below:

- -
    -
  • win32com.pythoncom - core C++ support.
    -This module is rarely used directly by programmers - instead the other "helper" modules are used, which themselves draw on the core pythoncom services.
  • -
  • win32com.client package
    -
    Support for COM clients used by Python. Some of the modules in this package allow for dynamic usage of COM clients, a module for generating .py files for certain COM servers, etc.
  • -
  • win32com.server package
    -
    Support for COM servers written in Python. The modules in this package provide most of the underlying framework for magically turning Python classes into COM servers, exposing the correct public methods, registering your server in the registry, etc.
  • -
  • win32com.axscript
    -
    ActiveX Scripting implementation for Python.
  • -
  • win32com.axdebug
    -
    Active Debugging implementation for Python
  • -
  • win32com.mapi
    -
    Utilities for working with MAPI and the Microsoft Exchange Server
- -

 

-

The pythoncom module

-

The pythoncom module is the underlying C++ support for all COM related objects. In general, Python programmers will not use this module directly, but use win32com helper classes and functions.

-

This module exposes a C++ like interface to COM - there are objects implemented in pythoncom that have methods "QueryInterface()", "Invoke()", just like the C++ API. If you are using COM in C++, you would not call a method directly, you would use pObject->Invoke( …, MethodId, argArray…). Similarly, if you are using pythoncom directly, you must also use the Invoke method to call an object's exposed method.

-

There are some Python wrappers for hiding this raw interface, meaning you should almost never need to use the pythoncom module directly. These helpers translate a "natural" looking interface (eg, obj.SomeMethod()) into the underlying Invoke call.

- diff --git a/lib/win32com/HTML/variant.html b/lib/win32com/HTML/variant.html deleted file mode 100644 index 2631203e..00000000 --- a/lib/win32com/HTML/variant.html +++ /dev/null @@ -1,162 +0,0 @@ - - - win32com.client.VARIANT - - - -

Introduction

-

-win32com attempts to provide a seamless COM interface and hide many COM -implementation details, including the use of COM VARIANT structures. This -means that in most cases, you just call a COM object using normal Python -objects as parameters and get back normal Python objects as results. -

- -

-However, in some cases this doesn't work very well, particularly when using -"dynamic" (aka late-bound) objects, or when using "makepy" (aka early-bound) -objects which only declare a parameter is a VARIANT. -

- -

-The win32com.client.VARIANT object is designed to overcome these -problems. -

- -

Drawbacks

-The primary issue with this approach is that the programmer must learn more -about COM VARIANTs than otherwise - they need to know concepts such as -variants being byref, holding arrays, or that some may hold 32bit -unsigned integers while others hold 64bit signed ints, and they need to -understand this in the context of a single method call. In short, this is -a relatively advanced feature. The good news though is that use of these -objects should never cause your program to hard-crash - the worst you should -expect are Python or COM exceptions being thrown. - -

The VARIANT object

- -The VARIANT object lives in win32com.client. The constructor -takes 2 parameters - the 'variant type' and the value. The 'variant type' is -an integer and can be one or more of the pythoncom.VT_* values, -possibly or'd together. - -

For example, to create a VARIANT object which defines a byref array of -32bit integers, you could use: - -

->>> from win32com.client import VARIANT
->>> import pythoncom
->>> v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_ARRAY | pythoncom.VT_I4,
-...             [1,2,3,4])
->>> v
-win32com.client.VARIANT(24579, [1, 2, 3, 4])
->>>
-
- -This variable can then be used whereever a COM VARIANT is expected. - -

Example usage with dynamic objects.

- -For this example we will use the COM object used for win32com testing, -PyCOMTest.PyCOMTest. This object defines a method which is -defined in IDL as: -
-HRESULT DoubleInOutString([in,out] BSTR *str);
-
- -As you can see, it takes a single string parameter which is also used as -an "out" parameter - the single parameter will be updated after the call. -The implementation of the method simply "doubles" the string. - -

If the object has a type-library, this method works fine with makepy -generated support. For example: - -

->>> from win32com.client.gencache import EnsureDispatch
->>> ob = EnsureDispatch("PyCOMTest.PyCOMTest")
->>> ob.DoubleInOutString("Hello")
-u'HelloHello'
->>>
-
- -However, if makepy support is not available the method does not work as -expected. For the next example we will use DumbDispatch to -simulate the object not having a type-library. - -
->>> import win32com.client.dynamic
->>> ob = win32com.client.dynamic.DumbDispatch("PyCOMTest.PyCOMTest")
->>> ob.DoubleInOutString("Hello")
->>>
-
- -As you can see, no result came back from the function. This is because -win32com has no type information available to use, so doesn't know the -parameter should be passed as a byref parameter. To work -around this, we can use the VARIANT object. - -

The following example explicitly creates a VARIANT object with a -variant type of a byref string and a value 'Hello'. After making the -call with this VARIANT the value is updated. - -

->>> import win32com.client.dynamic
->>> from win32com.client import VARIANT
->>> import pythoncom
->>> ob = win32com.client.dynamic.DumbDispatch("PyCOMTest.PyCOMTest")
->>> variant = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_BSTR, "Hello")
->>> variant.value # check the value before the call.
-'Hello'
->>> ob.DoubleInOutString(variant)
->>> variant.value
-u'HelloHello'
->>>
-
- -

Usage with generated objects

- -In most cases, objects with makepy support (ie, 'generated' objects) don't -need to use the VARIANT object - the type information means win32com can guess -the right thing to pass. However, in some cases the VARIANT object can still -be useful. - -Imagine a poorly specified object with IDL like: - -
-HRESULT DoSomething([in] VARIANT value);
-
- -But also imagine that the object has a limitation that if the parameter is an -integer, it must be a 32bit unsigned value - any other integer representation -will fail. - -

If you just pass a regular Python integer to this function, it will -generally be passed as a 32bit signed integer and given the limitation above, -will fail. The VARIANT object allows you to work around the limitation - just -create a variant object VARIANT(pythoncom.VT_UI4, int_value) and -pass that - the function will then be called with the explicit type you -specified and will succeed. - -

Note that you can not use a VARIANT object to override the types described -in a type library. If a makepy generated class specifies that a VT_UI2 is -expected, attempting to pass a VARIANT object will fail. In this case you -would need to hack around the problem. For example, imagine ob -was a COM object which a method called foo and you wanted to -override the type declaration for foo by passing a VARIANT. -You could do something like: - -

->>> import win32com.client.dynamic
->>> from win32com.client import VARIANT
->>> import pythoncom
->>> dumbob = win32com.client.dynamic.DumbDispatch(ob)
->>> variant = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_BSTR, "Hello")
->>> dumbob.foo(variant)
-
- -The code above converts the makepy supported ob into a -'dumb' (ie, non-makepy supported) version of the object, which will then -allow you to use VARIANT objects for the problematic methods. - - - diff --git a/lib/win32com/License.txt b/lib/win32com/License.txt deleted file mode 100644 index 6c1884e9..00000000 --- a/lib/win32com/License.txt +++ /dev/null @@ -1,30 +0,0 @@ -Unless stated in the specfic source file, this work is -Copyright (c) 1996-2008, Greg Stein and Mark Hammond. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -Redistributions of source code must retain the above copyright notice, -this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in -the documentation and/or other materials provided with the distribution. - -Neither names of Greg Stein, Mark Hammond nor the name of contributors may be used -to endorse or promote products derived from this software without -specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS -IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/lib/win32com/__init__.py b/lib/win32com/__init__.py deleted file mode 100644 index 4832d9e1..00000000 --- a/lib/win32com/__init__.py +++ /dev/null @@ -1,132 +0,0 @@ -# -# Initialization for the win32com package -# - -import os -import sys - -import pythoncom -import win32api - -# flag if we are in a "frozen" build. -_frozen = getattr(sys, "frozen", 1 == 0) -# pythoncom dumbly defaults this to zero - we believe sys.frozen over it. -if _frozen and not getattr(pythoncom, "frozen", 0): - pythoncom.frozen = sys.frozen - -# Add support for an external "COM Extensions" path. -# Concept is that you can register a seperate path to be used for -# COM extensions, outside of the win32com directory. These modules, however, -# look identical to win32com built-in modules. -# This is the technique that we use for the "standard" COM extensions. -# eg "win32com.mapi" or "win32com.axscript" both work, even though they do not -# live under the main win32com directory. -__gen_path__ = "" -__build_path__ = None -### TODO - Load _all_ \\Extensions subkeys - for now, we only read the default -### Modules will work if loaded into "win32comext" path. - - -def SetupEnvironment(): - HKEY_LOCAL_MACHINE = -2147483646 # Avoid pulling in win32con for just these... - KEY_QUERY_VALUE = 0x1 - # Open the root key once, as this is quite slow on NT. - try: - keyName = "SOFTWARE\\Python\\PythonCore\\%s\\PythonPath\\win32com" % sys.winver - key = win32api.RegOpenKey(HKEY_LOCAL_MACHINE, keyName, 0, KEY_QUERY_VALUE) - except (win32api.error, AttributeError): - key = None - - try: - found = 0 - if key is not None: - try: - __path__.append(win32api.RegQueryValue(key, "Extensions")) - found = 1 - except win32api.error: - # Nothing registered - pass - if not found: - try: - __path__.append( - win32api.GetFullPathName(__path__[0] + "\\..\\win32comext") - ) - except win32api.error: - # Give up in disgust! 
- pass - - # For the sake of developers, we also look up a "BuildPath" key - # If extension modules add support, we can load their .pyd's from a completely - # different directory (see the comments below) - try: - if key is not None: - global __build_path__ - __build_path__ = win32api.RegQueryValue(key, "BuildPath") - __path__.append(__build_path__) - except win32api.error: - # __build_path__ neednt be defined. - pass - global __gen_path__ - if key is not None: - try: - __gen_path__ = win32api.RegQueryValue(key, "GenPath") - except win32api.error: - pass - finally: - if key is not None: - key.Close() - - -# A Helper for developers. A sub-package's __init__ can call this help function, -# which allows the .pyd files for the extension to live in a special "Build" directory -# (which the win32com developers do!) -def __PackageSupportBuildPath__(package_path): - # See if we have a special directory for the binaries (for developers) - if not _frozen and __build_path__: - package_path.append(__build_path__) - - -if not _frozen: - SetupEnvironment() - -# If we don't have a special __gen_path__, see if we have a gen_py as a -# normal module and use that (ie, "win32com.gen_py" may already exist as -# a package. -if not __gen_path__: - try: - import win32com.gen_py - - # hrmph - 3.3 throws: TypeError: '_NamespacePath' object does not support indexing - # attempting to get __path__[0] - but I can't quickly repro this stand-alone. - # Work around it by using an iterator. - __gen_path__ = next(iter(sys.modules["win32com.gen_py"].__path__)) - except ImportError: - # If a win32com\gen_py directory already exists, then we use it - # (gencache doesn't insist it have an __init__, but our __import__ - # above does! - __gen_path__ = os.path.abspath(os.path.join(__path__[0], "gen_py")) - if not os.path.isdir(__gen_path__): - # We used to dynamically create a directory under win32com - - # but this sucks. 
If the dir doesn't already exist, we we - # create a version specific directory under the user temp - # directory. - __gen_path__ = os.path.join( - win32api.GetTempPath(), - "gen_py", - "%d.%d" % (sys.version_info[0], sys.version_info[1]), - ) - -# we must have a __gen_path__, but may not have a gen_py module - -# set that up. -if "win32com.gen_py" not in sys.modules: - # Create a "win32com.gen_py", but with a custom __path__ - import types - - gen_py = types.ModuleType("win32com.gen_py") - gen_py.__path__ = [__gen_path__] - sys.modules[gen_py.__name__] = gen_py - del types -gen_py = sys.modules["win32com.gen_py"] - -# get rid of these for module users -del os, sys, win32api, pythoncom diff --git a/lib/win32com/client/CLSIDToClass.py b/lib/win32com/client/CLSIDToClass.py deleted file mode 100644 index fde32743..00000000 --- a/lib/win32com/client/CLSIDToClass.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Manages a dictionary of CLSID strings to Python classes. - -Primary use of this module is to allow modules generated by -makepy.py to share classes. @makepy@ automatically generates code -which interacts with this module. You should never need to reference -this module directly. - -This module only provides support for modules which have been previously -been imported. The gencache module provides some support for loading modules -on demand - once done, this module supports it... - -As an example, the MSACCESS.TLB type library makes reference to the -CLSID of the Database object, as defined in DAO3032.DLL. This -allows code using the MSAccess wrapper to natively use Databases. - -This obviously applies to all cooperating objects, not just DAO and -Access. -""" -mapCLSIDToClass = {} - - -def RegisterCLSID(clsid, pythonClass): - """Register a class that wraps a CLSID - - This function allows a CLSID to be globally associated with a class. - Certain module will automatically convert an IDispatch object to an - instance of the associated class. 
- """ - - mapCLSIDToClass[str(clsid)] = pythonClass - - -def RegisterCLSIDsFromDict(dict): - """Register a dictionary of CLSID's and classes. - - This module performs the same function as @RegisterCLSID@, but for - an entire dictionary of associations. - - Typically called by makepy generated modules at import time. - """ - mapCLSIDToClass.update(dict) - - -def GetClass(clsid): - """Given a CLSID, return the globally associated class. - - clsid -- a string CLSID representation to check. - """ - return mapCLSIDToClass[clsid] - - -def HasClass(clsid): - """Determines if the CLSID has an associated class. - - clsid -- the string CLSID to check - """ - return clsid in mapCLSIDToClass diff --git a/lib/win32com/client/__init__.py b/lib/win32com/client/__init__.py deleted file mode 100644 index 91b4b303..00000000 --- a/lib/win32com/client/__init__.py +++ /dev/null @@ -1,714 +0,0 @@ -# This module exists to create the "best" dispatch object for a given -# object. If "makepy" support for a given object is detected, it is -# used, otherwise a dynamic dispatch object. - -# Note that if the unknown dispatch object then returns a known -# dispatch object, the known class will be used. This contrasts -# with dynamic.Dispatch behaviour, where dynamic objects are always used. - -import sys - -import pythoncom -import pywintypes - -from . import dynamic, gencache - -_PyIDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch] - - -def __WrapDispatch( - dispatch, - userName=None, - resultCLSID=None, - typeinfo=None, - UnicodeToString=None, - clsctx=pythoncom.CLSCTX_SERVER, - WrapperClass=None, -): - """ - Helper function to return a makepy generated class for a CLSID if it exists, - otherwise cope by using CDispatch. - """ - assert UnicodeToString is None, "this is deprecated and will go away" - if resultCLSID is None: - try: - typeinfo = dispatch.GetTypeInfo() - if ( - typeinfo is not None - ): # Some objects return NULL, some raise exceptions... 
- resultCLSID = str(typeinfo.GetTypeAttr()[0]) - except (pythoncom.com_error, AttributeError): - pass - if resultCLSID is not None: - from . import gencache - - # Attempt to load generated module support - # This may load the module, and make it available - klass = gencache.GetClassForCLSID(resultCLSID) - if klass is not None: - return klass(dispatch) - - # Return a "dynamic" object - best we can do! - if WrapperClass is None: - WrapperClass = CDispatch - return dynamic.Dispatch(dispatch, userName, WrapperClass, typeinfo, clsctx=clsctx) - - -def GetObject(Pathname=None, Class=None, clsctx=None): - """ - Mimic VB's GetObject() function. - - ob = GetObject(Class = "ProgID") or GetObject(Class = clsid) will - connect to an already running instance of the COM object. - - ob = GetObject(r"c:\blah\blah\foo.xls") (aka the COM moniker syntax) - will return a ready to use Python wrapping of the required COM object. - - Note: You must specifiy one or the other of these arguments. I know - this isn't pretty, but it is what VB does. Blech. If you don't - I'll throw ValueError at you. :) - - This will most likely throw pythoncom.com_error if anything fails. - """ - if clsctx is None: - clsctx = pythoncom.CLSCTX_ALL - - if (Pathname is None and Class is None) or ( - Pathname is not None and Class is not None - ): - raise ValueError( - "You must specify a value for Pathname or Class, but not both." - ) - - if Class is not None: - return GetActiveObject(Class, clsctx) - else: - return Moniker(Pathname, clsctx) - - -def GetActiveObject(Class, clsctx=pythoncom.CLSCTX_ALL): - """ - Python friendly version of GetObject's ProgID/CLSID functionality. 
- """ - resultCLSID = pywintypes.IID(Class) - dispatch = pythoncom.GetActiveObject(resultCLSID) - dispatch = dispatch.QueryInterface(pythoncom.IID_IDispatch) - return __WrapDispatch(dispatch, Class, resultCLSID=resultCLSID, clsctx=clsctx) - - -def Moniker(Pathname, clsctx=pythoncom.CLSCTX_ALL): - """ - Python friendly version of GetObject's moniker functionality. - """ - moniker, i, bindCtx = pythoncom.MkParseDisplayName(Pathname) - dispatch = moniker.BindToObject(bindCtx, None, pythoncom.IID_IDispatch) - return __WrapDispatch(dispatch, Pathname, clsctx=clsctx) - - -def Dispatch( - dispatch, - userName=None, - resultCLSID=None, - typeinfo=None, - UnicodeToString=None, - clsctx=pythoncom.CLSCTX_SERVER, -): - """Creates a Dispatch based COM object.""" - assert UnicodeToString is None, "this is deprecated and will go away" - dispatch, userName = dynamic._GetGoodDispatchAndUserName(dispatch, userName, clsctx) - return __WrapDispatch(dispatch, userName, resultCLSID, typeinfo, clsctx=clsctx) - - -def DispatchEx( - clsid, - machine=None, - userName=None, - resultCLSID=None, - typeinfo=None, - UnicodeToString=None, - clsctx=None, -): - """Creates a Dispatch based COM object on a specific machine.""" - assert UnicodeToString is None, "this is deprecated and will go away" - # If InProc is registered, DCOM will use it regardless of the machine name - # (and regardless of the DCOM config for the object.) So unless the user - # specifies otherwise, we exclude inproc apps when a remote machine is used. 
- if clsctx is None: - clsctx = pythoncom.CLSCTX_SERVER - if machine is not None: - clsctx = clsctx & ~pythoncom.CLSCTX_INPROC - if machine is None: - serverInfo = None - else: - serverInfo = (machine,) - if userName is None: - userName = clsid - dispatch = pythoncom.CoCreateInstanceEx( - clsid, None, clsctx, serverInfo, (pythoncom.IID_IDispatch,) - )[0] - return Dispatch(dispatch, userName, resultCLSID, typeinfo, clsctx=clsctx) - - -class CDispatch(dynamic.CDispatch): - """ - The dynamic class used as a last resort. - The purpose of this overriding of dynamic.CDispatch is to perpetuate the policy - of using the makepy generated wrapper Python class instead of dynamic.CDispatch - if/when possible. - """ - - def _wrap_dispatch_( - self, ob, userName=None, returnCLSID=None, UnicodeToString=None - ): - assert UnicodeToString is None, "this is deprecated and will go away" - return Dispatch(ob, userName, returnCLSID, None) - - def __dir__(self): - return dynamic.CDispatch.__dir__(self) - - -def CastTo(ob, target, typelib=None): - """'Cast' a COM object to another interface""" - # todo - should support target being an IID - mod = None - if ( - typelib is not None - ): # caller specified target typelib (TypelibSpec). See e.g. selecttlb.EnumTlbs(). - mod = gencache.MakeModuleForTypelib( - typelib.clsid, typelib.lcid, int(typelib.major, 16), int(typelib.minor, 16) - ) - if not hasattr(mod, target): - raise ValueError( - "The interface name '%s' does not appear in the " - "specified library %r" % (target, typelib.ver_desc) - ) - - elif hasattr(target, "index"): # string like - # for now, we assume makepy for this to work. - if "CLSID" not in ob.__class__.__dict__: - # Eeek - no makepy support - try and build it. 
- ob = gencache.EnsureDispatch(ob) - if "CLSID" not in ob.__class__.__dict__: - raise ValueError("Must be a makepy-able object for this to work") - clsid = ob.CLSID - # Lots of hoops to support "demand-build" - ie, generating - # code for an interface first time it is used. We assume the - # interface name exists in the same library as the object. - # This is generally the case - only referenced typelibs may be - # a problem, and we can handle that later. Maybe - # So get the generated module for the library itself, then - # find the interface CLSID there. - mod = gencache.GetModuleForCLSID(clsid) - # Get the 'root' module. - mod = gencache.GetModuleForTypelib( - mod.CLSID, mod.LCID, mod.MajorVersion, mod.MinorVersion - ) - # Find the CLSID of the target - target_clsid = mod.NamesToIIDMap.get(target) - if target_clsid is None: - raise ValueError( - "The interface name '%s' does not appear in the " - "same library as object '%r'" % (target, ob) - ) - mod = gencache.GetModuleForCLSID(target_clsid) - if mod is not None: - target_class = getattr(mod, target) - # resolve coclass to interface - target_class = getattr(target_class, "default_interface", target_class) - return target_class(ob) # auto QI magic happens - raise ValueError - - -class Constants: - """A container for generated COM constants.""" - - def __init__(self): - self.__dicts__ = [] # A list of dictionaries - - def __getattr__(self, a): - for d in self.__dicts__: - if a in d: - return d[a] - raise AttributeError(a) - - -# And create an instance. -constants = Constants() - - -# A helpers for DispatchWithEvents - this becomes __setattr__ for the -# temporary class. -def _event_setattr_(self, attr, val): - try: - # Does the COM object have an attribute of this name? - self.__class__.__bases__[0].__setattr__(self, attr, val) - except AttributeError: - # Otherwise just stash it away in the instance. 
- self.__dict__[attr] = val - - -# An instance of this "proxy" is created to break the COM circular references -# that exist (ie, when we connect to the COM events, COM keeps a reference -# to the object. Thus, the Event connection must be manually broken before -# our object can die. This solves the problem by manually breaking the connection -# to the real object as the proxy dies. -class EventsProxy: - def __init__(self, ob): - self.__dict__["_obj_"] = ob - - def __del__(self): - try: - # If there is a COM error on disconnection we should - # just ignore it - object probably already shut down... - self._obj_.close() - except pythoncom.com_error: - pass - - def __getattr__(self, attr): - return getattr(self._obj_, attr) - - def __setattr__(self, attr, val): - setattr(self._obj_, attr, val) - - -def DispatchWithEvents(clsid, user_event_class): - """Create a COM object that can fire events to a user defined class. - clsid -- The ProgID or CLSID of the object to create. - user_event_class -- A Python class object that responds to the events. - - This requires makepy support for the COM object being created. If - this support does not exist it will be automatically generated by - this function. If the object does not support makepy, a TypeError - exception will be raised. - - The result is a class instance that both represents the COM object - and handles events from the COM object. - - It is important to note that the returned instance is not a direct - instance of the user_event_class, but an instance of a temporary - class object that derives from three classes: - * The makepy generated class for the COM object - * The makepy generated class for the COM events - * The user_event_class as passed to this function. - - If this is not suitable, see the getevents function for an alternative - technique of handling events. 
- - Object Lifetimes: Whenever the object returned from this function is - cleaned-up by Python, the events will be disconnected from - the COM object. This is almost always what should happen, - but see the documentation for getevents() for more details. - - Example: - - >>> class IEEvents: - ... def OnVisible(self, visible): - ... print "Visible changed:", visible - ... - >>> ie = DispatchWithEvents("InternetExplorer.Application", IEEvents) - >>> ie.Visible = 1 - Visible changed: 1 - >>> - """ - # Create/Get the object. - disp = Dispatch(clsid) - if not disp.__class__.__dict__.get( - "CLSID" - ): # Eeek - no makepy support - try and build it. - try: - ti = disp._oleobj_.GetTypeInfo() - disp_clsid = ti.GetTypeAttr()[0] - tlb, index = ti.GetContainingTypeLib() - tla = tlb.GetLibAttr() - gencache.EnsureModule(tla[0], tla[1], tla[3], tla[4], bValidateFile=0) - # Get the class from the module. - disp_class = gencache.GetClassForProgID(str(disp_clsid)) - except pythoncom.com_error: - raise TypeError( - "This COM object can not automate the makepy process - please run makepy manually for this object" - ) - else: - disp_class = disp.__class__ - # If the clsid was an object, get the clsid - clsid = disp_class.CLSID - # Create a new class that derives from 3 classes - the dispatch class, the event sink class and the user class. - # XXX - we are still "classic style" classes in py2x, so we need can't yet - # use 'type()' everywhere - revisit soon, as py2x will move to new-style too... - try: - from types import ClassType as new_type - except ImportError: - new_type = type # py3k - events_class = getevents(clsid) - if events_class is None: - raise ValueError("This COM object does not support events.") - result_class = new_type( - "COMEventClass", - (disp_class, events_class, user_event_class), - {"__setattr__": _event_setattr_}, - ) - instance = result_class( - disp._oleobj_ - ) # This only calls the first base class __init__. 
- events_class.__init__(instance, instance) - if hasattr(user_event_class, "__init__"): - user_event_class.__init__(instance) - return EventsProxy(instance) - - -def WithEvents(disp, user_event_class): - """Similar to DispatchWithEvents - except that the returned - object is *not* also usable as the original Dispatch object - that is - the returned object is not dispatchable. - - The difference is best summarised by example. - - >>> class IEEvents: - ... def OnVisible(self, visible): - ... print "Visible changed:", visible - ... - >>> ie = Dispatch("InternetExplorer.Application") - >>> ie_events = WithEvents(ie, IEEvents) - >>> ie.Visible = 1 - Visible changed: 1 - - Compare with the code sample for DispatchWithEvents, where you get a - single object that is both the interface and the event handler. Note that - the event handler instance will *not* be able to use 'self.' to refer to - IE's methods and properties. - - This is mainly useful where using DispatchWithEvents causes - circular reference problems that the simple proxy doesn't deal with - """ - disp = Dispatch(disp) - if not disp.__class__.__dict__.get( - "CLSID" - ): # Eeek - no makepy support - try and build it. - try: - ti = disp._oleobj_.GetTypeInfo() - disp_clsid = ti.GetTypeAttr()[0] - tlb, index = ti.GetContainingTypeLib() - tla = tlb.GetLibAttr() - gencache.EnsureModule(tla[0], tla[1], tla[3], tla[4], bValidateFile=0) - # Get the class from the module. - disp_class = gencache.GetClassForProgID(str(disp_clsid)) - except pythoncom.com_error: - raise TypeError( - "This COM object can not automate the makepy process - please run makepy manually for this object" - ) - else: - disp_class = disp.__class__ - # Get the clsid - clsid = disp_class.CLSID - # Create a new class that derives from 2 classes - the event sink - # class and the user class. 
- try: - from types import ClassType as new_type - except ImportError: - new_type = type # py3k - events_class = getevents(clsid) - if events_class is None: - raise ValueError("This COM object does not support events.") - result_class = new_type("COMEventClass", (events_class, user_event_class), {}) - instance = result_class(disp) # This only calls the first base class __init__. - if hasattr(user_event_class, "__init__"): - user_event_class.__init__(instance) - return instance - - -def getevents(clsid): - """Determine the default outgoing interface for a class, given - either a clsid or progid. It returns a class - you can - conveniently derive your own handler from this class and implement - the appropriate methods. - - This method relies on the classes produced by makepy. You must use - either makepy or the gencache module to ensure that the - appropriate support classes have been generated for the com server - that you will be handling events from. - - Beware of COM circular references. When the Events class is connected - to the COM object, the COM object itself keeps a reference to the Python - events class. Thus, neither the Events instance or the COM object will - ever die by themselves. The 'close' method on the events instance - must be called to break this chain and allow standard Python collection - rules to manage object lifetimes. Note that DispatchWithEvents() does - work around this problem by the use of a proxy object, but if you use - the getevents() function yourself, you must make your own arrangements - to manage this circular reference issue. - - Beware of creating Python circular references: this will happen if your - handler has a reference to an object that has a reference back to - the event source. Call the 'close' method to break the chain. 
- - Example: - - >>>win32com.client.gencache.EnsureModule('{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}',0,1,1) - >> - >>> class InternetExplorerEvents(win32com.client.getevents("InternetExplorer.Application.1")): - ... def OnVisible(self, Visible): - ... print "Visibility changed: ", Visible - ... - >>> - >>> ie=win32com.client.Dispatch("InternetExplorer.Application.1") - >>> events=InternetExplorerEvents(ie) - >>> ie.Visible=1 - Visibility changed: 1 - >>> - """ - - # find clsid given progid or clsid - clsid = str(pywintypes.IID(clsid)) - # return default outgoing interface for that class - klass = gencache.GetClassForCLSID(clsid) - try: - return klass.default_source - except AttributeError: - # See if we have a coclass for the interfaces. - try: - return gencache.GetClassForCLSID(klass.coclass_clsid).default_source - except AttributeError: - return None - - -# A Record object, as used by the COM struct support -def Record(name, object): - """Creates a new record object, given the name of the record, - and an object from the same type library. - - Example usage would be: - app = win32com.client.Dispatch("Some.Application") - point = win32com.client.Record("SomeAppPoint", app) - point.x = 0 - point.y = 0 - app.MoveTo(point) - """ - # XXX - to do - probably should allow "object" to already be a module object. - from . import gencache - - object = gencache.EnsureDispatch(object) - module = sys.modules[object.__class__.__module__] - # to allow us to work correctly with "demand generated" code, - # we must use the typelib CLSID to obtain the module - # (otherwise we get the sub-module for the object, which - # does not hold the records) - # thus, package may be module, or may be module's parent if demand generated. 
- package = gencache.GetModuleForTypelib( - module.CLSID, module.LCID, module.MajorVersion, module.MinorVersion - ) - try: - struct_guid = package.RecordMap[name] - except KeyError: - raise ValueError( - "The structure '%s' is not defined in module '%s'" % (name, package) - ) - return pythoncom.GetRecordFromGuids( - module.CLSID, module.MajorVersion, module.MinorVersion, module.LCID, struct_guid - ) - - -############################################ -# The base of all makepy generated classes -############################################ -class DispatchBaseClass: - def __init__(self, oobj=None): - if oobj is None: - oobj = pythoncom.new(self.CLSID) - elif isinstance(oobj, DispatchBaseClass): - try: - oobj = oobj._oleobj_.QueryInterface( - self.CLSID, pythoncom.IID_IDispatch - ) # Must be a valid COM instance - except pythoncom.com_error as details: - import winerror - - # Some stupid objects fail here, even tho it is _already_ IDispatch!!?? - # Eg, Lotus notes. - # So just let it use the existing object if E_NOINTERFACE - if details.hresult != winerror.E_NOINTERFACE: - raise - oobj = oobj._oleobj_ - self.__dict__["_oleobj_"] = oobj # so we dont call __setattr__ - - def __dir__(self): - lst = ( - list(self.__dict__.keys()) - + dir(self.__class__) - + list(self._prop_map_get_.keys()) - + list(self._prop_map_put_.keys()) - ) - try: - lst += [p.Name for p in self.Properties_] - except AttributeError: - pass - return list(set(lst)) - - # Provide a prettier name than the CLSID - def __repr__(self): - # Need to get the docstring for the module for this class. - try: - mod_doc = sys.modules[self.__class__.__module__].__doc__ - if mod_doc: - mod_name = "win32com.gen_py." + mod_doc - else: - mod_name = sys.modules[self.__class__.__module__].__name__ - except KeyError: - mod_name = "win32com.gen_py.unknown" - return "<%s.%s instance at 0x%s>" % ( - mod_name, - self.__class__.__name__, - id(self), - ) - - # Delegate comparison to the oleobjs, as they know how to do identity. 
- def __eq__(self, other): - other = getattr(other, "_oleobj_", other) - return self._oleobj_ == other - - def __ne__(self, other): - other = getattr(other, "_oleobj_", other) - return self._oleobj_ != other - - def _ApplyTypes_(self, dispid, wFlags, retType, argTypes, user, resultCLSID, *args): - return self._get_good_object_( - self._oleobj_.InvokeTypes(dispid, 0, wFlags, retType, argTypes, *args), - user, - resultCLSID, - ) - - def __getattr__(self, attr): - args = self._prop_map_get_.get(attr) - if args is None: - raise AttributeError( - "'%s' object has no attribute '%s'" % (repr(self), attr) - ) - return self._ApplyTypes_(*args) - - def __setattr__(self, attr, value): - if attr in self.__dict__: - self.__dict__[attr] = value - return - try: - args, defArgs = self._prop_map_put_[attr] - except KeyError: - raise AttributeError( - "'%s' object has no attribute '%s'" % (repr(self), attr) - ) - self._oleobj_.Invoke(*(args + (value,) + defArgs)) - - def _get_good_single_object_(self, obj, obUserName=None, resultCLSID=None): - return _get_good_single_object_(obj, obUserName, resultCLSID) - - def _get_good_object_(self, obj, obUserName=None, resultCLSID=None): - return _get_good_object_(obj, obUserName, resultCLSID) - - -# XXX - These should be consolidated with dynamic.py versions. 
-def _get_good_single_object_(obj, obUserName=None, resultCLSID=None): - if _PyIDispatchType == type(obj): - return Dispatch(obj, obUserName, resultCLSID) - return obj - - -def _get_good_object_(obj, obUserName=None, resultCLSID=None): - if obj is None: - return None - elif isinstance(obj, tuple): - obUserNameTuple = (obUserName,) * len(obj) - resultCLSIDTuple = (resultCLSID,) * len(obj) - return tuple(map(_get_good_object_, obj, obUserNameTuple, resultCLSIDTuple)) - else: - return _get_good_single_object_(obj, obUserName, resultCLSID) - - -class CoClassBaseClass: - def __init__(self, oobj=None): - if oobj is None: - oobj = pythoncom.new(self.CLSID) - dispobj = self.__dict__["_dispobj_"] = self.default_interface(oobj) - # See comments below re the special methods. - for maybe in [ - "__call__", - "__str__", - "__int__", - "__iter__", - "__len__", - "__nonzero__", - ]: - if hasattr(dispobj, maybe): - setattr(self, maybe, getattr(self, "__maybe" + maybe)) - - def __repr__(self): - return "" % (__doc__, self.__class__.__name__) - - def __getattr__(self, attr): - d = self.__dict__["_dispobj_"] - if d is not None: - return getattr(d, attr) - raise AttributeError(attr) - - def __setattr__(self, attr, value): - if attr in self.__dict__: - self.__dict__[attr] = value - return - try: - d = self.__dict__["_dispobj_"] - if d is not None: - d.__setattr__(attr, value) - return - except AttributeError: - pass - self.__dict__[attr] = value - - # Special methods don't use __getattr__ etc, so explicitly delegate here. - # Note however, that not all are safe to let bubble up - things like - # `bool(ob)` will break if the object defines __int__ but then raises an - # attribute error - eg, see #1753. - # It depends on what the wrapped COM object actually defines whether these - # will exist on the underlying object, so __init__ explicitly checks if they - # do and if so, wires them up. 
- - def __maybe__call__(self, *args, **kwargs): - return self.__dict__["_dispobj_"].__call__(*args, **kwargs) - - def __maybe__str__(self, *args): - return self.__dict__["_dispobj_"].__str__(*args) - - def __maybe__int__(self, *args): - return self.__dict__["_dispobj_"].__int__(*args) - - def __maybe__iter__(self): - return self.__dict__["_dispobj_"].__iter__() - - def __maybe__len__(self): - return self.__dict__["_dispobj_"].__len__() - - def __maybe__nonzero__(self): - return self.__dict__["_dispobj_"].__nonzero__() - - -# A very simple VARIANT class. Only to be used with poorly-implemented COM -# objects. If an object accepts an arg which is a simple "VARIANT", but still -# is very pickly about the actual variant type (eg, isn't happy with a VT_I4, -# which it would get from a Python integer), you can use this to force a -# particular VT. -class VARIANT(object): - def __init__(self, vt, value): - self.varianttype = vt - self._value = value - - # 'value' is a property so when set by pythoncom it gets any magic wrapping - # which normally happens for result objects - def _get_value(self): - return self._value - - def _set_value(self, newval): - self._value = _get_good_object_(newval) - - def _del_value(self): - del self._value - - value = property(_get_value, _set_value, _del_value) - - def __repr__(self): - return "win32com.client.VARIANT(%r, %r)" % (self.varianttype, self._value) diff --git a/lib/win32com/client/build.py b/lib/win32com/client/build.py deleted file mode 100644 index ce4e2e4a..00000000 --- a/lib/win32com/client/build.py +++ /dev/null @@ -1,789 +0,0 @@ -"""Contains knowledge to build a COM object definition. - -This module is used by both the @dynamic@ and @makepy@ modules to build -all knowledge of a COM object. - -This module contains classes which contain the actual knowledge of the object. -This include parameter and return type information, the COM dispid and CLSID, etc. 
- -Other modules may use this information to generate .py files, use the information -dynamically, or possibly even generate .html documentation for objects. -""" - -# -# NOTES: DispatchItem and MapEntry used by dynamic.py. -# the rest is used by makepy.py -# -# OleItem, DispatchItem, MapEntry, BuildCallList() is used by makepy - -import datetime -import string -import sys -from keyword import iskeyword - -import pythoncom -import winerror -from pywintypes import TimeType - - -# It isn't really clear what the quoting rules are in a C/IDL string and -# literals like a quote char and backslashes makes life a little painful to -# always render the string perfectly - so just punt and fall-back to a repr() -def _makeDocString(s): - if sys.version_info < (3,): - s = s.encode("mbcs") - return repr(s) - - -error = "PythonCOM.Client.Build error" - - -class NotSupportedException(Exception): - pass # Raised when we cant support a param type. - - -DropIndirection = "DropIndirection" - -NoTranslateTypes = [ - pythoncom.VT_BOOL, - pythoncom.VT_CLSID, - pythoncom.VT_CY, - pythoncom.VT_DATE, - pythoncom.VT_DECIMAL, - pythoncom.VT_EMPTY, - pythoncom.VT_ERROR, - pythoncom.VT_FILETIME, - pythoncom.VT_HRESULT, - pythoncom.VT_I1, - pythoncom.VT_I2, - pythoncom.VT_I4, - pythoncom.VT_I8, - pythoncom.VT_INT, - pythoncom.VT_NULL, - pythoncom.VT_R4, - pythoncom.VT_R8, - pythoncom.VT_NULL, - pythoncom.VT_STREAM, - pythoncom.VT_UI1, - pythoncom.VT_UI2, - pythoncom.VT_UI4, - pythoncom.VT_UI8, - pythoncom.VT_UINT, - pythoncom.VT_VOID, -] - -NoTranslateMap = {} -for v in NoTranslateTypes: - NoTranslateMap[v] = None - - -class MapEntry: - "Simple holder for named attibutes - items in a map." 
- - def __init__( - self, - desc_or_id, - names=None, - doc=None, - resultCLSID=pythoncom.IID_NULL, - resultDoc=None, - hidden=0, - ): - if type(desc_or_id) == type(0): - self.dispid = desc_or_id - self.desc = None - else: - self.dispid = desc_or_id[0] - self.desc = desc_or_id - - self.names = names - self.doc = doc - self.resultCLSID = resultCLSID - self.resultDocumentation = resultDoc - self.wasProperty = ( - 0 # Have I been transformed into a function so I can pass args? - ) - self.hidden = hidden - - def __repr__(self): - return ( - "MapEntry(dispid={s.dispid}, desc={s.desc}, names={s.names}, doc={s.doc!r}, " - "resultCLSID={s.resultCLSID}, resultDocumentation={s.resultDocumentation}, " - "wasProperty={s.wasProperty}, hidden={s.hidden}" - ).format(s=self) - - def GetResultCLSID(self): - rc = self.resultCLSID - if rc == pythoncom.IID_NULL: - return None - return rc - - # Return a string, suitable for output - either "'{...}'" or "None" - def GetResultCLSIDStr(self): - rc = self.GetResultCLSID() - if rc is None: - return "None" - return repr( - str(rc) - ) # Convert the IID object to a string, then to a string in a string. 
- - def GetResultName(self): - if self.resultDocumentation is None: - return None - return self.resultDocumentation[0] - - -class OleItem: - typename = "OleItem" - - def __init__(self, doc=None): - self.doc = doc - if self.doc: - self.python_name = MakePublicAttributeName(self.doc[0]) - else: - self.python_name = None - self.bWritten = 0 - self.bIsDispatch = 0 - self.bIsSink = 0 - self.clsid = None - self.co_class = None - - -class DispatchItem(OleItem): - typename = "DispatchItem" - - def __init__(self, typeinfo=None, attr=None, doc=None, bForUser=1): - OleItem.__init__(self, doc) - self.propMap = {} - self.propMapGet = {} - self.propMapPut = {} - self.mapFuncs = {} - self.defaultDispatchName = None - self.hidden = 0 - - if typeinfo: - self.Build(typeinfo, attr, bForUser) - - def _propMapPutCheck_(self, key, item): - ins, outs, opts = self.CountInOutOptArgs(item.desc[2]) - if ins > 1: # if a Put property takes more than 1 arg: - if opts + 1 == ins or ins == item.desc[6] + 1: - newKey = "Set" + key - deleteExisting = 0 # This one is still OK - else: - deleteExisting = 1 # No good to us - if key in self.mapFuncs or key in self.propMapGet: - newKey = "Set" + key - else: - newKey = key - item.wasProperty = 1 - self.mapFuncs[newKey] = item - if deleteExisting: - del self.propMapPut[key] - - def _propMapGetCheck_(self, key, item): - ins, outs, opts = self.CountInOutOptArgs(item.desc[2]) - if ins > 0: # if a Get property takes _any_ in args: - if item.desc[6] == ins or ins == opts: - newKey = "Get" + key - deleteExisting = 0 # This one is still OK - else: - deleteExisting = 1 # No good to us - if key in self.mapFuncs: - newKey = "Get" + key - else: - newKey = key - item.wasProperty = 1 - self.mapFuncs[newKey] = item - if deleteExisting: - del self.propMapGet[key] - - def _AddFunc_(self, typeinfo, fdesc, bForUser): - assert fdesc.desckind == pythoncom.DESCKIND_FUNCDESC - id = fdesc.memid - funcflags = fdesc.wFuncFlags - try: - names = typeinfo.GetNames(id) - name = 
names[0] - except pythoncom.ole_error: - name = "" - names = None - - doc = None - try: - if bForUser: - doc = typeinfo.GetDocumentation(id) - except pythoncom.ole_error: - pass - - if id == 0 and name: - self.defaultDispatchName = name - - invkind = fdesc.invkind - - # We need to translate any Alias', Enums, structs etc in result and args - typerepr, flag, defval = fdesc.rettype - # sys.stderr.write("%s result - %s -> " % (name, typerepr)) - typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo) - # sys.stderr.write("%s\n" % (typerepr,)) - fdesc.rettype = typerepr, flag, defval, resultCLSID - # Translate any Alias or Enums in argument list. - argList = [] - for argDesc in fdesc.args: - typerepr, flag, defval = argDesc - # sys.stderr.write("%s arg - %s -> " % (name, typerepr)) - arg_type, arg_clsid, arg_doc = _ResolveType(typerepr, typeinfo) - argDesc = arg_type, flag, defval, arg_clsid - # sys.stderr.write("%s\n" % (argDesc[0],)) - argList.append(argDesc) - fdesc.args = tuple(argList) - - hidden = (funcflags & pythoncom.FUNCFLAG_FHIDDEN) != 0 - if invkind == pythoncom.INVOKE_PROPERTYGET: - map = self.propMapGet - # This is not the best solution, but I dont think there is - # one without specific "set" syntax. - # If there is a single PUT or PUTREF, it will function as a property. - # If there are both, then the PUT remains a property, and the PUTREF - # gets transformed into a function. - # (in vb, PUT=="obj=other_obj", PUTREF="set obj=other_obj - elif invkind in (pythoncom.INVOKE_PROPERTYPUT, pythoncom.INVOKE_PROPERTYPUTREF): - # Special case - existing = self.propMapPut.get(name, None) - if existing is not None: - if existing.desc[4] == pythoncom.INVOKE_PROPERTYPUT: # Keep this one - map = self.mapFuncs - name = "Set" + name - else: # Existing becomes a func. - existing.wasProperty = 1 - self.mapFuncs["Set" + name] = existing - map = self.propMapPut # existing gets overwritten below. - else: - map = self.propMapPut # first time weve seen it. 
- - elif invkind == pythoncom.INVOKE_FUNC: - map = self.mapFuncs - else: - map = None - if not map is None: - # if map.has_key(name): - # sys.stderr.write("Warning - overwriting existing method/attribute %s\n" % name) - map[name] = MapEntry(fdesc, names, doc, resultCLSID, resultDoc, hidden) - # any methods that can't be reached via DISPATCH we return None - # for, so dynamic dispatch doesnt see it. - if fdesc.funckind != pythoncom.FUNC_DISPATCH: - return None - return (name, map) - return None - - def _AddVar_(self, typeinfo, vardesc, bForUser): - ### need pythoncom.VARFLAG_FRESTRICTED ... - ### then check it - assert vardesc.desckind == pythoncom.DESCKIND_VARDESC - - if vardesc.varkind == pythoncom.VAR_DISPATCH: - id = vardesc.memid - names = typeinfo.GetNames(id) - # Translate any Alias or Enums in result. - typerepr, flags, defval = vardesc.elemdescVar - typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo) - vardesc.elemdescVar = typerepr, flags, defval - doc = None - try: - if bForUser: - doc = typeinfo.GetDocumentation(id) - except pythoncom.ole_error: - pass - - # handle the enumerator specially - map = self.propMap - # Check if the element is hidden. - hidden = (vardesc.wVarFlags & 0x40) != 0 # VARFLAG_FHIDDEN - map[names[0]] = MapEntry( - vardesc, names, doc, resultCLSID, resultDoc, hidden - ) - return (names[0], map) - else: - return None - - def Build(self, typeinfo, attr, bForUser=1): - self.clsid = attr[0] - self.bIsDispatch = (attr.wTypeFlags & pythoncom.TYPEFLAG_FDISPATCHABLE) != 0 - if typeinfo is None: - return - # Loop over all methods - for j in range(attr[6]): - fdesc = typeinfo.GetFuncDesc(j) - self._AddFunc_(typeinfo, fdesc, bForUser) - - # Loop over all variables (ie, properties) - for j in range(attr[7]): - fdesc = typeinfo.GetVarDesc(j) - self._AddVar_(typeinfo, fdesc, bForUser) - - # Now post-process the maps. For any "Get" or "Set" properties - # that have arguments, we must turn them into methods. 
If a method - # of the same name already exists, change the name. - for key, item in list(self.propMapGet.items()): - self._propMapGetCheck_(key, item) - - for key, item in list(self.propMapPut.items()): - self._propMapPutCheck_(key, item) - - def CountInOutOptArgs(self, argTuple): - "Return tuple counting in/outs/OPTS. Sum of result may not be len(argTuple), as some args may be in/out." - ins = out = opts = 0 - for argCheck in argTuple: - inOut = argCheck[1] - if inOut == 0: - ins = ins + 1 - out = out + 1 - else: - if inOut & pythoncom.PARAMFLAG_FIN: - ins = ins + 1 - if inOut & pythoncom.PARAMFLAG_FOPT: - opts = opts + 1 - if inOut & pythoncom.PARAMFLAG_FOUT: - out = out + 1 - return ins, out, opts - - def MakeFuncMethod(self, entry, name, bMakeClass=1): - # If we have a type description, and not varargs... - if entry.desc is not None and (len(entry.desc) < 6 or entry.desc[6] != -1): - return self.MakeDispatchFuncMethod(entry, name, bMakeClass) - else: - return self.MakeVarArgsFuncMethod(entry, name, bMakeClass) - - def MakeDispatchFuncMethod(self, entry, name, bMakeClass=1): - fdesc = entry.desc - doc = entry.doc - names = entry.names - ret = [] - if bMakeClass: - linePrefix = "\t" - defNamedOptArg = "defaultNamedOptArg" - defNamedNotOptArg = "defaultNamedNotOptArg" - defUnnamedArg = "defaultUnnamedArg" - else: - linePrefix = "" - defNamedOptArg = "pythoncom.Missing" - defNamedNotOptArg = "pythoncom.Missing" - defUnnamedArg = "pythoncom.Missing" - defOutArg = "pythoncom.Missing" - id = fdesc[0] - - s = ( - linePrefix - + "def " - + name - + "(self" - + BuildCallList( - fdesc, - names, - defNamedOptArg, - defNamedNotOptArg, - defUnnamedArg, - defOutArg, - ) - + "):" - ) - ret.append(s) - if doc and doc[1]: - ret.append(linePrefix + "\t" + _makeDocString(doc[1])) - - resclsid = entry.GetResultCLSID() - if resclsid: - resclsid = "'%s'" % resclsid - else: - resclsid = "None" - # Strip the default values from the arg desc - retDesc = fdesc[8][:2] - argsDesc = 
tuple([what[:2] for what in fdesc[2]]) - # The runtime translation of the return types is expensive, so when we know the - # return type of the function, there is no need to check the type at runtime. - # To qualify, this function must return a "simple" type, and have no byref args. - # Check if we have byrefs or anything in the args which mean we still need a translate. - param_flags = [what[1] for what in fdesc[2]] - bad_params = [ - flag - for flag in param_flags - if flag & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FRETVAL) != 0 - ] - s = None - if len(bad_params) == 0 and len(retDesc) == 2 and retDesc[1] == 0: - rd = retDesc[0] - if rd in NoTranslateMap: - s = "%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)" % ( - linePrefix, - id, - fdesc[4], - retDesc, - argsDesc, - _BuildArgList(fdesc, names), - ) - elif rd in [pythoncom.VT_DISPATCH, pythoncom.VT_UNKNOWN]: - s = "%s\tret = self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)\n" % ( - linePrefix, - id, - fdesc[4], - retDesc, - repr(argsDesc), - _BuildArgList(fdesc, names), - ) - s = s + "%s\tif ret is not None:\n" % (linePrefix,) - if rd == pythoncom.VT_UNKNOWN: - s = s + "%s\t\t# See if this IUnknown is really an IDispatch\n" % ( - linePrefix, - ) - s = s + "%s\t\ttry:\n" % (linePrefix,) - s = ( - s - + "%s\t\t\tret = ret.QueryInterface(pythoncom.IID_IDispatch)\n" - % (linePrefix,) - ) - s = s + "%s\t\texcept pythoncom.error:\n" % (linePrefix,) - s = s + "%s\t\t\treturn ret\n" % (linePrefix,) - s = s + "%s\t\tret = Dispatch(ret, %s, %s)\n" % ( - linePrefix, - repr(name), - resclsid, - ) - s = s + "%s\treturn ret" % (linePrefix) - elif rd == pythoncom.VT_BSTR: - s = "%s\t# Result is a Unicode object\n" % (linePrefix,) - s = ( - s - + "%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)" - % ( - linePrefix, - id, - fdesc[4], - retDesc, - repr(argsDesc), - _BuildArgList(fdesc, names), - ) - ) - # else s remains None - if s is None: - s = "%s\treturn self._ApplyTypes_(%d, %s, %s, %s, 
%s, %s%s)" % ( - linePrefix, - id, - fdesc[4], - retDesc, - argsDesc, - repr(name), - resclsid, - _BuildArgList(fdesc, names), - ) - - ret.append(s) - ret.append("") - return ret - - def MakeVarArgsFuncMethod(self, entry, name, bMakeClass=1): - fdesc = entry.desc - names = entry.names - doc = entry.doc - ret = [] - argPrefix = "self" - if bMakeClass: - linePrefix = "\t" - else: - linePrefix = "" - ret.append(linePrefix + "def " + name + "(" + argPrefix + ", *args):") - if doc and doc[1]: - ret.append(linePrefix + "\t" + _makeDocString(doc[1])) - if fdesc: - invoketype = fdesc[4] - else: - invoketype = pythoncom.DISPATCH_METHOD - s = linePrefix + "\treturn self._get_good_object_(self._oleobj_.Invoke(*((" - ret.append( - s + str(entry.dispid) + ",0,%d,1)+args)),'%s')" % (invoketype, names[0]) - ) - ret.append("") - return ret - - -# Note - "DispatchItem" poorly named - need a new intermediate class. -class VTableItem(DispatchItem): - def Build(self, typeinfo, attr, bForUser=1): - DispatchItem.Build(self, typeinfo, attr, bForUser) - assert typeinfo is not None, "Cant build vtables without type info!" - - meth_list = ( - list(self.mapFuncs.values()) - + list(self.propMapGet.values()) - + list(self.propMapPut.values()) - ) - meth_list.sort(key=lambda m: m.desc[7]) - - # Now turn this list into the run-time representation - # (ready for immediate use or writing to gencache) - self.vtableFuncs = [] - for entry in meth_list: - self.vtableFuncs.append((entry.names, entry.dispid, entry.desc)) - - -# A Lazy dispatch item - builds an item on request using info from -# an ITypeComp. The dynamic module makes the called to build each item, -# and also holds the references to the typeinfo and typecomp. 
-class LazyDispatchItem(DispatchItem): - typename = "LazyDispatchItem" - - def __init__(self, attr, doc): - self.clsid = attr[0] - DispatchItem.__init__(self, None, attr, doc, 0) - - -typeSubstMap = { - pythoncom.VT_INT: pythoncom.VT_I4, - pythoncom.VT_UINT: pythoncom.VT_UI4, - pythoncom.VT_HRESULT: pythoncom.VT_I4, -} - - -def _ResolveType(typerepr, itypeinfo): - # Resolve VT_USERDEFINED (often aliases or typed IDispatches) - - if type(typerepr) == tuple: - indir_vt, subrepr = typerepr - if indir_vt == pythoncom.VT_PTR: - # If it is a VT_PTR to a VT_USERDEFINED that is an IDispatch/IUnknown, - # then it resolves to simply the object. - # Otherwise, it becomes a ByRef of the resolved type - # We need to drop an indirection level on pointer to user defined interfaces. - # eg, (VT_PTR, (VT_USERDEFINED, somehandle)) needs to become VT_DISPATCH - # only when "somehandle" is an object. - # but (VT_PTR, (VT_USERDEFINED, otherhandle)) doesnt get the indirection dropped. - was_user = type(subrepr) == tuple and subrepr[0] == pythoncom.VT_USERDEFINED - subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo) - if was_user and subrepr in [ - pythoncom.VT_DISPATCH, - pythoncom.VT_UNKNOWN, - pythoncom.VT_RECORD, - ]: - # Drop the VT_PTR indirection - return subrepr, sub_clsid, sub_doc - # Change PTR indirection to byref - return subrepr | pythoncom.VT_BYREF, sub_clsid, sub_doc - if indir_vt == pythoncom.VT_SAFEARRAY: - # resolve the array element, and convert to VT_ARRAY - subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo) - return pythoncom.VT_ARRAY | subrepr, sub_clsid, sub_doc - if indir_vt == pythoncom.VT_CARRAY: # runtime has no support for this yet. 
- # resolve the array element, and convert to VT_CARRAY - # sheesh - return _something_ - return pythoncom.VT_CARRAY, None, None - if indir_vt == pythoncom.VT_USERDEFINED: - try: - resultTypeInfo = itypeinfo.GetRefTypeInfo(subrepr) - except pythoncom.com_error as details: - if details.hresult in [ - winerror.TYPE_E_CANTLOADLIBRARY, - winerror.TYPE_E_LIBNOTREGISTERED, - ]: - # an unregistered interface - return pythoncom.VT_UNKNOWN, None, None - raise - - resultAttr = resultTypeInfo.GetTypeAttr() - typeKind = resultAttr.typekind - if typeKind == pythoncom.TKIND_ALIAS: - tdesc = resultAttr.tdescAlias - return _ResolveType(tdesc, resultTypeInfo) - elif typeKind in [pythoncom.TKIND_ENUM, pythoncom.TKIND_MODULE]: - # For now, assume Long - return pythoncom.VT_I4, None, None - - elif typeKind == pythoncom.TKIND_DISPATCH: - clsid = resultTypeInfo.GetTypeAttr()[0] - retdoc = resultTypeInfo.GetDocumentation(-1) - return pythoncom.VT_DISPATCH, clsid, retdoc - - elif typeKind in [pythoncom.TKIND_INTERFACE, pythoncom.TKIND_COCLASS]: - # XXX - should probably get default interface for CO_CLASS??? - clsid = resultTypeInfo.GetTypeAttr()[0] - retdoc = resultTypeInfo.GetDocumentation(-1) - return pythoncom.VT_UNKNOWN, clsid, retdoc - - elif typeKind == pythoncom.TKIND_RECORD: - return pythoncom.VT_RECORD, None, None - raise NotSupportedException("Can not resolve alias or user-defined type") - return typeSubstMap.get(typerepr, typerepr), None, None - - -def _BuildArgList(fdesc, names): - "Builds list of args to the underlying Invoke method." - # Word has TypeInfo for Insert() method, but says "no args" - numArgs = max(fdesc[6], len(fdesc[2])) - names = list(names) - while None in names: - i = names.index(None) - names[i] = "arg%d" % (i,) - # We've seen 'source safe' libraries offer the name of 'ret' params in - # 'names' - although we can't reproduce this, it would be insane to offer - # more args than we have arg infos for - hence the upper limit on names... 
- names = list(map(MakePublicAttributeName, names[1 : (numArgs + 1)])) - name_num = 0 - while len(names) < numArgs: - names.append("arg%d" % (len(names),)) - # As per BuildCallList(), avoid huge lines. - # Hack a "\n" at the end of every 5th name - "strides" would be handy - # here but don't exist in 2.2 - for i in range(0, len(names), 5): - names[i] = names[i] + "\n\t\t\t" - return "," + ", ".join(names) - - -valid_identifier_chars = string.ascii_letters + string.digits + "_" - - -def demunge_leading_underscores(className): - i = 0 - while className[i] == "_": - i += 1 - assert i >= 2, "Should only be here with names starting with '__'" - return className[i - 1 :] + className[: i - 1] - - -# Given a "public name" (eg, the name of a class, function, etc) -# make sure it is a legal (and reasonable!) Python name. -def MakePublicAttributeName(className, is_global=False): - # Given a class attribute that needs to be public, convert it to a - # reasonable name. - # Also need to be careful that the munging doesnt - # create duplicates - eg, just removing a leading "_" is likely to cause - # a clash. - # if is_global is True, then the name is a global variable that may - # overwrite a builtin - eg, "None" - if className[:2] == "__": - return demunge_leading_underscores(className) - elif className == "None": - # assign to None is evil (and SyntaxError in 2.4, even though - # iskeyword says False there) - note that if it was a global - # it would get picked up below - className = "NONE" - elif iskeyword(className): - # most keywords are lower case (except True, False etc in py3k) - ret = className.capitalize() - # but those which aren't get forced upper. - if ret == className: - ret = ret.upper() - return ret - elif is_global and hasattr(__builtins__, className): - # builtins may be mixed case. 
If capitalizing it doesn't change it, - # force to all uppercase (eg, "None", "True" become "NONE", "TRUE" - ret = className.capitalize() - if ret == className: # didn't change - force all uppercase. - ret = ret.upper() - return ret - # Strip non printable chars - return "".join([char for char in className if char in valid_identifier_chars]) - - -# Given a default value passed by a type library, return a string with -# an appropriate repr() for the type. -# Takes a raw ELEMDESC and returns a repr string, or None -# (NOTE: The string itself may be '"None"', which is valid, and different to None. -# XXX - To do: Dates are probably screwed, but can they come in? -def MakeDefaultArgRepr(defArgVal): - try: - inOut = defArgVal[1] - except IndexError: - # something strange - assume is in param. - inOut = pythoncom.PARAMFLAG_FIN - - if inOut & pythoncom.PARAMFLAG_FHASDEFAULT: - # times need special handling... - val = defArgVal[2] - if isinstance(val, datetime.datetime): - # VARIANT <-> SYSTEMTIME conversions always lose any sub-second - # resolution, so just use a 'timetuple' here. - return repr(tuple(val.utctimetuple())) - if type(val) is TimeType: - # must be the 'old' pywintypes time object... - year = val.year - month = val.month - day = val.day - hour = val.hour - minute = val.minute - second = val.second - msec = val.msec - return ( - "pywintypes.Time((%(year)d, %(month)d, %(day)d, %(hour)d, %(minute)d, %(second)d,0,0,0,%(msec)d))" - % locals() - ) - return repr(val) - return None - - -def BuildCallList( - fdesc, - names, - defNamedOptArg, - defNamedNotOptArg, - defUnnamedArg, - defOutArg, - is_comment=False, -): - "Builds a Python declaration for a method." - # Names[0] is the func name - param names are from 1. 
- numArgs = len(fdesc[2]) - numOptArgs = fdesc[6] - strval = "" - if numOptArgs == -1: # Special value that says "var args after here" - firstOptArg = numArgs - numArgs = numArgs - 1 - else: - firstOptArg = numArgs - numOptArgs - for arg in range(numArgs): - try: - argName = names[arg + 1] - namedArg = argName is not None - except IndexError: - namedArg = 0 - if not namedArg: - argName = "arg%d" % (arg) - thisdesc = fdesc[2][arg] - # See if the IDL specified a default value - defArgVal = MakeDefaultArgRepr(thisdesc) - if defArgVal is None: - # Out params always get their special default - if ( - thisdesc[1] & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FIN) - == pythoncom.PARAMFLAG_FOUT - ): - defArgVal = defOutArg - else: - # Unnamed arg - always allow default values. - if namedArg: - # Is a named argument - if arg >= firstOptArg: - defArgVal = defNamedOptArg - else: - defArgVal = defNamedNotOptArg - else: - defArgVal = defUnnamedArg - - argName = MakePublicAttributeName(argName) - # insanely long lines with an 'encoding' flag crashes python 2.4.0 - # keep 5 args per line - # This may still fail if the arg names are insane, but that seems - # unlikely. See also _BuildArgList() - if (arg + 1) % 5 == 0: - strval = strval + "\n" - if is_comment: - strval = strval + "#" - strval = strval + "\t\t\t" - strval = strval + ", " + argName - if defArgVal: - strval = strval + "=" + defArgVal - if numOptArgs == -1: - strval = strval + ", *" + names[-1] - - return strval - - -if __name__ == "__main__": - print("Use 'makepy.py' to generate Python code - this module is just a helper") diff --git a/lib/win32com/client/combrowse.py b/lib/win32com/client/combrowse.py deleted file mode 100644 index 10eeaedf..00000000 --- a/lib/win32com/client/combrowse.py +++ /dev/null @@ -1,619 +0,0 @@ -"""A utility for browsing COM objects. - - Usage: - - Command Prompt - - Use the command *"python.exe combrowse.py"*. This will display - display a fairly small, modal dialog. 
- - Pythonwin - - Use the "Run Script" menu item, and this will create the browser in an - MDI window. This window can be fully resized. - - Details - - This module allows browsing of registered Type Libraries, COM categories, - and running COM objects. The display is similar to the Pythonwin object - browser, and displays the objects in a hierarchical window. - - Note that this module requires the win32ui (ie, Pythonwin) distribution to - work. - -""" -import sys - -import pythoncom -import win32api -import win32con -import win32ui -from pywin.tools import browser -from win32com.client import util - - -class HLIRoot(browser.HLIPythonObject): - def __init__(self, title): - super().__init__(name=title) - - def GetSubList(self): - return [ - HLIHeadingCategory(), - HLI_IEnumMoniker( - pythoncom.GetRunningObjectTable().EnumRunning(), "Running Objects" - ), - HLIHeadingRegisterdTypeLibs(), - ] - - def __cmp__(self, other): - return cmp(self.name, other.name) - - -class HLICOM(browser.HLIPythonObject): - def GetText(self): - return self.name - - def CalculateIsExpandable(self): - return 1 - - -class HLICLSID(HLICOM): - def __init__(self, myobject, name=None): - if type(myobject) == type(""): - myobject = pythoncom.MakeIID(myobject) - if name is None: - try: - name = pythoncom.ProgIDFromCLSID(myobject) - except pythoncom.com_error: - name = str(myobject) - name = "IID: " + name - HLICOM.__init__(self, myobject, name) - - def CalculateIsExpandable(self): - return 0 - - def GetSubList(self): - return [] - - -class HLI_Interface(HLICOM): - pass - - -class HLI_Enum(HLI_Interface): - def GetBitmapColumn(self): - return 0 # Always a folder. 
- - def CalculateIsExpandable(self): - if self.myobject is not None: - rc = len(self.myobject.Next(1)) > 0 - self.myobject.Reset() - else: - rc = 0 - return rc - - pass - - -class HLI_IEnumMoniker(HLI_Enum): - def GetSubList(self): - ctx = pythoncom.CreateBindCtx() - ret = [] - for mon in util.Enumerator(self.myobject): - ret.append(HLI_IMoniker(mon, mon.GetDisplayName(ctx, None))) - return ret - - -class HLI_IMoniker(HLI_Interface): - def GetSubList(self): - ret = [] - ret.append(browser.MakeHLI(self.myobject.Hash(), "Hash Value")) - subenum = self.myobject.Enum(1) - ret.append(HLI_IEnumMoniker(subenum, "Sub Monikers")) - return ret - - -class HLIHeadingCategory(HLICOM): - "A tree heading for registered categories" - - def GetText(self): - return "Registered Categories" - - def GetSubList(self): - catinf = pythoncom.CoCreateInstance( - pythoncom.CLSID_StdComponentCategoriesMgr, - None, - pythoncom.CLSCTX_INPROC, - pythoncom.IID_ICatInformation, - ) - enum = util.Enumerator(catinf.EnumCategories()) - ret = [] - try: - for catid, lcid, desc in enum: - ret.append(HLICategory((catid, lcid, desc))) - except pythoncom.com_error: - # Registered categories occasionally seem to give spurious errors. - pass # Use what we already have. 
- return ret - - -class HLICategory(HLICOM): - "An actual Registered Category" - - def GetText(self): - desc = self.myobject[2] - if not desc: - desc = "(unnamed category)" - return desc - - def GetSubList(self): - win32ui.DoWaitCursor(1) - catid, lcid, desc = self.myobject - catinf = pythoncom.CoCreateInstance( - pythoncom.CLSID_StdComponentCategoriesMgr, - None, - pythoncom.CLSCTX_INPROC, - pythoncom.IID_ICatInformation, - ) - ret = [] - for clsid in util.Enumerator(catinf.EnumClassesOfCategories((catid,), ())): - ret.append(HLICLSID(clsid)) - win32ui.DoWaitCursor(0) - - return ret - - -class HLIHelpFile(HLICOM): - def CalculateIsExpandable(self): - return 0 - - def GetText(self): - import os - - fname, ctx = self.myobject - base = os.path.split(fname)[1] - return "Help reference in %s" % (base) - - def TakeDefaultAction(self): - fname, ctx = self.myobject - if ctx: - cmd = win32con.HELP_CONTEXT - else: - cmd = win32con.HELP_FINDER - win32api.WinHelp(win32ui.GetMainFrame().GetSafeHwnd(), fname, cmd, ctx) - - def GetBitmapColumn(self): - return 6 - - -class HLIRegisteredTypeLibrary(HLICOM): - def GetSubList(self): - import os - - clsidstr, versionStr = self.myobject - collected = [] - helpPath = "" - key = win32api.RegOpenKey( - win32con.HKEY_CLASSES_ROOT, "TypeLib\\%s\\%s" % (clsidstr, versionStr) - ) - win32ui.DoWaitCursor(1) - try: - num = 0 - while 1: - try: - subKey = win32api.RegEnumKey(key, num) - except win32api.error: - break - hSubKey = win32api.RegOpenKey(key, subKey) - try: - value, typ = win32api.RegQueryValueEx(hSubKey, None) - if typ == win32con.REG_EXPAND_SZ: - value = win32api.ExpandEnvironmentStrings(value) - except win32api.error: - value = "" - if subKey == "HELPDIR": - helpPath = value - elif subKey == "Flags": - flags = value - else: - try: - lcid = int(subKey) - lcidkey = win32api.RegOpenKey(key, subKey) - # Enumerate the platforms - lcidnum = 0 - while 1: - try: - platform = win32api.RegEnumKey(lcidkey, lcidnum) - except win32api.error: - 
break - try: - hplatform = win32api.RegOpenKey(lcidkey, platform) - fname, typ = win32api.RegQueryValueEx(hplatform, None) - if typ == win32con.REG_EXPAND_SZ: - fname = win32api.ExpandEnvironmentStrings(fname) - except win32api.error: - fname = "" - collected.append((lcid, platform, fname)) - lcidnum = lcidnum + 1 - win32api.RegCloseKey(lcidkey) - except ValueError: - pass - num = num + 1 - finally: - win32ui.DoWaitCursor(0) - win32api.RegCloseKey(key) - # Now, loop over my collected objects, adding a TypeLib and a HelpFile - ret = [] - # if helpPath: ret.append(browser.MakeHLI(helpPath, "Help Path")) - ret.append(HLICLSID(clsidstr)) - for lcid, platform, fname in collected: - extraDescs = [] - if platform != "win32": - extraDescs.append(platform) - if lcid: - extraDescs.append("locale=%s" % lcid) - extraDesc = "" - if extraDescs: - extraDesc = " (%s)" % ", ".join(extraDescs) - ret.append(HLITypeLib(fname, "Type Library" + extraDesc)) - ret.sort() - return ret - - -class HLITypeLibEntry(HLICOM): - def GetText(self): - tlb, index = self.myobject - name, doc, ctx, helpFile = tlb.GetDocumentation(index) - try: - typedesc = HLITypeKinds[tlb.GetTypeInfoType(index)][1] - except KeyError: - typedesc = "Unknown!" 
- return name + " - " + typedesc - - def GetSubList(self): - tlb, index = self.myobject - name, doc, ctx, helpFile = tlb.GetDocumentation(index) - ret = [] - if doc: - ret.append(browser.HLIDocString(doc, "Doc")) - if helpFile: - ret.append(HLIHelpFile((helpFile, ctx))) - return ret - - -class HLICoClass(HLITypeLibEntry): - def GetSubList(self): - ret = HLITypeLibEntry.GetSubList(self) - tlb, index = self.myobject - typeinfo = tlb.GetTypeInfo(index) - attr = typeinfo.GetTypeAttr() - for j in range(attr[8]): - flags = typeinfo.GetImplTypeFlags(j) - refType = typeinfo.GetRefTypeInfo(typeinfo.GetRefTypeOfImplType(j)) - refAttr = refType.GetTypeAttr() - ret.append( - browser.MakeHLI(refAttr[0], "Name=%s, Flags = %d" % (refAttr[0], flags)) - ) - return ret - - -class HLITypeLibMethod(HLITypeLibEntry): - def __init__(self, ob, name=None): - self.entry_type = "Method" - HLITypeLibEntry.__init__(self, ob, name) - - def GetSubList(self): - ret = HLITypeLibEntry.GetSubList(self) - tlb, index = self.myobject - typeinfo = tlb.GetTypeInfo(index) - attr = typeinfo.GetTypeAttr() - for i in range(attr[7]): - ret.append(HLITypeLibProperty((typeinfo, i))) - for i in range(attr[6]): - ret.append(HLITypeLibFunction((typeinfo, i))) - return ret - - -class HLITypeLibEnum(HLITypeLibEntry): - def __init__(self, myitem): - typelib, index = myitem - typeinfo = typelib.GetTypeInfo(index) - self.id = typeinfo.GetVarDesc(index)[0] - name = typeinfo.GetNames(self.id)[0] - HLITypeLibEntry.__init__(self, myitem, name) - - def GetText(self): - return self.name + " - Enum/Module" - - def GetSubList(self): - ret = [] - typelib, index = self.myobject - typeinfo = typelib.GetTypeInfo(index) - attr = typeinfo.GetTypeAttr() - for j in range(attr[7]): - vdesc = typeinfo.GetVarDesc(j) - name = typeinfo.GetNames(vdesc[0])[0] - ret.append(browser.MakeHLI(vdesc[1], name)) - return ret - - -class HLITypeLibProperty(HLICOM): - def __init__(self, myitem): - typeinfo, index = myitem - self.id = 
typeinfo.GetVarDesc(index)[0] - name = typeinfo.GetNames(self.id)[0] - HLICOM.__init__(self, myitem, name) - - def GetText(self): - return self.name + " - Property" - - def GetSubList(self): - ret = [] - typeinfo, index = self.myobject - names = typeinfo.GetNames(self.id) - if len(names) > 1: - ret.append(browser.MakeHLI(names[1:], "Named Params")) - vd = typeinfo.GetVarDesc(index) - ret.append(browser.MakeHLI(self.id, "Dispatch ID")) - ret.append(browser.MakeHLI(vd[1], "Value")) - ret.append(browser.MakeHLI(vd[2], "Elem Desc")) - ret.append(browser.MakeHLI(vd[3], "Var Flags")) - ret.append(browser.MakeHLI(vd[4], "Var Kind")) - return ret - - -class HLITypeLibFunction(HLICOM): - funckinds = { - pythoncom.FUNC_VIRTUAL: "Virtual", - pythoncom.FUNC_PUREVIRTUAL: "Pure Virtual", - pythoncom.FUNC_STATIC: "Static", - pythoncom.FUNC_DISPATCH: "Dispatch", - } - invokekinds = { - pythoncom.INVOKE_FUNC: "Function", - pythoncom.INVOKE_PROPERTYGET: "Property Get", - pythoncom.INVOKE_PROPERTYPUT: "Property Put", - pythoncom.INVOKE_PROPERTYPUTREF: "Property Put by reference", - } - funcflags = [ - (pythoncom.FUNCFLAG_FRESTRICTED, "Restricted"), - (pythoncom.FUNCFLAG_FSOURCE, "Source"), - (pythoncom.FUNCFLAG_FBINDABLE, "Bindable"), - (pythoncom.FUNCFLAG_FREQUESTEDIT, "Request Edit"), - (pythoncom.FUNCFLAG_FDISPLAYBIND, "Display Bind"), - (pythoncom.FUNCFLAG_FDEFAULTBIND, "Default Bind"), - (pythoncom.FUNCFLAG_FHIDDEN, "Hidden"), - (pythoncom.FUNCFLAG_FUSESGETLASTERROR, "Uses GetLastError"), - ] - - vartypes = { - pythoncom.VT_EMPTY: "Empty", - pythoncom.VT_NULL: "NULL", - pythoncom.VT_I2: "Integer 2", - pythoncom.VT_I4: "Integer 4", - pythoncom.VT_R4: "Real 4", - pythoncom.VT_R8: "Real 8", - pythoncom.VT_CY: "CY", - pythoncom.VT_DATE: "Date", - pythoncom.VT_BSTR: "String", - pythoncom.VT_DISPATCH: "IDispatch", - pythoncom.VT_ERROR: "Error", - pythoncom.VT_BOOL: "BOOL", - pythoncom.VT_VARIANT: "Variant", - pythoncom.VT_UNKNOWN: "IUnknown", - pythoncom.VT_DECIMAL: "Decimal", - 
pythoncom.VT_I1: "Integer 1", - pythoncom.VT_UI1: "Unsigned integer 1", - pythoncom.VT_UI2: "Unsigned integer 2", - pythoncom.VT_UI4: "Unsigned integer 4", - pythoncom.VT_I8: "Integer 8", - pythoncom.VT_UI8: "Unsigned integer 8", - pythoncom.VT_INT: "Integer", - pythoncom.VT_UINT: "Unsigned integer", - pythoncom.VT_VOID: "Void", - pythoncom.VT_HRESULT: "HRESULT", - pythoncom.VT_PTR: "Pointer", - pythoncom.VT_SAFEARRAY: "SafeArray", - pythoncom.VT_CARRAY: "C Array", - pythoncom.VT_USERDEFINED: "User Defined", - pythoncom.VT_LPSTR: "Pointer to string", - pythoncom.VT_LPWSTR: "Pointer to Wide String", - pythoncom.VT_FILETIME: "File time", - pythoncom.VT_BLOB: "Blob", - pythoncom.VT_STREAM: "IStream", - pythoncom.VT_STORAGE: "IStorage", - pythoncom.VT_STORED_OBJECT: "Stored object", - pythoncom.VT_STREAMED_OBJECT: "Streamed object", - pythoncom.VT_BLOB_OBJECT: "Blob object", - pythoncom.VT_CF: "CF", - pythoncom.VT_CLSID: "CLSID", - } - - type_flags = [ - (pythoncom.VT_VECTOR, "Vector"), - (pythoncom.VT_ARRAY, "Array"), - (pythoncom.VT_BYREF, "ByRef"), - (pythoncom.VT_RESERVED, "Reserved"), - ] - - def __init__(self, myitem): - typeinfo, index = myitem - self.id = typeinfo.GetFuncDesc(index)[0] - name = typeinfo.GetNames(self.id)[0] - HLICOM.__init__(self, myitem, name) - - def GetText(self): - return self.name + " - Function" - - def MakeReturnTypeName(self, typ): - justtyp = typ & pythoncom.VT_TYPEMASK - try: - typname = self.vartypes[justtyp] - except KeyError: - typname = "?Bad type?" 
- for flag, desc in self.type_flags: - if flag & typ: - typname = "%s(%s)" % (desc, typname) - return typname - - def MakeReturnType(self, returnTypeDesc): - if type(returnTypeDesc) == type(()): - first = returnTypeDesc[0] - result = self.MakeReturnType(first) - if first != pythoncom.VT_USERDEFINED: - result = result + " " + self.MakeReturnType(returnTypeDesc[1]) - return result - else: - return self.MakeReturnTypeName(returnTypeDesc) - - def GetSubList(self): - ret = [] - typeinfo, index = self.myobject - names = typeinfo.GetNames(self.id) - ret.append(browser.MakeHLI(self.id, "Dispatch ID")) - if len(names) > 1: - ret.append(browser.MakeHLI(", ".join(names[1:]), "Named Params")) - fd = typeinfo.GetFuncDesc(index) - if fd[1]: - ret.append(browser.MakeHLI(fd[1], "Possible result values")) - if fd[8]: - typ, flags, default = fd[8] - val = self.MakeReturnType(typ) - if flags: - val = "%s (Flags=%d, default=%s)" % (val, flags, default) - ret.append(browser.MakeHLI(val, "Return Type")) - - for argDesc in fd[2]: - typ, flags, default = argDesc - val = self.MakeReturnType(typ) - if flags: - val = "%s (Flags=%d)" % (val, flags) - if default is not None: - val = "%s (Default=%s)" % (val, default) - ret.append(browser.MakeHLI(val, "Argument")) - - try: - fkind = self.funckinds[fd[3]] - except KeyError: - fkind = "Unknown" - ret.append(browser.MakeHLI(fkind, "Function Kind")) - try: - ikind = self.invokekinds[fd[4]] - except KeyError: - ikind = "Unknown" - ret.append(browser.MakeHLI(ikind, "Invoke Kind")) - # 5 = call conv - # 5 = offset vtbl - ret.append(browser.MakeHLI(fd[6], "Number Optional Params")) - flagDescs = [] - for flag, desc in self.funcflags: - if flag & fd[9]: - flagDescs.append(desc) - if flagDescs: - ret.append(browser.MakeHLI(", ".join(flagDescs), "Function Flags")) - return ret - - -HLITypeKinds = { - pythoncom.TKIND_ENUM: (HLITypeLibEnum, "Enumeration"), - pythoncom.TKIND_RECORD: (HLITypeLibEntry, "Record"), - pythoncom.TKIND_MODULE: (HLITypeLibEnum, 
"Module"), - pythoncom.TKIND_INTERFACE: (HLITypeLibMethod, "Interface"), - pythoncom.TKIND_DISPATCH: (HLITypeLibMethod, "Dispatch"), - pythoncom.TKIND_COCLASS: (HLICoClass, "CoClass"), - pythoncom.TKIND_ALIAS: (HLITypeLibEntry, "Alias"), - pythoncom.TKIND_UNION: (HLITypeLibEntry, "Union"), -} - - -class HLITypeLib(HLICOM): - def GetSubList(self): - ret = [] - ret.append(browser.MakeHLI(self.myobject, "Filename")) - try: - tlb = pythoncom.LoadTypeLib(self.myobject) - except pythoncom.com_error: - return [browser.MakeHLI("%s can not be loaded" % self.myobject)] - - for i in range(tlb.GetTypeInfoCount()): - try: - ret.append(HLITypeKinds[tlb.GetTypeInfoType(i)][0]((tlb, i))) - except pythoncom.com_error: - ret.append(browser.MakeHLI("The type info can not be loaded!")) - ret.sort() - return ret - - -class HLIHeadingRegisterdTypeLibs(HLICOM): - "A tree heading for registered type libraries" - - def GetText(self): - return "Registered Type Libraries" - - def GetSubList(self): - # Explicit lookup in the registry. - ret = [] - key = win32api.RegOpenKey(win32con.HKEY_CLASSES_ROOT, "TypeLib") - win32ui.DoWaitCursor(1) - try: - num = 0 - while 1: - try: - keyName = win32api.RegEnumKey(key, num) - except win32api.error: - break - # Enumerate all version info - subKey = win32api.RegOpenKey(key, keyName) - name = None - try: - subNum = 0 - bestVersion = 0.0 - while 1: - try: - versionStr = win32api.RegEnumKey(subKey, subNum) - except win32api.error: - break - try: - versionFlt = float(versionStr) - except ValueError: - versionFlt = 0 # ???? 
- if versionFlt > bestVersion: - bestVersion = versionFlt - name = win32api.RegQueryValue(subKey, versionStr) - subNum = subNum + 1 - finally: - win32api.RegCloseKey(subKey) - if name is not None: - ret.append(HLIRegisteredTypeLibrary((keyName, versionStr), name)) - num = num + 1 - finally: - win32api.RegCloseKey(key) - win32ui.DoWaitCursor(0) - ret.sort() - return ret - - -def main(modal=True, mdi=False): - from pywin.tools import hierlist - - root = HLIRoot("COM Browser") - if mdi and "pywin.framework.app" in sys.modules: - # do it in a MDI window - browser.MakeTemplate() - browser.template.OpenObject(root) - else: - dlg = browser.dynamic_browser(root) - if modal: - dlg.DoModal() - else: - dlg.CreateWindow() - dlg.ShowWindow() - - -if __name__ == "__main__": - main(modal=win32api.GetConsoleTitle()) - - ni = pythoncom._GetInterfaceCount() - ng = pythoncom._GetGatewayCount() - if ni or ng: - print("Warning - exiting with %d/%d objects alive" % (ni, ng)) diff --git a/lib/win32com/client/connect.py b/lib/win32com/client/connect.py deleted file mode 100644 index 24788204..00000000 --- a/lib/win32com/client/connect.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Utilities for working with Connections""" -import pythoncom -import win32com.server.util - - -class SimpleConnection: - "A simple, single connection object" - - def __init__(self, coInstance=None, eventInstance=None, eventCLSID=None, debug=0): - self.cp = None - self.cookie = None - self.debug = debug - if not coInstance is None: - self.Connect(coInstance, eventInstance, eventCLSID) - - def __del__(self): - try: - self.Disconnect() - except pythoncom.error: - # Ignore disconnection as we are torn down. 
- pass - - def _wrap(self, obj): - useDispatcher = None - if self.debug: - from win32com.server import dispatcher - - useDispatcher = dispatcher.DefaultDebugDispatcher - return win32com.server.util.wrap(obj, useDispatcher=useDispatcher) - - def Connect(self, coInstance, eventInstance, eventCLSID=None): - try: - oleobj = coInstance._oleobj_ - except AttributeError: - oleobj = coInstance - cpc = oleobj.QueryInterface(pythoncom.IID_IConnectionPointContainer) - if eventCLSID is None: - eventCLSID = eventInstance.CLSID - comEventInstance = self._wrap(eventInstance) - self.cp = cpc.FindConnectionPoint(eventCLSID) - self.cookie = self.cp.Advise(comEventInstance) - - def Disconnect(self): - if not self.cp is None: - if self.cookie: - self.cp.Unadvise(self.cookie) - self.cookie = None - self.cp = None diff --git a/lib/win32com/client/dynamic.py b/lib/win32com/client/dynamic.py deleted file mode 100644 index 449ad928..00000000 --- a/lib/win32com/client/dynamic.py +++ /dev/null @@ -1,708 +0,0 @@ -"""Support for dynamic COM client support. - -Introduction - Dynamic COM client support is the ability to use a COM server without - prior knowledge of the server. This can be used to talk to almost all - COM servers, including much of MS Office. - - In general, you should not use this module directly - see below. - -Example - >>> import win32com.client - >>> xl = win32com.client.Dispatch("Excel.Application") - # The line above invokes the functionality of this class. - # xl is now an object we can use to talk to Excel. - >>> xl.Visible = 1 # The Excel window becomes visible. - -""" -import traceback -import types - -import pythoncom # Needed as code we eval() references it. -import win32com.client -import winerror -from pywintypes import IIDType - -from . import build - -debugging = 0 # General debugging -debugging_attr = 0 # Debugging dynamic attribute lookups. 
- -LCID = 0x0 - -# These errors generally mean the property or method exists, -# but can't be used in this context - eg, property instead of a method, etc. -# Used to determine if we have a real error or not. -ERRORS_BAD_CONTEXT = [ - winerror.DISP_E_MEMBERNOTFOUND, - winerror.DISP_E_BADPARAMCOUNT, - winerror.DISP_E_PARAMNOTOPTIONAL, - winerror.DISP_E_TYPEMISMATCH, - winerror.E_INVALIDARG, -] - -ALL_INVOKE_TYPES = [ - pythoncom.INVOKE_PROPERTYGET, - pythoncom.INVOKE_PROPERTYPUT, - pythoncom.INVOKE_PROPERTYPUTREF, - pythoncom.INVOKE_FUNC, -] - - -def debug_print(*args): - if debugging: - for arg in args: - print(arg, end=" ") - print() - - -def debug_attr_print(*args): - if debugging_attr: - for arg in args: - print(arg, end=" ") - print() - - -def MakeMethod(func, inst, cls): - return types.MethodType(func, inst) - - -# get the type objects for IDispatch and IUnknown -PyIDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch] -PyIUnknownType = pythoncom.TypeIIDs[pythoncom.IID_IUnknown] - -_GoodDispatchTypes = (str, IIDType) -_defaultDispatchItem = build.DispatchItem - - -def _GetGoodDispatch(IDispatch, clsctx=pythoncom.CLSCTX_SERVER): - # quick return for most common case - if isinstance(IDispatch, PyIDispatchType): - return IDispatch - if isinstance(IDispatch, _GoodDispatchTypes): - try: - IDispatch = pythoncom.connect(IDispatch) - except pythoncom.ole_error: - IDispatch = pythoncom.CoCreateInstance( - IDispatch, None, clsctx, pythoncom.IID_IDispatch - ) - else: - # may already be a wrapped class. - IDispatch = getattr(IDispatch, "_oleobj_", IDispatch) - return IDispatch - - -def _GetGoodDispatchAndUserName(IDispatch, userName, clsctx): - # Get a dispatch object, and a 'user name' (ie, the name as - # displayed to the user in repr() etc. - if userName is None: - if isinstance(IDispatch, str): - userName = IDispatch - ## ??? else userName remains None ??? 
- else: - userName = str(userName) - return (_GetGoodDispatch(IDispatch, clsctx), userName) - - -def _GetDescInvokeType(entry, invoke_type): - # determine the wFlags argument passed as input to IDispatch::Invoke - # Only ever called by __getattr__ and __setattr__ from dynamic objects! - # * `entry` is a MapEntry with whatever typeinfo we have about the property we are getting/setting. - # * `invoke_type` is either INVOKE_PROPERTYGET | INVOKE_PROPERTYSET and really just - # means "called by __getattr__" or "called by __setattr__" - if not entry or not entry.desc: - return invoke_type - - if entry.desc.desckind == pythoncom.DESCKIND_VARDESC: - return invoke_type - - # So it's a FUNCDESC - just use what it specifies. - return entry.desc.invkind - - -def Dispatch( - IDispatch, - userName=None, - createClass=None, - typeinfo=None, - UnicodeToString=None, - clsctx=pythoncom.CLSCTX_SERVER, -): - assert UnicodeToString is None, "this is deprecated and will go away" - IDispatch, userName = _GetGoodDispatchAndUserName(IDispatch, userName, clsctx) - if createClass is None: - createClass = CDispatch - lazydata = None - try: - if typeinfo is None: - typeinfo = IDispatch.GetTypeInfo() - if typeinfo is not None: - try: - # try for a typecomp - typecomp = typeinfo.GetTypeComp() - lazydata = typeinfo, typecomp - except pythoncom.com_error: - pass - except pythoncom.com_error: - typeinfo = None - olerepr = MakeOleRepr(IDispatch, typeinfo, lazydata) - return createClass(IDispatch, olerepr, userName, lazydata=lazydata) - - -def MakeOleRepr(IDispatch, typeinfo, typecomp): - olerepr = None - if typeinfo is not None: - try: - attr = typeinfo.GetTypeAttr() - # If the type info is a special DUAL interface, magically turn it into - # a DISPATCH typeinfo. - if ( - attr[5] == pythoncom.TKIND_INTERFACE - and attr[11] & pythoncom.TYPEFLAG_FDUAL - ): - # Get corresponding Disp interface; - # -1 is a special value which does this for us. 
- href = typeinfo.GetRefTypeOfImplType(-1) - typeinfo = typeinfo.GetRefTypeInfo(href) - attr = typeinfo.GetTypeAttr() - if typecomp is None: - olerepr = build.DispatchItem(typeinfo, attr, None, 0) - else: - olerepr = build.LazyDispatchItem(attr, None) - except pythoncom.ole_error: - pass - if olerepr is None: - olerepr = build.DispatchItem() - return olerepr - - -def DumbDispatch( - IDispatch, - userName=None, - createClass=None, - UnicodeToString=None, - clsctx=pythoncom.CLSCTX_SERVER, -): - "Dispatch with no type info" - assert UnicodeToString is None, "this is deprecated and will go away" - IDispatch, userName = _GetGoodDispatchAndUserName(IDispatch, userName, clsctx) - if createClass is None: - createClass = CDispatch - return createClass(IDispatch, build.DispatchItem(), userName) - - -class CDispatch: - def __init__( - self, IDispatch, olerepr, userName=None, UnicodeToString=None, lazydata=None - ): - assert UnicodeToString is None, "this is deprecated and will go away" - if userName is None: - userName = "" - self.__dict__["_oleobj_"] = IDispatch - self.__dict__["_username_"] = userName - self.__dict__["_olerepr_"] = olerepr - self.__dict__["_mapCachedItems_"] = {} - self.__dict__["_builtMethods_"] = {} - self.__dict__["_enum_"] = None - self.__dict__["_unicode_to_string_"] = None - self.__dict__["_lazydata_"] = lazydata - - def __call__(self, *args): - "Provide 'default dispatch' COM functionality - allow instance to be called" - if self._olerepr_.defaultDispatchName: - invkind, dispid = self._find_dispatch_type_( - self._olerepr_.defaultDispatchName - ) - else: - invkind, dispid = ( - pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET, - pythoncom.DISPID_VALUE, - ) - if invkind is not None: - allArgs = (dispid, LCID, invkind, 1) + args - return self._get_good_object_( - self._oleobj_.Invoke(*allArgs), self._olerepr_.defaultDispatchName, None - ) - raise TypeError("This dispatch object does not define a default method") - - def __bool__(self): - 
return True # ie "if object:" should always be "true" - without this, __len__ is tried. - # _Possibly_ want to defer to __len__ if available, but Im not sure this is - # desirable??? - - def __repr__(self): - return "" % (self._username_) - - def __str__(self): - # __str__ is used when the user does "print object", so we gracefully - # fall back to the __repr__ if the object has no default method. - try: - return str(self.__call__()) - except pythoncom.com_error as details: - if details.hresult not in ERRORS_BAD_CONTEXT: - raise - return self.__repr__() - - def __dir__(self): - lst = list(self.__dict__.keys()) + dir(self.__class__) + self._dir_ole_() - try: - lst += [p.Name for p in self.Properties_] - except AttributeError: - pass - return list(set(lst)) - - def _dir_ole_(self): - items_dict = {} - for iTI in range(0, self._oleobj_.GetTypeInfoCount()): - typeInfo = self._oleobj_.GetTypeInfo(iTI) - self._UpdateWithITypeInfo_(items_dict, typeInfo) - return list(items_dict.keys()) - - def _UpdateWithITypeInfo_(self, items_dict, typeInfo): - typeInfos = [typeInfo] - # suppress IDispatch and IUnknown methods - inspectedIIDs = {pythoncom.IID_IDispatch: None} - - while len(typeInfos) > 0: - typeInfo = typeInfos.pop() - typeAttr = typeInfo.GetTypeAttr() - - if typeAttr.iid not in inspectedIIDs: - inspectedIIDs[typeAttr.iid] = None - for iFun in range(0, typeAttr.cFuncs): - funDesc = typeInfo.GetFuncDesc(iFun) - funName = typeInfo.GetNames(funDesc.memid)[0] - if funName not in items_dict: - items_dict[funName] = None - - # Inspect the type info of all implemented types - # E.g. IShellDispatch5 implements IShellDispatch4 which implements IShellDispatch3 ... - for iImplType in range(0, typeAttr.cImplTypes): - iRefType = typeInfo.GetRefTypeOfImplType(iImplType) - refTypeInfo = typeInfo.GetRefTypeInfo(iRefType) - typeInfos.append(refTypeInfo) - - # Delegate comparison to the oleobjs, as they know how to do identity. 
- def __eq__(self, other): - other = getattr(other, "_oleobj_", other) - return self._oleobj_ == other - - def __ne__(self, other): - other = getattr(other, "_oleobj_", other) - return self._oleobj_ != other - - def __int__(self): - return int(self.__call__()) - - def __len__(self): - invkind, dispid = self._find_dispatch_type_("Count") - if invkind: - return self._oleobj_.Invoke(dispid, LCID, invkind, 1) - raise TypeError("This dispatch object does not define a Count method") - - def _NewEnum(self): - try: - invkind = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET - enum = self._oleobj_.InvokeTypes( - pythoncom.DISPID_NEWENUM, LCID, invkind, (13, 10), () - ) - except pythoncom.com_error: - return None # no enumerator for this object. - from . import util - - return util.WrapEnum(enum, None) - - def __getitem__(self, index): # syver modified - # Improved __getitem__ courtesy Syver Enstad - # Must check _NewEnum before Item, to ensure b/w compat. - if isinstance(index, int): - if self.__dict__["_enum_"] is None: - self.__dict__["_enum_"] = self._NewEnum() - if self.__dict__["_enum_"] is not None: - return self._get_good_object_(self._enum_.__getitem__(index)) - # See if we have an "Item" method/property we can use (goes hand in hand with Count() above!) - invkind, dispid = self._find_dispatch_type_("Item") - if invkind is not None: - return self._get_good_object_( - self._oleobj_.Invoke(dispid, LCID, invkind, 1, index) - ) - raise TypeError("This object does not support enumeration") - - def __setitem__(self, index, *args): - # XXX - todo - We should support calling Item() here too! 
- # print "__setitem__ with", index, args - if self._olerepr_.defaultDispatchName: - invkind, dispid = self._find_dispatch_type_( - self._olerepr_.defaultDispatchName - ) - else: - invkind, dispid = ( - pythoncom.DISPATCH_PROPERTYPUT | pythoncom.DISPATCH_PROPERTYPUTREF, - pythoncom.DISPID_VALUE, - ) - if invkind is not None: - allArgs = (dispid, LCID, invkind, 0, index) + args - return self._get_good_object_( - self._oleobj_.Invoke(*allArgs), self._olerepr_.defaultDispatchName, None - ) - raise TypeError("This dispatch object does not define a default method") - - def _find_dispatch_type_(self, methodName): - if methodName in self._olerepr_.mapFuncs: - item = self._olerepr_.mapFuncs[methodName] - return item.desc[4], item.dispid - - if methodName in self._olerepr_.propMapGet: - item = self._olerepr_.propMapGet[methodName] - return item.desc[4], item.dispid - - try: - dispid = self._oleobj_.GetIDsOfNames(0, methodName) - except: ### what error? - return None, None - return pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET, dispid - - def _ApplyTypes_(self, dispid, wFlags, retType, argTypes, user, resultCLSID, *args): - result = self._oleobj_.InvokeTypes( - *(dispid, LCID, wFlags, retType, argTypes) + args - ) - return self._get_good_object_(result, user, resultCLSID) - - def _wrap_dispatch_( - self, ob, userName=None, returnCLSID=None, UnicodeToString=None - ): - # Given a dispatch object, wrap it in a class - assert UnicodeToString is None, "this is deprecated and will go away" - return Dispatch(ob, userName) - - def _get_good_single_object_(self, ob, userName=None, ReturnCLSID=None): - if isinstance(ob, PyIDispatchType): - # make a new instance of (probably this) class. - return self._wrap_dispatch_(ob, userName, ReturnCLSID) - if isinstance(ob, PyIUnknownType): - try: - ob = ob.QueryInterface(pythoncom.IID_IDispatch) - except pythoncom.com_error: - # It is an IUnknown, but not an IDispatch, so just let it through. 
- return ob - return self._wrap_dispatch_(ob, userName, ReturnCLSID) - return ob - - def _get_good_object_(self, ob, userName=None, ReturnCLSID=None): - """Given an object (usually the retval from a method), make it a good object to return. - Basically checks if it is a COM object, and wraps it up. - Also handles the fact that a retval may be a tuple of retvals""" - if ob is None: # Quick exit! - return None - elif isinstance(ob, tuple): - return tuple( - map( - lambda o, s=self, oun=userName, rc=ReturnCLSID: s._get_good_single_object_( - o, oun, rc - ), - ob, - ) - ) - else: - return self._get_good_single_object_(ob) - - def _make_method_(self, name): - "Make a method object - Assumes in olerepr funcmap" - methodName = build.MakePublicAttributeName(name) # translate keywords etc. - methodCodeList = self._olerepr_.MakeFuncMethod( - self._olerepr_.mapFuncs[name], methodName, 0 - ) - methodCode = "\n".join(methodCodeList) - try: - # print "Method code for %s is:\n" % self._username_, methodCode - # self._print_details_() - codeObject = compile(methodCode, "" % self._username_, "exec") - # Exec the code object - tempNameSpace = {} - # "Dispatch" in the exec'd code is win32com.client.Dispatch, not ours. - globNameSpace = globals().copy() - globNameSpace["Dispatch"] = win32com.client.Dispatch - exec( - codeObject, globNameSpace, tempNameSpace - ) # self.__dict__, self.__dict__ - name = methodName - # Save the function in map. 
- fn = self._builtMethods_[name] = tempNameSpace[name] - newMeth = MakeMethod(fn, self, self.__class__) - return newMeth - except: - debug_print("Error building OLE definition for code ", methodCode) - traceback.print_exc() - return None - - def _Release_(self): - """Cleanup object - like a close - to force cleanup when you dont - want to rely on Python's reference counting.""" - for childCont in self._mapCachedItems_.values(): - childCont._Release_() - self._mapCachedItems_ = {} - if self._oleobj_: - self._oleobj_.Release() - self.__dict__["_oleobj_"] = None - if self._olerepr_: - self.__dict__["_olerepr_"] = None - self._enum_ = None - - def _proc_(self, name, *args): - """Call the named method as a procedure, rather than function. - Mainly used by Word.Basic, which whinges about such things.""" - try: - item = self._olerepr_.mapFuncs[name] - dispId = item.dispid - return self._get_good_object_( - self._oleobj_.Invoke(*(dispId, LCID, item.desc[4], 0) + (args)) - ) - except KeyError: - raise AttributeError(name) - - def _print_details_(self): - "Debug routine - dumps what it knows about an object." - print("AxDispatch container", self._username_) - try: - print("Methods:") - for method in self._olerepr_.mapFuncs.keys(): - print("\t", method) - print("Props:") - for prop, entry in self._olerepr_.propMap.items(): - print("\t%s = 0x%x - %s" % (prop, entry.dispid, repr(entry))) - print("Get Props:") - for prop, entry in self._olerepr_.propMapGet.items(): - print("\t%s = 0x%x - %s" % (prop, entry.dispid, repr(entry))) - print("Put Props:") - for prop, entry in self._olerepr_.propMapPut.items(): - print("\t%s = 0x%x - %s" % (prop, entry.dispid, repr(entry))) - except: - traceback.print_exc() - - def __LazyMap__(self, attr): - try: - if self._LazyAddAttr_(attr): - debug_attr_print( - "%s.__LazyMap__(%s) added something" % (self._username_, attr) - ) - return 1 - except AttributeError: - return 0 - - # Using the typecomp, lazily create a new attribute definition. 
- def _LazyAddAttr_(self, attr): - if self._lazydata_ is None: - return 0 - res = 0 - typeinfo, typecomp = self._lazydata_ - olerepr = self._olerepr_ - # We need to explicitly check each invoke type individually - simply - # specifying '0' will bind to "any member", which may not be the one - # we are actually after (ie, we may be after prop_get, but returned - # the info for the prop_put.) - for i in ALL_INVOKE_TYPES: - try: - x, t = typecomp.Bind(attr, i) - # Support 'Get' and 'Set' properties - see - # bug 1587023 - if x == 0 and attr[:3] in ("Set", "Get"): - x, t = typecomp.Bind(attr[3:], i) - if x == pythoncom.DESCKIND_FUNCDESC: # it's a FUNCDESC - r = olerepr._AddFunc_(typeinfo, t, 0) - elif x == pythoncom.DESCKIND_VARDESC: # it's a VARDESC - r = olerepr._AddVar_(typeinfo, t, 0) - else: # not found or TYPEDESC/IMPLICITAPP - r = None - if not r is None: - key, map = r[0], r[1] - item = map[key] - if map == olerepr.propMapPut: - olerepr._propMapPutCheck_(key, item) - elif map == olerepr.propMapGet: - olerepr._propMapGetCheck_(key, item) - res = 1 - except: - pass - return res - - def _FlagAsMethod(self, *methodNames): - """Flag these attribute names as being methods. - Some objects do not correctly differentiate methods and - properties, leading to problems when calling these methods. - - Specifically, trying to say: ob.SomeFunc() - may yield an exception "None object is not callable" - In this case, an attempt to fetch the *property* has worked - and returned None, rather than indicating it is really a method. - Calling: ob._FlagAsMethod("SomeFunc") - should then allow this to work. 
- """ - for name in methodNames: - details = build.MapEntry(self.__AttrToID__(name), (name,)) - self._olerepr_.mapFuncs[name] = details - - def __AttrToID__(self, attr): - debug_attr_print( - "Calling GetIDsOfNames for property %s in Dispatch container %s" - % (attr, self._username_) - ) - return self._oleobj_.GetIDsOfNames(0, attr) - - def __getattr__(self, attr): - if attr == "__iter__": - # We can't handle this as a normal method, as if the attribute - # exists, then it must return an iterable object. - try: - invkind = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET - enum = self._oleobj_.InvokeTypes( - pythoncom.DISPID_NEWENUM, LCID, invkind, (13, 10), () - ) - except pythoncom.com_error: - raise AttributeError("This object can not function as an iterator") - - # We must return a callable object. - class Factory: - def __init__(self, ob): - self.ob = ob - - def __call__(self): - import win32com.client.util - - return win32com.client.util.Iterator(self.ob) - - return Factory(enum) - - if attr.startswith("_") and attr.endswith("_"): # Fast-track. - raise AttributeError(attr) - # If a known method, create new instance and return. - try: - return MakeMethod(self._builtMethods_[attr], self, self.__class__) - except KeyError: - pass - # XXX - Note that we current are case sensitive in the method. - # debug_attr_print("GetAttr called for %s on DispatchContainer %s" % (attr,self._username_)) - # First check if it is in the method map. Note that an actual method - # must not yet exist, (otherwise we would not be here). This - # means we create the actual method object - which also means - # this code will never be asked for that method name again. - if attr in self._olerepr_.mapFuncs: - return self._make_method_(attr) - - # Delegate to property maps/cached items - retEntry = None - if self._olerepr_ and self._oleobj_: - # first check general property map, then specific "put" map. 
- retEntry = self._olerepr_.propMap.get(attr) - if retEntry is None: - retEntry = self._olerepr_.propMapGet.get(attr) - # Not found so far - See what COM says. - if retEntry is None: - try: - if self.__LazyMap__(attr): - if attr in self._olerepr_.mapFuncs: - return self._make_method_(attr) - retEntry = self._olerepr_.propMap.get(attr) - if retEntry is None: - retEntry = self._olerepr_.propMapGet.get(attr) - if retEntry is None: - retEntry = build.MapEntry(self.__AttrToID__(attr), (attr,)) - except pythoncom.ole_error: - pass # No prop by that name - retEntry remains None. - - if retEntry is not None: # see if in my cache - try: - ret = self._mapCachedItems_[retEntry.dispid] - debug_attr_print("Cached items has attribute!", ret) - return ret - except (KeyError, AttributeError): - debug_attr_print("Attribute %s not in cache" % attr) - - # If we are still here, and have a retEntry, get the OLE item - if retEntry is not None: - invoke_type = _GetDescInvokeType(retEntry, pythoncom.INVOKE_PROPERTYGET) - debug_attr_print( - "Getting property Id 0x%x from OLE object" % retEntry.dispid - ) - try: - ret = self._oleobj_.Invoke(retEntry.dispid, 0, invoke_type, 1) - except pythoncom.com_error as details: - if details.hresult in ERRORS_BAD_CONTEXT: - # May be a method. - self._olerepr_.mapFuncs[attr] = retEntry - return self._make_method_(attr) - raise - debug_attr_print("OLE returned ", ret) - return self._get_good_object_(ret) - - # no where else to look. - raise AttributeError("%s.%s" % (self._username_, attr)) - - def __setattr__(self, attr, value): - if ( - attr in self.__dict__ - ): # Fast-track - if already in our dict, just make the assignment. - # XXX - should maybe check method map - if someone assigns to a method, - # it could mean something special (not sure what, tho!) - self.__dict__[attr] = value - return - # Allow property assignment. 
- debug_attr_print( - "SetAttr called for %s.%s=%s on DispatchContainer" - % (self._username_, attr, repr(value)) - ) - - if self._olerepr_: - # Check the "general" property map. - if attr in self._olerepr_.propMap: - entry = self._olerepr_.propMap[attr] - invoke_type = _GetDescInvokeType(entry, pythoncom.INVOKE_PROPERTYPUT) - self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value) - return - # Check the specific "put" map. - if attr in self._olerepr_.propMapPut: - entry = self._olerepr_.propMapPut[attr] - invoke_type = _GetDescInvokeType(entry, pythoncom.INVOKE_PROPERTYPUT) - self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value) - return - - # Try the OLE Object - if self._oleobj_: - if self.__LazyMap__(attr): - # Check the "general" property map. - if attr in self._olerepr_.propMap: - entry = self._olerepr_.propMap[attr] - invoke_type = _GetDescInvokeType( - entry, pythoncom.INVOKE_PROPERTYPUT - ) - self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value) - return - # Check the specific "put" map. - if attr in self._olerepr_.propMapPut: - entry = self._olerepr_.propMapPut[attr] - invoke_type = _GetDescInvokeType( - entry, pythoncom.INVOKE_PROPERTYPUT - ) - self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value) - return - try: - entry = build.MapEntry(self.__AttrToID__(attr), (attr,)) - except pythoncom.com_error: - # No attribute of that name - entry = None - if entry is not None: - try: - invoke_type = _GetDescInvokeType( - entry, pythoncom.INVOKE_PROPERTYPUT - ) - self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value) - self._olerepr_.propMap[attr] = entry - debug_attr_print( - "__setattr__ property %s (id=0x%x) in Dispatch container %s" - % (attr, entry.dispid, self._username_) - ) - return - except pythoncom.com_error: - pass - raise AttributeError( - "Property '%s.%s' can not be set." 
% (self._username_, attr) - ) diff --git a/lib/win32com/client/gencache.py b/lib/win32com/client/gencache.py deleted file mode 100644 index 99e9c6cb..00000000 --- a/lib/win32com/client/gencache.py +++ /dev/null @@ -1,799 +0,0 @@ -"""Manages the cache of generated Python code. - -Description - This file manages the cache of generated Python code. When run from the - command line, it also provides a number of options for managing that cache. - -Implementation - Each typelib is generated into a filename of format "{guid}x{lcid}x{major}x{minor}.py" - - An external persistant dictionary maps from all known IIDs in all known type libraries - to the type library itself. - - Thus, whenever Python code knows the IID of an object, it can find the IID, LCID and version of - the type library which supports it. Given this information, it can find the Python module - with the support. - - If necessary, this support can be generated on the fly. - -Hacks, to do, etc - Currently just uses a pickled dictionary, but should used some sort of indexed file. - Maybe an OLE2 compound file, or a bsddb file? -""" -import glob -import os -import sys -from importlib import reload - -import pythoncom -import pywintypes -import win32com -import win32com.client - -from . import CLSIDToClass - -bForDemandDefault = 0 # Default value of bForDemand - toggle this to change the world - see also makepy.py - -# The global dictionary -clsidToTypelib = {} - -# If we have a different version of the typelib generated, this -# maps the "requested version" to the "generated version". -versionRedirectMap = {} - -# There is no reason we *must* be readonly in a .zip, but we are now, -# Rather than check for ".zip" or other tricks, PEP302 defines -# a "__loader__" attribute, so we use that. 
-# (Later, it may become necessary to check if the __loader__ can update files, -# as a .zip loader potentially could - but punt all that until a need arises) -is_readonly = is_zip = hasattr(win32com, "__loader__") and hasattr( - win32com.__loader__, "archive" -) - -# A dictionary of ITypeLibrary objects for demand generation explicitly handed to us -# Keyed by usual clsid, lcid, major, minor -demandGeneratedTypeLibraries = {} - -import pickle as pickle - - -def __init__(): - # Initialize the module. Called once explicitly at module import below. - try: - _LoadDicts() - except IOError: - Rebuild() - - -pickleVersion = 1 - - -def _SaveDicts(): - if is_readonly: - raise RuntimeError( - "Trying to write to a readonly gencache ('%s')!" % win32com.__gen_path__ - ) - f = open(os.path.join(GetGeneratePath(), "dicts.dat"), "wb") - try: - p = pickle.Pickler(f) - p.dump(pickleVersion) - p.dump(clsidToTypelib) - finally: - f.close() - - -def _LoadDicts(): - # Load the dictionary from a .zip file if that is where we live. - if is_zip: - import io as io - - loader = win32com.__loader__ - arc_path = loader.archive - dicts_path = os.path.join(win32com.__gen_path__, "dicts.dat") - if dicts_path.startswith(arc_path): - dicts_path = dicts_path[len(arc_path) + 1 :] - else: - # Hm. See below. - return - try: - data = loader.get_data(dicts_path) - except AttributeError: - # The __loader__ has no get_data method. See below. - return - except IOError: - # Our gencache is in a .zip file (and almost certainly readonly) - # but no dicts file. That actually needn't be fatal for a frozen - # application. Assuming they call "EnsureModule" with the same - # typelib IDs they have been frozen with, that EnsureModule will - # correctly re-build the dicts on the fly. 
However, objects that - # rely on the gencache but have not done an EnsureModule will - # fail (but their apps are likely to fail running from source - # with a clean gencache anyway, as then they would be getting - # Dynamic objects until the cache is built - so the best answer - # for these apps is to call EnsureModule, rather than freezing - # the dict) - return - f = io.BytesIO(data) - else: - # NOTE: IOError on file open must be caught by caller. - f = open(os.path.join(win32com.__gen_path__, "dicts.dat"), "rb") - try: - p = pickle.Unpickler(f) - version = p.load() - global clsidToTypelib - clsidToTypelib = p.load() - versionRedirectMap.clear() - finally: - f.close() - - -def GetGeneratedFileName(clsid, lcid, major, minor): - """Given the clsid, lcid, major and minor for a type lib, return - the file name (no extension) providing this support. - """ - return str(clsid).upper()[1:-1] + "x%sx%sx%s" % (lcid, major, minor) - - -def SplitGeneratedFileName(fname): - """Reverse of GetGeneratedFileName()""" - return tuple(fname.split("x", 4)) - - -def GetGeneratePath(): - """Returns the name of the path to generate to. - Checks the directory is OK. - """ - assert not is_readonly, "Why do you want the genpath for a readonly store?" - try: - os.makedirs(win32com.__gen_path__) - # os.mkdir(win32com.__gen_path__) - except os.error: - pass - try: - fname = os.path.join(win32com.__gen_path__, "__init__.py") - os.stat(fname) - except os.error: - f = open(fname, "w") - f.write( - "# Generated file - this directory may be deleted to reset the COM cache...\n" - ) - f.write("import win32com\n") - f.write( - "if __path__[:-1] != win32com.__gen_path__: __path__.append(win32com.__gen_path__)\n" - ) - f.close() - - return win32com.__gen_path__ - - -# -# The helpers for win32com.client.Dispatch and OCX clients. 
-# -def GetClassForProgID(progid): - """Get a Python class for a Program ID - - Given a Program ID, return a Python class which wraps the COM object - - Returns the Python class, or None if no module is available. - - Params - progid -- A COM ProgramID or IID (eg, "Word.Application") - """ - clsid = pywintypes.IID(progid) # This auto-converts named to IDs. - return GetClassForCLSID(clsid) - - -def GetClassForCLSID(clsid): - """Get a Python class for a CLSID - - Given a CLSID, return a Python class which wraps the COM object - - Returns the Python class, or None if no module is available. - - Params - clsid -- A COM CLSID (or string repr of one) - """ - # first, take a short-cut - we may already have generated support ready-to-roll. - clsid = str(clsid) - if CLSIDToClass.HasClass(clsid): - return CLSIDToClass.GetClass(clsid) - mod = GetModuleForCLSID(clsid) - if mod is None: - return None - try: - return CLSIDToClass.GetClass(clsid) - except KeyError: - return None - - -def GetModuleForProgID(progid): - """Get a Python module for a Program ID - - Given a Program ID, return a Python module which contains the - class which wraps the COM object. - - Returns the Python module, or None if no module is available. - - Params - progid -- A COM ProgramID or IID (eg, "Word.Application") - """ - try: - iid = pywintypes.IID(progid) - except pywintypes.com_error: - return None - return GetModuleForCLSID(iid) - - -def GetModuleForCLSID(clsid): - """Get a Python module for a CLSID - - Given a CLSID, return a Python module which contains the - class which wraps the COM object. - - Returns the Python module, or None if no module is available. 
- - Params - progid -- A COM CLSID (ie, not the description) - """ - clsid_str = str(clsid) - try: - typelibCLSID, lcid, major, minor = clsidToTypelib[clsid_str] - except KeyError: - return None - - try: - mod = GetModuleForTypelib(typelibCLSID, lcid, major, minor) - except ImportError: - mod = None - if mod is not None: - sub_mod = mod.CLSIDToPackageMap.get(clsid_str) - if sub_mod is None: - sub_mod = mod.VTablesToPackageMap.get(clsid_str) - if sub_mod is not None: - sub_mod_name = mod.__name__ + "." + sub_mod - try: - __import__(sub_mod_name) - except ImportError: - info = typelibCLSID, lcid, major, minor - # Force the generation. If this typelibrary has explicitly been added, - # use it (it may not be registered, causing a lookup by clsid to fail) - if info in demandGeneratedTypeLibraries: - info = demandGeneratedTypeLibraries[info] - from . import makepy - - makepy.GenerateChildFromTypeLibSpec(sub_mod, info) - # Generate does an import... - mod = sys.modules[sub_mod_name] - return mod - - -def GetModuleForTypelib(typelibCLSID, lcid, major, minor): - """Get a Python module for a type library ID - - Given the CLSID of a typelibrary, return an imported Python module, - else None - - Params - typelibCLSID -- IID of the type library. - major -- Integer major version. - minor -- Integer minor version - lcid -- Integer LCID for the library. - """ - modName = GetGeneratedFileName(typelibCLSID, lcid, major, minor) - mod = _GetModule(modName) - # If the import worked, it doesn't mean we have actually added this - # module to our cache though - check that here. - if "_in_gencache_" not in mod.__dict__: - AddModuleToCache(typelibCLSID, lcid, major, minor) - assert "_in_gencache_" in mod.__dict__ - return mod - - -def MakeModuleForTypelib( - typelibCLSID, - lcid, - major, - minor, - progressInstance=None, - bForDemand=bForDemandDefault, - bBuildHidden=1, -): - """Generate support for a type library. 
- - Given the IID, LCID and version information for a type library, generate - and import the necessary support files. - - Returns the Python module. No exceptions are caught. - - Params - typelibCLSID -- IID of the type library. - major -- Integer major version. - minor -- Integer minor version. - lcid -- Integer LCID for the library. - progressInstance -- Instance to use as progress indicator, or None to - use the GUI progress bar. - """ - from . import makepy - - makepy.GenerateFromTypeLibSpec( - (typelibCLSID, lcid, major, minor), - progressInstance=progressInstance, - bForDemand=bForDemand, - bBuildHidden=bBuildHidden, - ) - return GetModuleForTypelib(typelibCLSID, lcid, major, minor) - - -def MakeModuleForTypelibInterface( - typelib_ob, progressInstance=None, bForDemand=bForDemandDefault, bBuildHidden=1 -): - """Generate support for a type library. - - Given a PyITypeLib interface generate and import the necessary support files. This is useful - for getting makepy support for a typelibrary that is not registered - the caller can locate - and load the type library itself, rather than relying on COM to find it. - - Returns the Python module. - - Params - typelib_ob -- The type library itself - progressInstance -- Instance to use as progress indicator, or None to - use the GUI progress bar. - """ - from . import makepy - - try: - makepy.GenerateFromTypeLibSpec( - typelib_ob, - progressInstance=progressInstance, - bForDemand=bForDemandDefault, - bBuildHidden=bBuildHidden, - ) - except pywintypes.com_error: - return None - tla = typelib_ob.GetLibAttr() - guid = tla[0] - lcid = tla[1] - major = tla[3] - minor = tla[4] - return GetModuleForTypelib(guid, lcid, major, minor) - - -def EnsureModuleForTypelibInterface( - typelib_ob, progressInstance=None, bForDemand=bForDemandDefault, bBuildHidden=1 -): - """Check we have support for a type library, generating if not. - - Given a PyITypeLib interface generate and import the necessary - support files if necessary. 
This is useful for getting makepy support - for a typelibrary that is not registered - the caller can locate and - load the type library itself, rather than relying on COM to find it. - - Returns the Python module. - - Params - typelib_ob -- The type library itself - progressInstance -- Instance to use as progress indicator, or None to - use the GUI progress bar. - """ - tla = typelib_ob.GetLibAttr() - guid = tla[0] - lcid = tla[1] - major = tla[3] - minor = tla[4] - - # If demand generated, save the typelib interface away for later use - if bForDemand: - demandGeneratedTypeLibraries[(str(guid), lcid, major, minor)] = typelib_ob - - try: - return GetModuleForTypelib(guid, lcid, major, minor) - except ImportError: - pass - # Generate it. - return MakeModuleForTypelibInterface( - typelib_ob, progressInstance, bForDemand, bBuildHidden - ) - - -def ForgetAboutTypelibInterface(typelib_ob): - """Drop any references to a typelib previously added with EnsureModuleForTypelibInterface and forDemand""" - tla = typelib_ob.GetLibAttr() - guid = tla[0] - lcid = tla[1] - major = tla[3] - minor = tla[4] - info = str(guid), lcid, major, minor - try: - del demandGeneratedTypeLibraries[info] - except KeyError: - # Not worth raising an exception - maybe they dont know we only remember for demand generated, etc. - print( - "ForgetAboutTypelibInterface:: Warning - type library with info %s is not being remembered!" - % (info,) - ) - # and drop any version redirects to it - for key, val in list(versionRedirectMap.items()): - if val == info: - del versionRedirectMap[key] - - -def EnsureModule( - typelibCLSID, - lcid, - major, - minor, - progressInstance=None, - bValidateFile=not is_readonly, - bForDemand=bForDemandDefault, - bBuildHidden=1, -): - """Ensure Python support is loaded for a type library, generating if necessary. - - Given the IID, LCID and version information for a type library, check and if - necessary (re)generate, then import the necessary support files. 
If we regenerate the file, there - is no way to totally snuff out all instances of the old module in Python, and thus we will regenerate the file more than necessary, - unless makepy/genpy is modified accordingly. - - - Returns the Python module. No exceptions are caught during the generate process. - - Params - typelibCLSID -- IID of the type library. - major -- Integer major version. - minor -- Integer minor version - lcid -- Integer LCID for the library. - progressInstance -- Instance to use as progress indicator, or None to - use the GUI progress bar. - bValidateFile -- Whether or not to perform cache validation or not - bForDemand -- Should a complete generation happen now, or on demand? - bBuildHidden -- Should hidden members/attributes etc be generated? - """ - bReloadNeeded = 0 - try: - try: - module = GetModuleForTypelib(typelibCLSID, lcid, major, minor) - except ImportError: - # If we get an ImportError - # We may still find a valid cache file under a different MinorVersion # - # (which windows will search out for us) - # print "Loading reg typelib", typelibCLSID, major, minor, lcid - module = None - try: - tlbAttr = pythoncom.LoadRegTypeLib( - typelibCLSID, major, minor, lcid - ).GetLibAttr() - # if the above line doesn't throw a pythoncom.com_error, check if - # it is actually a different lib than we requested, and if so, suck it in - if tlbAttr[1] != lcid or tlbAttr[4] != minor: - # print "Trying 2nd minor #", tlbAttr[1], tlbAttr[3], tlbAttr[4] - try: - module = GetModuleForTypelib( - typelibCLSID, tlbAttr[1], tlbAttr[3], tlbAttr[4] - ) - except ImportError: - # We don't have a module, but we do have a better minor - # version - remember that. 
- minor = tlbAttr[4] - # else module remains None - except pythoncom.com_error: - # couldn't load any typelib - mod remains None - pass - if module is not None and bValidateFile: - assert not is_readonly, "Can't validate in a read-only gencache" - try: - typLibPath = pythoncom.QueryPathOfRegTypeLib( - typelibCLSID, major, minor, lcid - ) - # windows seems to add an extra \0 (via the underlying BSTR) - # The mainwin toolkit does not add this erroneous \0 - if typLibPath[-1] == "\0": - typLibPath = typLibPath[:-1] - suf = getattr(os.path, "supports_unicode_filenames", 0) - if not suf: - # can't pass unicode filenames directly - convert - try: - typLibPath = typLibPath.encode(sys.getfilesystemencoding()) - except AttributeError: # no sys.getfilesystemencoding - typLibPath = str(typLibPath) - tlbAttributes = pythoncom.LoadRegTypeLib( - typelibCLSID, major, minor, lcid - ).GetLibAttr() - except pythoncom.com_error: - # We have a module, but no type lib - we should still - # run with what we have though - the typelib may not be - # deployed here. - bValidateFile = 0 - if module is not None and bValidateFile: - assert not is_readonly, "Can't validate in a read-only gencache" - filePathPrefix = "%s\\%s" % ( - GetGeneratePath(), - GetGeneratedFileName(typelibCLSID, lcid, major, minor), - ) - filePath = filePathPrefix + ".py" - filePathPyc = filePathPrefix + ".py" - if __debug__: - filePathPyc = filePathPyc + "c" - else: - filePathPyc = filePathPyc + "o" - # Verify that type library is up to date. - # If we have a differing MinorVersion or genpy has bumped versions, update the file - from . 
import genpy - - if ( - module.MinorVersion != tlbAttributes[4] - or genpy.makepy_version != module.makepy_version - ): - # print "Version skew: %d, %d" % (module.MinorVersion, tlbAttributes[4]) - # try to erase the bad file from the cache - try: - os.unlink(filePath) - except os.error: - pass - try: - os.unlink(filePathPyc) - except os.error: - pass - if os.path.isdir(filePathPrefix): - import shutil - - shutil.rmtree(filePathPrefix) - minor = tlbAttributes[4] - module = None - bReloadNeeded = 1 - else: - minor = module.MinorVersion - filePathPrefix = "%s\\%s" % ( - GetGeneratePath(), - GetGeneratedFileName(typelibCLSID, lcid, major, minor), - ) - filePath = filePathPrefix + ".py" - filePathPyc = filePathPrefix + ".pyc" - # print "Trying py stat: ", filePath - fModTimeSet = 0 - try: - pyModTime = os.stat(filePath)[8] - fModTimeSet = 1 - except os.error as e: - # If .py file fails, try .pyc file - # print "Trying pyc stat", filePathPyc - try: - pyModTime = os.stat(filePathPyc)[8] - fModTimeSet = 1 - except os.error as e: - pass - # print "Trying stat typelib", pyModTime - # print str(typLibPath) - typLibModTime = os.stat(typLibPath)[8] - if fModTimeSet and (typLibModTime > pyModTime): - bReloadNeeded = 1 - module = None - except (ImportError, os.error): - module = None - if module is None: - # We need to build an item. If we are in a read-only cache, we - # can't/don't want to do this - so before giving up, check for - # a different minor version in our cache - according to COM, this is OK - if is_readonly: - key = str(typelibCLSID), lcid, major, minor - # If we have been asked before, get last result. - try: - return versionRedirectMap[key] - except KeyError: - pass - # Find other candidates. 
- items = [] - for desc in GetGeneratedInfos(): - if key[0] == desc[0] and key[1] == desc[1] and key[2] == desc[2]: - items.append(desc) - if items: - # Items are all identical, except for last tuple element - # We want the latest minor version we have - so just sort and grab last - items.sort() - new_minor = items[-1][3] - ret = GetModuleForTypelib(typelibCLSID, lcid, major, new_minor) - else: - ret = None - # remember and return - versionRedirectMap[key] = ret - return ret - # print "Rebuilding: ", major, minor - module = MakeModuleForTypelib( - typelibCLSID, - lcid, - major, - minor, - progressInstance, - bForDemand=bForDemand, - bBuildHidden=bBuildHidden, - ) - # If we replaced something, reload it - if bReloadNeeded: - module = reload(module) - AddModuleToCache(typelibCLSID, lcid, major, minor) - return module - - -def EnsureDispatch( - prog_id, bForDemand=1 -): # New fn, so we default the new demand feature to on! - """Given a COM prog_id, return an object that is using makepy support, building if necessary""" - disp = win32com.client.Dispatch(prog_id) - if not disp.__dict__.get("CLSID"): # Eeek - no makepy support - try and build it. - try: - ti = disp._oleobj_.GetTypeInfo() - disp_clsid = ti.GetTypeAttr()[0] - tlb, index = ti.GetContainingTypeLib() - tla = tlb.GetLibAttr() - mod = EnsureModule(tla[0], tla[1], tla[3], tla[4], bForDemand=bForDemand) - GetModuleForCLSID(disp_clsid) - # Get the class from the module. - from . 
import CLSIDToClass - - disp_class = CLSIDToClass.GetClass(str(disp_clsid)) - disp = disp_class(disp._oleobj_) - except pythoncom.com_error: - raise TypeError( - "This COM object can not automate the makepy process - please run makepy manually for this object" - ) - return disp - - -def AddModuleToCache( - typelibclsid, lcid, major, minor, verbose=1, bFlushNow=not is_readonly -): - """Add a newly generated file to the cache dictionary.""" - fname = GetGeneratedFileName(typelibclsid, lcid, major, minor) - mod = _GetModule(fname) - # if mod._in_gencache_ is already true, then we are reloading this - # module - this doesn't mean anything special though! - mod._in_gencache_ = 1 - info = str(typelibclsid), lcid, major, minor - dict_modified = False - - def SetTypelibForAllClsids(dict): - nonlocal dict_modified - for clsid, cls in dict.items(): - if clsidToTypelib.get(clsid) != info: - clsidToTypelib[clsid] = info - dict_modified = True - - SetTypelibForAllClsids(mod.CLSIDToClassMap) - SetTypelibForAllClsids(mod.CLSIDToPackageMap) - SetTypelibForAllClsids(mod.VTablesToClassMap) - SetTypelibForAllClsids(mod.VTablesToPackageMap) - - # If this lib was previously redirected, drop it - if info in versionRedirectMap: - del versionRedirectMap[info] - if bFlushNow and dict_modified: - _SaveDicts() - - -def GetGeneratedInfos(): - zip_pos = win32com.__gen_path__.find(".zip\\") - if zip_pos >= 0: - import zipfile - - zip_file = win32com.__gen_path__[: zip_pos + 4] - zip_path = win32com.__gen_path__[zip_pos + 5 :].replace("\\", "/") - zf = zipfile.ZipFile(zip_file) - infos = {} - for n in zf.namelist(): - if not n.startswith(zip_path): - continue - base = n[len(zip_path) + 1 :].split("/")[0] - try: - iid, lcid, major, minor = base.split("x") - lcid = int(lcid) - major = int(major) - minor = int(minor) - iid = pywintypes.IID("{" + iid + "}") - except ValueError: - continue - except pywintypes.com_error: - # invalid IID - continue - infos[(iid, lcid, major, minor)] = 1 - zf.close() - 
return list(infos.keys()) - else: - # on the file system - files = glob.glob(win32com.__gen_path__ + "\\*") - ret = [] - for file in files: - if not os.path.isdir(file) and not os.path.splitext(file)[1] == ".py": - continue - name = os.path.splitext(os.path.split(file)[1])[0] - try: - iid, lcid, major, minor = name.split("x") - iid = pywintypes.IID("{" + iid + "}") - lcid = int(lcid) - major = int(major) - minor = int(minor) - except ValueError: - continue - except pywintypes.com_error: - # invalid IID - continue - ret.append((iid, lcid, major, minor)) - return ret - - -def _GetModule(fname): - """Given the name of a module in the gen_py directory, import and return it.""" - mod_name = "win32com.gen_py.%s" % fname - mod = __import__(mod_name) - return sys.modules[mod_name] - - -def Rebuild(verbose=1): - """Rebuild the cache indexes from the file system.""" - clsidToTypelib.clear() - infos = GetGeneratedInfos() - if verbose and len(infos): # Dont bother reporting this when directory is empty! - print("Rebuilding cache of generated files for COM support...") - for info in infos: - iid, lcid, major, minor = info - if verbose: - print("Checking", GetGeneratedFileName(*info)) - try: - AddModuleToCache(iid, lcid, major, minor, verbose, 0) - except: - print( - "Could not add module %s - %s: %s" - % (info, sys.exc_info()[0], sys.exc_info()[1]) - ) - if verbose and len(infos): # Dont bother reporting this when directory is empty! 
- print("Done.") - _SaveDicts() - - -def _Dump(): - print("Cache is in directory", win32com.__gen_path__) - # Build a unique dir - d = {} - for clsid, (typelibCLSID, lcid, major, minor) in clsidToTypelib.items(): - d[typelibCLSID, lcid, major, minor] = None - for typelibCLSID, lcid, major, minor in d.keys(): - mod = GetModuleForTypelib(typelibCLSID, lcid, major, minor) - print("%s - %s" % (mod.__doc__, typelibCLSID)) - - -# Boot up -__init__() - - -def usage(): - usageString = """\ - Usage: gencache [-q] [-d] [-r] - - -q - Quiet - -d - Dump the cache (typelibrary description and filename). - -r - Rebuild the cache dictionary from the existing .py files - """ - print(usageString) - sys.exit(1) - - -if __name__ == "__main__": - import getopt - - try: - opts, args = getopt.getopt(sys.argv[1:], "qrd") - except getopt.error as message: - print(message) - usage() - - # we only have options - complain about real args, or none at all! - if len(sys.argv) == 1 or args: - print(usage()) - - verbose = 1 - for opt, val in opts: - if opt == "-d": # Dump - _Dump() - if opt == "-r": - Rebuild(verbose) - if opt == "-q": - verbose = 0 diff --git a/lib/win32com/client/genpy.py b/lib/win32com/client/genpy.py deleted file mode 100644 index 3eba075f..00000000 --- a/lib/win32com/client/genpy.py +++ /dev/null @@ -1,1411 +0,0 @@ -"""genpy.py - The worker for makepy. See makepy.py for more details - -This code was moved simply to speed Python in normal circumstances. As the makepy.py -is normally run from the command line, it reparses the code each time. Now makepy -is nothing more than the command line handler and public interface. - -The makepy command line etc handling is also getting large enough in its own right! -""" - -# NOTE - now supports a "demand" mechanism - the top-level is a package, and -# each class etc can be made individually. -# This should eventually become the default. -# Then the old non-package technique should be removed. 
-# There should be no b/w compat issues, and will just help clean the code. -# This will be done once the new "demand" mechanism gets a good workout. -import os -import sys -import time - -import pythoncom -import win32com - -from . import build - -error = "makepy.error" -makepy_version = "0.5.01" # Written to generated file. - -GEN_FULL = "full" -GEN_DEMAND_BASE = "demand(base)" -GEN_DEMAND_CHILD = "demand(child)" - -# This map is used purely for the users benefit -it shows the -# raw, underlying type of Alias/Enums, etc. The COM implementation -# does not use this map at runtime - all Alias/Enum have already -# been translated. -mapVTToTypeString = { - pythoncom.VT_I2: "types.IntType", - pythoncom.VT_I4: "types.IntType", - pythoncom.VT_R4: "types.FloatType", - pythoncom.VT_R8: "types.FloatType", - pythoncom.VT_BSTR: "types.StringType", - pythoncom.VT_BOOL: "types.IntType", - pythoncom.VT_VARIANT: "types.TypeType", - pythoncom.VT_I1: "types.IntType", - pythoncom.VT_UI1: "types.IntType", - pythoncom.VT_UI2: "types.IntType", - pythoncom.VT_UI4: "types.IntType", - pythoncom.VT_I8: "types.LongType", - pythoncom.VT_UI8: "types.LongType", - pythoncom.VT_INT: "types.IntType", - pythoncom.VT_DATE: "pythoncom.PyTimeType", - pythoncom.VT_UINT: "types.IntType", -} - - -# Given a propget function's arg desc, return the default parameters for all -# params bar the first. Eg, then Python does a: -# object.Property = "foo" -# Python can only pass the "foo" value. If the property has -# multiple args, and the rest have default values, this allows -# Python to correctly pass those defaults. 
-def MakeDefaultArgsForPropertyPut(argsDesc): - ret = [] - for desc in argsDesc[1:]: - default = build.MakeDefaultArgRepr(desc) - if default is None: - break - ret.append(default) - return tuple(ret) - - -def MakeMapLineEntry(dispid, wFlags, retType, argTypes, user, resultCLSID): - # Strip the default value - argTypes = tuple([what[:2] for what in argTypes]) - return '(%s, %d, %s, %s, "%s", %s)' % ( - dispid, - wFlags, - retType[:2], - argTypes, - user, - resultCLSID, - ) - - -def MakeEventMethodName(eventName): - if eventName[:2] == "On": - return eventName - else: - return "On" + eventName - - -def WriteSinkEventMap(obj, stream): - print("\t_dispid_to_func_ = {", file=stream) - for name, entry in ( - list(obj.propMapGet.items()) - + list(obj.propMapPut.items()) - + list(obj.mapFuncs.items()) - ): - fdesc = entry.desc - print( - '\t\t%9d : "%s",' % (fdesc.memid, MakeEventMethodName(entry.names[0])), - file=stream, - ) - print("\t\t}", file=stream) - - -# MI is used to join my writable helpers, and the OLE -# classes. -class WritableItem: - # __cmp__ used for sorting in py2x... - def __cmp__(self, other): - "Compare for sorting" - ret = cmp(self.order, other.order) - if ret == 0 and self.doc: - ret = cmp(self.doc[0], other.doc[0]) - return ret - - # ... 
but not used in py3k - __lt__ minimum needed there - def __lt__(self, other): # py3k variant - if self.order == other.order: - return self.doc < other.doc - return self.order < other.order - - def __repr__(self): - return "OleItem: doc=%s, order=%d" % (repr(self.doc), self.order) - - -class RecordItem(build.OleItem, WritableItem): - order = 9 - typename = "RECORD" - - def __init__(self, typeInfo, typeAttr, doc=None, bForUser=1): - ## sys.stderr.write("Record %s: size %s\n" % (doc,typeAttr.cbSizeInstance)) - ## sys.stderr.write(" cVars = %s\n" % (typeAttr.cVars,)) - ## for i in range(typeAttr.cVars): - ## vdesc = typeInfo.GetVarDesc(i) - ## sys.stderr.write(" Var %d has value %s, type %d, desc=%s\n" % (i, vdesc.value, vdesc.varkind, vdesc.elemdescVar)) - ## sys.stderr.write(" Doc is %s\n" % (typeInfo.GetDocumentation(vdesc.memid),)) - - build.OleItem.__init__(self, doc) - self.clsid = typeAttr[0] - - def WriteClass(self, generator): - pass - - -# Given an enum, write all aliases for it. -# (no longer necessary for new style code, but still used for old code. -def WriteAliasesForItem(item, aliasItems, stream): - for alias in aliasItems.values(): - if item.doc and alias.aliasDoc and (alias.aliasDoc[0] == item.doc[0]): - alias.WriteAliasItem(aliasItems, stream) - - -class AliasItem(build.OleItem, WritableItem): - order = 2 - typename = "ALIAS" - - def __init__(self, typeinfo, attr, doc=None, bForUser=1): - build.OleItem.__init__(self, doc) - - ai = attr[14] - self.attr = attr - if type(ai) == type(()) and type(ai[1]) == type( - 0 - ): # XXX - This is a hack - why tuples? Need to resolve? 
- href = ai[1] - alinfo = typeinfo.GetRefTypeInfo(href) - self.aliasDoc = alinfo.GetDocumentation(-1) - self.aliasAttr = alinfo.GetTypeAttr() - else: - self.aliasDoc = None - self.aliasAttr = None - - def WriteAliasItem(self, aliasDict, stream): - # we could have been written as part of an alias dependency - if self.bWritten: - return - - if self.aliasDoc: - depName = self.aliasDoc[0] - if depName in aliasDict: - aliasDict[depName].WriteAliasItem(aliasDict, stream) - print(self.doc[0] + " = " + depName, file=stream) - else: - ai = self.attr[14] - if type(ai) == type(0): - try: - typeStr = mapVTToTypeString[ai] - print("# %s=%s" % (self.doc[0], typeStr), file=stream) - except KeyError: - print( - self.doc[0] + " = None # Can't convert alias info " + str(ai), - file=stream, - ) - print(file=stream) - self.bWritten = 1 - - -class EnumerationItem(build.OleItem, WritableItem): - order = 1 - typename = "ENUMERATION" - - def __init__(self, typeinfo, attr, doc=None, bForUser=1): - build.OleItem.__init__(self, doc) - - self.clsid = attr[0] - self.mapVars = {} - typeFlags = attr[11] - self.hidden = ( - typeFlags & pythoncom.TYPEFLAG_FHIDDEN - or typeFlags & pythoncom.TYPEFLAG_FRESTRICTED - ) - - for j in range(attr[7]): - vdesc = typeinfo.GetVarDesc(j) - name = typeinfo.GetNames(vdesc[0])[0] - self.mapVars[name] = build.MapEntry(vdesc) - - ## def WriteEnumerationHeaders(self, aliasItems, stream): - ## enumName = self.doc[0] - ## print >> stream "%s=constants # Compatibility with previous versions." 
% (enumName) - ## WriteAliasesForItem(self, aliasItems) - - def WriteEnumerationItems(self, stream): - num = 0 - enumName = self.doc[0] - # Write in name alpha order - names = list(self.mapVars.keys()) - names.sort() - for name in names: - entry = self.mapVars[name] - vdesc = entry.desc - if vdesc[4] == pythoncom.VAR_CONST: - val = vdesc[1] - - use = repr(val) - # Make sure the repr of the value is valid python syntax - # still could cause an error on import if it contains a module or type name - # not available in the global namespace - try: - compile(use, "", "eval") - except SyntaxError: - # At least add the repr as a string, so it can be investigated further - # Sanitize it, in case the repr contains its own quotes. (??? line breaks too ???) - use = use.replace('"', "'") - use = ( - '"' - + use - + '"' - + " # This VARIANT type cannot be converted automatically" - ) - print( - "\t%-30s=%-10s # from enum %s" - % (build.MakePublicAttributeName(name, True), use, enumName), - file=stream, - ) - num += 1 - return num - - -class VTableItem(build.VTableItem, WritableItem): - order = 4 - - def WriteClass(self, generator): - self.WriteVTableMap(generator) - self.bWritten = 1 - - def WriteVTableMap(self, generator): - stream = generator.file - print( - "%s_vtables_dispatch_ = %d" % (self.python_name, self.bIsDispatch), - file=stream, - ) - print("%s_vtables_ = [" % (self.python_name,), file=stream) - for v in self.vtableFuncs: - names, dispid, desc = v - assert desc.desckind == pythoncom.DESCKIND_FUNCDESC - arg_reprs = [] - # more hoops so we don't generate huge lines. 
- item_num = 0 - print("\t((", end=" ", file=stream) - for name in names: - print(repr(name), ",", end=" ", file=stream) - item_num = item_num + 1 - if item_num % 5 == 0: - print("\n\t\t\t", end=" ", file=stream) - print( - "), %d, (%r, %r, [" % (dispid, desc.memid, desc.scodeArray), - end=" ", - file=stream, - ) - for arg in desc.args: - item_num = item_num + 1 - if item_num % 5 == 0: - print("\n\t\t\t", end=" ", file=stream) - defval = build.MakeDefaultArgRepr(arg) - if arg[3] is None: - arg3_repr = None - else: - arg3_repr = repr(arg[3]) - print( - repr((arg[0], arg[1], defval, arg3_repr)), ",", end=" ", file=stream - ) - print("],", end=" ", file=stream) - print(repr(desc.funckind), ",", end=" ", file=stream) - print(repr(desc.invkind), ",", end=" ", file=stream) - print(repr(desc.callconv), ",", end=" ", file=stream) - print(repr(desc.cParamsOpt), ",", end=" ", file=stream) - print(repr(desc.oVft), ",", end=" ", file=stream) - print(repr(desc.rettype), ",", end=" ", file=stream) - print(repr(desc.wFuncFlags), ",", end=" ", file=stream) - print(")),", file=stream) - print("]", file=stream) - print(file=stream) - - -class DispatchItem(build.DispatchItem, WritableItem): - order = 3 - - def __init__(self, typeinfo, attr, doc=None): - build.DispatchItem.__init__(self, typeinfo, attr, doc) - self.type_attr = attr - self.coclass_clsid = None - - def WriteClass(self, generator): - if ( - not self.bIsDispatch - and not self.type_attr.typekind == pythoncom.TKIND_DISPATCH - ): - return - # This is pretty screwey - now we have vtable support we - # should probably rethink this (ie, maybe write both sides for sinks, etc) - if self.bIsSink: - self.WriteEventSinkClassHeader(generator) - self.WriteCallbackClassBody(generator) - else: - self.WriteClassHeader(generator) - self.WriteClassBody(generator) - print(file=generator.file) - self.bWritten = 1 - - def WriteClassHeader(self, generator): - generator.checkWriteDispatchBaseClass() - doc = self.doc - stream = generator.file - 
print("class " + self.python_name + "(DispatchBaseClass):", file=stream) - if doc[1]: - print("\t" + build._makeDocString(doc[1]), file=stream) - try: - progId = pythoncom.ProgIDFromCLSID(self.clsid) - print( - "\t# This class is creatable by the name '%s'" % (progId), file=stream - ) - except pythoncom.com_error: - pass - print("\tCLSID = " + repr(self.clsid), file=stream) - if self.coclass_clsid is None: - print("\tcoclass_clsid = None", file=stream) - else: - print("\tcoclass_clsid = " + repr(self.coclass_clsid), file=stream) - print(file=stream) - self.bWritten = 1 - - def WriteEventSinkClassHeader(self, generator): - generator.checkWriteEventBaseClass() - doc = self.doc - stream = generator.file - print("class " + self.python_name + ":", file=stream) - if doc[1]: - print("\t" + build._makeDocString(doc[1]), file=stream) - try: - progId = pythoncom.ProgIDFromCLSID(self.clsid) - print( - "\t# This class is creatable by the name '%s'" % (progId), file=stream - ) - except pythoncom.com_error: - pass - print("\tCLSID = CLSID_Sink = " + repr(self.clsid), file=stream) - if self.coclass_clsid is None: - print("\tcoclass_clsid = None", file=stream) - else: - print("\tcoclass_clsid = " + repr(self.coclass_clsid), file=stream) - print("\t_public_methods_ = [] # For COM Server support", file=stream) - WriteSinkEventMap(self, stream) - print(file=stream) - print("\tdef __init__(self, oobj = None):", file=stream) - print("\t\tif oobj is None:", file=stream) - print("\t\t\tself._olecp = None", file=stream) - print("\t\telse:", file=stream) - print("\t\t\timport win32com.server.util", file=stream) - print( - "\t\t\tfrom win32com.server.policy import EventHandlerPolicy", file=stream - ) - print( - "\t\t\tcpc=oobj._oleobj_.QueryInterface(pythoncom.IID_IConnectionPointContainer)", - file=stream, - ) - print("\t\t\tcp=cpc.FindConnectionPoint(self.CLSID_Sink)", file=stream) - print( - "\t\t\tcookie=cp.Advise(win32com.server.util.wrap(self, usePolicy=EventHandlerPolicy))", - 
file=stream, - ) - print("\t\t\tself._olecp,self._olecp_cookie = cp,cookie", file=stream) - print("\tdef __del__(self):", file=stream) - print("\t\ttry:", file=stream) - print("\t\t\tself.close()", file=stream) - print("\t\texcept pythoncom.com_error:", file=stream) - print("\t\t\tpass", file=stream) - print("\tdef close(self):", file=stream) - print("\t\tif self._olecp is not None:", file=stream) - print( - "\t\t\tcp,cookie,self._olecp,self._olecp_cookie = self._olecp,self._olecp_cookie,None,None", - file=stream, - ) - print("\t\t\tcp.Unadvise(cookie)", file=stream) - print("\tdef _query_interface_(self, iid):", file=stream) - print("\t\timport win32com.server.util", file=stream) - print( - "\t\tif iid==self.CLSID_Sink: return win32com.server.util.wrap(self)", - file=stream, - ) - print(file=stream) - self.bWritten = 1 - - def WriteCallbackClassBody(self, generator): - stream = generator.file - print("\t# Event Handlers", file=stream) - print( - "\t# If you create handlers, they should have the following prototypes:", - file=stream, - ) - for name, entry in ( - list(self.propMapGet.items()) - + list(self.propMapPut.items()) - + list(self.mapFuncs.items()) - ): - fdesc = entry.desc - methName = MakeEventMethodName(entry.names[0]) - print( - "#\tdef " - + methName - + "(self" - + build.BuildCallList( - fdesc, - entry.names, - "defaultNamedOptArg", - "defaultNamedNotOptArg", - "defaultUnnamedArg", - "pythoncom.Missing", - is_comment=True, - ) - + "):", - file=stream, - ) - if entry.doc and entry.doc[1]: - print("#\t\t" + build._makeDocString(entry.doc[1]), file=stream) - print(file=stream) - self.bWritten = 1 - - def WriteClassBody(self, generator): - stream = generator.file - # Write in alpha order. 
- names = list(self.mapFuncs.keys()) - names.sort() - specialItems = { - "count": None, - "item": None, - "value": None, - "_newenum": None, - } # If found, will end up with (entry, invoke_tupe) - itemCount = None - for name in names: - entry = self.mapFuncs[name] - assert entry.desc.desckind == pythoncom.DESCKIND_FUNCDESC - # skip [restricted] methods, unless it is the - # enumerator (which, being part of the "system", - # we know about and can use) - dispid = entry.desc.memid - if ( - entry.desc.wFuncFlags & pythoncom.FUNCFLAG_FRESTRICTED - and dispid != pythoncom.DISPID_NEWENUM - ): - continue - # If not accessible via IDispatch, then we can't use it here. - if entry.desc.funckind != pythoncom.FUNC_DISPATCH: - continue - if dispid == pythoncom.DISPID_VALUE: - lkey = "value" - elif dispid == pythoncom.DISPID_NEWENUM: - specialItems["_newenum"] = (entry, entry.desc.invkind, None) - continue # Dont build this one now! - else: - lkey = name.lower() - if ( - lkey in specialItems and specialItems[lkey] is None - ): # remember if a special one. 
- specialItems[lkey] = (entry, entry.desc.invkind, None) - if generator.bBuildHidden or not entry.hidden: - if entry.GetResultName(): - print("\t# Result is of type " + entry.GetResultName(), file=stream) - if entry.wasProperty: - print( - "\t# The method %s is actually a property, but must be used as a method to correctly pass the arguments" - % name, - file=stream, - ) - ret = self.MakeFuncMethod(entry, build.MakePublicAttributeName(name)) - for line in ret: - print(line, file=stream) - print("\t_prop_map_get_ = {", file=stream) - names = list(self.propMap.keys()) - names.sort() - for key in names: - entry = self.propMap[key] - if generator.bBuildHidden or not entry.hidden: - resultName = entry.GetResultName() - if resultName: - print( - "\t\t# Property '%s' is an object of type '%s'" - % (key, resultName), - file=stream, - ) - lkey = key.lower() - details = entry.desc - resultDesc = details[2] - argDesc = () - mapEntry = MakeMapLineEntry( - details.memid, - pythoncom.DISPATCH_PROPERTYGET, - resultDesc, - argDesc, - key, - entry.GetResultCLSIDStr(), - ) - - if details.memid == pythoncom.DISPID_VALUE: - lkey = "value" - elif details.memid == pythoncom.DISPID_NEWENUM: - lkey = "_newenum" - else: - lkey = key.lower() - if ( - lkey in specialItems and specialItems[lkey] is None - ): # remember if a special one. - specialItems[lkey] = ( - entry, - pythoncom.DISPATCH_PROPERTYGET, - mapEntry, - ) - # All special methods, except _newenum, are written - # "normally". This is a mess! 
- if details.memid == pythoncom.DISPID_NEWENUM: - continue - - print( - '\t\t"%s": %s,' % (build.MakePublicAttributeName(key), mapEntry), - file=stream, - ) - names = list(self.propMapGet.keys()) - names.sort() - for key in names: - entry = self.propMapGet[key] - if generator.bBuildHidden or not entry.hidden: - if entry.GetResultName(): - print( - "\t\t# Method '%s' returns object of type '%s'" - % (key, entry.GetResultName()), - file=stream, - ) - details = entry.desc - assert details.desckind == pythoncom.DESCKIND_FUNCDESC - lkey = key.lower() - argDesc = details[2] - resultDesc = details[8] - mapEntry = MakeMapLineEntry( - details[0], - pythoncom.DISPATCH_PROPERTYGET, - resultDesc, - argDesc, - key, - entry.GetResultCLSIDStr(), - ) - if details.memid == pythoncom.DISPID_VALUE: - lkey = "value" - elif details.memid == pythoncom.DISPID_NEWENUM: - lkey = "_newenum" - else: - lkey = key.lower() - if ( - lkey in specialItems and specialItems[lkey] is None - ): # remember if a special one. - specialItems[lkey] = ( - entry, - pythoncom.DISPATCH_PROPERTYGET, - mapEntry, - ) - # All special methods, except _newenum, are written - # "normally". This is a mess! 
- if details.memid == pythoncom.DISPID_NEWENUM: - continue - print( - '\t\t"%s": %s,' % (build.MakePublicAttributeName(key), mapEntry), - file=stream, - ) - - print("\t}", file=stream) - - print("\t_prop_map_put_ = {", file=stream) - # These are "Invoke" args - names = list(self.propMap.keys()) - names.sort() - for key in names: - entry = self.propMap[key] - if generator.bBuildHidden or not entry.hidden: - lkey = key.lower() - details = entry.desc - # If default arg is None, write an empty tuple - defArgDesc = build.MakeDefaultArgRepr(details[2]) - if defArgDesc is None: - defArgDesc = "" - else: - defArgDesc = defArgDesc + "," - print( - '\t\t"%s" : ((%s, LCID, %d, 0),(%s)),' - % ( - build.MakePublicAttributeName(key), - details[0], - pythoncom.DISPATCH_PROPERTYPUT, - defArgDesc, - ), - file=stream, - ) - - names = list(self.propMapPut.keys()) - names.sort() - for key in names: - entry = self.propMapPut[key] - if generator.bBuildHidden or not entry.hidden: - details = entry.desc - defArgDesc = MakeDefaultArgsForPropertyPut(details[2]) - print( - '\t\t"%s": ((%s, LCID, %d, 0),%s),' - % ( - build.MakePublicAttributeName(key), - details[0], - details[4], - defArgDesc, - ), - file=stream, - ) - print("\t}", file=stream) - - if specialItems["value"]: - entry, invoketype, propArgs = specialItems["value"] - if propArgs is None: - typename = "method" - ret = self.MakeFuncMethod(entry, "__call__") - else: - typename = "property" - ret = [ - "\tdef __call__(self):\n\t\treturn self._ApplyTypes_(*%s)" - % propArgs - ] - print( - "\t# Default %s for this class is '%s'" % (typename, entry.names[0]), - file=stream, - ) - for line in ret: - print(line, file=stream) - print("\tdef __str__(self, *args):", file=stream) - print("\t\treturn str(self.__call__(*args))", file=stream) - print("\tdef __int__(self, *args):", file=stream) - print("\t\treturn int(self.__call__(*args))", file=stream) - - # _NewEnum (DISPID_NEWENUM) does not appear in typelib for many office objects, - # but it 
can still be retrieved at runtime, so always create __iter__. - # Also, some of those same objects use 1-based indexing, causing the old-style - # __getitem__ iteration to fail for index 0 where the dynamic iteration succeeds. - if specialItems["_newenum"]: - enumEntry, invoketype, propArgs = specialItems["_newenum"] - assert enumEntry.desc.desckind == pythoncom.DESCKIND_FUNCDESC - invkind = enumEntry.desc.invkind - # ??? Wouldn't this be the resultCLSID for the iterator itself, rather than the resultCLSID - # for the result of each Next() call, which is what it's used for ??? - resultCLSID = enumEntry.GetResultCLSIDStr() - else: - invkind = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET - resultCLSID = "None" - # If we dont have a good CLSID for the enum result, assume it is the same as the Item() method. - if resultCLSID == "None" and "Item" in self.mapFuncs: - resultCLSID = self.mapFuncs["Item"].GetResultCLSIDStr() - print("\tdef __iter__(self):", file=stream) - print('\t\t"Return a Python iterator for this object"', file=stream) - print("\t\ttry:", file=stream) - print( - "\t\t\tob = self._oleobj_.InvokeTypes(%d,LCID,%d,(13, 10),())" - % (pythoncom.DISPID_NEWENUM, invkind), - file=stream, - ) - print("\t\texcept pythoncom.error:", file=stream) - print( - '\t\t\traise TypeError("This object does not support enumeration")', - file=stream, - ) - # Iterator is wrapped as PyIEnumVariant, and each result of __next__ is Dispatch'ed if necessary - print( - "\t\treturn win32com.client.util.Iterator(ob, %s)" % resultCLSID, - file=stream, - ) - - if specialItems["item"]: - entry, invoketype, propArgs = specialItems["item"] - resultCLSID = entry.GetResultCLSIDStr() - print( - "\t#This class has Item property/method which allows indexed access with the object[key] syntax.", - file=stream, - ) - print( - "\t#Some objects will accept a string or other type of key in addition to integers.", - file=stream, - ) - print( - "\t#Note that many Office objects do not use 
zero-based indexing.", - file=stream, - ) - print("\tdef __getitem__(self, key):", file=stream) - print( - '\t\treturn self._get_good_object_(self._oleobj_.Invoke(*(%d, LCID, %d, 1, key)), "Item", %s)' - % (entry.desc.memid, invoketype, resultCLSID), - file=stream, - ) - - if specialItems["count"]: - entry, invoketype, propArgs = specialItems["count"] - if propArgs is None: - typename = "method" - ret = self.MakeFuncMethod(entry, "__len__") - else: - typename = "property" - ret = [ - "\tdef __len__(self):\n\t\treturn self._ApplyTypes_(*%s)" % propArgs - ] - print( - "\t#This class has Count() %s - allow len(ob) to provide this" - % (typename), - file=stream, - ) - for line in ret: - print(line, file=stream) - # Also include a __nonzero__ - print( - "\t#This class has a __len__ - this is needed so 'if object:' always returns TRUE.", - file=stream, - ) - print("\tdef __nonzero__(self):", file=stream) - print("\t\treturn True", file=stream) - - -class CoClassItem(build.OleItem, WritableItem): - order = 5 - typename = "COCLASS" - - def __init__(self, typeinfo, attr, doc=None, sources=[], interfaces=[], bForUser=1): - build.OleItem.__init__(self, doc) - self.clsid = attr[0] - self.sources = sources - self.interfaces = interfaces - self.bIsDispatch = 1 # Pretend it is so it is written to the class map. - - def WriteClass(self, generator): - generator.checkWriteCoClassBaseClass() - doc = self.doc - stream = generator.file - if generator.generate_type == GEN_DEMAND_CHILD: - # Some special imports we must setup. 
- referenced_items = [] - for ref, flag in self.sources: - referenced_items.append(ref) - for ref, flag in self.interfaces: - referenced_items.append(ref) - print("import sys", file=stream) - for ref in referenced_items: - print( - "__import__('%s.%s')" % (generator.base_mod_name, ref.python_name), - file=stream, - ) - print( - "%s = sys.modules['%s.%s'].%s" - % ( - ref.python_name, - generator.base_mod_name, - ref.python_name, - ref.python_name, - ), - file=stream, - ) - # And pretend we have written it - the name is now available as if we had! - ref.bWritten = 1 - try: - progId = pythoncom.ProgIDFromCLSID(self.clsid) - print("# This CoClass is known by the name '%s'" % (progId), file=stream) - except pythoncom.com_error: - pass - print( - "class %s(CoClassBaseClass): # A CoClass" % (self.python_name), file=stream - ) - if doc and doc[1]: - print("\t# " + doc[1], file=stream) - print("\tCLSID = %r" % (self.clsid,), file=stream) - print("\tcoclass_sources = [", file=stream) - defItem = None - for item, flag in self.sources: - if flag & pythoncom.IMPLTYPEFLAG_FDEFAULT: - defItem = item - # If we have written a Python class, reference the name - - # otherwise just the IID. - if item.bWritten: - key = item.python_name - else: - key = repr(str(item.clsid)) # really the iid. - print("\t\t%s," % (key), file=stream) - print("\t]", file=stream) - if defItem: - if defItem.bWritten: - defName = defItem.python_name - else: - defName = repr(str(defItem.clsid)) # really the iid. - print("\tdefault_source = %s" % (defName,), file=stream) - print("\tcoclass_interfaces = [", file=stream) - defItem = None - for item, flag in self.interfaces: - if flag & pythoncom.IMPLTYPEFLAG_FDEFAULT: # and dual: - defItem = item - # If we have written a class, reference its name, otherwise the IID - if item.bWritten: - key = item.python_name - else: - key = repr(str(item.clsid)) # really the iid. 
- print("\t\t%s," % (key,), file=stream) - print("\t]", file=stream) - if defItem: - if defItem.bWritten: - defName = defItem.python_name - else: - defName = repr(str(defItem.clsid)) # really the iid. - print("\tdefault_interface = %s" % (defName,), file=stream) - self.bWritten = 1 - print(file=stream) - - -class GeneratorProgress: - def __init__(self): - pass - - def Starting(self, tlb_desc): - """Called when the process starts.""" - self.tlb_desc = tlb_desc - - def Finished(self): - """Called when the process is complete.""" - - def SetDescription(self, desc, maxticks=None): - """We are entering a major step. If maxticks, then this - is how many ticks we expect to make until finished - """ - - def Tick(self, desc=None): - """Minor progress step. Can provide new description if necessary""" - - def VerboseProgress(self, desc): - """Verbose/Debugging output.""" - - def LogWarning(self, desc): - """If a warning is generated""" - - def LogBeginGenerate(self, filename): - pass - - def Close(self): - pass - - -class Generator: - def __init__( - self, - typelib, - sourceFilename, - progressObject, - bBuildHidden=1, - bUnicodeToString=None, - ): - assert bUnicodeToString is None, "this is deprecated and will go away" - self.bHaveWrittenDispatchBaseClass = 0 - self.bHaveWrittenCoClassBaseClass = 0 - self.bHaveWrittenEventBaseClass = 0 - self.typelib = typelib - self.sourceFilename = sourceFilename - self.bBuildHidden = bBuildHidden - self.progress = progressObject - # These 2 are later additions and most of the code still 'print's... 
- self.file = None - - def CollectOleItemInfosFromType(self): - ret = [] - for i in range(self.typelib.GetTypeInfoCount()): - info = self.typelib.GetTypeInfo(i) - infotype = self.typelib.GetTypeInfoType(i) - doc = self.typelib.GetDocumentation(i) - attr = info.GetTypeAttr() - ret.append((info, infotype, doc, attr)) - return ret - - def _Build_CoClass(self, type_info_tuple): - info, infotype, doc, attr = type_info_tuple - # find the source and dispinterfaces for the coclass - child_infos = [] - for j in range(attr[8]): - flags = info.GetImplTypeFlags(j) - try: - refType = info.GetRefTypeInfo(info.GetRefTypeOfImplType(j)) - except pythoncom.com_error: - # Can't load a dependent typelib? - continue - refAttr = refType.GetTypeAttr() - child_infos.append( - ( - info, - refAttr.typekind, - refType, - refType.GetDocumentation(-1), - refAttr, - flags, - ) - ) - - # Done generating children - now the CoClass itself. - newItem = CoClassItem(info, attr, doc) - return newItem, child_infos - - def _Build_CoClassChildren(self, coclass, coclass_info, oleItems, vtableItems): - sources = {} - interfaces = {} - for info, info_type, refType, doc, refAttr, flags in coclass_info: - # sys.stderr.write("Attr typeflags for coclass referenced object %s=%d (%d), typekind=%d\n" % (name, refAttr.wTypeFlags, refAttr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL,refAttr.typekind)) - if refAttr.typekind == pythoncom.TKIND_DISPATCH or ( - refAttr.typekind == pythoncom.TKIND_INTERFACE - and refAttr[11] & pythoncom.TYPEFLAG_FDISPATCHABLE - ): - clsid = refAttr[0] - if clsid in oleItems: - dispItem = oleItems[clsid] - else: - dispItem = DispatchItem(refType, refAttr, doc) - oleItems[dispItem.clsid] = dispItem - dispItem.coclass_clsid = coclass.clsid - if flags & pythoncom.IMPLTYPEFLAG_FSOURCE: - dispItem.bIsSink = 1 - sources[dispItem.clsid] = (dispItem, flags) - else: - interfaces[dispItem.clsid] = (dispItem, flags) - # If dual interface, make do that too. 
- if clsid not in vtableItems and refAttr[11] & pythoncom.TYPEFLAG_FDUAL: - refType = refType.GetRefTypeInfo(refType.GetRefTypeOfImplType(-1)) - refAttr = refType.GetTypeAttr() - assert ( - refAttr.typekind == pythoncom.TKIND_INTERFACE - ), "must be interface bynow!" - vtableItem = VTableItem(refType, refAttr, doc) - vtableItems[clsid] = vtableItem - coclass.sources = list(sources.values()) - coclass.interfaces = list(interfaces.values()) - - def _Build_Interface(self, type_info_tuple): - info, infotype, doc, attr = type_info_tuple - oleItem = vtableItem = None - if infotype == pythoncom.TKIND_DISPATCH or ( - infotype == pythoncom.TKIND_INTERFACE - and attr[11] & pythoncom.TYPEFLAG_FDISPATCHABLE - ): - oleItem = DispatchItem(info, attr, doc) - # If this DISPATCH interface dual, then build that too. - if attr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL: - # Get the vtable interface - refhtype = info.GetRefTypeOfImplType(-1) - info = info.GetRefTypeInfo(refhtype) - attr = info.GetTypeAttr() - infotype = pythoncom.TKIND_INTERFACE - else: - infotype = None - assert infotype in [ - None, - pythoncom.TKIND_INTERFACE, - ], "Must be a real interface at this point" - if infotype == pythoncom.TKIND_INTERFACE: - vtableItem = VTableItem(info, attr, doc) - return oleItem, vtableItem - - def BuildOleItemsFromType(self): - assert ( - self.bBuildHidden - ), "This code doesnt look at the hidden flag - I thought everyone set it true!?!?!" 
- oleItems = {} - enumItems = {} - recordItems = {} - vtableItems = {} - - for type_info_tuple in self.CollectOleItemInfosFromType(): - info, infotype, doc, attr = type_info_tuple - clsid = attr[0] - if infotype == pythoncom.TKIND_ENUM or infotype == pythoncom.TKIND_MODULE: - newItem = EnumerationItem(info, attr, doc) - enumItems[newItem.doc[0]] = newItem - # We never hide interfaces (MSAccess, for example, nominates interfaces as - # hidden, assuming that you only ever use them via the CoClass) - elif infotype in [pythoncom.TKIND_DISPATCH, pythoncom.TKIND_INTERFACE]: - if clsid not in oleItems: - oleItem, vtableItem = self._Build_Interface(type_info_tuple) - oleItems[clsid] = oleItem # Even "None" goes in here. - if vtableItem is not None: - vtableItems[clsid] = vtableItem - elif ( - infotype == pythoncom.TKIND_RECORD or infotype == pythoncom.TKIND_UNION - ): - newItem = RecordItem(info, attr, doc) - recordItems[newItem.clsid] = newItem - elif infotype == pythoncom.TKIND_ALIAS: - # We dont care about alias' - handled intrinsicly. - continue - elif infotype == pythoncom.TKIND_COCLASS: - newItem, child_infos = self._Build_CoClass(type_info_tuple) - self._Build_CoClassChildren(newItem, child_infos, oleItems, vtableItems) - oleItems[newItem.clsid] = newItem - else: - self.progress.LogWarning("Unknown TKIND found: %d" % infotype) - - return oleItems, enumItems, recordItems, vtableItems - - def open_writer(self, filename, encoding="mbcs"): - # A place to put code to open a file with the appropriate encoding. - # Does *not* set self.file - just opens and returns a file. - # Actually returns a handle to a temp file - finish_writer then deletes - # the filename asked for and puts everything back in place. This - # is so errors don't leave a 1/2 generated file around causing bizarre - # errors later, and so that multiple processes writing the same file - # don't step on each others' toes. - # Could be a classmethod one day... 
- temp_filename = self.get_temp_filename(filename) - return open(temp_filename, "wt", encoding=encoding) - - def finish_writer(self, filename, f, worked): - f.close() - try: - os.unlink(filename) - except os.error: - pass - temp_filename = self.get_temp_filename(filename) - if worked: - try: - os.rename(temp_filename, filename) - except os.error: - # If we are really unlucky, another process may have written the - # file in between our calls to os.unlink and os.rename. So try - # again, but only once. - # There are still some race conditions, but they seem difficult to - # fix, and they probably occur much less frequently: - # * The os.rename failure could occur more than once if more than - # two processes are involved. - # * In between os.unlink and os.rename, another process could try - # to import the module, having seen that it already exists. - # * If another process starts a COM server while we are still - # generating __init__.py, that process sees that the folder - # already exists and assumes that __init__.py is already there - # as well. - try: - os.unlink(filename) - except os.error: - pass - os.rename(temp_filename, filename) - else: - os.unlink(temp_filename) - - def get_temp_filename(self, filename): - return "%s.%d.temp" % (filename, os.getpid()) - - def generate(self, file, is_for_demand=0): - if is_for_demand: - self.generate_type = GEN_DEMAND_BASE - else: - self.generate_type = GEN_FULL - self.file = file - self.do_generate() - self.file = None - self.progress.Finished() - - def do_gen_file_header(self): - la = self.typelib.GetLibAttr() - moduleDoc = self.typelib.GetDocumentation(-1) - docDesc = "" - if moduleDoc[1]: - docDesc = moduleDoc[1] - - # Reset all the 'per file' state - self.bHaveWrittenDispatchBaseClass = 0 - self.bHaveWrittenCoClassBaseClass = 0 - self.bHaveWrittenEventBaseClass = 0 - # You must provide a file correctly configured for writing unicode. - # We assert this is it may indicate somewhere in pywin32 that needs - # upgrading. 
- assert self.file.encoding, self.file - encoding = self.file.encoding # or "mbcs" - - print("# -*- coding: %s -*-" % (encoding,), file=self.file) - print("# Created by makepy.py version %s" % (makepy_version,), file=self.file) - print( - "# By python version %s" % (sys.version.replace("\n", "-"),), file=self.file - ) - if self.sourceFilename: - print( - "# From type library '%s'" % (os.path.split(self.sourceFilename)[1],), - file=self.file, - ) - print("# On %s" % time.ctime(time.time()), file=self.file) - - print(build._makeDocString(docDesc), file=self.file) - - print("makepy_version =", repr(makepy_version), file=self.file) - print("python_version = 0x%x" % (sys.hexversion,), file=self.file) - print(file=self.file) - print( - "import win32com.client.CLSIDToClass, pythoncom, pywintypes", file=self.file - ) - print("import win32com.client.util", file=self.file) - print("from pywintypes import IID", file=self.file) - print("from win32com.client import Dispatch", file=self.file) - print(file=self.file) - print( - "# The following 3 lines may need tweaking for the particular server", - file=self.file, - ) - print( - "# Candidates are pythoncom.Missing, .Empty and .ArgNotFound", - file=self.file, - ) - print("defaultNamedOptArg=pythoncom.Empty", file=self.file) - print("defaultNamedNotOptArg=pythoncom.Empty", file=self.file) - print("defaultUnnamedArg=pythoncom.Empty", file=self.file) - print(file=self.file) - print("CLSID = " + repr(la[0]), file=self.file) - print("MajorVersion = " + str(la[3]), file=self.file) - print("MinorVersion = " + str(la[4]), file=self.file) - print("LibraryFlags = " + str(la[5]), file=self.file) - print("LCID = " + hex(la[1]), file=self.file) - print(file=self.file) - - def do_generate(self): - moduleDoc = self.typelib.GetDocumentation(-1) - stream = self.file - docDesc = "" - if moduleDoc[1]: - docDesc = moduleDoc[1] - self.progress.Starting(docDesc) - self.progress.SetDescription("Building definitions from type library...") - - 
self.do_gen_file_header() - - oleItems, enumItems, recordItems, vtableItems = self.BuildOleItemsFromType() - - self.progress.SetDescription( - "Generating...", len(oleItems) + len(enumItems) + len(vtableItems) - ) - - # Generate the constants and their support. - if enumItems: - print("class constants:", file=stream) - items = list(enumItems.values()) - items.sort() - num_written = 0 - for oleitem in items: - num_written += oleitem.WriteEnumerationItems(stream) - self.progress.Tick() - if not num_written: - print("\tpass", file=stream) - print(file=stream) - - if self.generate_type == GEN_FULL: - items = [l for l in oleItems.values() if l is not None] - items.sort() - for oleitem in items: - self.progress.Tick() - oleitem.WriteClass(self) - - items = list(vtableItems.values()) - items.sort() - for oleitem in items: - self.progress.Tick() - oleitem.WriteClass(self) - else: - self.progress.Tick(len(oleItems) + len(vtableItems)) - - print("RecordMap = {", file=stream) - for record in recordItems.values(): - if record.clsid == pythoncom.IID_NULL: - print( - "\t###%s: %s, # Record disabled because it doesn't have a non-null GUID" - % (repr(record.doc[0]), repr(str(record.clsid))), - file=stream, - ) - else: - print( - "\t%s: %s," % (repr(record.doc[0]), repr(str(record.clsid))), - file=stream, - ) - print("}", file=stream) - print(file=stream) - - # Write out _all_ my generated CLSID's in the map - if self.generate_type == GEN_FULL: - print("CLSIDToClassMap = {", file=stream) - for item in oleItems.values(): - if item is not None and item.bWritten: - print( - "\t'%s' : %s," % (str(item.clsid), item.python_name), - file=stream, - ) - print("}", file=stream) - print("CLSIDToPackageMap = {}", file=stream) - print( - "win32com.client.CLSIDToClass.RegisterCLSIDsFromDict( CLSIDToClassMap )", - file=stream, - ) - print("VTablesToPackageMap = {}", file=stream) - print("VTablesToClassMap = {", file=stream) - for item in vtableItems.values(): - print("\t'%s' : '%s'," % 
(item.clsid, item.python_name), file=stream) - print("}", file=stream) - print(file=stream) - - else: - print("CLSIDToClassMap = {}", file=stream) - print("CLSIDToPackageMap = {", file=stream) - for item in oleItems.values(): - if item is not None: - print( - "\t'%s' : %s," % (str(item.clsid), repr(item.python_name)), - file=stream, - ) - print("}", file=stream) - print("VTablesToClassMap = {}", file=stream) - print("VTablesToPackageMap = {", file=stream) - for item in vtableItems.values(): - print("\t'%s' : '%s'," % (item.clsid, item.python_name), file=stream) - print("}", file=stream) - print(file=stream) - - print(file=stream) - # Bit of a hack - build a temp map of iteItems + vtableItems - coClasses - map = {} - for item in oleItems.values(): - if item is not None and not isinstance(item, CoClassItem): - map[item.python_name] = item.clsid - for item in vtableItems.values(): # No nones or CoClasses in this map - map[item.python_name] = item.clsid - - print("NamesToIIDMap = {", file=stream) - for name, iid in map.items(): - print("\t'%s' : '%s'," % (name, iid), file=stream) - print("}", file=stream) - print(file=stream) - - if enumItems: - print( - "win32com.client.constants.__dicts__.append(constants.__dict__)", - file=stream, - ) - print(file=stream) - - def generate_child(self, child, dir): - "Generate a single child. May force a few children to be built as we generate deps" - self.generate_type = GEN_DEMAND_CHILD - - la = self.typelib.GetLibAttr() - lcid = la[1] - clsid = la[0] - major = la[3] - minor = la[4] - self.base_mod_name = ( - "win32com.gen_py." + str(clsid)[1:-1] + "x%sx%sx%s" % (lcid, major, minor) - ) - try: - # Process the type library's CoClass objects, looking for the - # specified name, or where a child has the specified name. - # This ensures that all interesting things (including event interfaces) - # are generated correctly. 
- oleItems = {} - vtableItems = {} - infos = self.CollectOleItemInfosFromType() - found = 0 - for type_info_tuple in infos: - info, infotype, doc, attr = type_info_tuple - if infotype == pythoncom.TKIND_COCLASS: - coClassItem, child_infos = self._Build_CoClass(type_info_tuple) - found = build.MakePublicAttributeName(doc[0]) == child - if not found: - # OK, check the child interfaces - for ( - info, - info_type, - refType, - doc, - refAttr, - flags, - ) in child_infos: - if build.MakePublicAttributeName(doc[0]) == child: - found = 1 - break - if found: - oleItems[coClassItem.clsid] = coClassItem - self._Build_CoClassChildren( - coClassItem, child_infos, oleItems, vtableItems - ) - break - if not found: - # Doesn't appear in a class defn - look in the interface objects for it - for type_info_tuple in infos: - info, infotype, doc, attr = type_info_tuple - if infotype in [ - pythoncom.TKIND_INTERFACE, - pythoncom.TKIND_DISPATCH, - ]: - if build.MakePublicAttributeName(doc[0]) == child: - found = 1 - oleItem, vtableItem = self._Build_Interface(type_info_tuple) - oleItems[clsid] = oleItem # Even "None" goes in here. - if vtableItem is not None: - vtableItems[clsid] = vtableItem - - assert ( - found - ), "Cant find the '%s' interface in the CoClasses, or the interfaces" % ( - child, - ) - # Make a map of iid: dispitem, vtableitem) - items = {} - for key, value in oleItems.items(): - items[key] = (value, None) - for key, value in vtableItems.items(): - existing = items.get(key, None) - if existing is not None: - new_val = existing[0], value - else: - new_val = None, value - items[key] = new_val - - self.progress.SetDescription("Generating...", len(items)) - for oleitem, vtableitem in items.values(): - an_item = oleitem or vtableitem - assert not self.file, "already have a file?" - # like makepy.py, we gen to a .temp file so failure doesn't - # leave a 1/2 generated mess. 
- out_name = os.path.join(dir, an_item.python_name) + ".py" - worked = False - self.file = self.open_writer(out_name) - try: - if oleitem is not None: - self.do_gen_child_item(oleitem) - if vtableitem is not None: - self.do_gen_child_item(vtableitem) - self.progress.Tick() - worked = True - finally: - self.finish_writer(out_name, self.file, worked) - self.file = None - finally: - self.progress.Finished() - - def do_gen_child_item(self, oleitem): - moduleDoc = self.typelib.GetDocumentation(-1) - docDesc = "" - if moduleDoc[1]: - docDesc = moduleDoc[1] - self.progress.Starting(docDesc) - self.progress.SetDescription("Building definitions from type library...") - self.do_gen_file_header() - oleitem.WriteClass(self) - if oleitem.bWritten: - print( - 'win32com.client.CLSIDToClass.RegisterCLSID( "%s", %s )' - % (oleitem.clsid, oleitem.python_name), - file=self.file, - ) - - def checkWriteDispatchBaseClass(self): - if not self.bHaveWrittenDispatchBaseClass: - print("from win32com.client import DispatchBaseClass", file=self.file) - self.bHaveWrittenDispatchBaseClass = 1 - - def checkWriteCoClassBaseClass(self): - if not self.bHaveWrittenCoClassBaseClass: - print("from win32com.client import CoClassBaseClass", file=self.file) - self.bHaveWrittenCoClassBaseClass = 1 - - def checkWriteEventBaseClass(self): - # Not a base class as such... - if not self.bHaveWrittenEventBaseClass: - # Nothing to do any more! - self.bHaveWrittenEventBaseClass = 1 - - -if __name__ == "__main__": - print("This is a worker module. 
Please use makepy to generate Python files.") diff --git a/lib/win32com/client/makepy.py b/lib/win32com/client/makepy.py deleted file mode 100644 index 041f18a4..00000000 --- a/lib/win32com/client/makepy.py +++ /dev/null @@ -1,459 +0,0 @@ -# Originally written by Curt Hagenlocher, and various bits -# and pieces by Mark Hammond (and now Greg Stein has had -# a go too :-) - -# Note that the main worker code has been moved to genpy.py -# As this is normally run from the command line, it reparses the code each time. -# Now this is nothing more than the command line handler and public interface. - -# XXX - TO DO -# XXX - Greg and Mark have some ideas for a revamp - just no -# time - if you want to help, contact us for details. -# Main idea is to drop the classes exported and move to a more -# traditional data driven model. - -"""Generate a .py file from an OLE TypeLibrary file. - - - This module is concerned only with the actual writing of - a .py file. It draws on the @build@ module, which builds - the knowledge of a COM interface. - -""" -usageHelp = """ \ - -Usage: - - makepy.py [-i] [-v|q] [-h] [-u] [-o output_file] [-d] [typelib, ...] - - -i -- Show information for the specified typelib. - - -v -- Verbose output. - - -q -- Quiet output. - - -h -- Do not generate hidden methods. - - -u -- Python 1.5 and earlier: Do NOT convert all Unicode objects to - strings. - - Python 1.6 and later: Convert all Unicode objects to strings. - - -o -- Create output in a specified output file. If the path leading - to the file does not exist, any missing directories will be - created. - NOTE: -o cannot be used with -d. This will generate an error. - - -d -- Generate the base code now and the class code on demand. - Recommended for large type libraries. - - typelib -- A TLB, DLL, OCX or anything containing COM type information. - If a typelib is not specified, a window containing a textbox - will open from which you can select a registered type - library. 
- -Examples: - - makepy.py -d - - Presents a list of registered type libraries from which you can make - a selection. - - makepy.py -d "Microsoft Excel 8.0 Object Library" - - Generate support for the type library with the specified description - (in this case, the MS Excel object model). - -""" - -import importlib -import os -import sys - -import pythoncom -from win32com.client import Dispatch, gencache, genpy, selecttlb - -bForDemandDefault = 0 # Default value of bForDemand - toggle this to change the world - see also gencache.py - -error = "makepy.error" - - -def usage(): - sys.stderr.write(usageHelp) - sys.exit(2) - - -def ShowInfo(spec): - if not spec: - tlbSpec = selecttlb.SelectTlb(excludeFlags=selecttlb.FLAG_HIDDEN) - if tlbSpec is None: - return - try: - tlb = pythoncom.LoadRegTypeLib( - tlbSpec.clsid, tlbSpec.major, tlbSpec.minor, tlbSpec.lcid - ) - except pythoncom.com_error: # May be badly registered. - sys.stderr.write( - "Warning - could not load registered typelib '%s'\n" % (tlbSpec.clsid) - ) - tlb = None - - infos = [(tlb, tlbSpec)] - else: - infos = GetTypeLibsForSpec(spec) - for tlb, tlbSpec in infos: - desc = tlbSpec.desc - if desc is None: - if tlb is None: - desc = "" % (tlbSpec.dll) - else: - desc = tlb.GetDocumentation(-1)[0] - print(desc) - print( - " %s, lcid=%s, major=%s, minor=%s" - % (tlbSpec.clsid, tlbSpec.lcid, tlbSpec.major, tlbSpec.minor) - ) - print(" >>> # Use these commands in Python code to auto generate .py support") - print(" >>> from win32com.client import gencache") - print( - " >>> gencache.EnsureModule('%s', %s, %s, %s)" - % (tlbSpec.clsid, tlbSpec.lcid, tlbSpec.major, tlbSpec.minor) - ) - - -class SimpleProgress(genpy.GeneratorProgress): - """A simple progress class prints its output to stderr""" - - def __init__(self, verboseLevel): - self.verboseLevel = verboseLevel - - def Close(self): - pass - - def Finished(self): - if self.verboseLevel > 1: - sys.stderr.write("Generation complete..\n") - - def SetDescription(self, 
desc, maxticks=None): - if self.verboseLevel: - sys.stderr.write(desc + "\n") - - def Tick(self, desc=None): - pass - - def VerboseProgress(self, desc, verboseLevel=2): - if self.verboseLevel >= verboseLevel: - sys.stderr.write(desc + "\n") - - def LogBeginGenerate(self, filename): - self.VerboseProgress("Generating to %s" % filename, 1) - - def LogWarning(self, desc): - self.VerboseProgress("WARNING: " + desc, 1) - - -class GUIProgress(SimpleProgress): - def __init__(self, verboseLevel): - # Import some modules we need to we can trap failure now. - import pywin # nopycln: import - import win32ui - - SimpleProgress.__init__(self, verboseLevel) - self.dialog = None - - def Close(self): - if self.dialog is not None: - self.dialog.Close() - self.dialog = None - - def Starting(self, tlb_desc): - SimpleProgress.Starting(self, tlb_desc) - if self.dialog is None: - from pywin.dialogs import status - - self.dialog = status.ThreadedStatusProgressDialog(tlb_desc) - else: - self.dialog.SetTitle(tlb_desc) - - def SetDescription(self, desc, maxticks=None): - self.dialog.SetText(desc) - if maxticks: - self.dialog.SetMaxTicks(maxticks) - - def Tick(self, desc=None): - self.dialog.Tick() - if desc is not None: - self.dialog.SetText(desc) - - -def GetTypeLibsForSpec(arg): - """Given an argument on the command line (either a file name, library - description, or ProgID of an object) return a list of actual typelibs - to use.""" - typelibs = [] - try: - try: - tlb = pythoncom.LoadTypeLib(arg) - spec = selecttlb.TypelibSpec(None, 0, 0, 0) - spec.FromTypelib(tlb, arg) - typelibs.append((tlb, spec)) - except pythoncom.com_error: - # See if it is a description - tlbs = selecttlb.FindTlbsWithDescription(arg) - if len(tlbs) == 0: - # Maybe it is the name of a COM object? 
- try: - ob = Dispatch(arg) - # and if so, it must support typelib info - tlb, index = ob._oleobj_.GetTypeInfo().GetContainingTypeLib() - spec = selecttlb.TypelibSpec(None, 0, 0, 0) - spec.FromTypelib(tlb) - tlbs.append(spec) - except pythoncom.com_error: - pass - if len(tlbs) == 0: - print("Could not locate a type library matching '%s'" % (arg)) - for spec in tlbs: - # Version numbers not always reliable if enumerated from registry. - # (as some libs use hex, other's dont. Both examples from MS, of course.) - if spec.dll is None: - tlb = pythoncom.LoadRegTypeLib( - spec.clsid, spec.major, spec.minor, spec.lcid - ) - else: - tlb = pythoncom.LoadTypeLib(spec.dll) - - # We have a typelib, but it may not be exactly what we specified - # (due to automatic version matching of COM). So we query what we really have! - attr = tlb.GetLibAttr() - spec.major = attr[3] - spec.minor = attr[4] - spec.lcid = attr[1] - typelibs.append((tlb, spec)) - return typelibs - except pythoncom.com_error: - t, v, tb = sys.exc_info() - sys.stderr.write("Unable to load type library from '%s' - %s\n" % (arg, v)) - tb = None # Storing tb in a local is a cycle! - sys.exit(1) - - -def GenerateFromTypeLibSpec( - typelibInfo, - file=None, - verboseLevel=None, - progressInstance=None, - bUnicodeToString=None, - bForDemand=bForDemandDefault, - bBuildHidden=1, -): - assert bUnicodeToString is None, "this is deprecated and will go away" - if verboseLevel is None: - verboseLevel = 0 # By default, we use no gui and no verbose level! 
- - if bForDemand and file is not None: - raise RuntimeError( - "You can only perform a demand-build when the output goes to the gen_py directory" - ) - if isinstance(typelibInfo, tuple): - # Tuple - typelibCLSID, lcid, major, minor = typelibInfo - tlb = pythoncom.LoadRegTypeLib(typelibCLSID, major, minor, lcid) - spec = selecttlb.TypelibSpec(typelibCLSID, lcid, major, minor) - spec.FromTypelib(tlb, str(typelibCLSID)) - typelibs = [(tlb, spec)] - elif isinstance(typelibInfo, selecttlb.TypelibSpec): - if typelibInfo.dll is None: - # Version numbers not always reliable if enumerated from registry. - tlb = pythoncom.LoadRegTypeLib( - typelibInfo.clsid, - typelibInfo.major, - typelibInfo.minor, - typelibInfo.lcid, - ) - else: - tlb = pythoncom.LoadTypeLib(typelibInfo.dll) - typelibs = [(tlb, typelibInfo)] - elif hasattr(typelibInfo, "GetLibAttr"): - # A real typelib object! - # Could also use isinstance(typelibInfo, PyITypeLib) instead, but PyITypeLib is not directly exposed by pythoncom. - # pythoncom.TypeIIDs[pythoncom.IID_ITypeLib] seems to work - tla = typelibInfo.GetLibAttr() - guid = tla[0] - lcid = tla[1] - major = tla[3] - minor = tla[4] - spec = selecttlb.TypelibSpec(guid, lcid, major, minor) - typelibs = [(typelibInfo, spec)] - else: - typelibs = GetTypeLibsForSpec(typelibInfo) - - if progressInstance is None: - progressInstance = SimpleProgress(verboseLevel) - progress = progressInstance - - bToGenDir = file is None - - for typelib, info in typelibs: - gen = genpy.Generator(typelib, info.dll, progress, bBuildHidden=bBuildHidden) - - if file is None: - this_name = gencache.GetGeneratedFileName( - info.clsid, info.lcid, info.major, info.minor - ) - full_name = os.path.join(gencache.GetGeneratePath(), this_name) - if bForDemand: - try: - os.unlink(full_name + ".py") - except os.error: - pass - try: - os.unlink(full_name + ".pyc") - except os.error: - pass - try: - os.unlink(full_name + ".pyo") - except os.error: - pass - if not os.path.isdir(full_name): - 
os.mkdir(full_name) - outputName = os.path.join(full_name, "__init__.py") - else: - outputName = full_name + ".py" - fileUse = gen.open_writer(outputName) - progress.LogBeginGenerate(outputName) - else: - fileUse = file - - worked = False - try: - gen.generate(fileUse, bForDemand) - worked = True - finally: - if file is None: - gen.finish_writer(outputName, fileUse, worked) - importlib.invalidate_caches() - if bToGenDir: - progress.SetDescription("Importing module") - gencache.AddModuleToCache(info.clsid, info.lcid, info.major, info.minor) - - progress.Close() - - -def GenerateChildFromTypeLibSpec( - child, typelibInfo, verboseLevel=None, progressInstance=None, bUnicodeToString=None -): - assert bUnicodeToString is None, "this is deprecated and will go away" - if verboseLevel is None: - verboseLevel = ( - 0 # By default, we use no gui, and no verbose level for the children. - ) - if type(typelibInfo) == type(()): - typelibCLSID, lcid, major, minor = typelibInfo - tlb = pythoncom.LoadRegTypeLib(typelibCLSID, major, minor, lcid) - else: - tlb = typelibInfo - tla = typelibInfo.GetLibAttr() - typelibCLSID = tla[0] - lcid = tla[1] - major = tla[3] - minor = tla[4] - spec = selecttlb.TypelibSpec(typelibCLSID, lcid, major, minor) - spec.FromTypelib(tlb, str(typelibCLSID)) - typelibs = [(tlb, spec)] - - if progressInstance is None: - progressInstance = SimpleProgress(verboseLevel) - progress = progressInstance - - for typelib, info in typelibs: - dir_name = gencache.GetGeneratedFileName( - info.clsid, info.lcid, info.major, info.minor - ) - dir_path_name = os.path.join(gencache.GetGeneratePath(), dir_name) - progress.LogBeginGenerate(dir_path_name) - - gen = genpy.Generator(typelib, info.dll, progress) - gen.generate_child(child, dir_path_name) - progress.SetDescription("Importing module") - importlib.invalidate_caches() - __import__("win32com.gen_py." + dir_name + "." 
+ child) - progress.Close() - - -def main(): - import getopt - - hiddenSpec = 1 - outputName = None - verboseLevel = 1 - doit = 1 - bForDemand = bForDemandDefault - try: - opts, args = getopt.getopt(sys.argv[1:], "vo:huiqd") - for o, v in opts: - if o == "-h": - hiddenSpec = 0 - elif o == "-o": - outputName = v - elif o == "-v": - verboseLevel = verboseLevel + 1 - elif o == "-q": - verboseLevel = verboseLevel - 1 - elif o == "-i": - if len(args) == 0: - ShowInfo(None) - else: - for arg in args: - ShowInfo(arg) - doit = 0 - elif o == "-d": - bForDemand = not bForDemand - - except (getopt.error, error) as msg: - sys.stderr.write(str(msg) + "\n") - usage() - - if bForDemand and outputName is not None: - sys.stderr.write("Can not use -d and -o together\n") - usage() - - if not doit: - return 0 - if len(args) == 0: - rc = selecttlb.SelectTlb() - if rc is None: - sys.exit(1) - args = [rc] - - if outputName is not None: - path = os.path.dirname(outputName) - if path != "" and not os.path.exists(path): - os.makedirs(path) - if sys.version_info > (3, 0): - f = open(outputName, "wt", encoding="mbcs") - else: - import codecs # not available in py3k. 
- - f = codecs.open(outputName, "w", "mbcs") - else: - f = None - - for arg in args: - GenerateFromTypeLibSpec( - arg, - f, - verboseLevel=verboseLevel, - bForDemand=bForDemand, - bBuildHidden=hiddenSpec, - ) - - if f: - f.close() - - -if __name__ == "__main__": - rc = main() - if rc: - sys.exit(rc) - sys.exit(0) diff --git a/lib/win32com/client/selecttlb.py b/lib/win32com/client/selecttlb.py deleted file mode 100644 index bb21a429..00000000 --- a/lib/win32com/client/selecttlb.py +++ /dev/null @@ -1,183 +0,0 @@ -"""Utilities for selecting and enumerating the Type Libraries installed on the system -""" - -import pythoncom -import win32api -import win32con - - -class TypelibSpec: - def __init__(self, clsid, lcid, major, minor, flags=0): - self.clsid = str(clsid) - self.lcid = int(lcid) - # We avoid assuming 'major' or 'minor' are integers - when - # read from the registry there is some confusion about if - # they are base 10 or base 16 (they *should* be base 16, but - # how they are written is beyond our control.) 
- self.major = major - self.minor = minor - self.dll = None - self.desc = None - self.ver_desc = None - self.flags = flags - - # For the SelectList - def __getitem__(self, item): - if item == 0: - return self.ver_desc - raise IndexError("Cant index me!") - - def __lt__(self, other): # rich-cmp/py3k-friendly version - me = ( - (self.ver_desc or "").lower(), - (self.desc or "").lower(), - self.major, - self.minor, - ) - them = ( - (other.ver_desc or "").lower(), - (other.desc or "").lower(), - other.major, - other.minor, - ) - return me < them - - def __eq__(self, other): # rich-cmp/py3k-friendly version - return ( - (self.ver_desc or "").lower() == (other.ver_desc or "").lower() - and (self.desc or "").lower() == (other.desc or "").lower() - and self.major == other.major - and self.minor == other.minor - ) - - def Resolve(self): - if self.dll is None: - return 0 - tlb = pythoncom.LoadTypeLib(self.dll) - self.FromTypelib(tlb, None) - return 1 - - def FromTypelib(self, typelib, dllName=None): - la = typelib.GetLibAttr() - self.clsid = str(la[0]) - self.lcid = la[1] - self.major = la[3] - self.minor = la[4] - if dllName: - self.dll = dllName - - -def EnumKeys(root): - index = 0 - ret = [] - while 1: - try: - item = win32api.RegEnumKey(root, index) - except win32api.error: - break - try: - # Note this doesn't handle REG_EXPAND_SZ, but the implementation - # here doesn't need to - that is handled as the data is read. - val = win32api.RegQueryValue(root, item) - except win32api.error: - val = "" # code using this assumes a string. 
- - ret.append((item, val)) - index = index + 1 - return ret - - -FLAG_RESTRICTED = 1 -FLAG_CONTROL = 2 -FLAG_HIDDEN = 4 - - -def EnumTlbs(excludeFlags=0): - """Return a list of TypelibSpec objects, one for each registered library.""" - key = win32api.RegOpenKey(win32con.HKEY_CLASSES_ROOT, "Typelib") - iids = EnumKeys(key) - results = [] - for iid, crap in iids: - try: - key2 = win32api.RegOpenKey(key, str(iid)) - except win32api.error: - # A few good reasons for this, including "access denied". - continue - for version, tlbdesc in EnumKeys(key2): - major_minor = version.split(".", 1) - if len(major_minor) < 2: - major_minor.append("0") - # For some reason, this code used to assume the values were hex. - # This seems to not be true - particularly for CDO 1.21 - # *sigh* - it appears there are no rules here at all, so when we need - # to know the info, we must load the tlb by filename and request it. - # The Resolve() method on the TypelibSpec does this. - # For this reason, keep the version numbers as strings - that - # way we can't be wrong! Let code that really needs an int to work - # out what to do. FWIW, http://support.microsoft.com/kb/816970 is - # pretty clear that they *should* be hex. - major = major_minor[0] - minor = major_minor[1] - key3 = win32api.RegOpenKey(key2, str(version)) - try: - # The "FLAGS" are at this point - flags = int(win32api.RegQueryValue(key3, "FLAGS")) - except (win32api.error, ValueError): - flags = 0 - if flags & excludeFlags == 0: - for lcid, crap in EnumKeys(key3): - try: - lcid = int(lcid) - except ValueError: # not an LCID entry - continue - # Check for both "{lcid}\win32" and "{lcid}\win64" keys. 
- try: - key4 = win32api.RegOpenKey(key3, "%s\\win32" % (lcid,)) - except win32api.error: - try: - key4 = win32api.RegOpenKey(key3, "%s\\win64" % (lcid,)) - except win32api.error: - continue - try: - dll, typ = win32api.RegQueryValueEx(key4, None) - if typ == win32con.REG_EXPAND_SZ: - dll = win32api.ExpandEnvironmentStrings(dll) - except win32api.error: - dll = None - spec = TypelibSpec(iid, lcid, major, minor, flags) - spec.dll = dll - spec.desc = tlbdesc - spec.ver_desc = tlbdesc + " (" + version + ")" - results.append(spec) - return results - - -def FindTlbsWithDescription(desc): - """Find all installed type libraries with the specified description""" - ret = [] - items = EnumTlbs() - for item in items: - if item.desc == desc: - ret.append(item) - return ret - - -def SelectTlb(title="Select Library", excludeFlags=0): - """Display a list of all the type libraries, and select one. Returns None if cancelled""" - import pywin.dialogs.list - - items = EnumTlbs(excludeFlags) - # fixup versions - we assume hex (see __init__ above) - for i in items: - i.major = int(i.major, 16) - i.minor = int(i.minor, 16) - items.sort() - rc = pywin.dialogs.list.SelectFromLists(title, items, ["Type Library"]) - if rc is None: - return None - return items[rc] - - -# Test code. 
-if __name__ == "__main__": - print(SelectTlb().__dict__) diff --git a/lib/win32com/client/tlbrowse.py b/lib/win32com/client/tlbrowse.py deleted file mode 100644 index acc00281..00000000 --- a/lib/win32com/client/tlbrowse.py +++ /dev/null @@ -1,279 +0,0 @@ -import commctrl -import pythoncom -import win32api -import win32con -import win32ui -from pywin.mfc import dialog - - -class TLBrowserException(Exception): - "TypeLib browser internal error" - - -error = TLBrowserException - -FRAMEDLG_STD = win32con.WS_CAPTION | win32con.WS_SYSMENU -SS_STD = win32con.WS_CHILD | win32con.WS_VISIBLE -BS_STD = SS_STD | win32con.WS_TABSTOP -ES_STD = BS_STD | win32con.WS_BORDER -LBS_STD = ( - ES_STD | win32con.LBS_NOTIFY | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL -) -CBS_STD = ES_STD | win32con.CBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL - -typekindmap = { - pythoncom.TKIND_ENUM: "Enumeration", - pythoncom.TKIND_RECORD: "Record", - pythoncom.TKIND_MODULE: "Module", - pythoncom.TKIND_INTERFACE: "Interface", - pythoncom.TKIND_DISPATCH: "Dispatch", - pythoncom.TKIND_COCLASS: "CoClass", - pythoncom.TKIND_ALIAS: "Alias", - pythoncom.TKIND_UNION: "Union", -} - -TypeBrowseDialog_Parent = dialog.Dialog - - -class TypeBrowseDialog(TypeBrowseDialog_Parent): - "Browse a type library" - - IDC_TYPELIST = 1000 - IDC_MEMBERLIST = 1001 - IDC_PARAMLIST = 1002 - IDC_LISTVIEW = 1003 - - def __init__(self, typefile=None): - TypeBrowseDialog_Parent.__init__(self, self.GetTemplate()) - try: - if typefile: - self.tlb = pythoncom.LoadTypeLib(typefile) - else: - self.tlb = None - except pythoncom.ole_error: - self.MessageBox("The file does not contain type information") - self.tlb = None - self.HookCommand(self.CmdTypeListbox, self.IDC_TYPELIST) - self.HookCommand(self.CmdMemberListbox, self.IDC_MEMBERLIST) - - def OnAttachedObjectDeath(self): - self.tlb = None - self.typeinfo = None - self.attr = None - return TypeBrowseDialog_Parent.OnAttachedObjectDeath(self) - - def _SetupMenu(self): - menu = 
win32ui.CreateMenu() - flags = win32con.MF_STRING | win32con.MF_ENABLED - menu.AppendMenu(flags, win32ui.ID_FILE_OPEN, "&Open...") - menu.AppendMenu(flags, win32con.IDCANCEL, "&Close") - mainMenu = win32ui.CreateMenu() - mainMenu.AppendMenu(flags | win32con.MF_POPUP, menu.GetHandle(), "&File") - self.SetMenu(mainMenu) - self.HookCommand(self.OnFileOpen, win32ui.ID_FILE_OPEN) - - def OnFileOpen(self, id, code): - openFlags = win32con.OFN_OVERWRITEPROMPT | win32con.OFN_FILEMUSTEXIST - fspec = "Type Libraries (*.tlb, *.olb)|*.tlb;*.olb|OCX Files (*.ocx)|*.ocx|DLL's (*.dll)|*.dll|All Files (*.*)|*.*||" - dlg = win32ui.CreateFileDialog(1, None, None, openFlags, fspec) - if dlg.DoModal() == win32con.IDOK: - try: - self.tlb = pythoncom.LoadTypeLib(dlg.GetPathName()) - except pythoncom.ole_error: - self.MessageBox("The file does not contain type information") - self.tlb = None - self._SetupTLB() - - def OnInitDialog(self): - self._SetupMenu() - self.typelb = self.GetDlgItem(self.IDC_TYPELIST) - self.memberlb = self.GetDlgItem(self.IDC_MEMBERLIST) - self.paramlb = self.GetDlgItem(self.IDC_PARAMLIST) - self.listview = self.GetDlgItem(self.IDC_LISTVIEW) - - # Setup the listview columns - itemDetails = (commctrl.LVCFMT_LEFT, 100, "Item", 0) - self.listview.InsertColumn(0, itemDetails) - itemDetails = (commctrl.LVCFMT_LEFT, 1024, "Details", 0) - self.listview.InsertColumn(1, itemDetails) - - if self.tlb is None: - self.OnFileOpen(None, None) - else: - self._SetupTLB() - return TypeBrowseDialog_Parent.OnInitDialog(self) - - def _SetupTLB(self): - self.typelb.ResetContent() - self.memberlb.ResetContent() - self.paramlb.ResetContent() - self.typeinfo = None - self.attr = None - if self.tlb is None: - return - n = self.tlb.GetTypeInfoCount() - for i in range(n): - self.typelb.AddString(self.tlb.GetDocumentation(i)[0]) - - def _SetListviewTextItems(self, items): - self.listview.DeleteAllItems() - index = -1 - for item in items: - index = self.listview.InsertItem(index + 1, item[0]) 
- data = item[1] - if data is None: - data = "" - self.listview.SetItemText(index, 1, data) - - def SetupAllInfoTypes(self): - infos = self._GetMainInfoTypes() + self._GetMethodInfoTypes() - self._SetListviewTextItems(infos) - - def _GetMainInfoTypes(self): - pos = self.typelb.GetCurSel() - if pos < 0: - return [] - docinfo = self.tlb.GetDocumentation(pos) - infos = [("GUID", str(self.attr[0]))] - infos.append(("Help File", docinfo[3])) - infos.append(("Help Context", str(docinfo[2]))) - try: - infos.append(("Type Kind", typekindmap[self.tlb.GetTypeInfoType(pos)])) - except: - pass - - info = self.tlb.GetTypeInfo(pos) - attr = info.GetTypeAttr() - infos.append(("Attributes", str(attr))) - - for j in range(attr[8]): - flags = info.GetImplTypeFlags(j) - refInfo = info.GetRefTypeInfo(info.GetRefTypeOfImplType(j)) - doc = refInfo.GetDocumentation(-1) - attr = refInfo.GetTypeAttr() - typeKind = attr[5] - typeFlags = attr[11] - - desc = doc[0] - desc = desc + ", Flags=0x%x, typeKind=0x%x, typeFlags=0x%x" % ( - flags, - typeKind, - typeFlags, - ) - if flags & pythoncom.IMPLTYPEFLAG_FSOURCE: - desc = desc + "(Source)" - infos.append(("Implements", desc)) - - return infos - - def _GetMethodInfoTypes(self): - pos = self.memberlb.GetCurSel() - if pos < 0: - return [] - - realPos, isMethod = self._GetRealMemberPos(pos) - ret = [] - if isMethod: - funcDesc = self.typeinfo.GetFuncDesc(realPos) - id = funcDesc[0] - ret.append(("Func Desc", str(funcDesc))) - else: - id = self.typeinfo.GetVarDesc(realPos)[0] - - docinfo = self.typeinfo.GetDocumentation(id) - ret.append(("Help String", docinfo[1])) - ret.append(("Help Context", str(docinfo[2]))) - return ret - - def CmdTypeListbox(self, id, code): - if code == win32con.LBN_SELCHANGE: - pos = self.typelb.GetCurSel() - if pos >= 0: - self.memberlb.ResetContent() - self.typeinfo = self.tlb.GetTypeInfo(pos) - self.attr = self.typeinfo.GetTypeAttr() - for i in range(self.attr[7]): - id = self.typeinfo.GetVarDesc(i)[0] - 
self.memberlb.AddString(self.typeinfo.GetNames(id)[0]) - for i in range(self.attr[6]): - id = self.typeinfo.GetFuncDesc(i)[0] - self.memberlb.AddString(self.typeinfo.GetNames(id)[0]) - self.SetupAllInfoTypes() - return 1 - - def _GetRealMemberPos(self, pos): - pos = self.memberlb.GetCurSel() - if pos >= self.attr[7]: - return pos - self.attr[7], 1 - elif pos >= 0: - return pos, 0 - else: - raise error("The position is not valid") - - def CmdMemberListbox(self, id, code): - if code == win32con.LBN_SELCHANGE: - self.paramlb.ResetContent() - pos = self.memberlb.GetCurSel() - realPos, isMethod = self._GetRealMemberPos(pos) - if isMethod: - id = self.typeinfo.GetFuncDesc(realPos)[0] - names = self.typeinfo.GetNames(id) - for i in range(len(names)): - if i > 0: - self.paramlb.AddString(names[i]) - self.SetupAllInfoTypes() - return 1 - - def GetTemplate(self): - "Return the template used to create this dialog" - - w = 272 # Dialog width - h = 192 # Dialog height - style = ( - FRAMEDLG_STD - | win32con.WS_VISIBLE - | win32con.DS_SETFONT - | win32con.WS_MINIMIZEBOX - ) - template = [ - ["Type Library Browser", (0, 0, w, h), style, None, (8, "Helv")], - ] - template.append([130, "&Type", -1, (10, 10, 62, 9), SS_STD | win32con.SS_LEFT]) - template.append([131, None, self.IDC_TYPELIST, (10, 20, 80, 80), LBS_STD]) - template.append( - [130, "&Members", -1, (100, 10, 62, 9), SS_STD | win32con.SS_LEFT] - ) - template.append([131, None, self.IDC_MEMBERLIST, (100, 20, 80, 80), LBS_STD]) - template.append( - [130, "&Parameters", -1, (190, 10, 62, 9), SS_STD | win32con.SS_LEFT] - ) - template.append([131, None, self.IDC_PARAMLIST, (190, 20, 75, 80), LBS_STD]) - - lvStyle = ( - SS_STD - | commctrl.LVS_REPORT - | commctrl.LVS_AUTOARRANGE - | commctrl.LVS_ALIGNLEFT - | win32con.WS_BORDER - | win32con.WS_TABSTOP - ) - template.append( - ["SysListView32", "", self.IDC_LISTVIEW, (10, 110, 255, 65), lvStyle] - ) - - return template - - -if __name__ == "__main__": - import sys - - fname = 
None - try: - fname = sys.argv[1] - except: - pass - dlg = TypeBrowseDialog(fname) - if win32api.GetConsoleTitle(): # empty string w/o console - dlg.DoModal() - else: - dlg.CreateWindow(win32ui.GetMainFrame()) diff --git a/lib/win32com/client/util.py b/lib/win32com/client/util.py deleted file mode 100644 index c5762951..00000000 --- a/lib/win32com/client/util.py +++ /dev/null @@ -1,102 +0,0 @@ -"""General client side utilities. - -This module contains utility functions, used primarily by advanced COM -programmers, or other COM modules. -""" -import pythoncom -from win32com.client import Dispatch, _get_good_object_ - -PyIDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch] - - -def WrapEnum(ob, resultCLSID=None): - """Wrap an object in a VARIANT enumerator. - - All VT_DISPATCHs returned by the enumerator are converted to wrapper objects - (which may be either a class instance, or a dynamic.Dispatch type object). - - """ - if type(ob) != pythoncom.TypeIIDs[pythoncom.IID_IEnumVARIANT]: - ob = ob.QueryInterface(pythoncom.IID_IEnumVARIANT) - return EnumVARIANT(ob, resultCLSID) - - -class Enumerator: - """A class that provides indexed access into an Enumerator - - By wrapping a PyIEnum* object in this class, you can perform - natural looping and indexing into the Enumerator. - - Looping is very efficient, but it should be noted that although random - access is supported, the underlying object is still an enumerator, so - this will force many reset-and-seek operations to find the requested index. - - """ - - def __init__(self, enum): - self._oleobj_ = enum # a PyIEnumVARIANT - self.index = -1 - - def __getitem__(self, index): - return self.__GetIndex(index) - - def __call__(self, index): - return self.__GetIndex(index) - - def __GetIndex(self, index): - if type(index) != type(0): - raise TypeError("Only integer indexes are supported for enumerators") - # NOTE - # In this context, self.index is users purely as a flag to say - # "am I still in sequence". 
The user may call Next() or Reset() if they - # so choose, in which case self.index will not be correct (although we - # still want to stay in sequence) - if index != self.index + 1: - # Index requested out of sequence. - self._oleobj_.Reset() - if index: - self._oleobj_.Skip( - index - ) # if asked for item 1, must skip 1, Python always zero based. - self.index = index - result = self._oleobj_.Next(1) - if len(result): - return self._make_retval_(result[0]) - raise IndexError("list index out of range") - - def Next(self, count=1): - ret = self._oleobj_.Next(count) - realRets = [] - for r in ret: - realRets.append(self._make_retval_(r)) - return tuple(realRets) # Convert back to tuple. - - def Reset(self): - return self._oleobj_.Reset() - - def Clone(self): - return self.__class__(self._oleobj_.Clone(), self.resultCLSID) - - def _make_retval_(self, result): - return result - - -class EnumVARIANT(Enumerator): - def __init__(self, enum, resultCLSID=None): - self.resultCLSID = resultCLSID - Enumerator.__init__(self, enum) - - def _make_retval_(self, result): - return _get_good_object_(result, resultCLSID=self.resultCLSID) - - -class Iterator: - def __init__(self, enum, resultCLSID=None): - self.resultCLSID = resultCLSID - self._iter_ = iter(enum.QueryInterface(pythoncom.IID_IEnumVARIANT)) - - def __iter__(self): - return self - - def __next__(self): - return _get_good_object_(next(self._iter_), resultCLSID=self.resultCLSID) diff --git a/lib/win32com/demos/__init__.py b/lib/win32com/demos/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/win32com/demos/connect.py b/lib/win32com/demos/connect.py deleted file mode 100644 index ed7f51c9..00000000 --- a/lib/win32com/demos/connect.py +++ /dev/null @@ -1,100 +0,0 @@ -# Implements _both_ a connectable client, and a connectable server. -# -# Note that we cheat just a little - the Server in this demo is not created -# via Normal COM - this means we can avoid registering the server. 
-# However, the server _is_ accessed as a COM object - just the creation -# is cheated on - so this is still working as a fully-fledged server. - -import pythoncom -import win32com.server.connect -import win32com.server.util -from pywin32_testutil import str2bytes -from win32com.server.exception import Exception - -# This is the IID of the Events interface both Client and Server support. -IID_IConnectDemoEvents = pythoncom.MakeIID("{A4988850-49C3-11d0-AE5D-52342E000000}") - -# The server which implements -# Create a connectable class, that has a single public method -# 'DoIt', which echos to a single sink 'DoneIt' - - -class ConnectableServer(win32com.server.connect.ConnectableServer): - _public_methods_ = [ - "DoIt" - ] + win32com.server.connect.ConnectableServer._public_methods_ - _connect_interfaces_ = [IID_IConnectDemoEvents] - - # The single public method that the client can call on us - # (ie, as a normal COM server, this exposes just this single method. - def DoIt(self, arg): - # Simply broadcast a notification. - self._BroadcastNotify(self.NotifyDoneIt, (arg,)) - - def NotifyDoneIt(self, interface, arg): - interface.Invoke(1000, 0, pythoncom.DISPATCH_METHOD, 1, arg) - - -# Here is the client side of the connection world. -# Define a COM object which implements the methods defined by the -# IConnectDemoEvents interface. -class ConnectableClient: - # This is another cheat - I _know_ the server defines the "DoneIt" event - # as DISPID==1000 - I also know from the implementation details of COM - # that the first method in _public_methods_ gets 1000. - # Normally some explicit DISPID->Method mapping is required. - _public_methods_ = ["OnDoneIt"] - - def __init__(self): - self.last_event_arg = None - - # A client must implement QI, and respond to a query for the Event interface. - # In addition, it must provide a COM object (which server.util.wrap) does. 
- def _query_interface_(self, iid): - import win32com.server.util - - # Note that this seems like a necessary hack. I am responding to IID_IConnectDemoEvents - # but only creating an IDispatch gateway object. - if iid == IID_IConnectDemoEvents: - return win32com.server.util.wrap(self) - - # And here is our event method which gets called. - def OnDoneIt(self, arg): - self.last_event_arg = arg - - -def CheckEvent(server, client, val, verbose): - client.last_event_arg = None - server.DoIt(val) - if client.last_event_arg != val: - raise RuntimeError("Sent %r, but got back %r" % (val, client.last_event_arg)) - if verbose: - print("Sent and received %r" % val) - - -# A simple test script for all this. -# In the real world, it is likely that the code controlling the server -# will be in the same class as that getting the notifications. -def test(verbose=0): - import win32com.client.connect - import win32com.client.dynamic - import win32com.server.policy - - server = win32com.client.dynamic.Dispatch( - win32com.server.util.wrap(ConnectableServer()) - ) - connection = win32com.client.connect.SimpleConnection() - client = ConnectableClient() - connection.Connect(server, client, IID_IConnectDemoEvents) - CheckEvent(server, client, "Hello", verbose) - CheckEvent(server, client, str2bytes("Here is a null>\x00<"), verbose) - CheckEvent(server, client, "Here is a null>\x00<", verbose) - val = "test-\xe0\xf2" # 2 extended characters. - CheckEvent(server, client, val, verbose) - if verbose: - print("Everything seemed to work!") - # Aggressive memory leak checking (ie, do nothing!) :-) All should cleanup OK??? 
- - -if __name__ == "__main__": - test(1) diff --git a/lib/win32com/demos/dump_clipboard.py b/lib/win32com/demos/dump_clipboard.py deleted file mode 100644 index 534d540a..00000000 --- a/lib/win32com/demos/dump_clipboard.py +++ /dev/null @@ -1,74 +0,0 @@ -import pythoncom -import win32con - -formats = """CF_TEXT CF_BITMAP CF_METAFILEPICT CF_SYLK CF_DIF CF_TIFF - CF_OEMTEXT CF_DIB CF_PALETTE CF_PENDATA CF_RIFF CF_WAVE - CF_UNICODETEXT CF_ENHMETAFILE CF_HDROP CF_LOCALE CF_MAX - CF_OWNERDISPLAY CF_DSPTEXT CF_DSPBITMAP CF_DSPMETAFILEPICT - CF_DSPENHMETAFILE""".split() -format_name_map = {} -for f in formats: - val = getattr(win32con, f) - format_name_map[val] = f - -tymeds = [attr for attr in pythoncom.__dict__.keys() if attr.startswith("TYMED_")] - - -def DumpClipboard(): - do = pythoncom.OleGetClipboard() - print("Dumping all clipboard formats...") - for fe in do.EnumFormatEtc(): - fmt, td, aspect, index, tymed = fe - tymeds_this = [ - getattr(pythoncom, t) for t in tymeds if tymed & getattr(pythoncom, t) - ] - print("Clipboard format", format_name_map.get(fmt, str(fmt))) - for t_this in tymeds_this: - # As we are enumerating there should be no need to call - # QueryGetData, but we do anyway! - fetc_query = fmt, td, aspect, index, t_this - try: - do.QueryGetData(fetc_query) - except pythoncom.com_error: - print("Eeek - QGD indicated failure for tymed", t_this) - # now actually get it. 
- try: - medium = do.GetData(fetc_query) - except pythoncom.com_error as exc: - print("Failed to get the clipboard data:", exc) - continue - if medium.tymed == pythoncom.TYMED_GDI: - data = "GDI handle %d" % medium.data - elif medium.tymed == pythoncom.TYMED_MFPICT: - data = "METAFILE handle %d" % medium.data - elif medium.tymed == pythoncom.TYMED_ENHMF: - data = "ENHMETAFILE handle %d" % medium.data - elif medium.tymed == pythoncom.TYMED_HGLOBAL: - data = "%d bytes via HGLOBAL" % len(medium.data) - elif medium.tymed == pythoncom.TYMED_FILE: - data = "filename '%s'" % data - elif medium.tymed == pythoncom.TYMED_ISTREAM: - stream = medium.data - stream.Seek(0, 0) - bytes = 0 - while 1: - chunk = stream.Read(4096) - if not chunk: - break - bytes += len(chunk) - data = "%d bytes via IStream" % bytes - elif medium.tymed == pythoncom.TYMED_ISTORAGE: - data = "a IStorage" - else: - data = "*** unknown tymed!" - print(" -> got", data) - do = None - - -if __name__ == "__main__": - DumpClipboard() - if pythoncom._GetInterfaceCount() + pythoncom._GetGatewayCount(): - print( - "XXX - Leaving with %d/%d COM objects alive" - % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount()) - ) diff --git a/lib/win32com/demos/eventsApartmentThreaded.py b/lib/win32com/demos/eventsApartmentThreaded.py deleted file mode 100644 index ae8e70fa..00000000 --- a/lib/win32com/demos/eventsApartmentThreaded.py +++ /dev/null @@ -1,98 +0,0 @@ -# A sample originally provided by Richard Bell, and modified by Mark Hammond. - -# This sample demonstrates how to use COM events in an aparment-threaded -# world. In this world, COM itself ensures that all calls to and events -# from an object happen on the same thread that created the object, even -# if they originated from different threads. For this cross-thread -# marshalling to work, this main thread *must* run a "message-loop" (ie, -# a loop fetching and dispatching Windows messages). Without such message -# processing, dead-locks can occur. 
- -# See also eventsFreeThreaded.py for how to do this in a free-threaded -# world where these marshalling considerations do not exist. - -# NOTE: This example uses Internet Explorer, but it should not be considerd -# a "best-practices" for writing against IE events, but for working with -# events in general. For example: -# * The first OnDocumentComplete event is not a reliable indicator that the -# URL has completed loading -# * As we are demonstrating the most efficient way of handling events, when -# running this sample you will see an IE Windows briefly appear, but -# vanish without ever being repainted. - -import time - -# sys.coinit_flags not set, so pythoncom initializes apartment-threaded. -import pythoncom -import win32api -import win32com.client -import win32event - - -class ExplorerEvents: - def __init__(self): - self.event = win32event.CreateEvent(None, 0, 0, None) - - def OnDocumentComplete(self, pDisp=pythoncom.Empty, URL=pythoncom.Empty): - thread = win32api.GetCurrentThreadId() - print("OnDocumentComplete event processed on thread %d" % thread) - # Set the event our main thread is waiting on. - win32event.SetEvent(self.event) - - def OnQuit(self): - thread = win32api.GetCurrentThreadId() - print("OnQuit event processed on thread %d" % thread) - win32event.SetEvent(self.event) - - -def WaitWhileProcessingMessages(event, timeout=2): - start = time.perf_counter() - while True: - # Wake 4 times a second - we can't just specify the - # full timeout here, as then it would reset for every - # message we process. - rc = win32event.MsgWaitForMultipleObjects( - (event,), 0, 250, win32event.QS_ALLEVENTS - ) - if rc == win32event.WAIT_OBJECT_0: - # event signalled - stop now! - return True - if (time.perf_counter() - start) > timeout: - # Timeout expired. - return False - # must be a message. 
- pythoncom.PumpWaitingMessages() - - -def TestExplorerEvents(): - iexplore = win32com.client.DispatchWithEvents( - "InternetExplorer.Application", ExplorerEvents - ) - - thread = win32api.GetCurrentThreadId() - print("TestExplorerEvents created IE object on thread %d" % thread) - - iexplore.Visible = 1 - try: - iexplore.Navigate(win32api.GetFullPathName("..\\readme.html")) - except pythoncom.com_error as details: - print("Warning - could not open the test HTML file", details) - - # Wait for the event to be signalled while pumping messages. - if not WaitWhileProcessingMessages(iexplore.event): - print("Document load event FAILED to fire!!!") - - iexplore.Quit() - # - # Give IE a chance to shutdown, else it can get upset on fast machines. - # Note, Quit generates events. Although this test does NOT catch them - # it is NECESSARY to pump messages here instead of a sleep so that the Quit - # happens properly! - if not WaitWhileProcessingMessages(iexplore.event): - print("OnQuit event FAILED to fire!!!") - - iexplore = None - - -if __name__ == "__main__": - TestExplorerEvents() diff --git a/lib/win32com/demos/eventsFreeThreaded.py b/lib/win32com/demos/eventsFreeThreaded.py deleted file mode 100644 index 2fc04d82..00000000 --- a/lib/win32com/demos/eventsFreeThreaded.py +++ /dev/null @@ -1,92 +0,0 @@ -# A sample originally provided by Richard Bell, and modified by Mark Hammond. - -# This sample demonstrates how to use COM events in a free-threaded world. -# In this world, there is no need to marshall calls across threads, so -# no message loops are needed at all. This means regular cross-thread -# sychronization can be used. In this sample we just wait on win32 event -# objects. - -# See also ieEventsApartmentThreaded.py for how to do this in an -# aparment-threaded world, where thread-marshalling complicates things. 
- -# NOTE: This example uses Internet Explorer, but it should not be considerd -# a "best-practices" for writing against IE events, but for working with -# events in general. For example: -# * The first OnDocumentComplete event is not a reliable indicator that the -# URL has completed loading -# * As we are demonstrating the most efficient way of handling events, when -# running this sample you will see an IE Windows briefly appear, but -# vanish without ever being repainted. - -import sys - -sys.coinit_flags = 0 # specify free threading - - -import pythoncom -import win32api -import win32com.client -import win32event - - -# The print statements indicate that COM has actually started another thread -# and will deliver the events to that thread (ie, the events do not actually -# fire on our main thread. -class ExplorerEvents: - def __init__(self): - # We reuse this event for all events. - self.event = win32event.CreateEvent(None, 0, 0, None) - - def OnDocumentComplete(self, pDisp=pythoncom.Empty, URL=pythoncom.Empty): - # - # Caution: Since the main thread and events thread(s) are different - # it may be necessary to serialize access to shared data. Because - # this is a simple test case, that is not required here. Your - # situation may be different. Caveat programmer. - # - thread = win32api.GetCurrentThreadId() - print("OnDocumentComplete event processed on thread %d" % thread) - # Set the event our main thread is waiting on. 
- win32event.SetEvent(self.event) - - def OnQuit(self): - thread = win32api.GetCurrentThreadId() - print("OnQuit event processed on thread %d" % thread) - win32event.SetEvent(self.event) - - -def TestExplorerEvents(): - iexplore = win32com.client.DispatchWithEvents( - "InternetExplorer.Application", ExplorerEvents - ) - - thread = win32api.GetCurrentThreadId() - print("TestExplorerEvents created IE object on thread %d" % thread) - - iexplore.Visible = 1 - try: - iexplore.Navigate(win32api.GetFullPathName("..\\readme.html")) - except pythoncom.com_error as details: - print("Warning - could not open the test HTML file", details) - - # In this free-threaded example, we can simply wait until an event has - # been set - we will give it 2 seconds before giving up. - rc = win32event.WaitForSingleObject(iexplore.event, 2000) - if rc != win32event.WAIT_OBJECT_0: - print("Document load event FAILED to fire!!!") - - iexplore.Quit() - # Now we can do the same thing to wait for exit! - # Although Quit generates events, in this free-threaded world we - # do *not* need to run any message pumps. - - rc = win32event.WaitForSingleObject(iexplore.event, 2000) - if rc != win32event.WAIT_OBJECT_0: - print("OnQuit event FAILED to fire!!!") - - iexplore = None - print("Finished the IE event sample!") - - -if __name__ == "__main__": - TestExplorerEvents() diff --git a/lib/win32com/demos/excelAddin.py b/lib/win32com/demos/excelAddin.py deleted file mode 100644 index 96f6cff8..00000000 --- a/lib/win32com/demos/excelAddin.py +++ /dev/null @@ -1,170 +0,0 @@ -# A demo plugin for Microsoft Excel -# -# This addin simply adds a new button to the main Excel toolbar, -# and displays a message box when clicked. Thus, it demonstrates -# how to plug in to Excel itself, and hook Excel events. 
-# -# -# To register the addin, simply execute: -# excelAddin.py -# This will install the COM server, and write the necessary -# AddIn key to Excel -# -# To unregister completely: -# excelAddin.py --unregister -# -# To debug, execute: -# excelAddin.py --debug -# -# Then open Pythonwin, and select "Tools->Trace Collector Debugging Tool" -# Restart excel, and you should see some output generated. -# -# NOTE: If the AddIn fails with an error, Excel will re-register -# the addin to not automatically load next time Excel starts. To -# correct this, simply re-register the addin (see above) -# -# Author Eric Koome -# Copyright (c) 2003 Wavecom Inc. All rights reserved -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES -# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL ERIC KOOME OR -# ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. - -import sys - -import pythoncom -from win32com import universal -from win32com.client import Dispatch, DispatchWithEvents, constants, gencache -from win32com.server.exception import COMException - -# Support for COM objects we use. 
-gencache.EnsureModule( - "{00020813-0000-0000-C000-000000000046}", 0, 1, 3, bForDemand=True -) # Excel 9 -gencache.EnsureModule( - "{2DF8D04C-5BFA-101B-BDE5-00AA0044DE52}", 0, 2, 1, bForDemand=True -) # Office 9 - -# The TLB defining the interfaces we implement -universal.RegisterInterfaces( - "{AC0714F2-3D04-11D1-AE7D-00A0C90F26F4}", 0, 1, 0, ["_IDTExtensibility2"] -) - - -class ButtonEvent: - def OnClick(self, button, cancel): - import win32con # Possible, but not necessary, to use a Pythonwin GUI - import win32ui - - win32ui.MessageBox("Hello from Python", "Python Test", win32con.MB_OKCANCEL) - return cancel - - -class ExcelAddin: - _com_interfaces_ = ["_IDTExtensibility2"] - _public_methods_ = [] - _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER - _reg_clsid_ = "{C5482ECA-F559-45A0-B078-B2036E6F011A}" - _reg_progid_ = "Python.Test.ExcelAddin" - _reg_policy_spec_ = "win32com.server.policy.EventHandlerPolicy" - - def __init__(self): - self.appHostApp = None - - def OnConnection(self, application, connectMode, addin, custom): - print("OnConnection", application, connectMode, addin, custom) - try: - self.appHostApp = application - cbcMyBar = self.appHostApp.CommandBars.Add( - Name="PythonBar", - Position=constants.msoBarTop, - MenuBar=constants.msoBarTypeNormal, - Temporary=True, - ) - btnMyButton = cbcMyBar.Controls.Add( - Type=constants.msoControlButton, Parameter="Greetings" - ) - btnMyButton = self.toolbarButton = DispatchWithEvents( - btnMyButton, ButtonEvent - ) - btnMyButton.Style = constants.msoButtonCaption - btnMyButton.BeginGroup = True - btnMyButton.Caption = "&Python" - btnMyButton.TooltipText = "Python rules the World" - btnMyButton.Width = "34" - cbcMyBar.Visible = True - except pythoncom.com_error as xxx_todo_changeme: - (hr, msg, exc, arg) = xxx_todo_changeme.args - print("The Excel call failed with code %d: %s" % (hr, msg)) - if exc is None: - print("There is no extended error information") - else: - wcode, source, text, helpFile, helpId, scode = 
exc - print("The source of the error is", source) - print("The error message is", text) - print("More info can be found in %s (id=%d)" % (helpFile, helpId)) - - def OnDisconnection(self, mode, custom): - print("OnDisconnection") - self.appHostApp.CommandBars("PythonBar").Delete - self.appHostApp = None - - def OnAddInsUpdate(self, custom): - print("OnAddInsUpdate", custom) - - def OnStartupComplete(self, custom): - print("OnStartupComplete", custom) - - def OnBeginShutdown(self, custom): - print("OnBeginShutdown", custom) - - -def RegisterAddin(klass): - import winreg - - key = winreg.CreateKey( - winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Excel\\Addins" - ) - subkey = winreg.CreateKey(key, klass._reg_progid_) - winreg.SetValueEx(subkey, "CommandLineSafe", 0, winreg.REG_DWORD, 0) - winreg.SetValueEx(subkey, "LoadBehavior", 0, winreg.REG_DWORD, 3) - winreg.SetValueEx(subkey, "Description", 0, winreg.REG_SZ, "Excel Addin") - winreg.SetValueEx(subkey, "FriendlyName", 0, winreg.REG_SZ, "A Simple Excel Addin") - - -def UnregisterAddin(klass): - import winreg - - try: - winreg.DeleteKey( - winreg.HKEY_CURRENT_USER, - "Software\\Microsoft\\Office\\Excel\\Addins\\" + klass._reg_progid_, - ) - except WindowsError: - pass - - -if __name__ == "__main__": - import win32com.server.register - - win32com.server.register.UseCommandLine(ExcelAddin) - if "--unregister" in sys.argv: - UnregisterAddin(ExcelAddin) - else: - RegisterAddin(ExcelAddin) diff --git a/lib/win32com/demos/excelRTDServer.py b/lib/win32com/demos/excelRTDServer.py deleted file mode 100644 index c50e1ac7..00000000 --- a/lib/win32com/demos/excelRTDServer.py +++ /dev/null @@ -1,434 +0,0 @@ -"""Excel IRTDServer implementation. - -This module is a functional example of how to implement the IRTDServer interface -in python, using the pywin32 extensions. 
Further details, about this interface -and it can be found at: - http://msdn.microsoft.com/library/default.asp?url=/library/en-us/dnexcl2k2/html/odc_xlrtdfaq.asp -""" - -# Copyright (c) 2003-2004 by Chris Nilsson -# -# By obtaining, using, and/or copying this software and/or its -# associated documentation, you agree that you have read, understood, -# and will comply with the following terms and conditions: -# -# Permission to use, copy, modify, and distribute this software and -# its associated documentation for any purpose and without fee is -# hereby granted, provided that the above copyright notice appears in -# all copies, and that both that copyright notice and this permission -# notice appear in supporting documentation, and that the name of -# Christopher Nilsson (the author) not be used in advertising or publicity -# pertaining to distribution of the software without specific, written -# prior permission. -# -# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD -# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- -# ABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR -# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY -# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS -# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE -# OF THIS SOFTWARE. - -import datetime # For the example classes... -import threading - -import pythoncom -import win32com.client -from win32com import universal -from win32com.client import gencache -from win32com.server.exception import COMException - -# Typelib info for version 10 - aka Excel XP. -# This is the minimum version of excel that we can work with as this is when -# Microsoft introduced these interfaces. -EXCEL_TLB_GUID = "{00020813-0000-0000-C000-000000000046}" -EXCEL_TLB_LCID = 0 -EXCEL_TLB_MAJOR = 1 -EXCEL_TLB_MINOR = 4 - -# Import the excel typelib to make sure we've got early-binding going on. 
-# The "ByRef" parameters we use later won't work without this. -gencache.EnsureModule(EXCEL_TLB_GUID, EXCEL_TLB_LCID, EXCEL_TLB_MAJOR, EXCEL_TLB_MINOR) - -# Tell pywin to import these extra interfaces. -# -- -# QUESTION: Why? The interfaces seem to descend from IDispatch, so -# I'd have thought, for example, calling callback.UpdateNotify() (on the -# IRTDUpdateEvent callback excel gives us) would work without molestation. -# But the callback needs to be cast to a "real" IRTDUpdateEvent type. Hmm... -# This is where my small knowledge of the pywin framework / COM gets hazy. -# -- -# Again, we feed in the Excel typelib as the source of these interfaces. -universal.RegisterInterfaces( - EXCEL_TLB_GUID, - EXCEL_TLB_LCID, - EXCEL_TLB_MAJOR, - EXCEL_TLB_MINOR, - ["IRtdServer", "IRTDUpdateEvent"], -) - - -class ExcelRTDServer(object): - """Base RTDServer class. - - Provides most of the features needed to implement the IRtdServer interface. - Manages topic adding, removal, and packing up the values for excel. - - Shouldn't be instanciated directly. - - Instead, descendant classes should override the CreateTopic() method. - Topic objects only need to provide a GetValue() function to play nice here. - The values given need to be atomic (eg. string, int, float... etc). - - Also note: nothing has been done within this class to ensure that we get - time to check our topics for updates. I've left that up to the subclass - since the ways, and needs, of refreshing your topics will vary greatly. For - example, the sample implementation uses a timer thread to wake itself up. - Whichever way you choose to do it, your class needs to be able to wake up - occaisionally, since excel will never call your class without being asked to - first. - - Excel will communicate with our object in this order: - 1. Excel instanciates our object and calls ServerStart, providing us with - an IRTDUpdateEvent callback object. - 2. Excel calls ConnectData when it wants to subscribe to a new "topic". - 3. 
When we have new data to provide, we call the UpdateNotify method of the - callback object we were given. - 4. Excel calls our RefreshData method, and receives a 2d SafeArray (row-major) - containing the Topic ids in the 1st dim, and the topic values in the - 2nd dim. - 5. When not needed anymore, Excel will call our DisconnectData to - unsubscribe from a topic. - 6. When there are no more topics left, Excel will call our ServerTerminate - method to kill us. - - Throughout, at undetermined periods, Excel will call our Heartbeat - method to see if we're still alive. It must return a non-zero value, or - we'll be killed. - - NOTE: By default, excel will at most call RefreshData once every 2 seconds. - This is a setting that needs to be changed excel-side. To change this, - you can set the throttle interval like this in the excel VBA object model: - Application.RTD.ThrottleInterval = 1000 ' milliseconds - """ - - _com_interfaces_ = ["IRtdServer"] - _public_methods_ = [ - "ConnectData", - "DisconnectData", - "Heartbeat", - "RefreshData", - "ServerStart", - "ServerTerminate", - ] - _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER - # _reg_clsid_ = "# subclass must provide this class attribute" - # _reg_desc_ = "# subclass should provide this description" - # _reg_progid_ = "# subclass must provide this class attribute" - - ALIVE = 1 - NOT_ALIVE = 0 - - def __init__(self): - """Constructor""" - super(ExcelRTDServer, self).__init__() - self.IsAlive = self.ALIVE - self.__callback = None - self.topics = {} - - def SignalExcel(self): - """Use the callback we were given to tell excel new data is available.""" - if self.__callback is None: - raise COMException(desc="Callback excel provided is Null") - self.__callback.UpdateNotify() - - def ConnectData(self, TopicID, Strings, GetNewValues): - """Creates a new topic out of the Strings excel gives us.""" - try: - self.topics[TopicID] = self.CreateTopic(Strings) - except Exception as why: - raise COMException(desc=str(why)) - 
GetNewValues = True - result = self.topics[TopicID] - if result is None: - result = "# %s: Waiting for update" % self.__class__.__name__ - else: - result = result.GetValue() - - # fire out internal event... - self.OnConnectData(TopicID) - - # GetNewValues as per interface is ByRef, so we need to pass it back too. - return result, GetNewValues - - def DisconnectData(self, TopicID): - """Deletes the given topic.""" - self.OnDisconnectData(TopicID) - - if TopicID in self.topics: - self.topics[TopicID] = None - del self.topics[TopicID] - - def Heartbeat(self): - """Called by excel to see if we're still here.""" - return self.IsAlive - - def RefreshData(self, TopicCount): - """Packs up the topic values. Called by excel when it's ready for an update. - - Needs to: - * Return the current number of topics, via the "ByRef" TopicCount - * Return a 2d SafeArray of the topic data. - - 1st dim: topic numbers - - 2nd dim: topic values - - We could do some caching, instead of repacking everytime... - But this works for demonstration purposes.""" - TopicCount = len(self.topics) - self.OnRefreshData() - - # Grow the lists, so we don't need a heap of calls to append() - results = [[None] * TopicCount, [None] * TopicCount] - - # Excel expects a 2-dimensional array. The first dim contains the - # topic numbers, and the second contains the values for the topics. - # In true VBA style (yuck), we need to pack the array in row-major format, - # which looks like: - # ( (topic_num1, topic_num2, ..., topic_numN), \ - # (topic_val1, topic_val2, ..., topic_valN) ) - for idx, topicdata in enumerate(self.topics.items()): - topicNum, topic = topicdata - results[0][idx] = topicNum - results[1][idx] = topic.GetValue() - - # TopicCount is meant to be passed to us ByRef, so return it as well, as per - # the way pywin32 handles ByRef arguments. - return tuple(results), TopicCount - - def ServerStart(self, CallbackObject): - """Excel has just created us... 
We take its callback for later, and set up shop.""" - self.IsAlive = self.ALIVE - - if CallbackObject is None: - raise COMException(desc="Excel did not provide a callback") - - # Need to "cast" the raw PyIDispatch object to the IRTDUpdateEvent interface - IRTDUpdateEventKlass = win32com.client.CLSIDToClass.GetClass( - "{A43788C1-D91B-11D3-8F39-00C04F3651B8}" - ) - self.__callback = IRTDUpdateEventKlass(CallbackObject) - - self.OnServerStart() - - return self.IsAlive - - def ServerTerminate(self): - """Called when excel no longer wants us.""" - self.IsAlive = self.NOT_ALIVE # On next heartbeat, excel will free us - self.OnServerTerminate() - - def CreateTopic(self, TopicStrings=None): - """Topic factory method. Subclass must override. - - Topic objects need to provide: - * GetValue() method which returns an atomic value. - - Will raise NotImplemented if not overridden. - """ - raise NotImplemented("Subclass must implement") - - # Overridable class events... - def OnConnectData(self, TopicID): - """Called when a new topic has been created, at excel's request.""" - pass - - def OnDisconnectData(self, TopicID): - """Called when a topic is about to be deleted, at excel's request.""" - pass - - def OnRefreshData(self): - """Called when excel has requested all current topic data.""" - pass - - def OnServerStart(self): - """Called when excel has instanciated us.""" - pass - - def OnServerTerminate(self): - """Called when excel is about to destroy us.""" - pass - - -class RTDTopic(object): - """Base RTD Topic. - Only method required by our RTDServer implementation is GetValue(). - The others are more for convenience.""" - - def __init__(self, TopicStrings): - super(RTDTopic, self).__init__() - self.TopicStrings = TopicStrings - self.__currentValue = None - self.__dirty = False - - def Update(self, sender): - """Called by the RTD Server. - Gives us a chance to check if our topic data needs to be - changed (eg. 
check a file, quiz a database, etc).""" - raise NotImplemented("subclass must implement") - - def Reset(self): - """Call when this topic isn't considered "dirty" anymore.""" - self.__dirty = False - - def GetValue(self): - return self.__currentValue - - def SetValue(self, value): - self.__dirty = True - self.__currentValue = value - - def HasChanged(self): - return self.__dirty - - -# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= - -###################################### -# Example classes -###################################### - - -class TimeServer(ExcelRTDServer): - """Example Time RTD server. - - Sends time updates back to excel. - - example of use, in an excel sheet: - =RTD("Python.RTD.TimeServer","","seconds","5") - - This will cause a timestamp string to fill the cell, and update its value - every 5 seconds (or as close as possible depending on how busy excel is). - - The empty string parameter denotes the com server is running on the local - machine. Otherwise, put in the hostname to look on. For more info - on this, lookup the Excel help for its "RTD" worksheet function. - - Obviously, you'd want to wrap this kind of thing in a friendlier VBA - function. - - Also, remember that the RTD function accepts a maximum of 28 arguments! - If you want to pass more, you may need to concatenate arguments into one - string, and have your topic parse them appropriately. - """ - - # win32com.server setup attributes... - # Never copy the _reg_clsid_ value in your own classes! - _reg_clsid_ = "{EA7F2CF1-11A2-45E4-B2D5-68E240DB8CB1}" - _reg_progid_ = "Python.RTD.TimeServer" - _reg_desc_ = "Python class implementing Excel IRTDServer -- feeds time" - - # other class attributes... - INTERVAL = 0.5 # secs. Threaded timer will wake us up at this interval. - - def __init__(self): - super(TimeServer, self).__init__() - - # Simply timer thread to ensure we get to update our topics, and - # tell excel about any changes. 
This is a pretty basic and dirty way to - # do this. Ideally, there should be some sort of waitable (eg. either win32 - # event, socket data event...) and be kicked off by that event triggering. - # As soon as we set up shop here, we _must_ return control back to excel. - # (ie. we can't block and do our own thing...) - self.ticker = threading.Timer(self.INTERVAL, self.Update) - - def OnServerStart(self): - self.ticker.start() - - def OnServerTerminate(self): - if not self.ticker.finished.isSet(): - self.ticker.cancel() # Cancel our wake-up thread. Excel has killed us. - - def Update(self): - # Get our wake-up thread ready... - self.ticker = threading.Timer(self.INTERVAL, self.Update) - try: - # Check if any of our topics have new info to pass on - if len(self.topics): - refresh = False - for topic in self.topics.values(): - topic.Update(self) - if topic.HasChanged(): - refresh = True - topic.Reset() - - if refresh: - self.SignalExcel() - finally: - self.ticker.start() # Make sure we get to run again - - def CreateTopic(self, TopicStrings=None): - """Topic factory. Builds a TimeTopic object out of the given TopicStrings.""" - return TimeTopic(TopicStrings) - - -class TimeTopic(RTDTopic): - """Example topic for example RTD server. - - Will accept some simple commands to alter how long to delay value updates. - - Commands: - * seconds, delay_in_seconds - * minutes, delay_in_minutes - * hours, delay_in_hours - """ - - def __init__(self, TopicStrings): - super(TimeTopic, self).__init__(TopicStrings) - try: - self.cmd, self.delay = self.TopicStrings - except Exception as E: - # We could simply return a "# ERROR" type string as the - # topic value, but explosions like this should be able to get handled by - # the VBA-side "On Error" stuff. 
- raise ValueError("Invalid topic strings: %s" % str(TopicStrings)) - - # self.cmd = str(self.cmd) - self.delay = float(self.delay) - - # setup our initial value - self.checkpoint = self.timestamp() - self.SetValue(str(self.checkpoint)) - - def timestamp(self): - return datetime.datetime.now() - - def Update(self, sender): - now = self.timestamp() - delta = now - self.checkpoint - refresh = False - if self.cmd == "seconds": - if delta.seconds >= self.delay: - refresh = True - elif self.cmd == "minutes": - if delta.minutes >= self.delay: - refresh = True - elif self.cmd == "hours": - if delta.hours >= self.delay: - refresh = True - else: - self.SetValue("#Unknown command: " + self.cmd) - - if refresh: - self.SetValue(str(now)) - self.checkpoint = now - - -if __name__ == "__main__": - import win32com.server.register - - # Register/Unregister TimeServer example - # eg. at the command line: excelrtd.py --register - # Then type in an excel cell something like: - # =RTD("Python.RTD.TimeServer","","seconds","5") - win32com.server.register.UseCommandLine(TimeServer) diff --git a/lib/win32com/demos/iebutton.py b/lib/win32com/demos/iebutton.py deleted file mode 100644 index bc3ce99b..00000000 --- a/lib/win32com/demos/iebutton.py +++ /dev/null @@ -1,217 +0,0 @@ -# -*- coding: latin-1 -*- - -# PyWin32 Internet Explorer Button -# -# written by Leonard Ritter (paniq@gmx.net) -# and Robert Förtsch (info@robert-foertsch.com) - - -""" -This sample implements a simple IE Button COM server -with access to the IWebBrowser2 interface. - -To demonstrate: -* Execute this script to register the server. -* Open Pythonwin's Tools -> Trace Collector Debugging Tool, so you can - see the output of 'print' statements in this demo. -* Open a new IE instance. The toolbar should have a new "scissors" icon, - with tooltip text "IE Button" - this is our new button - click it. -* Switch back to the Pythonwin window - you should see: - IOleCommandTarget::Exec called. 
- This is the button being clicked. Extending this to do something more - useful is left as an exercise. - -Contribtions to this sample to make it a little "friendlier" welcome! -""" - -# imports section - -import pythoncom -import win32api -import win32com -import win32com.server.register -from win32com import universal -from win32com.client import Dispatch, DispatchWithEvents, constants, gencache, getevents - -# This demo uses 'print' - use win32traceutil to see it if we have no -# console. -try: - win32api.GetConsoleTitle() -except win32api.error: - import win32traceutil - -import array - -from win32com.axcontrol import axcontrol - -# ensure we know the ms internet controls typelib so we have access to IWebBrowser2 later on -win32com.client.gencache.EnsureModule("{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}", 0, 1, 1) - - -# -IObjectWithSite_methods = ["SetSite", "GetSite"] -IOleCommandTarget_methods = ["Exec", "QueryStatus"] - -_iebutton_methods_ = IOleCommandTarget_methods + IObjectWithSite_methods -_iebutton_com_interfaces_ = [ - axcontrol.IID_IOleCommandTarget, - axcontrol.IID_IObjectWithSite, # IObjectWithSite -] - - -class Stub: - """ - this class serves as a method stub, - outputting debug info whenever the object - is being called. - """ - - def __init__(self, name): - self.name = name - - def __call__(self, *args): - print("STUB: ", self.name, args) - - -class IEButton: - """ - The actual COM server class - """ - - _com_interfaces_ = _iebutton_com_interfaces_ - _public_methods_ = _iebutton_methods_ - _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER - _button_text_ = "IE Button" - _tool_tip_ = "An example implementation for an IE Button." 
- _icon_ = "" - _hot_icon_ = "" - - def __init__(self): - # put stubs for non-implemented methods - for method in self._public_methods_: - if not hasattr(self, method): - print("providing default stub for %s" % method) - setattr(self, method, Stub(method)) - - def QueryStatus(self, pguidCmdGroup, prgCmds, cmdtextf): - # 'cmdtextf' is the 'cmdtextf' element from the OLECMDTEXT structure, - # or None if a NULL pointer was passed. - result = [] - for id, flags in prgCmds: - flags |= axcontrol.OLECMDF_SUPPORTED | axcontrol.OLECMDF_ENABLED - result.append((id, flags)) - if cmdtextf is None: - cmdtext = None # must return None if nothing requested. - # IE never seems to want any text - this code is here for - # demo purposes only - elif cmdtextf == axcontrol.OLECMDTEXTF_NAME: - cmdtext = "IEButton Name" - else: - cmdtext = "IEButton State" - return result, cmdtext - - def Exec(self, pguidCmdGroup, nCmdID, nCmdExecOpt, pvaIn): - print(pguidCmdGroup, nCmdID, nCmdExecOpt, pvaIn) - print("IOleCommandTarget::Exec called.") - # self.webbrowser.ShowBrowserBar(GUID_IETOOLBAR, not is_ietoolbar_visible()) - - def SetSite(self, unknown): - if unknown: - # first get a command target - cmdtarget = unknown.QueryInterface(axcontrol.IID_IOleCommandTarget) - # then travel over to a service provider - serviceprovider = cmdtarget.QueryInterface(pythoncom.IID_IServiceProvider) - # finally ask for the internet explorer application, returned as a dispatch object - self.webbrowser = win32com.client.Dispatch( - serviceprovider.QueryService( - "{0002DF05-0000-0000-C000-000000000046}", pythoncom.IID_IDispatch - ) - ) - else: - # lose all references - self.webbrowser = None - - def GetClassID(self): - return self._reg_clsid_ - - -def register(classobj): - import winreg - - subKeyCLSID = ( - "SOFTWARE\\Microsoft\\Internet Explorer\\Extensions\\%38s" - % classobj._reg_clsid_ - ) - try: - hKey = winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE, subKeyCLSID) - subKey = winreg.SetValueEx( - hKey, 
"ButtonText", 0, winreg.REG_SZ, classobj._button_text_ - ) - winreg.SetValueEx( - hKey, "ClsidExtension", 0, winreg.REG_SZ, classobj._reg_clsid_ - ) # reg value for calling COM object - winreg.SetValueEx( - hKey, "CLSID", 0, winreg.REG_SZ, "{1FBA04EE-3024-11D2-8F1F-0000F87ABD16}" - ) # CLSID for button that sends command to COM object - winreg.SetValueEx(hKey, "Default Visible", 0, winreg.REG_SZ, "Yes") - winreg.SetValueEx(hKey, "ToolTip", 0, winreg.REG_SZ, classobj._tool_tip_) - winreg.SetValueEx(hKey, "Icon", 0, winreg.REG_SZ, classobj._icon_) - winreg.SetValueEx(hKey, "HotIcon", 0, winreg.REG_SZ, classobj._hot_icon_) - except WindowsError: - print("Couldn't set standard toolbar reg keys.") - else: - print("Set standard toolbar reg keys.") - - -def unregister(classobj): - import winreg - - subKeyCLSID = ( - "SOFTWARE\\Microsoft\\Internet Explorer\\Extensions\\%38s" - % classobj._reg_clsid_ - ) - try: - hKey = winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE, subKeyCLSID) - subKey = winreg.DeleteValue(hKey, "ButtonText") - winreg.DeleteValue(hKey, "ClsidExtension") # for calling COM object - winreg.DeleteValue(hKey, "CLSID") - winreg.DeleteValue(hKey, "Default Visible") - winreg.DeleteValue(hKey, "ToolTip") - winreg.DeleteValue(hKey, "Icon") - winreg.DeleteValue(hKey, "HotIcon") - winreg.DeleteKey(winreg.HKEY_LOCAL_MACHINE, subKeyCLSID) - except WindowsError: - print("Couldn't delete Standard toolbar regkey.") - else: - print("Deleted Standard toolbar regkey.") - - -# -# test implementation -# - - -class PyWin32InternetExplorerButton(IEButton): - _reg_clsid_ = "{104B66A9-9E68-49D1-A3F5-94754BE9E0E6}" - _reg_progid_ = "PyWin32.IEButton" - _reg_desc_ = "Test Button" - _button_text_ = "IE Button" - _tool_tip_ = "An example implementation for an IE Button." 
- _icon_ = "" - _hot_icon_ = _icon_ - - -def DllRegisterServer(): - register(PyWin32InternetExplorerButton) - - -def DllUnregisterServer(): - unregister(PyWin32InternetExplorerButton) - - -if __name__ == "__main__": - win32com.server.register.UseCommandLine( - PyWin32InternetExplorerButton, - finalize_register=DllRegisterServer, - finalize_unregister=DllUnregisterServer, - ) diff --git a/lib/win32com/demos/ietoolbar.py b/lib/win32com/demos/ietoolbar.py deleted file mode 100644 index 084db3c1..00000000 --- a/lib/win32com/demos/ietoolbar.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: latin-1 -*- - -# PyWin32 Internet Explorer Toolbar -# -# written by Leonard Ritter (paniq@gmx.net) -# and Robert Förtsch (info@robert-foertsch.com) - - -""" -This sample implements a simple IE Toolbar COM server -supporting Windows XP styles and access to -the IWebBrowser2 interface. - -It also demonstrates how to hijack the parent window -to catch WM_COMMAND messages. -""" - -# imports section -import sys -import winreg - -import pythoncom -import win32com -from win32com import universal -from win32com.axcontrol import axcontrol -from win32com.client import Dispatch, DispatchWithEvents, constants, gencache, getevents -from win32com.shell import shell -from win32com.shell.shellcon import * - -try: - # try to get styles (winxp) - import winxpgui as win32gui -except: - # import default module (win2k and lower) - import win32gui - -import array -import struct - -import commctrl -import win32con -import win32ui - -# ensure we know the ms internet controls typelib so we have access to IWebBrowser2 later on -win32com.client.gencache.EnsureModule("{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}", 0, 1, 1) - -# -IDeskBand_methods = ["GetBandInfo"] -IDockingWindow_methods = ["ShowDW", "CloseDW", "ResizeBorderDW"] -IOleWindow_methods = ["GetWindow", "ContextSensitiveHelp"] -IInputObject_methods = ["UIActivateIO", "HasFocusIO", "TranslateAcceleratorIO"] -IObjectWithSite_methods = ["SetSite", "GetSite"] 
-IPersistStream_methods = ["GetClassID", "IsDirty", "Load", "Save", "GetSizeMax"] - -_ietoolbar_methods_ = ( - IDeskBand_methods - + IDockingWindow_methods - + IOleWindow_methods - + IInputObject_methods - + IObjectWithSite_methods - + IPersistStream_methods -) -_ietoolbar_com_interfaces_ = [ - shell.IID_IDeskBand, # IDeskBand - axcontrol.IID_IObjectWithSite, # IObjectWithSite - pythoncom.IID_IPersistStream, - axcontrol.IID_IOleCommandTarget, -] - - -class WIN32STRUCT: - def __init__(self, **kw): - full_fmt = "" - for name, fmt, default in self._struct_items_: - self.__dict__[name] = None - if fmt == "z": - full_fmt += "pi" - else: - full_fmt += fmt - for name, val in kw.items(): - self.__dict__[name] = val - - def __setattr__(self, attr, val): - if not attr.startswith("_") and attr not in self.__dict__: - raise AttributeError(attr) - self.__dict__[attr] = val - - def toparam(self): - self._buffs = [] - full_fmt = "" - vals = [] - for name, fmt, default in self._struct_items_: - val = self.__dict__[name] - if fmt == "z": - fmt = "Pi" - if val is None: - vals.append(0) - vals.append(0) - else: - str_buf = array.array("c", val + "\0") - vals.append(str_buf.buffer_info()[0]) - vals.append(len(val)) - self._buffs.append(str_buf) # keep alive during the call. - else: - if val is None: - val = default - vals.append(val) - full_fmt += fmt - return struct.pack(*(full_fmt,) + tuple(vals)) - - -class TBBUTTON(WIN32STRUCT): - _struct_items_ = [ - ("iBitmap", "i", 0), - ("idCommand", "i", 0), - ("fsState", "B", 0), - ("fsStyle", "B", 0), - ("bReserved", "H", 0), - ("dwData", "I", 0), - ("iString", "z", None), - ] - - -class Stub: - """ - this class serves as a method stub, - outputting debug info whenever the object - is being called. - """ - - def __init__(self, name): - self.name = name - - def __call__(self, *args): - print("STUB: ", self.name, args) - - -class IEToolbarCtrl: - """ - a tiny wrapper for our winapi-based - toolbar control implementation. 
- """ - - def __init__(self, hwndparent): - styles = ( - win32con.WS_CHILD - | win32con.WS_VISIBLE - | win32con.WS_CLIPSIBLINGS - | win32con.WS_CLIPCHILDREN - | commctrl.TBSTYLE_LIST - | commctrl.TBSTYLE_FLAT - | commctrl.TBSTYLE_TRANSPARENT - | commctrl.CCS_TOP - | commctrl.CCS_NODIVIDER - | commctrl.CCS_NORESIZE - | commctrl.CCS_NOPARENTALIGN - ) - self.hwnd = win32gui.CreateWindow( - "ToolbarWindow32", - None, - styles, - 0, - 0, - 100, - 100, - hwndparent, - 0, - win32gui.dllhandle, - None, - ) - win32gui.SendMessage(self.hwnd, commctrl.TB_BUTTONSTRUCTSIZE, 20, 0) - - def ShowWindow(self, mode): - win32gui.ShowWindow(self.hwnd, mode) - - def AddButtons(self, *buttons): - tbbuttons = "" - for button in buttons: - tbbuttons += button.toparam() - return win32gui.SendMessage( - self.hwnd, commctrl.TB_ADDBUTTONS, len(buttons), tbbuttons - ) - - def GetSafeHwnd(self): - return self.hwnd - - -class IEToolbar: - """ - The actual COM server class - """ - - _com_interfaces_ = _ietoolbar_com_interfaces_ - _public_methods_ = _ietoolbar_methods_ - _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER - # if you copy and modify this example, be sure to change the clsid below - _reg_clsid_ = "{F21202A2-959A-4149-B1C3-68B9013F3335}" - _reg_progid_ = "PyWin32.IEToolbar" - _reg_desc_ = "PyWin32 IE Toolbar" - - def __init__(self): - # put stubs for non-implemented methods - for method in self._public_methods_: - if not hasattr(self, method): - print("providing default stub for %s" % method) - setattr(self, method, Stub(method)) - - def GetWindow(self): - return self.toolbar.GetSafeHwnd() - - def Load(self, stream): - # called when the toolbar is loaded - pass - - def Save(self, pStream, fClearDirty): - # called when the toolbar shall save its information - pass - - def CloseDW(self, dwReserved): - del self.toolbar - - def ShowDW(self, bShow): - if bShow: - self.toolbar.ShowWindow(win32con.SW_SHOW) - else: - self.toolbar.ShowWindow(win32con.SW_HIDE) - - def on_first_button(self): - 
print("first!") - self.webbrowser.Navigate2("http://starship.python.net/crew/mhammond/") - - def on_second_button(self): - print("second!") - - def on_third_button(self): - print("third!") - - def toolbar_command_handler(self, args): - hwnd, message, wparam, lparam, time, point = args - if lparam == self.toolbar.GetSafeHwnd(): - self._command_map[wparam]() - - def SetSite(self, unknown): - if unknown: - # retrieve the parent window interface for this site - olewindow = unknown.QueryInterface(pythoncom.IID_IOleWindow) - # ask the window for its handle - hwndparent = olewindow.GetWindow() - - # first get a command target - cmdtarget = unknown.QueryInterface(axcontrol.IID_IOleCommandTarget) - # then travel over to a service provider - serviceprovider = cmdtarget.QueryInterface(pythoncom.IID_IServiceProvider) - # finally ask for the internet explorer application, returned as a dispatch object - self.webbrowser = win32com.client.Dispatch( - serviceprovider.QueryService( - "{0002DF05-0000-0000-C000-000000000046}", pythoncom.IID_IDispatch - ) - ) - - # now create and set up the toolbar - self.toolbar = IEToolbarCtrl(hwndparent) - - buttons = [ - ("Visit PyWin32 Homepage", self.on_first_button), - ("Another Button", self.on_second_button), - ("Yet Another Button", self.on_third_button), - ] - - self._command_map = {} - # wrap our parent window so we can hook message handlers - window = win32ui.CreateWindowFromHandle(hwndparent) - - # add the buttons - for i in range(len(buttons)): - button = TBBUTTON() - name, func = buttons[i] - id = 0x4444 + i - button.iBitmap = -2 - button.idCommand = id - button.fsState = commctrl.TBSTATE_ENABLED - button.fsStyle = commctrl.TBSTYLE_BUTTON - button.iString = name - self._command_map[0x4444 + i] = func - self.toolbar.AddButtons(button) - window.HookMessage(self.toolbar_command_handler, win32con.WM_COMMAND) - else: - # lose all references - self.webbrowser = None - - def GetClassID(self): - return self._reg_clsid_ - - def 
GetBandInfo(self, dwBandId, dwViewMode, dwMask): - ptMinSize = (0, 24) - ptMaxSize = (2000, 24) - ptIntegral = (0, 0) - ptActual = (2000, 24) - wszTitle = "PyWin32 IE Toolbar" - dwModeFlags = DBIMF_VARIABLEHEIGHT - crBkgnd = 0 - return ( - ptMinSize, - ptMaxSize, - ptIntegral, - ptActual, - wszTitle, - dwModeFlags, - crBkgnd, - ) - - -# used for HKLM install -def DllInstall(bInstall, cmdLine): - comclass = IEToolbar - - -# register plugin -def DllRegisterServer(): - comclass = IEToolbar - - # register toolbar with IE - try: - print("Trying to register Toolbar.\n") - hkey = winreg.CreateKey( - winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Internet Explorer\\Toolbar" - ) - subKey = winreg.SetValueEx( - hkey, comclass._reg_clsid_, 0, winreg.REG_BINARY, "\0" - ) - except WindowsError: - print( - "Couldn't set registry value.\nhkey: %d\tCLSID: %s\n" - % (hkey, comclass._reg_clsid_) - ) - else: - print( - "Set registry value.\nhkey: %d\tCLSID: %s\n" % (hkey, comclass._reg_clsid_) - ) - # TODO: implement reg settings for standard toolbar button - - -# unregister plugin -def DllUnregisterServer(): - comclass = IEToolbar - - # unregister toolbar from internet explorer - try: - print("Trying to unregister Toolbar.\n") - hkey = winreg.CreateKey( - winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Internet Explorer\\Toolbar" - ) - winreg.DeleteValue(hkey, comclass._reg_clsid_) - except WindowsError: - print( - "Couldn't delete registry value.\nhkey: %d\tCLSID: %s\n" - % (hkey, comclass._reg_clsid_) - ) - else: - print("Deleting reg key succeeded.\n") - - -# entry point -if __name__ == "__main__": - import win32com.server.register - - win32com.server.register.UseCommandLine(IEToolbar) - - # parse actual command line option - if "--unregister" in sys.argv: - DllUnregisterServer() - else: - DllRegisterServer() -else: - # import trace utility for remote debugging - import win32traceutil diff --git a/lib/win32com/demos/outlookAddin.py b/lib/win32com/demos/outlookAddin.py deleted 
file mode 100644 index 9433596d..00000000 --- a/lib/win32com/demos/outlookAddin.py +++ /dev/null @@ -1,138 +0,0 @@ -# A demo plugin for Microsoft Outlook (NOT Outlook Express) -# -# This addin simply adds a new button to the main Outlook toolbar, -# and displays a message box when clicked. Thus, it demonstrates -# how to plug in to Outlook itself, and hook outlook events. -# -# Additionally, each time a new message arrives in the Inbox, a message -# is printed with the subject of the message. -# -# To register the addin, simply execute: -# outlookAddin.py -# This will install the COM server, and write the necessary -# AddIn key to Outlook -# -# To unregister completely: -# outlookAddin.py --unregister -# -# To debug, execute: -# outlookAddin.py --debug -# -# Then open Pythonwin, and select "Tools->Trace Collector Debugging Tool" -# Restart Outlook, and you should see some output generated. -# -# NOTE: If the AddIn fails with an error, Outlook will re-register -# the addin to not automatically load next time Outlook starts. To -# correct this, simply re-register the addin (see above) - -import sys - -import pythoncom -from win32com import universal -from win32com.client import DispatchWithEvents, constants, gencache -from win32com.server.exception import COMException - -# Support for COM objects we use. 
-gencache.EnsureModule( - "{00062FFF-0000-0000-C000-000000000046}", 0, 9, 0, bForDemand=True -) # Outlook 9 -gencache.EnsureModule( - "{2DF8D04C-5BFA-101B-BDE5-00AA0044DE52}", 0, 2, 1, bForDemand=True -) # Office 9 - -# The TLB defining the interfaces we implement -universal.RegisterInterfaces( - "{AC0714F2-3D04-11D1-AE7D-00A0C90F26F4}", 0, 1, 0, ["_IDTExtensibility2"] -) - - -class ButtonEvent: - def OnClick(self, button, cancel): - import win32ui # Possible, but not necessary, to use a Pythonwin GUI - - win32ui.MessageBox("Hello from Python") - return cancel - - -class FolderEvent: - def OnItemAdd(self, item): - try: - print("An item was added to the inbox with subject:", item.Subject) - except AttributeError: - print( - "An item was added to the inbox, but it has no subject! - ", repr(item) - ) - - -class OutlookAddin: - _com_interfaces_ = ["_IDTExtensibility2"] - _public_methods_ = [] - _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER - _reg_clsid_ = "{0F47D9F3-598B-4d24-B7E3-92AC15ED27E2}" - _reg_progid_ = "Python.Test.OutlookAddin" - _reg_policy_spec_ = "win32com.server.policy.EventHandlerPolicy" - - def OnConnection(self, application, connectMode, addin, custom): - print("OnConnection", application, connectMode, addin, custom) - # ActiveExplorer may be none when started without a UI (eg, WinCE synchronisation) - activeExplorer = application.ActiveExplorer() - if activeExplorer is not None: - bars = activeExplorer.CommandBars - toolbar = bars.Item("Standard") - item = toolbar.Controls.Add(Type=constants.msoControlButton, Temporary=True) - # Hook events for the item - item = self.toolbarButton = DispatchWithEvents(item, ButtonEvent) - item.Caption = "Python" - item.TooltipText = "Click for Python" - item.Enabled = True - - # And now, for the sake of demonstration, setup a hook for all new messages - inbox = application.Session.GetDefaultFolder(constants.olFolderInbox) - self.inboxItems = DispatchWithEvents(inbox.Items, FolderEvent) - - def OnDisconnection(self, 
mode, custom): - print("OnDisconnection") - - def OnAddInsUpdate(self, custom): - print("OnAddInsUpdate", custom) - - def OnStartupComplete(self, custom): - print("OnStartupComplete", custom) - - def OnBeginShutdown(self, custom): - print("OnBeginShutdown", custom) - - -def RegisterAddin(klass): - import winreg - - key = winreg.CreateKey( - winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Outlook\\Addins" - ) - subkey = winreg.CreateKey(key, klass._reg_progid_) - winreg.SetValueEx(subkey, "CommandLineSafe", 0, winreg.REG_DWORD, 0) - winreg.SetValueEx(subkey, "LoadBehavior", 0, winreg.REG_DWORD, 3) - winreg.SetValueEx(subkey, "Description", 0, winreg.REG_SZ, klass._reg_progid_) - winreg.SetValueEx(subkey, "FriendlyName", 0, winreg.REG_SZ, klass._reg_progid_) - - -def UnregisterAddin(klass): - import winreg - - try: - winreg.DeleteKey( - winreg.HKEY_CURRENT_USER, - "Software\\Microsoft\\Office\\Outlook\\Addins\\" + klass._reg_progid_, - ) - except WindowsError: - pass - - -if __name__ == "__main__": - import win32com.server.register - - win32com.server.register.UseCommandLine(OutlookAddin) - if "--unregister" in sys.argv: - UnregisterAddin(OutlookAddin) - else: - RegisterAddin(OutlookAddin) diff --git a/lib/win32com/demos/trybag.py b/lib/win32com/demos/trybag.py deleted file mode 100644 index 3d3918b6..00000000 --- a/lib/win32com/demos/trybag.py +++ /dev/null @@ -1,77 +0,0 @@ -import pythoncom -from win32com.server import exception, util - -VT_EMPTY = pythoncom.VT_EMPTY - - -class Bag: - _public_methods_ = ["Read", "Write"] - _com_interfaces_ = [pythoncom.IID_IPropertyBag] - - def __init__(self): - self.data = {} - - def Read(self, propName, varType, errorLog): - print("read: name=", propName, "type=", varType) - if propName not in self.data: - if errorLog: - hr = 0x80070057 - exc = pythoncom.com_error(0, "Bag.Read", "no such item", None, 0, hr) - errorLog.AddError(propName, exc) - raise exception.Exception(scode=hr) - return self.data[propName] - - def 
Write(self, propName, value): - print("write: name=", propName, "value=", value) - self.data[propName] = value - - -class Target: - _public_methods_ = ["GetClassID", "InitNew", "Load", "Save"] - _com_interfaces_ = [pythoncom.IID_IPersist, pythoncom.IID_IPersistPropertyBag] - - def GetClassID(self): - raise exception.Exception(scode=0x80004005) # E_FAIL - - def InitNew(self): - pass - - def Load(self, bag, log): - print(bag.Read("prop1", VT_EMPTY, log)) - print(bag.Read("prop2", VT_EMPTY, log)) - try: - print(bag.Read("prop3", VT_EMPTY, log)) - except exception.Exception: - pass - - def Save(self, bag, clearDirty, saveAllProps): - bag.Write("prop1", "prop1.hello") - bag.Write("prop2", "prop2.there") - - -class Log: - _public_methods_ = ["AddError"] - _com_interfaces_ = [pythoncom.IID_IErrorLog] - - def AddError(self, propName, excepInfo): - print("error: propName=", propName, "error=", excepInfo) - - -def test(): - bag = Bag() - target = Target() - log = Log() - - target.Save(bag, 1, 1) - target.Load(bag, log) - - comBag = util.wrap(bag, pythoncom.IID_IPropertyBag) - comTarget = util.wrap(target, pythoncom.IID_IPersistPropertyBag) - comLog = util.wrap(log, pythoncom.IID_IErrorLog) - - comTarget.Save(comBag, 1, 1) - comTarget.Load(comBag, comLog) - - -if __name__ == "__main__": - test() diff --git a/lib/win32com/include/PythonCOM.h b/lib/win32com/include/PythonCOM.h deleted file mode 100644 index e7599717..00000000 --- a/lib/win32com/include/PythonCOM.h +++ /dev/null @@ -1,766 +0,0 @@ -/* PythonCOM.h - - Main header for Python COM support. - - This file is involved mainly with client side COM support for - Python. - - Most COM work put together by Greg Stein and Mark Hammond, with a - few others starting to come out of the closet. - - - -------------------------------------------------------------------- - Thread State Rules - ------------------ - These rules apply to PythonCOM in general, and not just to - the client side. 
- - The rules are quite simple, but it is critical they be followed. - In general, errors here will be picked up quite quickly, as Python - will raise a Fatal Error. However, the Release() issue in particular - may keep a number of problems well hidden. - - Interfaces: - ----------- - Before making ANY call out to COM, you MUST release the Python lock. - This is true to ANY call whatsoever, including the COM call in question, - but also any calls to "->Release();" - - This is normally achieved with the calls - PY_INTERFACE_PRECALL and PY_INTERFACE_POSTCALL, which release - and acquire the Python lock. - - Gateways: - --------- - Before doing anything related to Python, gateways MUST acquire the - Python lock, and must release it before returning. - - This is normally achieved with PY_GATEWAY_METHOD at the top of a - gateway method. This macro resolves to a class, which automatically does - the right thing. - - Release: - -------- - As mentioned above for Interfaces, EVERY call to Release() must be done - with the Python lock released. This is expanded here. - - This is very important, but an error may not be noticed. The problem will - only be seen when the Release() is on a Python object and the Release() is the - final one for the object. In this case, the Python object will attempt to - acquire the Python lock before destroying itself, and Python will raise a - fatal error. - - In many many cases, you will not notice this error, but someday, someone will - implement the other side in Python, and suddenly FatalErrors will start - appearing. Make sure you get this right. - - Eg, this code is correct: - PY_INTERFACE_PRECALL; - pSomeObj->SomeFunction(pSomeOtherObject); - pSomeOtherObject->Release(); - PY_INTERFACE_POSTCALL; - - However, this code is WRONG, but will RARELY FAIL. 
- PY_INTERFACE_PRECALL; - pSomeObj->SomeFunction(pSomeOtherObject); - PY_INTERFACE_POSTCALL; - pSomeOtherObject->Release(); --------------------------------------------------------------------- -*/ -#ifndef __PYTHONCOM_H__ -#define __PYTHONCOM_H__ - -// #define _DEBUG_LIFETIMES // Trace COM object lifetimes. - -#ifdef FREEZE_PYTHONCOM -/* The pythoncom module is being included in a frozen .EXE/.DLL */ -#define PYCOM_EXPORT -#else -#ifdef BUILD_PYTHONCOM -/* We are building pythoncomxx.dll */ -#define PYCOM_EXPORT __declspec(dllexport) -#else -/* This module uses pythoncomxx.dll */ -#define PYCOM_EXPORT __declspec(dllimport) -#ifndef _DEBUG -#pragma comment(lib, "pythoncom.lib") -#else -#pragma comment(lib, "pythoncom_d.lib") -#endif -#endif -#endif - -#ifdef MS_WINCE -// List of interfaces not supported by CE. -#define NO_PYCOM_IDISPATCHEX -#define NO_PYCOM_IPROVIDECLASSINFO -#define NO_PYCOM_IENUMGUID -#define NO_PYCOM_IENUMCATEGORYINFO -#define NO_PYCOM_ICATINFORMATION -#define NO_PYCOM_ICATREGISTER -#define NO_PYCOM_ISERVICEPROVIDER -#define NO_PYCOM_IPROPERTYSTORAGE -#define NO_PYCOM_IPROPERTYSETSTORAGE -#define NO_PYCOM_ENUMSTATPROPSTG - -#include "ocidl.h" -#include "oleauto.h" - -#endif // MS_WINCE - -#ifdef __MINGW32__ -// Special Mingw32 considerations. -#define NO_PYCOM_ENUMSTATPROPSTG -#define __try try -#define __except catch -#include - -#endif // __MINGW32__ - -#include // Standard Win32 Types - -#ifndef NO_PYCOM_IDISPATCHEX -#include // New header for IDispatchEx interface. -#endif // NO_PYCOM_IDISPATCHEX - -#if defined(MAINWIN) -// Mainwin seems to have 1/2 the VT_RECORD infrastructure in place -#if !defined(VT_RECORD) -#define VT_RECORD 36 -#define V_RECORDINFO(X) ((X)->brecVal.pRecInfo) -#define V_RECORD(X) ((X)->brecVal.pvRecord) -#else -#pragma message( \ - "MAINWIN appears to have grown correct VT_RECORD " \ - "support. 
Please update PythonCOM.h accordingly") -#endif // VT_RECORD -#endif // MAINWIN - -class PyIUnknown; -// To make life interesting/complicated, I use C++ classes for -// all Python objects. The main advantage is that I can derive -// a PyIDispatch object from a PyIUnknown, etc. This provides a -// clean C++ interface, and "automatically" provides all base -// Python methods to "derived" Python types. -// -// Main disadvantage is that any extension DLLs will need to include -// these headers, and link with this .lib -// -// Base class for (most of) the type objects. - -class PYCOM_EXPORT PyComTypeObject : public PyTypeObject { - public: - PyComTypeObject(const char *name, PyComTypeObject *pBaseType, Py_ssize_t typeSize, struct PyMethodDef *methodList, - PyIUnknown *(*thector)(IUnknown *)); - ~PyComTypeObject(); - - // is the given object an interface type object? (e.g. PyIUnknown) - static BOOL is_interface_type(PyObject *ob); - - public: - PyIUnknown *(*ctor)(IUnknown *); -}; - -// A type used for interfaces that can automatically provide enumerators -// (ie, they themselves aren't enumerable, but do have a suitable default -// method that returns a PyIEnum object -class PYCOM_EXPORT PyComEnumProviderTypeObject : public PyComTypeObject { - public: - PyComEnumProviderTypeObject(const char *name, PyComTypeObject *pBaseType, Py_ssize_t typeSize, - struct PyMethodDef *methodList, PyIUnknown *(*thector)(IUnknown *), - const char *enum_method_name); - static PyObject *iter(PyObject *self); - const char *enum_method_name; -}; - -// A type used for PyIEnum interfaces -class PYCOM_EXPORT PyComEnumTypeObject : public PyComTypeObject { - public: - static PyObject *iter(PyObject *self); - static PyObject *iternext(PyObject *self); - PyComEnumTypeObject(const char *name, PyComTypeObject *pBaseType, Py_ssize_t typeSize, struct PyMethodDef *methodList, - PyIUnknown *(*thector)(IUnknown *)); -}; - -// Very very base class - not COM specific - Should exist in the -// Python core 
somewhere, IMO. -class PYCOM_EXPORT PyIBase : public PyObject { - public: - // virtuals for Python support - virtual PyObject *getattr(char *name); - virtual int setattr(char *name, PyObject *v); - virtual PyObject *repr(); - virtual int compare(PyObject *other) - { - if (this == other) - return 0; - if (this < other) - return -1; - return 1; - } - // These iter are a little special, in that returning NULL means - // use the implementation in the type - virtual PyObject *iter() { return NULL; } - virtual PyObject *iternext() { return NULL; } - - protected: - PyIBase(); - virtual ~PyIBase(); - - public: - static BOOL is_object(PyObject *, PyComTypeObject *which); - BOOL is_object(PyComTypeObject *which); - static void dealloc(PyObject *ob); - static PyObject *repr(PyObject *ob); - static PyObject *getattro(PyObject *self, PyObject *name); - static int setattro(PyObject *op, PyObject *obname, PyObject *v); - static int cmp(PyObject *ob1, PyObject *ob2); - static PyObject *richcmp(PyObject *ob1, PyObject *ob2, int op); -}; - -/* Special Type objects */ -extern PYCOM_EXPORT PyTypeObject PyOleEmptyType; // equivalent to VT_EMPTY -extern PYCOM_EXPORT PyTypeObject PyOleMissingType; // special Python handling. -extern PYCOM_EXPORT PyTypeObject PyOleArgNotFoundType; // special VT_ERROR value -extern PYCOM_EXPORT PyTypeObject PyOleNothingType; // special VT_ERROR value - -// ALL of these set an appropriate Python error on bad return. - -// Given a Python object that is a registered COM type, return a given -// interface pointer on its underlying object, with a new reference added. -PYCOM_EXPORT BOOL PyCom_InterfaceFromPyObject(PyObject *ob, REFIID iid, LPVOID *ppv, BOOL bNoneOK = TRUE); - -// As above, but allows instance with "_oleobj_" attribute. -PYCOM_EXPORT BOOL PyCom_InterfaceFromPyInstanceOrObject(PyObject *ob, REFIID iid, LPVOID *ppv, BOOL bNoneOK = TRUE); - -// Release an arbitary COM pointer. 
-// NOTE: the PRECALL/POSTCALL stuff is probably not strictly necessary -// since the PyGILSTATE stuff has been in place (and even then, it only -// mattered when it was the last Release() on a Python implemented object) -#define PYCOM_RELEASE(pUnk) \ - { \ - if (pUnk) { \ - PY_INTERFACE_PRECALL; \ - (pUnk)->Release(); \ - PY_INTERFACE_POSTCALL; \ - } \ - } - -// Given an IUnknown and an Interface ID, create and return an object -// of the appropriate type. eg IID_Unknown->PyIUnknown, -// IID_IDispatch->PyIDispatch, etc. -// Uses a map that external extension DLLs can populate with their IID/type. -// Under the principal of least surprise, this will return Py_None is punk is NULL. -// Otherwise, a valid PyI*, but with NULL m_obj (and therefore totally useless) -// object would be created. -// BOOL bAddRef indicates if a COM reference count should be added to the IUnknown. -// This depends purely on the context in which it is called. If the IUnknown is obtained -// from a function that creates a new ref (eg, CoCreateInstance()) then you should use -// FALSE. If you receive the pointer as (eg) a param to a gateway function, then -// you normally need to pass TRUE, as this is truly a new reference. -// *** ALWAYS take the time to get this right. *** -PYCOM_EXPORT PyObject *PyCom_PyObjectFromIUnknown(IUnknown *punk, REFIID riid, BOOL bAddRef = FALSE); - -// VARIANT <-> PyObject conversion utilities. -PYCOM_EXPORT BOOL PyCom_VariantFromPyObject(PyObject *obj, VARIANT *var); -PYCOM_EXPORT PyObject *PyCom_PyObjectFromVariant(const VARIANT *var); - -// PROPVARIANT -PYCOM_EXPORT PyObject *PyObject_FromPROPVARIANT(PROPVARIANT *pVar); -PYCOM_EXPORT PyObject *PyObject_FromPROPVARIANTs(PROPVARIANT *pVars, ULONG cVars); -PYCOM_EXPORT BOOL PyObject_AsPROPVARIANT(PyObject *ob, PROPVARIANT *pVar); - -// Other conversion helpers... 
-PYCOM_EXPORT PyObject *PyCom_PyObjectFromSTATSTG(STATSTG *pStat); -PYCOM_EXPORT BOOL PyCom_PyObjectAsSTATSTG(PyObject *ob, STATSTG *pStat, DWORD flags = 0); -PYCOM_EXPORT BOOL PyCom_SAFEARRAYFromPyObject(PyObject *obj, SAFEARRAY **ppSA, VARENUM vt = VT_VARIANT); -PYCOM_EXPORT PyObject *PyCom_PyObjectFromSAFEARRAY(SAFEARRAY *psa, VARENUM vt = VT_VARIANT); -#ifndef NO_PYCOM_STGOPTIONS -PYCOM_EXPORT BOOL PyCom_PyObjectAsSTGOPTIONS(PyObject *obstgoptions, STGOPTIONS **ppstgoptions, TmpWCHAR *tmpw_shelve); -#endif -PYCOM_EXPORT PyObject *PyCom_PyObjectFromSTATPROPSETSTG(STATPROPSETSTG *pStat); -PYCOM_EXPORT BOOL PyCom_PyObjectAsSTATPROPSETSTG(PyObject *, STATPROPSETSTG *); - -// Currency support. -PYCOM_EXPORT PyObject *PyObject_FromCurrency(CURRENCY &cy); -PYCOM_EXPORT BOOL PyObject_AsCurrency(PyObject *ob, CURRENCY *pcy); - -// OLEMENUGROUPWIDTHS are used by axcontrol, shell, etc -PYCOM_EXPORT BOOL PyObject_AsOLEMENUGROUPWIDTHS(PyObject *oblpMenuWidths, OLEMENUGROUPWIDTHS *pWidths); -PYCOM_EXPORT PyObject *PyObject_FromOLEMENUGROUPWIDTHS(const OLEMENUGROUPWIDTHS *pWidths); - -/* Functions for Initializing COM, and also letting the core know about it! - */ -PYCOM_EXPORT HRESULT PyCom_CoInitializeEx(LPVOID reserved, DWORD dwInit); -PYCOM_EXPORT HRESULT PyCom_CoInitialize(LPVOID reserved); -PYCOM_EXPORT void PyCom_CoUninitialize(); - -/////////////////////////////////////////////////////////////////// -// Error related functions - -// Client related functions - generally called by interfaces before -// they return NULL back to Python to indicate the error. -// All these functions return NULL so interfaces can generally -// just "return PyCom_BuildPyException(hr, punk, IID_IWhatever)" - -// Uses the HRESULT, and IErrorInfo interfaces if available to -// create and set a pythoncom.com_error. 
-PYCOM_EXPORT PyObject *PyCom_BuildPyException(HRESULT hr, IUnknown *pUnk = NULL, REFIID iid = IID_NULL); - -// Uses the HRESULT and an EXCEPINFO structure to create and -// set a pythoncom.com_error. -PYCOM_EXPORT PyObject *PyCom_BuildPyExceptionFromEXCEPINFO(HRESULT hr, EXCEPINFO *pexcepInfo, UINT nArgErr = (UINT)-1); - -// Sets a pythoncom.internal_error - no one should ever see these! -PYCOM_EXPORT PyObject *PyCom_BuildInternalPyException(char *msg); - -// Log an error to a Python logger object if one can be found, or -// to stderr if no log available. -// If logProvider is not NULL, we will call a "_GetLogger_()" method on it. -// If logProvider is NULL, we attempt to fetch "win32com.logger". -// If they do not exist, return None, or raise an error fetching them -// (or even writing to them once fetched), the message still goes to stderr. -// NOTE: By default, win32com does *not* provide a logger, so default is that -// all errors are written to stdout. -// This will *not* write a record if a COM Server error is current. -PYCOM_EXPORT void PyCom_LoggerNonServerException(PyObject *logProvider, const WCHAR *fmt, ...); - -// Write an error record, including exception. This will write an error -// record even if a COM server error is current. -PYCOM_EXPORT void PyCom_LoggerException(PyObject *logProvider, const WCHAR *fmt, ...); - -// Write a warning record - in general this does *not* mean a call failed, but -// still is something in the programmers control that they should change. -// XXX - if an exception is pending when this is called, the traceback will -// also be written. This is undesirable and will be changed should this -// start being a problem. -PYCOM_EXPORT void PyCom_LoggerWarning(PyObject *logProvider, const WCHAR *fmt, ...); - -// Server related error functions -// These are supplied so that any Python errors we detect can be -// converted into COM error information. 
The HRESULT returned should -// be returned by the COM function, and these functions also set the -// IErrorInfo interfaces, so the caller can extract more detailed -// information about the Python exception. - -// Set a COM exception, logging the exception if not an explicitly raised 'server' exception -PYCOM_EXPORT HRESULT PyCom_SetAndLogCOMErrorFromPyException(const char *methodName, REFIID riid /* = IID_NULL */); -PYCOM_EXPORT HRESULT PyCom_SetAndLogCOMErrorFromPyExceptionEx(PyObject *provider, const char *methodName, - REFIID riid /* = IID_NULL */); - -// Used in gateways to SetErrorInfo() with a simple HRESULT, then return it. -// The description is generally only useful for debugging purposes, -// and if you are debugging via a server that supports IErrorInfo (like Python :-) -// NOTE: this function is usuable from outside the Python context -PYCOM_EXPORT HRESULT PyCom_SetCOMErrorFromSimple(HRESULT hr, REFIID riid = IID_NULL, const WCHAR *description = NULL); - -// Used in gateways to check if an IEnum*'s Next() or Clone() method worked. -PYCOM_EXPORT HRESULT PyCom_CheckIEnumNextResult(HRESULT hr, REFIID riid); - -// Used in gateways when an enumerator expected a sequence but didn't get it. -PYCOM_EXPORT HRESULT PyCom_HandleIEnumNoSequence(REFIID riid); - -// Used in gateways to SetErrorInfo() the current Python exception, and -// (assuming not a server error explicitly raised) also logs an error -// to stdout/win32com.logger. -// NOTE: this function assumes GIL held -PYCOM_EXPORT HRESULT PyCom_SetCOMErrorFromPyException(REFIID riid = IID_NULL); - -// A couple of EXCEPINFO helpers - could be private to IDispatch -// if it wasnt for the AXScript support (and ITypeInfo if we get around to that :-) -// These functions do not set any error states to either Python or -// COM - they simply convert to/from PyObjects and EXCEPINFOs - -// Use the current Python exception to fill an EXCEPINFO structure. 
-PYCOM_EXPORT void PyCom_ExcepInfoFromPyException(EXCEPINFO *pExcepInfo); - -// Fill in an EXCEPINFO structure from a Python instance or tuple object. -// (ie, similar to the above, except the Python exception object is specified, -// rather than using the "current" -PYCOM_EXPORT BOOL PyCom_ExcepInfoFromPyObject(PyObject *obExcepInfo, EXCEPINFO *pexcepInfo, HRESULT *phresult = NULL); - -// Create a Python object holding the exception information. The exception -// information is *not* freed by this function. Python exceptions are -// raised and NULL is returned if an error occurs. -PYCOM_EXPORT PyObject *PyCom_PyObjectFromExcepInfo(const EXCEPINFO *pexcepInfo); - -/////////////////////////////////////////////////////////////////// -// -// External C++ helpers - these helpers are for other DLLs which -// may need similar functionality, but dont want to duplicate all - -// This helper is for an application that has an IDispatch, and COM arguments -// and wants to call a Python function. It is assumed the caller can map the IDispatch -// to a Python object, so the Python handler is passed. -// Args: -// handler : A Python callable object. -// dispparms : the COM arguments. -// pVarResult : The variant for the return value of the Python call. -// pexcepinfo : Exception info the helper may fill out. -// puArgErr : Argument error the helper may fill out on exception -// addnArgs : Any additional arguments to the Python function. May be NULL. -// If addnArgs is NULL, then it is assumed the Python call should be native - -// ie, the COM args are packed as normal Python args to the call. -// If addnArgs is NOT NULL, it is assumed the Python function itself is -// a helper. This Python function will be called with 2 arguments - both -// tuples - first one is the COM args, second is the addn args. 
-PYCOM_EXPORT BOOL PyCom_MakeOlePythonCall(PyObject *handler, DISPPARAMS FAR *params, VARIANT FAR *pVarResult, - EXCEPINFO FAR *pexcepinfo, UINT FAR *puArgErr, PyObject *addnlArgs); - -///////////////////////////////////////////////////////////////////////////// -// Various special purpose singletons -class PYCOM_EXPORT PyOleEmpty : public PyObject { - public: - PyOleEmpty(); -}; - -class PYCOM_EXPORT PyOleMissing : public PyObject { - public: - PyOleMissing(); -}; - -class PYCOM_EXPORT PyOleArgNotFound : public PyObject { - public: - PyOleArgNotFound(); -}; - -class PYCOM_EXPORT PyOleNothing : public PyObject { - public: - PyOleNothing(); -}; - -// We need to dynamically create C++ Python objects -// These helpers allow each type object to create it. -#define MAKE_PYCOM_CTOR(classname) \ - static PyIUnknown *PyObConstruct(IUnknown *pInitObj) { return new classname(pInitObj); } -#define MAKE_PYCOM_CTOR_ERRORINFO(classname, iid) \ - static PyIUnknown *PyObConstruct(IUnknown *pInitObj) { return new classname(pInitObj); } \ - static PyObject *SetPythonCOMError(PyObject *self, HRESULT hr) \ - { \ - return PyCom_BuildPyException(hr, GetI(self), iid); \ - } -#define GET_PYCOM_CTOR(classname) classname::PyObConstruct - -// Macros that interfaces should use. PY_INTERFACE_METHOD at the top of the method -// The other 2 wrap directly around the underlying method call. -#define PY_INTERFACE_METHOD -// Identical to Py_BEGIN_ALLOW_THREADS except no { !!! 
-#define PY_INTERFACE_PRECALL PyThreadState *_save = PyEval_SaveThread(); -#define PY_INTERFACE_POSTCALL PyEval_RestoreThread(_save); - -///////////////////////////////////////////////////////////////////////////// -// class PyIUnknown -class PYCOM_EXPORT PyIUnknown : public PyIBase { - public: - MAKE_PYCOM_CTOR(PyIUnknown); - virtual PyObject *repr(); - virtual int compare(PyObject *other); - - static IUnknown *GetI(PyObject *self); - IUnknown *m_obj; - static char *szErrMsgObjectReleased; - static void SafeRelease(PyIUnknown *ob); - static PyComTypeObject type; - - // The Python methods - static PyObject *QueryInterface(PyObject *self, PyObject *args); - static PyObject *SafeRelease(PyObject *self, PyObject *args); - - protected: - PyIUnknown(IUnknown *punk); - ~PyIUnknown(); -}; - -///////////////////////////////////////////////////////////////////////////// -// class PyIDispatch - -class PYCOM_EXPORT PyIDispatch : public PyIUnknown { - public: - MAKE_PYCOM_CTOR(PyIDispatch); - static IDispatch *GetI(PyObject *self); - static PyComTypeObject type; - - // The Python methods - static PyObject *Invoke(PyObject *self, PyObject *args); - static PyObject *InvokeTypes(PyObject *self, PyObject *args); - static PyObject *GetIDsOfNames(PyObject *self, PyObject *args); - static PyObject *GetTypeInfo(PyObject *self, PyObject *args); - static PyObject *GetTypeInfoCount(PyObject *self, PyObject *args); - - protected: - PyIDispatch(IUnknown *pdisp); - ~PyIDispatch(); -}; - -#ifndef NO_PYCOM_IDISPATCHEX -///////////////////////////////////////////////////////////////////////////// -// class PyIDispatchEx - -class PYCOM_EXPORT PyIDispatchEx : public PyIDispatch { - public: - MAKE_PYCOM_CTOR_ERRORINFO(PyIDispatchEx, IID_IDispatchEx); - static IDispatchEx *GetI(PyObject *self); - static PyComTypeObject type; - - // The Python methods - static PyObject *GetDispID(PyObject *self, PyObject *args); - static PyObject *InvokeEx(PyObject *self, PyObject *args); - static PyObject 
*DeleteMemberByName(PyObject *self, PyObject *args); - static PyObject *DeleteMemberByDispID(PyObject *self, PyObject *args); - static PyObject *GetMemberProperties(PyObject *self, PyObject *args); - static PyObject *GetMemberName(PyObject *self, PyObject *args); - static PyObject *GetNextDispID(PyObject *self, PyObject *args); - - protected: - PyIDispatchEx(IUnknown *pdisp); - ~PyIDispatchEx(); -}; -#endif // NO_PYCOM_IDISPATCHEX - -///////////////////////////////////////////////////////////////////////////// -// class PyIClassFactory - -class PYCOM_EXPORT PyIClassFactory : public PyIUnknown { - public: - MAKE_PYCOM_CTOR(PyIClassFactory); - static IClassFactory *GetI(PyObject *self); - static PyComTypeObject type; - - // The Python methods - static PyObject *CreateInstance(PyObject *self, PyObject *args); - static PyObject *LockServer(PyObject *self, PyObject *args); - - protected: - PyIClassFactory(IUnknown *pdisp); - ~PyIClassFactory(); -}; - -#ifndef NO_PYCOM_IPROVIDECLASSINFO - -///////////////////////////////////////////////////////////////////////////// -// class PyIProvideTypeInfo - -class PYCOM_EXPORT PyIProvideClassInfo : public PyIUnknown { - public: - MAKE_PYCOM_CTOR(PyIProvideClassInfo); - static IProvideClassInfo *GetI(PyObject *self); - static PyComTypeObject type; - - // The Python methods - static PyObject *GetClassInfo(PyObject *self, PyObject *args); - - protected: - PyIProvideClassInfo(IUnknown *pdisp); - ~PyIProvideClassInfo(); -}; - -class PYCOM_EXPORT PyIProvideClassInfo2 : public PyIProvideClassInfo { - public: - MAKE_PYCOM_CTOR(PyIProvideClassInfo2); - static IProvideClassInfo2 *GetI(PyObject *self); - static PyComTypeObject type; - - // The Python methods - static PyObject *GetGUID(PyObject *self, PyObject *args); - - protected: - PyIProvideClassInfo2(IUnknown *pdisp); - ~PyIProvideClassInfo2(); -}; -#endif // NO_PYCOM_IPROVIDECLASSINFO - -///////////////////////////////////////////////////////////////////////////// -// class PyITypeInfo 
-class PYCOM_EXPORT PyITypeInfo : public PyIUnknown { - public: - MAKE_PYCOM_CTOR(PyITypeInfo); - static PyComTypeObject type; - static ITypeInfo *GetI(PyObject *self); - - PyObject *GetContainingTypeLib(); - PyObject *GetDocumentation(MEMBERID); - PyObject *GetRefTypeInfo(HREFTYPE href); - PyObject *GetRefTypeOfImplType(int index); - PyObject *GetFuncDesc(int pos); - PyObject *GetIDsOfNames(OLECHAR FAR *FAR *, int); - PyObject *GetNames(MEMBERID); - PyObject *GetTypeAttr(); - PyObject *GetVarDesc(int pos); - PyObject *GetImplTypeFlags(int index); - PyObject *GetTypeComp(); - - protected: - PyITypeInfo(IUnknown *); - ~PyITypeInfo(); -}; - -///////////////////////////////////////////////////////////////////////////// -// class PyITypeComp -class PYCOM_EXPORT PyITypeComp : public PyIUnknown { - public: - MAKE_PYCOM_CTOR(PyITypeComp); - static PyComTypeObject type; - static ITypeComp *GetI(PyObject *self); - - PyObject *Bind(OLECHAR *szName, unsigned short wflags); - PyObject *BindType(OLECHAR *szName); - - protected: - PyITypeComp(IUnknown *); - ~PyITypeComp(); -}; - -///////////////////////////////////////////////////////////////////////////// -// class CPyTypeLib - -class PYCOM_EXPORT PyITypeLib : public PyIUnknown { - public: - MAKE_PYCOM_CTOR(PyITypeLib); - static PyComTypeObject type; - static ITypeLib *GetI(PyObject *self); - - PyObject *GetLibAttr(); - PyObject *GetDocumentation(int pos); - PyObject *GetTypeInfo(int pos); - PyObject *GetTypeInfoCount(); - PyObject *GetTypeInfoOfGuid(REFGUID guid); - PyObject *GetTypeInfoType(int pos); - PyObject *GetTypeComp(); - - protected: - PyITypeLib(IUnknown *); - ~PyITypeLib(); -}; - -///////////////////////////////////////////////////////////////////////////// -// class PyIConnectionPoint - -class PYCOM_EXPORT PyIConnectionPoint : public PyIUnknown { - public: - MAKE_PYCOM_CTOR_ERRORINFO(PyIConnectionPoint, IID_IConnectionPoint); - static PyComTypeObject type; - static IConnectionPoint *GetI(PyObject *self); - - static 
PyObject *GetConnectionInterface(PyObject *self, PyObject *args); - static PyObject *GetConnectionPointContainer(PyObject *self, PyObject *args); - static PyObject *Advise(PyObject *self, PyObject *args); - static PyObject *Unadvise(PyObject *self, PyObject *args); - static PyObject *EnumConnections(PyObject *self, PyObject *args); - - protected: - PyIConnectionPoint(IUnknown *); - ~PyIConnectionPoint(); -}; - -class PYCOM_EXPORT PyIConnectionPointContainer : public PyIUnknown { - public: - MAKE_PYCOM_CTOR_ERRORINFO(PyIConnectionPointContainer, IID_IConnectionPointContainer); - static PyComTypeObject type; - static IConnectionPointContainer *GetI(PyObject *self); - - static PyObject *EnumConnectionPoints(PyObject *self, PyObject *args); - static PyObject *FindConnectionPoint(PyObject *self, PyObject *args); - - protected: - PyIConnectionPointContainer(IUnknown *); - ~PyIConnectionPointContainer(); -}; - -///////////////////////////////////////////////////////////////////////////// -// class PythonOleArgHelper -// -// A PythonOleArgHelper is used primarily to help out Python helpers -// which need to convert from a Python object when the specific OLE -// type is known - eg, when a TypeInfo is available. -// -// The type of conversion determines who owns what buffers etc. I wish BYREF didnt exist :-) -typedef enum { - // We dont know what sort of conversion it is yet. - POAH_CONVERT_UNKNOWN, - // A PyObject is given, we convert to a VARIANT, make the COM call, then BYREFs back to a PyObject - // ie, this is typically a "normal" COM call, where Python initiates the call - POAH_CONVERT_FROM_PYOBJECT, - // A VARIANT is given, we convert to a PyObject, make the Python call, then BYREFs back to a VARIANT. - // ie, this is typically handling a COM event, where COM itself initiates the call. 
- POAH_CONVERT_FROM_VARIANT, -} POAH_CONVERT_DIRECTION; - -class PYCOM_EXPORT PythonOleArgHelper { - public: - PythonOleArgHelper(); - ~PythonOleArgHelper(); - BOOL ParseTypeInformation(PyObject *reqdObjectTuple); - - // Using this call with reqdObject != NULL will check the existing - // VT_ of the variant. If not VT_EMPTY, then the result will be coerced to - // that type. This contrasts with PyCom_PyObjectToVariant which just - // uses the Python type to determine the variant type. - BOOL MakeObjToVariant(PyObject *obj, VARIANT *var, PyObject *reqdObjectTuple = NULL); - PyObject *MakeVariantToObj(VARIANT *var); - - VARTYPE m_reqdType; - BOOL m_bParsedTypeInfo; - BOOL m_bIsOut; - POAH_CONVERT_DIRECTION m_convertDirection; - PyObject *m_pyVariant; // if non-null, a win32com.client.VARIANT - union { - void *m_pValueHolder; - short m_sBuf; - long m_lBuf; - LONGLONG m_llBuf; - VARIANT_BOOL m_boolBuf; - double m_dBuf; - float m_fBuf; - IDispatch *m_dispBuf; - IUnknown *m_unkBuf; - SAFEARRAY *m_arrayBuf; - VARIANT *m_varBuf; - DATE m_dateBuf; - CY m_cyBuf; - }; -}; - -///////////////////////////////////////////////////////////////////////////// -// global functions and variables -PYCOM_EXPORT BOOL MakePythonArgumentTuples(PyObject **pArgs, PythonOleArgHelper **ppHelpers, PyObject **pNamedArgs, - PythonOleArgHelper **ppNamedHelpers, DISPPARAMS FAR *params); - -// Convert a Python object to a BSTR - allow embedded NULLs, None, etc. -PYCOM_EXPORT BOOL PyCom_BstrFromPyObject(PyObject *stringObject, BSTR *pResult, BOOL bNoneOK = FALSE); - -// MakeBstrToObj - convert a BSTR into a Python string. -// -// ONLY USE THIS FOR TRUE BSTR's - Use the fn below for OLECHAR *'s. -// NOTE - does not use standard macros, so NULLs get through! -PYCOM_EXPORT PyObject *MakeBstrToObj(const BSTR bstr); - -// Size info is available (eg, a fn returns a string and also fills in a size variable) -PYCOM_EXPORT PyObject *MakeOLECHARToObj(const OLECHAR *str, int numChars); - -// No size info avail. 
-PYCOM_EXPORT PyObject *MakeOLECHARToObj(const OLECHAR *str); - -PYCOM_EXPORT void PyCom_LogF(const WCHAR *fmt, ...); - -// Generic conversion from python sequence to VT_VECTOR array -// Resulting array must be freed with CoTaskMemFree -template -BOOL SeqToVector(PyObject *ob, arraytype **pA, ULONG *pcount, BOOL (*converter)(PyObject *, arraytype *)) -{ - TmpPyObject seq = PyWinSequence_Tuple(ob, pcount); - if (seq == NULL) - return FALSE; - *pA = (arraytype *)CoTaskMemAlloc(*pcount * sizeof(arraytype)); - if (*pA == NULL) { - PyErr_NoMemory(); - return FALSE; - } - for (ULONG i = 0; i < *pcount; i++) { - PyObject *item = PyTuple_GET_ITEM((PyObject *)seq, i); - if (!(*converter)(item, &(*pA)[i])) - return FALSE; - } - return TRUE; -} - -#endif // __PYTHONCOM_H__ diff --git a/lib/win32com/include/PythonCOMRegister.h b/lib/win32com/include/PythonCOMRegister.h deleted file mode 100644 index ab960f53..00000000 --- a/lib/win32com/include/PythonCOMRegister.h +++ /dev/null @@ -1,84 +0,0 @@ -// Support for PythonCOM and its extensions to register the interfaces, -// gateways and IIDs it supports. -// -// The module can simply declare an array of type PyCom_InterfaceSupportInfo, then -// use the macros to populate it. -// -// See Register.cpp and AXScript.cpp for examples on its use. - -#ifndef __PYTHONCOMREGISTER_H__ -#define __PYTHONCOMREGISTER_H__ - -#include "PythonCOMServer.h" // Need defns in this file... - -typedef struct { - const GUID *pGUID; // The supported IID - required - const char *interfaceName; // Name of the interface - required - const char *iidName; // Name of the IID that goes into the dict. - required - PyTypeObject *pTypeOb; // the type object for client PyI* side - NULL for server only support. 
- pfnPyGatewayConstructor ctor; // Gateway (PyG*) interface constructor - NULL for client only support - -} PyCom_InterfaceSupportInfo; - -#define PYCOM_INTERFACE_IID_ONLY(ifc) \ - { \ - &IID_I##ifc, "I" #ifc, "IID_I" #ifc, NULL, NULL \ - } -#define PYCOM_INTERFACE_CLSID_ONLY(ifc) \ - { \ - &CLSID_##ifc, "CLSID_" #ifc, "CLSID_" #ifc, NULL, NULL \ - } -#define PYCOM_INTERFACE_CATID_ONLY(ifc) \ - { \ - &CATID_##ifc, "CATID_" #ifc, "CATID_" #ifc, NULL, NULL \ - } -#define PYCOM_INTERFACE_CLIENT_ONLY(ifc) \ - { \ - &IID_I##ifc, "I" #ifc, "IID_I" #ifc, &PyI##ifc::type, NULL \ - } -#define PYCOM_INTERFACE_SERVER_ONLY(ifc) \ - { \ - &IID_I##ifc, "I" #ifc, "IID_I" #ifc, NULL, GET_PYGATEWAY_CTOR(PyG##ifc) \ - } -#define PYCOM_INTERFACE_FULL(ifc) \ - { \ - &IID_I##ifc, "I" #ifc, "IID_I" #ifc, &PyI##ifc::type, GET_PYGATEWAY_CTOR(PyG##ifc) \ - } - -// Versions that use __uuidof() to get the IID, which seems to avoid the need -// to link with a lib holding the IIDs. Note that almost all extensions -// build with __uuidof() being the default; the build failed at 'shell' - so -// we could consider making this the default and making the 'explicit' version -// above the special case. -#define PYCOM_INTERFACE_IID_ONLY_UUIDOF(ifc) \ - { \ - &__uuidof(I##ifc), "I" #ifc, "IID_I" #ifc, NULL, NULL \ - } -#define PYCOM_INTERFACE_CLIENT_ONLY_UUIDOF(ifc) \ - { \ - &__uuidof(I##ifc), "I" #ifc, "IID_I" #ifc, &PyI##ifc::type, NULL \ - } -#define PYCOM_INTERFACE_SERVER_ONLY_UUIDOF(ifc) \ - { \ - &__uuidof(I##ifc), "I" #ifc, "IID_I" #ifc, NULL, GET_PYGATEWAY_CTOR(PyG##ifc) \ - } -#define PYCOM_INTERFACE_FULL_UUIDOF(ifc) \ - { \ - &__uuidof(I##ifc), "I" #ifc, "IID_I" #ifc, &PyI##ifc::type, GET_PYGATEWAY_CTOR(PyG##ifc) \ - } - -// Prototypes for the register functions - -// Register a PythonCOM extension module -PYCOM_EXPORT int PyCom_RegisterExtensionSupport(PyObject *dict, const PyCom_InterfaceSupportInfo *pInterfaces, - int numEntries); - -// THESE SHOULD NO LONGER BE USED. 
Instead, use the functions above passing an -// array of PyCom_InterfaceSupportInfo objects. - -PYCOM_EXPORT int PyCom_RegisterClientType(PyTypeObject *typeOb, const GUID *guid); - -HRESULT PYCOM_EXPORT PyCom_RegisterGatewayObject(REFIID iid, pfnPyGatewayConstructor ctor, const char *interfaceName); -PYCOM_EXPORT int PyCom_IsGatewayRegistered(REFIID iid); - -#endif /* __PYTHONCOMREGISTER_H__ */ diff --git a/lib/win32com/include/PythonCOMServer.h b/lib/win32com/include/PythonCOMServer.h deleted file mode 100644 index d263b3ad..00000000 --- a/lib/win32com/include/PythonCOMServer.h +++ /dev/null @@ -1,176 +0,0 @@ -#ifndef __PYTHONCOMSERVER_H__ -#define __PYTHONCOMSERVER_H__ - -// PythonCOMServer.h :Server side COM support - -#include - -#define DLLAcquireGlobalLock PyWin_AcquireGlobalLock -#define DLLReleaseGlobalLock PyWin_ReleaseGlobalLock - -void PYCOM_EXPORT PyCom_DLLAddRef(void); -void PYCOM_EXPORT PyCom_DLLReleaseRef(void); - -// Use this macro at the start of all gateway methods. -#define PY_GATEWAY_METHOD CEnterLeavePython _celp - -class PyGatewayBase; -// Gateway constructors. -// Each gateway must be able to be created from a "gateway constructor". This -// is simply a function that takes a Python instance as as argument, and returns -// a gateway object of the correct type. The MAKE_PYGATEWAY_CTOR is a helper that -// will embed such a constructor in the class - however, this is not necessary - -// _any_ function of the correct signature can be used. 
- -typedef HRESULT (*pfnPyGatewayConstructor)(PyObject *PythonInstance, PyGatewayBase *, void **ppResult, REFIID iid); -HRESULT PyCom_MakeRegisteredGatewayObject(REFIID iid, PyObject *instance, PyGatewayBase *base, void **ppv); - -// A version of the above which support classes being derived from -// other than IUnknown -#define PYGATEWAY_MAKE_SUPPORT2(classname, IInterface, theIID, gatewaybaseclass) \ - public: \ - static HRESULT PyGatewayConstruct(PyObject *pPyInstance, PyGatewayBase *unkBase, void **ppResult, \ - REFIID iid) \ - { \ - if (ppResult == NULL) \ - return E_INVALIDARG; \ - classname *newob = new classname(pPyInstance); \ - newob->m_pBaseObject = unkBase; \ - if (unkBase) \ - unkBase->AddRef(); \ - *ppResult = newob->ThisAsIID(iid); \ - return *ppResult ? S_OK : E_OUTOFMEMORY; \ - } \ - \ - protected: \ - virtual IID GetIID(void) { return theIID; } \ - virtual void *ThisAsIID(IID iid) \ - { \ - if (this == NULL) \ - return NULL; \ - if (iid == theIID) \ - return (IInterface *)this; \ - else \ - return gatewaybaseclass::ThisAsIID(iid); \ - } \ - STDMETHOD_(ULONG, AddRef)(void) { return gatewaybaseclass::AddRef(); } \ - STDMETHOD_(ULONG, Release)(void) { return gatewaybaseclass::Release(); } \ - STDMETHOD(QueryInterface)(REFIID iid, void **obj) { return gatewaybaseclass::QueryInterface(iid, obj); }; - -// This is the "old" version to use, or use it if you derive -// directly from PyGatewayBase -#define PYGATEWAY_MAKE_SUPPORT(classname, IInterface, theIID) \ - PYGATEWAY_MAKE_SUPPORT2(classname, IInterface, theIID, PyGatewayBase) - -#define GET_PYGATEWAY_CTOR(classname) classname::PyGatewayConstruct - -#ifdef _MSC_VER -// Disable an OK warning... 
-#pragma warning(disable : 4275) -// warning C4275: non dll-interface struct 'IDispatch' used as base for dll-interface class 'PyGatewayBase' -#endif // _MSC_VER - -// Helper interface for fetching a Python object from a gateway - -extern const GUID IID_IInternalUnwrapPythonObject; - -interface IInternalUnwrapPythonObject : public IUnknown -{ - public: - STDMETHOD(Unwrap)(PyObject * *ppPyObject) = 0; -}; - -///////////////////////////////////////////////////////////////////////////// -// PyGatewayBase -// -// Base class for all gateways. -// -class PYCOM_EXPORT PyGatewayBase : -#ifndef NO_PYCOM_IDISPATCHEX - public IDispatchEx, // IDispatch comes along for the ride! -#else - public IDispatch, // No IDispatchEx - must explicitely use IDispatch -#endif - public ISupportErrorInfo, - public IInternalUnwrapPythonObject { - protected: - PyGatewayBase(PyObject *instance); - virtual ~PyGatewayBase(); - - // Invoke the Python method (via the policy object) - STDMETHOD(InvokeViaPolicy)(const char *szMethodName, PyObject **ppResult = NULL, const char *szFormat = NULL, ...); - - public: - // IUnknown - STDMETHOD_(ULONG, AddRef)(void); - STDMETHOD_(ULONG, Release)(void); - STDMETHOD(QueryInterface)(REFIID iid, void **obj); - - // IDispatch - STDMETHOD(GetTypeInfoCount)(UINT FAR *pctInfo); - STDMETHOD(GetTypeInfo)(UINT itinfo, LCID lcid, ITypeInfo FAR *FAR *pptInfo); - STDMETHOD(GetIDsOfNames)(REFIID refiid, OLECHAR FAR *FAR *rgszNames, UINT cNames, LCID lcid, DISPID FAR *rgdispid); - STDMETHOD(Invoke) - (DISPID dispid, REFIID riid, LCID lcid, WORD wFlags, DISPPARAMS FAR *params, VARIANT FAR *pVarResult, - EXCEPINFO FAR *pexcepinfo, UINT FAR *puArgErr); - - // IDispatchEx -#ifndef NO_PYCOM_IDISPATCHEX - STDMETHOD(GetDispID)(BSTR bstrName, DWORD grfdex, DISPID *pid); - STDMETHOD(InvokeEx) - (DISPID id, LCID lcid, WORD wFlags, DISPPARAMS *pdp, VARIANT *pvarRes, EXCEPINFO *pei, IServiceProvider *pspCaller); - STDMETHOD(DeleteMemberByName)(BSTR bstr, DWORD grfdex); - 
STDMETHOD(DeleteMemberByDispID)(DISPID id); - STDMETHOD(GetMemberProperties)(DISPID id, DWORD grfdexFetch, DWORD *pgrfdex); - STDMETHOD(GetMemberName)(DISPID id, BSTR *pbstrName); - STDMETHOD(GetNextDispID)(DWORD grfdex, DISPID id, DISPID *pid); - STDMETHOD(GetNameSpaceParent)(IUnknown **ppunk); -#endif // NO_PYCOM_IDISPATCHEX - // ISupportErrorInfo - STDMETHOD(InterfaceSupportsErrorInfo)(REFIID riid); - - // IInternalUnwrapPythonObject - STDMETHOD(Unwrap)(PyObject **ppPyObject); - - // Basically just PYGATEWAY_MAKE_SUPPORT(PyGatewayBase, IDispatch, IID_IDispatch); - // but with special handling as its the base class. - static HRESULT PyGatewayConstruct(PyObject *pPyInstance, PyGatewayBase *gatewayBase, void **ppResult, - REFIID iid) - { - if (ppResult == NULL) - return E_INVALIDARG; - PyGatewayBase *obNew = new PyGatewayBase(pPyInstance); - obNew->m_pBaseObject = gatewayBase; - if (gatewayBase) - gatewayBase->AddRef(); - *ppResult = (IDispatch *)obNew; - return *ppResult ? S_OK : E_OUTOFMEMORY; - } - // Currently this is used only for ISupportErrorInfo, - // so hopefully this will never be called in this base class. - // (however, this is not a rule, so we wont assert or anything!) - virtual IID GetIID(void) { return IID_IUnknown; } - virtual void *ThisAsIID(IID iid); - // End of PYGATEWAY_MAKE_SUPPORT - PyObject *m_pPyObject; - PyGatewayBase *m_pBaseObject; - - private: - LONG m_cRef; -}; - -#ifdef _MSC_VER -#pragma warning(default : 4275) -#endif // _MSC_VER - -// B/W compat hack for gateways. -#define PyCom_HandlePythonFailureToCOM() \ - PyCom_SetAndLogCOMErrorFromPyExceptionEx(this->m_pPyObject, "", GetIID()) - -// F/W compat hack for gateways! Must be careful about updating -// PyGatewayBase vtable, so a slightly older pythoncomXX.dll will work -// with slightly later extensions. So use a #define. 
-#define MAKE_PYCOM_GATEWAY_FAILURE_CODE(method_name) \ - PyCom_SetAndLogCOMErrorFromPyExceptionEx(this->m_pPyObject, method_name, GetIID()) - -#endif /* __PYTHONCOMSERVER_H__ */ diff --git a/lib/win32com/makegw/__init__.py b/lib/win32com/makegw/__init__.py deleted file mode 100644 index 6178c97c..00000000 --- a/lib/win32com/makegw/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# indicates a python package. diff --git a/lib/win32com/makegw/makegw.py b/lib/win32com/makegw/makegw.py deleted file mode 100644 index 8d2e83d6..00000000 --- a/lib/win32com/makegw/makegw.py +++ /dev/null @@ -1,560 +0,0 @@ -"""Utility functions for writing out gateway C++ files - - This module will generate a C++/Python binding for a specific COM - interface. - - At this stage, no command line interface exists. You must start Python, - import this module, change to the directory where the generated code should - be written, and run the public function. - - This module is capable of generating both 'Interfaces' (ie, Python - client side support for the interface) and 'Gateways' (ie, Python - server side support for the interface). Many COM interfaces are useful - both as Client and Server. Other interfaces, however, really only make - sense to implement one side or the other. For example, it would be pointless - for Python to implement Server side for 'IRunningObjectTable', unless we were - implementing core COM for an operating system in Python (hey - now there's an idea!) - - Most COM interface code is totally boiler-plate - it consists of - converting arguments, dispatching the call to Python, and processing - any result values. - - This module automates the generation of such code. It has the ability to - parse a .H file generated by the MIDL tool (ie, almost all COM .h files) - and build almost totally complete C++ code. - - The module understands some of the well known data types, and how to - convert them. 
There are only a couple of places where hand-editing is - necessary, as detailed below: - - unsupported types -- If a type is not known, the generator will - pretty much ignore it, but write a comment to the generated code. You - may want to add custom support for this type. In some cases, C++ compile errors - will result. These are intentional - generating code to remove these errors would - imply a false sense of security that the generator has done the right thing. - - other return policies -- By default, Python never sees the return SCODE from - a COM function. The interface usually returns None if OK, else a COM exception - if "FAILED(scode)" is TRUE. You may need to change this if: - * EXCEPINFO is passed to the COM function. This is not detected and handled - * For some reason Python should always see the result SCODE, even if it - did fail or succeed. For example, some functions return a BOOLEAN result - in the SCODE, meaning Python should always see it. - * FAILED(scode) for the interface still has valid data to return (by default, - the code generated does not process the return values, and raise an exception - to Python/COM - -""" - -import re - -from . import makegwparse - - -def make_framework_support( - header_file_name, interface_name, bMakeInterface=1, bMakeGateway=1 -): - """Generate C++ code for a Python Interface and Gateway - - header_file_name -- The full path to the .H file which defines the interface. - interface_name -- The name of the interface to search for, and to generate. - bMakeInterface = 1 -- Should interface (ie, client) support be generated. - bMakeGatewayInterface = 1 -- Should gateway (ie, server) support be generated. - - This method will write a .cpp and .h file into the current directory, - (using the name of the interface to build the file name. 
- - """ - fin = open(header_file_name) - try: - interface = makegwparse.parse_interface_info(interface_name, fin) - finally: - fin.close() - - if bMakeInterface and bMakeGateway: - desc = "Interface and Gateway" - elif bMakeInterface and not bMakeGateway: - desc = "Interface" - else: - desc = "Gateway" - if interface.name[:5] == "IEnum": # IEnum - use my really simple template-based one - import win32com.makegw.makegwenum - - ifc_cpp_writer = win32com.makegw.makegwenum._write_enumifc_cpp - gw_cpp_writer = win32com.makegw.makegwenum._write_enumgw_cpp - else: # Use my harder working ones. - ifc_cpp_writer = _write_ifc_cpp - gw_cpp_writer = _write_gw_cpp - - fout = open("Py%s.cpp" % interface.name, "w") - try: - fout.write( - """\ -// This file implements the %s %s for Python. -// Generated by makegw.py - -#include "shell_pch.h" -""" - % (interface.name, desc) - ) - # if bMakeGateway: - # fout.write('#include "PythonCOMServer.h"\n') - # if interface.base not in ["IUnknown", "IDispatch"]: - # fout.write('#include "Py%s.h"\n' % interface.base) - fout.write( - '#include "Py%s.h"\n\n// @doc - This file contains autoduck documentation\n' - % interface.name - ) - if bMakeInterface: - ifc_cpp_writer(fout, interface) - if bMakeGateway: - gw_cpp_writer(fout, interface) - finally: - fout.close() - fout = open("Py%s.h" % interface.name, "w") - try: - fout.write( - """\ -// This file declares the %s %s for Python. 
-// Generated by makegw.py -""" - % (interface.name, desc) - ) - - if bMakeInterface: - _write_ifc_h(fout, interface) - if bMakeGateway: - _write_gw_h(fout, interface) - finally: - fout.close() - - -########################################################################### -# -# INTERNAL FUNCTIONS -# -# - - -def _write_ifc_h(f, interface): - f.write( - """\ -// --------------------------------------------------- -// -// Interface Declaration - -class Py%s : public Py%s -{ -public: - MAKE_PYCOM_CTOR(Py%s); - static %s *GetI(PyObject *self); - static PyComTypeObject type; - - // The Python methods -""" - % (interface.name, interface.base, interface.name, interface.name) - ) - for method in interface.methods: - f.write( - "\tstatic PyObject *%s(PyObject *self, PyObject *args);\n" % method.name - ) - f.write( - """\ - -protected: - Py%s(IUnknown *pdisp); - ~Py%s(); -}; -""" - % (interface.name, interface.name) - ) - - -def _write_ifc_cpp(f, interface): - name = interface.name - f.write( - """\ -// --------------------------------------------------- -// -// Interface Implementation - -Py%(name)s::Py%(name)s(IUnknown *pdisp): - Py%(base)s(pdisp) -{ - ob_type = &type; -} - -Py%(name)s::~Py%(name)s() -{ -} - -/* static */ %(name)s *Py%(name)s::GetI(PyObject *self) -{ - return (%(name)s *)Py%(base)s::GetI(self); -} - -""" - % (interface.__dict__) - ) - - ptr = re.sub("[a-z]", "", interface.name) - strdict = {"interfacename": interface.name, "ptr": ptr} - for method in interface.methods: - strdict["method"] = method.name - f.write( - """\ -// @pymethod |Py%(interfacename)s|%(method)s|Description of %(method)s. 
-PyObject *Py%(interfacename)s::%(method)s(PyObject *self, PyObject *args) -{ - %(interfacename)s *p%(ptr)s = GetI(self); - if ( p%(ptr)s == NULL ) - return NULL; -""" - % strdict - ) - argsParseTuple = ( - argsCOM - ) = ( - formatChars - ) = codePost = codePobjects = codeCobjects = cleanup = cleanup_gil = "" - needConversion = 0 - # if method.name=="Stat": import win32dbg;win32dbg.brk() - for arg in method.args: - try: - argCvt = makegwparse.make_arg_converter(arg) - if arg.HasAttribute("in"): - val = argCvt.GetFormatChar() - if val: - f.write("\t" + argCvt.GetAutoduckString() + "\n") - formatChars = formatChars + val - argsParseTuple = ( - argsParseTuple + ", " + argCvt.GetParseTupleArg() - ) - codePobjects = ( - codePobjects + argCvt.DeclareParseArgTupleInputConverter() - ) - codePost = codePost + argCvt.GetParsePostCode() - needConversion = needConversion or argCvt.NeedUSES_CONVERSION() - cleanup = cleanup + argCvt.GetInterfaceArgCleanup() - cleanup_gil = cleanup_gil + argCvt.GetInterfaceArgCleanupGIL() - comArgName, comArgDeclString = argCvt.GetInterfaceCppObjectInfo() - if comArgDeclString: # If we should declare a variable - codeCobjects = codeCobjects + "\t%s;\n" % (comArgDeclString) - argsCOM = argsCOM + ", " + comArgName - except makegwparse.error_not_supported as why: - f.write( - '// *** The input argument %s of type "%s" was not processed ***\n// Please check the conversion function is appropriate and exists!\n' - % (arg.name, arg.raw_type) - ) - - f.write( - "\t%s %s;\n\tPyObject *ob%s;\n" % (arg.type, arg.name, arg.name) - ) - f.write( - "\t// @pyparm |%s||Description for %s\n" - % (arg.type, arg.name, arg.name) - ) - codePost = ( - codePost - + "\tif (bPythonIsHappy && !PyObject_As%s( ob%s, &%s )) bPythonIsHappy = FALSE;\n" - % (arg.type, arg.name, arg.name) - ) - - formatChars = formatChars + "O" - argsParseTuple = argsParseTuple + ", &ob%s" % (arg.name) - - argsCOM = argsCOM + ", " + arg.name - cleanup = cleanup + "\tPyObject_Free%s(%s);\n" % 
(arg.type, arg.name) - - if needConversion: - f.write("\tUSES_CONVERSION;\n") - f.write(codePobjects) - f.write(codeCobjects) - f.write( - '\tif ( !PyArg_ParseTuple(args, "%s:%s"%s) )\n\t\treturn NULL;\n' - % (formatChars, method.name, argsParseTuple) - ) - if codePost: - f.write("\tBOOL bPythonIsHappy = TRUE;\n") - f.write(codePost) - f.write("\tif (!bPythonIsHappy) return NULL;\n") - strdict["argsCOM"] = argsCOM[1:] - strdict["cleanup"] = cleanup - strdict["cleanup_gil"] = cleanup_gil - f.write( - """ HRESULT hr; - PY_INTERFACE_PRECALL; - hr = p%(ptr)s->%(method)s(%(argsCOM)s ); -%(cleanup)s - PY_INTERFACE_POSTCALL; -%(cleanup_gil)s - if ( FAILED(hr) ) - return PyCom_BuildPyException(hr, p%(ptr)s, IID_%(interfacename)s ); -""" - % strdict - ) - codePre = codePost = formatChars = codeVarsPass = codeDecl = "" - for arg in method.args: - if not arg.HasAttribute("out"): - continue - try: - argCvt = makegwparse.make_arg_converter(arg) - formatChar = argCvt.GetFormatChar() - if formatChar: - formatChars = formatChars + formatChar - codePre = codePre + argCvt.GetBuildForInterfacePreCode() - codePost = codePost + argCvt.GetBuildForInterfacePostCode() - codeVarsPass = codeVarsPass + ", " + argCvt.GetBuildValueArg() - codeDecl = codeDecl + argCvt.DeclareParseArgTupleInputConverter() - except makegwparse.error_not_supported as why: - f.write( - '// *** The output argument %s of type "%s" was not processed ***\n// %s\n' - % (arg.name, arg.raw_type, why) - ) - continue - if formatChars: - f.write( - '%s\n%s\tPyObject *pyretval = Py_BuildValue("%s"%s);\n%s\treturn pyretval;' - % (codeDecl, codePre, formatChars, codeVarsPass, codePost) - ) - else: - f.write("\tPy_INCREF(Py_None);\n\treturn Py_None;\n") - f.write("\n}\n\n") - - f.write("// @object Py%s|Description of the interface\n" % (name)) - f.write("static struct PyMethodDef Py%s_methods[] =\n{\n" % name) - for method in interface.methods: - f.write( - '\t{ "%s", Py%s::%s, 1 }, // @pymeth %s|Description of %s\n' - % 
(method.name, interface.name, method.name, method.name, method.name) - ) - - interfacebase = interface.base - f.write( - """\ - { NULL } -}; - -PyComTypeObject Py%(name)s::type("Py%(name)s", - &Py%(interfacebase)s::type, - sizeof(Py%(name)s), - Py%(name)s_methods, - GET_PYCOM_CTOR(Py%(name)s)); -""" - % locals() - ) - - -def _write_gw_h(f, interface): - if interface.name[0] == "I": - gname = "PyG" + interface.name[1:] - else: - gname = "PyG" + interface.name - name = interface.name - if interface.base == "IUnknown" or interface.base == "IDispatch": - base_name = "PyGatewayBase" - else: - if interface.base[0] == "I": - base_name = "PyG" + interface.base[1:] - else: - base_name = "PyG" + interface.base - f.write( - """\ -// --------------------------------------------------- -// -// Gateway Declaration - -class %s : public %s, public %s -{ -protected: - %s(PyObject *instance) : %s(instance) { ; } - PYGATEWAY_MAKE_SUPPORT2(%s, %s, IID_%s, %s) - -""" - % (gname, base_name, name, gname, base_name, gname, name, name, base_name) - ) - if interface.base != "IUnknown": - f.write( - "\t// %s\n\t// *** Manually add %s method decls here\n\n" - % (interface.base, interface.base) - ) - else: - f.write("\n\n") - - f.write("\t// %s\n" % name) - - for method in interface.methods: - f.write("\tSTDMETHOD(%s)(\n" % method.name) - if method.args: - for arg in method.args[:-1]: - f.write("\t\t%s,\n" % (arg.GetRawDeclaration())) - arg = method.args[-1] - f.write("\t\t%s);\n\n" % (arg.GetRawDeclaration())) - else: - f.write("\t\tvoid);\n\n") - - f.write("};\n") - f.close() - - -def _write_gw_cpp(f, interface): - if interface.name[0] == "I": - gname = "PyG" + interface.name[1:] - else: - gname = "PyG" + interface.name - name = interface.name - if interface.base == "IUnknown" or interface.base == "IDispatch": - base_name = "PyGatewayBase" - else: - if interface.base[0] == "I": - base_name = "PyG" + interface.base[1:] - else: - base_name = "PyG" + interface.base - f.write( - """\ -// 
--------------------------------------------------- -// -// Gateway Implementation -""" - % {"name": name, "gname": gname, "base_name": base_name} - ) - - for method in interface.methods: - f.write( - """\ -STDMETHODIMP %s::%s( -""" - % (gname, method.name) - ) - - if method.args: - for arg in method.args[:-1]: - inoutstr = "][".join(arg.inout) - f.write("\t\t/* [%s] */ %s,\n" % (inoutstr, arg.GetRawDeclaration())) - - arg = method.args[-1] - inoutstr = "][".join(arg.inout) - f.write("\t\t/* [%s] */ %s)\n" % (inoutstr, arg.GetRawDeclaration())) - else: - f.write("\t\tvoid)\n") - - f.write("{\n\tPY_GATEWAY_METHOD;\n") - cout = 0 - codePre = codePost = codeVars = "" - argStr = "" - needConversion = 0 - formatChars = "" - if method.args: - for arg in method.args: - if arg.HasAttribute("out"): - cout = cout + 1 - if arg.indirectionLevel == 2: - f.write("\tif (%s==NULL) return E_POINTER;\n" % arg.name) - if arg.HasAttribute("in"): - try: - argCvt = makegwparse.make_arg_converter(arg) - argCvt.SetGatewayMode() - formatchar = argCvt.GetFormatChar() - needConversion = needConversion or argCvt.NeedUSES_CONVERSION() - - if formatchar: - formatChars = formatChars + formatchar - codeVars = ( - codeVars + argCvt.DeclareParseArgTupleInputConverter() - ) - argStr = argStr + ", " + argCvt.GetBuildValueArg() - codePre = codePre + argCvt.GetBuildForGatewayPreCode() - codePost = codePost + argCvt.GetBuildForGatewayPostCode() - except makegwparse.error_not_supported as why: - f.write( - '// *** The input argument %s of type "%s" was not processed ***\n// - Please ensure this conversion function exists, and is appropriate\n// - %s\n' - % (arg.name, arg.raw_type, why) - ) - f.write( - "\tPyObject *ob%s = PyObject_From%s(%s);\n" - % (arg.name, arg.type, arg.name) - ) - f.write( - '\tif (ob%s==NULL) return MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' - % (arg.name, method.name) - ) - codePost = codePost + "\tPy_DECREF(ob%s);\n" % arg.name - formatChars = formatChars + "O" - argStr = argStr + 
", ob%s" % (arg.name) - - if needConversion: - f.write("\tUSES_CONVERSION;\n") - f.write(codeVars) - f.write(codePre) - if cout: - f.write("\tPyObject *result;\n") - resStr = "&result" - else: - resStr = "NULL" - - if formatChars: - fullArgStr = '%s, "%s"%s' % (resStr, formatChars, argStr) - else: - fullArgStr = resStr - - f.write('\tHRESULT hr=InvokeViaPolicy("%s", %s);\n' % (method.name, fullArgStr)) - f.write(codePost) - if cout: - f.write("\tif (FAILED(hr)) return hr;\n") - f.write( - "\t// Process the Python results, and convert back to the real params\n" - ) - # process the output arguments. - formatChars = codePobjects = codePost = argsParseTuple = "" - needConversion = 0 - for arg in method.args: - if not arg.HasAttribute("out"): - continue - try: - argCvt = makegwparse.make_arg_converter(arg) - argCvt.SetGatewayMode() - val = argCvt.GetFormatChar() - if val: - formatChars = formatChars + val - argsParseTuple = ( - argsParseTuple + ", " + argCvt.GetParseTupleArg() - ) - codePobjects = ( - codePobjects + argCvt.DeclareParseArgTupleInputConverter() - ) - codePost = codePost + argCvt.GetParsePostCode() - needConversion = needConversion or argCvt.NeedUSES_CONVERSION() - except makegwparse.error_not_supported as why: - f.write( - '// *** The output argument %s of type "%s" was not processed ***\n// %s\n' - % (arg.name, arg.raw_type, why) - ) - - if formatChars: # If I have any to actually process. 
- if len(formatChars) == 1: - parseFn = "PyArg_Parse" - else: - parseFn = "PyArg_ParseTuple" - if codePobjects: - f.write(codePobjects) - f.write( - '\tif (!%s(result, "%s" %s))\n\t\treturn MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' - % (parseFn, formatChars, argsParseTuple, method.name) - ) - if codePost: - f.write("\tBOOL bPythonIsHappy = TRUE;\n") - f.write(codePost) - f.write( - '\tif (!bPythonIsHappy) hr = MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' - % method.name - ) - f.write("\tPy_DECREF(result);\n") - f.write("\treturn hr;\n}\n\n") - - -def test(): - # make_framework_support("d:\\msdev\\include\\objidl.h", "ILockBytes") - make_framework_support("d:\\msdev\\include\\objidl.h", "IStorage") - - -# make_framework_support("d:\\msdev\\include\\objidl.h", "IEnumSTATSTG") diff --git a/lib/win32com/makegw/makegwenum.py b/lib/win32com/makegw/makegwenum.py deleted file mode 100644 index 39c9b2d1..00000000 --- a/lib/win32com/makegw/makegwenum.py +++ /dev/null @@ -1,331 +0,0 @@ -"""Utility file for generating PyIEnum support. - -This is almost a 'template' file. It simplay contains almost full -C++ source code for PyIEnum* support, and the Python code simply -substitutes the appropriate interface name. - -This module is notmally not used directly - the @makegw@ module -automatically calls this. -""" -# -# INTERNAL FUNCTIONS -# -# -import string - - -def is_interface_enum(enumtype): - return not (enumtype[0] in string.uppercase and enumtype[2] in string.uppercase) - - -def _write_enumifc_cpp(f, interface): - enumtype = interface.name[5:] - if is_interface_enum(enumtype): - # Assume an interface. 
- enum_interface = "I" + enumtype[:-1] - converter = ( - "PyObject *ob = PyCom_PyObjectFromIUnknown(rgVar[i], IID_%(enum_interface)s, FALSE);" - % locals() - ) - arraydeclare = ( - "%(enum_interface)s **rgVar = new %(enum_interface)s *[celt];" % locals() - ) - else: - # Enum of a simple structure - converter = ( - "PyObject *ob = PyCom_PyObjectFrom%(enumtype)s(&rgVar[i]);" % locals() - ) - arraydeclare = "%(enumtype)s *rgVar = new %(enumtype)s[celt];" % locals() - - f.write( - """ -// --------------------------------------------------- -// -// Interface Implementation - -PyIEnum%(enumtype)s::PyIEnum%(enumtype)s(IUnknown *pdisp): - PyIUnknown(pdisp) -{ - ob_type = &type; -} - -PyIEnum%(enumtype)s::~PyIEnum%(enumtype)s() -{ -} - -/* static */ IEnum%(enumtype)s *PyIEnum%(enumtype)s::GetI(PyObject *self) -{ - return (IEnum%(enumtype)s *)PyIUnknown::GetI(self); -} - -// @pymethod object|PyIEnum%(enumtype)s|Next|Retrieves a specified number of items in the enumeration sequence. -PyObject *PyIEnum%(enumtype)s::Next(PyObject *self, PyObject *args) -{ - long celt = 1; - // @pyparm int|num|1|Number of items to retrieve. - if ( !PyArg_ParseTuple(args, "|l:Next", &celt) ) - return NULL; - - IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self); - if ( pIE%(enumtype)s == NULL ) - return NULL; - - %(arraydeclare)s - if ( rgVar == NULL ) { - PyErr_SetString(PyExc_MemoryError, "allocating result %(enumtype)ss"); - return NULL; - } - - int i; -/* for ( i = celt; i--; ) - // *** possibly init each structure element??? 
-*/ - - ULONG celtFetched = 0; - PY_INTERFACE_PRECALL; - HRESULT hr = pIE%(enumtype)s->Next(celt, rgVar, &celtFetched); - PY_INTERFACE_POSTCALL; - if ( HRESULT_CODE(hr) != ERROR_NO_MORE_ITEMS && FAILED(hr) ) - { - delete [] rgVar; - return PyCom_BuildPyException(hr,pIE%(enumtype)s, IID_IE%(enumtype)s); - } - - PyObject *result = PyTuple_New(celtFetched); - if ( result != NULL ) - { - for ( i = celtFetched; i--; ) - { - %(converter)s - if ( ob == NULL ) - { - Py_DECREF(result); - result = NULL; - break; - } - PyTuple_SET_ITEM(result, i, ob); - } - } - -/* for ( i = celtFetched; i--; ) - // *** possibly cleanup each structure element??? -*/ - delete [] rgVar; - return result; -} - -// @pymethod |PyIEnum%(enumtype)s|Skip|Skips over the next specified elementes. -PyObject *PyIEnum%(enumtype)s::Skip(PyObject *self, PyObject *args) -{ - long celt; - if ( !PyArg_ParseTuple(args, "l:Skip", &celt) ) - return NULL; - - IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self); - if ( pIE%(enumtype)s == NULL ) - return NULL; - - PY_INTERFACE_PRECALL; - HRESULT hr = pIE%(enumtype)s->Skip(celt); - PY_INTERFACE_POSTCALL; - if ( FAILED(hr) ) - return PyCom_BuildPyException(hr, pIE%(enumtype)s, IID_IE%(enumtype)s); - - Py_INCREF(Py_None); - return Py_None; -} - -// @pymethod |PyIEnum%(enumtype)s|Reset|Resets the enumeration sequence to the beginning. 
-PyObject *PyIEnum%(enumtype)s::Reset(PyObject *self, PyObject *args) -{ - if ( !PyArg_ParseTuple(args, ":Reset") ) - return NULL; - - IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self); - if ( pIE%(enumtype)s == NULL ) - return NULL; - - PY_INTERFACE_PRECALL; - HRESULT hr = pIE%(enumtype)s->Reset(); - PY_INTERFACE_POSTCALL; - if ( FAILED(hr) ) - return PyCom_BuildPyException(hr, pIE%(enumtype)s, IID_IE%(enumtype)s); - - Py_INCREF(Py_None); - return Py_None; -} - -// @pymethod |PyIEnum%(enumtype)s|Clone|Creates another enumerator that contains the same enumeration state as the current one -PyObject *PyIEnum%(enumtype)s::Clone(PyObject *self, PyObject *args) -{ - if ( !PyArg_ParseTuple(args, ":Clone") ) - return NULL; - - IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self); - if ( pIE%(enumtype)s == NULL ) - return NULL; - - IEnum%(enumtype)s *pClone; - PY_INTERFACE_PRECALL; - HRESULT hr = pIE%(enumtype)s->Clone(&pClone); - PY_INTERFACE_POSTCALL; - if ( FAILED(hr) ) - return PyCom_BuildPyException(hr, pIE%(enumtype)s, IID_IE%(enumtype)s); - - return PyCom_PyObjectFromIUnknown(pClone, IID_IEnum%(enumtype)s, FALSE); -} - -// @object PyIEnum%(enumtype)s|A Python interface to IEnum%(enumtype)s -static struct PyMethodDef PyIEnum%(enumtype)s_methods[] = -{ - { "Next", PyIEnum%(enumtype)s::Next, 1 }, // @pymeth Next|Retrieves a specified number of items in the enumeration sequence. - { "Skip", PyIEnum%(enumtype)s::Skip, 1 }, // @pymeth Skip|Skips over the next specified elementes. - { "Reset", PyIEnum%(enumtype)s::Reset, 1 }, // @pymeth Reset|Resets the enumeration sequence to the beginning. - { "Clone", PyIEnum%(enumtype)s::Clone, 1 }, // @pymeth Clone|Creates another enumerator that contains the same enumeration state as the current one. 
- { NULL } -}; - -PyComEnumTypeObject PyIEnum%(enumtype)s::type("PyIEnum%(enumtype)s", - &PyIUnknown::type, - sizeof(PyIEnum%(enumtype)s), - PyIEnum%(enumtype)s_methods, - GET_PYCOM_CTOR(PyIEnum%(enumtype)s)); -""" - % locals() - ) - - -def _write_enumgw_cpp(f, interface): - enumtype = interface.name[5:] - if is_interface_enum(enumtype): - # Assume an interface. - enum_interface = "I" + enumtype[:-1] - converter = ( - "if ( !PyCom_InterfaceFromPyObject(ob, IID_%(enum_interface)s, (void **)&rgVar[i], FALSE) )" - % locals() - ) - argdeclare = "%(enum_interface)s __RPC_FAR * __RPC_FAR *rgVar" % locals() - else: - argdeclare = "%(enumtype)s __RPC_FAR *rgVar" % locals() - converter = "if ( !PyCom_PyObjectAs%(enumtype)s(ob, &rgVar[i]) )" % locals() - f.write( - """ -// --------------------------------------------------- -// -// Gateway Implementation - -// Std delegation -STDMETHODIMP_(ULONG) PyGEnum%(enumtype)s::AddRef(void) {return PyGatewayBase::AddRef();} -STDMETHODIMP_(ULONG) PyGEnum%(enumtype)s::Release(void) {return PyGatewayBase::Release();} -STDMETHODIMP PyGEnum%(enumtype)s::QueryInterface(REFIID iid, void ** obj) {return PyGatewayBase::QueryInterface(iid, obj);} -STDMETHODIMP PyGEnum%(enumtype)s::GetTypeInfoCount(UINT FAR* pctInfo) {return PyGatewayBase::GetTypeInfoCount(pctInfo);} -STDMETHODIMP PyGEnum%(enumtype)s::GetTypeInfo(UINT itinfo, LCID lcid, ITypeInfo FAR* FAR* pptInfo) {return PyGatewayBase::GetTypeInfo(itinfo, lcid, pptInfo);} -STDMETHODIMP PyGEnum%(enumtype)s::GetIDsOfNames(REFIID refiid, OLECHAR FAR* FAR* rgszNames, UINT cNames, LCID lcid, DISPID FAR* rgdispid) {return PyGatewayBase::GetIDsOfNames( refiid, rgszNames, cNames, lcid, rgdispid);} -STDMETHODIMP PyGEnum%(enumtype)s::Invoke(DISPID dispid, REFIID riid, LCID lcid, WORD wFlags, DISPPARAMS FAR* params, VARIANT FAR* pVarResult, EXCEPINFO FAR* pexcepinfo, UINT FAR* puArgErr) {return PyGatewayBase::Invoke( dispid, riid, lcid, wFlags, params, pVarResult, pexcepinfo, puArgErr);} - -STDMETHODIMP 
PyGEnum%(enumtype)s::Next( - /* [in] */ ULONG celt, - /* [length_is][size_is][out] */ %(argdeclare)s, - /* [out] */ ULONG __RPC_FAR *pCeltFetched) -{ - PY_GATEWAY_METHOD; - PyObject *result; - HRESULT hr = InvokeViaPolicy("Next", &result, "i", celt); - if ( FAILED(hr) ) - return hr; - - if ( !PySequence_Check(result) ) - goto error; - int len; - len = PyObject_Length(result); - if ( len == -1 ) - goto error; - if ( len > (int)celt) - len = celt; - - if ( pCeltFetched ) - *pCeltFetched = len; - - int i; - for ( i = 0; i < len; ++i ) - { - PyObject *ob = PySequence_GetItem(result, i); - if ( ob == NULL ) - goto error; - - %(converter)s - { - Py_DECREF(result); - return PyCom_SetCOMErrorFromPyException(IID_IEnum%(enumtype)s); - } - } - - Py_DECREF(result); - - return len < (int)celt ? S_FALSE : S_OK; - - error: - PyErr_Clear(); // just in case - Py_DECREF(result); - return PyCom_HandleIEnumNoSequence(IID_IEnum%(enumtype)s); -} - -STDMETHODIMP PyGEnum%(enumtype)s::Skip( - /* [in] */ ULONG celt) -{ - PY_GATEWAY_METHOD; - return InvokeViaPolicy("Skip", NULL, "i", celt); -} - -STDMETHODIMP PyGEnum%(enumtype)s::Reset(void) -{ - PY_GATEWAY_METHOD; - return InvokeViaPolicy("Reset"); -} - -STDMETHODIMP PyGEnum%(enumtype)s::Clone( - /* [out] */ IEnum%(enumtype)s __RPC_FAR *__RPC_FAR *ppEnum) -{ - PY_GATEWAY_METHOD; - PyObject * result; - HRESULT hr = InvokeViaPolicy("Clone", &result); - if ( FAILED(hr) ) - return hr; - - /* - ** Make sure we have the right kind of object: we should have some kind - ** of IUnknown subclass wrapped into a PyIUnknown instance. - */ - if ( !PyIBase::is_object(result, &PyIUnknown::type) ) - { - /* the wrong kind of object was returned to us */ - Py_DECREF(result); - return PyCom_SetCOMErrorFromSimple(E_FAIL, IID_IEnum%(enumtype)s); - } - - /* - ** Get the IUnknown out of the thing. note that the Python ob maintains - ** a reference, so we don't have to explicitly AddRef() here. 
- */ - IUnknown *punk = ((PyIUnknown *)result)->m_obj; - if ( !punk ) - { - /* damn. the object was released. */ - Py_DECREF(result); - return PyCom_SetCOMErrorFromSimple(E_FAIL, IID_IEnum%(enumtype)s); - } - - /* - ** Get the interface we want. note it is returned with a refcount. - ** This QI is actually going to instantiate a PyGEnum%(enumtype)s. - */ - hr = punk->QueryInterface(IID_IEnum%(enumtype)s, (LPVOID *)ppEnum); - - /* done with the result; this DECREF is also for */ - Py_DECREF(result); - - return PyCom_CheckIEnumNextResult(hr, IID_IEnum%(enumtype)s); -} -""" - % locals() - ) diff --git a/lib/win32com/makegw/makegwparse.py b/lib/win32com/makegw/makegwparse.py deleted file mode 100644 index 59512f48..00000000 --- a/lib/win32com/makegw/makegwparse.py +++ /dev/null @@ -1,1008 +0,0 @@ -"""Utilities for makegw - Parse a header file to build an interface - - This module contains the core code for parsing a header file describing a - COM interface, and building it into an "Interface" structure. - - Each Interface has methods, and each method has arguments. - - Each argument knows how to use Py_BuildValue or Py_ParseTuple to - exchange itself with Python. - - See the @win32com.makegw@ module for information in building a COM - interface -""" -import re -import traceback - - -class error_not_found(Exception): - def __init__(self, msg="The requested item could not be found"): - super(error_not_found, self).__init__(msg) - - -class error_not_supported(Exception): - def __init__(self, msg="The required functionality is not supported"): - super(error_not_supported, self).__init__(msg) - - -VERBOSE = 0 -DEBUG = 0 - -## NOTE : For interfaces as params to work correctly, you must -## make sure any PythonCOM extensions which expose the interface are loaded -## before generating. - - -class ArgFormatter: - """An instance for a specific type of argument. 
Knows how to convert itself""" - - def __init__(self, arg, builtinIndirection, declaredIndirection=0): - # print 'init:', arg.name, builtinIndirection, declaredIndirection, arg.indirectionLevel - self.arg = arg - self.builtinIndirection = builtinIndirection - self.declaredIndirection = declaredIndirection - self.gatewayMode = 0 - - def _IndirectPrefix(self, indirectionFrom, indirectionTo): - """Given the indirection level I was declared at (0=Normal, 1=*, 2=**) - return a string prefix so I can pass to a function with the - required indirection (where the default is the indirection of the method's param. - - eg, assuming my arg has indirection level of 2, if this function was passed 1 - it would return "&", so that a variable declared with indirection of 1 - can be prefixed with this to turn it into the indirection level required of 2 - """ - dif = indirectionFrom - indirectionTo - if dif == 0: - return "" - elif dif == -1: - return "&" - elif dif == 1: - return "*" - else: - return "?? (%d)" % (dif,) - raise error_not_supported("Can't indirect this far - please fix me :-)") - - def GetIndirectedArgName(self, indirectFrom, indirectionTo): - # print 'get:',self.arg.name, indirectFrom,self._GetDeclaredIndirection() + self.builtinIndirection, indirectionTo, self.arg.indirectionLevel - - if indirectFrom is None: - ### ACK! this does not account for [in][out] variables. 
- ### when this method is called, we need to know which - indirectFrom = self._GetDeclaredIndirection() + self.builtinIndirection - - return self._IndirectPrefix(indirectFrom, indirectionTo) + self.arg.name - - def GetBuildValueArg(self): - "Get the argument to be passes to Py_BuildValue" - return self.arg.name - - def GetParseTupleArg(self): - "Get the argument to be passed to PyArg_ParseTuple" - if self.gatewayMode: - # use whatever they were declared with - return self.GetIndirectedArgName(None, 1) - # local declarations have just their builtin indirection - return self.GetIndirectedArgName(self.builtinIndirection, 1) - - def GetInterfaceCppObjectInfo(self): - """Provide information about the C++ object used. - - Simple variables (such as integers) can declare their type (eg an integer) - and use it as the target of both PyArg_ParseTuple and the COM function itself. - - More complex types require a PyObject * declared as the target of PyArg_ParseTuple, - then some conversion routine to the C++ object which is actually passed to COM. - - This method provides the name, and optionally the type of that C++ variable. - If the type if provided, the caller will likely generate a variable declaration. - The name must always be returned. - - Result is a tuple of (variableName, [DeclareType|None|""]) - """ - - # the first return element is the variable to be passed as - # an argument to an interface method. the variable was - # declared with only its builtin indirection level. when - # we pass it, we'll need to pass in whatever amount of - # indirection was applied (plus the builtin amount) - # the second return element is the variable declaration; it - # should simply be builtin indirection - return self.GetIndirectedArgName( - self.builtinIndirection, self.arg.indirectionLevel + self.builtinIndirection - ), "%s %s" % (self.GetUnconstType(), self.arg.name) - - def GetInterfaceArgCleanup(self): - "Return cleanup code for C++ args passed to the interface method." 
- if DEBUG: - return "/* GetInterfaceArgCleanup output goes here: %s */\n" % self.arg.name - else: - return "" - - def GetInterfaceArgCleanupGIL(self): - """Return cleanup code for C++ args passed to the interface - method that must be executed with the GIL held""" - if DEBUG: - return ( - "/* GetInterfaceArgCleanup (GIL held) output goes here: %s */\n" - % self.arg.name - ) - else: - return "" - - def GetUnconstType(self): - return self.arg.unc_type - - def SetGatewayMode(self): - self.gatewayMode = 1 - - def _GetDeclaredIndirection(self): - return self.arg.indirectionLevel - print("declared:", self.arg.name, self.gatewayMode) - if self.gatewayMode: - return self.arg.indirectionLevel - else: - return self.declaredIndirection - - def DeclareParseArgTupleInputConverter(self): - "Declare the variable used as the PyArg_ParseTuple param for a gateway" - # Only declare it?? - # if self.arg.indirectionLevel==0: - # return "\t%s %s;\n" % (self.arg.type, self.arg.name) - # else: - if DEBUG: - return ( - "/* Declare ParseArgTupleInputConverter goes here: %s */\n" - % self.arg.name - ) - else: - return "" - - def GetParsePostCode(self): - "Get a string of C++ code to be executed after (ie, to finalise) the PyArg_ParseTuple conversion" - if DEBUG: - return "/* GetParsePostCode code goes here: %s */\n" % self.arg.name - else: - return "" - - def GetBuildForInterfacePreCode(self): - "Get a string of C++ code to be executed before (ie, to initialise) the Py_BuildValue conversion for Interfaces" - if DEBUG: - return "/* GetBuildForInterfacePreCode goes here: %s */\n" % self.arg.name - else: - return "" - - def GetBuildForGatewayPreCode(self): - "Get a string of C++ code to be executed before (ie, to initialise) the Py_BuildValue conversion for Gateways" - s = self.GetBuildForInterfacePreCode() # Usually the same - if DEBUG: - if s[:4] == "/* G": - s = "/* GetBuildForGatewayPreCode goes here: %s */\n" % self.arg.name - return s - - def GetBuildForInterfacePostCode(self): - "Get a 
string of C++ code to be executed after (ie, to finalise) the Py_BuildValue conversion for Interfaces" - if DEBUG: - return "/* GetBuildForInterfacePostCode goes here: %s */\n" % self.arg.name - return "" - - def GetBuildForGatewayPostCode(self): - "Get a string of C++ code to be executed after (ie, to finalise) the Py_BuildValue conversion for Gateways" - s = self.GetBuildForInterfacePostCode() # Usually the same - if DEBUG: - if s[:4] == "/* G": - s = "/* GetBuildForGatewayPostCode goes here: %s */\n" % self.arg.name - return s - - def GetAutoduckString(self): - return "// @pyparm %s|%s||Description for %s" % ( - self._GetPythonTypeDesc(), - self.arg.name, - self.arg.name, - ) - - def _GetPythonTypeDesc(self): - "Returns a string with the description of the type. Used for doco purposes" - return None - - def NeedUSES_CONVERSION(self): - "Determines if this arg forces a USES_CONVERSION macro" - return 0 - - -# Special formatter for floats since they're smaller than Python floats. -class ArgFormatterFloat(ArgFormatter): - def GetFormatChar(self): - return "f" - - def DeclareParseArgTupleInputConverter(self): - # Declare a double variable - return "\tdouble dbl%s;\n" % self.arg.name - - def GetParseTupleArg(self): - return "&dbl" + self.arg.name - - def _GetPythonTypeDesc(self): - return "float" - - def GetBuildValueArg(self): - return "&dbl" + self.arg.name - - def GetBuildForInterfacePreCode(self): - return "\tdbl" + self.arg.name + " = " + self.arg.name + ";\n" - - def GetBuildForGatewayPreCode(self): - return ( - "\tdbl%s = " % self.arg.name - + self._IndirectPrefix(self._GetDeclaredIndirection(), 0) - + self.arg.name - + ";\n" - ) - - def GetParsePostCode(self): - s = "\t" - if self.gatewayMode: - s = s + self._IndirectPrefix(self._GetDeclaredIndirection(), 0) - s = s + self.arg.name - s = s + " = (float)dbl%s;\n" % self.arg.name - return s - - -# Special formatter for Shorts because they're -# a different size than Python ints! 
-class ArgFormatterShort(ArgFormatter): - def GetFormatChar(self): - return "i" - - def DeclareParseArgTupleInputConverter(self): - # Declare a double variable - return "\tINT i%s;\n" % self.arg.name - - def GetParseTupleArg(self): - return "&i" + self.arg.name - - def _GetPythonTypeDesc(self): - return "int" - - def GetBuildValueArg(self): - return "&i" + self.arg.name - - def GetBuildForInterfacePreCode(self): - return "\ti" + self.arg.name + " = " + self.arg.name + ";\n" - - def GetBuildForGatewayPreCode(self): - return ( - "\ti%s = " % self.arg.name - + self._IndirectPrefix(self._GetDeclaredIndirection(), 0) - + self.arg.name - + ";\n" - ) - - def GetParsePostCode(self): - s = "\t" - if self.gatewayMode: - s = s + self._IndirectPrefix(self._GetDeclaredIndirection(), 0) - s = s + self.arg.name - s = s + " = i%s;\n" % self.arg.name - return s - - -# for types which are 64bits on AMD64 - eg, HWND -class ArgFormatterLONG_PTR(ArgFormatter): - def GetFormatChar(self): - return "O" - - def DeclareParseArgTupleInputConverter(self): - # Declare a PyObject variable - return "\tPyObject *ob%s;\n" % self.arg.name - - def GetParseTupleArg(self): - return "&ob" + self.arg.name - - def _GetPythonTypeDesc(self): - return "int/long" - - def GetBuildValueArg(self): - return "ob" + self.arg.name - - def GetBuildForInterfacePostCode(self): - return "\tPy_XDECREF(ob%s);\n" % self.arg.name - - def GetParsePostCode(self): - return ( - "\tif (bPythonIsHappy && !PyWinLong_AsULONG_PTR(ob%s, (ULONG_PTR *)%s)) bPythonIsHappy = FALSE;\n" - % (self.arg.name, self.GetIndirectedArgName(None, 2)) - ) - - def GetBuildForInterfacePreCode(self): - notdirected = self.GetIndirectedArgName(None, 1) - return "\tob%s = PyWinObject_FromULONG_PTR(%s);\n" % ( - self.arg.name, - notdirected, - ) - - def GetBuildForGatewayPostCode(self): - return "\tPy_XDECREF(ob%s);\n" % self.arg.name - - -class ArgFormatterPythonCOM(ArgFormatter): - """An arg formatter for types exposed in the PythonCOM module""" - - def 
GetFormatChar(self): - return "O" - - # def GetInterfaceCppObjectInfo(self): - # return ArgFormatter.GetInterfaceCppObjectInfo(self)[0], \ - # "%s %s%s" % (self.arg.unc_type, "*" * self._GetDeclaredIndirection(), self.arg.name) - def DeclareParseArgTupleInputConverter(self): - # Declare a PyObject variable - return "\tPyObject *ob%s;\n" % self.arg.name - - def GetParseTupleArg(self): - return "&ob" + self.arg.name - - def _GetPythonTypeDesc(self): - return "" % self.arg.type - - def GetBuildValueArg(self): - return "ob" + self.arg.name - - def GetBuildForInterfacePostCode(self): - return "\tPy_XDECREF(ob%s);\n" % self.arg.name - - -class ArgFormatterBSTR(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "" - - def GetParsePostCode(self): - return ( - "\tif (bPythonIsHappy && !PyWinObject_AsBstr(ob%s, %s)) bPythonIsHappy = FALSE;\n" - % (self.arg.name, self.GetIndirectedArgName(None, 2)) - ) - - def GetBuildForInterfacePreCode(self): - notdirected = self.GetIndirectedArgName(None, 1) - return "\tob%s = MakeBstrToObj(%s);\n" % (self.arg.name, notdirected) - - def GetBuildForInterfacePostCode(self): - return "\tSysFreeString(%s);\n" % ( - self.arg.name, - ) + ArgFormatterPythonCOM.GetBuildForInterfacePostCode(self) - - def GetBuildForGatewayPostCode(self): - return "\tPy_XDECREF(ob%s);\n" % self.arg.name - - -class ArgFormatterOLECHAR(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "" - - def GetUnconstType(self): - if self.arg.type[:3] == "LPC": - return self.arg.type[:2] + self.arg.type[3:] - else: - return self.arg.unc_type - - def GetParsePostCode(self): - return ( - "\tif (bPythonIsHappy && !PyWinObject_AsBstr(ob%s, %s)) bPythonIsHappy = FALSE;\n" - % (self.arg.name, self.GetIndirectedArgName(None, 2)) - ) - - def GetInterfaceArgCleanup(self): - return "\tSysFreeString(%s);\n" % self.GetIndirectedArgName(None, 1) - - def GetBuildForInterfacePreCode(self): - # the variable was declared with just its builtin indirection - notdirected 
= self.GetIndirectedArgName(self.builtinIndirection, 1) - return "\tob%s = MakeOLECHARToObj(%s);\n" % (self.arg.name, notdirected) - - def GetBuildForInterfacePostCode(self): - # memory returned into an OLECHAR should be freed - return "\tCoTaskMemFree(%s);\n" % ( - self.arg.name, - ) + ArgFormatterPythonCOM.GetBuildForInterfacePostCode(self) - - def GetBuildForGatewayPostCode(self): - return "\tPy_XDECREF(ob%s);\n" % self.arg.name - - -class ArgFormatterTCHAR(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "string/" - - def GetUnconstType(self): - if self.arg.type[:3] == "LPC": - return self.arg.type[:2] + self.arg.type[3:] - else: - return self.arg.unc_type - - def GetParsePostCode(self): - return ( - "\tif (bPythonIsHappy && !PyWinObject_AsTCHAR(ob%s, %s)) bPythonIsHappy = FALSE;\n" - % (self.arg.name, self.GetIndirectedArgName(None, 2)) - ) - - def GetInterfaceArgCleanup(self): - return "\tPyWinObject_FreeTCHAR(%s);\n" % self.GetIndirectedArgName(None, 1) - - def GetBuildForInterfacePreCode(self): - # the variable was declared with just its builtin indirection - notdirected = self.GetIndirectedArgName(self.builtinIndirection, 1) - return "\tob%s = PyWinObject_FromTCHAR(%s);\n" % (self.arg.name, notdirected) - - def GetBuildForInterfacePostCode(self): - return "// ??? 
- TCHAR post code\n" - - def GetBuildForGatewayPostCode(self): - return "\tPy_XDECREF(ob%s);\n" % self.arg.name - - -class ArgFormatterIID(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "" - - def GetParsePostCode(self): - return "\tif (!PyWinObject_AsIID(ob%s, &%s)) bPythonIsHappy = FALSE;\n" % ( - self.arg.name, - self.arg.name, - ) - - def GetBuildForInterfacePreCode(self): - # notdirected = self.GetIndirectedArgName(self.arg.indirectionLevel, 0) - notdirected = self.GetIndirectedArgName(None, 0) - return "\tob%s = PyWinObject_FromIID(%s);\n" % (self.arg.name, notdirected) - - def GetInterfaceCppObjectInfo(self): - return self.arg.name, "IID %s" % (self.arg.name) - - -class ArgFormatterTime(ArgFormatterPythonCOM): - def __init__(self, arg, builtinIndirection, declaredIndirection=0): - # we don't want to declare LPSYSTEMTIME / LPFILETIME objects - if arg.indirectionLevel == 0 and arg.unc_type[:2] == "LP": - arg.unc_type = arg.unc_type[2:] - # reduce the builtin and increment the declaration - arg.indirectionLevel = arg.indirectionLevel + 1 - builtinIndirection = 0 - ArgFormatterPythonCOM.__init__( - self, arg, builtinIndirection, declaredIndirection - ) - - def _GetPythonTypeDesc(self): - return "" - - def GetParsePostCode(self): - # variable was declared with only the builtinIndirection - ### NOTE: this is an [in] ... so use only builtin - return ( - '\tif (!PyTime_Check(ob%s)) {\n\t\tPyErr_SetString(PyExc_TypeError, "The argument must be a PyTime object");\n\t\tbPythonIsHappy = FALSE;\n\t}\n\tif (!((PyTime *)ob%s)->GetTime(%s)) bPythonIsHappy = FALSE;\n' - % ( - self.arg.name, - self.arg.name, - self.GetIndirectedArgName(self.builtinIndirection, 1), - ) - ) - - def GetBuildForInterfacePreCode(self): - ### use just the builtinIndirection again... 
- notdirected = self.GetIndirectedArgName(self.builtinIndirection, 0) - return "\tob%s = new PyTime(%s);\n" % (self.arg.name, notdirected) - - def GetBuildForInterfacePostCode(self): - ### hack to determine if we need to free stuff - ret = "" - if self.builtinIndirection + self.arg.indirectionLevel > 1: - # memory returned into an OLECHAR should be freed - ret = "\tCoTaskMemFree(%s);\n" % self.arg.name - return ret + ArgFormatterPythonCOM.GetBuildForInterfacePostCode(self) - - -class ArgFormatterSTATSTG(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "" - - def GetParsePostCode(self): - return ( - "\tif (!PyCom_PyObjectAsSTATSTG(ob%s, %s, 0/*flags*/)) bPythonIsHappy = FALSE;\n" - % (self.arg.name, self.GetIndirectedArgName(None, 1)) - ) - - def GetBuildForInterfacePreCode(self): - notdirected = self.GetIndirectedArgName(None, 1) - return ( - "\tob%s = PyCom_PyObjectFromSTATSTG(%s);\n\t// STATSTG doco says our responsibility to free\n\tif ((%s).pwcsName) CoTaskMemFree((%s).pwcsName);\n" - % ( - self.arg.name, - self.GetIndirectedArgName(None, 1), - notdirected, - notdirected, - ) - ) - - -class ArgFormatterGeneric(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "" % self.arg.type - - def GetParsePostCode(self): - return "\tif (!PyObject_As%s(ob%s, &%s) bPythonIsHappy = FALSE;\n" % ( - self.arg.type, - self.arg.name, - self.GetIndirectedArgName(None, 1), - ) - - def GetInterfaceArgCleanup(self): - return "\tPyObject_Free%s(%s);\n" % (self.arg.type, self.arg.name) - - def GetBuildForInterfacePreCode(self): - notdirected = self.GetIndirectedArgName(None, 1) - return "\tob%s = PyObject_From%s(%s);\n" % ( - self.arg.name, - self.arg.type, - self.GetIndirectedArgName(None, 1), - ) - - -class ArgFormatterIDLIST(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "" - - def GetParsePostCode(self): - return ( - "\tif (bPythonIsHappy && !PyObject_AsPIDL(ob%s, &%s)) bPythonIsHappy = FALSE;\n" - % (self.arg.name, 
self.GetIndirectedArgName(None, 1)) - ) - - def GetInterfaceArgCleanup(self): - return "\tPyObject_FreePIDL(%s);\n" % (self.arg.name,) - - def GetBuildForInterfacePreCode(self): - notdirected = self.GetIndirectedArgName(None, 1) - return "\tob%s = PyObject_FromPIDL(%s);\n" % ( - self.arg.name, - self.GetIndirectedArgName(None, 1), - ) - - -class ArgFormatterHANDLE(ArgFormatterPythonCOM): - def _GetPythonTypeDesc(self): - return "" - - def GetParsePostCode(self): - return ( - "\tif (!PyWinObject_AsHANDLE(ob%s, &%s, FALSE) bPythonIsHappy = FALSE;\n" - % (self.arg.name, self.GetIndirectedArgName(None, 1)) - ) - - def GetBuildForInterfacePreCode(self): - notdirected = self.GetIndirectedArgName(None, 1) - return "\tob%s = PyWinObject_FromHANDLE(%s);\n" % ( - self.arg.name, - self.GetIndirectedArgName(None, 0), - ) - - -class ArgFormatterLARGE_INTEGER(ArgFormatterPythonCOM): - def GetKeyName(self): - return "LARGE_INTEGER" - - def _GetPythonTypeDesc(self): - return "" % self.GetKeyName() - - def GetParsePostCode(self): - return "\tif (!PyWinObject_As%s(ob%s, %s)) bPythonIsHappy = FALSE;\n" % ( - self.GetKeyName(), - self.arg.name, - self.GetIndirectedArgName(None, 1), - ) - - def GetBuildForInterfacePreCode(self): - notdirected = self.GetIndirectedArgName(None, 0) - return "\tob%s = PyWinObject_From%s(%s);\n" % ( - self.arg.name, - self.GetKeyName(), - notdirected, - ) - - -class ArgFormatterULARGE_INTEGER(ArgFormatterLARGE_INTEGER): - def GetKeyName(self): - return "ULARGE_INTEGER" - - -class ArgFormatterInterface(ArgFormatterPythonCOM): - def GetInterfaceCppObjectInfo(self): - return self.GetIndirectedArgName(1, self.arg.indirectionLevel), "%s * %s" % ( - self.GetUnconstType(), - self.arg.name, - ) - - def GetParsePostCode(self): - # This gets called for out params in gateway mode - if self.gatewayMode: - sArg = self.GetIndirectedArgName(None, 2) - else: - # vs. in params for interface mode. 
- sArg = self.GetIndirectedArgName(1, 2) - return ( - "\tif (bPythonIsHappy && !PyCom_InterfaceFromPyInstanceOrObject(ob%s, IID_%s, (void **)%s, TRUE /* bNoneOK */))\n\t\t bPythonIsHappy = FALSE;\n" - % (self.arg.name, self.arg.type, sArg) - ) - - def GetBuildForInterfacePreCode(self): - return "\tob%s = PyCom_PyObjectFromIUnknown(%s, IID_%s, FALSE);\n" % ( - self.arg.name, - self.arg.name, - self.arg.type, - ) - - def GetBuildForGatewayPreCode(self): - sPrefix = self._IndirectPrefix(self._GetDeclaredIndirection(), 1) - return "\tob%s = PyCom_PyObjectFromIUnknown(%s%s, IID_%s, TRUE);\n" % ( - self.arg.name, - sPrefix, - self.arg.name, - self.arg.type, - ) - - def GetInterfaceArgCleanup(self): - return "\tif (%s) %s->Release();\n" % (self.arg.name, self.arg.name) - - -class ArgFormatterVARIANT(ArgFormatterPythonCOM): - def GetParsePostCode(self): - return ( - "\tif ( !PyCom_VariantFromPyObject(ob%s, %s) )\n\t\tbPythonIsHappy = FALSE;\n" - % (self.arg.name, self.GetIndirectedArgName(None, 1)) - ) - - def GetBuildForGatewayPreCode(self): - notdirected = self.GetIndirectedArgName(None, 1) - return "\tob%s = PyCom_PyObjectFromVariant(%s);\n" % ( - self.arg.name, - notdirected, - ) - - def GetBuildForGatewayPostCode(self): - return "\tPy_XDECREF(ob%s);\n" % self.arg.name - - # Key : , Python Type Description, ParseTuple format char - - -ConvertSimpleTypes = { - "BOOL": ("BOOL", "int", "i"), - "UINT": ("UINT", "int", "i"), - "BYTE": ("BYTE", "int", "i"), - "INT": ("INT", "int", "i"), - "DWORD": ("DWORD", "int", "l"), - "HRESULT": ("HRESULT", "int", "l"), - "ULONG": ("ULONG", "int", "l"), - "LONG": ("LONG", "int", "l"), - "int": ("int", "int", "i"), - "long": ("long", "int", "l"), - "DISPID": ("DISPID", "long", "l"), - "APPBREAKFLAGS": ("int", "int", "i"), - "BREAKRESUMEACTION": ("int", "int", "i"), - "ERRORRESUMEACTION": ("int", "int", "i"), - "BREAKREASON": ("int", "int", "i"), - "BREAKPOINT_STATE": ("int", "int", "i"), - "BREAKRESUME_ACTION": ("int", "int", "i"), - 
"SOURCE_TEXT_ATTR": ("int", "int", "i"), - "TEXT_DOC_ATTR": ("int", "int", "i"), - "QUERYOPTION": ("int", "int", "i"), - "PARSEACTION": ("int", "int", "i"), -} - - -class ArgFormatterSimple(ArgFormatter): - """An arg formatter for simple integer etc types""" - - def GetFormatChar(self): - return ConvertSimpleTypes[self.arg.type][2] - - def _GetPythonTypeDesc(self): - return ConvertSimpleTypes[self.arg.type][1] - - -AllConverters = { - "const OLECHAR": (ArgFormatterOLECHAR, 0, 1), - "WCHAR": (ArgFormatterOLECHAR, 0, 1), - "OLECHAR": (ArgFormatterOLECHAR, 0, 1), - "LPCOLESTR": (ArgFormatterOLECHAR, 1, 1), - "LPOLESTR": (ArgFormatterOLECHAR, 1, 1), - "LPCWSTR": (ArgFormatterOLECHAR, 1, 1), - "LPWSTR": (ArgFormatterOLECHAR, 1, 1), - "LPCSTR": (ArgFormatterOLECHAR, 1, 1), - "LPTSTR": (ArgFormatterTCHAR, 1, 1), - "LPCTSTR": (ArgFormatterTCHAR, 1, 1), - "HANDLE": (ArgFormatterHANDLE, 0), - "BSTR": (ArgFormatterBSTR, 1, 0), - "const IID": (ArgFormatterIID, 0), - "CLSID": (ArgFormatterIID, 0), - "IID": (ArgFormatterIID, 0), - "GUID": (ArgFormatterIID, 0), - "const GUID": (ArgFormatterIID, 0), - "const IID": (ArgFormatterIID, 0), - "REFCLSID": (ArgFormatterIID, 0), - "REFIID": (ArgFormatterIID, 0), - "REFGUID": (ArgFormatterIID, 0), - "const FILETIME": (ArgFormatterTime, 0), - "const SYSTEMTIME": (ArgFormatterTime, 0), - "const LPSYSTEMTIME": (ArgFormatterTime, 1, 1), - "LPSYSTEMTIME": (ArgFormatterTime, 1, 1), - "FILETIME": (ArgFormatterTime, 0), - "SYSTEMTIME": (ArgFormatterTime, 0), - "STATSTG": (ArgFormatterSTATSTG, 0), - "LARGE_INTEGER": (ArgFormatterLARGE_INTEGER, 0), - "ULARGE_INTEGER": (ArgFormatterULARGE_INTEGER, 0), - "VARIANT": (ArgFormatterVARIANT, 0), - "float": (ArgFormatterFloat, 0), - "single": (ArgFormatterFloat, 0), - "short": (ArgFormatterShort, 0), - "WORD": (ArgFormatterShort, 0), - "VARIANT_BOOL": (ArgFormatterShort, 0), - "HWND": (ArgFormatterLONG_PTR, 1), - "HMENU": (ArgFormatterLONG_PTR, 1), - "HOLEMENU": (ArgFormatterLONG_PTR, 1), - "HICON": 
(ArgFormatterLONG_PTR, 1), - "HDC": (ArgFormatterLONG_PTR, 1), - "LPARAM": (ArgFormatterLONG_PTR, 1), - "WPARAM": (ArgFormatterLONG_PTR, 1), - "LRESULT": (ArgFormatterLONG_PTR, 1), - "UINT": (ArgFormatterShort, 0), - "SVSIF": (ArgFormatterShort, 0), - "Control": (ArgFormatterInterface, 0, 1), - "DataObject": (ArgFormatterInterface, 0, 1), - "_PropertyBag": (ArgFormatterInterface, 0, 1), - "AsyncProp": (ArgFormatterInterface, 0, 1), - "DataSource": (ArgFormatterInterface, 0, 1), - "DataFormat": (ArgFormatterInterface, 0, 1), - "void **": (ArgFormatterInterface, 2, 2), - "ITEMIDLIST": (ArgFormatterIDLIST, 0, 0), - "LPITEMIDLIST": (ArgFormatterIDLIST, 0, 1), - "LPCITEMIDLIST": (ArgFormatterIDLIST, 0, 1), - "const ITEMIDLIST": (ArgFormatterIDLIST, 0, 1), -} - -# Auto-add all the simple types -for key in ConvertSimpleTypes.keys(): - AllConverters[key] = ArgFormatterSimple, 0 - - -def make_arg_converter(arg): - try: - clz = AllConverters[arg.type][0] - bin = AllConverters[arg.type][1] - decl = 0 - if len(AllConverters[arg.type]) > 2: - decl = AllConverters[arg.type][2] - return clz(arg, bin, decl) - except KeyError: - if arg.type[0] == "I": - return ArgFormatterInterface(arg, 0, 1) - - raise error_not_supported( - "The type '%s' (%s) is unknown." % (arg.type, arg.name) - ) - - -############################################################# -# -# The instances that represent the args, methods and interface -class Argument: - """A representation of an argument to a COM method - - This class contains information about a specific argument to a method. - In addition, methods exist so that an argument knows how to convert itself - to/from Python arguments. 
- """ - - # in,out type name [ ] - # -------------- -------- ------------ ------ - regex = re.compile(r"/\* \[([^\]]*.*?)] \*/[ \t](.*[* ]+)(\w+)(\[ *])?[\),]") - - def __init__(self, good_interface_names): - self.good_interface_names = good_interface_names - self.inout = self.name = self.type = None - self.const = 0 - self.arrayDecl = 0 - - def BuildFromFile(self, file): - """Parse and build my data from a file - - Reads the next line in the file, and matches it as an argument - description. If not a valid argument line, an error_not_found exception - is raised. - """ - line = file.readline() - mo = self.regex.search(line) - if not mo: - raise error_not_found - self.name = mo.group(3) - self.inout = mo.group(1).split("][") - typ = mo.group(2).strip() - self.raw_type = typ - self.indirectionLevel = 0 - if mo.group(4): # Has "[ ]" decl - self.arrayDecl = 1 - try: - pos = typ.rindex("__RPC_FAR") - self.indirectionLevel = self.indirectionLevel + 1 - typ = typ[:pos].strip() - except ValueError: - pass - - typ = typ.replace("__RPC_FAR", "") - while 1: - try: - pos = typ.rindex("*") - self.indirectionLevel = self.indirectionLevel + 1 - typ = typ[:pos].strip() - except ValueError: - break - self.type = typ - if self.type[:6] == "const ": - self.unc_type = self.type[6:] - else: - self.unc_type = self.type - - if VERBOSE: - print( - " Arg %s of type %s%s (%s)" - % (self.name, self.type, "*" * self.indirectionLevel, self.inout) - ) - - def HasAttribute(self, typ): - """Determines if the argument has the specific attribute. - - Argument attributes are specified in the header file, such as - "[in][out][retval]" etc. 
You can pass a specific string (eg "out") - to find if this attribute was specified for the argument - """ - return typ in self.inout - - def GetRawDeclaration(self): - ret = "%s %s" % (self.raw_type, self.name) - if self.arrayDecl: - ret = ret + "[]" - return ret - - -class Method: - """A representation of a C++ method on a COM interface - - This class contains information about a specific method, as well as - a list of all @Argument@s - """ - - # options ret type callconv name - # ----------------- -------- -------- -------- - regex = re.compile(r"virtual (/\*.*?\*/ )?(.*?) (.*?) (.*?)\(\w?") - - def __init__(self, good_interface_names): - self.good_interface_names = good_interface_names - self.name = self.result = self.callconv = None - self.args = [] - - def BuildFromFile(self, file): - """Parse and build my data from a file - - Reads the next line in the file, and matches it as a method - description. If not a valid method line, an error_not_found exception - is raised. - """ - line = file.readline() - mo = self.regex.search(line) - if not mo: - raise error_not_found - self.name = mo.group(4) - self.result = mo.group(2) - if self.result != "HRESULT": - if self.result == "DWORD": # DWORD is for old old stuff? - print( - "Warning: Old style interface detected - compilation errors likely!" - ) - else: - print( - "Method %s - Only HRESULT return types are supported." 
% self.name - ) - # raise error_not_supported, if VERBOSE: - print(" Method %s %s(" % (self.result, self.name)) - while 1: - arg = Argument(self.good_interface_names) - try: - arg.BuildFromFile(file) - self.args.append(arg) - except error_not_found: - break - - -class Interface: - """A representation of a C++ COM Interface - - This class contains information about a specific interface, as well as - a list of all @Method@s - """ - - # name base - # -------- -------- - regex = re.compile("(interface|) ([^ ]*) : public (.*)$") - - def __init__(self, mo): - self.methods = [] - self.name = mo.group(2) - self.base = mo.group(3) - if VERBOSE: - print("Interface %s : public %s" % (self.name, self.base)) - - def BuildMethods(self, file): - """Build all sub-methods for this interface""" - # skip the next 2 lines. - file.readline() - file.readline() - while 1: - try: - method = Method([self.name]) - method.BuildFromFile(file) - self.methods.append(method) - except error_not_found: - break - - -def find_interface(interfaceName, file): - """Find and return an interface in a file - - Given an interface name and file, search for the specified interface. - - Upon return, the interface itself has been built, - but not the methods. - """ - interface = None - line = file.readline() - while line: - mo = Interface.regex.search(line) - if mo: - name = mo.group(2) - print(name) - AllConverters[name] = (ArgFormatterInterface, 0, 1) - if name == interfaceName: - interface = Interface(mo) - interface.BuildMethods(file) - line = file.readline() - if interface: - return interface - raise error_not_found - - -def parse_interface_info(interfaceName, file): - """Find, parse and return an interface in a file - - Given an interface name and file, search for the specified interface. 
- - Upon return, the interface itself is fully built, - """ - try: - return find_interface(interfaceName, file) - except re.error: - traceback.print_exc() - print("The interface could not be built, as the regular expression failed!") - - -def test(): - f = open("d:\\msdev\\include\\objidl.h") - try: - parse_interface_info("IPersistStream", f) - finally: - f.close() - - -def test_regex(r, text): - res = r.search(text, 0) - if res == -1: - print("** Not found") - else: - print( - "%d\n%s\n%s\n%s\n%s" % (res, r.group(1), r.group(2), r.group(3), r.group(4)) - ) diff --git a/lib/win32com/olectl.py b/lib/win32com/olectl.py deleted file mode 100644 index 50ec6f74..00000000 --- a/lib/win32com/olectl.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Constants used by COM Controls - - Hand created version of OLECTL.H constants. -""" - -import winerror - -FACILITY_CONTROL = 0xA - - -def MAKE_SCODE(sev, fac, code): - return int((int(-sev) << 31) | ((fac) << 16) | ((code))) - - -def STD_CTL_SCODE(n): - return MAKE_SCODE(winerror.SEVERITY_ERROR, FACILITY_CONTROL, n) - - -CTL_E_ILLEGALFUNCTIONCALL = STD_CTL_SCODE(5) -CTL_E_OVERFLOW = STD_CTL_SCODE(6) -CTL_E_OUTOFMEMORY = STD_CTL_SCODE(7) -CTL_E_DIVISIONBYZERO = STD_CTL_SCODE(11) -CTL_E_OUTOFSTRINGSPACE = STD_CTL_SCODE(14) -CTL_E_OUTOFSTACKSPACE = STD_CTL_SCODE(28) -CTL_E_BADFILENAMEORNUMBER = STD_CTL_SCODE(52) -CTL_E_FILENOTFOUND = STD_CTL_SCODE(53) -CTL_E_BADFILEMODE = STD_CTL_SCODE(54) -CTL_E_FILEALREADYOPEN = STD_CTL_SCODE(55) -CTL_E_DEVICEIOERROR = STD_CTL_SCODE(57) -CTL_E_FILEALREADYEXISTS = STD_CTL_SCODE(58) -CTL_E_BADRECORDLENGTH = STD_CTL_SCODE(59) -CTL_E_DISKFULL = STD_CTL_SCODE(61) -CTL_E_BADRECORDNUMBER = STD_CTL_SCODE(63) -CTL_E_BADFILENAME = STD_CTL_SCODE(64) -CTL_E_TOOMANYFILES = STD_CTL_SCODE(67) -CTL_E_DEVICEUNAVAILABLE = STD_CTL_SCODE(68) -CTL_E_PERMISSIONDENIED = STD_CTL_SCODE(70) -CTL_E_DISKNOTREADY = STD_CTL_SCODE(71) -CTL_E_PATHFILEACCESSERROR = STD_CTL_SCODE(75) -CTL_E_PATHNOTFOUND = STD_CTL_SCODE(76) 
-CTL_E_INVALIDPATTERNSTRING = STD_CTL_SCODE(93) -CTL_E_INVALIDUSEOFNULL = STD_CTL_SCODE(94) -CTL_E_INVALIDFILEFORMAT = STD_CTL_SCODE(321) -CTL_E_INVALIDPROPERTYVALUE = STD_CTL_SCODE(380) -CTL_E_INVALIDPROPERTYARRAYINDEX = STD_CTL_SCODE(381) -CTL_E_SETNOTSUPPORTEDATRUNTIME = STD_CTL_SCODE(382) -CTL_E_SETNOTSUPPORTED = STD_CTL_SCODE(383) -CTL_E_NEEDPROPERTYARRAYINDEX = STD_CTL_SCODE(385) -CTL_E_SETNOTPERMITTED = STD_CTL_SCODE(387) -CTL_E_GETNOTSUPPORTEDATRUNTIME = STD_CTL_SCODE(393) -CTL_E_GETNOTSUPPORTED = STD_CTL_SCODE(394) -CTL_E_PROPERTYNOTFOUND = STD_CTL_SCODE(422) -CTL_E_INVALIDCLIPBOARDFORMAT = STD_CTL_SCODE(460) -CTL_E_INVALIDPICTURE = STD_CTL_SCODE(481) -CTL_E_PRINTERERROR = STD_CTL_SCODE(482) -CTL_E_CANTSAVEFILETOTEMP = STD_CTL_SCODE(735) -CTL_E_SEARCHTEXTNOTFOUND = STD_CTL_SCODE(744) -CTL_E_REPLACEMENTSTOOLONG = STD_CTL_SCODE(746) - -CONNECT_E_FIRST = MAKE_SCODE(winerror.SEVERITY_ERROR, winerror.FACILITY_ITF, 0x0200) -CONNECT_E_LAST = MAKE_SCODE(winerror.SEVERITY_ERROR, winerror.FACILITY_ITF, 0x020F) -CONNECT_S_FIRST = MAKE_SCODE(winerror.SEVERITY_SUCCESS, winerror.FACILITY_ITF, 0x0200) -CONNECT_S_LAST = MAKE_SCODE(winerror.SEVERITY_SUCCESS, winerror.FACILITY_ITF, 0x020F) - -CONNECT_E_NOCONNECTION = CONNECT_E_FIRST + 0 -CONNECT_E_ADVISELIMIT = CONNECT_E_FIRST + 1 -CONNECT_E_CANNOTCONNECT = CONNECT_E_FIRST + 2 -CONNECT_E_OVERRIDDEN = CONNECT_E_FIRST + 3 - -CLASS_E_NOTLICENSED = winerror.CLASSFACTORY_E_FIRST + 2 diff --git a/lib/win32com/readme.html b/lib/win32com/readme.html deleted file mode 100644 index 3df75340..00000000 --- a/lib/win32com/readme.html +++ /dev/null @@ -1,87 +0,0 @@ - - - - win32com Readme - - - -

Python and COM - Blowing the others away

- -

Python COM Extensions Readme

- -

This is the readme for win32com. Please check out the win32com documentation index

- -

The win32com/test directory contains some interesting - scripts (and a new readme.txt). Although these - are used for testing, they do show a variety of COM techniques.

- -

VARIANT objects

-

win32com.client now has explicit VARIANT objects which can be used in -situations where you need more control over the argument types passed when -calling COM methods. See the documentation on -this object - -

Important Currency changes

-

-In all builds prior to 204, a COM currency value was returned as a tuple of -integers. Working with 2 integers to represent a currency object was a poor -choice, but the alternative was never clear. Now Python ships with the -decimal -module, the alternative has arrived! -

-

-Up until build 212, code could set pythoncom.__future_currency__ = True -to force use of the decimal module, with a warning issued otherwise. In -builds 213 and later, the decimal module is unconditionally used when -pythoncon returns you a currency value. -

- -

Recent Changes

- -

Lots of internal changes on the road to py3k

- -

win32com.axcontrol and win2con.internet

-Many more interfaces for hosting AX controls and the interfaces -used by Internet Explorer. - -

win32com.shell

-The shell interfaces have undergone a number of enhancements and changes. -A couple of methods have changed signature between the first build with shell support (200) and later builds. -SHGetFileInfo was broken in its result handling, so had to be changed - this -is the only function used by the samples that changed, but others not used by the samples also have changed. -These shell interfaces are now generally stable. -

New win32com.taskscheduler module

-Roger Upole has contributed an interface to the Windows task scheduler. This is actually very neat, and it allows -Python to edit the task list as shown by Windows Control Panel. Property page suppport may even appear later, -now that the win32 library has the new win32rcparser module. -

ActiveX Scripting

- -

Python only supports "trusted" execution hosts - thus, it will no longer work -as an engine inside IE (Python itself no longer has a restricted execution environment). -Python continues to work fine as an Active Scripting Engine in all other -applications, including Windows Scripting Host, and ASP. - -

There is also support for Python as an ActiveX Scripting Host.

- -

Active Debugging seems to be fully functional.

- -

Older stuff

-
    - -
  • Unexpected exceptions in Python COM objects will generally now dump -the exception and traceback to stdout.  This is useful for debugging -and testing - it means that in some cases there will be no need to register -an object with --debug to see these -tracebacks.  Note that COM objects used by server processes (such as -ASP) generally have no valid stdout, so will still need to use --debug as usual.
    -
  • -
  • universal gateway support has been improved - we can now work as an -Outlook Addin
    -
  • - - - diff --git a/lib/win32com/server/__init__.py b/lib/win32com/server/__init__.py deleted file mode 100644 index 3348fc9f..00000000 --- a/lib/win32com/server/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Empty __init__ file to designate a sub-package. diff --git a/lib/win32com/server/connect.py b/lib/win32com/server/connect.py deleted file mode 100644 index 5c9a2dfe..00000000 --- a/lib/win32com/server/connect.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Utilities for Server Side connections. - - A collection of helpers for server side connection points. -""" -import pythoncom -import win32com.server.util -import winerror -from win32com import olectl - -from .exception import Exception - -# Methods implemented by the interfaces. -IConnectionPointContainer_methods = ["EnumConnectionPoints", "FindConnectionPoint"] -IConnectionPoint_methods = [ - "EnumConnections", - "Unadvise", - "Advise", - "GetConnectionPointContainer", - "GetConnectionInterface", -] - - -class ConnectableServer: - _public_methods_ = IConnectionPointContainer_methods + IConnectionPoint_methods - _com_interfaces_ = [ - pythoncom.IID_IConnectionPoint, - pythoncom.IID_IConnectionPointContainer, - ] - - # Clients must set _connect_interfaces_ = [...] - def __init__(self): - self.cookieNo = 0 - self.connections = {} - - # IConnectionPoint interfaces - def EnumConnections(self): - raise Exception(winerror.E_NOTIMPL) - - def GetConnectionInterface(self): - raise Exception(winerror.E_NOTIMPL) - - def GetConnectionPointContainer(self): - return win32com.server.util.wrap(self) - - def Advise(self, pUnk): - # Creates a connection to the client. Simply allocate a new cookie, - # find the clients interface, and store it in a dictionary. 
- try: - interface = pUnk.QueryInterface( - self._connect_interfaces_[0], pythoncom.IID_IDispatch - ) - except pythoncom.com_error: - raise Exception(scode=olectl.CONNECT_E_NOCONNECTION) - self.cookieNo = self.cookieNo + 1 - self.connections[self.cookieNo] = interface - return self.cookieNo - - def Unadvise(self, cookie): - # Destroy a connection - simply delete interface from the map. - try: - del self.connections[cookie] - except KeyError: - raise Exception(scode=winerror.E_UNEXPECTED) - - # IConnectionPointContainer interfaces - def EnumConnectionPoints(self): - raise Exception(winerror.E_NOTIMPL) - - def FindConnectionPoint(self, iid): - # Find a connection we support. Only support the single event interface. - if iid in self._connect_interfaces_: - return win32com.server.util.wrap(self) - - def _BroadcastNotify(self, broadcaster, extraArgs): - # Broadcasts a notification to all connections. - # Ignores clients that fail. - for interface in self.connections.values(): - try: - broadcaster(*(interface,) + extraArgs) - except pythoncom.com_error as details: - self._OnNotifyFail(interface, details) - - def _OnNotifyFail(self, interface, details): - print("Ignoring COM error to connection - %s" % (repr(details))) diff --git a/lib/win32com/server/dispatcher.py b/lib/win32com/server/dispatcher.py deleted file mode 100644 index ac983125..00000000 --- a/lib/win32com/server/dispatcher.py +++ /dev/null @@ -1,291 +0,0 @@ -"""Dispatcher - -Please see policy.py for a discussion on dispatchers and policies -""" -import traceback -from sys import exc_info - -import pythoncom -import win32api -import win32com - -# -from win32com.server.exception import IsCOMServerException -from win32com.util import IIDToInterfaceName - - -class DispatcherBase: - """The base class for all Dispatchers. - - This dispatcher supports wrapping all operations in exception handlers, - and all the necessary delegation to the policy. - - This base class supports the printing of "unexpected" exceptions. 
Note, however, - that exactly where the output of print goes may not be useful! A derived class may - provide additional semantics for this. - """ - - def __init__(self, policyClass, object): - self.policy = policyClass(object) - # The logger we should dump to. If None, we should send to the - # default location (typically 'print') - self.logger = getattr(win32com, "logger", None) - - # Note the "return self._HandleException_()" is purely to stop pychecker - # complaining - _HandleException_ will itself raise an exception for the - # pythoncom framework, so the result will never be seen. - def _CreateInstance_(self, clsid, reqIID): - try: - self.policy._CreateInstance_(clsid, reqIID) - return pythoncom.WrapObject(self, reqIID) - except: - return self._HandleException_() - - def _QueryInterface_(self, iid): - try: - return self.policy._QueryInterface_(iid) - except: - return self._HandleException_() - - def _Invoke_(self, dispid, lcid, wFlags, args): - try: - return self.policy._Invoke_(dispid, lcid, wFlags, args) - except: - return self._HandleException_() - - def _GetIDsOfNames_(self, names, lcid): - try: - return self.policy._GetIDsOfNames_(names, lcid) - except: - return self._HandleException_() - - def _GetTypeInfo_(self, index, lcid): - try: - return self.policy._GetTypeInfo_(index, lcid) - except: - return self._HandleException_() - - def _GetTypeInfoCount_(self): - try: - return self.policy._GetTypeInfoCount_() - except: - return self._HandleException_() - - def _GetDispID_(self, name, fdex): - try: - return self.policy._GetDispID_(name, fdex) - except: - return self._HandleException_() - - def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): - try: - return self.policy._InvokeEx_( - dispid, lcid, wFlags, args, kwargs, serviceProvider - ) - except: - return self._HandleException_() - - def _DeleteMemberByName_(self, name, fdex): - try: - return self.policy._DeleteMemberByName_(name, fdex) - except: - return self._HandleException_() - - 
def _DeleteMemberByDispID_(self, id): - try: - return self.policy._DeleteMemberByDispID_(id) - except: - return self._HandleException_() - - def _GetMemberProperties_(self, id, fdex): - try: - return self.policy._GetMemberProperties_(id, fdex) - except: - return self._HandleException_() - - def _GetMemberName_(self, dispid): - try: - return self.policy._GetMemberName_(dispid) - except: - return self._HandleException_() - - def _GetNextDispID_(self, fdex, flags): - try: - return self.policy._GetNextDispID_(fdex, flags) - except: - return self._HandleException_() - - def _GetNameSpaceParent_(self): - try: - return self.policy._GetNameSpaceParent_() - except: - return self._HandleException_() - - def _HandleException_(self): - """Called whenever an exception is raised. - - Default behaviour is to print the exception. - """ - # If not a COM exception, print it for the developer. - if not IsCOMServerException(): - if self.logger is not None: - self.logger.exception("pythoncom server error") - else: - traceback.print_exc() - # But still raise it for the framework. 
- raise - - def _trace_(self, *args): - if self.logger is not None: - record = " ".join(map(str, args)) - self.logger.debug(record) - else: - for arg in args[:-1]: - print(arg, end=" ") - print(args[-1]) - - -class DispatcherTrace(DispatcherBase): - """A dispatcher, which causes a 'print' line for each COM function called.""" - - def _QueryInterface_(self, iid): - rc = DispatcherBase._QueryInterface_(self, iid) - if not rc: - self._trace_( - "in %s._QueryInterface_ with unsupported IID %s (%s)" - % (repr(self.policy._obj_), IIDToInterfaceName(iid), iid) - ) - return rc - - def _GetIDsOfNames_(self, names, lcid): - self._trace_("in _GetIDsOfNames_ with '%s' and '%d'\n" % (names, lcid)) - return DispatcherBase._GetIDsOfNames_(self, names, lcid) - - def _GetTypeInfo_(self, index, lcid): - self._trace_("in _GetTypeInfo_ with index=%d, lcid=%d\n" % (index, lcid)) - return DispatcherBase._GetTypeInfo_(self, index, lcid) - - def _GetTypeInfoCount_(self): - self._trace_("in _GetTypeInfoCount_\n") - return DispatcherBase._GetTypeInfoCount_(self) - - def _Invoke_(self, dispid, lcid, wFlags, args): - self._trace_("in _Invoke_ with", dispid, lcid, wFlags, args) - return DispatcherBase._Invoke_(self, dispid, lcid, wFlags, args) - - def _GetDispID_(self, name, fdex): - self._trace_("in _GetDispID_ with", name, fdex) - return DispatcherBase._GetDispID_(self, name, fdex) - - def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): - self._trace_( - "in %r._InvokeEx_-%s%r [%x,%s,%r]" - % (self.policy._obj_, dispid, args, wFlags, lcid, serviceProvider) - ) - return DispatcherBase._InvokeEx_( - self, dispid, lcid, wFlags, args, kwargs, serviceProvider - ) - - def _DeleteMemberByName_(self, name, fdex): - self._trace_("in _DeleteMemberByName_ with", name, fdex) - return DispatcherBase._DeleteMemberByName_(self, name, fdex) - - def _DeleteMemberByDispID_(self, id): - self._trace_("in _DeleteMemberByDispID_ with", id) - return 
DispatcherBase._DeleteMemberByDispID_(self, id) - - def _GetMemberProperties_(self, id, fdex): - self._trace_("in _GetMemberProperties_ with", id, fdex) - return DispatcherBase._GetMemberProperties_(self, id, fdex) - - def _GetMemberName_(self, dispid): - self._trace_("in _GetMemberName_ with", dispid) - return DispatcherBase._GetMemberName_(self, dispid) - - def _GetNextDispID_(self, fdex, flags): - self._trace_("in _GetNextDispID_ with", fdex, flags) - return DispatcherBase._GetNextDispID_(self, fdex, flags) - - def _GetNameSpaceParent_(self): - self._trace_("in _GetNameSpaceParent_") - return DispatcherBase._GetNameSpaceParent_(self) - - -class DispatcherWin32trace(DispatcherTrace): - """A tracing dispatcher that sends its output to the win32trace remote collector.""" - - def __init__(self, policyClass, object): - DispatcherTrace.__init__(self, policyClass, object) - if self.logger is None: - # If we have no logger, setup our output. - import win32traceutil # Sets up everything. - self._trace_( - "Object with win32trace dispatcher created (object=%s)" % repr(object) - ) - - -class DispatcherOutputDebugString(DispatcherTrace): - """A tracing dispatcher that sends its output to win32api.OutputDebugString""" - - def _trace_(self, *args): - for arg in args[:-1]: - win32api.OutputDebugString(str(arg) + " ") - win32api.OutputDebugString(str(args[-1]) + "\n") - - -class DispatcherWin32dbg(DispatcherBase): - """A source-level debugger dispatcher - - A dispatcher which invokes the debugger as an object is instantiated, or - when an unexpected exception occurs. - - Requires Pythonwin. - """ - - def __init__(self, policyClass, ob): - # No one uses this, and it just causes py2exe to drag all of - # pythonwin in. 
- # import pywin.debugger - pywin.debugger.brk() - print("The DispatcherWin32dbg dispatcher is deprecated!") - print("Please let me know if this is a problem.") - print("Uncomment the relevant lines in dispatcher.py to re-enable") - # DEBUGGER Note - You can either: - # * Hit Run and wait for a (non Exception class) exception to occur! - # * Set a breakpoint and hit run. - # * Step into the object creation (a few steps away!) - DispatcherBase.__init__(self, policyClass, ob) - - def _HandleException_(self): - """Invoke the debugger post mortem capability""" - # Save details away. - typ, val, tb = exc_info() - # import pywin.debugger, pywin.debugger.dbgcon - debug = 0 - try: - raise typ(val) - except Exception: # AARG - What is this Exception??? - # Use some inside knowledge to borrow a Debugger option which dictates if we - # stop at "expected" exceptions. - debug = pywin.debugger.GetDebugger().get_option( - pywin.debugger.dbgcon.OPT_STOP_EXCEPTIONS - ) - except: - debug = 1 - if debug: - try: - pywin.debugger.post_mortem(tb, typ, val) # The original exception - except: - traceback.print_exc() - - # But still raise it. - del tb - raise - - -try: - import win32trace - - DefaultDebugDispatcher = DispatcherWin32trace -except ImportError: # no win32trace module - just use a print based one. - DefaultDebugDispatcher = DispatcherTrace diff --git a/lib/win32com/server/exception.py b/lib/win32com/server/exception.py deleted file mode 100644 index e7f4d080..00000000 --- a/lib/win32com/server/exception.py +++ /dev/null @@ -1,105 +0,0 @@ -"""Exception Handling - - Exceptions - - To better support COM exceptions, the framework allows for an instance to be - raised. This instance may have a certain number of known attributes, which are - translated into COM exception details. - - This means, for example, that Python could raise a COM exception that includes details - on a Help file and location, and a description for the user. 
- - This module provides a class which provides the necessary attributes. - -""" -import sys - -import pythoncom - - -# Note that we derive from com_error, which derives from exceptions.Exception -# Also note that we dont support "self.args", as we dont support tuple-unpacking -class COMException(pythoncom.com_error): - """An Exception object that is understood by the framework. - - If the framework is presented with an exception of type class, - it looks for certain known attributes on this class to provide rich - error information to the caller. - - It should be noted that the framework supports providing this error - information via COM Exceptions, or via the ISupportErrorInfo interface. - - By using this class, you automatically provide rich error information to the - server. - """ - - def __init__( - self, - description=None, - scode=None, - source=None, - helpfile=None, - helpContext=None, - desc=None, - hresult=None, - ): - """Initialize an exception - **Params** - - description -- A string description for the exception. - scode -- An integer scode to be returned to the server, if necessary. - The pythoncom framework defaults this to be DISP_E_EXCEPTION if not specified otherwise. - source -- A string which identifies the source of the error. - helpfile -- A string which points to a help file which contains details on the error. - helpContext -- An integer context in the help file. - desc -- A short-cut for description. - hresult -- A short-cut for scode. - """ - - # convert a WIN32 error into an HRESULT - scode = scode or hresult - if scode and scode != 1: # We dont want S_FALSE mapped! 
- if scode >= -32768 and scode < 32768: - # this is HRESULT_FROM_WIN32() - scode = -2147024896 | (scode & 0x0000FFFF) - self.scode = scode - - self.description = description or desc - if scode == 1 and not self.description: - self.description = "S_FALSE" - elif scode and not self.description: - self.description = pythoncom.GetScodeString(scode) - - self.source = source - self.helpfile = helpfile - self.helpcontext = helpContext - - # todo - fill in the exception value - pythoncom.com_error.__init__(self, scode, self.description, None, -1) - - def __repr__(self): - return "" % (self.scode, self.description) - - -# Old name for the COMException class. -# Do NOT use the name Exception, as it is now a built-in -# COMException is the new, official name. -Exception = COMException - - -def IsCOMException(t=None): - if t is None: - t = sys.exc_info()[0] - try: - return issubclass(t, pythoncom.com_error) - except TypeError: # 1.5 in -X mode? - return t is pythoncon.com_error - - -def IsCOMServerException(t=None): - if t is None: - t = sys.exc_info()[0] - try: - return issubclass(t, COMException) - except TypeError: # String exception - return 0 diff --git a/lib/win32com/server/factory.py b/lib/win32com/server/factory.py deleted file mode 100644 index 7298f799..00000000 --- a/lib/win32com/server/factory.py +++ /dev/null @@ -1,26 +0,0 @@ -# Class factory utilities. -import pythoncom - - -def RegisterClassFactories(clsids, flags=None, clsctx=None): - """Given a list of CLSID, create and register class factories. 
- - Returns a list, which should be passed to RevokeClassFactories - """ - if flags is None: - flags = pythoncom.REGCLS_MULTIPLEUSE | pythoncom.REGCLS_SUSPENDED - if clsctx is None: - clsctx = pythoncom.CLSCTX_LOCAL_SERVER - ret = [] - for clsid in clsids: - # Some server append '-Embedding' etc - if clsid[0] not in ["-", "/"]: - factory = pythoncom.MakePyFactory(clsid) - regId = pythoncom.CoRegisterClassObject(clsid, factory, clsctx, flags) - ret.append((factory, regId)) - return ret - - -def RevokeClassFactories(infos): - for factory, revokeId in infos: - pythoncom.CoRevokeClassObject(revokeId) diff --git a/lib/win32com/server/localserver.py b/lib/win32com/server/localserver.py deleted file mode 100644 index 21eeafeb..00000000 --- a/lib/win32com/server/localserver.py +++ /dev/null @@ -1,53 +0,0 @@ -# LocalServer .EXE support for Python. -# -# This is designed to be used as a _script_ file by pythonw.exe -# -# In some cases, you could also use Python.exe, which will create -# a console window useful for debugging. -# -# NOTE: When NOT running in any sort of debugging mode, -# 'print' statements may fail, as sys.stdout is not valid!!! - -# -# Usage: -# wpython.exe LocalServer.py clsid [, clsid] -import sys - -sys.coinit_flags = 2 -import pythoncom -import win32api -from win32com.server import factory - -usage = """\ -Invalid command line arguments - -This program provides LocalServer COM support -for Python COM objects. 
- -It is typically run automatically by COM, passing as arguments -The ProgID or CLSID of the Python Server(s) to be hosted -""" - - -def serve(clsids): - infos = factory.RegisterClassFactories(clsids) - - pythoncom.EnableQuitMessage(win32api.GetCurrentThreadId()) - pythoncom.CoResumeClassObjects() - - pythoncom.PumpMessages() - - factory.RevokeClassFactories(infos) - - pythoncom.CoUninitialize() - - -def main(): - if len(sys.argv) == 1: - win32api.MessageBox(0, usage, "Python COM Server") - sys.exit(1) - serve(sys.argv[1:]) - - -if __name__ == "__main__": - main() diff --git a/lib/win32com/server/policy.py b/lib/win32com/server/policy.py deleted file mode 100644 index 8105e180..00000000 --- a/lib/win32com/server/policy.py +++ /dev/null @@ -1,829 +0,0 @@ -"""Policies - -Note that Dispatchers are now implemented in "dispatcher.py", but -are still documented here. - -Policies - - A policy is an object which manages the interaction between a public - Python object, and COM . In simple terms, the policy object is the - object which is actually called by COM, and it invokes the requested - method, fetches/sets the requested property, etc. See the - @win32com.server.policy.CreateInstance@ method for a description of - how a policy is specified or created. - - Exactly how a policy determines which underlying object method/property - is obtained is up to the policy. A few policies are provided, but you - can build your own. See each policy class for a description of how it - implements its policy. - - There is a policy that allows the object to specify exactly which - methods and properties will be exposed. There is also a policy that - will dynamically expose all Python methods and properties - even those - added after the object has been instantiated. - -Dispatchers - - A Dispatcher is a level in front of a Policy. 
A dispatcher is the - thing which actually receives the COM calls, and passes them to the - policy object (which in turn somehow does something with the wrapped - object). - - It is important to note that a policy does not need to have a dispatcher. - A dispatcher has the same interface as a policy, and simply steps in its - place, delegating to the real policy. The primary use for a Dispatcher - is to support debugging when necessary, but without imposing overheads - when not (ie, by not using a dispatcher at all). - - There are a few dispatchers provided - "tracing" dispatchers which simply - prints calls and args (including a variation which uses - win32api.OutputDebugString), and a "debugger" dispatcher, which can - invoke the debugger when necessary. - -Error Handling - - It is important to realise that the caller of these interfaces may - not be Python. Therefore, general Python exceptions and tracebacks aren't - much use. - - In general, there is an Exception class that should be raised, to allow - the framework to extract rich COM type error information. - - The general rule is that the **only** exception returned from Python COM - Server code should be an Exception instance. Any other Python exception - should be considered an implementation bug in the server (if not, it - should be handled, and an appropriate Exception instance raised). Any - other exception is considered "unexpected", and a dispatcher may take - special action (see Dispatchers above) - - Occasionally, the implementation will raise the policy.error error. - This usually means there is a problem in the implementation that the - Python programmer should fix. - - For example, if policy is asked to wrap an object which it can not - support (because, eg, it does not provide _public_methods_ or _dynamic_) - then policy.error will be raised, indicating it is a Python programmers - problem, rather than a COM error. 
- -""" -__author__ = "Greg Stein and Mark Hammond" - -import sys -import types - -import pythoncom -import pywintypes -import win32api -import win32con -import winerror - -# Import a few important constants to speed lookups. -from pythoncom import ( - DISPATCH_METHOD, - DISPATCH_PROPERTYGET, - DISPATCH_PROPERTYPUT, - DISPATCH_PROPERTYPUTREF, - DISPID_COLLECT, - DISPID_CONSTRUCTOR, - DISPID_DESTRUCTOR, - DISPID_EVALUATE, - DISPID_NEWENUM, - DISPID_PROPERTYPUT, - DISPID_STARTENUM, - DISPID_UNKNOWN, - DISPID_VALUE, -) - -S_OK = 0 - -# Few more globals to speed things. -IDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch] -IUnknownType = pythoncom.TypeIIDs[pythoncom.IID_IUnknown] - -from .exception import COMException - -error = __name__ + " error" - -regSpec = "CLSID\\%s\\PythonCOM" -regPolicy = "CLSID\\%s\\PythonCOMPolicy" -regDispatcher = "CLSID\\%s\\PythonCOMDispatcher" -regAddnPath = "CLSID\\%s\\PythonCOMPath" - - -def CreateInstance(clsid, reqIID): - """Create a new instance of the specified IID - - The COM framework **always** calls this function to create a new - instance for the specified CLSID. This function looks up the - registry for the name of a policy, creates the policy, and asks the - policy to create the specified object by calling the _CreateInstance_ method. - - Exactly how the policy creates the instance is up to the policy. See the - specific policy documentation for more details. - """ - # First see is sys.path should have something on it. 
- try: - addnPaths = win32api.RegQueryValue( - win32con.HKEY_CLASSES_ROOT, regAddnPath % clsid - ).split(";") - for newPath in addnPaths: - if newPath not in sys.path: - sys.path.insert(0, newPath) - except win32api.error: - pass - try: - policy = win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT, regPolicy % clsid) - policy = resolve_func(policy) - except win32api.error: - policy = DefaultPolicy - - try: - dispatcher = win32api.RegQueryValue( - win32con.HKEY_CLASSES_ROOT, regDispatcher % clsid - ) - if dispatcher: - dispatcher = resolve_func(dispatcher) - except win32api.error: - dispatcher = None - - if dispatcher: - retObj = dispatcher(policy, None) - else: - retObj = policy(None) - return retObj._CreateInstance_(clsid, reqIID) - - -class BasicWrapPolicy: - """The base class of policies. - - Normally not used directly (use a child class, instead) - - This policy assumes we are wrapping another object - as the COM server. This supports the delegation of the core COM entry points - to either the wrapped object, or to a child class. - - This policy supports the following special attributes on the wrapped object - - _query_interface_ -- A handler which can respond to the COM 'QueryInterface' call. - _com_interfaces_ -- An optional list of IIDs which the interface will assume are - valid for the object. - _invoke_ -- A handler which can respond to the COM 'Invoke' call. If this attribute - is not provided, then the default policy implementation is used. If this attribute - does exist, it is responsible for providing all required functionality - ie, the - policy _invoke_ method is not invoked at all (and nor are you able to call it!) - _getidsofnames_ -- A handler which can respond to the COM 'GetIDsOfNames' call. If this attribute - is not provided, then the default policy implementation is used. 
If this attribute - does exist, it is responsible for providing all required functionality - ie, the - policy _getidsofnames_ method is not invoked at all (and nor are you able to call it!) - - IDispatchEx functionality: - - _invokeex_ -- Very similar to _invoke_, except slightly different arguments are used. - And the result is just the _real_ result (rather than the (hresult, argErr, realResult) - tuple that _invoke_ uses. - This is the new, prefered handler (the default _invoke_ handler simply called _invokeex_) - _getdispid_ -- Very similar to _getidsofnames_, except slightly different arguments are used, - and only 1 property at a time can be fetched (which is all we support in getidsofnames anyway!) - This is the new, prefered handler (the default _invoke_ handler simply called _invokeex_) - _getnextdispid_- uses self._name_to_dispid_ to enumerate the DISPIDs - """ - - def __init__(self, object): - """Initialise the policy object - - Params: - - object -- The object to wrap. May be None *iff* @BasicWrapPolicy._CreateInstance_@ will be - called immediately after this to setup a brand new object - """ - if object is not None: - self._wrap_(object) - - def _CreateInstance_(self, clsid, reqIID): - """Creates a new instance of a **wrapped** object - - This method looks up a "@win32com.server.policy.regSpec@" % clsid entry - in the registry (using @DefaultPolicy@) - """ - try: - classSpec = win32api.RegQueryValue( - win32con.HKEY_CLASSES_ROOT, regSpec % clsid - ) - except win32api.error: - raise error( - "The object is not correctly registered - %s key can not be read" - % (regSpec % clsid) - ) - myob = call_func(classSpec) - self._wrap_(myob) - try: - return pythoncom.WrapObject(self, reqIID) - except pythoncom.com_error as xxx_todo_changeme: - (hr, desc, exc, arg) = xxx_todo_changeme.args - from win32com.util import IIDToInterfaceName - - desc = ( - "The object '%r' was created, but does not support the " - "interface '%s'(%s): %s" - % (myob, 
IIDToInterfaceName(reqIID), reqIID, desc) - ) - raise pythoncom.com_error(hr, desc, exc, arg) - - def _wrap_(self, object): - """Wraps up the specified object. - - This function keeps a reference to the passed - object, and may interogate it to determine how to respond to COM requests, etc. - """ - # We "clobber" certain of our own methods with ones - # provided by the wrapped object, iff they exist. - self._name_to_dispid_ = {} - ob = self._obj_ = object - if hasattr(ob, "_query_interface_"): - self._query_interface_ = ob._query_interface_ - - if hasattr(ob, "_invoke_"): - self._invoke_ = ob._invoke_ - - if hasattr(ob, "_invokeex_"): - self._invokeex_ = ob._invokeex_ - - if hasattr(ob, "_getidsofnames_"): - self._getidsofnames_ = ob._getidsofnames_ - - if hasattr(ob, "_getdispid_"): - self._getdispid_ = ob._getdispid_ - - # Allow for override of certain special attributes. - if hasattr(ob, "_com_interfaces_"): - self._com_interfaces_ = [] - # Allow interfaces to be specified by name. - for i in ob._com_interfaces_: - if type(i) != pywintypes.IIDType: - # Prolly a string! - if i[0] != "{": - i = pythoncom.InterfaceNames[i] - else: - i = pythoncom.MakeIID(i) - self._com_interfaces_.append(i) - else: - self._com_interfaces_ = [] - - # "QueryInterface" handling. - def _QueryInterface_(self, iid): - """The main COM entry-point for QueryInterface. - - This checks the _com_interfaces_ attribute and if the interface is not specified - there, it calls the derived helper _query_interface_ - """ - if iid in self._com_interfaces_: - return 1 - return self._query_interface_(iid) - - def _query_interface_(self, iid): - """Called if the object does not provide the requested interface in _com_interfaces_, - and does not provide a _query_interface_ handler. - - Returns a result to the COM framework indicating the interface is not supported. - """ - return 0 - - # "Invoke" handling. - def _Invoke_(self, dispid, lcid, wFlags, args): - """The main COM entry-point for Invoke. 
- - This calls the _invoke_ helper. - """ - # Translate a possible string dispid to real dispid. - if type(dispid) == type(""): - try: - dispid = self._name_to_dispid_[dispid.lower()] - except KeyError: - raise COMException( - scode=winerror.DISP_E_MEMBERNOTFOUND, desc="Member not found" - ) - return self._invoke_(dispid, lcid, wFlags, args) - - def _invoke_(self, dispid, lcid, wFlags, args): - # Delegates to the _invokeex_ implementation. This allows - # a custom policy to define _invokeex_, and automatically get _invoke_ too. - return S_OK, -1, self._invokeex_(dispid, lcid, wFlags, args, None, None) - - # "GetIDsOfNames" handling. - def _GetIDsOfNames_(self, names, lcid): - """The main COM entry-point for GetIDsOfNames. - - This checks the validity of the arguments, and calls the _getidsofnames_ helper. - """ - if len(names) > 1: - raise COMException( - scode=winerror.DISP_E_INVALID, - desc="Cannot support member argument names", - ) - return self._getidsofnames_(names, lcid) - - def _getidsofnames_(self, names, lcid): - ### note: lcid is being ignored... - return (self._getdispid_(names[0], 0),) - - # IDispatchEx support for policies. Most of the IDispathEx functionality - # by default will raise E_NOTIMPL. Thus it is not necessary for derived - # policies to explicitely implement all this functionality just to not implement it! - - def _GetDispID_(self, name, fdex): - return self._getdispid_(name, fdex) - - def _getdispid_(self, name, fdex): - try: - ### TODO - look at the fdex flags!!! - return self._name_to_dispid_[name.lower()] - except KeyError: - raise COMException(scode=winerror.DISP_E_UNKNOWNNAME) - - # "InvokeEx" handling. - def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): - """The main COM entry-point for InvokeEx. - - This calls the _invokeex_ helper. - """ - # Translate a possible string dispid to real dispid. 
- if type(dispid) == type(""): - try: - dispid = self._name_to_dispid_[dispid.lower()] - except KeyError: - raise COMException( - scode=winerror.DISP_E_MEMBERNOTFOUND, desc="Member not found" - ) - return self._invokeex_(dispid, lcid, wFlags, args, kwargs, serviceProvider) - - def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): - """A stub for _invokeex_ - should never be called. - - Simply raises an exception. - """ - # Base classes should override this method (and not call the base) - raise error("This class does not provide _invokeex_ semantics") - - def _DeleteMemberByName_(self, name, fdex): - return self._deletememberbyname_(name, fdex) - - def _deletememberbyname_(self, name, fdex): - raise COMException(scode=winerror.E_NOTIMPL) - - def _DeleteMemberByDispID_(self, id): - return self._deletememberbydispid(id) - - def _deletememberbydispid_(self, id): - raise COMException(scode=winerror.E_NOTIMPL) - - def _GetMemberProperties_(self, id, fdex): - return self._getmemberproperties_(id, fdex) - - def _getmemberproperties_(self, id, fdex): - raise COMException(scode=winerror.E_NOTIMPL) - - def _GetMemberName_(self, dispid): - return self._getmembername_(dispid) - - def _getmembername_(self, dispid): - raise COMException(scode=winerror.E_NOTIMPL) - - def _GetNextDispID_(self, fdex, dispid): - return self._getnextdispid_(fdex, dispid) - - def _getnextdispid_(self, fdex, dispid): - ids = list(self._name_to_dispid_.values()) - ids.sort() - if DISPID_STARTENUM in ids: - ids.remove(DISPID_STARTENUM) - if dispid == DISPID_STARTENUM: - return ids[0] - else: - try: - return ids[ids.index(dispid) + 1] - except ValueError: # dispid not in list? 
- raise COMException(scode=winerror.E_UNEXPECTED) - except IndexError: # No more items - raise COMException(scode=winerror.S_FALSE) - - def _GetNameSpaceParent_(self): - return self._getnamespaceparent() - - def _getnamespaceparent_(self): - raise COMException(scode=winerror.E_NOTIMPL) - - -class MappedWrapPolicy(BasicWrapPolicy): - """Wraps an object using maps to do its magic - - This policy wraps up a Python object, using a number of maps - which translate from a Dispatch ID and flags, into an object to call/getattr, etc. - - It is the responsibility of derived classes to determine exactly how the - maps are filled (ie, the derived classes determine the map filling policy. - - This policy supports the following special attributes on the wrapped object - - _dispid_to_func_/_dispid_to_get_/_dispid_to_put_ -- These are dictionaries - (keyed by integer dispid, values are string attribute names) which the COM - implementation uses when it is processing COM requests. Note that the implementation - uses this dictionary for its own purposes - not a copy - which means the contents of - these dictionaries will change as the object is used. 
- - """ - - def _wrap_(self, object): - BasicWrapPolicy._wrap_(self, object) - ob = self._obj_ - if hasattr(ob, "_dispid_to_func_"): - self._dispid_to_func_ = ob._dispid_to_func_ - else: - self._dispid_to_func_ = {} - if hasattr(ob, "_dispid_to_get_"): - self._dispid_to_get_ = ob._dispid_to_get_ - else: - self._dispid_to_get_ = {} - if hasattr(ob, "_dispid_to_put_"): - self._dispid_to_put_ = ob._dispid_to_put_ - else: - self._dispid_to_put_ = {} - - def _getmembername_(self, dispid): - if dispid in self._dispid_to_func_: - return self._dispid_to_func_[dispid] - elif dispid in self._dispid_to_get_: - return self._dispid_to_get_[dispid] - elif dispid in self._dispid_to_put_: - return self._dispid_to_put_[dispid] - else: - raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND) - - -class DesignatedWrapPolicy(MappedWrapPolicy): - """A policy which uses a mapping to link functions and dispid - - A MappedWrappedPolicy which allows the wrapped object to specify, via certain - special named attributes, exactly which methods and properties are exposed. - - All a wrapped object need do is provide the special attributes, and the policy - will handle everything else. - - Attributes: - - _public_methods_ -- Required, unless a typelib GUID is given -- A list - of strings, which must be the names of methods the object - provides. These methods will be exposed and callable - from other COM hosts. - _public_attrs_ A list of strings, which must be the names of attributes on the object. - These attributes will be exposed and readable and possibly writeable from other COM hosts. - _readonly_attrs_ -- A list of strings, which must also appear in _public_attrs. These - attributes will be readable, but not writable, by other COM hosts. 
- _value_ -- A method that will be called if the COM host requests the "default" method - (ie, calls Invoke with dispid==DISPID_VALUE) - _NewEnum -- A method that will be called if the COM host requests an enumerator on the - object (ie, calls Invoke with dispid==DISPID_NEWENUM.) - It is the responsibility of the method to ensure the returned - object conforms to the required Enum interface. - - _typelib_guid_ -- The GUID of the typelibrary with interface definitions we use. - _typelib_version_ -- A tuple of (major, minor) with a default of 1,1 - _typelib_lcid_ -- The LCID of the typelib, default = LOCALE_USER_DEFAULT - - _Evaluate -- Dunno what this means, except the host has called Invoke with dispid==DISPID_EVALUATE! - See the COM documentation for details. - """ - - def _wrap_(self, ob): - # If we have nominated universal interfaces to support, load them now - tlb_guid = getattr(ob, "_typelib_guid_", None) - if tlb_guid is not None: - tlb_major, tlb_minor = getattr(ob, "_typelib_version_", (1, 0)) - tlb_lcid = getattr(ob, "_typelib_lcid_", 0) - from win32com import universal - - # XXX - what if the user wants to implement interfaces from multiple - # typelibs? - # Filter out all 'normal' IIDs (ie, IID objects and strings starting with { - interfaces = [ - i - for i in getattr(ob, "_com_interfaces_", []) - if type(i) != pywintypes.IIDType and not i.startswith("{") - ] - universal_data = universal.RegisterInterfaces( - tlb_guid, tlb_lcid, tlb_major, tlb_minor, interfaces - ) - else: - universal_data = [] - MappedWrapPolicy._wrap_(self, ob) - if not hasattr(ob, "_public_methods_") and not hasattr(ob, "_typelib_guid_"): - raise error( - "Object does not support DesignatedWrapPolicy, as it does not have either _public_methods_ or _typelib_guid_ attributes." 
- ) - - # Copy existing _dispid_to_func_ entries to _name_to_dispid_ - for dispid, name in self._dispid_to_func_.items(): - self._name_to_dispid_[name.lower()] = dispid - for dispid, name in self._dispid_to_get_.items(): - self._name_to_dispid_[name.lower()] = dispid - for dispid, name in self._dispid_to_put_.items(): - self._name_to_dispid_[name.lower()] = dispid - - # Patch up the universal stuff. - for dispid, invkind, name in universal_data: - self._name_to_dispid_[name.lower()] = dispid - if invkind == DISPATCH_METHOD: - self._dispid_to_func_[dispid] = name - elif invkind in (DISPATCH_PROPERTYPUT, DISPATCH_PROPERTYPUTREF): - self._dispid_to_put_[dispid] = name - elif invkind == DISPATCH_PROPERTYGET: - self._dispid_to_get_[dispid] = name - else: - raise ValueError("unexpected invkind: %d (%s)" % (invkind, name)) - - # look for reserved methods - if hasattr(ob, "_value_"): - self._dispid_to_get_[DISPID_VALUE] = "_value_" - self._dispid_to_put_[DISPID_PROPERTYPUT] = "_value_" - if hasattr(ob, "_NewEnum"): - self._name_to_dispid_["_newenum"] = DISPID_NEWENUM - self._dispid_to_func_[DISPID_NEWENUM] = "_NewEnum" - if hasattr(ob, "_Evaluate"): - self._name_to_dispid_["_evaluate"] = DISPID_EVALUATE - self._dispid_to_func_[DISPID_EVALUATE] = "_Evaluate" - - next_dispid = self._allocnextdispid(999) - # note: funcs have precedence over attrs (install attrs first) - if hasattr(ob, "_public_attrs_"): - if hasattr(ob, "_readonly_attrs_"): - readonly = ob._readonly_attrs_ - else: - readonly = [] - for name in ob._public_attrs_: - dispid = self._name_to_dispid_.get(name.lower()) - if dispid is None: - dispid = next_dispid - self._name_to_dispid_[name.lower()] = dispid - next_dispid = self._allocnextdispid(next_dispid) - self._dispid_to_get_[dispid] = name - if name not in readonly: - self._dispid_to_put_[dispid] = name - for name in getattr(ob, "_public_methods_", []): - dispid = self._name_to_dispid_.get(name.lower()) - if dispid is None: - dispid = next_dispid - 
self._name_to_dispid_[name.lower()] = dispid - next_dispid = self._allocnextdispid(next_dispid) - self._dispid_to_func_[dispid] = name - self._typeinfos_ = None # load these on demand. - - def _build_typeinfos_(self): - # Can only ever be one for now. - tlb_guid = getattr(self._obj_, "_typelib_guid_", None) - if tlb_guid is None: - return [] - tlb_major, tlb_minor = getattr(self._obj_, "_typelib_version_", (1, 0)) - tlb = pythoncom.LoadRegTypeLib(tlb_guid, tlb_major, tlb_minor) - typecomp = tlb.GetTypeComp() - # Not 100% sure what semantics we should use for the default interface. - # Look for the first name in _com_interfaces_ that exists in the typelib. - for iname in self._obj_._com_interfaces_: - try: - type_info, type_comp = typecomp.BindType(iname) - if type_info is not None: - return [type_info] - except pythoncom.com_error: - pass - return [] - - def _GetTypeInfoCount_(self): - if self._typeinfos_ is None: - self._typeinfos_ = self._build_typeinfos_() - return len(self._typeinfos_) - - def _GetTypeInfo_(self, index, lcid): - if self._typeinfos_ is None: - self._typeinfos_ = self._build_typeinfos_() - if index < 0 or index >= len(self._typeinfos_): - raise COMException(scode=winerror.DISP_E_BADINDEX) - return 0, self._typeinfos_[index] - - def _allocnextdispid(self, last_dispid): - while 1: - last_dispid = last_dispid + 1 - if ( - last_dispid not in self._dispid_to_func_ - and last_dispid not in self._dispid_to_get_ - and last_dispid not in self._dispid_to_put_ - ): - return last_dispid - - def _invokeex_(self, dispid, lcid, wFlags, args, kwArgs, serviceProvider): - ### note: lcid is being ignored... - - if wFlags & DISPATCH_METHOD: - try: - funcname = self._dispid_to_func_[dispid] - except KeyError: - if not wFlags & DISPATCH_PROPERTYGET: - raise COMException( - scode=winerror.DISP_E_MEMBERNOTFOUND - ) # not found - else: - try: - func = getattr(self._obj_, funcname) - except AttributeError: - # May have a dispid, but that doesnt mean we have the function! 
- raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND) - # Should check callable here - try: - return func(*args) - except TypeError as v: - # Particularly nasty is "wrong number of args" type error - # This helps you see what 'func' and 'args' actually is - if str(v).find("arguments") >= 0: - print( - "** TypeError %s calling function %r(%r)" % (v, func, args) - ) - raise - - if wFlags & DISPATCH_PROPERTYGET: - try: - name = self._dispid_to_get_[dispid] - except KeyError: - raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND) # not found - retob = getattr(self._obj_, name) - if type(retob) == types.MethodType: # a method as a property - call it. - retob = retob(*args) - return retob - - if wFlags & (DISPATCH_PROPERTYPUT | DISPATCH_PROPERTYPUTREF): ### correct? - try: - name = self._dispid_to_put_[dispid] - except KeyError: - raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND) # read-only - # If we have a method of that name (ie, a property get function), and - # we have an equiv. property set function, use that instead. - if ( - type(getattr(self._obj_, name, None)) == types.MethodType - and type(getattr(self._obj_, "Set" + name, None)) == types.MethodType - ): - fn = getattr(self._obj_, "Set" + name) - fn(*args) - else: - # just set the attribute - setattr(self._obj_, name, args[0]) - return - - raise COMException(scode=winerror.E_INVALIDARG, desc="invalid wFlags") - - -class EventHandlerPolicy(DesignatedWrapPolicy): - """The default policy used by event handlers in the win32com.client package. - - In addition to the base policy, this provides argument conversion semantics for - params - * dispatch params are converted to dispatch objects. - * Unicode objects are converted to strings (1.5.2 and earlier) - - NOTE: Later, we may allow the object to override this process?? 
- """ - - def _transform_args_(self, args, kwArgs, dispid, lcid, wFlags, serviceProvider): - ret = [] - for arg in args: - arg_type = type(arg) - if arg_type == IDispatchType: - import win32com.client - - arg = win32com.client.Dispatch(arg) - elif arg_type == IUnknownType: - try: - import win32com.client - - arg = win32com.client.Dispatch( - arg.QueryInterface(pythoncom.IID_IDispatch) - ) - except pythoncom.error: - pass # Keep it as IUnknown - ret.append(arg) - return tuple(ret), kwArgs - - def _invokeex_(self, dispid, lcid, wFlags, args, kwArgs, serviceProvider): - # transform the args. - args, kwArgs = self._transform_args_( - args, kwArgs, dispid, lcid, wFlags, serviceProvider - ) - return DesignatedWrapPolicy._invokeex_( - self, dispid, lcid, wFlags, args, kwArgs, serviceProvider - ) - - -class DynamicPolicy(BasicWrapPolicy): - """A policy which dynamically (ie, at run-time) determines public interfaces. - - A dynamic policy is used to dynamically dispatch methods and properties to the - wrapped object. The list of objects and properties does not need to be known in - advance, and methods or properties added to the wrapped object after construction - are also handled. - - The wrapped object must provide the following attributes: - - _dynamic_ -- A method that will be called whenever an invoke on the object - is called. The method is called with the name of the underlying method/property - (ie, the mapping of dispid to/from name has been resolved.) This name property - may also be '_value_' to indicate the default, and '_NewEnum' to indicate a new - enumerator is requested. - - """ - - def _wrap_(self, object): - BasicWrapPolicy._wrap_(self, object) - if not hasattr(self._obj_, "_dynamic_"): - raise error("Object does not support Dynamic COM Policy") - self._next_dynamic_ = self._min_dynamic_ = 1000 - self._dyn_dispid_to_name_ = { - DISPID_VALUE: "_value_", - DISPID_NEWENUM: "_NewEnum", - } - - def _getdispid_(self, name, fdex): - # TODO - Look at fdex flags. 
- lname = name.lower() - try: - return self._name_to_dispid_[lname] - except KeyError: - dispid = self._next_dynamic_ = self._next_dynamic_ + 1 - self._name_to_dispid_[lname] = dispid - self._dyn_dispid_to_name_[dispid] = name # Keep case in this map... - return dispid - - def _invoke_(self, dispid, lcid, wFlags, args): - return S_OK, -1, self._invokeex_(dispid, lcid, wFlags, args, None, None) - - def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): - ### note: lcid is being ignored... - ### note: kwargs is being ignored... - ### note: serviceProvider is being ignored... - ### there might be assigned DISPID values to properties, too... - try: - name = self._dyn_dispid_to_name_[dispid] - except KeyError: - raise COMException( - scode=winerror.DISP_E_MEMBERNOTFOUND, desc="Member not found" - ) - return self._obj_._dynamic_(name, lcid, wFlags, args) - - -DefaultPolicy = DesignatedWrapPolicy - - -def resolve_func(spec): - """Resolve a function by name - - Given a function specified by 'module.function', return a callable object - (ie, the function itself) - """ - try: - idx = spec.rindex(".") - mname = spec[:idx] - fname = spec[idx + 1 :] - # Dont attempt to optimize by looking in sys.modules, - # as another thread may also be performing the import - this - # way we take advantage of the built-in import lock. - module = _import_module(mname) - return getattr(module, fname) - except ValueError: # No "." in name - assume in this module - return globals()[spec] - - -def call_func(spec, *args): - """Call a function specified by name. - - Call a function specified by 'module.function' and return the result. - """ - - return resolve_func(spec)(*args) - - -def _import_module(mname): - """Import a module just like the 'import' statement. - - Having this function is much nicer for importing arbitrary modules than - using the 'exec' keyword. It is more efficient and obvious to the reader. 
- """ - __import__(mname) - # Eeek - result of _import_ is "win32com" - not "win32com.a.b.c" - # Get the full module from sys.modules - return sys.modules[mname] - - -####### -# -# Temporary hacks until all old code moves. -# -# These have been moved to a new source file, but some code may -# still reference them here. These will end up being removed. -try: - from .dispatcher import DispatcherTrace, DispatcherWin32trace -except ImportError: # Quite likely a frozen executable that doesnt need dispatchers - pass diff --git a/lib/win32com/server/register.py b/lib/win32com/server/register.py deleted file mode 100644 index ae513c75..00000000 --- a/lib/win32com/server/register.py +++ /dev/null @@ -1,672 +0,0 @@ -"""Utilities for registering objects. - -This module contains utility functions to register Python objects as -valid COM Servers. The RegisterServer function provides all information -necessary to allow the COM framework to respond to a request for a COM object, -construct the necessary Python object, and dispatch COM events. - -""" -import os -import sys - -import pythoncom -import win32api -import win32con -import winerror - -CATID_PythonCOMServer = "{B3EF80D0-68E2-11D0-A689-00C04FD658FF}" - - -def _set_subkeys(keyName, valueDict, base=win32con.HKEY_CLASSES_ROOT): - hkey = win32api.RegCreateKey(base, keyName) - try: - for key, value in valueDict.items(): - win32api.RegSetValueEx(hkey, key, None, win32con.REG_SZ, value) - finally: - win32api.RegCloseKey(hkey) - - -def _set_string(path, value, base=win32con.HKEY_CLASSES_ROOT): - "Set a string value in the registry." - - win32api.RegSetValue(base, path, win32con.REG_SZ, value) - - -def _get_string(path, base=win32con.HKEY_CLASSES_ROOT): - "Get a string value from the registry." - - try: - return win32api.RegQueryValue(base, path) - except win32api.error: - return None - - -def _remove_key(path, base=win32con.HKEY_CLASSES_ROOT): - "Remove a string from the registry." 
- - try: - win32api.RegDeleteKey(base, path) - except win32api.error as xxx_todo_changeme1: - (code, fn, msg) = xxx_todo_changeme1.args - if code != winerror.ERROR_FILE_NOT_FOUND: - raise win32api.error(code, fn, msg) - - -def recurse_delete_key(path, base=win32con.HKEY_CLASSES_ROOT): - """Recursively delete registry keys. - - This is needed since you can't blast a key when subkeys exist. - """ - try: - h = win32api.RegOpenKey(base, path) - except win32api.error as xxx_todo_changeme2: - (code, fn, msg) = xxx_todo_changeme2.args - if code != winerror.ERROR_FILE_NOT_FOUND: - raise win32api.error(code, fn, msg) - else: - # parent key found and opened successfully. do some work, making sure - # to always close the thing (error or no). - try: - # remove all of the subkeys - while 1: - try: - subkeyname = win32api.RegEnumKey(h, 0) - except win32api.error as xxx_todo_changeme: - (code, fn, msg) = xxx_todo_changeme.args - if code != winerror.ERROR_NO_MORE_ITEMS: - raise win32api.error(code, fn, msg) - break - recurse_delete_key(path + "\\" + subkeyname, base) - - # remove the parent key - _remove_key(path, base) - finally: - win32api.RegCloseKey(h) - - -def _cat_registrar(): - return pythoncom.CoCreateInstance( - pythoncom.CLSID_StdComponentCategoriesMgr, - None, - pythoncom.CLSCTX_INPROC_SERVER, - pythoncom.IID_ICatRegister, - ) - - -def _find_localserver_exe(mustfind): - if not sys.platform.startswith("win32"): - return sys.executable - if pythoncom.__file__.find("_d") < 0: - exeBaseName = "pythonw.exe" - else: - exeBaseName = "pythonw_d.exe" - # First see if in the same directory as this .EXE - exeName = os.path.join(os.path.split(sys.executable)[0], exeBaseName) - if not os.path.exists(exeName): - # See if in our sys.prefix directory - exeName = os.path.join(sys.prefix, exeBaseName) - if not os.path.exists(exeName): - # See if in our sys.prefix/pcbuild directory (for developers) - if "64 bit" in sys.version: - exeName = os.path.join(sys.prefix, "PCbuild", "amd64", 
exeBaseName) - else: - exeName = os.path.join(sys.prefix, "PCbuild", exeBaseName) - if not os.path.exists(exeName): - # See if the registry has some info. - try: - key = "SOFTWARE\\Python\\PythonCore\\%s\\InstallPath" % sys.winver - path = win32api.RegQueryValue(win32con.HKEY_LOCAL_MACHINE, key) - exeName = os.path.join(path, exeBaseName) - except (AttributeError, win32api.error): - pass - if not os.path.exists(exeName): - if mustfind: - raise RuntimeError("Can not locate the program '%s'" % exeBaseName) - return None - return exeName - - -def _find_localserver_module(): - import win32com.server - - path = win32com.server.__path__[0] - baseName = "localserver" - pyfile = os.path.join(path, baseName + ".py") - try: - os.stat(pyfile) - except os.error: - # See if we have a compiled extension - if __debug__: - ext = ".pyc" - else: - ext = ".pyo" - pyfile = os.path.join(path, baseName + ext) - try: - os.stat(pyfile) - except os.error: - raise RuntimeError( - "Can not locate the Python module 'win32com.server.%s'" % baseName - ) - return pyfile - - -def RegisterServer( - clsid, - pythonInstString=None, - desc=None, - progID=None, - verProgID=None, - defIcon=None, - threadingModel="both", - policy=None, - catids=[], - other={}, - addPyComCat=None, - dispatcher=None, - clsctx=None, - addnPath=None, -): - """Registers a Python object as a COM Server. This enters almost all necessary - information in the system registry, allowing COM to use the object. - - clsid -- The (unique) CLSID of the server. - pythonInstString -- A string holding the instance name that will be created - whenever COM requests a new object. - desc -- The description of the COM object. - progID -- The user name of this object (eg, Word.Document) - verProgId -- The user name of this version's implementation (eg Word.6.Document) - defIcon -- The default icon for the object. - threadingModel -- The threading model this object supports. - policy -- The policy to use when creating this object. 
- catids -- A list of category ID's this object belongs in. - other -- A dictionary of extra items to be registered. - addPyComCat -- A flag indicating if the object should be added to the list - of Python servers installed on the machine. If None (the default) - then it will be registered when running from python source, but - not registered if running in a frozen environment. - dispatcher -- The dispatcher to use when creating this object. - clsctx -- One of the CLSCTX_* constants. - addnPath -- An additional path the COM framework will add to sys.path - before attempting to create the object. - """ - - ### backwards-compat check - ### Certain policies do not require a "class name", just the policy itself. - if not pythonInstString and not policy: - raise TypeError( - "You must specify either the Python Class or Python Policy which implement the COM object." - ) - - keyNameRoot = "CLSID\\%s" % str(clsid) - _set_string(keyNameRoot, desc) - - # Also register as an "Application" so DCOM etc all see us. - _set_string("AppID\\%s" % clsid, progID) - # Depending on contexts requested, register the specified server type. - # Set default clsctx. - if not clsctx: - clsctx = pythoncom.CLSCTX_INPROC_SERVER | pythoncom.CLSCTX_LOCAL_SERVER - # And if we are frozen, ignore the ones that don't make sense in this - # context. - if pythoncom.frozen: - assert ( - sys.frozen - ), "pythoncom is frozen, but sys.frozen is not set - don't know the context!" - if sys.frozen == "dll": - clsctx = clsctx & pythoncom.CLSCTX_INPROC_SERVER - else: - clsctx = clsctx & pythoncom.CLSCTX_LOCAL_SERVER - # Now setup based on the clsctx left over. - if clsctx & pythoncom.CLSCTX_INPROC_SERVER: - # get the module to use for registration. - # nod to Gordon's installer - if sys.frozen and sys.frozendllhandle - # exist, then we are being registered via a DLL - use this DLL as the - # file name. 
- if pythoncom.frozen: - if hasattr(sys, "frozendllhandle"): - dllName = win32api.GetModuleFileName(sys.frozendllhandle) - else: - raise RuntimeError( - "We appear to have a frozen DLL, but I don't know the DLL to use" - ) - else: - # Normal case - running from .py file, so register pythoncom's DLL. - # Although now we prefer a 'loader' DLL if it exists to avoid some - # manifest issues (the 'loader' DLL has a manifest, but pythoncom does not) - pythoncom_dir = os.path.dirname(pythoncom.__file__) - suffix = "_d" if "_d" in pythoncom.__file__ else "" - # Always register with the full path to the DLLs. - loadername = os.path.join( - pythoncom_dir, - "pythoncomloader%d%d%s.dll" - % (sys.version_info[0], sys.version_info[1], suffix), - ) - dllName = loadername if os.path.isfile(loadername) else pythoncom.__file__ - - _set_subkeys( - keyNameRoot + "\\InprocServer32", - { - None: dllName, - "ThreadingModel": threadingModel, - }, - ) - else: # Remove any old InProcServer32 registrations - _remove_key(keyNameRoot + "\\InprocServer32") - - if clsctx & pythoncom.CLSCTX_LOCAL_SERVER: - if pythoncom.frozen: - # If we are frozen, we write "{exe} /Automate", just - # like "normal" .EXEs do - exeName = win32api.GetShortPathName(sys.executable) - command = "%s /Automate" % (exeName,) - else: - # Running from .py sources - we need to write - # 'python.exe win32com\server\localserver.py {clsid}" - exeName = _find_localserver_exe(1) - exeName = win32api.GetShortPathName(exeName) - pyfile = _find_localserver_module() - command = '%s "%s" %s' % (exeName, pyfile, str(clsid)) - _set_string(keyNameRoot + "\\LocalServer32", command) - else: # Remove any old LocalServer32 registrations - _remove_key(keyNameRoot + "\\LocalServer32") - - if pythonInstString: - _set_string(keyNameRoot + "\\PythonCOM", pythonInstString) - else: - _remove_key(keyNameRoot + "\\PythonCOM") - if policy: - _set_string(keyNameRoot + "\\PythonCOMPolicy", policy) - else: - _remove_key(keyNameRoot + "\\PythonCOMPolicy") 
- - if dispatcher: - _set_string(keyNameRoot + "\\PythonCOMDispatcher", dispatcher) - else: - _remove_key(keyNameRoot + "\\PythonCOMDispatcher") - - if defIcon: - _set_string(keyNameRoot + "\\DefaultIcon", defIcon) - else: - _remove_key(keyNameRoot + "\\DefaultIcon") - - if addnPath: - _set_string(keyNameRoot + "\\PythonCOMPath", addnPath) - else: - _remove_key(keyNameRoot + "\\PythonCOMPath") - - if addPyComCat is None: - addPyComCat = pythoncom.frozen == 0 - if addPyComCat: - catids = catids + [CATID_PythonCOMServer] - - # Set up the implemented categories - if catids: - regCat = _cat_registrar() - regCat.RegisterClassImplCategories(clsid, catids) - - # set up any other reg values they might have - if other: - for key, value in other.items(): - _set_string(keyNameRoot + "\\" + key, value) - - if progID: - # set the progID as the most specific that was given to us - if verProgID: - _set_string(keyNameRoot + "\\ProgID", verProgID) - else: - _set_string(keyNameRoot + "\\ProgID", progID) - - # Set up the root entries - version independent. - if desc: - _set_string(progID, desc) - _set_string(progID + "\\CLSID", str(clsid)) - - # Set up the root entries - version dependent. - if verProgID: - # point from independent to the current version - _set_string(progID + "\\CurVer", verProgID) - - # point to the version-independent one - _set_string(keyNameRoot + "\\VersionIndependentProgID", progID) - - # set up the versioned progID - if desc: - _set_string(verProgID, desc) - _set_string(verProgID + "\\CLSID", str(clsid)) - - -def GetUnregisterServerKeys(clsid, progID=None, verProgID=None, customKeys=None): - """Given a server, return a list of of ("key", root), which are keys recursively - and uncondtionally deleted at unregister or uninstall time. 
- """ - # remove the main CLSID registration - ret = [("CLSID\\%s" % str(clsid), win32con.HKEY_CLASSES_ROOT)] - # remove the versioned ProgID registration - if verProgID: - ret.append((verProgID, win32con.HKEY_CLASSES_ROOT)) - # blow away the independent ProgID. we can't leave it since we just - # torched the class. - ### could potentially check the CLSID... ? - if progID: - ret.append((progID, win32con.HKEY_CLASSES_ROOT)) - # The DCOM config tool may write settings to the AppID key for our CLSID - ret.append(("AppID\\%s" % str(clsid), win32con.HKEY_CLASSES_ROOT)) - # Any custom keys? - if customKeys: - ret = ret + customKeys - - return ret - - -def UnregisterServer(clsid, progID=None, verProgID=None, customKeys=None): - """Unregisters a Python COM server.""" - - for args in GetUnregisterServerKeys(clsid, progID, verProgID, customKeys): - recurse_delete_key(*args) - - ### it might be nice at some point to "roll back" the independent ProgID - ### to an earlier version if one exists, and just blowing away the - ### specified version of the ProgID (and its corresponding CLSID) - ### another time, though... - - ### NOTE: ATL simply blows away the above three keys without the - ### potential checks that I describe. Assuming that defines the - ### "standard" then we have no additional changes necessary. - - -def GetRegisteredServerOption(clsid, optionName): - """Given a CLSID for a server and option name, return the option value""" - keyNameRoot = "CLSID\\%s\\%s" % (str(clsid), str(optionName)) - return _get_string(keyNameRoot) - - -def _get(ob, attr, default=None): - try: - return getattr(ob, attr) - except AttributeError: - pass - # look down sub-classes - try: - bases = ob.__bases__ - except AttributeError: - # ob is not a class - no probs. 
- return default - for base in bases: - val = _get(base, attr, None) - if val is not None: - return val - return default - - -def RegisterClasses(*classes, **flags): - quiet = "quiet" in flags and flags["quiet"] - debugging = "debug" in flags and flags["debug"] - for cls in classes: - clsid = cls._reg_clsid_ - progID = _get(cls, "_reg_progid_") - desc = _get(cls, "_reg_desc_", progID) - spec = _get(cls, "_reg_class_spec_") - verProgID = _get(cls, "_reg_verprogid_") - defIcon = _get(cls, "_reg_icon_") - threadingModel = _get(cls, "_reg_threading_", "both") - catids = _get(cls, "_reg_catids_", []) - options = _get(cls, "_reg_options_", {}) - policySpec = _get(cls, "_reg_policy_spec_") - clsctx = _get(cls, "_reg_clsctx_") - tlb_filename = _get(cls, "_reg_typelib_filename_") - # default to being a COM category only when not frozen. - addPyComCat = not _get(cls, "_reg_disable_pycomcat_", pythoncom.frozen != 0) - addnPath = None - if debugging: - # If the class has a debugging dispatcher specified, use it, otherwise - # use our default dispatcher. - dispatcherSpec = _get(cls, "_reg_debug_dispatcher_spec_") - if dispatcherSpec is None: - dispatcherSpec = "win32com.server.dispatcher.DefaultDebugDispatcher" - # And remember the debugging flag as servers may wish to use it at runtime. - debuggingDesc = "(for debugging)" - options["Debugging"] = "1" - else: - dispatcherSpec = _get(cls, "_reg_dispatcher_spec_") - debuggingDesc = "" - options["Debugging"] = "0" - - if spec is None: - moduleName = cls.__module__ - if moduleName == "__main__": - # Use argv[0] to determine the module name. - try: - # Use the win32api to find the case-sensitive name - moduleName = os.path.splitext( - win32api.FindFiles(sys.argv[0])[0][8] - )[0] - except (IndexError, win32api.error): - # Can't find the script file - the user must explicitely set the _reg_... attribute. 
- raise TypeError( - "Can't locate the script hosting the COM object - please set _reg_class_spec_ in your object" - ) - - spec = moduleName + "." + cls.__name__ - # Frozen apps don't need their directory on sys.path - if not pythoncom.frozen: - scriptDir = os.path.split(sys.argv[0])[0] - if not scriptDir: - scriptDir = "." - addnPath = win32api.GetFullPathName(scriptDir) - - RegisterServer( - clsid, - spec, - desc, - progID, - verProgID, - defIcon, - threadingModel, - policySpec, - catids, - options, - addPyComCat, - dispatcherSpec, - clsctx, - addnPath, - ) - if not quiet: - print("Registered:", progID or spec, debuggingDesc) - # Register the typelibrary - if tlb_filename: - tlb_filename = os.path.abspath(tlb_filename) - typelib = pythoncom.LoadTypeLib(tlb_filename) - pythoncom.RegisterTypeLib(typelib, tlb_filename) - if not quiet: - print("Registered type library:", tlb_filename) - extra = flags.get("finalize_register") - if extra: - extra() - - -def UnregisterClasses(*classes, **flags): - quiet = "quiet" in flags and flags["quiet"] - for cls in classes: - clsid = cls._reg_clsid_ - progID = _get(cls, "_reg_progid_") - verProgID = _get(cls, "_reg_verprogid_") - customKeys = _get(cls, "_reg_remove_keys_") - unregister_typelib = _get(cls, "_reg_typelib_filename_") is not None - - UnregisterServer(clsid, progID, verProgID, customKeys) - if not quiet: - print("Unregistered:", progID or str(clsid)) - if unregister_typelib: - tlb_guid = _get(cls, "_typelib_guid_") - if tlb_guid is None: - # I guess I could load the typelib, but they need the GUID anyway. 
- print("Have typelib filename, but no GUID - can't unregister") - else: - major, minor = _get(cls, "_typelib_version_", (1, 0)) - lcid = _get(cls, "_typelib_lcid_", 0) - try: - pythoncom.UnRegisterTypeLib(tlb_guid, major, minor, lcid) - if not quiet: - print("Unregistered type library") - except pythoncom.com_error: - pass - - extra = flags.get("finalize_unregister") - if extra: - extra() - - -# -# Unregister info is for installers or external uninstallers. -# The WISE installer, for example firstly registers the COM server, -# then queries for the Unregister info, appending it to its -# install log. Uninstalling the package will the uninstall the server -def UnregisterInfoClasses(*classes, **flags): - ret = [] - for cls in classes: - clsid = cls._reg_clsid_ - progID = _get(cls, "_reg_progid_") - verProgID = _get(cls, "_reg_verprogid_") - customKeys = _get(cls, "_reg_remove_keys_") - - ret = ret + GetUnregisterServerKeys(clsid, progID, verProgID, customKeys) - return ret - - -# Attempt to 're-execute' our current process with elevation. -def ReExecuteElevated(flags): - import tempfile - - import win32event # we've already checked we are running XP above - import win32process - import winxpgui - from win32com.shell import shellcon - from win32com.shell.shell import ShellExecuteEx - - if not flags["quiet"]: - print("Requesting elevation and retrying...") - new_params = " ".join(['"' + a + '"' for a in sys.argv]) - # If we aren't already in unattended mode, we want our sub-process to - # be. - if not flags["unattended"]: - new_params += " --unattended" - # specifying the parent means the dialog is centered over our window, - # which is a good usability clue. - # hwnd is unlikely on the command-line, but flags may come from elsewhere - hwnd = flags.get("hwnd", None) - if hwnd is None: - try: - hwnd = winxpgui.GetConsoleWindow() - except winxpgui.error: - hwnd = 0 - # Redirect output so we give the user some clue what went wrong. 
This - # also means we need to use COMSPEC. However, the "current directory" - # appears to end up ignored - so we execute things via a temp batch file. - tempbase = tempfile.mktemp("pycomserverreg") - outfile = tempbase + ".out" - batfile = tempbase + ".bat" - - # If registering from pythonwin, need to run python console instead since - # pythonwin will just open script for editting - current_exe = os.path.split(sys.executable)[1].lower() - exe_to_run = None - if current_exe == "pythonwin.exe": - exe_to_run = os.path.join(sys.prefix, "python.exe") - elif current_exe == "pythonwin_d.exe": - exe_to_run = os.path.join(sys.prefix, "python_d.exe") - if not exe_to_run or not os.path.exists(exe_to_run): - exe_to_run = sys.executable - - try: - batf = open(batfile, "w") - try: - cwd = os.getcwd() - print("@echo off", file=batf) - # nothing is 'inherited' by the elevated process, including the - # environment. I wonder if we need to set more? - print("set PYTHONPATH=%s" % os.environ.get("PYTHONPATH", ""), file=batf) - # may be on a different drive - select that before attempting to CD. - print(os.path.splitdrive(cwd)[0], file=batf) - print('cd "%s"' % os.getcwd(), file=batf) - print( - '%s %s > "%s" 2>&1' - % (win32api.GetShortPathName(exe_to_run), new_params, outfile), - file=batf, - ) - finally: - batf.close() - executable = os.environ.get("COMSPEC", "cmd.exe") - rc = ShellExecuteEx( - hwnd=hwnd, - fMask=shellcon.SEE_MASK_NOCLOSEPROCESS, - lpVerb="runas", - lpFile=executable, - lpParameters='/C "%s"' % batfile, - nShow=win32con.SW_SHOW, - ) - hproc = rc["hProcess"] - win32event.WaitForSingleObject(hproc, win32event.INFINITE) - exit_code = win32process.GetExitCodeProcess(hproc) - outf = open(outfile) - try: - output = outf.read() - finally: - outf.close() - - if exit_code: - # Even if quiet you get to see this message. - print("Error: registration failed (exit code %s)." 
% exit_code) - # if we are quiet then the output if likely to already be nearly - # empty, so always print it. - print(output, end=" ") - finally: - for f in (outfile, batfile): - try: - os.unlink(f) - except os.error as exc: - print("Failed to remove tempfile '%s': %s" % (f, exc)) - - -def UseCommandLine(*classes, **flags): - unregisterInfo = "--unregister_info" in sys.argv - unregister = "--unregister" in sys.argv - flags["quiet"] = flags.get("quiet", 0) or "--quiet" in sys.argv - flags["debug"] = flags.get("debug", 0) or "--debug" in sys.argv - flags["unattended"] = flags.get("unattended", 0) or "--unattended" in sys.argv - if unregisterInfo: - return UnregisterInfoClasses(*classes, **flags) - try: - if unregister: - UnregisterClasses(*classes, **flags) - else: - RegisterClasses(*classes, **flags) - except win32api.error as exc: - # If we are on xp+ and have "access denied", retry using - # ShellExecuteEx with 'runas' verb to force elevation (vista) and/or - # admin login dialog (vista/xp) - if ( - flags["unattended"] - or exc.winerror != winerror.ERROR_ACCESS_DENIED - or sys.getwindowsversion()[0] < 5 - ): - raise - ReExecuteElevated(flags) - - -def RegisterPyComCategory(): - """Register the Python COM Server component category.""" - regCat = _cat_registrar() - regCat.RegisterCategories([(CATID_PythonCOMServer, 0x0409, "Python COM Server")]) - - -if not pythoncom.frozen: - try: - win32api.RegQueryValue( - win32con.HKEY_CLASSES_ROOT, - "Component Categories\\%s" % CATID_PythonCOMServer, - ) - except win32api.error: - try: - RegisterPyComCategory() - except pythoncom.error: # Error with the COM category manager - oh well. - pass diff --git a/lib/win32com/server/util.py b/lib/win32com/server/util.py deleted file mode 100644 index c46dd375..00000000 --- a/lib/win32com/server/util.py +++ /dev/null @@ -1,229 +0,0 @@ -""" General Server side utilities -""" -import pythoncom -import winerror - -from . 
import policy -from .exception import COMException - - -def wrap(ob, iid=None, usePolicy=None, useDispatcher=None): - """Wraps an object in a PyGDispatch gateway. - - Returns a client side PyI{iid} interface. - - Interface and gateway support must exist for the specified IID, as - the QueryInterface() method is used. - - """ - if usePolicy is None: - usePolicy = policy.DefaultPolicy - if useDispatcher == 1: # True will also work here. - import win32com.server.dispatcher - - useDispatcher = win32com.server.dispatcher.DefaultDebugDispatcher - if useDispatcher is None or useDispatcher == 0: - ob = usePolicy(ob) - else: - ob = useDispatcher(usePolicy, ob) - - # get a PyIDispatch, which interfaces to PyGDispatch - ob = pythoncom.WrapObject(ob) - if iid is not None: - ob = ob.QueryInterface(iid) # Ask the PyIDispatch if it supports it? - return ob - - -def unwrap(ob): - """Unwraps an interface. - - Given an interface which wraps up a Gateway, return the object behind - the gateway. - """ - ob = pythoncom.UnwrapObject(ob) - # see if the object is a dispatcher - if hasattr(ob, "policy"): - ob = ob.policy - return ob._obj_ - - -class ListEnumerator: - """A class to expose a Python sequence as an EnumVARIANT. - - Create an instance of this class passing a sequence (list, tuple, or - any sequence protocol supporting object) and it will automatically - support the EnumVARIANT interface for the object. - - See also the @NewEnum@ function, which can be used to turn the - instance into an actual COM server. 
- """ - - _public_methods_ = ["Next", "Skip", "Reset", "Clone"] - - def __init__(self, data, index=0, iid=pythoncom.IID_IEnumVARIANT): - self._list_ = data - self.index = index - self._iid_ = iid - - def _query_interface_(self, iid): - if iid == self._iid_: - return 1 - - def Next(self, count): - result = self._list_[self.index : self.index + count] - self.Skip(count) - return result - - def Skip(self, count): - end = self.index + count - if end > len(self._list_): - end = len(self._list_) - self.index = end - - def Reset(self): - self.index = 0 - - def Clone(self): - return self._wrap(self.__class__(self._list_, self.index)) - - def _wrap(self, ob): - return wrap(ob) - - -class ListEnumeratorGateway(ListEnumerator): - """A List Enumerator which wraps a sequence's items in gateways. - - If a sequence contains items (objects) that have not been wrapped for - return through the COM layers, then a ListEnumeratorGateway can be - used to wrap those items before returning them (from the Next() method). - - See also the @ListEnumerator@ class and the @NewEnum@ function. - """ - - def Next(self, count): - result = self._list_[self.index : self.index + count] - self.Skip(count) - return map(self._wrap, result) - - -def NewEnum( - seq, - cls=ListEnumerator, - iid=pythoncom.IID_IEnumVARIANT, - usePolicy=None, - useDispatcher=None, -): - """Creates a new enumerator COM server. - - This function creates a new COM Server that implements the - IID_IEnumVARIANT interface. - - A COM server that can enumerate the passed in sequence will be - created, then wrapped up for return through the COM framework. - Optionally, a custom COM server for enumeration can be passed - (the default is @ListEnumerator@), and the specific IEnum - interface can be specified. - """ - ob = cls(seq, iid=iid) - return wrap(ob, iid, usePolicy=usePolicy, useDispatcher=useDispatcher) - - -class Collection: - "A collection of VARIANT values." 
- - _public_methods_ = ["Item", "Count", "Add", "Remove", "Insert"] - - def __init__(self, data=None, readOnly=0): - if data is None: - data = [] - self.data = data - - # disable Add/Remove if read-only. note that we adjust _public_methods_ - # on this instance only. - if readOnly: - self._public_methods_ = ["Item", "Count"] - - # This method is also used as the "default" method. - # Thus "print ob" will cause this to be called with zero - # params. Handle this slightly more elegantly here. - # Ideally the policy should handle this. - def Item(self, *args): - if len(args) != 1: - raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT) - - try: - return self.data[args[0]] - except IndexError as desc: - raise COMException(scode=winerror.DISP_E_BADINDEX, desc=str(desc)) - - _value_ = Item - - def Count(self): - return len(self.data) - - def Add(self, value): - self.data.append(value) - - def Remove(self, index): - try: - del self.data[index] - except IndexError as desc: - raise COMException(scode=winerror.DISP_E_BADINDEX, desc=str(desc)) - - def Insert(self, index, value): - try: - index = int(index) - except (ValueError, TypeError): - raise COMException(scode=winerror.DISP_E_TYPEMISMATCH) - self.data.insert(index, value) - - def _NewEnum(self): - return NewEnum(self.data) - - -def NewCollection(seq, cls=Collection): - """Creates a new COM collection object - - This function creates a new COM Server that implements the - common collection protocols, including enumeration. (_NewEnum) - - A COM server that can enumerate the passed in sequence will be - created, then wrapped up for return through the COM framework. - Optionally, a custom COM server for enumeration can be passed - (the default is @Collection@). 
- """ - return pythoncom.WrapObject( - policy.DefaultPolicy(cls(seq)), pythoncom.IID_IDispatch, pythoncom.IID_IDispatch - ) - - -class FileStream: - _public_methods_ = ["Read", "Write", "Clone", "CopyTo", "Seek"] - _com_interfaces_ = [pythoncom.IID_IStream] - - def __init__(self, file): - self.file = file - - def Read(self, amount): - return self.file.read(amount) - - def Write(self, data): - self.file.write(data) - return len(data) - - def Clone(self): - return self._wrap(self.__class__(self.file)) - - def CopyTo(self, dest, cb): - data = self.file.read(cb) - cbread = len(data) - dest.Write(data) ## ??? Write does not currently return the length ??? - return cbread, cbread - - def Seek(self, offset, origin): - # how convient that the 'origin' values are the same as the CRT :) - self.file.seek(offset, origin) - return self.file.tell() - - def _wrap(self, ob): - return wrap(ob) diff --git a/lib/win32com/servers/PythonTools.py b/lib/win32com/servers/PythonTools.py deleted file mode 100644 index 7f8f4d76..00000000 --- a/lib/win32com/servers/PythonTools.py +++ /dev/null @@ -1,47 +0,0 @@ -import sys -import time - - -class Tools: - _public_methods_ = ["reload", "adddir", "echo", "sleep"] - - def reload(self, module): - if module in sys.modules: - from importlib import reload - - reload(sys.modules[module]) - return "reload succeeded." - return "no reload performed." 
- - def adddir(self, dir): - if type(dir) == type(""): - sys.path.append(dir) - return str(sys.path) - - def echo(self, arg): - return repr(arg) - - def sleep(self, t): - time.sleep(t) - - -if __name__ == "__main__": - from win32com.server.register import RegisterServer, UnregisterServer - - clsid = "{06ce7630-1d81-11d0-ae37-c2fa70000000}" - progid = "Python.Tools" - verprogid = "Python.Tools.1" - if "--unregister" in sys.argv: - print("Unregistering...") - UnregisterServer(clsid, progid, verprogid) - print("Unregistered OK") - else: - print("Registering COM server...") - RegisterServer( - clsid, - "win32com.servers.PythonTools.Tools", - "Python Tools", - progid, - verprogid, - ) - print("Class registered.") diff --git a/lib/win32com/servers/__init__.py b/lib/win32com/servers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/win32com/servers/dictionary.py b/lib/win32com/servers/dictionary.py deleted file mode 100644 index 28596b81..00000000 --- a/lib/win32com/servers/dictionary.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Python.Dictionary COM Server. - -This module implements a simple COM server that acts much like a Python -dictionary or as a standard string-keyed VB Collection. The keys of -the dictionary are strings and are case-insensitive. - -It uses a highly customized policy to fine-tune the behavior exposed to -the COM client. - -The object exposes the following properties: - - int Count (readonly) - VARIANT Item(BSTR key) (propget for Item) - Item(BSTR key, VARIANT value) (propput for Item) - - Note that 'Item' is the default property, so the following forms of - VB code are acceptable: - - set ob = CreateObject("Python.Dictionary") - ob("hello") = "there" - ob.Item("hi") = ob("HELLO") - -All keys are defined, returning VT_NULL (None) if a value has not been -stored. To delete a key, simply assign VT_NULL to the key. - -The object responds to the _NewEnum method by returning an enumerator over -the dictionary's keys. 
This allows for the following type of VB code: - - for each name in ob - debug.print name, ob(name) - next -""" - - -import pythoncom -import pywintypes -import winerror -from pythoncom import DISPATCH_METHOD, DISPATCH_PROPERTYGET -from win32com.server import policy, util -from win32com.server.exception import COMException -from winerror import S_OK - - -class DictionaryPolicy(policy.BasicWrapPolicy): - ### BasicWrapPolicy looks for this - _com_interfaces_ = [] - - ### BasicWrapPolicy looks for this - _name_to_dispid_ = { - "item": pythoncom.DISPID_VALUE, - "_newenum": pythoncom.DISPID_NEWENUM, - "count": 1, - } - - ### Auto-Registration process looks for these... - _reg_desc_ = "Python Dictionary" - _reg_clsid_ = "{39b61048-c755-11d0-86fa-00c04fc2e03e}" - _reg_progid_ = "Python.Dictionary" - _reg_verprogid_ = "Python.Dictionary.1" - _reg_policy_spec_ = "win32com.servers.dictionary.DictionaryPolicy" - - def _CreateInstance_(self, clsid, reqIID): - self._wrap_({}) - return pythoncom.WrapObject(self, reqIID) - - def _wrap_(self, ob): - self._obj_ = ob # ob should be a dictionary - - def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): - if dispid == 0: # item - l = len(args) - if l < 1: - raise COMException( - desc="not enough parameters", scode=winerror.DISP_E_BADPARAMCOUNT - ) - - key = args[0] - if type(key) not in [str, str]: - ### the nArgErr thing should be 0-based, not reversed... 
sigh - raise COMException( - desc="Key must be a string", scode=winerror.DISP_E_TYPEMISMATCH - ) - - key = key.lower() - - if wFlags & (DISPATCH_METHOD | DISPATCH_PROPERTYGET): - if l > 1: - raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT) - try: - return self._obj_[key] - except KeyError: - return None # unknown keys return None (VT_NULL) - - if l != 2: - raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT) - if args[1] is None: - # delete a key when None is assigned to it - try: - del self._obj_[key] - except KeyError: - pass - else: - self._obj_[key] = args[1] - return S_OK - - if dispid == 1: # count - if not wFlags & DISPATCH_PROPERTYGET: - raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND) # not found - if len(args) != 0: - raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT) - return len(self._obj_) - - if dispid == pythoncom.DISPID_NEWENUM: - return util.NewEnum(list(self._obj_.keys())) - - raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND) - - def _getidsofnames_(self, names, lcid): - ### this is a copy of MappedWrapPolicy._getidsofnames_ ... - - name = names[0].lower() - try: - return (self._name_to_dispid_[name],) - except KeyError: - raise COMException( - scode=winerror.DISP_E_MEMBERNOTFOUND, desc="Member not found" - ) - - -def Register(): - from win32com.server.register import UseCommandLine - - return UseCommandLine(DictionaryPolicy) - - -if __name__ == "__main__": - Register() diff --git a/lib/win32com/servers/interp.py b/lib/win32com/servers/interp.py deleted file mode 100644 index b15cede7..00000000 --- a/lib/win32com/servers/interp.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Python.Interpreter COM Server - - This module implements a very very simple COM server which - exposes the Python interpreter. - - This is designed more as a demonstration than a full blown COM server. - General functionality and Error handling are both limited. - - To use this object, ensure it is registered by running this module - from Python.exe. 
Then, from Visual Basic, use "CreateObject('Python.Interpreter')", - and call its methods! -""" - -import winerror -from win32com.server.exception import Exception - - -# Expose the Python interpreter. -class Interpreter: - """The interpreter object exposed via COM""" - - _public_methods_ = ["Exec", "Eval"] - # All registration stuff to support fully automatic register/unregister - _reg_verprogid_ = "Python.Interpreter.2" - _reg_progid_ = "Python.Interpreter" - _reg_desc_ = "Python Interpreter" - _reg_clsid_ = "{30BD3490-2632-11cf-AD5B-524153480001}" - _reg_class_spec_ = "win32com.servers.interp.Interpreter" - - def __init__(self): - self.dict = {} - - def Eval(self, exp): - """Evaluate an expression.""" - if type(exp) != str: - raise Exception(desc="Must be a string", scode=winerror.DISP_E_TYPEMISMATCH) - - return eval(str(exp), self.dict) - - def Exec(self, exp): - """Execute a statement.""" - if type(exp) != str: - raise Exception(desc="Must be a string", scode=winerror.DISP_E_TYPEMISMATCH) - exec(str(exp), self.dict) - - -def Register(): - import win32com.server.register - - return win32com.server.register.UseCommandLine(Interpreter) - - -if __name__ == "__main__": - print("Registering COM server...") - Register() diff --git a/lib/win32com/servers/perfmon.py b/lib/win32com/servers/perfmon.py deleted file mode 100644 index 861c6940..00000000 --- a/lib/win32com/servers/perfmon.py +++ /dev/null @@ -1,34 +0,0 @@ -"""A COM Server which exposes the NT Performance monitor in a very rudimentary way - -Usage from VB: - set ob = CreateObject("Python.PerfmonQuery") - freeBytes = ob.Query("Memory", "Available Bytes") -""" -import pythoncom -import win32pdhutil -import winerror -from win32com.server import exception, register - - -class PerfMonQuery: - _reg_verprogid_ = "Python.PerfmonQuery.1" - _reg_progid_ = "Python.PerfmonQuery" - _reg_desc_ = "Python Performance Monitor query object" - _reg_clsid_ = "{64cef7a0-8ece-11d1-a65a-00aa00125a98}" - _reg_class_spec_ = 
"win32com.servers.perfmon.PerfMonQuery" - _public_methods_ = ["Query"] - - def Query(self, object, counter, instance=None, machine=None): - try: - return win32pdhutil.GetPerformanceAttributes( - object, counter, instance, machine=machine - ) - except win32pdhutil.error as exc: - raise exception.Exception(desc=exc.strerror) - except TypeError as desc: - raise exception.Exception(desc=desc, scode=winerror.DISP_E_TYPEMISMATCH) - - -if __name__ == "__main__": - print("Registering COM server...") - register.UseCommandLine(PerfMonQuery) diff --git a/lib/win32com/servers/test_pycomtest.py b/lib/win32com/servers/test_pycomtest.py deleted file mode 100644 index da606efa..00000000 --- a/lib/win32com/servers/test_pycomtest.py +++ /dev/null @@ -1,182 +0,0 @@ -# This is part of the Python test suite. -# The object is registered when you first run the test suite. -# (and hopefully unregistered once done ;-) - -import pythoncom -import winerror - -# Ensure the vtables in the tlb are known. -from win32com import universal -from win32com.client import constants, gencache -from win32com.server.exception import COMException -from win32com.server.util import wrap - -pythoncom.__future_currency__ = True -# We use the constants from the module, so must insist on a gencache. 
-# Otherwise, use of gencache is not necessary (tho still advised) -gencache.EnsureModule("{6BCDCB60-5605-11D0-AE5F-CADD4C000000}", 0, 1, 1) - - -class PyCOMTest: - _typelib_guid_ = "{6BCDCB60-5605-11D0-AE5F-CADD4C000000}" - _typelib_version = 1, 0 - _com_interfaces_ = ["IPyCOMTest"] - _reg_clsid_ = "{e743d9cd-cb03-4b04-b516-11d3a81c1597}" - _reg_progid_ = "Python.Test.PyCOMTest" - - def DoubleString(self, str): - return str * 2 - - def DoubleInOutString(self, str): - return str * 2 - - def Fire(self, nID): - raise COMException(hresult=winerror.E_NOTIMPL) - - def GetLastVarArgs(self): - raise COMException(hresult=winerror.E_NOTIMPL) - - def GetMultipleInterfaces(self, outinterface1, outinterface2): - raise COMException(hresult=winerror.E_NOTIMPL) - - def GetSafeArrays(self, attrs, attrs2, ints): - raise COMException(hresult=winerror.E_NOTIMPL) - - def GetSetDispatch(self, indisp): - raise COMException(hresult=winerror.E_NOTIMPL) - - # Result is of type IPyCOMTest - def GetSetInterface(self, ininterface): - return wrap(self) - - def GetSetVariant(self, indisp): - return indisp - - def TestByRefVariant(self, v): - return v * 2 - - def TestByRefString(self, v): - return v * 2 - - # Result is of type IPyCOMTest - def GetSetInterfaceArray(self, ininterface): - raise COMException(hresult=winerror.E_NOTIMPL) - - def GetSetUnknown(self, inunk): - raise COMException(hresult=winerror.E_NOTIMPL) - - # Result is of type ISimpleCounter - def GetSimpleCounter(self): - raise COMException(hresult=winerror.E_NOTIMPL) - - def GetSimpleSafeArray(self, ints): - raise COMException(hresult=winerror.E_NOTIMPL) - - def GetStruct(self): - raise COMException(hresult=winerror.E_NOTIMPL) - - def SetIntSafeArray(self, ints): - return len(ints) - - def SetLongLongSafeArray(self, ints): - return len(ints) - - def SetULongLongSafeArray(self, ints): - return len(ints) - - def SetBinSafeArray(self, buf): - return len(buf) - - def SetVarArgs(self, *args): - raise 
COMException(hresult=winerror.E_NOTIMPL) - - def SetVariantSafeArray(self, vars): - raise COMException(hresult=winerror.E_NOTIMPL) - - def Start(self): - raise COMException(hresult=winerror.E_NOTIMPL) - - def Stop(self, nID): - raise COMException(hresult=winerror.E_NOTIMPL) - - def StopAll(self): - raise COMException(hresult=winerror.E_NOTIMPL) - - def TakeByRefDispatch(self, inout): - raise COMException(hresult=winerror.E_NOTIMPL) - - def TakeByRefTypedDispatch(self, inout): - raise COMException(hresult=winerror.E_NOTIMPL) - - def Test(self, key, inval): - return not inval - - def Test2(self, inval): - return inval - - def Test3(self, inval): - raise COMException(hresult=winerror.E_NOTIMPL) - - def Test4(self, inval): - raise COMException(hresult=winerror.E_NOTIMPL) - - def Test5(self, inout): - if inout == constants.TestAttr1: - return constants.TestAttr1_1 - elif inout == constants.TestAttr1_1: - return constants.TestAttr1 - else: - return -1 - - def Test6(self, inval): - return inval - - def TestInOut(self, fval, bval, lval): - return winerror.S_OK, fval * 2, not bval, lval * 2 - - def TestOptionals(self, strArg="def", sval=0, lval=1, dval=3.1400001049041748): - raise COMException(hresult=winerror.E_NOTIMPL) - - def TestOptionals2(self, dval, strval="", sval=1): - raise COMException(hresult=winerror.E_NOTIMPL) - - def CheckVariantSafeArray(self, data): - return 1 - - def LongProp(self): - return self.longval - - def SetLongProp(self, val): - self.longval = val - - def ULongProp(self): - return self.ulongval - - def SetULongProp(self, val): - self.ulongval = val - - def IntProp(self): - return self.intval - - def SetIntProp(self, val): - self.intval = val - - -class PyCOMTestMI(PyCOMTest): - _typelib_guid_ = "{6BCDCB60-5605-11D0-AE5F-CADD4C000000}" - _typelib_version = 1, 0 - # Interfaces with a interface name, a real IID, and an IID as a string - _com_interfaces_ = [ - "IPyCOMTest", - pythoncom.IID_IStream, - str(pythoncom.IID_IStorage), - ] - _reg_clsid_ = 
"{F506E9A1-FB46-4238-A597-FA4EB69787CA}" - _reg_progid_ = "Python.Test.PyCOMTestMI" - - -if __name__ == "__main__": - import win32com.server.register - - win32com.server.register.UseCommandLine(PyCOMTest) - win32com.server.register.UseCommandLine(PyCOMTestMI) diff --git a/lib/win32com/storagecon.py b/lib/win32com/storagecon.py deleted file mode 100644 index 3ed52902..00000000 --- a/lib/win32com/storagecon.py +++ /dev/null @@ -1,142 +0,0 @@ -"""Constants related to IStorage and related interfaces - -This file was generated by h2py from d:\msdev\include\objbase.h -then hand edited, a few extra constants added, etc. -""" - -STGC_DEFAULT = 0 -STGC_OVERWRITE = 1 -STGC_ONLYIFCURRENT = 2 -STGC_DANGEROUSLYCOMMITMERELYTODISKCACHE = 4 -STGC_CONSOLIDATE = 8 - -STGTY_STORAGE = 1 -STGTY_STREAM = 2 -STGTY_LOCKBYTES = 3 -STGTY_PROPERTY = 4 -STREAM_SEEK_SET = 0 -STREAM_SEEK_CUR = 1 -STREAM_SEEK_END = 2 - -LOCK_WRITE = 1 -LOCK_EXCLUSIVE = 2 -LOCK_ONLYONCE = 4 - -# Generated as from here. - -CWCSTORAGENAME = 32 -STGM_DIRECT = 0x00000000 -STGM_TRANSACTED = 0x00010000 -STGM_SIMPLE = 0x08000000 -STGM_READ = 0x00000000 -STGM_WRITE = 0x00000001 -STGM_READWRITE = 0x00000002 -STGM_SHARE_DENY_NONE = 0x00000040 -STGM_SHARE_DENY_READ = 0x00000030 -STGM_SHARE_DENY_WRITE = 0x00000020 -STGM_SHARE_EXCLUSIVE = 0x00000010 -STGM_PRIORITY = 0x00040000 -STGM_DELETEONRELEASE = 0x04000000 -STGM_NOSCRATCH = 0x00100000 -STGM_CREATE = 0x00001000 -STGM_CONVERT = 0x00020000 -STGM_FAILIFTHERE = 0x00000000 -STGM_NOSNAPSHOT = 0x00200000 -ASYNC_MODE_COMPATIBILITY = 0x00000001 -ASYNC_MODE_DEFAULT = 0x00000000 -STGTY_REPEAT = 0x00000100 -STG_TOEND = 0xFFFFFFFF -STG_LAYOUT_SEQUENTIAL = 0x00000000 -STG_LAYOUT_INTERLEAVED = 0x00000001 - -## access rights used with COM server ACL's -COM_RIGHTS_EXECUTE = 1 -COM_RIGHTS_EXECUTE_LOCAL = 2 -COM_RIGHTS_EXECUTE_REMOTE = 4 -COM_RIGHTS_ACTIVATE_LOCAL = 8 -COM_RIGHTS_ACTIVATE_REMOTE = 16 - -STGFMT_DOCUMENT = 0 -STGFMT_STORAGE = 0 -STGFMT_NATIVE = 1 -STGFMT_FILE = 3 -STGFMT_ANY 
= 4 -STGFMT_DOCFILE = 5 - -PID_DICTIONARY = 0 -PID_CODEPAGE = 1 -PID_FIRST_USABLE = 2 -PID_FIRST_NAME_DEFAULT = 4095 - -PID_LOCALE = -2147483648 -PID_MODIFY_TIME = -2147483647 -PID_SECURITY = -2147483646 -PID_BEHAVIOR = -2147483645 -PID_ILLEGAL = -1 -PID_MIN_READONLY = -2147483648 -PID_MAX_READONLY = -1073741825 - -## DiscardableInformation -PIDDI_THUMBNAIL = 0x00000002 - -## SummaryInformation -PIDSI_TITLE = 2 -PIDSI_SUBJECT = 3 -PIDSI_AUTHOR = 4 -PIDSI_KEYWORDS = 5 -PIDSI_COMMENTS = 6 -PIDSI_TEMPLATE = 7 -PIDSI_LASTAUTHOR = 8 -PIDSI_REVNUMBER = 9 -PIDSI_EDITTIME = 10 -PIDSI_LASTPRINTED = 11 -PIDSI_CREATE_DTM = 12 -PIDSI_LASTSAVE_DTM = 13 -PIDSI_PAGECOUNT = 14 -PIDSI_WORDCOUNT = 15 -PIDSI_CHARCOUNT = 16 -PIDSI_THUMBNAIL = 17 -PIDSI_APPNAME = 18 -PIDSI_DOC_SECURITY = 19 - -## DocSummaryInformation -PIDDSI_CATEGORY = 2 -PIDDSI_PRESFORMAT = 3 -PIDDSI_BYTECOUNT = 4 -PIDDSI_LINECOUNT = 5 -PIDDSI_PARCOUNT = 6 -PIDDSI_SLIDECOUNT = 7 -PIDDSI_NOTECOUNT = 8 -PIDDSI_HIDDENCOUNT = 9 -PIDDSI_MMCLIPCOUNT = 10 -PIDDSI_SCALE = 11 -PIDDSI_HEADINGPAIR = 12 -PIDDSI_DOCPARTS = 13 -PIDDSI_MANAGER = 14 -PIDDSI_COMPANY = 15 -PIDDSI_LINKSDIRTY = 16 - - -## MediaFileSummaryInfo -PIDMSI_EDITOR = 2 -PIDMSI_SUPPLIER = 3 -PIDMSI_SOURCE = 4 -PIDMSI_SEQUENCE_NO = 5 -PIDMSI_PROJECT = 6 -PIDMSI_STATUS = 7 -PIDMSI_OWNER = 8 -PIDMSI_RATING = 9 -PIDMSI_PRODUCTION = 10 -PIDMSI_COPYRIGHT = 11 - -## PROPSETFLAG enum -PROPSETFLAG_DEFAULT = 0 -PROPSETFLAG_NONSIMPLE = 1 -PROPSETFLAG_ANSI = 2 -PROPSETFLAG_UNBUFFERED = 4 -PROPSETFLAG_CASE_SENSITIVE = 8 - -## STGMOVE enum -STGMOVE_MOVE = 0 -STGMOVE_COPY = 1 -STGMOVE_SHALLOWCOPY = 2 diff --git a/lib/win32com/test/GenTestScripts.py b/lib/win32com/test/GenTestScripts.py deleted file mode 100644 index 73bb2641..00000000 --- a/lib/win32com/test/GenTestScripts.py +++ /dev/null @@ -1,95 +0,0 @@ -# -# Generate scripts needed for serious testing! 
-# -import os -import sys - -import pythoncom -import win32com -import win32com.client.makepy -import win32com.test - -genList = [ - ("msword8", "{00020905-0000-0000-C000-000000000046}", 1033, 8, 0), -] - -genDir = "Generated4Test" - - -def GetGenPath(): - import win32api - - return os.path.join(win32api.GetFullPathName(win32com.test.__path__[0]), genDir) - - -def GenerateFromRegistered(fname, *loadArgs): - # tlb = apply(pythoncom.LoadRegTypeLib, loadArgs) - genPath = GetGenPath() - try: - os.stat(genPath) - except os.error: - os.mkdir(genPath) - # Ensure an __init__ exists. - open(os.path.join(genPath, "__init__.py"), "w").close() - print(fname, ": generating -", end=" ") - f = open(os.path.join(genPath, fname + ".py"), "w") - win32com.client.makepy.GenerateFromTypeLibSpec( - loadArgs, f, bQuiet=1, bGUIProgress=1 - ) - f.close() - print("compiling -", end=" ") - fullModName = "win32com.test.%s.%s" % (genDir, fname) - exec("import " + fullModName) - # Inject the generated module as a top level module. - sys.modules[fname] = sys.modules[fullModName] - print("done") - - -def GenerateAll(): - for args in genList: - try: - GenerateFromRegistered(*args) - except KeyboardInterrupt: - print("** Interrupted ***") - break - except pythoncom.com_error: - print("** Could not generate test code for ", args[0]) - - -def CleanAll(): - print("Cleaning generated test scripts...") - try: # Clear exceptions! 
- 1 / 0 - except: - pass - genPath = GetGenPath() - for args in genList: - try: - name = args[0] + ".py" - os.unlink(os.path.join(genPath, name)) - except os.error as details: - if type(details) == type(()) and details[0] != 2: - print("Could not deleted generated", name, details) - try: - name = args[0] + ".pyc" - os.unlink(os.path.join(genPath, name)) - except os.error as details: - if type(details) == type(()) and details[0] != 2: - print("Could not deleted generated", name, details) - try: - os.unlink(os.path.join(genPath, "__init__.py")) - except: - pass - try: - os.unlink(os.path.join(genPath, "__init__.pyc")) - except: - pass - try: - os.rmdir(genPath) - except os.error as details: - print("Could not delete test directory -", details) - - -if __name__ == "__main__": - GenerateAll() - CleanAll() diff --git a/lib/win32com/test/Testpys.sct b/lib/win32com/test/Testpys.sct deleted file mode 100644 index dd9b4e00..00000000 --- a/lib/win32com/test/Testpys.sct +++ /dev/null @@ -1,64 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/win32com/test/__init__.py b/lib/win32com/test/__init__.py deleted file mode 100644 index cb6d7f47..00000000 --- a/lib/win32com/test/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Empty file to designate a Python package diff --git a/lib/win32com/test/daodump.py b/lib/win32com/test/daodump.py deleted file mode 100644 index c222087a..00000000 --- a/lib/win32com/test/daodump.py +++ /dev/null @@ -1,90 +0,0 @@ -# import dao3032 -# No longer imported here - callers responsibility to load -# -import win32com.client - - -def DumpDB(db, bDeep=1): - # MUST be a DB object. - DumpTables(db, bDeep) - DumpRelations(db, bDeep) - DumpAllContainers(db, bDeep) - - -def DumpTables(db, bDeep=1): - for tab in db.TableDefs: - tab = db.TableDefs(tab.Name) # Redundant lookup for testing purposes. 
- print( - "Table %s - Fields: %d, Attributes:%d" - % (tab.Name, len(tab.Fields), tab.Attributes) - ) - if bDeep: - DumpFields(tab.Fields) - - -def DumpFields(fields): - for field in fields: - print( - " %s, size=%d, reqd=%d, type=%d, defVal=%s" - % ( - field.Name, - field.Size, - field.Required, - field.Type, - str(field.DefaultValue), - ) - ) - - -def DumpRelations(db, bDeep=1): - for relation in db.Relations: - print( - "Relation %s - %s->%s" - % (relation.Name, relation.Table, relation.ForeignTable) - ) - - -#### This dont work. TLB says it is a Fields collection, but apparently not! -#### if bDeep: DumpFields(relation.Fields) - - -def DumpAllContainers(db, bDeep=1): - for cont in db.Containers: - print("Container %s - %d documents" % (cont.Name, len(cont.Documents))) - if bDeep: - DumpContainerDocuments(cont) - - -def DumpContainerDocuments(container): - for doc in container.Documents: - import time - - timeStr = time.ctime(int(doc.LastUpdated)) - print(" %s - updated %s (" % (doc.Name, timeStr), end=" ") - print(doc.LastUpdated, ")") # test the _print_ method? - - -def TestEngine(engine): - import sys - - if len(sys.argv) > 1: - dbName = sys.argv[1] - else: - dbName = "e:\\temp\\TestPython.mdb" - db = engine.OpenDatabase(dbName) - DumpDB(db) - - -def test(): - for progid in ("DAO.DBEngine.36", "DAO.DBEngine.35", "DAO.DBEngine.30"): - try: - ob = win32com.client.gencache.EnsureDispatch(progid) - except pythoncom.com_error: - print(progid, "does not seem to be installed") - else: - TestEngine(ob) - break - - -if __name__ == "__main__": - test() diff --git a/lib/win32com/test/errorSemantics.py b/lib/win32com/test/errorSemantics.py deleted file mode 100644 index 49ebae2f..00000000 --- a/lib/win32com/test/errorSemantics.py +++ /dev/null @@ -1,259 +0,0 @@ -# errorSemantics.py - -# Test the Python error handling semantics. 
Specifically: -# -# * When a Python COM object is called via IDispatch, the nominated -# scode is placed in the exception tuple, and the HRESULT is -# DISP_E_EXCEPTION -# * When the same interface is called via IWhatever, the -# nominated scode is returned directly (with the scode also -# reflected in the exception tuple) -# * In all cases, the description etc end up in the exception tuple -# * "Normal" Python exceptions resolve to an E_FAIL "internal error" - -import pythoncom -import winerror -from win32com.client import Dispatch -from win32com.server.exception import COMException -from win32com.server.util import wrap -from win32com.test.util import CaptureWriter - - -class error(Exception): - def __init__(self, msg, com_exception=None): - Exception.__init__(self, msg, str(com_exception)) - - -# Our COM server. -class TestServer: - _public_methods_ = ["Clone", "Commit", "LockRegion", "Read"] - _com_interfaces_ = [pythoncom.IID_IStream] - - def Clone(self): - raise COMException("Not today", scode=winerror.E_UNEXPECTED) - - def Commit(self, flags): - # Testing unicode: 1F600 '😀'; GRINNING FACE - # Use the 'name' just for fun! - if flags == 0: - # A non com-specific exception. - raise Exception("\N{GRINNING FACE}") - # An explicit com_error, which is a bit of an edge-case, but might happen if - # a COM server itself calls another COM object and it fails. - excepinfo = ( - winerror.E_UNEXPECTED, - "source", - "\N{GRINNING FACE}", - "helpfile", - 1, - winerror.E_FAIL, - ) - raise pythoncom.com_error(winerror.E_UNEXPECTED, "desc", excepinfo, None) - - -def test(): - # Call via a native interface. 
- com_server = wrap(TestServer(), pythoncom.IID_IStream) - try: - com_server.Clone() - raise error("Expecting this call to fail!") - except pythoncom.com_error as com_exc: - if com_exc.hresult != winerror.E_UNEXPECTED: - raise error( - "Calling the object natively did not yield the correct scode", com_exc - ) - exc = com_exc.excepinfo - if not exc or exc[-1] != winerror.E_UNEXPECTED: - raise error( - "The scode element of the exception tuple did not yield the correct scode", - com_exc, - ) - if exc[2] != "Not today": - raise error( - "The description in the exception tuple did not yield the correct string", - com_exc, - ) - cap = CaptureWriter() - try: - cap.capture() - try: - com_server.Commit(0) - finally: - cap.release() - raise error("Expecting this call to fail!") - except pythoncom.com_error as com_exc: - if com_exc.hresult != winerror.E_FAIL: - raise error("The hresult was not E_FAIL for an internal error", com_exc) - if com_exc.excepinfo[1] != "Python COM Server Internal Error": - raise error( - "The description in the exception tuple did not yield the correct string", - com_exc, - ) - # Check we saw a traceback in stderr - if cap.get_captured().find("Traceback") < 0: - raise error("Could not find a traceback in stderr: %r" % (cap.get_captured(),)) - - # Now do it all again, but using IDispatch - com_server = Dispatch(wrap(TestServer())) - try: - com_server.Clone() - raise error("Expecting this call to fail!") - except pythoncom.com_error as com_exc: - if com_exc.hresult != winerror.DISP_E_EXCEPTION: - raise error( - "Calling the object via IDispatch did not yield the correct scode", - com_exc, - ) - exc = com_exc.excepinfo - if not exc or exc[-1] != winerror.E_UNEXPECTED: - raise error( - "The scode element of the exception tuple did not yield the correct scode", - com_exc, - ) - if exc[2] != "Not today": - raise error( - "The description in the exception tuple did not yield the correct string", - com_exc, - ) - - cap.clear() - try: - cap.capture() - try: 
- com_server.Commit(0) - finally: - cap.release() - raise error("Expecting this call to fail!") - except pythoncom.com_error as com_exc: - if com_exc.hresult != winerror.DISP_E_EXCEPTION: - raise error( - "Calling the object via IDispatch did not yield the correct scode", - com_exc, - ) - exc = com_exc.excepinfo - if not exc or exc[-1] != winerror.E_FAIL: - raise error( - "The scode element of the exception tuple did not yield the correct scode", - com_exc, - ) - if exc[1] != "Python COM Server Internal Error": - raise error( - "The description in the exception tuple did not yield the correct string", - com_exc, - ) - # Check we saw a traceback in stderr - if cap.get_captured().find("Traceback") < 0: - raise error("Could not find a traceback in stderr: %r" % (cap.get_captured(),)) - - # And an explicit com_error - cap.clear() - try: - cap.capture() - try: - com_server.Commit(1) - finally: - cap.release() - raise error("Expecting this call to fail!") - except pythoncom.com_error as com_exc: - if com_exc.hresult != winerror.DISP_E_EXCEPTION: - raise error( - "Calling the object via IDispatch did not yield the correct scode", - com_exc, - ) - exc = com_exc.excepinfo - if not exc or exc[-1] != winerror.E_FAIL: - raise error( - "The scode element of the exception tuple did not yield the correct scode", - com_exc, - ) - if exc[1] != "source": - raise error( - "The source in the exception tuple did not yield the correct string", - com_exc, - ) - if exc[2] != "\U0001F600": - raise error( - "The description in the exception tuple did not yield the correct string", - com_exc, - ) - if exc[3] != "helpfile": - raise error( - "The helpfile in the exception tuple did not yield the correct string", - com_exc, - ) - if exc[4] != 1: - raise error( - "The help context in the exception tuple did not yield the correct string", - com_exc, - ) - - -try: - import logging -except ImportError: - logging = None -if logging is not None: - import win32com - - class 
TestLogHandler(logging.Handler): - def __init__(self): - self.reset() - logging.Handler.__init__(self) - - def reset(self): - self.num_emits = 0 - self.last_record = None - - def emit(self, record): - self.num_emits += 1 - self.last_record = self.format(record) - return - print("--- record start") - print(self.last_record) - print("--- record end") - - def testLogger(): - assert not hasattr(win32com, "logger") - handler = TestLogHandler() - formatter = logging.Formatter("%(message)s") - handler.setFormatter(formatter) - log = logging.getLogger("win32com_test") - log.addHandler(handler) - win32com.logger = log - # Now throw some exceptions! - # Native interfaces - com_server = wrap(TestServer(), pythoncom.IID_IStream) - try: - com_server.Commit(0) - raise RuntimeError("should have failed") - except pythoncom.error as exc: - # `excepinfo` is a tuple with elt 2 being the traceback we captured. - message = exc.excepinfo[2] - assert message.endswith("Exception: \U0001F600\n") - assert handler.num_emits == 1, handler.num_emits - assert handler.last_record.startswith( - "pythoncom error: Unexpected exception in gateway method 'Commit'" - ) - handler.reset() - - # IDispatch - com_server = Dispatch(wrap(TestServer())) - try: - com_server.Commit(0) - raise RuntimeError("should have failed") - except pythoncom.error as exc: - # `excepinfo` is a tuple with elt 2 being the traceback we captured. 
- message = exc.excepinfo[2] - assert message.endswith("Exception: \U0001F600\n") - assert handler.num_emits == 1, handler.num_emits - handler.reset() - - -if __name__ == "__main__": - test() - if logging is not None: - testLogger() - from win32com.test.util import CheckClean - - CheckClean() - print("error semantic tests worked") diff --git a/lib/win32com/test/pippo.idl b/lib/win32com/test/pippo.idl deleted file mode 100644 index 4222684c..00000000 --- a/lib/win32com/test/pippo.idl +++ /dev/null @@ -1,66 +0,0 @@ -// TestServer.idl : IDL source for TestServer.dll -// - -// This file will be processed by the MIDL tool to -// produce the type library (TestServer.tlb) and marshalling code. - -import "oaidl.idl"; -import "ocidl.idl"; - [ - object, - uuid(50086EE8-F535-464B-806E-365ADBB727CF), - dual, - helpstring("ITestServerApp Interface"), - pointer_default(unique) - ] - interface ITestServerApp : IDispatch - { - [id(1), helpstring("method Test1")] HRESULT Test1([out, retval] ITestServerApp **pVal); - [id(2), helpstring("method Test2")] HRESULT Test2([out, retval] VARIANT *pVar); - [propget, id(3), helpstring("property MyProp1")] HRESULT MyProp1([out, retval] long *pVal); - }; - [ - object, - uuid(618DB2A3-D5BD-4850-B66A-828727EB37E5), - dual, - helpstring("IPippo Interface"), - pointer_default(unique) - ] - interface IPippo : IDispatch - { - [id(1), helpstring("method Method1")] HRESULT Method1([out, retval] IPippo **val); - [propget, id(2), helpstring("property MyProp1")] HRESULT MyProp1([out, retval] long *pVal); - [id(3), helpstring("method Method2")] HRESULT Method2([in] long in1, [in, out] long *inout1, - [out, retval] long *val); - [id(4), helpstring("method Method3")] HRESULT Method3([in] VARIANT in1, - [out, retval] VARIANT *val); - }; - -[ - uuid(7783054E-9A20-4584-8C62-6ED2A08F6AC6), - version(1.0), - helpstring("TestServer 1.0 Type Library") -] -library TESTSERVERLib -{ - importlib("stdole32.tlb"); - importlib("stdole2.tlb"); - importlib("msado15.dll"); - 
- [ - uuid(49E44E89-5A72-4456-B1D5-68268A19E798), - helpstring("TestServerApp Class") - ] - coclass TestServerApp - { - [default] interface ITestServerApp; - }; - [ - uuid(1F0F75D6-BD63-41B9-9F88-2D9D2E1AA5C3), - helpstring("Pippo Class") - ] - coclass Pippo - { - [default] interface IPippo; - }; -}; diff --git a/lib/win32com/test/pippo_server.py b/lib/win32com/test/pippo_server.py deleted file mode 100644 index edcd9168..00000000 --- a/lib/win32com/test/pippo_server.py +++ /dev/null @@ -1,97 +0,0 @@ -# A little test server, complete with typelib, we can use for testing. -# Originally submitted with bug: -# [ 753154 ] memory leak wrapping object having _typelib_guid_ attribute -# but modified by mhammond for use as part of the test suite. -import os -import sys - -import pythoncom -import win32com -import winerror -from win32com.server.util import wrap - - -class CPippo: - # - # COM declarations - # - _reg_clsid_ = "{1F0F75D6-BD63-41B9-9F88-2D9D2E1AA5C3}" - _reg_desc_ = "Pippo Python test object" - _reg_progid_ = "Python.Test.Pippo" - # _reg_clsctx_ = pythoncom.CLSCTX_LOCAL_SERVER - ### - ### Link to typelib - _typelib_guid_ = "{7783054E-9A20-4584-8C62-6ED2A08F6AC6}" - _typelib_version_ = 1, 0 - _com_interfaces_ = ["IPippo"] - - def __init__(self): - self.MyProp1 = 10 - - def Method1(self): - return wrap(CPippo()) - - def Method2(self, in1, inout1): - return in1, inout1 * 2 - - def Method3(self, in1): - # in1 will be a tuple, not a list. - # Yet, we are not allowed to return a tuple, but need to convert it to a list first. (Bug?) - return list(in1) - - -def BuildTypelib(): - from distutils.dep_util import newer - - this_dir = os.path.dirname(__file__) - idl = os.path.abspath(os.path.join(this_dir, "pippo.idl")) - tlb = os.path.splitext(idl)[0] + ".tlb" - if newer(idl, tlb): - print("Compiling %s" % (idl,)) - rc = os.system('midl "%s"' % (idl,)) - if rc: - raise RuntimeError("Compiling MIDL failed!") - # Can't work out how to prevent MIDL from generating the stubs. 
- # just nuke them - for fname in "dlldata.c pippo_i.c pippo_p.c pippo.h".split(): - os.remove(os.path.join(this_dir, fname)) - - print("Registering %s" % (tlb,)) - tli = pythoncom.LoadTypeLib(tlb) - pythoncom.RegisterTypeLib(tli, tlb) - - -def UnregisterTypelib(): - k = CPippo - try: - pythoncom.UnRegisterTypeLib( - k._typelib_guid_, - k._typelib_version_[0], - k._typelib_version_[1], - 0, - pythoncom.SYS_WIN32, - ) - print("Unregistered typelib") - except pythoncom.error as details: - if details[0] == winerror.TYPE_E_REGISTRYACCESS: - pass - else: - raise - - -def main(argv=None): - if argv is None: - argv = sys.argv[1:] - if "--unregister" in argv: - # Unregister the type-libraries. - UnregisterTypelib() - else: - # Build and register the type-libraries. - BuildTypelib() - import win32com.server.register - - win32com.server.register.UseCommandLine(CPippo) - - -if __name__ == "__main__": - main(sys.argv) diff --git a/lib/win32com/test/policySemantics.py b/lib/win32com/test/policySemantics.py deleted file mode 100644 index 746522f7..00000000 --- a/lib/win32com/test/policySemantics.py +++ /dev/null @@ -1,116 +0,0 @@ -import unittest - -import pythoncom -import win32com.client -import win32com.server.util -import win32com.test.util -import winerror - - -class Error(Exception): - pass - - -# An object representing a list of numbers -class PythonSemanticClass: - _public_methods_ = ["In"] # DISPIDs are allocated. - _dispid_to_func_ = {10: "Add", 11: "Remove"} # DISPIDs specified by the object. - - def __init__(self): - self.list = [] - - def _NewEnum(self): - return win32com.server.util.NewEnum(self.list) - - def _value_(self): - # should return an array. 
- return self.list - - def _Evaluate(self): - # return the sum - return sum(self.list) - - def In(self, value): - return value in self.list - - def Add(self, value): - self.list.append(value) - - def Remove(self, value): - self.list.remove(value) - - -def DispExTest(ob): - if not __debug__: - print("WARNING: Tests dressed up as assertions are being skipped!") - assert ob.GetDispID("Add", 0) == 10, "Policy did not honour the dispid" - # Not impl - # assert ob.GetMemberName(10, 0)=="add", "Policy did not give me the correct function for the dispid" - assert ob.GetDispID("Remove", 0) == 11, "Policy did not honour the dispid" - assert ob.GetDispID("In", 0) == 1000, "Allocated dispid unexpected value" - assert ( - ob.GetDispID("_NewEnum", 0) == pythoncom.DISPID_NEWENUM - ), "_NewEnum() got unexpected DISPID" - dispids = [] - dispid = -1 - while 1: - try: - dispid = ob.GetNextDispID(0, dispid) - dispids.append(dispid) - except pythoncom.com_error as xxx_todo_changeme: - (hr, desc, exc, arg) = xxx_todo_changeme.args - assert hr == winerror.S_FALSE, "Bad result at end of enum" - break - dispids.sort() - if dispids != [pythoncom.DISPID_EVALUATE, pythoncom.DISPID_NEWENUM, 10, 11, 1000]: - raise Error("Got back the wrong dispids: %s" % dispids) - - -def SemanticTest(ob): - # First just check our object "generally" as expected. 
- ob.Add(1) - ob.Add(2) - ob.Add(3) - # invoke _value_ - if ob() != (1, 2, 3): - raise Error("Bad result - got %s" % (repr(ob()))) - - dispob = ob._oleobj_ - - rc = dispob.Invoke( - pythoncom.DISPID_EVALUATE, - 0, - pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET, - 1, - ) - if rc != 6: - raise Error("Evaluate returned %d" % rc) - - -class Tester(win32com.test.util.TestCase): - def setUp(self): - debug = 0 - import win32com.server.dispatcher - - if debug: - dispatcher = win32com.server.dispatcher.DefaultDebugDispatcher - else: - dispatcher = None - disp = win32com.server.util.wrap( - PythonSemanticClass(), useDispatcher=dispatcher - ) - self.ob = win32com.client.Dispatch(disp) - - def tearDown(self): - self.ob = None - - def testSemantics(self): - SemanticTest(self.ob) - - def testIDispatchEx(self): - dispexob = self.ob._oleobj_.QueryInterface(pythoncom.IID_IDispatchEx) - DispExTest(dispexob) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/readme.txt b/lib/win32com/test/readme.txt deleted file mode 100644 index 02edbd55..00000000 --- a/lib/win32com/test/readme.txt +++ /dev/null @@ -1,18 +0,0 @@ -COM Test Suite Readme ---------------------- - -Running the test suite: ------------------------ -* Open a command prompt -* Change to the "win32com\test" directory. -* run "testall.py". This will perform level 1 testing. - You may specify 1, 2, or 3 on the command line ("testutil 3") - to execute more tests. - -In general, this should just run the best it can, utilizing what is available -on the machine. It is likely some tests will refuse to run due to objects not -being locally available - this is normal. - -The win32com source tree has source code to a C++ and VB component used purely -for testing. You may like to build and register these, particularly if you -are doing anything related to argument/result handling. 
diff --git a/lib/win32com/test/testADOEvents.py b/lib/win32com/test/testADOEvents.py deleted file mode 100644 index c79b2f7e..00000000 --- a/lib/win32com/test/testADOEvents.py +++ /dev/null @@ -1,100 +0,0 @@ -import os -import time - -import pythoncom -from win32com.client import Dispatch, DispatchWithEvents, constants - -finished = 0 # Flag for the wait loop from (3) to test - - -class ADOEvents: # event handler class - def OnWillConnect(self, str, user, pw, opt, sts, cn): - # Must have this event, as if it is not handled, ADO assumes the - # operation is cancelled, and raises an error (Operation cancelled - # by the user) - pass - - def OnConnectComplete(self, error, status, connection): - # Assume no errors, until we have the basic stuff - # working. Now, "connection" should be an open - # connection to my data source - # Do the "something" from (2). For now, just - # print the connection data source - print("connection is", connection) - print("Connected to", connection.Properties("Data Source")) - # OK, our work is done. 
Let the main loop know - global finished - finished = 1 - - def OnCommitTransComplete(self, pError, adStatus, pConnection): - pass - - def OnInfoMessage(self, pError, adStatus, pConnection): - pass - - def OnDisconnect(self, adStatus, pConnection): - pass - - def OnBeginTransComplete(self, TransactionLevel, pError, adStatus, pConnection): - pass - - def OnRollbackTransComplete(self, pError, adStatus, pConnection): - pass - - def OnExecuteComplete( - self, RecordsAffected, pError, adStatus, pCommand, pRecordset, pConnection - ): - pass - - def OnWillExecute( - self, - Source, - CursorType, - LockType, - Options, - adStatus, - pCommand, - pRecordset, - pConnection, - ): - pass - - -def TestConnection(dbname): - # Create the ADO connection object, and link the event - # handlers into it - c = DispatchWithEvents("ADODB.Connection", ADOEvents) - - # Initiate the asynchronous open - dsn = "Driver={Microsoft Access Driver (*.mdb)};Dbq=%s" % dbname - user = "system" - pw = "manager" - c.Open(dsn, user, pw, constants.adAsyncConnect) - - # Sit in a loop, until our event handler (above) sets the - # "finished" flag or we time out. - end_time = time.clock() + 10 - while time.clock() < end_time: - # Pump messages so that COM gets a look in - pythoncom.PumpWaitingMessages() - if not finished: - print("XXX - Failed to connect!") - - -def Test(): - from . import testAccess - - try: - testAccess.GenerateSupport() - except pythoncom.com_error: - print("*** Can not import the MSAccess type libraries - tests skipped") - return - dbname = testAccess.CreateTestAccessDatabase() - try: - TestConnection(dbname) - finally: - os.unlink(dbname) - - -if __name__ == "__main__": - Test() diff --git a/lib/win32com/test/testAXScript.py b/lib/win32com/test/testAXScript.py deleted file mode 100644 index c37a5aff..00000000 --- a/lib/win32com/test/testAXScript.py +++ /dev/null @@ -1,43 +0,0 @@ -# Test AXScripting the best we can in an automated fashion... 
-import os -import sys - -import win32api -import win32com.axscript -import win32com.axscript.client -import win32com.test.util - -verbose = "-v" in sys.argv - - -class AXScript(win32com.test.util.TestCase): - def setUp(self): - file = win32api.GetFullPathName( - os.path.join(win32com.axscript.client.__path__[0], "pyscript.py") - ) - from win32com.test.util import RegisterPythonServer - - self.verbose = verbose - RegisterPythonServer(file, "python", verbose=self.verbose) - - def testHost(self): - file = win32api.GetFullPathName( - os.path.join(win32com.axscript.__path__[0], "test\\testHost.py") - ) - cmd = '%s "%s"' % (win32api.GetModuleFileName(0), file) - if verbose: - print("Testing Python Scripting host") - win32com.test.util.ExecuteShellCommand(cmd, self) - - def testCScript(self): - file = win32api.GetFullPathName( - os.path.join(win32com.axscript.__path__[0], "Demos\\Client\\wsh\\test.pys") - ) - cmd = 'cscript.exe "%s"' % (file) - if verbose: - print("Testing Windows Scripting host with Python script") - win32com.test.util.ExecuteShellCommand(cmd, self) - - -if __name__ == "__main__": - win32com.test.util.testmain() diff --git a/lib/win32com/test/testAccess.py b/lib/win32com/test/testAccess.py deleted file mode 100644 index e6fce872..00000000 --- a/lib/win32com/test/testAccess.py +++ /dev/null @@ -1,187 +0,0 @@ -# -# This assumes that you have MSAccess and DAO installed. -# You need to run makepy.py over "msaccess.tlb" and -# "dao3032.dll", and ensure the generated files are on the -# path. - -# You can run this with no args, and a test database will be generated. -# You can optionally pass a dbname on the command line, in which case it will be dumped. - -import os -import sys - -import pythoncom -import win32api -from win32com.client import Dispatch, constants, gencache - - -def CreateTestAccessDatabase(dbname=None): - # Creates a test access database - returns the filename. 
- if dbname is None: - dbname = os.path.join(win32api.GetTempPath(), "COMTestSuiteTempDatabase.mdb") - - access = Dispatch("Access.Application") - dbEngine = access.DBEngine - workspace = dbEngine.Workspaces(0) - - try: - os.unlink(dbname) - except os.error: - print( - "WARNING - Unable to delete old test database - expect a COM exception RSN!" - ) - - newdb = workspace.CreateDatabase( - dbname, constants.dbLangGeneral, constants.dbEncrypt - ) - - # Create one test table. - table = newdb.CreateTableDef("Test Table 1") - table.Fields.Append(table.CreateField("First Name", constants.dbText)) - table.Fields.Append(table.CreateField("Last Name", constants.dbText)) - - index = table.CreateIndex("UniqueIndex") - index.Fields.Append(index.CreateField("First Name")) - index.Fields.Append(index.CreateField("Last Name")) - index.Unique = -1 - table.Indexes.Append(index) - - newdb.TableDefs.Append(table) - - # Create a second test table. - table = newdb.CreateTableDef("Test Table 2") - table.Fields.Append(table.CreateField("First Name", constants.dbText)) - table.Fields.Append(table.CreateField("Last Name", constants.dbText)) - - newdb.TableDefs.Append(table) - - # Create a relationship between them - relation = newdb.CreateRelation("TestRelationship") - relation.Table = "Test Table 1" - relation.ForeignTable = "Test Table 2" - - field = relation.CreateField("First Name") - field.ForeignName = "First Name" - relation.Fields.Append(field) - - field = relation.CreateField("Last Name") - field.ForeignName = "Last Name" - relation.Fields.Append(field) - - relation.Attributes = ( - constants.dbRelationDeleteCascade + constants.dbRelationUpdateCascade - ) - - newdb.Relations.Append(relation) - - # Finally we can add some data to the table. 
- tab1 = newdb.OpenRecordset("Test Table 1") - tab1.AddNew() - tab1.Fields("First Name").Value = "Mark" - tab1.Fields("Last Name").Value = "Hammond" - tab1.Update() - - tab1.MoveFirst() - # We do a simple bookmark test which tests our optimized VT_SAFEARRAY|VT_UI1 support. - # The bookmark will be a buffer object - remember it for later. - bk = tab1.Bookmark - - # Add a second record. - tab1.AddNew() - tab1.Fields("First Name").Value = "Second" - tab1.Fields("Last Name").Value = "Person" - tab1.Update() - - # Reset the bookmark to the one we saved. - # But first check the test is actually doing something! - tab1.MoveLast() - if tab1.Fields("First Name").Value != "Second": - raise RuntimeError("Unexpected record is last - makes bookmark test pointless!") - - tab1.Bookmark = bk - if tab1.Bookmark != bk: - raise RuntimeError("The bookmark data is not the same") - - if tab1.Fields("First Name").Value != "Mark": - raise RuntimeError("The bookmark did not reset the record pointer correctly") - - return dbname - - -def DoDumpAccessInfo(dbname): - from . import daodump - - a = forms = None - try: - sys.stderr.write("Creating Access Application...\n") - a = Dispatch("Access.Application") - print("Opening database %s" % dbname) - a.OpenCurrentDatabase(dbname) - db = a.CurrentDb() - daodump.DumpDB(db, 1) - forms = a.Forms - print("There are %d forms open." % (len(forms))) - # Uncommenting these lines means Access remains open. - # for form in forms: - # print " %s" % form.Name - reports = a.Reports - print("There are %d reports open" % (len(reports))) - finally: - if not a is None: - sys.stderr.write("Closing database\n") - try: - a.CloseCurrentDatabase() - except pythoncom.com_error: - pass - - -# Generate all the support we can. 
-def GenerateSupport(): - # dao - gencache.EnsureModule("{00025E01-0000-0000-C000-000000000046}", 0, 4, 0) - # Access - # gencache.EnsureModule("{4AFFC9A0-5F99-101B-AF4E-00AA003F0F07}", 0, 8, 0) - gencache.EnsureDispatch("Access.Application") - - -def DumpAccessInfo(dbname): - amod = gencache.GetModuleForProgID("Access.Application") - dmod = gencache.GetModuleForProgID("DAO.DBEngine.35") - if amod is None and dmod is None: - DoDumpAccessInfo(dbname) - # Now generate all the support we can. - GenerateSupport() - else: - sys.stderr.write( - "testAccess not doing dynamic test, as generated code already exists\n" - ) - # Now a generated version. - DoDumpAccessInfo(dbname) - - -def test(dbname=None): - if dbname is None: - # We need makepy support to create a database (just for the constants!) - try: - GenerateSupport() - except pythoncom.com_error: - print("*** Can not import the MSAccess type libraries - tests skipped") - return - dbname = CreateTestAccessDatabase() - print("A test database at '%s' was created" % dbname) - - DumpAccessInfo(dbname) - - -if __name__ == "__main__": - import sys - - from .util import CheckClean - - dbname = None - if len(sys.argv) > 1: - dbname = sys.argv[1] - - test(dbname) - - CheckClean() diff --git a/lib/win32com/test/testArrays.py b/lib/win32com/test/testArrays.py deleted file mode 100644 index f395a413..00000000 --- a/lib/win32com/test/testArrays.py +++ /dev/null @@ -1,99 +0,0 @@ -# Originally contributed by Stefan Schukat as part of this arbitrary-sized -# arrays patch. 
- -from win32com.client import gencache -from win32com.test import util - -ZeroD = 0 -OneDEmpty = [] -OneD = [1, 2, 3] -TwoD = [[1, 2, 3], [1, 2, 3], [1, 2, 3]] - -TwoD1 = [[[1, 2, 3, 5], [1, 2, 3], [1, 2, 3]], [[1, 2, 3], [1, 2, 3], [1, 2, 3]]] - -OneD1 = [[[1, 2, 3], [1, 2, 3], [1, 2, 3]], [[1, 2, 3], [1, 2, 3]]] - -OneD2 = [ - [1, 2, 3], - [1, 2, 3, 4, 5], - [[1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]], -] - - -ThreeD = [[[1, 2, 3], [1, 2, 3], [1, 2, 3]], [[1, 2, 3], [1, 2, 3], [1, 2, 3]]] - -FourD = [ - [ - [[1, 2, 3], [1, 2, 3], [1, 2, 3]], - [[1, 2, 3], [1, 2, 3], [1, 2, 3]], - [[1, 2, 3], [1, 2, 3], [1, 2, 3]], - ], - [ - [[1, 2, 3], [1, 2, 3], [1, 2, 3]], - [[1, 2, 3], [1, 2, 3], [1, 2, 3]], - [[1, 2, 3], [1, 2, 3], [1, 2, 3]], - ], -] - -LargeD = [ - [[list(range(10))] * 10], -] * 512 - - -def _normalize_array(a): - if type(a) != type(()): - return a - ret = [] - for i in a: - ret.append(_normalize_array(i)) - return ret - - -class ArrayTest(util.TestCase): - def setUp(self): - self.arr = gencache.EnsureDispatch("PyCOMTest.ArrayTest") - - def tearDown(self): - self.arr = None - - def _doTest(self, array): - self.arr.Array = array - self.assertEqual(_normalize_array(self.arr.Array), array) - - def testZeroD(self): - self._doTest(ZeroD) - - def testOneDEmpty(self): - self._doTest(OneDEmpty) - - def testOneD(self): - self._doTest(OneD) - - def testTwoD(self): - self._doTest(TwoD) - - def testThreeD(self): - self._doTest(ThreeD) - - def testFourD(self): - self._doTest(FourD) - - def testTwoD1(self): - self._doTest(TwoD1) - - def testOneD1(self): - self._doTest(OneD1) - - def testOneD2(self): - self._doTest(OneD2) - - def testLargeD(self): - self._doTest(LargeD) - - -if __name__ == "__main__": - try: - util.testmain() - except SystemExit as rc: - if not rc: - raise diff --git a/lib/win32com/test/testClipboard.py b/lib/win32com/test/testClipboard.py deleted file mode 100644 index 60df8668..00000000 --- a/lib/win32com/test/testClipboard.py +++ /dev/null @@ 
-1,170 +0,0 @@ -# testClipboard.py -import unittest - -import pythoncom -import win32clipboard -import win32con -import winerror -from win32com.server.exception import COMException -from win32com.server.util import NewEnum, wrap - -IDataObject_Methods = """GetData GetDataHere QueryGetData - GetCanonicalFormatEtc SetData EnumFormatEtc - DAdvise DUnadvise EnumDAdvise""".split() - -# A COM object implementing IDataObject used for basic testing. -num_do_objects = 0 - - -def WrapCOMObject(ob, iid=None): - return wrap(ob, iid=iid, useDispatcher=0) - - -class TestDataObject: - _com_interfaces_ = [pythoncom.IID_IDataObject] - _public_methods_ = IDataObject_Methods - - def __init__(self, bytesval): - global num_do_objects - num_do_objects += 1 - self.bytesval = bytesval - self.supported_fe = [] - for cf in (win32con.CF_TEXT, win32con.CF_UNICODETEXT): - fe = cf, None, pythoncom.DVASPECT_CONTENT, -1, pythoncom.TYMED_HGLOBAL - self.supported_fe.append(fe) - - def __del__(self): - global num_do_objects - num_do_objects -= 1 - - def _query_interface_(self, iid): - if iid == pythoncom.IID_IEnumFORMATETC: - return NewEnum(self.supported_fe, iid=iid) - - def GetData(self, fe): - ret_stg = None - cf, target, aspect, index, tymed = fe - if aspect & pythoncom.DVASPECT_CONTENT and tymed == pythoncom.TYMED_HGLOBAL: - if cf == win32con.CF_TEXT: - ret_stg = pythoncom.STGMEDIUM() - ret_stg.set(pythoncom.TYMED_HGLOBAL, self.bytesval) - elif cf == win32con.CF_UNICODETEXT: - ret_stg = pythoncom.STGMEDIUM() - ret_stg.set(pythoncom.TYMED_HGLOBAL, self.bytesval.decode("latin1")) - - if ret_stg is None: - raise COMException(hresult=winerror.E_NOTIMPL) - return ret_stg - - def GetDataHere(self, fe): - raise COMException(hresult=winerror.E_NOTIMPL) - - def QueryGetData(self, fe): - cf, target, aspect, index, tymed = fe - if aspect & pythoncom.DVASPECT_CONTENT == 0: - raise COMException(hresult=winerror.DV_E_DVASPECT) - if tymed != pythoncom.TYMED_HGLOBAL: - raise 
COMException(hresult=winerror.DV_E_TYMED) - return None # should check better - - def GetCanonicalFormatEtc(self, fe): - RaiseCOMException(winerror.DATA_S_SAMEFORMATETC) - # return fe - - def SetData(self, fe, medium): - raise COMException(hresult=winerror.E_NOTIMPL) - - def EnumFormatEtc(self, direction): - if direction != pythoncom.DATADIR_GET: - raise COMException(hresult=winerror.E_NOTIMPL) - return NewEnum(self.supported_fe, iid=pythoncom.IID_IEnumFORMATETC) - - def DAdvise(self, fe, flags, sink): - raise COMException(hresult=winerror.E_NOTIMPL) - - def DUnadvise(self, connection): - raise COMException(hresult=winerror.E_NOTIMPL) - - def EnumDAdvise(self): - raise COMException(hresult=winerror.E_NOTIMPL) - - -class ClipboardTester(unittest.TestCase): - def setUp(self): - pythoncom.OleInitialize() - - def tearDown(self): - try: - pythoncom.OleFlushClipboard() - except pythoncom.com_error: - # We never set anything! - pass - - def testIsCurrentClipboard(self): - do = TestDataObject(b"Hello from Python") - do = WrapCOMObject(do, iid=pythoncom.IID_IDataObject) - pythoncom.OleSetClipboard(do) - self.assertTrue(pythoncom.OleIsCurrentClipboard(do)) - - def testComToWin32(self): - # Set the data via our DataObject - do = TestDataObject(b"Hello from Python") - do = WrapCOMObject(do, iid=pythoncom.IID_IDataObject) - pythoncom.OleSetClipboard(do) - # Then get it back via the standard win32 clipboard functions. - win32clipboard.OpenClipboard() - got = win32clipboard.GetClipboardData(win32con.CF_TEXT) - # CF_TEXT gives bytes. - expected = b"Hello from Python" - self.assertEqual(got, expected) - # Now check unicode - got = win32clipboard.GetClipboardData(win32con.CF_UNICODETEXT) - self.assertEqual(got, "Hello from Python") - win32clipboard.CloseClipboard() - - def testWin32ToCom(self): - # Set the data via the std win32 clipboard functions. - val = b"Hello again!" 
# always bytes - win32clipboard.OpenClipboard() - win32clipboard.SetClipboardData(win32con.CF_TEXT, val) - win32clipboard.CloseClipboard() - # and get it via an IDataObject provided by COM - do = pythoncom.OleGetClipboard() - cf = ( - win32con.CF_TEXT, - None, - pythoncom.DVASPECT_CONTENT, - -1, - pythoncom.TYMED_HGLOBAL, - ) - stg = do.GetData(cf) - got = stg.data - # The data we get back has the \0, as our STGMEDIUM has no way of - # knowing if it meant to be a string, or a binary buffer, so - # it must return it too. - self.assertTrue(got, b"Hello again!\0") - - def testDataObjectFlush(self): - do = TestDataObject(b"Hello from Python") - do = WrapCOMObject(do, iid=pythoncom.IID_IDataObject) - pythoncom.OleSetClipboard(do) - self.assertEqual(num_do_objects, 1) - - do = None # clear my ref! - pythoncom.OleFlushClipboard() - self.assertEqual(num_do_objects, 0) - - def testDataObjectReset(self): - do = TestDataObject(b"Hello from Python") - do = WrapCOMObject(do) - pythoncom.OleSetClipboard(do) - do = None # clear my ref! - self.assertEqual(num_do_objects, 1) - pythoncom.OleSetClipboard(None) - self.assertEqual(num_do_objects, 0) - - -if __name__ == "__main__": - from win32com.test import util - - util.testmain() diff --git a/lib/win32com/test/testCollections.py b/lib/win32com/test/testCollections.py deleted file mode 100644 index 954697e0..00000000 --- a/lib/win32com/test/testCollections.py +++ /dev/null @@ -1,166 +0,0 @@ -# testCollections.py -# -# This code tests both the client and server side of collections -# and enumerators. -# -# Also has the side effect of testing some of the PythonCOM error semantics. 
-import sys - -import pythoncom -import pywintypes -import win32com.client -import win32com.server.util -import win32com.test.util -import winerror - -L = pywintypes.Unicode - -import unittest - -error = "collection test error" - - -def MakeEmptyEnum(): - # create the Python enumerator object as a real COM object - o = win32com.server.util.wrap(win32com.server.util.Collection()) - return win32com.client.Dispatch(o) - - -def MakeTestEnum(): - # create a sub-collection, just to make sure it works :-) - sub = win32com.server.util.wrap( - win32com.server.util.Collection(["Sub1", 2, "Sub3"]) - ) - # create the Python enumerator object as a real COM object - o = win32com.server.util.wrap(win32com.server.util.Collection([1, "Two", 3, sub])) - return win32com.client.Dispatch(o) - - -def TestEnumAgainst(o, check): - for i in range(len(check)): - if o(i) != check[i]: - raise error( - "Using default method gave the incorrect value - %s/%s" - % (repr(o(i)), repr(check[i])) - ) - - for i in range(len(check)): - if o.Item(i) != check[i]: - raise error( - "Using Item method gave the incorrect value - %s/%s" - % (repr(o(i)), repr(check[i])) - ) - - # First try looping. - cmp = [] - for s in o: - cmp.append(s) - - if cmp[: len(check)] != check: - raise error( - "Result after looping isnt correct - %s/%s" - % (repr(cmp[: len(check)]), repr(check)) - ) - - for i in range(len(check)): - if o[i] != check[i]: - raise error("Using indexing gave the incorrect value") - - -def TestEnum(quiet=None): - if quiet is None: - quiet = not "-v" in sys.argv - if not quiet: - print("Simple enum test") - o = MakeTestEnum() - check = [1, "Two", 3] - TestEnumAgainst(o, check) - - if not quiet: - print("sub-collection test") - sub = o[3] - TestEnumAgainst(sub, ["Sub1", 2, "Sub3"]) - - # Remove the sublist for this test! 
- o.Remove(o.Count() - 1) - - if not quiet: - print("Remove item test") - del check[1] - o.Remove(1) - TestEnumAgainst(o, check) - - if not quiet: - print("Add item test") - o.Add("New Item") - check.append("New Item") - TestEnumAgainst(o, check) - - if not quiet: - print("Insert item test") - o.Insert(2, -1) - check.insert(2, -1) - TestEnumAgainst(o, check) - - ### This does not work! - # if not quiet: print "Indexed replace item test" - # o[2] = 'Replaced Item' - # check[2] = 'Replaced Item' - # TestEnumAgainst(o, check) - - try: - o() - raise error("default method with no args worked when it shouldnt have!") - except pythoncom.com_error as exc: - if exc.hresult != winerror.DISP_E_BADPARAMCOUNT: - raise error("Expected DISP_E_BADPARAMCOUNT - got %s" % (exc,)) - - try: - o.Insert("foo", 2) - raise error("Insert worked when it shouldnt have!") - except pythoncom.com_error as exc: - if exc.hresult != winerror.DISP_E_TYPEMISMATCH: - raise error("Expected DISP_E_TYPEMISMATCH - got %s" % (exc,)) - - # Remove the sublist for this test! 
- try: - o.Remove(o.Count()) - raise error("Remove worked when it shouldnt have!") - except pythoncom.com_error as exc: - if exc.hresult != winerror.DISP_E_BADINDEX: - raise error("Expected DISP_E_BADINDEX - got %s" % (exc,)) - - # Test an empty collection - if not quiet: - print("Empty collection test") - o = MakeEmptyEnum() - for item in o: - raise error("Empty list performed an iteration") - - try: - ob = o[1] - raise error("Empty list could be indexed") - except IndexError: - pass - - try: - ob = o[0] - raise error("Empty list could be indexed") - except IndexError: - pass - - try: - ob = o(0) - raise error("Empty list could be indexed") - except pythoncom.com_error as exc: - if exc.hresult != winerror.DISP_E_BADINDEX: - raise error("Expected DISP_E_BADINDEX - got %s" % (exc,)) - - -class TestCase(win32com.test.util.TestCase): - def testEnum(self): - TestEnum() - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testConversionErrors.py b/lib/win32com/test/testConversionErrors.py deleted file mode 100644 index a4b755e6..00000000 --- a/lib/win32com/test/testConversionErrors.py +++ /dev/null @@ -1,39 +0,0 @@ -import unittest - -import win32com.client -import win32com.server.util -import win32com.test.util - - -class Tester: - _public_methods_ = ["TestValue"] - - def TestValue(self, v): - pass - - -def test_ob(): - return win32com.client.Dispatch(win32com.server.util.wrap(Tester())) - - -class TestException(Exception): - pass - - -# The object we try and pass - pywin32 will call __float__ as a last resort. 
-class BadConversions: - def __float__(self): - raise TestException() - - -class TestCase(win32com.test.util.TestCase): - def test_float(self): - try: - test_ob().TestValue(BadConversions()) - raise Exception("Should not have worked") - except Exception as e: - assert isinstance(e, TestException) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testDCOM.py b/lib/win32com/test/testDCOM.py deleted file mode 100644 index 9f5a334e..00000000 --- a/lib/win32com/test/testDCOM.py +++ /dev/null @@ -1,50 +0,0 @@ -# testDCOM -usage = """\ -testDCOM.py - Simple DCOM test -Usage: testDCOM.py serverName - -Attempts to start the Python.Interpreter object on the named machine, -and checks that the object is indeed running remotely. - -Requires the named server be configured to run DCOM (using dcomcnfg.exe), -and the Python.Interpreter object installed and registered on that machine. - -The Python.Interpreter object must be installed on the local machine, -but no special DCOM configuration should be necessary. -""" -import string -import sys - -# NOTE: If you configured the object locally using dcomcnfg, you could -# simple use Dispatch rather than DispatchEx. -import pythoncom -import win32api -import win32com.client - - -def test(serverName): - if string.lower(serverName) == string.lower(win32api.GetComputerName()): - print("You must specify a remote server name, not the local machine!") - return - - # Hack to overcome a DCOM limitation. As the Python.Interpreter object - # is probably installed locally as an InProc object, DCOM seems to ignore - # all settings, and use the local object. 
- clsctx = pythoncom.CLSCTX_SERVER & ~pythoncom.CLSCTX_INPROC_SERVER - ob = win32com.client.DispatchEx("Python.Interpreter", serverName, clsctx=clsctx) - ob.Exec("import win32api") - actualName = ob.Eval("win32api.GetComputerName()") - if string.lower(serverName) != string.lower(actualName): - print( - "Error: The object created on server '%s' reported its name as '%s'" - % (serverName, actualName) - ) - else: - print("Object created and tested OK on server '%s'" % serverName) - - -if __name__ == "__main__": - if len(sys.argv) == 2: - test(sys.argv[1]) - else: - print(usage) diff --git a/lib/win32com/test/testDates.py b/lib/win32com/test/testDates.py deleted file mode 100644 index 00406dda..00000000 --- a/lib/win32com/test/testDates.py +++ /dev/null @@ -1,74 +0,0 @@ -import unittest -from datetime import datetime - -import pywintypes -import win32com.client -import win32com.server.util -import win32com.test.util -from win32timezone import TimeZoneInfo - - -# A COM object so we can pass dates to and from the COM boundary. -class Tester: - _public_methods_ = ["TestDate"] - - def TestDate(self, d): - assert isinstance(d, datetime) - return d - - -def test_ob(): - return win32com.client.Dispatch(win32com.server.util.wrap(Tester())) - - -class TestCase(win32com.test.util.TestCase): - def check(self, d, expected=None): - if not issubclass(pywintypes.TimeType, datetime): - self.skipTest("this is testing pywintypes and datetime") - got = test_ob().TestDate(d) - self.assertEqual(got, expected or d) - - def testUTC(self): - self.check( - datetime( - year=2000, - month=12, - day=25, - microsecond=500000, - tzinfo=TimeZoneInfo.utc(), - ) - ) - - def testLocal(self): - self.check( - datetime( - year=2000, - month=12, - day=25, - microsecond=500000, - tzinfo=TimeZoneInfo.local(), - ) - ) - - def testMSTruncated(self): - # milliseconds are kept but microseconds are lost after rounding. 
- self.check( - datetime( - year=2000, - month=12, - day=25, - microsecond=500500, - tzinfo=TimeZoneInfo.utc(), - ), - datetime( - year=2000, - month=12, - day=25, - microsecond=500000, - tzinfo=TimeZoneInfo.utc(), - ), - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testDictionary.py b/lib/win32com/test/testDictionary.py deleted file mode 100644 index 869cc186..00000000 --- a/lib/win32com/test/testDictionary.py +++ /dev/null @@ -1,99 +0,0 @@ -# testDictionary.py -# -import sys -import unittest - -import pythoncom -import pywintypes -import win32com.client -import win32com.server.util -import win32com.test.util -import win32timezone -import winerror - - -def MakeTestDictionary(): - return win32com.client.Dispatch("Python.Dictionary") - - -def TestDictAgainst(dict, check): - for key, value in list(check.items()): - if dict(key) != value: - raise Exception( - "Indexing for '%s' gave the incorrect value - %s/%s" - % (repr(key), repr(dict[key]), repr(check[key])) - ) - - -# Ensure we have the correct version registered. -def Register(quiet): - import win32com.servers.dictionary - from win32com.test.util import RegisterPythonServer - - RegisterPythonServer(win32com.servers.dictionary.__file__, "Python.Dictionary") - - -def TestDict(quiet=None): - if quiet is None: - quiet = not "-v" in sys.argv - Register(quiet) - - if not quiet: - print("Simple enum test") - dict = MakeTestDictionary() - checkDict = {} - TestDictAgainst(dict, checkDict) - - dict["NewKey"] = "NewValue" - checkDict["NewKey"] = "NewValue" - TestDictAgainst(dict, checkDict) - - dict["NewKey"] = None - del checkDict["NewKey"] - TestDictAgainst(dict, checkDict) - - now = win32timezone.now() - # We want to keep the milliseconds but discard microseconds as they - # don't survive the conversion. 
- now = now.replace(microsecond=round(now.microsecond / 1000) * 1000) - dict["Now"] = now - checkDict["Now"] = now - TestDictAgainst(dict, checkDict) - - if not quiet: - print("Failure tests") - try: - dict() - raise Exception("default method with no args worked when it shouldnt have!") - except pythoncom.com_error as xxx_todo_changeme: - (hr, desc, exc, argErr) = xxx_todo_changeme.args - if hr != winerror.DISP_E_BADPARAMCOUNT: - raise Exception("Expected DISP_E_BADPARAMCOUNT - got %d (%s)" % (hr, desc)) - - try: - dict("hi", "there") - raise Exception("multiple args worked when it shouldnt have!") - except pythoncom.com_error as xxx_todo_changeme1: - (hr, desc, exc, argErr) = xxx_todo_changeme1.args - if hr != winerror.DISP_E_BADPARAMCOUNT: - raise Exception("Expected DISP_E_BADPARAMCOUNT - got %d (%s)" % (hr, desc)) - - try: - dict(0) - raise Exception("int key worked when it shouldnt have!") - except pythoncom.com_error as xxx_todo_changeme2: - (hr, desc, exc, argErr) = xxx_todo_changeme2.args - if hr != winerror.DISP_E_TYPEMISMATCH: - raise Exception("Expected DISP_E_TYPEMISMATCH - got %d (%s)" % (hr, desc)) - - if not quiet: - print("Python.Dictionary tests complete.") - - -class TestCase(win32com.test.util.TestCase): - def testDict(self): - TestDict() - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testDictionary.vbs b/lib/win32com/test/testDictionary.vbs deleted file mode 100644 index ca1dc08e..00000000 --- a/lib/win32com/test/testDictionary.vbs +++ /dev/null @@ -1,26 +0,0 @@ -' Test Pyhon.Dictionary using VBScript - this uses -' IDispatchEx, so is an interesting test. - -set ob = CreateObject("Python.Dictionary") -ob("hello") = "there" -' Our keys are case insensitive. -ob.Item("hi") = ob("HELLO") - -dim ok -ok = true - -if ob("hello") <> "there" then - WScript.Echo "**** The dictionary value was wrong!!" - ok = false -end if - -if ob("hi") <> "there" then - WScript.Echo "**** The other dictionary value was wrong!!" 
- ok = false -end if - -if ok then - WScript.Echo "VBScript has successfully tested Python.Dictionary" -end if - - diff --git a/lib/win32com/test/testDynamic.py b/lib/win32com/test/testDynamic.py deleted file mode 100644 index 2fd91972..00000000 --- a/lib/win32com/test/testDynamic.py +++ /dev/null @@ -1,89 +0,0 @@ -# Test dynamic policy, and running object table. - -import pythoncom -import winerror -from win32com.server.exception import Exception - -error = "testDynamic error" - -iid = pythoncom.MakeIID("{b48969a0-784b-11d0-ae71-d23f56000000}") - - -class VeryPermissive: - def _dynamic_(self, name, lcid, wFlags, args): - if wFlags & pythoncom.DISPATCH_METHOD: - return getattr(self, name)(*args) - - if wFlags & pythoncom.DISPATCH_PROPERTYGET: - try: - # to avoid problems with byref param handling, tuple results are converted to lists. - ret = self.__dict__[name] - if type(ret) == type(()): - ret = list(ret) - return ret - except KeyError: # Probably a method request. - raise Exception(scode=winerror.DISP_E_MEMBERNOTFOUND) - - if wFlags & ( - pythoncom.DISPATCH_PROPERTYPUT | pythoncom.DISPATCH_PROPERTYPUTREF - ): - setattr(self, name, args[0]) - return - - raise Exception(scode=winerror.E_INVALIDARG, desc="invalid wFlags") - - def write(self, *args): - if len(args) == 0: - raise Exception( - scode=winerror.DISP_E_BADPARAMCOUNT - ) # Probably call as PROPGET. 
- - for arg in args[:-1]: - print(str(arg), end=" ") - print(str(args[-1])) - - -def Test(): - import win32com.server.policy - import win32com.server.util - - # import win32dbg;win32dbg.brk() - ob = win32com.server.util.wrap( - VeryPermissive(), usePolicy=win32com.server.policy.DynamicPolicy - ) - try: - handle = pythoncom.RegisterActiveObject(ob, iid, 0) - except pythoncom.com_error as details: - print("Warning - could not register the object in the ROT:", details) - handle = None - try: - import win32com.client.dynamic - - client = win32com.client.dynamic.Dispatch(iid) - client.ANewAttr = "Hello" - if client.ANewAttr != "Hello": - raise error("Could not set dynamic property") - - v = ["Hello", "From", "Python", 1.4] - client.TestSequence = v - if v != list(client.TestSequence): - raise error( - "Dynamic sequences not working! %r/%r" - % (repr(v), repr(client.testSequence)) - ) - - client.write("This", "output", "has", "come", "via", "testDynamic.py") - # Check our new "_FlagAsMethod" works (kinda!) - client._FlagAsMethod("NotReallyAMethod") - if not callable(client.NotReallyAMethod): - raise error("Method I flagged as callable isn't!") - - client = None - finally: - if handle is not None: - pythoncom.RevokeActiveObject(handle) - print("Test worked!") - - -if __name__ == "__main__": - Test() diff --git a/lib/win32com/test/testExchange.py b/lib/win32com/test/testExchange.py deleted file mode 100644 index b41a4472..00000000 --- a/lib/win32com/test/testExchange.py +++ /dev/null @@ -1,126 +0,0 @@ -# TestExchange = Exchange Server Dump -# Note that this code uses "CDO", which is unlikely to get the best choice. -# You should use the Outlook object model, or -# the win32com.mapi examples for a low-level interface. - -import os - -import pythoncom -from win32com.client import constants, gencache - -ammodule = None # was the generated module! 
- - -def GetDefaultProfileName(): - import win32api - import win32con - - try: - key = win32api.RegOpenKey( - win32con.HKEY_CURRENT_USER, - "Software\\Microsoft\\Windows NT\\CurrentVersion\\Windows Messaging Subsystem\\Profiles", - ) - try: - return win32api.RegQueryValueEx(key, "DefaultProfile")[0] - finally: - key.Close() - except win32api.error: - return None - - -# -# Recursive dump of folders. -# -def DumpFolder(folder, indent=0): - print(" " * indent, folder.Name) - folders = folder.Folders - folder = folders.GetFirst() - while folder: - DumpFolder(folder, indent + 1) - folder = folders.GetNext() - - -def DumpFolders(session): - try: - infostores = session.InfoStores - except AttributeError: - # later outlook? - store = session.DefaultStore - folder = store.GetRootFolder() - DumpFolder(folder) - return - - print(infostores) - print("There are %d infostores" % infostores.Count) - for i in range(infostores.Count): - infostore = infostores[i + 1] - print("Infostore = ", infostore.Name) - try: - folder = infostore.RootFolder - except pythoncom.com_error as details: - hr, msg, exc, arg = details - # -2147221219 == MAPI_E_FAILONEPROVIDER - a single provider temporarily not available. 
- if exc and exc[-1] == -2147221219: - print("This info store is currently not available") - continue - DumpFolder(folder) - - -# Build a dictionary of property tags, so I can reverse look-up -# -PropTagsById = {} -if ammodule: - for name, val in ammodule.constants.__dict__.items(): - PropTagsById[val] = name - - -def TestAddress(session): - # entry = session.GetAddressEntry("Skip") - # print entry - pass - - -def TestUser(session): - ae = session.CurrentUser - fields = getattr(ae, "Fields", []) - print("User has %d fields" % len(fields)) - for f in range(len(fields)): - field = fields[f + 1] - try: - id = PropTagsById[field.ID] - except KeyError: - id = field.ID - print("%s/%s=%s" % (field.Name, id, field.Value)) - - -def test(): - import win32com.client - - oldcwd = os.getcwd() - try: - session = gencache.EnsureDispatch("MAPI.Session") - try: - session.Logon(GetDefaultProfileName()) - except pythoncom.com_error as details: - print("Could not log on to MAPI:", details) - return - except pythoncom.error: - # no mapi.session - let's try outlook - app = gencache.EnsureDispatch("Outlook.Application") - session = app.Session - - try: - TestUser(session) - TestAddress(session) - DumpFolders(session) - finally: - session.Logoff() - # It appears Exchange will change the cwd on us :( - os.chdir(oldcwd) - - -if __name__ == "__main__": - from .util import CheckClean - - test() - CheckClean() diff --git a/lib/win32com/test/testExplorer.py b/lib/win32com/test/testExplorer.py deleted file mode 100644 index bca66358..00000000 --- a/lib/win32com/test/testExplorer.py +++ /dev/null @@ -1,145 +0,0 @@ -# testExplorer - - -import os -import time - -import pythoncom -import win32api -import win32com.client.dynamic -import win32con -import win32gui -import winerror -from win32com.client import Dispatch -from win32com.test.util import CheckClean - -bVisibleEventFired = 0 - -# These are errors we might see when this is run in automation (eg, on github) -# Not sure exactly what -2125463506 
is, but google shows it's a common error -# possibly related to how IE is configured WRT site permissions etc. -HRESULTS_IN_AUTOMATION = [-2125463506, winerror.MK_E_UNAVAILABLE] - - -class ExplorerEvents: - def OnVisible(self, visible): - global bVisibleEventFired - bVisibleEventFired = 1 - - -def TestExplorerEvents(): - global bVisibleEventFired - try: - iexplore = win32com.client.DispatchWithEvents( - "InternetExplorer.Application", ExplorerEvents - ) - except pythoncom.com_error as exc: - # In automation we see this error trying to connect to events - # It's a little surprising that the non-event tests seem to work, but - # whatever... - if exc.hresult not in HRESULTS_IN_AUTOMATION: - raise - print("IE events appear to not be available, so skipping this test") - return - - iexplore.Visible = 1 - if not bVisibleEventFired: - raise RuntimeError("The IE event did not appear to fire!") - iexplore.Quit() - iexplore = None - - bVisibleEventFired = 0 - ie = win32com.client.Dispatch("InternetExplorer.Application") - ie_events = win32com.client.DispatchWithEvents(ie, ExplorerEvents) - ie.Visible = 1 - if not bVisibleEventFired: - raise RuntimeError("The IE event did not appear to fire!") - ie.Quit() - ie = None - print("IE Event tests worked.") - - -def TestObjectFromWindow(): - # Check we can use ObjectFromLresult to get the COM object from the - # HWND - see KB Q249232 - # Locating the HWND is different than the KB says... - hwnd = win32gui.FindWindow("IEFrame", None) - for child_class in [ - "TabWindowClass", - "Shell DocObject View", - "Internet Explorer_Server", - ]: - hwnd = win32gui.FindWindowEx(hwnd, 0, child_class, None) - # ack - not working for markh on vista with IE8 (or maybe it is the - # lack of the 'accessibility' components mentioned in Q249232) - # either way - not working! - return - # But here is the point - once you have an 'Internet Explorer_Server', - # you can send a message and use ObjectFromLresult to get it back. 
- msg = win32gui.RegisterWindowMessage("WM_HTML_GETOBJECT") - rc, result = win32gui.SendMessageTimeout( - hwnd, msg, 0, 0, win32con.SMTO_ABORTIFHUNG, 1000 - ) - ob = pythoncom.ObjectFromLresult(result, pythoncom.IID_IDispatch, 0) - doc = Dispatch(ob) - # just to prove it works, set the background color of the document. - for color in "red green blue orange white".split(): - doc.bgColor = color - time.sleep(0.2) - - -def TestExplorer(iexplore): - if not iexplore.Visible: - iexplore.Visible = -1 - filename = os.path.join(os.path.dirname(__file__), "..\\readme.html") - iexplore.Navigate(win32api.GetFullPathName(filename)) - win32api.Sleep(1000) - TestObjectFromWindow() - win32api.Sleep(3000) - try: - iexplore.Quit() - except (AttributeError, pythoncom.com_error): - # User got sick of waiting :) - pass - - -def TestAll(): - try: - try: - try: - iexplore = win32com.client.dynamic.Dispatch( - "InternetExplorer.Application" - ) - except pythoncom.com_error as exc: - if exc.hresult not in HRESULTS_IN_AUTOMATION: - raise - print("IE appears to not be available, so skipping this test") - return - - TestExplorer(iexplore) - - win32api.Sleep(1000) - iexplore = None - - # Test IE events. - TestExplorerEvents() - # Give IE a chance to shutdown, else it can get upset on fast machines. - time.sleep(2) - - # Note that the TextExplorerEvents will force makepy - hence - # this gencache is really no longer needed. - - from win32com.client import gencache - - gencache.EnsureModule("{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}", 0, 1, 1) - iexplore = win32com.client.Dispatch("InternetExplorer.Application") - TestExplorer(iexplore) - except pythoncom.com_error as exc: - if exc.hresult != winerror.RPC_E_DISCONNECTED: # user closed the app! 
- raise - finally: - iexplore = None - - -if __name__ == "__main__": - TestAll() - CheckClean() diff --git a/lib/win32com/test/testGIT.py b/lib/win32com/test/testGIT.py deleted file mode 100644 index 2ec6bc44..00000000 --- a/lib/win32com/test/testGIT.py +++ /dev/null @@ -1,145 +0,0 @@ -"""Testing pasing object between multiple COM threads - -Uses standard COM marshalling to pass objects between threads. Even -though Python generally seems to work when you just pass COM objects -between threads, it shouldnt. - -This shows the "correct" way to do it. - -It shows that although we create new threads to use the Python.Interpreter, -COM marshalls back all calls to that object to the main Python thread, -which must be running a message loop (as this sample does). - -When this test is run in "free threaded" mode (at this stage, you must -manually mark the COM objects as "ThreadingModel=Free", or run from a -service which has marked itself as free-threaded), then no marshalling -is done, and the Python.Interpreter object start doing the "expected" thing -- ie, it reports being on the same thread as its caller! - -Python.exe needs a good way to mark itself as FreeThreaded - at the moment -this is a pain in the but! - -""" - -import _thread -import traceback - -import pythoncom -import win32api -import win32com.client -import win32event - - -def TestInterp(interp): - if interp.Eval("1+1") != 2: - raise ValueError("The interpreter returned the wrong result.") - try: - interp.Eval(1 + 1) - raise ValueError("The interpreter did not raise an exception") - except pythoncom.com_error as details: - import winerror - - if details[0] != winerror.DISP_E_TYPEMISMATCH: - raise ValueError( - "The interpreter exception was not winerror.DISP_E_TYPEMISMATCH." 
- ) - - -def TestInterpInThread(stopEvent, cookie): - try: - DoTestInterpInThread(cookie) - finally: - win32event.SetEvent(stopEvent) - - -def CreateGIT(): - return pythoncom.CoCreateInstance( - pythoncom.CLSID_StdGlobalInterfaceTable, - None, - pythoncom.CLSCTX_INPROC, - pythoncom.IID_IGlobalInterfaceTable, - ) - - -def DoTestInterpInThread(cookie): - try: - pythoncom.CoInitialize() - myThread = win32api.GetCurrentThreadId() - GIT = CreateGIT() - - interp = GIT.GetInterfaceFromGlobal(cookie, pythoncom.IID_IDispatch) - interp = win32com.client.Dispatch(interp) - - TestInterp(interp) - interp.Exec("import win32api") - print( - "The test thread id is %d, Python.Interpreter's thread ID is %d" - % (myThread, interp.Eval("win32api.GetCurrentThreadId()")) - ) - interp = None - pythoncom.CoUninitialize() - except: - traceback.print_exc() - - -def BeginThreadsSimpleMarshal(numThreads, cookie): - """Creates multiple threads using simple (but slower) marshalling. - - Single interpreter object, but a new stream is created per thread. - - Returns the handles the threads will set when complete. - """ - ret = [] - for i in range(numThreads): - hEvent = win32event.CreateEvent(None, 0, 0, None) - _thread.start_new(TestInterpInThread, (hEvent, cookie)) - ret.append(hEvent) - return ret - - -def test(fn): - print("The main thread is %d" % (win32api.GetCurrentThreadId())) - GIT = CreateGIT() - interp = win32com.client.Dispatch("Python.Interpreter") - cookie = GIT.RegisterInterfaceInGlobal(interp._oleobj_, pythoncom.IID_IDispatch) - - events = fn(4, cookie) - numFinished = 0 - while 1: - try: - rc = win32event.MsgWaitForMultipleObjects( - events, 0, 2000, win32event.QS_ALLINPUT - ) - if rc >= win32event.WAIT_OBJECT_0 and rc < win32event.WAIT_OBJECT_0 + len( - events - ): - numFinished = numFinished + 1 - if numFinished >= len(events): - break - elif rc == win32event.WAIT_OBJECT_0 + len(events): # a message - # This is critical - whole apartment model demo will hang. 
- pythoncom.PumpWaitingMessages() - else: # Timeout - print( - "Waiting for thread to stop with interfaces=%d, gateways=%d" - % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount()) - ) - except KeyboardInterrupt: - break - GIT.RevokeInterfaceFromGlobal(cookie) - del interp - del GIT - - -if __name__ == "__main__": - test(BeginThreadsSimpleMarshal) - win32api.Sleep(500) - # Doing CoUninit here stop Pythoncom.dll hanging when DLLMain shuts-down the process - pythoncom.CoUninitialize() - if pythoncom._GetInterfaceCount() != 0 or pythoncom._GetGatewayCount() != 0: - print( - "Done with interfaces=%d, gateways=%d" - % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount()) - ) - else: - print("Done.") diff --git a/lib/win32com/test/testGatewayAddresses.py b/lib/win32com/test/testGatewayAddresses.py deleted file mode 100644 index d4899169..00000000 --- a/lib/win32com/test/testGatewayAddresses.py +++ /dev/null @@ -1,149 +0,0 @@ -# The purpose of this test is to ensure that the gateways objects -# do the right thing WRT COM rules about object identity etc. - -# Also includes a basic test that we support inheritance correctly in -# gateway interfaces. - -# For our test, we create an object of type IID_IPersistStorage -# This interface derives from IPersist. -# Therefore, QI's for IID_IDispatch, IID_IUnknown, IID_IPersist and -# IID_IPersistStorage should all return the same gateway object. -# -# In addition, the interface should only need to declare itself as -# using the IPersistStorage interface, and as the gateway derives -# from IPersist, it should automatically be available without declaration. -# -# We also create an object of type IID_I??, and perform a QI for it. -# We then jump through a number of hoops, ensuring that the objects -# returned by the QIs follow all the rules. 
-# -# Here is Gregs summary of the rules: -# 1) the set of supported interfaces is static and unchanging -# 2) symmetric: if you QI an interface for that interface, it succeeds -# 3) reflexive: if you QI against A for B, the new pointer must succeed -# for a QI for A -# 4) transitive: if you QI for B, then QI that for C, then QI'ing A for C -# must succeed -# -# -# Note that 1) Requires cooperation of the Python programmer. The rule to keep is: -# "whenever you return an _object_ from _query_interface_(), you must return the -# same object each time for a given IID. Note that you must return the same -# _wrapped_ object -# you -# The rest are tested here. - - -import pythoncom -from win32com.server.util import wrap - -from .util import CheckClean - -numErrors = 0 - - -# Check that the 2 objects both have identical COM pointers. -def CheckSameCOMObject(ob1, ob2): - addr1 = repr(ob1).split()[6][:-1] - addr2 = repr(ob2).split()[6][:-1] - return addr1 == addr2 - - -# Check that the objects conform to COM identity rules. -def CheckObjectIdentity(ob1, ob2): - u1 = ob1.QueryInterface(pythoncom.IID_IUnknown) - u2 = ob2.QueryInterface(pythoncom.IID_IUnknown) - return CheckSameCOMObject(u1, u2) - - -def FailObjectIdentity(ob1, ob2, when): - if not CheckObjectIdentity(ob1, ob2): - global numErrors - numErrors = numErrors + 1 - print(when, "are not identical (%s, %s)" % (repr(ob1), repr(ob2))) - - -class Dummy: - _public_methods_ = [] # We never attempt to make a call on this object. - _com_interfaces_ = [pythoncom.IID_IPersistStorage] - - -class Dummy2: - _public_methods_ = [] # We never attempt to make a call on this object. - _com_interfaces_ = [ - pythoncom.IID_IPersistStorage, - pythoncom.IID_IExternalConnection, - ] - - -class DeletgatedDummy: - _public_methods_ = [] - - -class Dummy3: - _public_methods_ = [] # We never attempt to make a call on this object. 
- _com_interfaces_ = [pythoncom.IID_IPersistStorage] - - def _query_interface_(self, iid): - if iid == pythoncom.IID_IExternalConnection: - # This will NEVER work - can only wrap the object once! - return wrap(DelegatedDummy()) - - -def TestGatewayInheritance(): - # By default, wrap() creates and discards a temporary object. - # This is not necessary, but just the current implementation of wrap. - # As the object is correctly discarded, it doesnt affect this test. - o = wrap(Dummy(), pythoncom.IID_IPersistStorage) - o2 = o.QueryInterface(pythoncom.IID_IUnknown) - FailObjectIdentity(o, o2, "IID_IPersistStorage->IID_IUnknown") - - o3 = o2.QueryInterface(pythoncom.IID_IDispatch) - - FailObjectIdentity(o2, o3, "IID_IUnknown->IID_IDispatch") - FailObjectIdentity(o, o3, "IID_IPersistStorage->IID_IDispatch") - - o4 = o3.QueryInterface(pythoncom.IID_IPersistStorage) - FailObjectIdentity(o, o4, "IID_IPersistStorage->IID_IPersistStorage(2)") - FailObjectIdentity(o2, o4, "IID_IUnknown->IID_IPersistStorage(2)") - FailObjectIdentity(o3, o4, "IID_IDispatch->IID_IPersistStorage(2)") - - o5 = o4.QueryInterface(pythoncom.IID_IPersist) - FailObjectIdentity(o, o5, "IID_IPersistStorage->IID_IPersist") - FailObjectIdentity(o2, o5, "IID_IUnknown->IID_IPersist") - FailObjectIdentity(o3, o5, "IID_IDispatch->IID_IPersist") - FailObjectIdentity(o4, o5, "IID_IPersistStorage(2)->IID_IPersist") - - -def TestMultiInterface(): - o = wrap(Dummy2(), pythoncom.IID_IPersistStorage) - o2 = o.QueryInterface(pythoncom.IID_IExternalConnection) - - FailObjectIdentity(o, o2, "IID_IPersistStorage->IID_IExternalConnection") - - # Make the same QI again, to make sure it is stable. 
- o22 = o.QueryInterface(pythoncom.IID_IExternalConnection) - FailObjectIdentity(o, o22, "IID_IPersistStorage->IID_IExternalConnection") - FailObjectIdentity( - o2, o22, "IID_IPersistStorage->IID_IExternalConnection (stability)" - ) - - o3 = o2.QueryInterface(pythoncom.IID_IPersistStorage) - FailObjectIdentity(o2, o3, "IID_IExternalConnection->IID_IPersistStorage") - FailObjectIdentity( - o, o3, "IID_IPersistStorage->IID_IExternalConnection->IID_IPersistStorage" - ) - - -def test(): - TestGatewayInheritance() - TestMultiInterface() - if numErrors == 0: - print("Worked ok") - else: - print("There were", numErrors, "errors.") - - -if __name__ == "__main__": - test() - CheckClean() diff --git a/lib/win32com/test/testInterp.vbs b/lib/win32com/test/testInterp.vbs deleted file mode 100644 index f34d17a9..00000000 --- a/lib/win32com/test/testInterp.vbs +++ /dev/null @@ -1,12 +0,0 @@ -set o = CreateObject("Python.Interpreter") -if o.Eval("1+1") <> 2 Then - WScript.Echo "Eval('1+1') failed" - bFailed = True -end if - -if bFailed then - WScript.Echo "*********** VBScript tests failed *********" -else - WScript.Echo "VBScript test worked OK" -end if - diff --git a/lib/win32com/test/testIterators.py b/lib/win32com/test/testIterators.py deleted file mode 100644 index b75a78df..00000000 --- a/lib/win32com/test/testIterators.py +++ /dev/null @@ -1,140 +0,0 @@ -# Some raw iter tests. 
Some "high-level" iterator tests can be found in -# testvb.py and testOutlook.py -import sys -import unittest - -import pythoncom -import win32com.server.util -import win32com.test.util -from win32com.client import Dispatch -from win32com.client.gencache import EnsureDispatch - - -class _BaseTestCase(win32com.test.util.TestCase): - def test_enumvariant_vb(self): - ob, iter = self.iter_factory() - got = [] - for v in iter: - got.append(v) - self.assertEqual(got, self.expected_data) - - def test_yield(self): - ob, i = self.iter_factory() - got = [] - for v in iter(i): - got.append(v) - self.assertEqual(got, self.expected_data) - - def _do_test_nonenum(self, object): - try: - for i in object: - pass - self.fail("Could iterate over a non-iterable object") - except TypeError: - pass # this is expected. - self.assertRaises(TypeError, iter, object) - self.assertRaises(AttributeError, getattr, object, "next") - - def test_nonenum_wrapper(self): - # Check our raw PyIDispatch - ob = self.object._oleobj_ - try: - for i in ob: - pass - self.fail("Could iterate over a non-iterable object") - except TypeError: - pass # this is expected. - self.assertRaises(TypeError, iter, ob) - self.assertRaises(AttributeError, getattr, ob, "next") - - # And our Dispatch wrapper - ob = self.object - try: - for i in ob: - pass - self.fail("Could iterate over a non-iterable object") - except TypeError: - pass # this is expected. - # Note that as our object may be dynamic, we *do* have a __getitem__ - # method, meaning we *can* call iter() on the object. In this case - # actual iteration is what fails. 
- # So either the 'iter(); will raise a type error, or an attempt to - # fetch it - try: - next(iter(ob)) - self.fail("Expected a TypeError fetching this iterator") - except TypeError: - pass - # And it should never have a 'next' method - self.assertRaises(AttributeError, getattr, ob, "next") - - -class VBTestCase(_BaseTestCase): - def setUp(self): - def factory(): - # Our VB test harness exposes a property with IEnumVariant. - ob = self.object.EnumerableCollectionProperty - for i in self.expected_data: - ob.Add(i) - # Get the raw IEnumVARIANT. - invkind = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET - iter = ob._oleobj_.InvokeTypes( - pythoncom.DISPID_NEWENUM, 0, invkind, (13, 10), () - ) - return ob, iter.QueryInterface(pythoncom.IID_IEnumVARIANT) - - # We *need* generated dispatch semantics, so dynamic __getitem__ etc - # don't get in the way of our tests. - self.object = EnsureDispatch("PyCOMVBTest.Tester") - self.expected_data = [1, "Two", "3"] - self.iter_factory = factory - - def tearDown(self): - self.object = None - - -# Test our client semantics, but using a wrapped Python list object. -# This has the effect of re-using our client specific tests, but in this -# case is exercising the server side. 
-class SomeObject: - _public_methods_ = ["GetCollection"] - - def __init__(self, data): - self.data = data - - def GetCollection(self): - return win32com.server.util.NewCollection(self.data) - - -class WrappedPythonCOMServerTestCase(_BaseTestCase): - def setUp(self): - def factory(): - ob = self.object.GetCollection() - flags = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET - enum = ob._oleobj_.Invoke(pythoncom.DISPID_NEWENUM, 0, flags, 1) - return ob, enum.QueryInterface(pythoncom.IID_IEnumVARIANT) - - self.expected_data = [1, "Two", 3] - sv = win32com.server.util.wrap(SomeObject(self.expected_data)) - self.object = Dispatch(sv) - self.iter_factory = factory - - def tearDown(self): - self.object = None - - -def suite(): - # We dont want our base class run - suite = unittest.TestSuite() - for item in list(globals().values()): - if ( - type(item) == type(unittest.TestCase) - and issubclass(item, unittest.TestCase) - and item != _BaseTestCase - ): - suite.addTest(unittest.makeSuite(item)) - return suite - - -if __name__ == "__main__": - unittest.main(argv=sys.argv + ["suite"]) diff --git a/lib/win32com/test/testMSOffice.py b/lib/win32com/test/testMSOffice.py deleted file mode 100644 index f4893dfd..00000000 --- a/lib/win32com/test/testMSOffice.py +++ /dev/null @@ -1,209 +0,0 @@ -# Test MSOffice -# -# Main purpose of test is to ensure that Dynamic COM objects -# work as expected. - -# Assumes Word and Excel installed on your machine. - -import traceback - -import pythoncom -import win32api -import win32com -import win32com.client.dynamic -from pywintypes import Unicode -from win32com.client import gencache -from win32com.test.util import CheckClean - -error = "MSOffice test error" - - -# Test a few of the MSOffice components. -def TestWord(): - # Try and load the object exposed by Word 8 - # Office 97 - _totally_ different object model! - try: - # NOTE - using "client.Dispatch" would return an msword8.py instance! 
- print("Starting Word 8 for dynamic test") - word = win32com.client.dynamic.Dispatch("Word.Application") - TestWord8(word) - - word = None - # Now we will test Dispatch without the new "lazy" capabilities - print("Starting Word 8 for non-lazy dynamic test") - dispatch = win32com.client.dynamic._GetGoodDispatch("Word.Application") - typeinfo = dispatch.GetTypeInfo() - attr = typeinfo.GetTypeAttr() - olerepr = win32com.client.build.DispatchItem(typeinfo, attr, None, 0) - word = win32com.client.dynamic.CDispatch(dispatch, olerepr) - dispatch = typeinfo = attr = olerepr = None - TestWord8(word) - - except pythoncom.com_error: - print("Starting Word 7 for dynamic test") - word = win32com.client.Dispatch("Word.Basic") - TestWord7(word) - - except Exception as e: - print("Word dynamic tests failed", e) - traceback.print_exc() - - print("Starting MSWord for generated test") - try: - from win32com.client import gencache - - word = gencache.EnsureDispatch("Word.Application.8") - TestWord8(word) - except Exception as e: - print("Word generated tests failed", e) - traceback.print_exc() - - -def TestWord7(word): - word.FileNew() - # If not shown, show the app. - if not word.AppShow(): - word._proc_("AppShow") - - for i in range(12): - word.FormatFont(Color=i + 1, Points=i + 12) - word.Insert("Hello from Python %d\n" % i) - - word.FileClose(2) - - -def TestWord8(word): - word.Visible = 1 - doc = word.Documents.Add() - wrange = doc.Range() - for i in range(10): - wrange.InsertAfter("Hello from Python %d\n" % i) - paras = doc.Paragraphs - for i in range(len(paras)): - # *sob* - in Word 2019, `p = paras(i+1)` seems to work to get a para - # but `p.Font` then blows up. - # p = paras[i]() - p = paras(i + 1) - p.Font.ColorIndex = i + 1 - p.Font.Size = 12 + (4 * i) - # XXX - note that - # for para in paras: - # para().Font... - # doesnt seem to work - no error, just doesnt work - # Should check if it works for VB! 
- doc.Close(SaveChanges=0) - word.Quit() - win32api.Sleep(1000) # Wait for word to close, else we - # may get OA error. - - -def TestWord8OldStyle(): - try: - import win32com.test.Generated4Test.msword8 - except ImportError: - print("Can not do old style test") - - -def TextExcel(xl): - xl.Visible = 0 - if xl.Visible: - raise error("Visible property is true.") - xl.Visible = 1 - if not xl.Visible: - raise error("Visible property not true.") - - if int(xl.Version[0]) >= 8: - xl.Workbooks.Add() - else: - xl.Workbooks().Add() - - xl.Range("A1:C1").Value = (1, 2, 3) - xl.Range("A2:C2").Value = ("x", "y", "z") - xl.Range("A3:C3").Value = ("3", "2", "1") - - for i in range(20): - xl.Cells(i + 1, i + 1).Value = "Hi %d" % i - - if xl.Range("A1").Value != "Hi 0": - raise error("Single cell range failed") - - if xl.Range("A1:B1").Value != ((Unicode("Hi 0"), 2),): - raise error("flat-horizontal cell range failed") - - if xl.Range("A1:A2").Value != ((Unicode("Hi 0"),), (Unicode("x"),)): - raise error("flat-vertical cell range failed") - - if xl.Range("A1:C3").Value != ( - (Unicode("Hi 0"), 2, 3), - (Unicode("x"), Unicode("Hi 1"), Unicode("z")), - (3, 2, Unicode("Hi 2")), - ): - raise error("square cell range failed") - - xl.Range("A1:C3").Value = ((3, 2, 1), ("x", "y", "z"), (1, 2, 3)) - - if xl.Range("A1:C3").Value != ( - (3, 2, 1), - (Unicode("x"), Unicode("y"), Unicode("z")), - (1, 2, 3), - ): - raise error("Range was not what I set it to!") - - # test dates out with Excel - xl.Cells(5, 1).Value = "Excel time" - xl.Cells(5, 2).Formula = "=Now()" - - import time - - xl.Cells(6, 1).Value = "Python time" - xl.Cells(6, 2).Value = pythoncom.MakeTime(time.time()) - xl.Cells(6, 2).NumberFormat = "d/mm/yy h:mm" - xl.Columns("A:B").EntireColumn.AutoFit() - - xl.Workbooks(1).Close(0) - xl.Quit() - - -def TestAll(): - TestWord() - - try: - print("Starting Excel for Dynamic test...") - xl = win32com.client.dynamic.Dispatch("Excel.Application") - TextExcel(xl) - except Exception as e: - 
worked = False - print("Excel tests failed", e) - traceback.print_exc() - - try: - print("Starting Excel 8 for generated excel8.py test...") - mod = gencache.EnsureModule( - "{00020813-0000-0000-C000-000000000046}", 0, 1, 2, bForDemand=1 - ) - xl = win32com.client.Dispatch("Excel.Application") - TextExcel(xl) - except ImportError: - print("Could not import the generated Excel 97 wrapper") - except Exception as e: - print("Generated Excel tests failed", e) - traceback.print_exc() - - try: - import xl5en32 - - mod = gencache.EnsureModule("{00020813-0000-0000-C000-000000000046}", 9, 1, 0) - xl = win32com.client.Dispatch("Excel.Application.5") - print("Starting Excel 95 for makepy test...") - TextExcel(xl) - except ImportError: - print("Could not import the generated Excel 95 wrapper") - except Exception as e: - print("Excel 95 tests failed", e) - traceback.print_exc() - - -if __name__ == "__main__": - TestAll() - CheckClean() - pythoncom.CoUninitialize() diff --git a/lib/win32com/test/testMSOfficeEvents.py b/lib/win32com/test/testMSOfficeEvents.py deleted file mode 100644 index c173c3ef..00000000 --- a/lib/win32com/test/testMSOfficeEvents.py +++ /dev/null @@ -1,136 +0,0 @@ -# OfficeEvents - test/demonstrate events with Word and Excel. -import msvcrt -import sys -import threading -import time -import types - -import pythoncom -from win32com.client import Dispatch, DispatchWithEvents - -stopEvent = threading.Event() - - -def TestExcel(): - class ExcelEvents: - def OnNewWorkbook(self, wb): - if type(wb) != types.InstanceType: - raise RuntimeError( - "The transformer doesnt appear to have translated this for us!" - ) - self.seen_events["OnNewWorkbook"] = None - - def OnWindowActivate(self, wb, wn): - if type(wb) != types.InstanceType or type(wn) != types.InstanceType: - raise RuntimeError( - "The transformer doesnt appear to have translated this for us!" 
- ) - self.seen_events["OnWindowActivate"] = None - - def OnWindowDeactivate(self, wb, wn): - self.seen_events["OnWindowDeactivate"] = None - - def OnSheetDeactivate(self, sh): - self.seen_events["OnSheetDeactivate"] = None - - def OnSheetBeforeDoubleClick(self, Sh, Target, Cancel): - if Target.Column % 2 == 0: - print("You can double-click there...") - else: - print("You can not double-click there...") - # This function is a void, so the result ends up in - # the only ByRef - Cancel. - return 1 - - class WorkbookEvents: - def OnActivate(self): - print("workbook OnActivate") - - def OnBeforeRightClick(self, Target, Cancel): - print("It's a Worksheet Event") - - e = DispatchWithEvents("Excel.Application", ExcelEvents) - e.seen_events = {} - e.Visible = 1 - book = e.Workbooks.Add() - book = DispatchWithEvents(book, WorkbookEvents) - print("Have book", book) - # sheet = e.Worksheets(1) - # sheet = DispatchWithEvents(sheet, WorksheetEvents) - - print("Double-click in a few of the Excel cells...") - print("Press any key when finished with Excel, or wait 10 seconds...") - if not _WaitForFinish(e, 10): - e.Quit() - if not _CheckSeenEvents(e, ["OnNewWorkbook", "OnWindowActivate"]): - sys.exit(1) - - -def TestWord(): - class WordEvents: - def OnDocumentChange(self): - self.seen_events["OnDocumentChange"] = None - - def OnWindowActivate(self, doc, wn): - self.seen_events["OnWindowActivate"] = None - - def OnQuit(self): - self.seen_events["OnQuit"] = None - stopEvent.set() - - w = DispatchWithEvents("Word.Application", WordEvents) - w.seen_events = {} - w.Visible = 1 - w.Documents.Add() - print("Press any key when finished with Word, or wait 10 seconds...") - if not _WaitForFinish(w, 10): - w.Quit() - if not _CheckSeenEvents(w, ["OnDocumentChange", "OnWindowActivate"]): - sys.exit(1) - - -def _WaitForFinish(ob, timeout): - end = time.time() + timeout - while 1: - if msvcrt.kbhit(): - msvcrt.getch() - break - pythoncom.PumpWaitingMessages() - stopEvent.wait(0.2) - if 
stopEvent.isSet(): - stopEvent.clear() - break - try: - if not ob.Visible: - # Gone invisible - we need to pretend we timed - # out, so the app is quit. - return 0 - except pythoncom.com_error: - # Excel is busy (eg, editing the cell) - ignore - pass - if time.time() > end: - return 0 - return 1 - - -def _CheckSeenEvents(o, events): - rc = 1 - for e in events: - if e not in o.seen_events: - print("ERROR: Expected event did not trigger", e) - rc = 0 - return rc - - -def test(): - import sys - - if "noword" not in sys.argv[1:]: - TestWord() - if "noexcel" not in sys.argv[1:]: - TestExcel() - print("Word and Excel event tests passed.") - - -if __name__ == "__main__": - test() diff --git a/lib/win32com/test/testMarshal.py b/lib/win32com/test/testMarshal.py deleted file mode 100644 index c57d445f..00000000 --- a/lib/win32com/test/testMarshal.py +++ /dev/null @@ -1,160 +0,0 @@ -"""Testing pasing object between multiple COM threads - -Uses standard COM marshalling to pass objects between threads. Even -though Python generally seems to work when you just pass COM objects -between threads, it shouldnt. - -This shows the "correct" way to do it. - -It shows that although we create new threads to use the Python.Interpreter, -COM marshalls back all calls to that object to the main Python thread, -which must be running a message loop (as this sample does). - -When this test is run in "free threaded" mode (at this stage, you must -manually mark the COM objects as "ThreadingModel=Free", or run from a -service which has marked itself as free-threaded), then no marshalling -is done, and the Python.Interpreter object start doing the "expected" thing -- ie, it reports being on the same thread as its caller! - -Python.exe needs a good way to mark itself as FreeThreaded - at the moment -this is a pain in the but! 
- -""" - -import threading -import unittest - -import pythoncom -import win32api -import win32com.client -import win32event - -from .testServers import InterpCase - -freeThreaded = 1 - - -class ThreadInterpCase(InterpCase): - def _testInterpInThread(self, stopEvent, interp): - try: - self._doTestInThread(interp) - finally: - win32event.SetEvent(stopEvent) - - def _doTestInThread(self, interp): - pythoncom.CoInitialize() - myThread = win32api.GetCurrentThreadId() - - if freeThreaded: - interp = pythoncom.CoGetInterfaceAndReleaseStream( - interp, pythoncom.IID_IDispatch - ) - interp = win32com.client.Dispatch(interp) - - interp.Exec("import win32api") - # print "The test thread id is %d, Python.Interpreter's thread ID is %d" % (myThread, interp.Eval("win32api.GetCurrentThreadId()")) - pythoncom.CoUninitialize() - - def BeginThreadsSimpleMarshal(self, numThreads): - """Creates multiple threads using simple (but slower) marshalling. - - Single interpreter object, but a new stream is created per thread. - - Returns the handles the threads will set when complete. - """ - interp = win32com.client.Dispatch("Python.Interpreter") - events = [] - threads = [] - for i in range(numThreads): - hEvent = win32event.CreateEvent(None, 0, 0, None) - events.append(hEvent) - interpStream = pythoncom.CoMarshalInterThreadInterfaceInStream( - pythoncom.IID_IDispatch, interp._oleobj_ - ) - t = threading.Thread( - target=self._testInterpInThread, args=(hEvent, interpStream) - ) - t.setDaemon(1) # so errors dont cause shutdown hang - t.start() - threads.append(t) - interp = None - return threads, events - - # - # NOTE - this doesnt quite work - Im not even sure it should, but Greg reckons - # you should be able to avoid the marshal per thread! - # I think that refers to CoMarshalInterface though... - def BeginThreadsFastMarshal(self, numThreads): - """Creates multiple threads using fast (but complex) marshalling. 
- - The marshal stream is created once, and each thread uses the same stream - - Returns the handles the threads will set when complete. - """ - interp = win32com.client.Dispatch("Python.Interpreter") - if freeThreaded: - interp = pythoncom.CoMarshalInterThreadInterfaceInStream( - pythoncom.IID_IDispatch, interp._oleobj_ - ) - events = [] - threads = [] - for i in range(numThreads): - hEvent = win32event.CreateEvent(None, 0, 0, None) - t = threading.Thread(target=self._testInterpInThread, args=(hEvent, interp)) - t.setDaemon(1) # so errors dont cause shutdown hang - t.start() - events.append(hEvent) - threads.append(t) - return threads, events - - def _DoTestMarshal(self, fn, bCoWait=0): - # print "The main thread is %d" % (win32api.GetCurrentThreadId()) - threads, events = fn(2) - numFinished = 0 - while 1: - try: - if bCoWait: - rc = pythoncom.CoWaitForMultipleHandles(0, 2000, events) - else: - # Specifying "bWaitAll" here will wait for messages *and* all events - # (which is pretty useless) - rc = win32event.MsgWaitForMultipleObjects( - events, 0, 2000, win32event.QS_ALLINPUT - ) - if ( - rc >= win32event.WAIT_OBJECT_0 - and rc < win32event.WAIT_OBJECT_0 + len(events) - ): - numFinished = numFinished + 1 - if numFinished >= len(events): - break - elif rc == win32event.WAIT_OBJECT_0 + len(events): # a message - # This is critical - whole apartment model demo will hang. 
- pythoncom.PumpWaitingMessages() - else: # Timeout - print( - "Waiting for thread to stop with interfaces=%d, gateways=%d" - % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount()) - ) - except KeyboardInterrupt: - break - for t in threads: - t.join(2) - self.assertFalse(t.is_alive(), "thread failed to stop!?") - threads = None # threads hold references to args - # Seems to be a leak here I can't locate :( - # self.assertEqual(pythoncom._GetInterfaceCount(), 0) - # self.assertEqual(pythoncom._GetGatewayCount(), 0) - - def testSimpleMarshal(self): - self._DoTestMarshal(self.BeginThreadsSimpleMarshal) - - def testSimpleMarshalCoWait(self): - self._DoTestMarshal(self.BeginThreadsSimpleMarshal, 1) - - -# def testFastMarshal(self): -# self._DoTestMarshal(self.BeginThreadsFastMarshal) - -if __name__ == "__main__": - unittest.main("testMarshal") diff --git a/lib/win32com/test/testNetscape.py b/lib/win32com/test/testNetscape.py deleted file mode 100644 index 471de802..00000000 --- a/lib/win32com/test/testNetscape.py +++ /dev/null @@ -1,27 +0,0 @@ -## AHH - I cant make this work!!! - -# But this is the general idea. - -import sys - -import netscape - -error = "Netscape Test Error" - -if __name__ == "__main__": - n = netscape.CNetworkCX() - rc = n.Open("http://d|/temp/apyext.html", 0, None, 0, None) - if not rc: - raise error("Open method of Netscape failed") - while 1: - num, str = n.Read(None, 0) - print("Got ", num, str) - if num == 0: - break # used to be continue - no idea!! 
- if num == -1: - break - # sys.stdout.write(str) - n.Close() - print("Done!") - del n - sys.last_type = sys.last_value = sys.last_traceback = None diff --git a/lib/win32com/test/testPersist.py b/lib/win32com/test/testPersist.py deleted file mode 100644 index 066f4029..00000000 --- a/lib/win32com/test/testPersist.py +++ /dev/null @@ -1,231 +0,0 @@ -import os - -import pythoncom -import pywintypes -import win32api -import win32com -import win32com.client -import win32com.client.dynamic -import win32com.server.util -import win32ui -from pywin32_testutil import str2bytes -from pywintypes import Unicode -from win32com import storagecon -from win32com.axcontrol import axcontrol -from win32com.test.util import CheckClean - -S_OK = 0 - - -import win32timezone - -now = win32timezone.now() - - -class LockBytes: - _public_methods_ = [ - "ReadAt", - "WriteAt", - "Flush", - "SetSize", - "LockRegion", - "UnlockRegion", - "Stat", - ] - _com_interfaces_ = [pythoncom.IID_ILockBytes] - - def __init__(self, data=""): - self.data = str2bytes(data) - self.ctime = now - self.mtime = now - self.atime = now - - def ReadAt(self, offset, cb): - print("ReadAt") - result = self.data[offset : offset + cb] - return result - - def WriteAt(self, offset, data): - print("WriteAt " + str(offset)) - print("len " + str(len(data))) - print("data:") - # print data - if len(self.data) >= offset: - newdata = self.data[0:offset] + data - print(len(newdata)) - if len(self.data) >= offset + len(data): - newdata = newdata + self.data[offset + len(data) :] - print(len(newdata)) - self.data = newdata - return len(data) - - def Flush(self, whatsthis=0): - print("Flush" + str(whatsthis)) - fname = os.path.join(win32api.GetTempPath(), "persist.doc") - open(fname, "wb").write(self.data) - return S_OK - - def SetSize(self, size): - print("Set Size" + str(size)) - if size > len(self.data): - self.data = self.data + str2bytes("\000" * (size - len(self.data))) - else: - self.data = self.data[0:size] - return S_OK - - 
def LockRegion(self, offset, size, locktype): - print("LockRegion") - - def UnlockRegion(self, offset, size, locktype): - print("UnlockRegion") - - def Stat(self, statflag): - print("returning Stat " + str(statflag)) - return ( - "PyMemBytes", - storagecon.STGTY_LOCKBYTES, - len(self.data), - self.mtime, - self.ctime, - self.atime, - storagecon.STGM_DIRECT | storagecon.STGM_READWRITE | storagecon.STGM_CREATE, - storagecon.STGM_SHARE_EXCLUSIVE, - "{00020905-0000-0000-C000-000000000046}", - 0, # statebits ? - 0, - ) - - -class OleClientSite: - _public_methods_ = [ - "SaveObject", - "GetMoniker", - "GetContainer", - "ShowObject", - "OnShowWindow", - "RequestNewObjectLayout", - ] - _com_interfaces_ = [axcontrol.IID_IOleClientSite] - - def __init__(self, data=""): - self.IPersistStorage = None - self.IStorage = None - - def SetIPersistStorage(self, IPersistStorage): - self.IPersistStorage = IPersistStorage - - def SetIStorage(self, IStorage): - self.IStorage = IStorage - - def SaveObject(self): - print("SaveObject") - if self.IPersistStorage != None and self.IStorage != None: - self.IPersistStorage.Save(self.IStorage, 1) - self.IStorage.Commit(0) - return S_OK - - def GetMoniker(self, dwAssign, dwWhichMoniker): - print("GetMoniker " + str(dwAssign) + " " + str(dwWhichMoniker)) - - def GetContainer(self): - print("GetContainer") - - def ShowObject(self): - print("ShowObject") - - def OnShowWindow(self, fShow): - print("ShowObject" + str(fShow)) - - def RequestNewObjectLayout(self): - print("RequestNewObjectLayout") - - -def test(): - # create a LockBytes object and - # wrap it as a COM object - # import win32com.server.dispatcher - lbcom = win32com.server.util.wrap( - LockBytes(), pythoncom.IID_ILockBytes - ) # , useDispatcher=win32com.server.dispatcher.DispatcherWin32trace) - - # create a structured storage on the ILockBytes object - stcom = pythoncom.StgCreateDocfileOnILockBytes( - lbcom, - storagecon.STGM_DIRECT - | storagecon.STGM_CREATE - | storagecon.STGM_READWRITE 
- | storagecon.STGM_SHARE_EXCLUSIVE, - 0, - ) - - # create our ClientSite - ocs = OleClientSite() - # wrap it as a COM object - ocscom = win32com.server.util.wrap(ocs, axcontrol.IID_IOleClientSite) - - # create a Word OLE Document, connect it to our site and our storage - oocom = axcontrol.OleCreate( - "{00020906-0000-0000-C000-000000000046}", - axcontrol.IID_IOleObject, - 0, - (0,), - ocscom, - stcom, - ) - - mf = win32ui.GetMainFrame() - hwnd = mf.GetSafeHwnd() - - # Set the host and document name - # for unknown reason document name becomes hostname, and document name - # is not set, debugged it, but don't know where the problem is? - oocom.SetHostNames("OTPython", "This is Cool") - - # activate the OLE document - oocom.DoVerb(-1, ocscom, 0, hwnd, mf.GetWindowRect()) - - # set the hostnames again - oocom.SetHostNames("OTPython2", "ThisisCool2") - - # get IDispatch of Word - doc = win32com.client.Dispatch(oocom.QueryInterface(pythoncom.IID_IDispatch)) - - # get IPersistStorage of Word - dpcom = oocom.QueryInterface(pythoncom.IID_IPersistStorage) - - # let our ClientSite know the interfaces - ocs.SetIPersistStorage(dpcom) - ocs.SetIStorage(stcom) - - # use IDispatch to do the Office Word test - # pasted from TestOffice.py - - wrange = doc.Range() - for i in range(10): - wrange.InsertAfter("Hello from Python %d\n" % i) - paras = doc.Paragraphs - for i in range(len(paras)): - paras[i]().Font.ColorIndex = i + 1 - paras[i]().Font.Size = 12 + (4 * i) - # XXX - note that - # for para in paras: - # para().Font... - # doesnt seem to work - no error, just doesnt work - # Should check if it works for VB! - - dpcom.Save(stcom, 0) - dpcom.HandsOffStorage() - # oocom.Close(axcontrol.OLECLOSE_NOSAVE) # or OLECLOSE_SAVEIFDIRTY, but it fails??? 
- - # Save the ILockBytes data to "persist2.doc" - lbcom.Flush() - - # exiting Winword will automatically update the ILockBytes data - # and flush it to "%TEMP%\persist.doc" - doc.Application.Quit() - - -if __name__ == "__main__": - test() - pythoncom.CoUninitialize() - CheckClean() diff --git a/lib/win32com/test/testPippo.py b/lib/win32com/test/testPippo.py deleted file mode 100644 index a1a762c2..00000000 --- a/lib/win32com/test/testPippo.py +++ /dev/null @@ -1,82 +0,0 @@ -import sys -import unittest - -import pythoncom -from win32com.client import Dispatch -from win32com.client.gencache import EnsureDispatch - - -class PippoTester(unittest.TestCase): - def setUp(self): - from win32com.test import pippo_server - from win32com.test.util import RegisterPythonServer - - RegisterPythonServer(pippo_server.__file__, "Python.Test.Pippo") - # create it. - self.object = Dispatch("Python.Test.Pippo") - - def testLeaks(self): - try: - gtrc = sys.gettotalrefcount - except AttributeError: - print("Please run this with python_d for leak tests") - gtrc = lambda: 0 - # note creating self.object() should have consumed our "one time" leaks - self.object.Method1() - start = gtrc() - for i in range(1000): - object = Dispatch("Python.Test.Pippo") - object.Method1() - object = None - end = gtrc() - if end - start > 5: - self.fail("We lost %d references!" 
% (end - start,)) - - def testResults(self): - rc, out1 = self.object.Method2(123, 111) - self.assertEqual(rc, 123) - self.assertEqual(out1, 222) - - def testPythonArrays(self): - self._testArray([-3, -2, -1, 0, 1, 2, 3]) - self._testArray([-3.14, -2, -0.1, 0.0, 1.1, 2.5, 3]) - - def testNumpyArrays(self): - try: - import numpy - except: - print("Numpy test not possible because numpy module failed to import") - return - self._testArray(numpy.array([-3, -2, -1, 0, 1, 2, 3])) - self._testArray(numpy.array([-3.14, -2, -0.1, 0.0, 1.1, 2.5, 3])) - - def testByteArrays(self): - if "bytes" in dir(__builtins__): - # Use eval to avoid compilation error in Python 2. - self._testArray(eval("b'abcdef'")) - self._testArray(eval("bytearray(b'abcdef')")) - - def _testArray(self, inArray): - outArray = self.object.Method3(inArray) - self.assertEqual(list(outArray), list(inArray)) - - def testLeaksGencache(self): - try: - gtrc = sys.gettotalrefcount - except AttributeError: - print("Please run this with python_d for leak tests") - gtrc = lambda: 0 - # note creating self.object() should have consumed our "one time" leaks - object = EnsureDispatch("Python.Test.Pippo") - start = gtrc() - for i in range(1000): - object = EnsureDispatch("Python.Test.Pippo") - object.Method1() - object = None - end = gtrc() - if end - start > 10: - self.fail("We lost %d references!" % (end - start,)) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testPyComTest.py b/lib/win32com/test/testPyComTest.py deleted file mode 100644 index aec0a6f5..00000000 --- a/lib/win32com/test/testPyComTest.py +++ /dev/null @@ -1,835 +0,0 @@ -# NOTE - Still seems to be a leak here somewhere -# gateway count doesnt hit zero. Hence the print statements! - -import sys - -sys.coinit_flags = 0 # Must be free-threaded! 
-import datetime -import decimal -import os -import time - -import pythoncom -import pywintypes -import win32api -import win32com -import win32com.client.connect -import win32timezone -import winerror -from pywin32_testutil import str2memory -from win32com.client import VARIANT, CastTo, DispatchBaseClass, constants -from win32com.test.util import CheckClean, RegisterPythonServer - -importMsg = "**** PyCOMTest is not installed ***\n PyCOMTest is a Python test specific COM client and server.\n It is likely this server is not installed on this machine\n To install the server, you must get the win32com sources\n and build it using MS Visual C++" - -error = Exception - -# This test uses a Python implemented COM server - ensure correctly registered. -RegisterPythonServer( - os.path.join(os.path.dirname(__file__), "..", "servers", "test_pycomtest.py"), - "Python.Test.PyCOMTest", -) - -from win32com.client import gencache - -try: - gencache.EnsureModule("{6BCDCB60-5605-11D0-AE5F-CADD4C000000}", 0, 1, 1) -except pythoncom.com_error: - print("The PyCOMTest module can not be located or generated.") - print(importMsg) - raise RuntimeError(importMsg) - -# We had a bg where RegisterInterfaces would fail if gencache had -# already been run - exercise that here -from win32com import universal - -universal.RegisterInterfaces("{6BCDCB60-5605-11D0-AE5F-CADD4C000000}", 0, 1, 1) - -verbose = 0 - - -def check_get_set(func, arg): - got = func(arg) - if got != arg: - raise error("%s failed - expected %r, got %r" % (func, arg, got)) - - -def check_get_set_raises(exc, func, arg): - try: - got = func(arg) - except exc as e: - pass # what we expect! 
- else: - raise error( - "%s with arg %r didn't raise %s - returned %r" % (func, arg, exc, got) - ) - - -def progress(*args): - if verbose: - for arg in args: - print(arg, end=" ") - print() - - -def TestApplyResult(fn, args, result): - try: - fnName = str(fn).split()[1] - except: - fnName = str(fn) - progress("Testing ", fnName) - pref = "function " + fnName - rc = fn(*args) - if rc != result: - raise error("%s failed - result not %r but %r" % (pref, result, rc)) - - -def TestConstant(constName, pyConst): - try: - comConst = getattr(constants, constName) - except: - raise error("Constant %s missing" % (constName,)) - if comConst != pyConst: - raise error( - "Constant value wrong for %s - got %s, wanted %s" - % (constName, comConst, pyConst) - ) - - -# Simple handler class. This demo only fires one event. -class RandomEventHandler: - def _Init(self): - self.fireds = {} - - def OnFire(self, no): - try: - self.fireds[no] = self.fireds[no] + 1 - except KeyError: - self.fireds[no] = 0 - - def OnFireWithNamedParams(self, no, a_bool, out1, out2): - # This test exists mainly to help with an old bug, where named - # params would come in reverse. - Missing = pythoncom.Missing - if no is not Missing: - # We know our impl called 'OnFire' with the same ID - assert no in self.fireds - assert no + 1 == out1, "expecting 'out1' param to be ID+1" - assert no + 2 == out2, "expecting 'out2' param to be ID+2" - # The middle must be a boolean. 
- assert a_bool is Missing or type(a_bool) == bool, "middle param not a bool" - return out1 + 2, out2 + 2 - - def _DumpFireds(self): - if not self.fireds: - print("ERROR: Nothing was received!") - for firedId, no in self.fireds.items(): - progress("ID %d fired %d times" % (firedId, no)) - - -# A simple handler class that derives from object (ie, a "new style class") - -# only relevant for Python 2.x (ie, the 2 classes should be identical in 3.x) -class NewStyleRandomEventHandler(object): - def _Init(self): - self.fireds = {} - - def OnFire(self, no): - try: - self.fireds[no] = self.fireds[no] + 1 - except KeyError: - self.fireds[no] = 0 - - def OnFireWithNamedParams(self, no, a_bool, out1, out2): - # This test exists mainly to help with an old bug, where named - # params would come in reverse. - Missing = pythoncom.Missing - if no is not Missing: - # We know our impl called 'OnFire' with the same ID - assert no in self.fireds - assert no + 1 == out1, "expecting 'out1' param to be ID+1" - assert no + 2 == out2, "expecting 'out2' param to be ID+2" - # The middle must be a boolean. 
- assert a_bool is Missing or type(a_bool) == bool, "middle param not a bool" - return out1 + 2, out2 + 2 - - def _DumpFireds(self): - if not self.fireds: - print("ERROR: Nothing was received!") - for firedId, no in self.fireds.items(): - progress("ID %d fired %d times" % (firedId, no)) - - -# Test everything which can be tested using both the "dynamic" and "generated" -# COM objects (or when there are very subtle differences) -def TestCommon(o, is_generated): - progress("Getting counter") - counter = o.GetSimpleCounter() - TestCounter(counter, is_generated) - - progress("Checking default args") - rc = o.TestOptionals() - if rc[:-1] != ("def", 0, 1) or abs(rc[-1] - 3.14) > 0.01: - print(rc) - raise error("Did not get the optional values correctly") - rc = o.TestOptionals("Hi", 2, 3, 1.1) - if rc[:-1] != ("Hi", 2, 3) or abs(rc[-1] - 1.1) > 0.01: - print(rc) - raise error("Did not get the specified optional values correctly") - rc = o.TestOptionals2(0) - if rc != (0, "", 1): - print(rc) - raise error("Did not get the optional2 values correctly") - rc = o.TestOptionals2(1.1, "Hi", 2) - if rc[1:] != ("Hi", 2) or abs(rc[0] - 1.1) > 0.01: - print(rc) - raise error("Did not get the specified optional2 values correctly") - - progress("Checking getting/passing IUnknown") - check_get_set(o.GetSetUnknown, o) - progress("Checking getting/passing IDispatch") - # This might be called with either the interface or the CoClass - but these - # functions always return from the interface. 
- expected_class = o.__class__ - # CoClass instances have `default_interface` - expected_class = getattr(expected_class, "default_interface", expected_class) - if not isinstance(o.GetSetDispatch(o), expected_class): - raise error("GetSetDispatch failed: %r" % (o.GetSetDispatch(o),)) - progress("Checking getting/passing IDispatch of known type") - expected_class = o.__class__ - expected_class = getattr(expected_class, "default_interface", expected_class) - if o.GetSetInterface(o).__class__ != expected_class: - raise error("GetSetDispatch failed") - - progress("Checking misc args") - check_get_set(o.GetSetVariant, 4) - check_get_set(o.GetSetVariant, "foo") - check_get_set(o.GetSetVariant, o) - - # signed/unsigned. - check_get_set(o.GetSetInt, 0) - check_get_set(o.GetSetInt, -1) - check_get_set(o.GetSetInt, 1) - - check_get_set(o.GetSetUnsignedInt, 0) - check_get_set(o.GetSetUnsignedInt, 1) - check_get_set(o.GetSetUnsignedInt, 0x80000000) - if o.GetSetUnsignedInt(-1) != 0xFFFFFFFF: - # -1 is a special case - we accept a negative int (silently converting to - # unsigned) but when getting it back we convert it to a long. - raise error("unsigned -1 failed") - - check_get_set(o.GetSetLong, 0) - check_get_set(o.GetSetLong, -1) - check_get_set(o.GetSetLong, 1) - - check_get_set(o.GetSetUnsignedLong, 0) - check_get_set(o.GetSetUnsignedLong, 1) - check_get_set(o.GetSetUnsignedLong, 0x80000000) - # -1 is a special case - see above. - if o.GetSetUnsignedLong(-1) != 0xFFFFFFFF: - raise error("unsigned -1 failed") - - # We want to explicitly test > 32 bits. py3k has no 'maxint' and - # 'maxsize+1' is no good on 64bit platforms as its 65 bits! 
- big = 2147483647 # sys.maxint on py2k - for l in big, big + 1, 1 << 65: - check_get_set(o.GetSetVariant, l) - - progress("Checking structs") - r = o.GetStruct() - assert r.int_value == 99 and str(r.str_value) == "Hello from C++" - assert o.DoubleString("foo") == "foofoo" - - progress("Checking var args") - o.SetVarArgs("Hi", "There", "From", "Python", 1) - if o.GetLastVarArgs() != ("Hi", "There", "From", "Python", 1): - raise error("VarArgs failed -" + str(o.GetLastVarArgs())) - - progress("Checking arrays") - l = [] - TestApplyResult(o.SetVariantSafeArray, (l,), len(l)) - l = [1, 2, 3, 4] - TestApplyResult(o.SetVariantSafeArray, (l,), len(l)) - TestApplyResult( - o.CheckVariantSafeArray, - ( - ( - 1, - 2, - 3, - 4, - ), - ), - 1, - ) - - # and binary - TestApplyResult(o.SetBinSafeArray, (str2memory("foo\0bar"),), 7) - - progress("Checking properties") - o.LongProp = 3 - if o.LongProp != 3 or o.IntProp != 3: - raise error("Property value wrong - got %d/%d" % (o.LongProp, o.IntProp)) - o.LongProp = o.IntProp = -3 - if o.LongProp != -3 or o.IntProp != -3: - raise error("Property value wrong - got %d/%d" % (o.LongProp, o.IntProp)) - # This number fits in an unsigned long. Attempting to set it to a normal - # long will involve overflow, which is to be expected. But we do - # expect it to work in a property explicitly a VT_UI4. 
- check = 3 * 10**9 - o.ULongProp = check - if o.ULongProp != check: - raise error( - "Property value wrong - got %d (expected %d)" % (o.ULongProp, check) - ) - - TestApplyResult(o.Test, ("Unused", 99), 1) # A bool function - TestApplyResult(o.Test, ("Unused", -1), 1) # A bool function - TestApplyResult(o.Test, ("Unused", 1 == 1), 1) # A bool function - TestApplyResult(o.Test, ("Unused", 0), 0) - TestApplyResult(o.Test, ("Unused", 1 == 0), 0) - - assert o.DoubleString("foo") == "foofoo" - - TestConstant("ULongTest1", 0xFFFFFFFF) - TestConstant("ULongTest2", 0x7FFFFFFF) - TestConstant("LongTest1", -0x7FFFFFFF) - TestConstant("LongTest2", 0x7FFFFFFF) - TestConstant("UCharTest", 255) - TestConstant("CharTest", -1) - # 'Hello World', but the 'r' is the "Registered" sign (\xae) - TestConstant("StringTest", "Hello Wo\xaeld") - - progress("Checking dates and times") - # For now *all* times passed must be tz-aware. - now = win32timezone.now() - # but conversion to and from a VARIANT loses sub-second... - now = now.replace(microsecond=0) - later = now + datetime.timedelta(seconds=1) - TestApplyResult(o.EarliestDate, (now, later), now) - - # The below used to fail with `ValueError: microsecond must be in 0..999999` - see #1655 - # https://planetcalc.com/7027/ says that float is: Sun, 25 Mar 1951 7:23:49 am - assert o.MakeDate(18712.308206013888) == datetime.datetime.fromisoformat( - "1951-03-25 07:23:49+00:00" - ) - - progress("Checking currency") - # currency. 
- pythoncom.__future_currency__ = 1 - if o.CurrencyProp != 0: - raise error("Expecting 0, got %r" % (o.CurrencyProp,)) - for val in ("1234.5678", "1234.56", "1234"): - o.CurrencyProp = decimal.Decimal(val) - if o.CurrencyProp != decimal.Decimal(val): - raise error("%s got %r" % (val, o.CurrencyProp)) - v1 = decimal.Decimal("1234.5678") - TestApplyResult(o.DoubleCurrency, (v1,), v1 * 2) - - v2 = decimal.Decimal("9012.3456") - TestApplyResult(o.AddCurrencies, (v1, v2), v1 + v2) - - TestTrickyTypesWithVariants(o, is_generated) - progress("Checking win32com.client.VARIANT") - TestPyVariant(o, is_generated) - - -def TestTrickyTypesWithVariants(o, is_generated): - # Test tricky stuff with type handling and generally only works with - # "generated" support but can be worked around using VARIANT. - if is_generated: - got = o.TestByRefVariant(2) - else: - v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_VARIANT, 2) - o.TestByRefVariant(v) - got = v.value - if got != 4: - raise error("TestByRefVariant failed") - - if is_generated: - got = o.TestByRefString("Foo") - else: - v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_BSTR, "Foo") - o.TestByRefString(v) - got = v.value - if got != "FooFoo": - raise error("TestByRefString failed") - - # check we can pass ints as a VT_UI1 - vals = [1, 2, 3, 4] - if is_generated: - arg = vals - else: - arg = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_UI1, vals) - TestApplyResult(o.SetBinSafeArray, (arg,), len(vals)) - - # safearrays of doubles and floats - vals = [0, 1.1, 2.2, 3.3] - if is_generated: - arg = vals - else: - arg = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_R8, vals) - TestApplyResult(o.SetDoubleSafeArray, (arg,), len(vals)) - - if is_generated: - arg = vals - else: - arg = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_R4, vals) - TestApplyResult(o.SetFloatSafeArray, (arg,), len(vals)) - - vals = [1.1, 2.2, 3.3, 4.4] - expected = (1.1 * 2, 2.2 * 2, 3.3 * 2, 4.4 * 2) - if is_generated: - TestApplyResult(o.ChangeDoubleSafeArray, (vals,), 
expected) - else: - arg = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_ARRAY | pythoncom.VT_R8, vals) - o.ChangeDoubleSafeArray(arg) - if arg.value != expected: - raise error("ChangeDoubleSafeArray got the wrong value") - - if is_generated: - got = o.DoubleInOutString("foo") - else: - v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_BSTR, "foo") - o.DoubleInOutString(v) - got = v.value - assert got == "foofoo", got - - val = decimal.Decimal("1234.5678") - if is_generated: - got = o.DoubleCurrencyByVal(val) - else: - v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_CY, val) - o.DoubleCurrencyByVal(v) - got = v.value - assert got == val * 2 - - -def TestDynamic(): - progress("Testing Dynamic") - import win32com.client.dynamic - - o = win32com.client.dynamic.DumbDispatch("PyCOMTest.PyCOMTest") - TestCommon(o, False) - - counter = win32com.client.dynamic.DumbDispatch("PyCOMTest.SimpleCounter") - TestCounter(counter, False) - - # Dynamic doesn't know this should be an int, so we get a COM - # TypeMismatch error. - try: - check_get_set_raises(ValueError, o.GetSetInt, "foo") - raise error("no exception raised") - except pythoncom.com_error as exc: - if exc.hresult != winerror.DISP_E_TYPEMISMATCH: - raise - - arg1 = VARIANT(pythoncom.VT_R4 | pythoncom.VT_BYREF, 2.0) - arg2 = VARIANT(pythoncom.VT_BOOL | pythoncom.VT_BYREF, True) - arg3 = VARIANT(pythoncom.VT_I4 | pythoncom.VT_BYREF, 4) - o.TestInOut(arg1, arg2, arg3) - assert arg1.value == 4.0, arg1 - assert arg2.value == False - assert arg3.value == 8 - - # damn - props with params don't work for dynamic objects :( - # o.SetParamProp(0, 1) - # if o.ParamProp(0) != 1: - # raise RuntimeError, o.paramProp(0) - - -def TestGenerated(): - # Create an instance of the server. 
- from win32com.client.gencache import EnsureDispatch - - o = EnsureDispatch("PyCOMTest.PyCOMTest") - TestCommon(o, True) - - counter = EnsureDispatch("PyCOMTest.SimpleCounter") - TestCounter(counter, True) - - # This dance lets us get a CoClass even though it's not explicitly registered. - # This is `CoPyComTest` - from win32com.client.CLSIDToClass import GetClass - - coclass_o = GetClass("{8EE0C520-5605-11D0-AE5F-CADD4C000000}")() - TestCommon(coclass_o, True) - - # Test the regression reported in #1753 - assert bool(coclass_o) - - # This is `CoSimpleCounter` and the counter tests should work. - coclass = GetClass("{B88DD310-BAE8-11D0-AE86-76F2C1000000}")() - TestCounter(coclass, True) - - # XXX - this is failing in dynamic tests, but should work fine. - i1, i2 = o.GetMultipleInterfaces() - if not isinstance(i1, DispatchBaseClass) or not isinstance(i2, DispatchBaseClass): - # Yay - is now an instance returned! - raise error( - "GetMultipleInterfaces did not return instances - got '%s', '%s'" % (i1, i2) - ) - del i1 - del i2 - - # Generated knows to only pass a 32bit int, so should fail. - check_get_set_raises(OverflowError, o.GetSetInt, 0x80000000) - check_get_set_raises(OverflowError, o.GetSetLong, 0x80000000) - - # Generated knows this should be an int, so raises ValueError - check_get_set_raises(ValueError, o.GetSetInt, "foo") - check_get_set_raises(ValueError, o.GetSetLong, "foo") - - # Pass some non-sequence objects to our array decoder, and watch it fail. 
- try: - o.SetVariantSafeArray("foo") - raise error("Expected a type error") - except TypeError: - pass - try: - o.SetVariantSafeArray(666) - raise error("Expected a type error") - except TypeError: - pass - - o.GetSimpleSafeArray(None) - TestApplyResult(o.GetSimpleSafeArray, (None,), tuple(range(10))) - resultCheck = tuple(range(5)), tuple(range(10)), tuple(range(20)) - TestApplyResult(o.GetSafeArrays, (None, None, None), resultCheck) - - l = [] - TestApplyResult(o.SetIntSafeArray, (l,), len(l)) - l = [1, 2, 3, 4] - TestApplyResult(o.SetIntSafeArray, (l,), len(l)) - ll = [1, 2, 3, 0x100000000] - TestApplyResult(o.SetLongLongSafeArray, (ll,), len(ll)) - TestApplyResult(o.SetULongLongSafeArray, (ll,), len(ll)) - - # Tell the server to do what it does! - TestApplyResult(o.Test2, (constants.Attr2,), constants.Attr2) - TestApplyResult(o.Test3, (constants.Attr2,), constants.Attr2) - TestApplyResult(o.Test4, (constants.Attr2,), constants.Attr2) - TestApplyResult(o.Test5, (constants.Attr2,), constants.Attr2) - - TestApplyResult(o.Test6, (constants.WideAttr1,), constants.WideAttr1) - TestApplyResult(o.Test6, (constants.WideAttr2,), constants.WideAttr2) - TestApplyResult(o.Test6, (constants.WideAttr3,), constants.WideAttr3) - TestApplyResult(o.Test6, (constants.WideAttr4,), constants.WideAttr4) - TestApplyResult(o.Test6, (constants.WideAttr5,), constants.WideAttr5) - - TestApplyResult(o.TestInOut, (2.0, True, 4), (4.0, False, 8)) - - o.SetParamProp(0, 1) - if o.ParamProp(0) != 1: - raise RuntimeError(o.paramProp(0)) - - # Make sure CastTo works - even though it is only casting it to itself! - o2 = CastTo(o, "IPyCOMTest") - if o != o2: - raise error("CastTo should have returned the same object") - - # Do the connection point thing... - # Create a connection object. 
- progress("Testing connection points") - o2 = win32com.client.DispatchWithEvents(o, RandomEventHandler) - TestEvents(o2, o2) - o2 = win32com.client.DispatchWithEvents(o, NewStyleRandomEventHandler) - TestEvents(o2, o2) - # and a plain "WithEvents". - handler = win32com.client.WithEvents(o, RandomEventHandler) - TestEvents(o, handler) - handler = win32com.client.WithEvents(o, NewStyleRandomEventHandler) - TestEvents(o, handler) - progress("Finished generated .py test.") - - -def TestEvents(o, handler): - sessions = [] - handler._Init() - try: - for i in range(3): - session = o.Start() - sessions.append(session) - time.sleep(0.5) - finally: - # Stop the servers - for session in sessions: - o.Stop(session) - handler._DumpFireds() - handler.close() - - -def _TestPyVariant(o, is_generated, val, checker=None): - if is_generated: - vt, got = o.GetVariantAndType(val) - else: - # Gotta supply all 3 args with the last 2 being explicit variants to - # get the byref behaviour. - var_vt = VARIANT(pythoncom.VT_UI2 | pythoncom.VT_BYREF, 0) - var_result = VARIANT(pythoncom.VT_VARIANT | pythoncom.VT_BYREF, 0) - o.GetVariantAndType(val, var_vt, var_result) - vt = var_vt.value - got = var_result.value - if checker is not None: - checker(got) - return - # default checking. - assert vt == val.varianttype, (vt, val.varianttype) - # Handle our safe-array test - if the passed value is a list of variants, - # compare against the actual values. - if type(val.value) in (tuple, list): - check = [v.value if isinstance(v, VARIANT) else v for v in val.value] - # pythoncom always returns arrays as tuples. 
- got = list(got) - else: - check = val.value - assert type(check) == type(got), (type(check), type(got)) - assert check == got, (check, got) - - -def _TestPyVariantFails(o, is_generated, val, exc): - try: - _TestPyVariant(o, is_generated, val) - raise error("Setting %r didn't raise %s" % (val, exc)) - except exc: - pass - - -def TestPyVariant(o, is_generated): - _TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_UI1, 1)) - _TestPyVariant( - o, is_generated, VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_UI4, [1, 2, 3]) - ) - _TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_BSTR, "hello")) - _TestPyVariant( - o, - is_generated, - VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_BSTR, ["hello", "there"]), - ) - - def check_dispatch(got): - assert isinstance(got._oleobj_, pythoncom.TypeIIDs[pythoncom.IID_IDispatch]) - - _TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_DISPATCH, o), check_dispatch) - _TestPyVariant( - o, is_generated, VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_DISPATCH, [o]) - ) - # an array of variants each with a specific type. - v = VARIANT( - pythoncom.VT_ARRAY | pythoncom.VT_VARIANT, - [ - VARIANT(pythoncom.VT_UI4, 1), - VARIANT(pythoncom.VT_UI4, 2), - VARIANT(pythoncom.VT_UI4, 3), - ], - ) - _TestPyVariant(o, is_generated, v) - - # and failures - _TestPyVariantFails(o, is_generated, VARIANT(pythoncom.VT_UI1, "foo"), ValueError) - - -def TestCounter(counter, bIsGenerated): - # Test random access into container - progress("Testing counter", repr(counter)) - import random - - for i in range(50): - num = int(random.random() * len(counter)) - try: - # XXX - this appears broken by commit 08a14d4deb374eaa06378509cf44078ad467b9dc - - # We shouldn't need to do generated differently than dynamic. 
- if bIsGenerated: - ret = counter.Item(num + 1) - else: - ret = counter[num] - if ret != num + 1: - raise error( - "Random access into element %d failed - return was %s" - % (num, repr(ret)) - ) - except IndexError: - raise error("** IndexError accessing collection element %d" % num) - - num = 0 - if bIsGenerated: - counter.SetTestProperty(1) - counter.TestProperty = 1 # Note this has a second, default arg. - counter.SetTestProperty(1, 2) - if counter.TestPropertyWithDef != 0: - raise error("Unexpected property set value!") - if counter.TestPropertyNoDef(1) != 1: - raise error("Unexpected property set value!") - else: - pass - # counter.TestProperty = 1 - - counter.LBound = 1 - counter.UBound = 10 - if counter.LBound != 1 or counter.UBound != 10: - print("** Error - counter did not keep its properties") - - if bIsGenerated: - bounds = counter.GetBounds() - if bounds[0] != 1 or bounds[1] != 10: - raise error("** Error - counter did not give the same properties back") - counter.SetBounds(bounds[0], bounds[1]) - - for item in counter: - num = num + 1 - if num != len(counter): - raise error("*** Length of counter and loop iterations dont match ***") - if num != 10: - raise error("*** Unexpected number of loop iterations ***") - - try: - counter = iter(counter)._iter_.Clone() # Test Clone() and enum directly - except AttributeError: - # *sob* - sometimes this is a real iterator and sometimes not :/ - progress("Finished testing counter (but skipped the iterator stuff") - return - counter.Reset() - num = 0 - for item in counter: - num = num + 1 - if num != 10: - raise error("*** Unexpected number of loop iterations - got %d ***" % num) - progress("Finished testing counter") - - -def TestLocalVTable(ob): - # Python doesn't fully implement this interface. 
- if ob.DoubleString("foo") != "foofoo": - raise error("couldn't foofoo") - - -############################### -## -## Some vtable tests of the interface -## -def TestVTable(clsctx=pythoncom.CLSCTX_ALL): - # Any vtable interfaces marked as dual *should* be able to be - # correctly implemented as IDispatch. - ob = win32com.client.Dispatch("Python.Test.PyCOMTest") - TestLocalVTable(ob) - # Now test it via vtable - use some C++ code to help here as Python can't do it directly yet. - tester = win32com.client.Dispatch("PyCOMTest.PyCOMTest") - testee = pythoncom.CoCreateInstance( - "Python.Test.PyCOMTest", None, clsctx, pythoncom.IID_IUnknown - ) - # check we fail gracefully with None passed. - try: - tester.TestMyInterface(None) - except pythoncom.com_error as details: - pass - # and a real object. - tester.TestMyInterface(testee) - - -def TestVTable2(): - # We once crashed creating our object with the native interface as - # the first IID specified. We must do it _after_ the tests, so that - # Python has already had the gateway registered from last run. - ob = win32com.client.Dispatch("Python.Test.PyCOMTest") - iid = pythoncom.InterfaceNames["IPyCOMTest"] - clsid = "Python.Test.PyCOMTest" - clsctx = pythoncom.CLSCTX_SERVER - try: - testee = pythoncom.CoCreateInstance(clsid, None, clsctx, iid) - except TypeError: - # Python can't actually _use_ this interface yet, so this is - # "expected". Any COM error is not. - pass - - -def TestVTableMI(): - clsctx = pythoncom.CLSCTX_SERVER - ob = pythoncom.CoCreateInstance( - "Python.Test.PyCOMTestMI", None, clsctx, pythoncom.IID_IUnknown - ) - # This inherits from IStream. 
- ob.QueryInterface(pythoncom.IID_IStream) - # This implements IStorage, specifying the IID as a string - ob.QueryInterface(pythoncom.IID_IStorage) - # IDispatch should always work - ob.QueryInterface(pythoncom.IID_IDispatch) - - iid = pythoncom.InterfaceNames["IPyCOMTest"] - try: - ob.QueryInterface(iid) - except TypeError: - # Python can't actually _use_ this interface yet, so this is - # "expected". Any COM error is not. - pass - - -def TestQueryInterface(long_lived_server=0, iterations=5): - tester = win32com.client.Dispatch("PyCOMTest.PyCOMTest") - if long_lived_server: - # Create a local server - t0 = win32com.client.Dispatch( - "Python.Test.PyCOMTest", clsctx=pythoncom.CLSCTX_LOCAL_SERVER - ) - # Request custom interfaces a number of times - prompt = [ - "Testing QueryInterface without long-lived local-server #%d of %d...", - "Testing QueryInterface with long-lived local-server #%d of %d...", - ] - - for i in range(iterations): - progress(prompt[long_lived_server != 0] % (i + 1, iterations)) - tester.TestQueryInterface() - - -class Tester(win32com.test.util.TestCase): - def testVTableInProc(self): - # We used to crash running this the second time - do it a few times - for i in range(3): - progress("Testing VTables in-process #%d..." % (i + 1)) - TestVTable(pythoncom.CLSCTX_INPROC_SERVER) - - def testVTableLocalServer(self): - for i in range(3): - progress("Testing VTables out-of-process #%d..." % (i + 1)) - TestVTable(pythoncom.CLSCTX_LOCAL_SERVER) - - def testVTable2(self): - for i in range(3): - TestVTable2() - - def testVTableMI(self): - for i in range(3): - TestVTableMI() - - def testMultiQueryInterface(self): - TestQueryInterface(0, 6) - # When we use the custom interface in the presence of a long-lived - # local server, i.e. a local server that is already running when - # we request an instance of our COM object, and remains afterwards, - # then after repeated requests to create an instance of our object - # the custom interface disappears -- i.e. 
QueryInterface fails with - # E_NOINTERFACE. Set the upper range of the following test to 2 to - # pass this test, i.e. TestQueryInterface(1,2) - TestQueryInterface(1, 6) - - def testDynamic(self): - TestDynamic() - - def testGenerated(self): - TestGenerated() - - -if __name__ == "__main__": - # XXX - todo - Complete hack to crank threading support. - # Should NOT be necessary - def NullThreadFunc(): - pass - - import _thread - - _thread.start_new(NullThreadFunc, ()) - - if "-v" in sys.argv: - verbose = 1 - - win32com.test.util.testmain() diff --git a/lib/win32com/test/testPyScriptlet.js b/lib/win32com/test/testPyScriptlet.js deleted file mode 100644 index fd9822b9..00000000 --- a/lib/win32com/test/testPyScriptlet.js +++ /dev/null @@ -1,33 +0,0 @@ -function print(msg) -{ - WScript.Echo(msg) ; -} - -function check(condition, msg) -{ - if (!condition) { - print("***** testPyScriptlet.js failed *****"); - print(msg); - } -} - -var thisScriptEngine = ScriptEngine() ; - -var majorVersion = ScriptEngineMajorVersion() ; -var minorVersion = ScriptEngineMinorVersion() ; -var buildVersion = ScriptEngineBuildVersion() ; - -WScript.Echo(thisScriptEngine + " Version " + majorVersion + "." 
+ minorVersion + " Build " + buildVersion) ; - -var scriptlet = new ActiveXObject("TestPys.Scriptlet") ; - -check(scriptlet.PyProp1=="PyScript Property1", "PyProp1 wasn't correct initial value"); -scriptlet.PyProp1 = "New Value"; -check(scriptlet.PyProp1=="New Value", "PyProp1 wasn't correct new value"); - -check(scriptlet.PyProp2=="PyScript Property2", "PyProp2 wasn't correct initial value"); -scriptlet.PyProp2 = "Another New Value"; -check(scriptlet.PyProp2=="Another New Value", "PyProp2 wasn't correct new value"); - -check(scriptlet.PyMethod1()=="PyMethod1 called", "Method1 wrong value"); -check(scriptlet.PyMethod2()=="PyMethod2 called", "Method2 wrong value"); diff --git a/lib/win32com/test/testROT.py b/lib/win32com/test/testROT.py deleted file mode 100644 index 5a4ccbcc..00000000 --- a/lib/win32com/test/testROT.py +++ /dev/null @@ -1,29 +0,0 @@ -import unittest - -import pythoncom -import win32com.test.util -import winerror - - -class TestROT(win32com.test.util.TestCase): - def testit(self): - ctx = pythoncom.CreateBindCtx() - rot = pythoncom.GetRunningObjectTable() - num = 0 - for mk in rot: - name = mk.GetDisplayName(ctx, None) - num += 1 - # Monikers themselves can iterate their contents (sometimes :) - try: - for sub in mk: - num += 1 - except pythoncom.com_error as exc: - if exc.hresult != winerror.E_NOTIMPL: - raise - - # if num < 2: - # print "Only", num, "objects in the ROT - this is unusual" - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testServers.py b/lib/win32com/test/testServers.py deleted file mode 100644 index f25009bf..00000000 --- a/lib/win32com/test/testServers.py +++ /dev/null @@ -1,52 +0,0 @@ -import unittest - -import pythoncom -import win32com.client.dynamic -import win32com.test.util -import winerror - - -def TestConnections(): - import win32com.demos.connect - - win32com.demos.connect.test() - - -class InterpCase(win32com.test.util.TestCase): - def setUp(self): - # Ensure the correct version 
registered. - from win32com.servers import interp - from win32com.test.util import RegisterPythonServer - - RegisterPythonServer(interp.__file__, "Python.Interpreter") - - def _testInterp(self, interp): - self.assertEqual(interp.Eval("1+1"), 2) - win32com.test.util.assertRaisesCOM_HRESULT( - self, winerror.DISP_E_TYPEMISMATCH, interp.Eval, 2 - ) - - def testInproc(self): - interp = win32com.client.dynamic.Dispatch( - "Python.Interpreter", clsctx=pythoncom.CLSCTX_INPROC - ) - self._testInterp(interp) - - def testLocalServer(self): - interp = win32com.client.dynamic.Dispatch( - "Python.Interpreter", clsctx=pythoncom.CLSCTX_LOCAL_SERVER - ) - self._testInterp(interp) - - def testAny(self): - interp = win32com.client.dynamic.Dispatch("Python.Interpreter") - self._testInterp(interp) - - -class ConnectionsTestCase(win32com.test.util.TestCase): - def testConnections(self): - TestConnections() - - -if __name__ == "__main__": - unittest.main("testServers") diff --git a/lib/win32com/test/testShell.py b/lib/win32com/test/testShell.py deleted file mode 100644 index b312a9dc..00000000 --- a/lib/win32com/test/testShell.py +++ /dev/null @@ -1,281 +0,0 @@ -import datetime -import os -import struct -import sys - -import win32timezone - -try: - sys_maxsize = sys.maxsize # 2.6 and later - maxsize != maxint on 64bits -except AttributeError: - sys_maxsize = sys.maxint - -import pythoncom -import pywintypes -import win32com.test.util -import win32con -from pywin32_testutil import str2bytes -from win32com.shell import shell -from win32com.shell.shellcon import * -from win32com.storagecon import * - - -class ShellTester(win32com.test.util.TestCase): - def testShellLink(self): - desktop = str(shell.SHGetSpecialFolderPath(0, CSIDL_DESKTOP)) - num = 0 - shellLink = pythoncom.CoCreateInstance( - shell.CLSID_ShellLink, - None, - pythoncom.CLSCTX_INPROC_SERVER, - shell.IID_IShellLink, - ) - persistFile = shellLink.QueryInterface(pythoncom.IID_IPersistFile) - names = [os.path.join(desktop, n) 
for n in os.listdir(desktop)] - programs = str(shell.SHGetSpecialFolderPath(0, CSIDL_PROGRAMS)) - names.extend([os.path.join(programs, n) for n in os.listdir(programs)]) - for name in names: - try: - persistFile.Load(name, STGM_READ) - except pythoncom.com_error: - continue - # Resolve is slow - avoid it for our tests. - # shellLink.Resolve(0, shell.SLR_ANY_MATCH | shell.SLR_NO_UI) - fname, findData = shellLink.GetPath(0) - unc = shellLink.GetPath(shell.SLGP_UNCPRIORITY)[0] - num += 1 - if num == 0: - # This isn't a fatal error, but is unlikely. - print( - "Could not find any links on your desktop or programs dir, which is unusual" - ) - - def testShellFolder(self): - sf = shell.SHGetDesktopFolder() - names_1 = [] - for i in sf: # Magically calls EnumObjects - name = sf.GetDisplayNameOf(i, SHGDN_NORMAL) - names_1.append(name) - - # And get the enumerator manually - enum = sf.EnumObjects( - 0, SHCONTF_FOLDERS | SHCONTF_NONFOLDERS | SHCONTF_INCLUDEHIDDEN - ) - names_2 = [] - for i in enum: - name = sf.GetDisplayNameOf(i, SHGDN_NORMAL) - names_2.append(name) - names_1.sort() - names_2.sort() - self.assertEqual(names_1, names_2) - - -class PIDLTester(win32com.test.util.TestCase): - def _rtPIDL(self, pidl): - pidl_str = shell.PIDLAsString(pidl) - pidl_rt = shell.StringAsPIDL(pidl_str) - self.assertEqual(pidl_rt, pidl) - pidl_str_rt = shell.PIDLAsString(pidl_rt) - self.assertEqual(pidl_str_rt, pidl_str) - - def _rtCIDA(self, parent, kids): - cida = parent, kids - cida_str = shell.CIDAAsString(cida) - cida_rt = shell.StringAsCIDA(cida_str) - self.assertEqual(cida, cida_rt) - cida_str_rt = shell.CIDAAsString(cida_rt) - self.assertEqual(cida_str_rt, cida_str) - - def testPIDL(self): - # A PIDL of "\1" is: cb pidl cb - expect = str2bytes("\03\00" "\1" "\0\0") - self.assertEqual(shell.PIDLAsString([str2bytes("\1")]), expect) - self._rtPIDL([str2bytes("\0")]) - self._rtPIDL([str2bytes("\1"), str2bytes("\2"), str2bytes("\3")]) - self._rtPIDL([str2bytes("\0") * 2048] * 2048) - # 
PIDL must be a list - self.assertRaises(TypeError, shell.PIDLAsString, "foo") - - def testCIDA(self): - self._rtCIDA([str2bytes("\0")], [[str2bytes("\0")]]) - self._rtCIDA([str2bytes("\1")], [[str2bytes("\2")]]) - self._rtCIDA( - [str2bytes("\0")], [[str2bytes("\0")], [str2bytes("\1")], [str2bytes("\2")]] - ) - - def testBadShortPIDL(self): - # A too-short child element: cb pidl cb - pidl = str2bytes("\01\00" "\1") - self.assertRaises(ValueError, shell.StringAsPIDL, pidl) - - # ack - tried to test too long PIDLs, but a len of 0xFFFF may not - # always fail. - - -class FILEGROUPDESCRIPTORTester(win32com.test.util.TestCase): - def _getTestTimes(self): - if issubclass(pywintypes.TimeType, datetime.datetime): - ctime = win32timezone.now() - # FILETIME only has ms precision... - ctime = ctime.replace(microsecond=ctime.microsecond // 1000 * 1000) - atime = ctime + datetime.timedelta(seconds=1) - wtime = atime + datetime.timedelta(seconds=1) - else: - ctime = pywintypes.Time(11) - atime = pywintypes.Time(12) - wtime = pywintypes.Time(13) - return ctime, atime, wtime - - def _testRT(self, fd): - fgd_string = shell.FILEGROUPDESCRIPTORAsString([fd]) - fd2 = shell.StringAsFILEGROUPDESCRIPTOR(fgd_string)[0] - - fd = fd.copy() - fd2 = fd2.copy() - - # The returned objects *always* have dwFlags and cFileName. 
- if "dwFlags" not in fd: - del fd2["dwFlags"] - if "cFileName" not in fd: - self.assertEqual(fd2["cFileName"], "") - del fd2["cFileName"] - - self.assertEqual(fd, fd2) - - def _testSimple(self, make_unicode): - fgd = shell.FILEGROUPDESCRIPTORAsString([], make_unicode) - header = struct.pack("i", 0) - self.assertEqual(header, fgd[: len(header)]) - self._testRT(dict()) - d = dict() - fgd = shell.FILEGROUPDESCRIPTORAsString([d], make_unicode) - header = struct.pack("i", 1) - self.assertEqual(header, fgd[: len(header)]) - self._testRT(d) - - def testSimpleBytes(self): - self._testSimple(False) - - def testSimpleUnicode(self): - self._testSimple(True) - - def testComplex(self): - clsid = pythoncom.MakeIID("{CD637886-DB8B-4b04-98B5-25731E1495BE}") - ctime, atime, wtime = self._getTestTimes() - d = dict( - cFileName="foo.txt", - clsid=clsid, - sizel=(1, 2), - pointl=(3, 4), - dwFileAttributes=win32con.FILE_ATTRIBUTE_NORMAL, - ftCreationTime=ctime, - ftLastAccessTime=atime, - ftLastWriteTime=wtime, - nFileSize=sys_maxsize + 1, - ) - self._testRT(d) - - def testUnicode(self): - # exercise a bug fixed in build 210 - multiple unicode objects failed. 
- ctime, atime, wtime = self._getTestTimes() - d = [ - dict( - cFileName="foo.txt", - sizel=(1, 2), - pointl=(3, 4), - dwFileAttributes=win32con.FILE_ATTRIBUTE_NORMAL, - ftCreationTime=ctime, - ftLastAccessTime=atime, - ftLastWriteTime=wtime, - nFileSize=sys_maxsize + 1, - ), - dict( - cFileName="foo2.txt", - sizel=(1, 2), - pointl=(3, 4), - dwFileAttributes=win32con.FILE_ATTRIBUTE_NORMAL, - ftCreationTime=ctime, - ftLastAccessTime=atime, - ftLastWriteTime=wtime, - nFileSize=sys_maxsize + 1, - ), - dict( - cFileName="foo\xa9.txt", - sizel=(1, 2), - pointl=(3, 4), - dwFileAttributes=win32con.FILE_ATTRIBUTE_NORMAL, - ftCreationTime=ctime, - ftLastAccessTime=atime, - ftLastWriteTime=wtime, - nFileSize=sys_maxsize + 1, - ), - ] - s = shell.FILEGROUPDESCRIPTORAsString(d, 1) - d2 = shell.StringAsFILEGROUPDESCRIPTOR(s) - # clobber 'dwFlags' - they are not expected to be identical - for t in d2: - del t["dwFlags"] - self.assertEqual(d, d2) - - -class FileOperationTester(win32com.test.util.TestCase): - def setUp(self): - import tempfile - - self.src_name = os.path.join(tempfile.gettempdir(), "pywin32_testshell") - self.dest_name = os.path.join(tempfile.gettempdir(), "pywin32_testshell_dest") - self.test_data = str2bytes("Hello from\0Python") - f = open(self.src_name, "wb") - f.write(self.test_data) - f.close() - try: - os.unlink(self.dest_name) - except os.error: - pass - - def tearDown(self): - for fname in (self.src_name, self.dest_name): - if os.path.isfile(fname): - os.unlink(fname) - - def testCopy(self): - s = (0, FO_COPY, self.src_name, self.dest_name) # hwnd, # operation - - rc, aborted = shell.SHFileOperation(s) - self.assertTrue(not aborted) - self.assertEqual(0, rc) - self.assertTrue(os.path.isfile(self.src_name)) - self.assertTrue(os.path.isfile(self.dest_name)) - - def testRename(self): - s = (0, FO_RENAME, self.src_name, self.dest_name) # hwnd, # operation - rc, aborted = shell.SHFileOperation(s) - self.assertTrue(not aborted) - self.assertEqual(0, rc) - 
self.assertTrue(os.path.isfile(self.dest_name)) - self.assertTrue(not os.path.isfile(self.src_name)) - - def testMove(self): - s = (0, FO_MOVE, self.src_name, self.dest_name) # hwnd, # operation - rc, aborted = shell.SHFileOperation(s) - self.assertTrue(not aborted) - self.assertEqual(0, rc) - self.assertTrue(os.path.isfile(self.dest_name)) - self.assertTrue(not os.path.isfile(self.src_name)) - - def testDelete(self): - s = ( - 0, # hwnd, - FO_DELETE, # operation - self.src_name, - None, - FOF_NOCONFIRMATION, - ) - rc, aborted = shell.SHFileOperation(s) - self.assertTrue(not aborted) - self.assertEqual(0, rc) - self.assertTrue(not os.path.isfile(self.src_name)) - - -if __name__ == "__main__": - win32com.test.util.testmain() diff --git a/lib/win32com/test/testStorage.py b/lib/win32com/test/testStorage.py deleted file mode 100644 index 5b7e25de..00000000 --- a/lib/win32com/test/testStorage.py +++ /dev/null @@ -1,88 +0,0 @@ -import unittest - -import pythoncom -import win32api -import win32com.test.util -from win32com import storagecon - - -class TestEnum(win32com.test.util.TestCase): - def testit(self): - fname, tmp = win32api.GetTempFileName(win32api.GetTempPath(), "stg") - m = storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE - ## file, mode, format, attrs (always 0), IID (IStorage or IPropertySetStorage, storage options(only used with STGFMT_DOCFILE) - pss = pythoncom.StgOpenStorageEx( - fname, m, storagecon.STGFMT_FILE, 0, pythoncom.IID_IPropertySetStorage - ) - ### {"Version":2,"reserved":0,"SectorSize":512,"TemplateFile":u'somefilename'}) - - ## FMTID_SummaryInformation FMTID_DocSummaryInformation FMTID_UserDefinedProperties - psuser = pss.Create( - pythoncom.FMTID_UserDefinedProperties, - pythoncom.IID_IPropertySetStorage, - storagecon.PROPSETFLAG_DEFAULT, - storagecon.STGM_READWRITE - | storagecon.STGM_CREATE - | storagecon.STGM_SHARE_EXCLUSIVE, - ) ## its very picky about flag combinations! 
- psuser.WriteMultiple((3, 4), ("hey", "bubba")) - psuser.WritePropertyNames((3, 4), ("property3", "property4")) - expected_summaries = [] - expected_summaries.append(("property3", 3, pythoncom.VT_BSTR)) - expected_summaries.append(("property4", 4, pythoncom.VT_BSTR)) - psuser = None - - pssum = pss.Create( - pythoncom.FMTID_SummaryInformation, - pythoncom.IID_IPropertySetStorage, - storagecon.PROPSETFLAG_DEFAULT, - storagecon.STGM_READWRITE - | storagecon.STGM_CREATE - | storagecon.STGM_SHARE_EXCLUSIVE, - ) - pssum.WriteMultiple( - (storagecon.PIDSI_AUTHOR, storagecon.PIDSI_COMMENTS), ("me", "comment") - ) - - pssum = None - pss = None ## doesn't seem to be a close or release method, and you can't even reopen it from the same process until previous object is gone - - pssread = pythoncom.StgOpenStorageEx( - fname, - storagecon.STGM_READ | storagecon.STGM_SHARE_EXCLUSIVE, - storagecon.STGFMT_FILE, - 0, - pythoncom.IID_IPropertySetStorage, - ) - found_summaries = [] - for psstat in pssread: - ps = pssread.Open( - psstat[0], storagecon.STGM_READ | storagecon.STGM_SHARE_EXCLUSIVE - ) - for p in ps: - p_val = ps.ReadMultiple((p[1],))[0] - if (p[1] == storagecon.PIDSI_AUTHOR and p_val == "me") or ( - p[1] == storagecon.PIDSI_COMMENTS and p_val == "comment" - ): - pass - else: - self.fail("Uxexpected property %s/%s" % (p, p_val)) - ps = None - ## FMTID_UserDefinedProperties can't exist without FMTID_DocSummaryInformation, and isn't returned independently from Enum - ## also can't be open at same time - if psstat[0] == pythoncom.FMTID_DocSummaryInformation: - ps = pssread.Open( - pythoncom.FMTID_UserDefinedProperties, - storagecon.STGM_READ | storagecon.STGM_SHARE_EXCLUSIVE, - ) - for p in ps: - found_summaries.append(p) - ps = None - psread = None - expected_summaries.sort() - found_summaries.sort() - self.assertEqual(expected_summaries, found_summaries) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testStreams.py 
b/lib/win32com/test/testStreams.py deleted file mode 100644 index 3beb47c7..00000000 --- a/lib/win32com/test/testStreams.py +++ /dev/null @@ -1,148 +0,0 @@ -import unittest - -import pythoncom -import win32com.server.util -import win32com.test.util -from pywin32_testutil import str2bytes - - -class Persists: - _public_methods_ = [ - "GetClassID", - "IsDirty", - "Load", - "Save", - "GetSizeMax", - "InitNew", - ] - _com_interfaces_ = [pythoncom.IID_IPersistStreamInit] - - def __init__(self): - self.data = str2bytes("abcdefg") - self.dirty = 1 - - def GetClassID(self): - return pythoncom.IID_NULL - - def IsDirty(self): - return self.dirty - - def Load(self, stream): - self.data = stream.Read(26) - - def Save(self, stream, clearDirty): - stream.Write(self.data) - if clearDirty: - self.dirty = 0 - - def GetSizeMax(self): - return 1024 - - def InitNew(self): - pass - - -class Stream: - _public_methods_ = ["Read", "Write", "Seek"] - _com_interfaces_ = [pythoncom.IID_IStream] - - def __init__(self, data): - self.data = data - self.index = 0 - - def Read(self, amount): - result = self.data[self.index : self.index + amount] - self.index = self.index + amount - return result - - def Write(self, data): - self.data = data - self.index = 0 - return len(data) - - def Seek(self, dist, origin): - if origin == pythoncom.STREAM_SEEK_SET: - self.index = dist - elif origin == pythoncom.STREAM_SEEK_CUR: - self.index = self.index + dist - elif origin == pythoncom.STREAM_SEEK_END: - self.index = len(self.data) + dist - else: - raise ValueError("Unknown Seek type: " + str(origin)) - if self.index < 0: - self.index = 0 - else: - self.index = min(self.index, len(self.data)) - return self.index - - -class BadStream(Stream): - """PyGStream::Read could formerly overflow buffer if the python implementation - returned more data than requested. 
- """ - - def Read(self, amount): - return str2bytes("x") * (amount + 1) - - -class StreamTest(win32com.test.util.TestCase): - def _readWrite(self, data, write_stream, read_stream=None): - if read_stream is None: - read_stream = write_stream - write_stream.Write(data) - read_stream.Seek(0, pythoncom.STREAM_SEEK_SET) - got = read_stream.Read(len(data)) - self.assertEqual(data, got) - read_stream.Seek(1, pythoncom.STREAM_SEEK_SET) - got = read_stream.Read(len(data) - 2) - self.assertEqual(data[1:-1], got) - - def testit(self): - mydata = str2bytes("abcdefghijklmnopqrstuvwxyz") - - # First test the objects just as Python objects... - s = Stream(mydata) - p = Persists() - - p.Load(s) - p.Save(s, 0) - self.assertEqual(s.data, mydata) - - # Wrap the Python objects as COM objects, and make the calls as if - # they were non-Python COM objects. - s2 = win32com.server.util.wrap(s, pythoncom.IID_IStream) - p2 = win32com.server.util.wrap(p, pythoncom.IID_IPersistStreamInit) - - self._readWrite(mydata, s, s) - self._readWrite(mydata, s, s2) - self._readWrite(mydata, s2, s) - self._readWrite(mydata, s2, s2) - - self._readWrite(str2bytes("string with\0a NULL"), s2, s2) - # reset the stream - s.Write(mydata) - p2.Load(s2) - p2.Save(s2, 0) - self.assertEqual(s.data, mydata) - - def testseek(self): - s = Stream(str2bytes("yo")) - s = win32com.server.util.wrap(s, pythoncom.IID_IStream) - # we used to die in py3k passing a value > 32bits - s.Seek(0x100000000, pythoncom.STREAM_SEEK_SET) - - def testerrors(self): - # setup a test logger to capture tracebacks etc. 
- records, old_log = win32com.test.util.setup_test_logger() - ## check for buffer overflow in Read method - badstream = BadStream("Check for buffer overflow") - badstream2 = win32com.server.util.wrap(badstream, pythoncom.IID_IStream) - self.assertRaises(pythoncom.com_error, badstream2.Read, 10) - win32com.test.util.restore_test_logger(old_log) - # there's 1 error here - self.assertEqual(len(records), 1) - self.assertTrue(records[0].msg.startswith("pythoncom error")) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testWMI.py b/lib/win32com/test/testWMI.py deleted file mode 100644 index 5829b48e..00000000 --- a/lib/win32com/test/testWMI.py +++ /dev/null @@ -1,18 +0,0 @@ -import unittest - -import win32com.test.util -from win32com.client import GetObject - - -class Simple(win32com.test.util.TestCase): - def testit(self): - cses = GetObject("WinMgMts:").InstancesOf("Win32_Process") - vals = [] - for cs in cses: - val = cs.Properties_("Caption").Value - vals.append(val) - self.assertFalse(len(vals) < 5, "We only found %d processes!" % len(vals)) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testall.py b/lib/win32com/test/testall.py deleted file mode 100644 index 6da07407..00000000 --- a/lib/win32com/test/testall.py +++ /dev/null @@ -1,321 +0,0 @@ -import getopt -import os -import re -import sys -import traceback -import unittest - -try: - this_file = __file__ -except NameError: - this_file = sys.argv[0] - -win32com_src_dir = os.path.abspath(os.path.join(this_file, "../..")) - -import win32com - -# We'd prefer the win32com namespace to be the parent of __file__ - ie, our source-tree, -# rather than the version installed - otherwise every .py change needs a full install to -# test! 
-# We can't patch win32comext as most of them have a .pyd in their root :( -# This clearly ins't ideal or perfect :) -win32com.__path__[0] = win32com_src_dir - -import pythoncom -import win32com.client -from win32com.test.util import ( - CapturingFunctionTestCase, - CheckClean, - RegisterPythonServer, - ShellTestCase, - TestCase, - TestLoader, - TestRunner, -) - -verbosity = 1 # default unittest verbosity. - - -def GenerateAndRunOldStyle(): - from . import GenTestScripts - - GenTestScripts.GenerateAll() - try: - pass # - finally: - GenTestScripts.CleanAll() - - -def CleanGenerated(): - import shutil - - import win32com - - if os.path.isdir(win32com.__gen_path__): - if verbosity > 1: - print("Deleting files from %s" % (win32com.__gen_path__)) - shutil.rmtree(win32com.__gen_path__) - import win32com.client.gencache - - win32com.client.gencache.__init__() # Reset - - -def RemoveRefCountOutput(data): - while 1: - last_line_pos = data.rfind("\n") - if not re.match("\[\d+ refs\]", data[last_line_pos + 1 :]): - break - if last_line_pos < 0: - # All the output - return "" - data = data[:last_line_pos] - - return data - - -def ExecuteSilentlyIfOK(cmd, testcase): - f = os.popen(cmd) - data = f.read().strip() - rc = f.close() - if rc: - print(data) - testcase.fail("Executing '%s' failed (%d)" % (cmd, rc)) - # for "_d" builds, strip the '[xxx refs]' line - return RemoveRefCountOutput(data) - - -class PyCOMTest(TestCase): - no_leak_tests = True # done by the test itself - - def testit(self): - # Check that the item is registered, so we get the correct - # 'skipped' behaviour (and recorded as such) rather than either - # error or silence due to non-registration. 
- RegisterPythonServer( - os.path.join( - os.path.dirname(__file__), "..", "servers", "test_pycomtest.py" - ), - "Python.Test.PyCOMTest", - ) - - # Execute testPyComTest in its own process so it can play - # with the Python thread state - fname = os.path.join(os.path.dirname(this_file), "testPyComTest.py") - cmd = '%s "%s" -q 2>&1' % (sys.executable, fname) - data = ExecuteSilentlyIfOK(cmd, self) - - -class PippoTest(TestCase): - def testit(self): - # Check we are registered before spawning the process. - from win32com.test import pippo_server - - RegisterPythonServer(pippo_server.__file__, "Python.Test.Pippo") - - python = sys.executable - fname = os.path.join(os.path.dirname(this_file), "testPippo.py") - cmd = '%s "%s" 2>&1' % (python, fname) - ExecuteSilentlyIfOK(cmd, self) - - -# This is a list of "win32com.test.???" module names, optionally with a -# function in that module if the module isn't unitest based... -unittest_modules = [ - # Level 1 tests - fast and few dependencies - good for CI! - """testIterators testvbscript_regexp testStorage - testStreams testWMI policySemantics testShell testROT - testxslt testCollections - errorSemantics.test testArrays - testClipboard - testConversionErrors - """.split(), - # Level 2 tests - wants our demo COM objects registered. - # (these are strange; on github CI they get further than expected when - # our objects are not installed, so fail to quietly fail with "can't - # register" like they do locally. So really just a nod to CI) - """ - testAXScript testDictionary testServers testvb testMarshal - """.split(), - # Level 3 tests - Requires Office or other non-free stuff. - """testMSOffice.TestAll testMSOfficeEvents.test testAccess.test - testExplorer.TestAll testExchange.test - """.split(), - # Level 4 tests - we try and run `makepy` over every typelib installed! 
- """testmakepy.TestAll - """.split(), -] - -# A list of other unittest modules we use - these are fully qualified module -# names and the module is assumed to be unittest based. -unittest_other_modules = [ - # Level 1 tests. - """win32com.directsound.test.ds_test - """.split(), - # Level 2 tests. - [], - # Level 3 tests. - [], - # Level 4 tests. - [], -] - - -output_checked_programs = [ - # Level 1 tests. - [], - # Level 2 tests. - [ - ("cscript.exe /nologo //E:vbscript testInterp.vbs", "VBScript test worked OK"), - ( - "cscript.exe /nologo //E:vbscript testDictionary.vbs", - "VBScript has successfully tested Python.Dictionary", - ), - ], - # Level 3 tests - [], - # Level 4 tests. - [], -] - -custom_test_cases = [ - # Level 1 tests. - [], - # Level 2 tests. - [ - PyCOMTest, - PippoTest, - ], - # Level 3 tests - [], - # Level 4 tests. - [], -] - - -def get_test_mod_and_func(test_name, import_failures): - if test_name.find(".") > 0: - mod_name, func_name = test_name.split(".") - else: - mod_name = test_name - func_name = None - fq_mod_name = "win32com.test." 
+ mod_name - try: - __import__(fq_mod_name) - mod = sys.modules[fq_mod_name] - except: - import_failures.append((mod_name, sys.exc_info()[:2])) - return None, None - func = None if func_name is None else getattr(mod, func_name) - return mod, func - - -# Return a test suite all loaded with the tests we want to run -def make_test_suite(test_level=1): - suite = unittest.TestSuite() - import_failures = [] - loader = TestLoader() - for i in range(testLevel): - for mod_name in unittest_modules[i]: - mod, func = get_test_mod_and_func(mod_name, import_failures) - if mod is None: - raise Exception("no such module '{}'".format(mod_name)) - if func is not None: - test = CapturingFunctionTestCase(func, description=mod_name) - else: - if hasattr(mod, "suite"): - test = mod.suite() - else: - test = loader.loadTestsFromModule(mod) - assert test.countTestCases() > 0, "No tests loaded from %r" % mod - suite.addTest(test) - for cmd, output in output_checked_programs[i]: - suite.addTest(ShellTestCase(cmd, output)) - - for test_class in custom_test_cases[i]: - suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(test_class)) - # other "normal" unittest modules. - for i in range(testLevel): - for mod_name in unittest_other_modules[i]: - try: - __import__(mod_name) - except: - import_failures.append((mod_name, sys.exc_info()[:2])) - continue - - mod = sys.modules[mod_name] - if hasattr(mod, "suite"): - test = mod.suite() - else: - test = loader.loadTestsFromModule(mod) - assert test.countTestCases() > 0, "No tests loaded from %r" % mod - suite.addTest(test) - - return suite, import_failures - - -def usage(why): - print(why) - print() - print("win32com test suite") - print("usage: testall [-v] test_level") - print(" where test_level is an integer 1-3. 
Level 1 tests are quick,") - print(" level 2 tests invoke Word, IE etc, level 3 take ages!") - sys.exit(1) - - -if __name__ == "__main__": - try: - opts, args = getopt.getopt(sys.argv[1:], "v") - except getopt.error as why: - usage(why) - for opt, val in opts: - if opt == "-v": - verbosity += 1 - testLevel = 2 # default to quick test with local objects - test_names = [] - for arg in args: - try: - testLevel = int(arg) - if testLevel < 0 or testLevel > 4: - raise ValueError("Only levels 1-4 are supported") - except ValueError: - test_names.append(arg) - if test_names: - usage("Test names are not supported yet") - CleanGenerated() - - suite, import_failures = make_test_suite(testLevel) - if verbosity: - if hasattr(sys, "gettotalrefcount"): - print("This is a debug build - memory leak tests will also be run.") - print("These tests may take *many* minutes to run - be patient!") - print("(running from python.exe will avoid these leak tests)") - print( - "Executing level %d tests - %d test cases will be run" - % (testLevel, suite.countTestCases()) - ) - if verbosity == 1 and suite.countTestCases() < 70: - # A little row of markers so the dots show how close to finished - print("|" * suite.countTestCases()) - testRunner = TestRunner(verbosity=verbosity) - testResult = testRunner.run(suite) - if import_failures: - testResult.stream.writeln( - "*** The following test modules could not be imported ***" - ) - for mod_name, (exc_type, exc_val) in import_failures: - desc = "\n".join(traceback.format_exception_only(exc_type, exc_val)) - testResult.stream.write("%s: %s" % (mod_name, desc)) - testResult.stream.writeln( - "*** %d test(s) could not be run ***" % len(import_failures) - ) - - # re-print unit-test error here so it is noticed - if not testResult.wasSuccessful(): - print("*" * 20, "- unittest tests FAILED") - - CheckClean() - pythoncom.CoUninitialize() - CleanGenerated() - if not testResult.wasSuccessful(): - sys.exit(1) diff --git a/lib/win32com/test/testmakepy.py 
b/lib/win32com/test/testmakepy.py deleted file mode 100644 index 2eeb8818..00000000 --- a/lib/win32com/test/testmakepy.py +++ /dev/null @@ -1,55 +0,0 @@ -# Test makepy - try and run it over every OCX in the windows system directory. - -import sys -import traceback - -import pythoncom -import win32api -import win32com.test.util -import winerror -from win32com.client import gencache, makepy, selecttlb - - -def TestBuildAll(verbose=1): - num = 0 - tlbInfos = selecttlb.EnumTlbs() - for info in tlbInfos: - if verbose: - print("%s (%s)" % (info.desc, info.dll)) - try: - makepy.GenerateFromTypeLibSpec(info) - # sys.stderr.write("Attr typeflags for coclass referenced object %s=%d (%d), typekind=%d\n" % (name, refAttr.wTypeFlags, refAttr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL,refAttr.typekind)) - num += 1 - except pythoncom.com_error as details: - # Ignore these 2 errors, as the are very common and can obscure - # useful warnings. - if details.hresult not in [ - winerror.TYPE_E_CANTLOADLIBRARY, - winerror.TYPE_E_LIBNOTREGISTERED, - ]: - print("** COM error on", info.desc) - print(details) - except KeyboardInterrupt: - print("Interrupted!") - raise KeyboardInterrupt - except: - print("Failed:", info.desc) - traceback.print_exc() - if makepy.bForDemandDefault: - # This only builds enums etc by default - build each - # interface manually - tinfo = (info.clsid, info.lcid, info.major, info.minor) - mod = gencache.EnsureModule(info.clsid, info.lcid, info.major, info.minor) - for name in mod.NamesToIIDMap.keys(): - makepy.GenerateChildFromTypeLibSpec(name, tinfo) - return num - - -def TestAll(verbose=0): - num = TestBuildAll(verbose) - print("Generated and imported", num, "modules") - win32com.test.util.CheckClean() - - -if __name__ == "__main__": - TestAll("-q" not in sys.argv) diff --git a/lib/win32com/test/testvb.py b/lib/win32com/test/testvb.py deleted file mode 100644 index 5a3d2f73..00000000 --- a/lib/win32com/test/testvb.py +++ /dev/null @@ -1,594 +0,0 @@ -# Test code for a 
VB Program. -# -# This requires the PythonCOM VB Test Harness. -# - -import sys -import traceback - -import pythoncom -import win32com.client -import win32com.client.dynamic -import win32com.client.gencache -import winerror -from pywin32_testutil import str2memory -from win32com.server.util import NewCollection, wrap -from win32com.test import util - -# for debugging -useDispatcher = None -## import win32com.server.dispatcher -## useDispatcher = win32com.server.dispatcher.DefaultDebugDispatcher - -error = RuntimeError - - -# Set up a COM object that VB will do some callbacks on. This is used -# to test byref params for gateway IDispatch. -class TestObject: - _public_methods_ = [ - "CallbackVoidOneByRef", - "CallbackResultOneByRef", - "CallbackVoidTwoByRef", - "CallbackString", - "CallbackResultOneByRefButReturnNone", - "CallbackVoidOneByRefButReturnNone", - "CallbackArrayResult", - "CallbackArrayResultOneArrayByRef", - "CallbackArrayResultWrongSize", - ] - - def CallbackVoidOneByRef(self, intVal): - return intVal + 1 - - def CallbackResultOneByRef(self, intVal): - return intVal, intVal + 1 - - def CallbackVoidTwoByRef(self, int1, int2): - return int1 + int2, int1 - int2 - - def CallbackString(self, strVal): - return 0, strVal + " has visited Python" - - def CallbackArrayResult(self, arrayVal): - ret = [] - for i in arrayVal: - ret.append(i + 1) - # returning as a list forces it be processed as a single result - # (rather than a tuple, where it may be interpreted as - # multiple results for byref unpacking) - return ret - - def CallbackArrayResultWrongSize(self, arrayVal): - return list(arrayVal[:-1]) - - def CallbackArrayResultOneArrayByRef(self, arrayVal): - ret = [] - for i in arrayVal: - ret.append(i + 1) - # See above for list processing. 
- return list(arrayVal), ret - - def CallbackResultOneByRefButReturnNone(self, intVal): - return - - def CallbackVoidOneByRefButReturnNone(self, intVal): - return - - -def TestVB(vbtest, bUseGenerated): - vbtest.LongProperty = -1 - if vbtest.LongProperty != -1: - raise error("Could not set the long property correctly.") - vbtest.IntProperty = 10 - if vbtest.IntProperty != 10: - raise error("Could not set the integer property correctly.") - vbtest.VariantProperty = 10 - if vbtest.VariantProperty != 10: - raise error("Could not set the variant integer property correctly.") - vbtest.VariantProperty = str2memory("raw\0data") - if vbtest.VariantProperty != str2memory("raw\0data"): - raise error("Could not set the variant buffer property correctly.") - vbtest.StringProperty = "Hello from Python" - if vbtest.StringProperty != "Hello from Python": - raise error("Could not set the string property correctly.") - vbtest.VariantProperty = "Hello from Python" - if vbtest.VariantProperty != "Hello from Python": - raise error("Could not set the variant string property correctly.") - vbtest.VariantProperty = (1.0, 2.0, 3.0) - if vbtest.VariantProperty != (1.0, 2.0, 3.0): - raise error( - "Could not set the variant property to an array of floats correctly - '%s'." - % (vbtest.VariantProperty,) - ) - - TestArrays(vbtest, bUseGenerated) - TestStructs(vbtest) - TestCollections(vbtest) - - assert vbtest.TakeByValObject(vbtest) == vbtest - - # Python doesnt support PUTREF properties without a typeref - # (although we could) - if bUseGenerated: - ob = vbtest.TakeByRefObject(vbtest) - assert ob[0] == vbtest and ob[1] == vbtest - - # A property that only has PUTREF defined. - vbtest.VariantPutref = vbtest - if vbtest.VariantPutref._oleobj_ != vbtest._oleobj_: - raise error("Could not set the VariantPutref property correctly.") - # Cant test further types for this VariantPutref, as only - # COM objects can be stored ByRef. - - # A "set" type property - only works for generated. 
- # VB recognizes a collection via a few "private" interfaces that we - # could later build support in for. - # vbtest.CollectionProperty = NewCollection((1,2,"3", "Four")) - # if vbtest.CollectionProperty != (1,2,"3", "Four"): - # raise error("Could not set the Collection property correctly - got back " + str(vbtest.CollectionProperty)) - - # These are sub's that have a single byref param - # Result should be just the byref. - if vbtest.IncrementIntegerParam(1) != 2: - raise error("Could not pass an integer byref") - - # Sigh - we cant have *both* "ommited byref" and optional args - # We really have to opt that args nominated as optional work as optional - # rather than simply all byrefs working as optional. - # if vbtest.IncrementIntegerParam() != 1: - # raise error("Could not pass an omitted integer byref") - - if vbtest.IncrementVariantParam(1) != 2: - raise error( - "Could not pass an int VARIANT byref:" - + str(vbtest.IncrementVariantParam(1)) - ) - - if vbtest.IncrementVariantParam(1.5) != 2.5: - raise error("Could not pass a float VARIANT byref") - - # Can't test IncrementVariantParam with the param omitted as it - # it not declared in the VB code as "Optional" - callback_ob = wrap(TestObject(), useDispatcher=useDispatcher) - vbtest.DoSomeCallbacks(callback_ob) - - ret = vbtest.PassIntByVal(1) - if ret != 2: - raise error("Could not increment the integer - " + str(ret)) - - TestVBInterface(vbtest) - # Python doesnt support byrefs without some sort of generated support. - if bUseGenerated: - # This is a VB function that takes a single byref - # Hence 2 return values - function and byref. - ret = vbtest.PassIntByRef(1) - if ret != (1, 2): - raise error("Could not increment the integer - " + str(ret)) - # Check you can leave a byref arg blank. 
- - -# see above -# ret = vbtest.PassIntByRef() -# if ret != (0,1): -# raise error("Could not increment the integer with default arg- "+str(ret)) - - -def _DoTestCollection(vbtest, col_name, expected): - # It sucks that some objects allow "Count()", but others "Count" - def _getcount(ob): - r = getattr(ob, "Count") - if type(r) != type(0): - return r() - return r - - c = getattr(vbtest, col_name) - check = [] - for item in c: - check.append(item) - if check != list(expected): - raise error( - "Collection %s didn't have %r (had %r)" % (col_name, expected, check) - ) - # Just looping over the collection again works (ie, is restartable) - check = [] - for item in c: - check.append(item) - if check != list(expected): - raise error( - "Collection 2nd time around %s didn't have %r (had %r)" - % (col_name, expected, check) - ) - # Check we can get it via iter() - i = iter(getattr(vbtest, col_name)) - check = [] - for item in i: - check.append(item) - if check != list(expected): - raise error( - "Collection iterator %s didn't have %r 2nd time around (had %r)" - % (col_name, expected, check) - ) - # but an iterator is not restartable - check = [] - for item in i: - check.append(item) - if check != []: - raise error( - "2nd time around Collection iterator %s wasn't empty (had %r)" - % (col_name, check) - ) - - # Check len()==Count() - c = getattr(vbtest, col_name) - if len(c) != _getcount(c): - raise error( - "Collection %s __len__(%r) wasn't==Count(%r)" - % (col_name, len(c), _getcount(c)) - ) - # Check we can do it with zero based indexing. - c = getattr(vbtest, col_name) - check = [] - for i in range(_getcount(c)): - check.append(c[i]) - if check != list(expected): - raise error( - "Collection %s didn't have %r (had %r)" % (col_name, expected, check) - ) - - # Check we can do it with our old "Skip/Next" methods. 
- c = getattr(vbtest, col_name)._NewEnum() - check = [] - while 1: - n = c.Next() - if not n: - break - check.append(n[0]) - if check != list(expected): - raise error( - "Collection %s didn't have %r (had %r)" % (col_name, expected, check) - ) - - -def TestCollections(vbtest): - _DoTestCollection(vbtest, "CollectionProperty", [1, "Two", "3"]) - # zero based indexing works for simple VB collections. - if vbtest.CollectionProperty[0] != 1: - raise error("The CollectionProperty[0] element was not the default value") - - _DoTestCollection(vbtest, "EnumerableCollectionProperty", []) - vbtest.EnumerableCollectionProperty.Add(1) - vbtest.EnumerableCollectionProperty.Add("Two") - vbtest.EnumerableCollectionProperty.Add("3") - _DoTestCollection(vbtest, "EnumerableCollectionProperty", [1, "Two", "3"]) - - -def _DoTestArray(vbtest, data, expected_exception=None): - try: - vbtest.ArrayProperty = data - if expected_exception is not None: - raise error("Expected '%s'" % expected_exception) - except expected_exception: - return - got = vbtest.ArrayProperty - if got != data: - raise error( - "Could not set the array data correctly - got %r, expected %r" % (got, data) - ) - - -def TestArrays(vbtest, bUseGenerated): - # Try and use a safe array (note that the VB code has this declared as a VARIANT - # and I cant work out how to force it to use native arrays! - # (NOTE Python will convert incoming arrays to tuples, so we pass a tuple, even tho - # a list works fine - just makes it easier for us to compare the result! - # Empty array - _DoTestArray(vbtest, ()) - # Empty child array - _DoTestArray(vbtest, ((), ())) - # ints - _DoTestArray(vbtest, tuple(range(1, 100))) - # Floats - _DoTestArray(vbtest, (1.0, 2.0, 3.0)) - # Strings. - _DoTestArray(vbtest, tuple("Hello from Python".split())) - # Date and Time? - # COM objects. 
- _DoTestArray(vbtest, (vbtest, vbtest)) - # Mixed - _DoTestArray(vbtest, (1, 2.0, "3")) - # Array alements containing other arrays - _DoTestArray(vbtest, (1, (vbtest, vbtest), ("3", "4"))) - # Multi-dimensional - _DoTestArray(vbtest, (((1, 2, 3), (4, 5, 6)))) - _DoTestArray(vbtest, (((vbtest, vbtest, vbtest), (vbtest, vbtest, vbtest)))) - # Another dimension! - arrayData = (((1, 2), (3, 4), (5, 6)), ((7, 8), (9, 10), (11, 12))) - arrayData = ( - ((vbtest, vbtest), (vbtest, vbtest), (vbtest, vbtest)), - ((vbtest, vbtest), (vbtest, vbtest), (vbtest, vbtest)), - ) - _DoTestArray(vbtest, arrayData) - - # Check that when a '__getitem__ that fails' object is the first item - # in the structure, we don't mistake it for a sequence. - _DoTestArray(vbtest, (vbtest, 2.0, "3")) - _DoTestArray(vbtest, (1, 2.0, vbtest)) - - # Pass arbitrarily sized arrays - these used to fail, but thanks to - # Stefan Schukat, they now work! - expected_exception = None - arrayData = (((1, 2, 1), (3, 4), (5, 6)), ((7, 8), (9, 10), (11, 12))) - _DoTestArray(vbtest, arrayData, expected_exception) - arrayData = (((vbtest, vbtest),), ((vbtest,),)) - _DoTestArray(vbtest, arrayData, expected_exception) - # Pass bad data - last item wrong size - arrayData = (((1, 2), (3, 4), (5, 6, 8)), ((7, 8), (9, 10), (11, 12))) - _DoTestArray(vbtest, arrayData, expected_exception) - - # byref safearray results with incorrect size. - callback_ob = wrap(TestObject(), useDispatcher=useDispatcher) - print("** Expecting a 'ValueError' exception to be printed next:") - try: - vbtest.DoCallbackSafeArraySizeFail(callback_ob) - except pythoncom.com_error as exc: - assert ( - exc.excepinfo[1] == "Python COM Server Internal Error" - ), "Didnt get the correct exception - '%s'" % (exc,) - - if bUseGenerated: - # This one is a bit strange! The array param is "ByRef", as VB insists. - # The function itself also _returns_ the arram param. - # Therefore, Python sees _2_ result values - one for the result, - # and one for the byref. 
- testData = "Mark was here".split() - resultData, byRefParam = vbtest.PassSAFEARRAY(testData) - if testData != list(resultData): - raise error( - "The safe array data was not what we expected - got " + str(resultData) - ) - if testData != list(byRefParam): - raise error( - "The safe array data was not what we expected - got " + str(byRefParam) - ) - testData = [1.0, 2.0, 3.0] - resultData, byRefParam = vbtest.PassSAFEARRAYVariant(testData) - assert testData == list(byRefParam) - assert testData == list(resultData) - testData = ["hi", "from", "Python"] - resultData, byRefParam = vbtest.PassSAFEARRAYVariant(testData) - assert testData == list(byRefParam), "Expected '%s', got '%s'" % ( - testData, - list(byRefParam), - ) - assert testData == list(resultData), "Expected '%s', got '%s'" % ( - testData, - list(resultData), - ) - # This time, instead of an explicit str() for 1.5, we just - # pass Unicode, so the result should compare equal - testData = [1, 2.0, "3"] - resultData, byRefParam = vbtest.PassSAFEARRAYVariant(testData) - assert testData == list(byRefParam) - assert testData == list(resultData) - print("Array tests passed") - - -def TestStructs(vbtest): - try: - vbtest.IntProperty = "One" - raise error("Should have failed by now") - except pythoncom.com_error as exc: - if exc.hresult != winerror.DISP_E_TYPEMISMATCH: - raise error("Expected DISP_E_TYPEMISMATCH") - - s = vbtest.StructProperty - if s.int_val != 99 or str(s.str_val) != "hello": - raise error("The struct value was not correct") - s.str_val = "Hi from Python" - s.int_val = 11 - if s.int_val != 11 or str(s.str_val) != "Hi from Python": - raise error("The struct value didnt persist!") - - if s.sub_val.int_val != 66 or str(s.sub_val.str_val) != "sub hello": - raise error("The sub-struct value was not correct") - sub = s.sub_val - sub.int_val = 22 - if sub.int_val != 22: - print(sub.int_val) - raise error("The sub-struct value didnt persist!") - - if s.sub_val.int_val != 22: - print(s.sub_val.int_val) - 
raise error("The sub-struct value (re-fetched) didnt persist!") - - if ( - s.sub_val.array_val[0].int_val != 0 - or str(s.sub_val.array_val[0].str_val) != "zero" - ): - print(s.sub_val.array_val[0].int_val) - raise error("The array element wasnt correct") - s.sub_val.array_val[0].int_val = 99 - s.sub_val.array_val[1].int_val = 66 - if s.sub_val.array_val[0].int_val != 99 or s.sub_val.array_val[1].int_val != 66: - print(s.sub_val.array_val[0].int_val) - raise error("The array element didnt persist.") - # Now pass the struct back to VB - vbtest.StructProperty = s - # And get it back again - s = vbtest.StructProperty - if s.int_val != 11 or str(s.str_val) != "Hi from Python": - raise error("After sending to VB, the struct value didnt persist!") - if s.sub_val.array_val[0].int_val != 99: - raise error("After sending to VB, the struct array value didnt persist!") - - # Now do some object equality tests. - assert s == s - assert s != None - if sys.version_info > (3, 0): - try: - s < None - raise error("Expected type error") - except TypeError: - pass - try: - None < s - raise error("Expected type error") - except TypeError: - pass - assert s != s.sub_val - import copy - - s2 = copy.copy(s) - assert s is not s2 - assert s == s2 - s2.int_val = 123 - assert s != s2 - # Make sure everything works with functions - s2 = vbtest.GetStructFunc() - assert s == s2 - vbtest.SetStructSub(s2) - - # Create a new structure, and set its elements. - s = win32com.client.Record("VBStruct", vbtest) - assert s.int_val == 0, "new struct inst initialized correctly!" - s.int_val = -1 - vbtest.SetStructSub(s) - assert vbtest.GetStructFunc().int_val == -1, "new struct didnt make the round trip!" - # Finally, test stand-alone structure arrays. 
- s_array = vbtest.StructArrayProperty - assert s_array is None, "Expected None from the uninitialized VB array" - vbtest.MakeStructArrayProperty(3) - s_array = vbtest.StructArrayProperty - assert len(s_array) == 3 - for i in range(len(s_array)): - assert s_array[i].int_val == i - assert s_array[i].sub_val.int_val == i - assert s_array[i].sub_val.array_val[0].int_val == i - assert s_array[i].sub_val.array_val[1].int_val == i + 1 - assert s_array[i].sub_val.array_val[2].int_val == i + 2 - - # Some error type checks. - try: - s.bad_attribute - raise RuntimeError("Could get a bad attribute") - except AttributeError: - pass - m = s.__members__ - assert ( - m[0] == "int_val" - and m[1] == "str_val" - and m[2] == "ob_val" - and m[3] == "sub_val" - ), m - - # Test attribute errors. - try: - s.foo - raise RuntimeError("Expected attribute error") - except AttributeError as exc: - assert "foo" in str(exc), exc - - # test repr - it uses repr() of the sub-objects, so check it matches. - expected = "com_struct(int_val=%r, str_val=%r, ob_val=%r, sub_val=%r)" % ( - s.int_val, - s.str_val, - s.ob_val, - s.sub_val, - ) - if repr(s) != expected: - print("Expected repr:", expected) - print("Actual repr :", repr(s)) - raise RuntimeError("repr() of record object failed") - - print("Struct/Record tests passed") - - -def TestVBInterface(ob): - t = ob.GetInterfaceTester(2) - if t.getn() != 2: - raise error("Initial value wrong") - t.setn(3) - if t.getn() != 3: - raise error("New value wrong") - - -def TestObjectSemantics(ob): - # a convenient place to test some of our equality semantics - assert ob == ob._oleobj_ - assert not ob != ob._oleobj_ - # same test again, but lhs and rhs reversed. - assert ob._oleobj_ == ob - assert not ob._oleobj_ != ob - # same tests but against different pointers. 
COM identity rules should - # still ensure all works - assert ob._oleobj_ == ob._oleobj_.QueryInterface(pythoncom.IID_IUnknown) - assert not ob._oleobj_ != ob._oleobj_.QueryInterface(pythoncom.IID_IUnknown) - - assert ob._oleobj_ != None - assert None != ob._oleobj_ - assert ob != None - assert None != ob - if sys.version_info > (3, 0): - try: - ob < None - raise error("Expected type error") - except TypeError: - pass - try: - None < ob - raise error("Expected type error") - except TypeError: - pass - - assert ob._oleobj_.QueryInterface(pythoncom.IID_IUnknown) == ob._oleobj_ - assert not ob._oleobj_.QueryInterface(pythoncom.IID_IUnknown) != ob._oleobj_ - - assert ob._oleobj_ == ob._oleobj_.QueryInterface(pythoncom.IID_IDispatch) - assert not ob._oleobj_ != ob._oleobj_.QueryInterface(pythoncom.IID_IDispatch) - - assert ob._oleobj_.QueryInterface(pythoncom.IID_IDispatch) == ob._oleobj_ - assert not ob._oleobj_.QueryInterface(pythoncom.IID_IDispatch) != ob._oleobj_ - - print("Object semantic tests passed") - - -def DoTestAll(): - o = win32com.client.Dispatch("PyCOMVBTest.Tester") - TestObjectSemantics(o) - TestVB(o, 1) - - o = win32com.client.dynamic.DumbDispatch("PyCOMVBTest.Tester") - TestObjectSemantics(o) - TestVB(o, 0) - - -def TestAll(): - # Import the type library for the test module. Let the 'invalid clsid' - # exception filter up, where the test runner will treat it as 'skipped' - win32com.client.gencache.EnsureDispatch("PyCOMVBTest.Tester") - - if not __debug__: - raise RuntimeError("This must be run in debug mode - we use assert!") - try: - DoTestAll() - print("All tests appear to have worked!") - except: - # ????? 
- print("TestAll() failed!!") - traceback.print_exc() - raise - - -# Make this test run under our test suite to leak tests etc work -def suite(): - import unittest - - test = util.CapturingFunctionTestCase(TestAll, description="VB tests") - suite = unittest.TestSuite() - suite.addTest(test) - return suite - - -if __name__ == "__main__": - util.testmain() diff --git a/lib/win32com/test/testvbscript_regexp.py b/lib/win32com/test/testvbscript_regexp.py deleted file mode 100644 index a22f0bea..00000000 --- a/lib/win32com/test/testvbscript_regexp.py +++ /dev/null @@ -1,40 +0,0 @@ -import unittest - -import win32com.test.util -from win32com.client.dynamic import DumbDispatch -from win32com.client.gencache import EnsureDispatch - - -class RegexTest(win32com.test.util.TestCase): - def _CheckMatches(self, match, expected): - found = [] - for imatch in match: - found.append(imatch.FirstIndex) - self.assertEqual(list(found), list(expected)) - - def _TestVBScriptRegex(self, re): - StringToSearch = "Python python pYthon Python" - re.Pattern = "Python" - re.Global = True - - re.IgnoreCase = True - match = re.Execute(StringToSearch) - expected = 0, 7, 14, 21 - self._CheckMatches(match, expected) - - re.IgnoreCase = False - match = re.Execute(StringToSearch) - expected = 0, 21 - self._CheckMatches(match, expected) - - def testDynamic(self): - re = DumbDispatch("VBScript.Regexp") - self._TestVBScriptRegex(re) - - def testGenerated(self): - re = EnsureDispatch("VBScript.Regexp") - self._TestVBScriptRegex(re) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testxslt.js b/lib/win32com/test/testxslt.js deleted file mode 100644 index 347401c2..00000000 --- a/lib/win32com/test/testxslt.js +++ /dev/null @@ -1,12 +0,0 @@ -//Args: input-file style-file output-file -var xml = WScript.CreateObject("Microsoft.XMLDOM"); //input -xml.validateOnParse=false; -xml.load(WScript.Arguments(0)); -var xsl = WScript.CreateObject("Microsoft.XMLDOM"); //style 
-xsl.validateOnParse=false; -xsl.load(WScript.Arguments(1)); -var out = WScript.CreateObject("Scripting.FileSystemObject"); //output -var replace = true; var unicode = false; //output file properties -var hdl = out.CreateTextFile( WScript.Arguments(2), replace, unicode ) -hdl.write( xml.transformNode( xsl.documentElement )); -//eof diff --git a/lib/win32com/test/testxslt.py b/lib/win32com/test/testxslt.py deleted file mode 100644 index caaa2ec5..00000000 --- a/lib/win32com/test/testxslt.py +++ /dev/null @@ -1,34 +0,0 @@ -import os -import tempfile -import unittest - -import win32com.test.util - -expected_output = "The jscript test worked.\nThe Python test worked" - - -class XSLT(win32com.test.util.TestCase): - def testAll(self): - output_name = tempfile.mktemp("-pycom-test") - cmd = ( - "cscript //nologo testxslt.js doesnt_matter.xml testxslt.xsl " + output_name - ) - win32com.test.util.ExecuteShellCommand(cmd, self) - try: - f = open(output_name) - try: - got = f.read() - if got != expected_output: - print("ERROR: XSLT expected output of %r" % (expected_output,)) - print("but got %r" % (got,)) - finally: - f.close() - finally: - try: - os.unlink(output_name) - except os.error: - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/win32com/test/testxslt.xsl b/lib/win32com/test/testxslt.xsl deleted file mode 100644 index 9ab5c0f9..00000000 --- a/lib/win32com/test/testxslt.xsl +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - function worked() { - return "The jscript test worked"; - } - - - - -def worked(): - return "The Python test worked" - - - - - -. 
- - - - - - diff --git a/lib/win32com/test/util.py b/lib/win32com/test/util.py deleted file mode 100644 index 691be851..00000000 --- a/lib/win32com/test/util.py +++ /dev/null @@ -1,265 +0,0 @@ -import gc -import logging -import os -import sys -import tempfile -import unittest -import winreg - -import pythoncom -import pywin32_testutil -import pywintypes -import win32api -import win32com -import winerror -from pythoncom import _GetGatewayCount, _GetInterfaceCount -from pywin32_testutil import LeakTestCase, TestLoader, TestResult, TestRunner - - -def CheckClean(): - # Ensure no lingering exceptions - Python should have zero outstanding - # COM objects - try: - sys.exc_clear() - except AttributeError: - pass # py3k - c = _GetInterfaceCount() - if c: - print("Warning - %d com interface objects still alive" % c) - c = _GetGatewayCount() - if c: - print("Warning - %d com gateway objects still alive" % c) - - -def RegisterPythonServer(filename, progids=None, verbose=0): - if progids: - if isinstance(progids, str): - progids = [progids] - # we know the CLSIDs we need, but we might not be an admin user - # and otherwise unable to register them. So as long as the progids - # exist and the DLL points at our version, assume it already is. - why_not = None - for progid in progids: - try: - clsid = pywintypes.IID(progid) - except pythoncom.com_error: - # not registered. 
- break - try: - HKCR = winreg.HKEY_CLASSES_ROOT - hk = winreg.OpenKey(HKCR, "CLSID\\%s" % clsid) - dll = winreg.QueryValue(hk, "InprocServer32") - except WindowsError: - # no CLSID or InProcServer32 - not registered - break - ok_files = [ - os.path.basename(pythoncom.__file__), - "pythoncomloader%d%d.dll" % (sys.version_info[0], sys.version_info[1]), - ] - if os.path.basename(dll) not in ok_files: - why_not = "%r is registered against a different Python version (%s)" % ( - progid, - dll, - ) - break - else: - # print "Skipping registration of '%s' - already registered" % filename - return - # needs registration - see if its likely! - try: - from win32com.shell.shell import IsUserAnAdmin - except ImportError: - print("Can't import win32com.shell - no idea if you are an admin or not?") - is_admin = False - else: - try: - is_admin = IsUserAnAdmin() - except pythoncom.com_error: - # old, less-secure OS - assume *is* admin. - is_admin = True - if not is_admin: - msg = ( - "%r isn't registered, but I'm not an administrator who can register it." - % progids[0] - ) - if why_not: - msg += "\n(registration check failed as %s)" % why_not - # throw a normal "class not registered" exception - we don't report - # them the same way as "real" errors. - raise pythoncom.com_error(winerror.CO_E_CLASSSTRING, msg, None, -1) - # so theoretically we are able to register it. - cmd = '%s "%s" --unattended > nul 2>&1' % (win32api.GetModuleFileName(0), filename) - if verbose: - print("Registering engine", filename) - # print cmd - rc = os.system(cmd) - if rc: - print("Registration command was:") - print(cmd) - raise RuntimeError("Registration of engine '%s' failed" % filename) - - -def ExecuteShellCommand( - cmd, - testcase, - expected_output=None, # Set to '' to check for nothing - tracebacks_ok=0, # OK if the output contains a t/b? 
-): - output_name = tempfile.mktemp("win32com_test") - cmd = cmd + ' > "%s" 2>&1' % output_name - rc = os.system(cmd) - output = open(output_name, "r").read().strip() - os.remove(output_name) - - class Failed(Exception): - pass - - try: - if rc: - raise Failed("exit code was " + str(rc)) - if expected_output is not None and output != expected_output: - raise Failed("Expected output %r (got %r)" % (expected_output, output)) - if not tracebacks_ok and output.find("Traceback (most recent call last)") >= 0: - raise Failed("traceback in program output") - return output - except Failed as why: - print("Failed to exec command '%r'" % cmd) - print("Failed as", why) - print("** start of program output **") - print(output) - print("** end of program output **") - testcase.fail("Executing '%s' failed as %s" % (cmd, why)) - - -def assertRaisesCOM_HRESULT(testcase, hresult, func, *args, **kw): - try: - func(*args, **kw) - except pythoncom.com_error as details: - if details.hresult == hresult: - return - testcase.fail("Excepected COM exception with HRESULT 0x%x" % hresult) - - -class CaptureWriter: - def __init__(self): - self.old_err = self.old_out = None - self.clear() - - def capture(self): - self.clear() - self.old_out = sys.stdout - self.old_err = sys.stderr - sys.stdout = sys.stderr = self - - def release(self): - if self.old_out: - sys.stdout = self.old_out - self.old_out = None - if self.old_err: - sys.stderr = self.old_err - self.old_err = None - - def clear(self): - self.captured = [] - - def write(self, msg): - self.captured.append(msg) - - def get_captured(self): - return "".join(self.captured) - - def get_num_lines_captured(self): - return len("".join(self.captured).split("\n")) - - -# Utilities to set the win32com logger to something what just captures -# records written and doesn't print them. 
-class LogHandler(logging.Handler): - def __init__(self): - self.emitted = [] - logging.Handler.__init__(self) - - def emit(self, record): - self.emitted.append(record) - - -_win32com_logger = None - - -def setup_test_logger(): - old_log = getattr(win32com, "logger", None) - global _win32com_logger - if _win32com_logger is None: - _win32com_logger = logging.Logger("test") - handler = LogHandler() - _win32com_logger.addHandler(handler) - - win32com.logger = _win32com_logger - handler = _win32com_logger.handlers[0] - handler.emitted = [] - return handler.emitted, old_log - - -def restore_test_logger(prev_logger): - assert prev_logger is None, "who needs this?" - if prev_logger is None: - del win32com.logger - else: - win32com.logger = prev_logger - - -# We used to override some of this (and may later!) -TestCase = unittest.TestCase - - -def CapturingFunctionTestCase(*args, **kw): - real_test = _CapturingFunctionTestCase(*args, **kw) - return LeakTestCase(real_test) - - -class _CapturingFunctionTestCase(unittest.FunctionTestCase): # , TestCaseMixin): - def __call__(self, result=None): - if result is None: - result = self.defaultTestResult() - writer = CaptureWriter() - # self._preTest() - writer.capture() - try: - unittest.FunctionTestCase.__call__(self, result) - if getattr(self, "do_leak_tests", 0) and hasattr(sys, "gettotalrefcount"): - self.run_leak_tests(result) - finally: - writer.release() - # self._postTest(result) - output = writer.get_captured() - self.checkOutput(output, result) - if result.showAll: - print(output) - - def checkOutput(self, output, result): - if output.find("Traceback") >= 0: - msg = "Test output contained a traceback\n---\n%s\n---" % output - result.errors.append((self, msg)) - - -class ShellTestCase(unittest.TestCase): - def __init__(self, cmd, expected_output): - self.__cmd = cmd - self.__eo = expected_output - unittest.TestCase.__init__(self) - - def runTest(self): - ExecuteShellCommand(self.__cmd, self, self.__eo) - - def 
__str__(self): - max = 30 - if len(self.__cmd) > max: - cmd_repr = self.__cmd[:max] + "..." - else: - cmd_repr = self.__cmd - return "exec: " + cmd_repr - - -def testmain(*args, **kw): - pywin32_testutil.testmain(*args, **kw) - CheckClean() diff --git a/lib/win32com/universal.py b/lib/win32com/universal.py deleted file mode 100644 index 7c2944ed..00000000 --- a/lib/win32com/universal.py +++ /dev/null @@ -1,226 +0,0 @@ -# Code that packs and unpacks the Univgw structures. - -# See if we have a special directory for the binaries (for developers) - -import pythoncom -from win32com.client import gencache - -com_error = pythoncom.com_error -_univgw = pythoncom._univgw - - -def RegisterInterfaces(typelibGUID, lcid, major, minor, interface_names=None): - ret = [] # return a list of (dispid, funcname for our policy's benefit - # First see if we have makepy support. If so, we can probably satisfy the request without loading the typelib. - try: - mod = gencache.GetModuleForTypelib(typelibGUID, lcid, major, minor) - except ImportError: - mod = None - if mod is None: - import win32com.client.build - - # Load up the typelib and build (but don't cache) it now - tlb = pythoncom.LoadRegTypeLib(typelibGUID, major, minor, lcid) - typecomp_lib = tlb.GetTypeComp() - if interface_names is None: - interface_names = [] - for i in range(tlb.GetTypeInfoCount()): - info = tlb.GetTypeInfo(i) - doc = tlb.GetDocumentation(i) - attr = info.GetTypeAttr() - if attr.typekind == pythoncom.TKIND_INTERFACE or ( - attr.typekind == pythoncom.TKIND_DISPATCH - and attr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL - ): - interface_names.append(doc[0]) - for name in interface_names: - type_info, type_comp = typecomp_lib.BindType( - name, - ) - # Not sure why we don't get an exception here - BindType's C - # impl looks correct.. - if type_info is None: - raise ValueError("The interface '%s' can not be located" % (name,)) - # If we got back a Dispatch interface, convert to the real interface. 
- attr = type_info.GetTypeAttr() - if attr.typekind == pythoncom.TKIND_DISPATCH: - refhtype = type_info.GetRefTypeOfImplType(-1) - type_info = type_info.GetRefTypeInfo(refhtype) - attr = type_info.GetTypeAttr() - item = win32com.client.build.VTableItem( - type_info, attr, type_info.GetDocumentation(-1) - ) - _doCreateVTable( - item.clsid, item.python_name, item.bIsDispatch, item.vtableFuncs - ) - for info in item.vtableFuncs: - names, dispid, desc = info - invkind = desc[4] - ret.append((dispid, invkind, names[0])) - else: - # Cool - can used cached info. - if not interface_names: - interface_names = list(mod.VTablesToClassMap.values()) - for name in interface_names: - try: - iid = mod.NamesToIIDMap[name] - except KeyError: - raise ValueError( - "Interface '%s' does not exist in this cached typelib" % (name,) - ) - # print "Processing interface", name - sub_mod = gencache.GetModuleForCLSID(iid) - is_dispatch = getattr(sub_mod, name + "_vtables_dispatch_", None) - method_defs = getattr(sub_mod, name + "_vtables_", None) - if is_dispatch is None or method_defs is None: - raise ValueError("Interface '%s' is IDispatch only" % (name,)) - - # And create the univgw defn - _doCreateVTable(iid, name, is_dispatch, method_defs) - for info in method_defs: - names, dispid, desc = info - invkind = desc[4] - ret.append((dispid, invkind, names[0])) - return ret - - -def _doCreateVTable(iid, interface_name, is_dispatch, method_defs): - defn = Definition(iid, is_dispatch, method_defs) - vtbl = _univgw.CreateVTable(defn, is_dispatch) - _univgw.RegisterVTable(vtbl, iid, interface_name) - - -def _CalcTypeSize(typeTuple): - t = typeTuple[0] - if t & (pythoncom.VT_BYREF | pythoncom.VT_ARRAY): - # Its a pointer. - cb = _univgw.SizeOfVT(pythoncom.VT_PTR)[1] - elif t == pythoncom.VT_RECORD: - # Just because a type library uses records doesn't mean the user - # is trying to. We need to better place to warn about this, but it - # isn't here. 
- # try: - # import warnings - # warnings.warn("warning: records are known to not work for vtable interfaces") - # except ImportError: - # print "warning: records are known to not work for vtable interfaces" - cb = _univgw.SizeOfVT(pythoncom.VT_PTR)[1] - # cb = typeInfo.GetTypeAttr().cbSizeInstance - else: - cb = _univgw.SizeOfVT(t)[1] - return cb - - -class Arg: - def __init__(self, arg_info, name=None): - self.name = name - self.vt, self.inOut, self.default, self.clsid = arg_info - self.size = _CalcTypeSize(arg_info) - # Offset from the beginning of the arguments of the stack. - self.offset = 0 - - -class Method: - def __init__(self, method_info, isEventSink=0): - all_names, dispid, desc = method_info - name = all_names[0] - names = all_names[1:] - invkind = desc[4] - arg_defs = desc[2] - ret_def = desc[8] - - self.dispid = dispid - self.invkind = invkind - # We dont use this ATM. - # self.ret = Arg(ret_def) - if isEventSink and name[:2] != "On": - name = "On%s" % name - self.name = name - cbArgs = 0 - self.args = [] - for argDesc in arg_defs: - arg = Arg(argDesc) - arg.offset = cbArgs - cbArgs = cbArgs + arg.size - self.args.append(arg) - self.cbArgs = cbArgs - self._gw_in_args = self._GenerateInArgTuple() - self._gw_out_args = self._GenerateOutArgTuple() - - def _GenerateInArgTuple(self): - # Given a method, generate the in argument tuple - l = [] - for arg in self.args: - if arg.inOut & pythoncom.PARAMFLAG_FIN or arg.inOut == 0: - l.append((arg.vt, arg.offset, arg.size)) - return tuple(l) - - def _GenerateOutArgTuple(self): - # Given a method, generate the out argument tuple - l = [] - for arg in self.args: - if ( - arg.inOut & pythoncom.PARAMFLAG_FOUT - or arg.inOut & pythoncom.PARAMFLAG_FRETVAL - or arg.inOut == 0 - ): - l.append((arg.vt, arg.offset, arg.size, arg.clsid)) - return tuple(l) - - -class Definition: - def __init__(self, iid, is_dispatch, method_defs): - self._iid = iid - self._methods = [] - self._is_dispatch = is_dispatch - for info in 
method_defs: - entry = Method(info) - self._methods.append(entry) - - def iid(self): - return self._iid - - def vtbl_argsizes(self): - return [m.cbArgs for m in self._methods] - - def vtbl_argcounts(self): - return [len(m.args) for m in self._methods] - - def dispatch( - self, - ob, - index, - argPtr, - ReadFromInTuple=_univgw.ReadFromInTuple, - WriteFromOutTuple=_univgw.WriteFromOutTuple, - ): - "Dispatch a call to an interface method." - meth = self._methods[index] - # Infer S_OK if they don't return anything bizarre. - hr = 0 - args = ReadFromInTuple(meth._gw_in_args, argPtr) - # If ob is a dispatcher, ensure a policy - ob = getattr(ob, "policy", ob) - # Ensure the correct dispid is setup - ob._dispid_to_func_[meth.dispid] = meth.name - retVal = ob._InvokeEx_(meth.dispid, 0, meth.invkind, args, None, None) - # None is an allowed return value stating that - # the code doesn't want to touch any output arguments. - if type(retVal) == tuple: # Like pythoncom, we special case a tuple. - # However, if they want to return a specific HRESULT, - # then they have to return all of the out arguments - # AND the HRESULT. - if len(retVal) == len(meth._gw_out_args) + 1: - hr = retVal[0] - retVal = retVal[1:] - else: - raise TypeError( - "Expected %s return values, got: %s" - % (len(meth._gw_out_args) + 1, len(retVal)) - ) - else: - retVal = [retVal] - retVal.extend([None] * (len(meth._gw_out_args) - 1)) - retVal = tuple(retVal) - WriteFromOutTuple(retVal, meth._gw_out_args, argPtr) - return hr diff --git a/lib/win32com/util.py b/lib/win32com/util.py deleted file mode 100644 index 767e34a7..00000000 --- a/lib/win32com/util.py +++ /dev/null @@ -1,34 +0,0 @@ -"""General utility functions common to client and server. - - This module contains a collection of general purpose utility functions. -""" -import pythoncom -import win32api -import win32con - - -def IIDToInterfaceName(iid): - """Converts an IID to a string interface name. 
- - Used primarily for debugging purposes, this allows a cryptic IID to - be converted to a useful string name. This will firstly look for interfaces - known (ie, registered) by pythoncom. If not known, it will look in the - registry for a registered interface. - - iid -- An IID object. - - Result -- Always a string - either an interface name, or '' - """ - try: - return pythoncom.ServerInterfaces[iid] - except KeyError: - try: - try: - return win32api.RegQueryValue( - win32con.HKEY_CLASSES_ROOT, "Interface\\%s" % iid - ) - except win32api.error: - pass - except ImportError: - pass - return str(iid) diff --git a/lib/win32comext/adsi/__init__.py b/lib/win32comext/adsi/__init__.py deleted file mode 100644 index ab95196f..00000000 --- a/lib/win32comext/adsi/__init__.py +++ /dev/null @@ -1,122 +0,0 @@ -import win32com -import win32com.client - -if type(__path__) == type(""): - # For freeze to work! - import sys - - try: - import adsi - - sys.modules["win32com.adsi.adsi"] = adsi - except ImportError: - pass -else: - # See if we have a special directory for the binaries (for developers) - win32com.__PackageSupportBuildPath__(__path__) - - -# Some helpers -# We want to _look_ like the ADSI module, but provide some additional -# helpers. - -# Of specific note - most of the interfaces supported by ADSI -# derive from IDispatch - thus, you get the custome methods from the -# interface, as well as via IDispatch. -import pythoncom - -from .adsi import * - -LCID = 0 - -IDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch] -IADsContainerType = pythoncom.TypeIIDs[adsi.IID_IADsContainer] - - -def _get_good_ret( - ob, - # Named arguments used internally - resultCLSID=None, -): - assert resultCLSID is None, "Now have type info for ADSI objects - fix me!" 
- # See if the object supports IDispatch - if hasattr(ob, "Invoke"): - import win32com.client.dynamic - - name = "Dispatch wrapper around %r" % ob - return win32com.client.dynamic.Dispatch(ob, name, ADSIDispatch) - return ob - - -class ADSIEnumerator: - def __init__(self, ob): - # Query the object for the container interface. - self._cont_ = ob.QueryInterface(IID_IADsContainer) - self._oleobj_ = ADsBuildEnumerator(self._cont_) # a PyIADsEnumVARIANT - self.index = -1 - - def __getitem__(self, index): - return self.__GetIndex(index) - - def __call__(self, index): - return self.__GetIndex(index) - - def __GetIndex(self, index): - if type(index) != type(0): - raise TypeError("Only integer indexes are supported for enumerators") - if index != self.index + 1: - # Index requested out of sequence. - raise ValueError("You must index this object sequentially") - self.index = index - result = ADsEnumerateNext(self._oleobj_, 1) - if len(result): - return _get_good_ret(result[0]) - # Failed - reset for next time around. - self.index = -1 - self._oleobj_ = ADsBuildEnumerator(self._cont_) # a PyIADsEnumVARIANT - raise IndexError("list index out of range") - - -class ADSIDispatch(win32com.client.CDispatch): - def _wrap_dispatch_( - self, ob, userName=None, returnCLSID=None, UnicodeToString=None - ): - assert UnicodeToString is None, "this is deprectated and will be removed" - if not userName: - userName = "ADSI-object" - olerepr = win32com.client.dynamic.MakeOleRepr(ob, None, None) - return ADSIDispatch(ob, olerepr, userName) - - def _NewEnum(self): - try: - return ADSIEnumerator(self) - except pythoncom.com_error: - # doesnt support it - let our base try! 
- return win32com.client.CDispatch._NewEnum(self) - - def __getattr__(self, attr): - try: - return getattr(self._oleobj_, attr) - except AttributeError: - return win32com.client.CDispatch.__getattr__(self, attr) - - def QueryInterface(self, iid): - ret = self._oleobj_.QueryInterface(iid) - return _get_good_ret(ret) - - -# We override the global methods to do the right thing. -_ADsGetObject = ADsGetObject # The one in the .pyd - - -def ADsGetObject(path, iid=pythoncom.IID_IDispatch): - ret = _ADsGetObject(path, iid) - return _get_good_ret(ret) - - -_ADsOpenObject = ADsOpenObject - - -def ADsOpenObject(path, username, password, reserved=0, iid=pythoncom.IID_IDispatch): - ret = _ADsOpenObject(path, username, password, reserved, iid) - return _get_good_ret(ret) diff --git a/lib/win32comext/adsi/adsi.pyd b/lib/win32comext/adsi/adsi.pyd deleted file mode 100644 index 31006a51..00000000 Binary files a/lib/win32comext/adsi/adsi.pyd and /dev/null differ diff --git a/lib/win32comext/adsi/adsicon.py b/lib/win32comext/adsi/adsicon.py deleted file mode 100644 index 9944aefa..00000000 --- a/lib/win32comext/adsi/adsicon.py +++ /dev/null @@ -1,340 +0,0 @@ -ADS_ATTR_CLEAR = 1 -ADS_ATTR_UPDATE = 2 -ADS_ATTR_APPEND = 3 -ADS_ATTR_DELETE = 4 -ADS_EXT_MINEXTDISPID = 1 -ADS_EXT_MAXEXTDISPID = 16777215 -ADS_EXT_INITCREDENTIALS = 1 -ADS_EXT_INITIALIZE_COMPLETE = 2 - -ADS_SEARCHPREF_ASYNCHRONOUS = 0 -ADS_SEARCHPREF_DEREF_ALIASES = 1 -ADS_SEARCHPREF_SIZE_LIMIT = 2 -ADS_SEARCHPREF_TIME_LIMIT = 3 -ADS_SEARCHPREF_ATTRIBTYPES_ONLY = 4 -ADS_SEARCHPREF_SEARCH_SCOPE = 5 -ADS_SEARCHPREF_TIMEOUT = 6 -ADS_SEARCHPREF_PAGESIZE = 7 -ADS_SEARCHPREF_PAGED_TIME_LIMIT = 8 -ADS_SEARCHPREF_CHASE_REFERRALS = 9 -ADS_SEARCHPREF_SORT_ON = 10 -ADS_SEARCHPREF_CACHE_RESULTS = 11 -ADS_SEARCHPREF_DIRSYNC = 12 -ADS_SEARCHPREF_TOMBSTONE = 13 - -ADS_SCOPE_BASE = 0 -ADS_SCOPE_ONELEVEL = 1 -ADS_SCOPE_SUBTREE = 2 - -ADS_SECURE_AUTHENTICATION = 0x1 -ADS_USE_ENCRYPTION = 0x2 -ADS_USE_SSL = 0x2 -ADS_READONLY_SERVER = 0x4 
-ADS_PROMPT_CREDENTIALS = 0x8 -ADS_NO_AUTHENTICATION = 0x10 -ADS_FAST_BIND = 0x20 -ADS_USE_SIGNING = 0x40 -ADS_USE_SEALING = 0x80 -ADS_USE_DELEGATION = 0x100 -ADS_SERVER_BIND = 0x200 - -ADSTYPE_INVALID = 0 -ADSTYPE_DN_STRING = ADSTYPE_INVALID + 1 -ADSTYPE_CASE_EXACT_STRING = ADSTYPE_DN_STRING + 1 -ADSTYPE_CASE_IGNORE_STRING = ADSTYPE_CASE_EXACT_STRING + 1 -ADSTYPE_PRINTABLE_STRING = ADSTYPE_CASE_IGNORE_STRING + 1 -ADSTYPE_NUMERIC_STRING = ADSTYPE_PRINTABLE_STRING + 1 -ADSTYPE_BOOLEAN = ADSTYPE_NUMERIC_STRING + 1 -ADSTYPE_INTEGER = ADSTYPE_BOOLEAN + 1 -ADSTYPE_OCTET_STRING = ADSTYPE_INTEGER + 1 -ADSTYPE_UTC_TIME = ADSTYPE_OCTET_STRING + 1 -ADSTYPE_LARGE_INTEGER = ADSTYPE_UTC_TIME + 1 -ADSTYPE_PROV_SPECIFIC = ADSTYPE_LARGE_INTEGER + 1 -ADSTYPE_OBJECT_CLASS = ADSTYPE_PROV_SPECIFIC + 1 -ADSTYPE_CASEIGNORE_LIST = ADSTYPE_OBJECT_CLASS + 1 -ADSTYPE_OCTET_LIST = ADSTYPE_CASEIGNORE_LIST + 1 -ADSTYPE_PATH = ADSTYPE_OCTET_LIST + 1 -ADSTYPE_POSTALADDRESS = ADSTYPE_PATH + 1 -ADSTYPE_TIMESTAMP = ADSTYPE_POSTALADDRESS + 1 -ADSTYPE_BACKLINK = ADSTYPE_TIMESTAMP + 1 -ADSTYPE_TYPEDNAME = ADSTYPE_BACKLINK + 1 -ADSTYPE_HOLD = ADSTYPE_TYPEDNAME + 1 -ADSTYPE_NETADDRESS = ADSTYPE_HOLD + 1 -ADSTYPE_REPLICAPOINTER = ADSTYPE_NETADDRESS + 1 -ADSTYPE_FAXNUMBER = ADSTYPE_REPLICAPOINTER + 1 -ADSTYPE_EMAIL = ADSTYPE_FAXNUMBER + 1 -ADSTYPE_NT_SECURITY_DESCRIPTOR = ADSTYPE_EMAIL + 1 -ADSTYPE_UNKNOWN = ADSTYPE_NT_SECURITY_DESCRIPTOR + 1 -ADSTYPE_DN_WITH_BINARY = ADSTYPE_UNKNOWN + 1 -ADSTYPE_DN_WITH_STRING = ADSTYPE_DN_WITH_BINARY + 1 - -ADS_PROPERTY_CLEAR = 1 -ADS_PROPERTY_UPDATE = 2 -ADS_PROPERTY_APPEND = 3 -ADS_PROPERTY_DELETE = 4 -ADS_SYSTEMFLAG_DISALLOW_DELETE = -2147483648 -ADS_SYSTEMFLAG_CONFIG_ALLOW_RENAME = 0x40000000 -ADS_SYSTEMFLAG_CONFIG_ALLOW_MOVE = 0x20000000 -ADS_SYSTEMFLAG_CONFIG_ALLOW_LIMITED_MOVE = 0x10000000 -ADS_SYSTEMFLAG_DOMAIN_DISALLOW_RENAME = -2147483648 -ADS_SYSTEMFLAG_DOMAIN_DISALLOW_MOVE = 0x4000000 -ADS_SYSTEMFLAG_CR_NTDS_NC = 0x1 -ADS_SYSTEMFLAG_CR_NTDS_DOMAIN = 0x2 
-ADS_SYSTEMFLAG_ATTR_NOT_REPLICATED = 0x1 -ADS_SYSTEMFLAG_ATTR_IS_CONSTRUCTED = 0x4 -ADS_GROUP_TYPE_GLOBAL_GROUP = 0x2 -ADS_GROUP_TYPE_DOMAIN_LOCAL_GROUP = 0x4 -ADS_GROUP_TYPE_LOCAL_GROUP = 0x4 -ADS_GROUP_TYPE_UNIVERSAL_GROUP = 0x8 -ADS_GROUP_TYPE_SECURITY_ENABLED = -2147483648 -ADS_UF_SCRIPT = 0x1 -ADS_UF_ACCOUNTDISABLE = 0x2 -ADS_UF_HOMEDIR_REQUIRED = 0x8 -ADS_UF_LOCKOUT = 0x10 -ADS_UF_PASSWD_NOTREQD = 0x20 -ADS_UF_PASSWD_CANT_CHANGE = 0x40 -ADS_UF_ENCRYPTED_TEXT_PASSWORD_ALLOWED = 0x80 -ADS_UF_TEMP_DUPLICATE_ACCOUNT = 0x100 -ADS_UF_NORMAL_ACCOUNT = 0x200 -ADS_UF_INTERDOMAIN_TRUST_ACCOUNT = 0x800 -ADS_UF_WORKSTATION_TRUST_ACCOUNT = 0x1000 -ADS_UF_SERVER_TRUST_ACCOUNT = 0x2000 -ADS_UF_DONT_EXPIRE_PASSWD = 0x10000 -ADS_UF_MNS_LOGON_ACCOUNT = 0x20000 -ADS_UF_SMARTCARD_REQUIRED = 0x40000 -ADS_UF_TRUSTED_FOR_DELEGATION = 0x80000 -ADS_UF_NOT_DELEGATED = 0x100000 -ADS_UF_USE_DES_KEY_ONLY = 0x200000 -ADS_UF_DONT_REQUIRE_PREAUTH = 0x400000 -ADS_UF_PASSWORD_EXPIRED = 0x800000 -ADS_UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION = 0x1000000 -ADS_RIGHT_DELETE = 0x10000 -ADS_RIGHT_READ_CONTROL = 0x20000 -ADS_RIGHT_WRITE_DAC = 0x40000 -ADS_RIGHT_WRITE_OWNER = 0x80000 -ADS_RIGHT_SYNCHRONIZE = 0x100000 -ADS_RIGHT_ACCESS_SYSTEM_SECURITY = 0x1000000 -ADS_RIGHT_GENERIC_READ = -2147483648 -ADS_RIGHT_GENERIC_WRITE = 0x40000000 -ADS_RIGHT_GENERIC_EXECUTE = 0x20000000 -ADS_RIGHT_GENERIC_ALL = 0x10000000 -ADS_RIGHT_DS_CREATE_CHILD = 0x1 -ADS_RIGHT_DS_DELETE_CHILD = 0x2 -ADS_RIGHT_ACTRL_DS_LIST = 0x4 -ADS_RIGHT_DS_SELF = 0x8 -ADS_RIGHT_DS_READ_PROP = 0x10 -ADS_RIGHT_DS_WRITE_PROP = 0x20 -ADS_RIGHT_DS_DELETE_TREE = 0x40 -ADS_RIGHT_DS_LIST_OBJECT = 0x80 -ADS_RIGHT_DS_CONTROL_ACCESS = 0x100 -ADS_ACETYPE_ACCESS_ALLOWED = 0 -ADS_ACETYPE_ACCESS_DENIED = 0x1 -ADS_ACETYPE_SYSTEM_AUDIT = 0x2 -ADS_ACETYPE_ACCESS_ALLOWED_OBJECT = 0x5 -ADS_ACETYPE_ACCESS_DENIED_OBJECT = 0x6 -ADS_ACETYPE_SYSTEM_AUDIT_OBJECT = 0x7 -ADS_ACETYPE_SYSTEM_ALARM_OBJECT = 0x8 -ADS_ACETYPE_ACCESS_ALLOWED_CALLBACK = 0x9 
-ADS_ACETYPE_ACCESS_DENIED_CALLBACK = 0xA -ADS_ACETYPE_ACCESS_ALLOWED_CALLBACK_OBJECT = 0xB -ADS_ACETYPE_ACCESS_DENIED_CALLBACK_OBJECT = 0xC -ADS_ACETYPE_SYSTEM_AUDIT_CALLBACK = 0xD -ADS_ACETYPE_SYSTEM_ALARM_CALLBACK = 0xE -ADS_ACETYPE_SYSTEM_AUDIT_CALLBACK_OBJECT = 0xF -ADS_ACETYPE_SYSTEM_ALARM_CALLBACK_OBJECT = 0x10 -ADS_ACEFLAG_INHERIT_ACE = 0x2 -ADS_ACEFLAG_NO_PROPAGATE_INHERIT_ACE = 0x4 -ADS_ACEFLAG_INHERIT_ONLY_ACE = 0x8 -ADS_ACEFLAG_INHERITED_ACE = 0x10 -ADS_ACEFLAG_VALID_INHERIT_FLAGS = 0x1F -ADS_ACEFLAG_SUCCESSFUL_ACCESS = 0x40 -ADS_ACEFLAG_FAILED_ACCESS = 0x80 -ADS_FLAG_OBJECT_TYPE_PRESENT = 0x1 -ADS_FLAG_INHERITED_OBJECT_TYPE_PRESENT = 0x2 -ADS_SD_CONTROL_SE_OWNER_DEFAULTED = 0x1 -ADS_SD_CONTROL_SE_GROUP_DEFAULTED = 0x2 -ADS_SD_CONTROL_SE_DACL_PRESENT = 0x4 -ADS_SD_CONTROL_SE_DACL_DEFAULTED = 0x8 -ADS_SD_CONTROL_SE_SACL_PRESENT = 0x10 -ADS_SD_CONTROL_SE_SACL_DEFAULTED = 0x20 -ADS_SD_CONTROL_SE_DACL_AUTO_INHERIT_REQ = 0x100 -ADS_SD_CONTROL_SE_SACL_AUTO_INHERIT_REQ = 0x200 -ADS_SD_CONTROL_SE_DACL_AUTO_INHERITED = 0x400 -ADS_SD_CONTROL_SE_SACL_AUTO_INHERITED = 0x800 -ADS_SD_CONTROL_SE_DACL_PROTECTED = 0x1000 -ADS_SD_CONTROL_SE_SACL_PROTECTED = 0x2000 -ADS_SD_CONTROL_SE_SELF_RELATIVE = 0x8000 -ADS_SD_REVISION_DS = 4 -ADS_NAME_TYPE_1779 = 1 -ADS_NAME_TYPE_CANONICAL = 2 -ADS_NAME_TYPE_NT4 = 3 -ADS_NAME_TYPE_DISPLAY = 4 -ADS_NAME_TYPE_DOMAIN_SIMPLE = 5 -ADS_NAME_TYPE_ENTERPRISE_SIMPLE = 6 -ADS_NAME_TYPE_GUID = 7 -ADS_NAME_TYPE_UNKNOWN = 8 -ADS_NAME_TYPE_USER_PRINCIPAL_NAME = 9 -ADS_NAME_TYPE_CANONICAL_EX = 10 -ADS_NAME_TYPE_SERVICE_PRINCIPAL_NAME = 11 -ADS_NAME_TYPE_SID_OR_SID_HISTORY_NAME = 12 -ADS_NAME_INITTYPE_DOMAIN = 1 -ADS_NAME_INITTYPE_SERVER = 2 -ADS_NAME_INITTYPE_GC = 3 -ADS_OPTION_SERVERNAME = 0 -ADS_OPTION_REFERRALS = ADS_OPTION_SERVERNAME + 1 -ADS_OPTION_PAGE_SIZE = ADS_OPTION_REFERRALS + 1 -ADS_OPTION_SECURITY_MASK = ADS_OPTION_PAGE_SIZE + 1 -ADS_OPTION_MUTUAL_AUTH_STATUS = ADS_OPTION_SECURITY_MASK + 1 -ADS_OPTION_QUOTA = 
ADS_OPTION_MUTUAL_AUTH_STATUS + 1 -ADS_OPTION_PASSWORD_PORTNUMBER = ADS_OPTION_QUOTA + 1 -ADS_OPTION_PASSWORD_METHOD = ADS_OPTION_PASSWORD_PORTNUMBER + 1 -ADS_SECURITY_INFO_OWNER = 0x1 -ADS_SECURITY_INFO_GROUP = 0x2 -ADS_SECURITY_INFO_DACL = 0x4 -ADS_SECURITY_INFO_SACL = 0x8 -ADS_SETTYPE_FULL = 1 -ADS_SETTYPE_PROVIDER = 2 -ADS_SETTYPE_SERVER = 3 -ADS_SETTYPE_DN = 4 -ADS_FORMAT_WINDOWS = 1 -ADS_FORMAT_WINDOWS_NO_SERVER = 2 -ADS_FORMAT_WINDOWS_DN = 3 -ADS_FORMAT_WINDOWS_PARENT = 4 -ADS_FORMAT_X500 = 5 -ADS_FORMAT_X500_NO_SERVER = 6 -ADS_FORMAT_X500_DN = 7 -ADS_FORMAT_X500_PARENT = 8 -ADS_FORMAT_SERVER = 9 -ADS_FORMAT_PROVIDER = 10 -ADS_FORMAT_LEAF = 11 -ADS_DISPLAY_FULL = 1 -ADS_DISPLAY_VALUE_ONLY = 2 -ADS_ESCAPEDMODE_DEFAULT = 1 -ADS_ESCAPEDMODE_ON = 2 -ADS_ESCAPEDMODE_OFF = 3 -ADS_ESCAPEDMODE_OFF_EX = 4 -ADS_PATH_FILE = 1 -ADS_PATH_FILESHARE = 2 -ADS_PATH_REGISTRY = 3 -ADS_SD_FORMAT_IID = 1 -ADS_SD_FORMAT_RAW = 2 -ADS_SD_FORMAT_HEXSTRING = 3 - - -# Generated by h2py from AdsErr.h -def _HRESULT_TYPEDEF_(_sc): - return _sc - - -E_ADS_BAD_PATHNAME = _HRESULT_TYPEDEF_((-2147463168)) -E_ADS_INVALID_DOMAIN_OBJECT = _HRESULT_TYPEDEF_((-2147463167)) -E_ADS_INVALID_USER_OBJECT = _HRESULT_TYPEDEF_((-2147463166)) -E_ADS_INVALID_COMPUTER_OBJECT = _HRESULT_TYPEDEF_((-2147463165)) -E_ADS_UNKNOWN_OBJECT = _HRESULT_TYPEDEF_((-2147463164)) -E_ADS_PROPERTY_NOT_SET = _HRESULT_TYPEDEF_((-2147463163)) -E_ADS_PROPERTY_NOT_SUPPORTED = _HRESULT_TYPEDEF_((-2147463162)) -E_ADS_PROPERTY_INVALID = _HRESULT_TYPEDEF_((-2147463161)) -E_ADS_BAD_PARAMETER = _HRESULT_TYPEDEF_((-2147463160)) -E_ADS_OBJECT_UNBOUND = _HRESULT_TYPEDEF_((-2147463159)) -E_ADS_PROPERTY_NOT_MODIFIED = _HRESULT_TYPEDEF_((-2147463158)) -E_ADS_PROPERTY_MODIFIED = _HRESULT_TYPEDEF_((-2147463157)) -E_ADS_CANT_CONVERT_DATATYPE = _HRESULT_TYPEDEF_((-2147463156)) -E_ADS_PROPERTY_NOT_FOUND = _HRESULT_TYPEDEF_((-2147463155)) -E_ADS_OBJECT_EXISTS = _HRESULT_TYPEDEF_((-2147463154)) -E_ADS_SCHEMA_VIOLATION = 
_HRESULT_TYPEDEF_((-2147463153)) -E_ADS_COLUMN_NOT_SET = _HRESULT_TYPEDEF_((-2147463152)) -S_ADS_ERRORSOCCURRED = _HRESULT_TYPEDEF_(0x00005011) -S_ADS_NOMORE_ROWS = _HRESULT_TYPEDEF_(0x00005012) -S_ADS_NOMORE_COLUMNS = _HRESULT_TYPEDEF_(0x00005013) -E_ADS_INVALID_FILTER = _HRESULT_TYPEDEF_((-2147463148)) - -# ADS_DEREFENUM enum -ADS_DEREF_NEVER = 0 -ADS_DEREF_SEARCHING = 1 -ADS_DEREF_FINDING = 2 -ADS_DEREF_ALWAYS = 3 - -# ADS_PREFERENCES_ENUM -ADSIPROP_ASYNCHRONOUS = 0 -ADSIPROP_DEREF_ALIASES = 0x1 -ADSIPROP_SIZE_LIMIT = 0x2 -ADSIPROP_TIME_LIMIT = 0x3 -ADSIPROP_ATTRIBTYPES_ONLY = 0x4 -ADSIPROP_SEARCH_SCOPE = 0x5 -ADSIPROP_TIMEOUT = 0x6 -ADSIPROP_PAGESIZE = 0x7 -ADSIPROP_PAGED_TIME_LIMIT = 0x8 -ADSIPROP_CHASE_REFERRALS = 0x9 -ADSIPROP_SORT_ON = 0xA -ADSIPROP_CACHE_RESULTS = 0xB -ADSIPROP_ADSIFLAG = 0xC - -# ADSI_DIALECT_ENUM -ADSI_DIALECT_LDAP = 0 -ADSI_DIALECT_SQL = 0x1 - -# ADS_CHASE_REFERRALS_ENUM -ADS_CHASE_REFERRALS_NEVER = 0 -ADS_CHASE_REFERRALS_SUBORDINATE = 0x20 -ADS_CHASE_REFERRALS_EXTERNAL = 0x40 -ADS_CHASE_REFERRALS_ALWAYS = ( - ADS_CHASE_REFERRALS_SUBORDINATE | ADS_CHASE_REFERRALS_EXTERNAL -) - -# Generated by h2py from ObjSel.h -DSOP_SCOPE_TYPE_TARGET_COMPUTER = 0x00000001 -DSOP_SCOPE_TYPE_UPLEVEL_JOINED_DOMAIN = 0x00000002 -DSOP_SCOPE_TYPE_DOWNLEVEL_JOINED_DOMAIN = 0x00000004 -DSOP_SCOPE_TYPE_ENTERPRISE_DOMAIN = 0x00000008 -DSOP_SCOPE_TYPE_GLOBAL_CATALOG = 0x00000010 -DSOP_SCOPE_TYPE_EXTERNAL_UPLEVEL_DOMAIN = 0x00000020 -DSOP_SCOPE_TYPE_EXTERNAL_DOWNLEVEL_DOMAIN = 0x00000040 -DSOP_SCOPE_TYPE_WORKGROUP = 0x00000080 -DSOP_SCOPE_TYPE_USER_ENTERED_UPLEVEL_SCOPE = 0x00000100 -DSOP_SCOPE_TYPE_USER_ENTERED_DOWNLEVEL_SCOPE = 0x00000200 -DSOP_SCOPE_FLAG_STARTING_SCOPE = 0x00000001 -DSOP_SCOPE_FLAG_WANT_PROVIDER_WINNT = 0x00000002 -DSOP_SCOPE_FLAG_WANT_PROVIDER_LDAP = 0x00000004 -DSOP_SCOPE_FLAG_WANT_PROVIDER_GC = 0x00000008 -DSOP_SCOPE_FLAG_WANT_SID_PATH = 0x00000010 -DSOP_SCOPE_FLAG_WANT_DOWNLEVEL_BUILTIN_PATH = 0x00000020 -DSOP_SCOPE_FLAG_DEFAULT_FILTER_USERS 
= 0x00000040 -DSOP_SCOPE_FLAG_DEFAULT_FILTER_GROUPS = 0x00000080 -DSOP_SCOPE_FLAG_DEFAULT_FILTER_COMPUTERS = 0x00000100 -DSOP_SCOPE_FLAG_DEFAULT_FILTER_CONTACTS = 0x00000200 -DSOP_FILTER_INCLUDE_ADVANCED_VIEW = 0x00000001 -DSOP_FILTER_USERS = 0x00000002 -DSOP_FILTER_BUILTIN_GROUPS = 0x00000004 -DSOP_FILTER_WELL_KNOWN_PRINCIPALS = 0x00000008 -DSOP_FILTER_UNIVERSAL_GROUPS_DL = 0x00000010 -DSOP_FILTER_UNIVERSAL_GROUPS_SE = 0x00000020 -DSOP_FILTER_GLOBAL_GROUPS_DL = 0x00000040 -DSOP_FILTER_GLOBAL_GROUPS_SE = 0x00000080 -DSOP_FILTER_DOMAIN_LOCAL_GROUPS_DL = 0x00000100 -DSOP_FILTER_DOMAIN_LOCAL_GROUPS_SE = 0x00000200 -DSOP_FILTER_CONTACTS = 0x00000400 -DSOP_FILTER_COMPUTERS = 0x00000800 -DSOP_DOWNLEVEL_FILTER_USERS = -2147483647 -DSOP_DOWNLEVEL_FILTER_LOCAL_GROUPS = -2147483646 -DSOP_DOWNLEVEL_FILTER_GLOBAL_GROUPS = -2147483644 -DSOP_DOWNLEVEL_FILTER_COMPUTERS = -2147483640 -DSOP_DOWNLEVEL_FILTER_WORLD = -2147483632 -DSOP_DOWNLEVEL_FILTER_AUTHENTICATED_USER = -2147483616 -DSOP_DOWNLEVEL_FILTER_ANONYMOUS = -2147483584 -DSOP_DOWNLEVEL_FILTER_BATCH = -2147483520 -DSOP_DOWNLEVEL_FILTER_CREATOR_OWNER = -2147483392 -DSOP_DOWNLEVEL_FILTER_CREATOR_GROUP = -2147483136 -DSOP_DOWNLEVEL_FILTER_DIALUP = -2147482624 -DSOP_DOWNLEVEL_FILTER_INTERACTIVE = -2147481600 -DSOP_DOWNLEVEL_FILTER_NETWORK = -2147479552 -DSOP_DOWNLEVEL_FILTER_SERVICE = -2147475456 -DSOP_DOWNLEVEL_FILTER_SYSTEM = -2147467264 -DSOP_DOWNLEVEL_FILTER_EXCLUDE_BUILTIN_GROUPS = -2147450880 -DSOP_DOWNLEVEL_FILTER_TERMINAL_SERVER = -2147418112 -DSOP_DOWNLEVEL_FILTER_ALL_WELLKNOWN_SIDS = -2147352576 -DSOP_DOWNLEVEL_FILTER_LOCAL_SERVICE = -2147221504 -DSOP_DOWNLEVEL_FILTER_NETWORK_SERVICE = -2146959360 -DSOP_DOWNLEVEL_FILTER_REMOTE_LOGON = -2146435072 -DSOP_FLAG_MULTISELECT = 0x00000001 -DSOP_FLAG_SKIP_TARGET_COMPUTER_DC_CHECK = 0x00000002 -CFSTR_DSOP_DS_SELECTION_LIST = "CFSTR_DSOP_DS_SELECTION_LIST" diff --git a/lib/win32comext/adsi/demos/objectPicker.py b/lib/win32comext/adsi/demos/objectPicker.py deleted file mode 
100644 index 5df42459..00000000 --- a/lib/win32comext/adsi/demos/objectPicker.py +++ /dev/null @@ -1,68 +0,0 @@ -# A demo for the IDsObjectPicker interface. -import pythoncom -import win32clipboard -from win32com.adsi import adsi -from win32com.adsi.adsicon import * - -cf_objectpicker = win32clipboard.RegisterClipboardFormat(CFSTR_DSOP_DS_SELECTION_LIST) - - -def main(): - hwnd = 0 - - # Create an instance of the object picker. - picker = pythoncom.CoCreateInstance( - adsi.CLSID_DsObjectPicker, - None, - pythoncom.CLSCTX_INPROC_SERVER, - adsi.IID_IDsObjectPicker, - ) - - # Create our scope init info. - siis = adsi.DSOP_SCOPE_INIT_INFOs(1) - sii = siis[0] - - # Combine multiple scope types in a single array entry. - - sii.type = ( - DSOP_SCOPE_TYPE_UPLEVEL_JOINED_DOMAIN | DSOP_SCOPE_TYPE_DOWNLEVEL_JOINED_DOMAIN - ) - - # Set uplevel and downlevel filters to include only computer objects. - # Uplevel filters apply to both mixed and native modes. - # Notice that the uplevel and downlevel flags are different. - - sii.filterFlags.uplevel.bothModes = DSOP_FILTER_COMPUTERS - sii.filterFlags.downlevel = DSOP_DOWNLEVEL_FILTER_COMPUTERS - - # Initialize the interface. - picker.Initialize( - None, # Target is the local computer. - siis, # scope infos - DSOP_FLAG_MULTISELECT, # options - ("objectGUID", "displayName"), - ) # attributes to fetch - - do = picker.InvokeDialog(hwnd) - # Extract the data from the IDataObject. 
- format_etc = ( - cf_objectpicker, - None, - pythoncom.DVASPECT_CONTENT, - -1, - pythoncom.TYMED_HGLOBAL, - ) - medium = do.GetData(format_etc) - data = adsi.StringAsDS_SELECTION_LIST(medium.data) - for item in data: - name, klass, adspath, upn, attrs, flags = item - print("Item", name) - print(" Class:", klass) - print(" AdsPath:", adspath) - print(" UPN:", upn) - print(" Attrs:", attrs) - print(" Flags:", flags) - - -if __name__ == "__main__": - main() diff --git a/lib/win32comext/adsi/demos/scp.py b/lib/win32comext/adsi/demos/scp.py deleted file mode 100644 index d1a4ca05..00000000 --- a/lib/win32comext/adsi/demos/scp.py +++ /dev/null @@ -1,565 +0,0 @@ -"""A re-implementation of the MS DirectoryService samples related to services. - -* Adds and removes an ActiveDirectory "Service Connection Point", - including managing the security on the object. -* Creates and registers Service Principal Names. -* Changes the username for a domain user. - -Some of these functions are likely to become move to a module - but there -is also a little command-line-interface to try these functions out. - -For example: - -scp.py --account-name=domain\\user --service-class=PythonScpTest \\ - --keyword=foo --keyword=bar --binding-string=bind_info \\ - ScpCreate SpnCreate SpnRegister - -would: -* Attempt to delete a Service Connection Point for the service class - 'PythonScpTest' -* Attempt to create a Service Connection Point for that class, with 2 - keywords and a binding string of 'bind_info' -* Create a Service Principal Name for the service and register it - -to undo those changes, you could execute: - -scp.py --account-name=domain\\user --service-class=PythonScpTest \\ - SpnCreate SpnUnregister ScpDelete - -which will: -* Create a SPN -* Unregister that SPN from the Active Directory. -* Delete the Service Connection Point - -Executing with --test will create and remove one of everything. 
-""" - -import optparse -import textwrap -import traceback - -import ntsecuritycon as dscon -import win32api -import win32con -import win32security -import winerror -from win32com.adsi import adsi -from win32com.adsi.adsicon import * -from win32com.client import Dispatch - -verbose = 1 -g_createdSCP = None -g_createdSPNs = [] -g_createdSPNLast = None - -import logging - -logger = logging # use logging module global methods for now. - -# still a bit confused about log(n, ...) vs logger.info/debug() - - -# Returns distinguished name of SCP. -def ScpCreate( - service_binding_info, - service_class_name, # Service class string to store in SCP. - account_name=None, # Logon account that needs access to SCP. - container_name=None, - keywords=None, - object_class="serviceConnectionPoint", - dns_name_type="A", - dn=None, - dns_name=None, -): - container_name = container_name or service_class_name - if not dns_name: - # Get the DNS name of the local computer - dns_name = win32api.GetComputerNameEx(win32con.ComputerNameDnsFullyQualified) - # Get the distinguished name of the computer object for the local computer - if dn is None: - dn = win32api.GetComputerObjectName(win32con.NameFullyQualifiedDN) - - # Compose the ADSpath and bind to the computer object for the local computer - comp = adsi.ADsGetObject("LDAP://" + dn, adsi.IID_IDirectoryObject) - - # Publish the SCP as a child of the computer object - keywords = keywords or [] - # Fill in the attribute values to be stored in the SCP. 
- attrs = [ - ("cn", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (container_name,)), - ("objectClass", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (object_class,)), - ("keywords", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, keywords), - ("serviceDnsName", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (dns_name,)), - ( - "serviceDnsNameType", - ADS_ATTR_UPDATE, - ADSTYPE_CASE_IGNORE_STRING, - (dns_name_type,), - ), - ( - "serviceClassName", - ADS_ATTR_UPDATE, - ADSTYPE_CASE_IGNORE_STRING, - (service_class_name,), - ), - ( - "serviceBindingInformation", - ADS_ATTR_UPDATE, - ADSTYPE_CASE_IGNORE_STRING, - (service_binding_info,), - ), - ] - new = comp.CreateDSObject("cn=" + container_name, attrs) - logger.info("New connection point is at %s", container_name) - # Wrap in a usable IDispatch object. - new = Dispatch(new) - # And allow access to the SCP for the specified account name - AllowAccessToScpProperties(account_name, new) - return new - - -def ScpDelete(container_name, dn=None): - if dn is None: - dn = win32api.GetComputerObjectName(win32con.NameFullyQualifiedDN) - logger.debug("Removing connection point '%s' from %s", container_name, dn) - - # Compose the ADSpath and bind to the computer object for the local computer - comp = adsi.ADsGetObject("LDAP://" + dn, adsi.IID_IDirectoryObject) - comp.DeleteDSObject("cn=" + container_name) - logger.info("Deleted service connection point '%s'", container_name) - - -# This function is described in detail in the MSDN article titled -# "Enabling Service Account to Access SCP Properties" -# From that article: -# The following sample code sets a pair of ACEs on a service connection point -# (SCP) object. The ACEs grant read/write access to the user or computer account -# under which the service instance will be running. Your service installation -# program calls this code to ensure that the service will be allowed to update -# its properties at run time. 
If you don't set ACEs like these, your service -# will get access-denied errors if it tries to modify the SCP's properties. -# -# The code uses the IADsSecurityDescriptor, IADsAccessControlList, and -# IADsAccessControlEntry interfaces to do the following: -# * Get the SCP object's security descriptor. -# * Set ACEs in the DACL of the security descriptor. -# * Set the security descriptor back on the SCP object. - - -def AllowAccessToScpProperties( - accountSAM, # Service account to allow access. - scpObject, # The IADs SCP object. - schemaIDGUIDs=( # Attributes to allow write-access to. - "{28630eb8-41d5-11d1-a9c1-0000f80367c1}", # serviceDNSName - "{b7b1311c-b82e-11d0-afee-0000f80367c1}", # serviceBindingInformation - ), -): - # If no service account is specified, service runs under LocalSystem. - # So allow access to the computer account of the service's host. - if accountSAM: - trustee = accountSAM - else: - # Get the SAM account name of the computer object for the server. - trustee = win32api.GetComputerObjectName(win32con.NameSamCompatible) - - # Get the nTSecurityDescriptor attribute - attribute = "nTSecurityDescriptor" - sd = getattr(scpObject, attribute) - acl = sd.DiscretionaryAcl - - for sguid in schemaIDGUIDs: - ace = Dispatch(adsi.CLSID_AccessControlEntry) - - # Set the properties of the ACE. - # Allow read and write access to the property. - ace.AccessMask = ADS_RIGHT_DS_READ_PROP | ADS_RIGHT_DS_WRITE_PROP - - # Set the trustee, which is either the service account or the - # host computer account. - ace.Trustee = trustee - - # Set the ACE type. - ace.AceType = ADS_ACETYPE_ACCESS_ALLOWED_OBJECT - - # Set AceFlags to zero because ACE is not inheritable. - ace.AceFlags = 0 - - # Set Flags to indicate an ACE that protects a specified object. - ace.Flags = ADS_FLAG_OBJECT_TYPE_PRESENT - - # Set ObjectType to the schemaIDGUID of the attribute. - ace.ObjectType = sguid - - # Add the ACEs to the DACL. 
- acl.AddAce(ace) - - # Write the modified DACL back to the security descriptor. - sd.DiscretionaryAcl = acl - # Write the ntSecurityDescriptor property to the property cache. - setattr(scpObject, attribute, sd) - # SetInfo updates the SCP object in the directory. - scpObject.SetInfo() - logger.info("Set security on object for account '%s'" % (trustee,)) - - -# Service Principal Names functions from the same sample. -# The example calls the DsWriteAccountSpn function, which stores the SPNs in -# Microsoft Active Directory under the servicePrincipalName attribute of the -# account object specified by the serviceAcctDN parameter. The account object -# corresponds to the logon account specified in the CreateService call for this -# service instance. If the logon account is a domain user account, -# serviceAcctDN must be the distinguished name of the account object in -# Active Directory for that user account. If the service's logon account is the -# LocalSystem account, serviceAcctDN must be the distinguished name of the -# computer account object for the host computer on which the service is -# installed. win32api.TranslateNames and win32security.DsCrackNames can -# be used to convert a domain\account format name to a distinguished name. -def SpnRegister( - serviceAcctDN, # DN of the service's logon account - spns, # List of SPNs to register - operation, # Add, replace, or delete SPNs -): - assert type(spns) not in [str, str] and hasattr(spns, "__iter__"), ( - "spns must be a sequence of strings (got %r)" % spns - ) - # Bind to a domain controller. - # Get the domain for the current user. - samName = win32api.GetUserNameEx(win32api.NameSamCompatible) - samName = samName.split("\\", 1)[0] - - if not serviceAcctDN: - # Get the SAM account name of the computer object for the server. 
- serviceAcctDN = win32api.GetComputerObjectName(win32con.NameFullyQualifiedDN) - logger.debug("SpnRegister using DN '%s'", serviceAcctDN) - - # Get the name of a domain controller in that domain. - info = win32security.DsGetDcName( - domainName=samName, - flags=dscon.DS_IS_FLAT_NAME - | dscon.DS_RETURN_DNS_NAME - | dscon.DS_DIRECTORY_SERVICE_REQUIRED, - ) - # Bind to the domain controller. - handle = win32security.DsBind(info["DomainControllerName"]) - - # Write the SPNs to the service account or computer account. - logger.debug("DsWriteAccountSpn with spns %s") - win32security.DsWriteAccountSpn( - handle, # handle to the directory - operation, # Add or remove SPN from account's existing SPNs - serviceAcctDN, # DN of service account or computer account - spns, - ) # names - - # Unbind the DS in any case (but Python would do it anyway) - handle.Close() - - -def UserChangePassword(username_dn, new_password): - # set the password on the account. - # Use the distinguished name to bind to the account object. - accountPath = "LDAP://" + username_dn - user = adsi.ADsGetObject(accountPath, adsi.IID_IADsUser) - - # Set the password on the account. 
- user.SetPassword(new_password) - - -# functions related to the command-line interface -def log(level, msg, *args): - if verbose >= level: - print(msg % args) - - -class _NoDefault: - pass - - -def _get_option(po, opt_name, default=_NoDefault): - parser, options = po - ret = getattr(options, opt_name, default) - if not ret and default is _NoDefault: - parser.error("The '%s' option must be specified for this operation" % opt_name) - if not ret: - ret = default - return ret - - -def _option_error(po, why): - parser = po[0] - parser.error(why) - - -def do_ScpCreate(po): - """Create a Service Connection Point""" - global g_createdSCP - scp = ScpCreate( - _get_option(po, "binding_string"), - _get_option(po, "service_class"), - _get_option(po, "account_name_sam", None), - keywords=_get_option(po, "keywords", None), - ) - g_createdSCP = scp - return scp.distinguishedName - - -def do_ScpDelete(po): - """Delete a Service Connection Point""" - sc = _get_option(po, "service_class") - try: - ScpDelete(sc) - except adsi.error as details: - if details[0] != winerror.ERROR_DS_OBJ_NOT_FOUND: - raise - log(2, "ScpDelete ignoring ERROR_DS_OBJ_NOT_FOUND for service-class '%s'", sc) - return sc - - -def do_SpnCreate(po): - """Create a Service Principal Name""" - # The 'service name' is the dn of our scp. - if g_createdSCP is None: - # Could accept an arg to avoid this? 
- _option_error(po, "ScpCreate must have been specified before SpnCreate") - # Create a Service Principal Name" - spns = win32security.DsGetSpn( - dscon.DS_SPN_SERVICE, - _get_option(po, "service_class"), - g_createdSCP.distinguishedName, - _get_option(po, "port", 0), - None, - None, - ) - spn = spns[0] - log(2, "Created SPN: %s", spn) - global g_createdSPNLast - g_createdSPNLast = spn - g_createdSPNs.append(spn) - return spn - - -def do_SpnRegister(po): - """Register a previously created Service Principal Name""" - if not g_createdSPNLast: - _option_error(po, "SpnCreate must appear before SpnRegister") - - SpnRegister( - _get_option(po, "account_name_dn", None), - (g_createdSPNLast,), - dscon.DS_SPN_ADD_SPN_OP, - ) - return g_createdSPNLast - - -def do_SpnUnregister(po): - """Unregister a previously created Service Principal Name""" - if not g_createdSPNLast: - _option_error(po, "SpnCreate must appear before SpnUnregister") - SpnRegister( - _get_option(po, "account_name_dn", None), - (g_createdSPNLast,), - dscon.DS_SPN_DELETE_SPN_OP, - ) - return g_createdSPNLast - - -def do_UserChangePassword(po): - """Change the password for a specified user""" - UserChangePassword(_get_option(po, "account_name_dn"), _get_option(po, "password")) - return "Password changed OK" - - -handlers = ( - ("ScpCreate", do_ScpCreate), - ("ScpDelete", do_ScpDelete), - ("SpnCreate", do_SpnCreate), - ("SpnRegister", do_SpnRegister), - ("SpnUnregister", do_SpnUnregister), - ("UserChangePassword", do_UserChangePassword), -) - - -class HelpFormatter(optparse.IndentedHelpFormatter): - def format_description(self, description): - return description - - -def main(): - global verbose - _handlers_dict = {} - - arg_descs = [] - for arg, func in handlers: - this_desc = "\n".join(textwrap.wrap(func.__doc__, subsequent_indent=" " * 8)) - arg_descs.append(" %s: %s" % (arg, this_desc)) - _handlers_dict[arg.lower()] = func - - description = __doc__ + "\ncommands:\n" + "\n".join(arg_descs) + "\n" - - parser 
= optparse.OptionParser( - usage="%prog [options] command ...", - description=description, - formatter=HelpFormatter(), - ) - - parser.add_option( - "-v", - action="count", - dest="verbose", - default=1, - help="increase the verbosity of status messages", - ) - - parser.add_option( - "-q", "--quiet", action="store_true", help="Don't print any status messages" - ) - - parser.add_option( - "-t", - "--test", - action="store_true", - help="Execute a mini-test suite, providing defaults for most options and args", - ), - - parser.add_option( - "", - "--show-tracebacks", - action="store_true", - help="Show the tracebacks for any exceptions", - ) - - parser.add_option("", "--service-class", help="The service class name to use") - - parser.add_option( - "", "--port", default=0, help="The port number to associate with the SPN" - ) - - parser.add_option( - "", "--binding-string", help="The binding string to use for SCP creation" - ) - - parser.add_option( - "", "--account-name", help="The account name to use (default is LocalSystem)" - ) - - parser.add_option("", "--password", help="The password to set.") - - parser.add_option( - "", - "--keyword", - action="append", - dest="keywords", - help="""A keyword to add to the SCP. 
May be specified - multiple times""", - ) - - parser.add_option( - "", - "--log-level", - help="""The log-level to use - may be a number or a logging - module constant""", - default=str(logging.WARNING), - ) - - options, args = parser.parse_args() - po = (parser, options) - # fixup misc - try: - options.port = int(options.port) - except (TypeError, ValueError): - parser.error("--port must be numeric") - # fixup log-level - try: - log_level = int(options.log_level) - except (TypeError, ValueError): - try: - log_level = int(getattr(logging, options.log_level.upper())) - except (ValueError, TypeError, AttributeError): - parser.error("Invalid --log-level value") - try: - sl = logger.setLevel - # logger is a real logger - except AttributeError: - # logger is logging module - sl = logging.getLogger().setLevel - sl(log_level) - # Check -q/-v - if options.quiet and options.verbose: - parser.error("Can't specify --quiet and --verbose") - if options.quiet: - options.verbose -= 1 - verbose = options.verbose - # --test - if options.test: - if args: - parser.error("Can't specify args with --test") - - args = "ScpDelete ScpCreate SpnCreate SpnRegister SpnUnregister ScpDelete" - log(1, "--test - pretending args are:\n %s", args) - args = args.split() - if not options.service_class: - options.service_class = "PythonScpTest" - log(2, "--test: --service-class=%s", options.service_class) - if not options.keywords: - options.keywords = "Python Powered".split() - log(2, "--test: --keyword=%s", options.keywords) - if not options.binding_string: - options.binding_string = "test binding string" - log(2, "--test: --binding-string=%s", options.binding_string) - - # check args - if not args: - parser.error("No command specified (use --help for valid commands)") - for arg in args: - if arg.lower() not in _handlers_dict: - parser.error("Invalid command '%s' (use --help for valid commands)" % arg) - - # Patch up account-name. 
- if options.account_name: - log(2, "Translating account name '%s'", options.account_name) - options.account_name_sam = win32security.TranslateName( - options.account_name, win32api.NameUnknown, win32api.NameSamCompatible - ) - log(2, "NameSamCompatible is '%s'", options.account_name_sam) - options.account_name_dn = win32security.TranslateName( - options.account_name, win32api.NameUnknown, win32api.NameFullyQualifiedDN - ) - log(2, "NameFullyQualifiedDNis '%s'", options.account_name_dn) - - # do it. - for arg in args: - handler = _handlers_dict[arg.lower()] # already been validated - if handler is None: - parser.error("Invalid command '%s'" % arg) - err_msg = None - try: - try: - log(2, "Executing '%s'...", arg) - result = handler(po) - log(1, "%s: %s", arg, result) - except: - if options.show_tracebacks: - print("--show-tracebacks specified - dumping exception") - traceback.print_exc() - raise - except adsi.error as xxx_todo_changeme: - (hr, desc, exc, argerr) = xxx_todo_changeme.args - if exc: - extra_desc = exc[2] - else: - extra_desc = "" - err_msg = desc - if extra_desc: - err_msg += "\n\t" + extra_desc - except win32api.error as xxx_todo_changeme1: - (hr, func, msg) = xxx_todo_changeme1.args - err_msg = msg - if err_msg: - log(1, "Command '%s' failed: %s", arg, err_msg) - - -if __name__ == "__main__": - try: - main() - except KeyboardInterrupt: - print("*** Interrupted") diff --git a/lib/win32comext/adsi/demos/search.py b/lib/win32comext/adsi/demos/search.py deleted file mode 100644 index 22c6a258..00000000 --- a/lib/win32comext/adsi/demos/search.py +++ /dev/null @@ -1,152 +0,0 @@ -import pythoncom -import pywintypes -import win32security -from win32com.adsi import adsi, adsicon -from win32com.adsi.adsicon import * - -options = None # set to optparse options object - -ADsTypeNameMap = {} - - -def getADsTypeName(type_val): - # convert integer type to the 'typename' as known in the headerfiles. 
- if not ADsTypeNameMap: - for n, v in adsicon.__dict__.items(): - if n.startswith("ADSTYPE_"): - ADsTypeNameMap[v] = n - return ADsTypeNameMap.get(type_val, hex(type_val)) - - -def _guid_from_buffer(b): - return pywintypes.IID(b, True) - - -def _sid_from_buffer(b): - return str(pywintypes.SID(b)) - - -_null_converter = lambda x: x - -converters = { - "objectGUID": _guid_from_buffer, - "objectSid": _sid_from_buffer, - "instanceType": getADsTypeName, -} - - -def log(level, msg, *args): - if options.verbose >= level: - print("log:", msg % args) - - -def getGC(): - cont = adsi.ADsOpenObject( - "GC:", options.user, options.password, 0, adsi.IID_IADsContainer - ) - enum = adsi.ADsBuildEnumerator(cont) - # Only 1 child of the global catalog. - for e in enum: - gc = e.QueryInterface(adsi.IID_IDirectorySearch) - return gc - return None - - -def print_attribute(col_data): - prop_name, prop_type, values = col_data - if values is not None: - log(2, "property '%s' has type '%s'", prop_name, getADsTypeName(prop_type)) - value = [converters.get(prop_name, _null_converter)(v[0]) for v in values] - if len(value) == 1: - value = value[0] - print(" %s=%r" % (prop_name, value)) - else: - print(" %s is None" % (prop_name,)) - - -def search(): - gc = getGC() - if gc is None: - log(0, "Can't find the global catalog") - return - - prefs = [(ADS_SEARCHPREF_SEARCH_SCOPE, (ADS_SCOPE_SUBTREE,))] - hr, statuses = gc.SetSearchPreference(prefs) - log(3, "SetSearchPreference returned %d/%r", hr, statuses) - - if options.attributes: - attributes = options.attributes.split(",") - else: - attributes = None - - h = gc.ExecuteSearch(options.filter, attributes) - hr = gc.GetNextRow(h) - while hr != S_ADS_NOMORE_ROWS: - print("-- new row --") - if attributes is None: - # Loop over all columns returned - while 1: - col_name = gc.GetNextColumnName(h) - if col_name is None: - break - data = gc.GetColumn(h, col_name) - print_attribute(data) - else: - # loop over attributes specified. 
- for a in attributes: - try: - data = gc.GetColumn(h, a) - print_attribute(data) - except adsi.error as details: - if details[0] != E_ADS_COLUMN_NOT_SET: - raise - print_attribute((a, None, None)) - hr = gc.GetNextRow(h) - gc.CloseSearchHandle(h) - - -def main(): - global options - from optparse import OptionParser - - parser = OptionParser() - parser.add_option( - "-f", "--file", dest="filename", help="write report to FILE", metavar="FILE" - ) - parser.add_option( - "-v", - "--verbose", - action="count", - default=1, - help="increase verbosity of output", - ) - parser.add_option( - "-q", "--quiet", action="store_true", help="suppress output messages" - ) - - parser.add_option("-U", "--user", help="specify the username used to connect") - parser.add_option("-P", "--password", help="specify the password used to connect") - parser.add_option( - "", - "--filter", - default="(&(objectCategory=person)(objectClass=User))", - help="specify the search filter", - ) - parser.add_option( - "", "--attributes", help="comma sep'd list of attribute names to print" - ) - - options, args = parser.parse_args() - if options.quiet: - if options.verbose != 1: - parser.error("Can not use '--verbose' and '--quiet'") - options.verbose = 0 - - if args: - parser.error("You need not specify args") - - search() - - -if __name__ == "__main__": - main() diff --git a/lib/win32comext/adsi/demos/test.py b/lib/win32comext/adsi/demos/test.py deleted file mode 100644 index a1dba34d..00000000 --- a/lib/win32comext/adsi/demos/test.py +++ /dev/null @@ -1,273 +0,0 @@ -import string -import sys - -import pythoncom -import win32api -from win32com.adsi import * - -verbose_level = 0 - -server = "" # Must have trailing / -local_name = win32api.GetComputerName() - - -def DumpRoot(): - "Dumps the root DSE" - path = "LDAP://%srootDSE" % server - rootdse = ADsGetObject(path) - - for item in rootdse.Get("SupportedLDAPVersion"): - print("%s supports ldap version %s" % (path, item)) - - attributes = ["CurrentTime", 
"defaultNamingContext"] - for attr in attributes: - val = rootdse.Get(attr) - print(" %s=%s" % (attr, val)) - - -############################################### -# -# Code taken from article titled: -# Reading attributeSchema and classSchema Objects -def _DumpClass(child): - attrs = "Abstract lDAPDisplayName schemaIDGUID schemaNamingContext attributeSyntax oMSyntax" - _DumpTheseAttributes(child, string.split(attrs)) - - -def _DumpAttribute(child): - attrs = "lDAPDisplayName schemaIDGUID adminDescription adminDisplayName rDNAttID defaultHidingValue defaultObjectCategory systemOnly defaultSecurityDescriptor" - _DumpTheseAttributes(child, string.split(attrs)) - - -def _DumpTheseAttributes(child, attrs): - for attr in attrs: - try: - val = child.Get(attr) - except pythoncom.com_error as details: - continue - # ### - (hr, msg, exc, arg) = details - if exc and exc[2]: - msg = exc[2] - val = "" % (msg,) - if verbose_level >= 2: - print(" %s: %s=%s" % (child.Class, attr, val)) - - -def DumpSchema(): - "Dumps the default DSE schema" - # Bind to rootDSE to get the schemaNamingContext property. - path = "LDAP://%srootDSE" % server - rootdse = ADsGetObject(path) - name = rootdse.Get("schemaNamingContext") - - # Bind to the actual schema container. - path = "LDAP://" + server + name - print("Binding to", path) - ob = ADsGetObject(path) - nclasses = nattr = nsub = nunk = 0 - - # Enumerate the attribute and class objects in the schema container. - for child in ob: - # Find out if this is a class, attribute, or subSchema object. 
- class_name = child.Class - if class_name == "classSchema": - _DumpClass(child) - nclasses = nclasses + 1 - elif class_name == "attributeSchema": - _DumpAttribute(child) - nattr = nattr + 1 - elif class_name == "subSchema": - nsub = nsub + 1 - else: - print("Unknown class:", class_name) - nunk = nunk + 1 - if verbose_level: - print("Processed", nclasses, "classes") - print("Processed", nattr, "attributes") - print("Processed", nsub, "sub-schema's") - print("Processed", nunk, "unknown types") - - -def _DumpObject(ob, level=0): - prefix = " " * level - print("%s%s object: %s" % (prefix, ob.Class, ob.Name)) - # Do the directory object thing - try: - dir_ob = ADsGetObject(ob.ADsPath, IID_IDirectoryObject) - except pythoncom.com_error: - dir_ob = None - if dir_ob is not None: - info = dir_ob.GetObjectInformation() - print("%s RDN='%s', ObjectDN='%s'" % (prefix, info.RDN, info.ObjectDN)) - # Create a list of names to fetch - names = ["distinguishedName"] - attrs = dir_ob.GetObjectAttributes(names) - for attr in attrs: - for val, typ in attr.Values: - print("%s Attribute '%s' = %s" % (prefix, attr.AttrName, val)) - - for child in ob: - _DumpObject(child, level + 1) - - -def DumpAllObjects(): - "Recursively dump the entire directory!" - path = "LDAP://%srootDSE" % server - rootdse = ADsGetObject(path) - name = rootdse.Get("defaultNamingContext") - - # Bind to the actual schema container. - path = "LDAP://" + server + name - print("Binding to", path) - ob = ADsGetObject(path) - - # Enumerate the attribute and class objects in the schema container. 
- _DumpObject(ob) - - -########################################################## -# -# Code taken from article: -# Example Code for Enumerating Schema Classes, Attributes, and Syntaxes - -# Fill a map with VT_ datatypes, to give us better names: -vt_map = {} -for name, val in pythoncom.__dict__.items(): - if name[:3] == "VT_": - vt_map[val] = name - - -def DumpSchema2(): - "Dumps the schema using an alternative technique" - path = "LDAP://%sschema" % (server,) - schema = ADsGetObject(path, IID_IADsContainer) - nclass = nprop = nsyntax = 0 - for item in schema: - item_class = string.lower(item.Class) - if item_class == "class": - items = [] - if item.Abstract: - items.append("Abstract") - if item.Auxiliary: - items.append("Auxiliary") - # if item.Structural: items.append("Structural") - desc = string.join(items, ", ") - import win32com.util - - iid_name = win32com.util.IIDToInterfaceName(item.PrimaryInterface) - if verbose_level >= 2: - print( - "Class: Name=%s, Flags=%s, Primary Interface=%s" - % (item.Name, desc, iid_name) - ) - nclass = nclass + 1 - elif item_class == "property": - if item.MultiValued: - val_type = "Multi-Valued" - else: - val_type = "Single-Valued" - if verbose_level >= 2: - print("Property: Name=%s, %s" % (item.Name, val_type)) - nprop = nprop + 1 - elif item_class == "syntax": - data_type = vt_map.get(item.OleAutoDataType, "") - if verbose_level >= 2: - print("Syntax: Name=%s, Datatype = %s" % (item.Name, data_type)) - nsyntax = nsyntax + 1 - if verbose_level >= 1: - print("Processed", nclass, "classes") - print("Processed", nprop, "properties") - print("Processed", nsyntax, "syntax items") - - -def DumpGC(): - "Dumps the GC: object (whatever that is!)" - ob = ADsGetObject("GC:", IID_IADsContainer) - for sub_ob in ob: - print("GC ob: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)) - - -def DumpLocalUsers(): - "Dumps the local machine users" - path = "WinNT://%s,computer" % (local_name,) - ob = ADsGetObject(path, IID_IADsContainer) - 
ob.put_Filter(["User", "Group"]) - for sub_ob in ob: - print("User/Group: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)) - - -def DumpLocalGroups(): - "Dumps the local machine groups" - path = "WinNT://%s,computer" % (local_name,) - ob = ADsGetObject(path, IID_IADsContainer) - - ob.put_Filter(["Group"]) - for sub_ob in ob: - print("Group: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)) - # get the members - members = sub_ob.Members() - for member in members: - print(" Group member: %s (%s)" % (member.Name, member.ADsPath)) - - -def usage(tests): - import os - - print("Usage: %s [-s server ] [-v] [Test ...]" % os.path.basename(sys.argv[0])) - print(" -v : Verbose - print more information") - print(" -s : server - execute the tests against the named server") - print("where Test is one of:") - for t in tests: - print(t.__name__, ":", t.__doc__) - print() - print("If not tests are specified, all tests are run") - sys.exit(1) - - -def main(): - import getopt - import traceback - - tests = [] - for ob in globals().values(): - if type(ob) == type(main) and ob.__doc__: - tests.append(ob) - opts, args = getopt.getopt(sys.argv[1:], "s:hv") - for opt, val in opts: - if opt == "-s": - if val[-1] not in "\\/": - val = val + "/" - global server - server = val - if opt == "-h": - usage(tests) - if opt == "-v": - global verbose_level - verbose_level = verbose_level + 1 - - if len(args) == 0: - print("Running all tests - use '-h' to see command-line options...") - dotests = tests - else: - dotests = [] - for arg in args: - for t in tests: - if t.__name__ == arg: - dotests.append(t) - break - else: - print("Test '%s' unknown - skipping" % arg) - if not len(dotests): - print("Nothing to do!") - usage(tests) - for test in dotests: - try: - test() - except: - print("Test %s failed" % test.__name__) - traceback.print_exc() - - -if __name__ == "__main__": - main() diff --git a/lib/win32comext/authorization/__init__.py b/lib/win32comext/authorization/__init__.py deleted file mode 100644 index 
3a893267..00000000 --- a/lib/win32comext/authorization/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# This is a python package -# __PackageSupportBuildPath__ not needed for distutil based builds, -# but not everyone is there yet. -import win32com - -win32com.__PackageSupportBuildPath__(__path__) diff --git a/lib/win32comext/authorization/authorization.pyd b/lib/win32comext/authorization/authorization.pyd deleted file mode 100644 index 4c473bd0..00000000 Binary files a/lib/win32comext/authorization/authorization.pyd and /dev/null differ diff --git a/lib/win32comext/authorization/demos/EditSecurity.py b/lib/win32comext/authorization/demos/EditSecurity.py deleted file mode 100644 index d961bc0d..00000000 --- a/lib/win32comext/authorization/demos/EditSecurity.py +++ /dev/null @@ -1,255 +0,0 @@ -import os - -import ntsecuritycon -import pythoncom -import win32api -import win32com.server.policy -import win32con -import win32security -from ntsecuritycon import ( - CONTAINER_INHERIT_ACE, - FILE_ALL_ACCESS, - FILE_APPEND_DATA, - FILE_GENERIC_EXECUTE, - FILE_GENERIC_READ, - FILE_GENERIC_WRITE, - FILE_READ_ATTRIBUTES, - FILE_READ_DATA, - FILE_READ_EA, - FILE_WRITE_ATTRIBUTES, - FILE_WRITE_DATA, - FILE_WRITE_EA, - INHERIT_ONLY_ACE, - OBJECT_INHERIT_ACE, - PSPCB_SI_INITDIALOG, - READ_CONTROL, - SI_ACCESS_CONTAINER, - SI_ACCESS_GENERAL, - SI_ACCESS_PROPERTY, - SI_ACCESS_SPECIFIC, - SI_ADVANCED, - SI_CONTAINER, - SI_EDIT_ALL, - SI_EDIT_AUDITS, - SI_EDIT_PROPERTIES, - SI_PAGE_ADVPERM, - SI_PAGE_AUDIT, - SI_PAGE_OWNER, - SI_PAGE_PERM, - SI_PAGE_TITLE, - SI_RESET, - STANDARD_RIGHTS_EXECUTE, - STANDARD_RIGHTS_READ, - STANDARD_RIGHTS_WRITE, - SYNCHRONIZE, - WRITE_DAC, - WRITE_OWNER, -) -from pythoncom import IID_NULL -from win32com.authorization import authorization -from win32com.shell.shellcon import ( # # Msg parameter to PropertySheetPageCallback - PSPCB_CREATE, - PSPCB_RELEASE, -) -from win32security import CONTAINER_INHERIT_ACE, INHERIT_ONLY_ACE, OBJECT_INHERIT_ACE - - -class 
SecurityInformation(win32com.server.policy.DesignatedWrapPolicy): - _com_interfaces_ = [authorization.IID_ISecurityInformation] - _public_methods_ = [ - "GetObjectInformation", - "GetSecurity", - "SetSecurity", - "GetAccessRights", - "GetInheritTypes", - "MapGeneric", - "PropertySheetPageCallback", - ] - - def __init__(self, FileName): - self.FileName = FileName - self._wrap_(self) - - def GetObjectInformation(self): - """Identifies object whose security will be modified, and determines options available - to the end user""" - flags = SI_ADVANCED | SI_EDIT_ALL | SI_PAGE_TITLE | SI_RESET - if os.path.isdir(self.FileName): - flags |= SI_CONTAINER - hinstance = 0 ## handle to module containing string resources - servername = "" ## name of authenticating server if not local machine - objectname = os.path.split(self.FileName)[1] - pagetitle = "Python ACL Editor" - if os.path.isdir(self.FileName): - pagetitle += " (dir)" - else: - pagetitle += " (file)" - objecttype = IID_NULL - return flags, hinstance, servername, objectname, pagetitle, objecttype - - def GetSecurity(self, requestedinfo, bdefault): - """Requests the existing permissions for object""" - if bdefault: - ## This is invoked if the 'Default' button is pressed (only present if SI_RESET is passed - ## with the flags in GetObjectInfo). 
Passing an empty SD with a NULL Dacl - ## should cause inherited ACL from parent dir or default dacl from user's token to be used - return win32security.SECURITY_DESCRIPTOR() - else: - ## GetFileSecurity sometimes fails to return flags indicating that an ACE is inherited - return win32security.GetNamedSecurityInfo( - self.FileName, win32security.SE_FILE_OBJECT, requestedinfo - ) - - def SetSecurity(self, requestedinfo, sd): - """Applies permissions to the object""" - owner = sd.GetSecurityDescriptorOwner() - group = sd.GetSecurityDescriptorGroup() - dacl = sd.GetSecurityDescriptorDacl() - sacl = sd.GetSecurityDescriptorSacl() - win32security.SetNamedSecurityInfo( - self.FileName, - win32security.SE_FILE_OBJECT, - requestedinfo, - owner, - group, - dacl, - sacl, - ) - ## should also handle recursive operations here - - def GetAccessRights(self, objecttype, flags): - """Returns a tuple of (AccessRights, DefaultAccess), where AccessRights is a sequence of tuples representing - SI_ACCESS structs, containing (guid, access mask, Name, flags). DefaultAccess indicates which of the - AccessRights will be used initially when a new ACE is added (zero based). - Flags can contain SI_ACCESS_SPECIFIC,SI_ACCESS_GENERAL,SI_ACCESS_CONTAINER,SI_ACCESS_PROPERTY, - CONTAINER_INHERIT_ACE,INHERIT_ONLY_ACE,OBJECT_INHERIT_ACE - """ - ## input flags: SI_ADVANCED,SI_EDIT_AUDITS,SI_EDIT_PROPERTIES indicating which property sheet is requesting the rights - if (objecttype is not None) and (objecttype != IID_NULL): - ## Should not be true for file objects. 
Usually only used with DS objects that support security for - ## their properties - raise NotImplementedError("Object type is not supported") - - if os.path.isdir(self.FileName): - file_append_data_desc = "Create subfolders" - file_write_data_desc = "Create Files" - else: - file_append_data_desc = "Append data" - file_write_data_desc = "Write data" - - accessrights = [ - ( - IID_NULL, - FILE_GENERIC_READ, - "Generic read", - SI_ACCESS_GENERAL - | SI_ACCESS_SPECIFIC - | OBJECT_INHERIT_ACE - | CONTAINER_INHERIT_ACE, - ), - ( - IID_NULL, - FILE_GENERIC_WRITE, - "Generic write", - SI_ACCESS_GENERAL - | SI_ACCESS_SPECIFIC - | OBJECT_INHERIT_ACE - | CONTAINER_INHERIT_ACE, - ), - ( - IID_NULL, - win32con.DELETE, - "Delete", - SI_ACCESS_SPECIFIC | OBJECT_INHERIT_ACE | CONTAINER_INHERIT_ACE, - ), - ( - IID_NULL, - WRITE_OWNER, - "Change owner", - SI_ACCESS_SPECIFIC | OBJECT_INHERIT_ACE | CONTAINER_INHERIT_ACE, - ), - ( - IID_NULL, - READ_CONTROL, - "Read Permissions", - SI_ACCESS_SPECIFIC | OBJECT_INHERIT_ACE | CONTAINER_INHERIT_ACE, - ), - ( - IID_NULL, - WRITE_DAC, - "Change permissions", - SI_ACCESS_SPECIFIC | OBJECT_INHERIT_ACE | CONTAINER_INHERIT_ACE, - ), - ( - IID_NULL, - FILE_APPEND_DATA, - file_append_data_desc, - SI_ACCESS_SPECIFIC | OBJECT_INHERIT_ACE | CONTAINER_INHERIT_ACE, - ), - ( - IID_NULL, - FILE_WRITE_DATA, - file_write_data_desc, - SI_ACCESS_SPECIFIC | OBJECT_INHERIT_ACE | CONTAINER_INHERIT_ACE, - ), - ] - return (accessrights, 0) - - def MapGeneric(self, guid, aceflags, mask): - """Converts generic access rights to specific rights. This implementation uses standard file system rights, - but you can map them any way that suits your application. - """ - return win32security.MapGenericMask( - mask, - ( - FILE_GENERIC_READ, - FILE_GENERIC_WRITE, - FILE_GENERIC_EXECUTE, - FILE_ALL_ACCESS, - ), - ) - - def GetInheritTypes(self): - """Specifies which types of ACE inheritance are supported. 
- Returns a sequence of tuples representing SI_INHERIT_TYPE structs, containing - (object type guid, inheritance flags, display name). Guid is usually only used with - Directory Service objects. - """ - return ( - (IID_NULL, 0, "Only current object"), - (IID_NULL, OBJECT_INHERIT_ACE, "Files inherit permissions"), - (IID_NULL, CONTAINER_INHERIT_ACE, "Sub Folders inherit permissions"), - ( - IID_NULL, - CONTAINER_INHERIT_ACE | OBJECT_INHERIT_ACE, - "Files and subfolders", - ), - ) - - def PropertySheetPageCallback(self, hwnd, msg, pagetype): - """Invoked each time a property sheet page is created or destroyed.""" - ## page types from SI_PAGE_TYPE enum: SI_PAGE_PERM SI_PAGE_ADVPERM SI_PAGE_AUDIT SI_PAGE_OWNER - ## msg: PSPCB_CREATE, PSPCB_RELEASE, PSPCB_SI_INITDIALOG - return None - - def EditSecurity(self, owner_hwnd=0): - """Creates an ACL editor dialog based on parameters returned by interface methods""" - isi = pythoncom.WrapObject( - self, authorization.IID_ISecurityInformation, pythoncom.IID_IUnknown - ) - authorization.EditSecurity(owner_hwnd, isi) - - -## folder permissions -temp_dir = win32api.GetTempPath() -dir_name = win32api.GetTempFileName(temp_dir, "isi")[0] -print(dir_name) -os.remove(dir_name) -os.mkdir(dir_name) -si = SecurityInformation(dir_name) -si.EditSecurity() - -## file permissions -fname = win32api.GetTempFileName(dir_name, "isi")[0] -si = SecurityInformation(fname) -si.EditSecurity() diff --git a/lib/win32comext/authorization/demos/EditServiceSecurity.py b/lib/win32comext/authorization/demos/EditServiceSecurity.py deleted file mode 100644 index 86d60414..00000000 --- a/lib/win32comext/authorization/demos/EditServiceSecurity.py +++ /dev/null @@ -1,242 +0,0 @@ -""" -Implements a permissions editor for services. 
-Service can be specified as plain name for local machine, -or as a remote service of the form \\machinename\service -""" - -import os - -import ntsecuritycon -import pythoncom -import win32api -import win32com.server.policy -import win32con -import win32security -import win32service -from win32com.authorization import authorization - -SERVICE_GENERIC_EXECUTE = ( - win32service.SERVICE_START - | win32service.SERVICE_STOP - | win32service.SERVICE_PAUSE_CONTINUE - | win32service.SERVICE_USER_DEFINED_CONTROL -) -SERVICE_GENERIC_READ = ( - win32service.SERVICE_QUERY_CONFIG - | win32service.SERVICE_QUERY_STATUS - | win32service.SERVICE_INTERROGATE - | win32service.SERVICE_ENUMERATE_DEPENDENTS -) -SERVICE_GENERIC_WRITE = win32service.SERVICE_CHANGE_CONFIG - -from ntsecuritycon import ( - CONTAINER_INHERIT_ACE, - INHERIT_ONLY_ACE, - OBJECT_INHERIT_ACE, - PSPCB_SI_INITDIALOG, - READ_CONTROL, - SI_ACCESS_CONTAINER, - SI_ACCESS_GENERAL, - SI_ACCESS_PROPERTY, - SI_ACCESS_SPECIFIC, - SI_ADVANCED, - SI_CONTAINER, - SI_EDIT_ALL, - SI_EDIT_AUDITS, - SI_EDIT_PROPERTIES, - SI_PAGE_ADVPERM, - SI_PAGE_AUDIT, - SI_PAGE_OWNER, - SI_PAGE_PERM, - SI_PAGE_TITLE, - SI_RESET, - STANDARD_RIGHTS_EXECUTE, - STANDARD_RIGHTS_READ, - STANDARD_RIGHTS_WRITE, - WRITE_DAC, - WRITE_OWNER, -) -from pythoncom import IID_NULL -from win32com.shell.shellcon import ( # # Msg parameter to PropertySheetPageCallback - PSPCB_CREATE, - PSPCB_RELEASE, -) -from win32security import CONTAINER_INHERIT_ACE, INHERIT_ONLY_ACE, OBJECT_INHERIT_ACE - - -class ServiceSecurity(win32com.server.policy.DesignatedWrapPolicy): - _com_interfaces_ = [authorization.IID_ISecurityInformation] - _public_methods_ = [ - "GetObjectInformation", - "GetSecurity", - "SetSecurity", - "GetAccessRights", - "GetInheritTypes", - "MapGeneric", - "PropertySheetPageCallback", - ] - - def __init__(self, ServiceName): - self.ServiceName = ServiceName - self._wrap_(self) - - def GetObjectInformation(self): - """Identifies object whose security will be 
modified, and determines options available - to the end user""" - flags = SI_ADVANCED | SI_EDIT_ALL | SI_PAGE_TITLE | SI_RESET - hinstance = 0 ## handle to module containing string resources - servername = "" ## name of authenticating server if not local machine - - ## service name can contain remote machine name of the form \\Server\ServiceName - objectname = os.path.split(self.ServiceName)[1] - pagetitle = "Service Permissions for " + self.ServiceName - objecttype = IID_NULL - return flags, hinstance, servername, objectname, pagetitle, objecttype - - def GetSecurity(self, requestedinfo, bdefault): - """Requests the existing permissions for object""" - if bdefault: - return win32security.SECURITY_DESCRIPTOR() - else: - return win32security.GetNamedSecurityInfo( - self.ServiceName, win32security.SE_SERVICE, requestedinfo - ) - - def SetSecurity(self, requestedinfo, sd): - """Applies permissions to the object""" - owner = sd.GetSecurityDescriptorOwner() - group = sd.GetSecurityDescriptorGroup() - dacl = sd.GetSecurityDescriptorDacl() - sacl = sd.GetSecurityDescriptorSacl() - win32security.SetNamedSecurityInfo( - self.ServiceName, - win32security.SE_SERVICE, - requestedinfo, - owner, - group, - dacl, - sacl, - ) - - def GetAccessRights(self, objecttype, flags): - """Returns a tuple of (AccessRights, DefaultAccess), where AccessRights is a sequence of tuples representing - SI_ACCESS structs, containing (guid, access mask, Name, flags). DefaultAccess indicates which of the - AccessRights will be used initially when a new ACE is added (zero based). 
- Flags can contain SI_ACCESS_SPECIFIC,SI_ACCESS_GENERAL,SI_ACCESS_CONTAINER,SI_ACCESS_PROPERTY, - CONTAINER_INHERIT_ACE,INHERIT_ONLY_ACE,OBJECT_INHERIT_ACE - """ - ## input flags: SI_ADVANCED,SI_EDIT_AUDITS,SI_EDIT_PROPERTIES indicating which property sheet is requesting the rights - if (objecttype is not None) and (objecttype != IID_NULL): - ## Not relevent for services - raise NotImplementedError("Object type is not supported") - - ## ???? for some reason, the DACL for a service will not retain ACCESS_SYSTEM_SECURITY in an ACE ???? - ## (IID_NULL, win32con.ACCESS_SYSTEM_SECURITY, 'View/change audit settings', SI_ACCESS_SPECIFIC), - - accessrights = [ - ( - IID_NULL, - win32service.SERVICE_ALL_ACCESS, - "Full control", - SI_ACCESS_GENERAL, - ), - (IID_NULL, SERVICE_GENERIC_READ, "Generic read", SI_ACCESS_GENERAL), - (IID_NULL, SERVICE_GENERIC_WRITE, "Generic write", SI_ACCESS_GENERAL), - ( - IID_NULL, - SERVICE_GENERIC_EXECUTE, - "Start/Stop/Pause service", - SI_ACCESS_GENERAL, - ), - (IID_NULL, READ_CONTROL, "Read Permissions", SI_ACCESS_GENERAL), - (IID_NULL, WRITE_DAC, "Change permissions", SI_ACCESS_GENERAL), - (IID_NULL, WRITE_OWNER, "Change owner", SI_ACCESS_GENERAL), - (IID_NULL, win32con.DELETE, "Delete service", SI_ACCESS_GENERAL), - (IID_NULL, win32service.SERVICE_START, "Start service", SI_ACCESS_SPECIFIC), - (IID_NULL, win32service.SERVICE_STOP, "Stop service", SI_ACCESS_SPECIFIC), - ( - IID_NULL, - win32service.SERVICE_PAUSE_CONTINUE, - "Pause/unpause service", - SI_ACCESS_SPECIFIC, - ), - ( - IID_NULL, - win32service.SERVICE_USER_DEFINED_CONTROL, - "Execute user defined operations", - SI_ACCESS_SPECIFIC, - ), - ( - IID_NULL, - win32service.SERVICE_QUERY_CONFIG, - "Read configuration", - SI_ACCESS_SPECIFIC, - ), - ( - IID_NULL, - win32service.SERVICE_CHANGE_CONFIG, - "Change configuration", - SI_ACCESS_SPECIFIC, - ), - ( - IID_NULL, - win32service.SERVICE_ENUMERATE_DEPENDENTS, - "List dependent services", - SI_ACCESS_SPECIFIC, - ), - ( - IID_NULL, - 
win32service.SERVICE_QUERY_STATUS, - "Query status", - SI_ACCESS_SPECIFIC, - ), - ( - IID_NULL, - win32service.SERVICE_INTERROGATE, - "Query status (immediate)", - SI_ACCESS_SPECIFIC, - ), - ] - return (accessrights, 0) - - def MapGeneric(self, guid, aceflags, mask): - """Converts generic access rights to specific rights.""" - return win32security.MapGenericMask( - mask, - ( - SERVICE_GENERIC_READ, - SERVICE_GENERIC_WRITE, - SERVICE_GENERIC_EXECUTE, - win32service.SERVICE_ALL_ACCESS, - ), - ) - - def GetInheritTypes(self): - """Specifies which types of ACE inheritance are supported. - Services don't use any inheritance - """ - return ((IID_NULL, 0, "Only current object"),) - - def PropertySheetPageCallback(self, hwnd, msg, pagetype): - """Invoked each time a property sheet page is created or destroyed.""" - ## page types from SI_PAGE_TYPE enum: SI_PAGE_PERM SI_PAGE_ADVPERM SI_PAGE_AUDIT SI_PAGE_OWNER - ## msg: PSPCB_CREATE, PSPCB_RELEASE, PSPCB_SI_INITDIALOG - return None - - def EditSecurity(self, owner_hwnd=0): - """Creates an ACL editor dialog based on parameters returned by interface methods""" - isi = pythoncom.WrapObject( - self, authorization.IID_ISecurityInformation, pythoncom.IID_IUnknown - ) - authorization.EditSecurity(owner_hwnd, isi) - - -if __name__ == "__main__": - # Find the first service on local machine and edit its permissions - scm = win32service.OpenSCManager( - None, None, win32service.SC_MANAGER_ENUMERATE_SERVICE - ) - svcs = win32service.EnumServicesStatus(scm) - win32service.CloseServiceHandle(scm) - si = ServiceSecurity(svcs[0][0]) - si.EditSecurity() diff --git a/lib/win32comext/axcontrol/__init__.py b/lib/win32comext/axcontrol/__init__.py deleted file mode 100644 index 8d5db8b9..00000000 --- a/lib/win32comext/axcontrol/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# See if we have a special directory for the binaries (for developers) -import win32com - -win32com.__PackageSupportBuildPath__(__path__) diff --git 
a/lib/win32comext/axcontrol/axcontrol.pyd b/lib/win32comext/axcontrol/axcontrol.pyd deleted file mode 100644 index 40cb13ef..00000000 Binary files a/lib/win32comext/axcontrol/axcontrol.pyd and /dev/null differ diff --git a/lib/win32comext/axdebug/__init__.py b/lib/win32comext/axdebug/__init__.py deleted file mode 100644 index 8d5db8b9..00000000 --- a/lib/win32comext/axdebug/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# See if we have a special directory for the binaries (for developers) -import win32com - -win32com.__PackageSupportBuildPath__(__path__) diff --git a/lib/win32comext/axdebug/adb.py b/lib/win32comext/axdebug/adb.py deleted file mode 100644 index 812c23e6..00000000 --- a/lib/win32comext/axdebug/adb.py +++ /dev/null @@ -1,480 +0,0 @@ -"""The glue between the Python debugger interface and the Active Debugger interface -""" -import _thread -import bdb -import os -import sys -import traceback - -import pythoncom -import win32api -import win32com.client.connect -from win32com.axdebug.util import _wrap, _wrap_remove, trace -from win32com.server.util import unwrap - -from . 
import axdebug, gateways, stackframe - - -def fnull(*args): - pass - - -try: - os.environ["DEBUG_AXDEBUG"] - debugging = 1 -except KeyError: - debugging = 0 - -traceenter = fnull # trace enter of functions -tracev = fnull # verbose trace - -if debugging: - traceenter = trace # trace enter of functions - tracev = trace # verbose trace - - -class OutputReflector: - def __init__(self, file, writefunc): - self.writefunc = writefunc - self.file = file - - def __getattr__(self, name): - return getattr(self.file, name) - - def write(self, message): - self.writefunc(message) - self.file.write(message) - - -def _dumpf(frame): - if frame is None: - return "" - else: - addn = "(with trace!)" - if frame.f_trace is None: - addn = " **No Trace Set **" - return "Frame at %d, file %s, line: %d%s" % ( - id(frame), - frame.f_code.co_filename, - frame.f_lineno, - addn, - ) - - -g_adb = None - - -def OnSetBreakPoint(codeContext, breakPointState, lineNo): - try: - fileName = codeContext.codeContainer.GetFileName() - # inject the code into linecache. - import linecache - - linecache.cache[fileName] = 0, 0, codeContext.codeContainer.GetText(), fileName - g_adb._OnSetBreakPoint(fileName, codeContext, breakPointState, lineNo + 1) - except: - traceback.print_exc() - - -class Adb(bdb.Bdb, gateways.RemoteDebugApplicationEvents): - def __init__(self): - self.debugApplication = None - self.debuggingThread = None - self.debuggingThreadStateHandle = None - self.stackSnifferCookie = self.stackSniffer = None - self.codeContainerProvider = None - self.debuggingThread = None - self.breakFlags = None - self.breakReason = None - self.appDebugger = None - self.appEventConnection = None - self.logicalbotframe = None # Anything at this level or below does not exist! - self.currentframe = None # The frame we are currently in. - self.recursiveData = [] # Data saved for each reentery on this thread. 
- bdb.Bdb.__init__(self) - self._threadprotectlock = _thread.allocate_lock() - self.reset() - - def canonic(self, fname): - if fname[0] == "<": - return fname - return bdb.Bdb.canonic(self, fname) - - def reset(self): - traceenter("adb.reset") - bdb.Bdb.reset(self) - - def __xxxxx__set_break(self, filename, lineno, cond=None): - # As per standard one, except no linecache checking! - if filename not in self.breaks: - self.breaks[filename] = [] - list = self.breaks[filename] - if lineno in list: - return "There is already a breakpoint there!" - list.append(lineno) - if cond is not None: - self.cbreaks[filename, lineno] = cond - - def stop_here(self, frame): - traceenter("stop_here", _dumpf(frame), _dumpf(self.stopframe)) - # As per bdb.stop_here, except for logicalbotframe - ## if self.stopframe is None: - ## return 1 - if frame is self.stopframe: - return 1 - - tracev("stop_here said 'No'!") - return 0 - - def break_here(self, frame): - traceenter("break_here", self.breakFlags, _dumpf(frame)) - self.breakReason = None - if self.breakFlags == axdebug.APPBREAKFLAG_DEBUGGER_HALT: - self.breakReason = axdebug.BREAKREASON_DEBUGGER_HALT - elif self.breakFlags == axdebug.APPBREAKFLAG_DEBUGGER_BLOCK: - self.breakReason = axdebug.BREAKREASON_DEBUGGER_BLOCK - elif self.breakFlags == axdebug.APPBREAKFLAG_STEP: - self.breakReason = axdebug.BREAKREASON_STEP - else: - print("Calling base 'break_here' with", self.breaks) - if bdb.Bdb.break_here(self, frame): - self.breakReason = axdebug.BREAKREASON_BREAKPOINT - return self.breakReason is not None - - def break_anywhere(self, frame): - traceenter("break_anywhere", _dumpf(frame)) - if self.breakFlags == axdebug.APPBREAKFLAG_DEBUGGER_HALT: - self.breakReason = axdebug.BREAKREASON_DEBUGGER_HALT - return 1 - rc = bdb.Bdb.break_anywhere(self, frame) - tracev("break_anywhere", _dumpf(frame), "returning", rc) - return rc - - def dispatch_return(self, frame, arg): - traceenter("dispatch_return", _dumpf(frame), arg) - if 
self.logicalbotframe is frame: - # We dont want to debug parent frames. - tracev("dispatch_return resetting sys.trace") - sys.settrace(None) - return - # self.bSetTrace = 0 - self.currentframe = frame.f_back - return bdb.Bdb.dispatch_return(self, frame, arg) - - def dispatch_line(self, frame): - traceenter("dispatch_line", _dumpf(frame), _dumpf(self.botframe)) - # trace("logbotframe is", _dumpf(self.logicalbotframe), "botframe is", self.botframe) - if frame is self.logicalbotframe: - trace("dispatch_line", _dumpf(frame), "for bottom frame returing tracer") - # The next code executed in the frame above may be a builtin (eg, apply()) - # in which sys.trace needs to be set. - sys.settrace(self.trace_dispatch) - # And return the tracer incase we are about to execute Python code, - # in which case sys tracer is ignored! - return self.trace_dispatch - - if self.codeContainerProvider.FromFileName(frame.f_code.co_filename) is None: - trace( - "dispatch_line has no document for", _dumpf(frame), "- skipping trace!" - ) - return None - self.currentframe = ( - frame # So the stack sniffer knows our most recent, debuggable code. - ) - return bdb.Bdb.dispatch_line(self, frame) - - def dispatch_call(self, frame, arg): - traceenter("dispatch_call", _dumpf(frame)) - frame.f_locals["__axstack_address__"] = axdebug.GetStackAddress() - if frame is self.botframe: - trace("dispatch_call is self.botframe - returning tracer") - return self.trace_dispatch - # Not our bottom frame. If we have a document for it, - # then trace it, otherwise run at full speed. - if self.codeContainerProvider.FromFileName(frame.f_code.co_filename) is None: - trace( - "dispatch_call has no document for", _dumpf(frame), "- skipping trace!" 
- ) - ## sys.settrace(None) - return None - return self.trace_dispatch - - # rc = bdb.Bdb.dispatch_call(self, frame, arg) - # trace("dispatch_call", _dumpf(frame),"returned",rc) - # return rc - - def trace_dispatch(self, frame, event, arg): - traceenter("trace_dispatch", _dumpf(frame), event, arg) - if self.debugApplication is None: - trace("trace_dispatch has no application!") - return # None - return bdb.Bdb.trace_dispatch(self, frame, event, arg) - - # - # The user functions do bugger all! - # - # def user_call(self, frame, argument_list): - # traceenter("user_call",_dumpf(frame)) - - def user_line(self, frame): - traceenter("user_line", _dumpf(frame)) - # Traces at line zero - if frame.f_lineno != 0: - breakReason = self.breakReason - if breakReason is None: - breakReason = axdebug.BREAKREASON_STEP - self._HandleBreakPoint(frame, None, breakReason) - - def user_return(self, frame, return_value): - # traceenter("user_return",_dumpf(frame),return_value) - bdb.Bdb.user_return(self, frame, return_value) - - def user_exception(self, frame, exc_info): - # traceenter("user_exception") - bdb.Bdb.user_exception(self, frame, exc_info) - - def _HandleBreakPoint(self, frame, tb, reason): - traceenter( - "Calling HandleBreakPoint with reason", reason, "at frame", _dumpf(frame) - ) - traceenter(" Current frame is", _dumpf(self.currentframe)) - try: - resumeAction = self.debugApplication.HandleBreakPoint(reason) - tracev("HandleBreakPoint returned with ", resumeAction) - except pythoncom.com_error as details: - # Eeek - the debugger is dead, or something serious is happening. 
- # Assume we should continue - resumeAction = axdebug.BREAKRESUMEACTION_CONTINUE - trace("HandleBreakPoint FAILED with", details) - - self.stack = [] - self.curindex = 0 - if resumeAction == axdebug.BREAKRESUMEACTION_ABORT: - self.set_quit() - elif resumeAction == axdebug.BREAKRESUMEACTION_CONTINUE: - tracev("resume action is continue") - self.set_continue() - elif resumeAction == axdebug.BREAKRESUMEACTION_STEP_INTO: - tracev("resume action is step") - self.set_step() - elif resumeAction == axdebug.BREAKRESUMEACTION_STEP_OVER: - tracev("resume action is next") - self.set_next(frame) - elif resumeAction == axdebug.BREAKRESUMEACTION_STEP_OUT: - tracev("resume action is stop out") - self.set_return(frame) - else: - raise ValueError("unknown resume action flags") - self.breakReason = None - - def set_trace(self): - self.breakReason = axdebug.BREAKREASON_LANGUAGE_INITIATED - bdb.Bdb.set_trace(self) - - def CloseApp(self): - traceenter("ClosingApp") - self.reset() - self.logicalbotframe = None - if self.stackSnifferCookie is not None: - try: - self.debugApplication.RemoveStackFrameSniffer(self.stackSnifferCookie) - - except pythoncom.com_error: - trace( - "*** Could not RemoveStackFrameSniffer %d" - % (self.stackSnifferCookie) - ) - if self.stackSniffer: - _wrap_remove(self.stackSniffer) - self.stackSnifferCookie = self.stackSniffer = None - - if self.appEventConnection is not None: - self.appEventConnection.Disconnect() - self.appEventConnection = None - self.debugApplication = None - self.appDebugger = None - if self.codeContainerProvider is not None: - self.codeContainerProvider.Close() - self.codeContainerProvider = None - - def AttachApp(self, debugApplication, codeContainerProvider): - # traceenter("AttachApp", debugApplication, codeContainerProvider) - self.codeContainerProvider = codeContainerProvider - self.debugApplication = debugApplication - self.stackSniffer = _wrap( - stackframe.DebugStackFrameSniffer(self), axdebug.IID_IDebugStackFrameSniffer - ) - 
self.stackSnifferCookie = debugApplication.AddStackFrameSniffer( - self.stackSniffer - ) - # trace("StackFrameSniffer added (%d)" % self.stackSnifferCookie) - - # Connect to the application events. - self.appEventConnection = win32com.client.connect.SimpleConnection( - self.debugApplication, self, axdebug.IID_IRemoteDebugApplicationEvents - ) - - def ResetAXDebugging(self): - traceenter("ResetAXDebugging", self, "with refcount", len(self.recursiveData)) - if win32api.GetCurrentThreadId() != self.debuggingThread: - trace("ResetAXDebugging called on other thread") - return - - if len(self.recursiveData) == 0: - # print "ResetAXDebugging called for final time." - self.logicalbotframe = None - self.debuggingThread = None - self.currentframe = None - self.debuggingThreadStateHandle = None - return - - ( - self.logbotframe, - self.stopframe, - self.currentframe, - self.debuggingThreadStateHandle, - ) = self.recursiveData[0] - self.recursiveData = self.recursiveData[1:] - - def SetupAXDebugging(self, baseFrame=None, userFrame=None): - """Get ready for potential debugging. Must be called on the thread - that is being debugged. - """ - # userFrame is for non AXScript debugging. This is the first frame of the - # users code. - if userFrame is None: - userFrame = baseFrame - else: - # We have missed the "dispatch_call" function, so set this up now! - userFrame.f_locals["__axstack_address__"] = axdebug.GetStackAddress() - - traceenter("SetupAXDebugging", self) - self._threadprotectlock.acquire() - try: - thisThread = win32api.GetCurrentThreadId() - if self.debuggingThread is None: - self.debuggingThread = thisThread - else: - if self.debuggingThread != thisThread: - trace("SetupAXDebugging called on other thread - ignored!") - return - # push our context. 
- self.recursiveData.insert( - 0, - ( - self.logicalbotframe, - self.stopframe, - self.currentframe, - self.debuggingThreadStateHandle, - ), - ) - finally: - self._threadprotectlock.release() - - trace("SetupAXDebugging has base frame as", _dumpf(baseFrame)) - self.botframe = baseFrame - self.stopframe = userFrame - self.logicalbotframe = baseFrame - self.currentframe = None - self.debuggingThreadStateHandle = axdebug.GetThreadStateHandle() - - self._BreakFlagsChanged() - - # RemoteDebugApplicationEvents - def OnConnectDebugger(self, appDebugger): - traceenter("OnConnectDebugger", appDebugger) - self.appDebugger = appDebugger - # Reflect output to appDebugger - writefunc = lambda s: appDebugger.onDebugOutput(s) - sys.stdout = OutputReflector(sys.stdout, writefunc) - sys.stderr = OutputReflector(sys.stderr, writefunc) - - def OnDisconnectDebugger(self): - traceenter("OnDisconnectDebugger") - # Stop reflecting output - if isinstance(sys.stdout, OutputReflector): - sys.stdout = sys.stdout.file - if isinstance(sys.stderr, OutputReflector): - sys.stderr = sys.stderr.file - self.appDebugger = None - self.set_quit() - - def OnSetName(self, name): - traceenter("OnSetName", name) - - def OnDebugOutput(self, string): - traceenter("OnDebugOutput", string) - - def OnClose(self): - traceenter("OnClose") - - def OnEnterBreakPoint(self, rdat): - traceenter("OnEnterBreakPoint", rdat) - - def OnLeaveBreakPoint(self, rdat): - traceenter("OnLeaveBreakPoint", rdat) - - def OnCreateThread(self, rdat): - traceenter("OnCreateThread", rdat) - - def OnDestroyThread(self, rdat): - traceenter("OnDestroyThread", rdat) - - def OnBreakFlagChange(self, abf, rdat): - traceenter("Debugger OnBreakFlagChange", abf, rdat) - self.breakFlags = abf - self._BreakFlagsChanged() - - def _BreakFlagsChanged(self): - traceenter( - "_BreakFlagsChanged to %s with our thread = %s, and debugging thread = %s" - % (self.breakFlags, self.debuggingThread, win32api.GetCurrentThreadId()) - ) - trace("_BreakFlagsChanged 
has breaks", self.breaks) - # If a request comes on our debugging thread, then do it now! - # if self.debuggingThread!=win32api.GetCurrentThreadId(): - # return - - if len(self.breaks) or self.breakFlags: - if self.logicalbotframe: - trace("BreakFlagsChange with bot frame", _dumpf(self.logicalbotframe)) - # We have frames not to be debugged (eg, Scripting engine frames - # (sys.settrace will be set when out logicalbotframe is hit - - # this may not be the right thing to do, as it may not cause the - # immediate break we desire.) - self.logicalbotframe.f_trace = self.trace_dispatch - else: - trace("BreakFlagsChanged, but no bottom frame") - if self.stopframe is not None: - self.stopframe.f_trace = self.trace_dispatch - # If we have the thread-state for the thread being debugged, then - # we dynamically set its trace function - it is possible that the thread - # being debugged is in a blocked call (eg, a message box) and we - # want to hit the debugger the instant we return - if ( - self.debuggingThreadStateHandle is not None - and self.breakFlags - and self.debuggingThread != win32api.GetCurrentThreadId() - ): - axdebug.SetThreadStateTrace( - self.debuggingThreadStateHandle, self.trace_dispatch - ) - - def _OnSetBreakPoint(self, key, codeContext, bps, lineNo): - traceenter("_OnSetBreakPoint", self, key, codeContext, bps, lineNo) - if bps == axdebug.BREAKPOINT_ENABLED: - problem = self.set_break(key, lineNo) - if problem: - print("*** set_break failed -", problem) - trace("_OnSetBreakPoint just set BP and has breaks", self.breaks) - else: - self.clear_break(key, lineNo) - self._BreakFlagsChanged() - trace("_OnSetBreakPoint leaving with breaks", self.breaks) - - -def Debugger(): - global g_adb - if g_adb is None: - g_adb = Adb() - return g_adb diff --git a/lib/win32comext/axdebug/axdebug.pyd b/lib/win32comext/axdebug/axdebug.pyd deleted file mode 100644 index 9a848190..00000000 Binary files a/lib/win32comext/axdebug/axdebug.pyd and /dev/null differ diff --git 
a/lib/win32comext/axdebug/codecontainer.py b/lib/win32comext/axdebug/codecontainer.py deleted file mode 100644 index a7a2e245..00000000 --- a/lib/win32comext/axdebug/codecontainer.py +++ /dev/null @@ -1,278 +0,0 @@ -"""A utility class for a code container. - -A code container is a class which holds source code for a debugger. It knows how -to color the text, and also how to translate lines into offsets, and back. -""" - -import sys -import tokenize - -import win32api -import winerror -from win32com.axdebug import axdebug -from win32com.server.exception import Exception - -from . import contexts -from .util import RaiseNotImpl, _wrap - -_keywords = {} # set of Python keywords -for name in """ - and assert break class continue def del elif else except exec - finally for from global if import in is lambda not - or pass print raise return try while - """.split(): - _keywords[name] = 1 - - -class SourceCodeContainer: - def __init__( - self, - text, - fileName="", - sourceContext=0, - startLineNumber=0, - site=None, - debugDocument=None, - ): - self.sourceContext = sourceContext # The source context added by a smart host. - self.text = text - if text: - self._buildlines() - self.nextLineNo = 0 - self.fileName = fileName - self.codeContexts = {} - self.site = site - self.startLineNumber = startLineNumber - self.debugDocument = None - - def _Close(self): - self.text = self.lines = self.lineOffsets = None - self.codeContexts = None - self.debugDocument = None - self.site = None - self.sourceContext = None - - def GetText(self): - return self.text - - def GetName(self, dnt): - assert 0, "You must subclass this" - - def GetFileName(self): - return self.fileName - - def GetPositionOfLine(self, cLineNumber): - self.GetText() # Prime us. - try: - return self.lineOffsets[cLineNumber] - except IndexError: - raise Exception(scode=winerror.S_FALSE) - - def GetLineOfPosition(self, charPos): - self.GetText() # Prime us. 
- lastOffset = 0 - lineNo = 0 - for lineOffset in self.lineOffsets[1:]: - if lineOffset > charPos: - break - lastOffset = lineOffset - lineNo = lineNo + 1 - else: # for not broken. - # print "Cant find", charPos, "in", self.lineOffsets - raise Exception(scode=winerror.S_FALSE) - # print "GLOP ret=",lineNo, (charPos-lastOffset) - return lineNo, (charPos - lastOffset) - - def GetNextLine(self): - if self.nextLineNo >= len(self.lines): - self.nextLineNo = 0 # auto-reset. - return "" - rc = self.lines[self.nextLineNo] - self.nextLineNo = self.nextLineNo + 1 - return rc - - def GetLine(self, num): - self.GetText() # Prime us. - return self.lines[num] - - def GetNumChars(self): - return len(self.GetText()) - - def GetNumLines(self): - self.GetText() # Prime us. - return len(self.lines) - - def _buildline(self, pos): - i = self.text.find("\n", pos) - if i < 0: - newpos = len(self.text) - else: - newpos = i + 1 - r = self.text[pos:newpos] - return r, newpos - - def _buildlines(self): - self.lines = [] - self.lineOffsets = [0] - line, pos = self._buildline(0) - while line: - self.lines.append(line) - self.lineOffsets.append(pos) - line, pos = self._buildline(pos) - - def _ProcessToken(self, type, token, spos, epos, line): - srow, scol = spos - erow, ecol = epos - self.GetText() # Prime us. - linenum = srow - 1 # Lines zero based for us too. 
- realCharPos = self.lineOffsets[linenum] + scol - numskipped = realCharPos - self.lastPos - if numskipped == 0: - pass - elif numskipped == 1: - self.attrs.append(axdebug.SOURCETEXT_ATTR_COMMENT) - else: - self.attrs.append((axdebug.SOURCETEXT_ATTR_COMMENT, numskipped)) - kwSize = len(token) - self.lastPos = realCharPos + kwSize - attr = 0 - - if type == tokenize.NAME: - if token in _keywords: - attr = axdebug.SOURCETEXT_ATTR_KEYWORD - elif type == tokenize.STRING: - attr = axdebug.SOURCETEXT_ATTR_STRING - elif type == tokenize.NUMBER: - attr = axdebug.SOURCETEXT_ATTR_NUMBER - elif type == tokenize.OP: - attr = axdebug.SOURCETEXT_ATTR_OPERATOR - elif type == tokenize.COMMENT: - attr = axdebug.SOURCETEXT_ATTR_COMMENT - # else attr remains zero... - if kwSize == 0: - pass - elif kwSize == 1: - self.attrs.append(attr) - else: - self.attrs.append((attr, kwSize)) - - def GetSyntaxColorAttributes(self): - self.lastPos = 0 - self.attrs = [] - try: - tokenize.tokenize(self.GetNextLine, self._ProcessToken) - except tokenize.TokenError: - pass # Ignore - will cause all subsequent text to be commented. - numAtEnd = len(self.GetText()) - self.lastPos - if numAtEnd: - self.attrs.append((axdebug.SOURCETEXT_ATTR_COMMENT, numAtEnd)) - return self.attrs - - # We also provide and manage DebugDocumentContext objects - def _MakeDebugCodeContext(self, lineNo, charPos, len): - return _wrap( - contexts.DebugCodeContext(lineNo, charPos, len, self, self.site), - axdebug.IID_IDebugCodeContext, - ) - - # Make a context at the given position. It should take up the entire context. - def _MakeContextAtPosition(self, charPos): - lineNo, offset = self.GetLineOfPosition(charPos) - try: - endPos = self.GetPositionOfLine(lineNo + 1) - except: - endPos = charPos - codecontext = self._MakeDebugCodeContext(lineNo, charPos, endPos - charPos) - return codecontext - - # Returns a DebugCodeContext. debugDocument can be None for smart hosts. 
- def GetCodeContextAtPosition(self, charPos): - # trace("GetContextOfPos", charPos, maxChars) - # Convert to line number. - lineNo, offset = self.GetLineOfPosition(charPos) - charPos = self.GetPositionOfLine(lineNo) - try: - cc = self.codeContexts[charPos] - # trace(" GetContextOfPos using existing") - except KeyError: - cc = self._MakeContextAtPosition(charPos) - self.codeContexts[charPos] = cc - return cc - - -class SourceModuleContainer(SourceCodeContainer): - def __init__(self, module): - self.module = module - if hasattr(module, "__file__"): - fname = self.module.__file__ - # Check for .pyc or .pyo or even .pys! - if fname[-1] in ["O", "o", "C", "c", "S", "s"]: - fname = fname[:-1] - try: - fname = win32api.GetFullPathName(fname) - except win32api.error: - pass - else: - if module.__name__ == "__main__" and len(sys.argv) > 0: - fname = sys.argv[0] - else: - fname = "" - SourceCodeContainer.__init__(self, None, fname) - - def GetText(self): - if self.text is None: - fname = self.GetFileName() - if fname: - try: - self.text = open(fname, "r").read() - except IOError as details: - self.text = "# Exception opening file\n# %s" % (repr(details)) - else: - self.text = "# No file available for module '%s'" % (self.module) - self._buildlines() - return self.text - - def GetName(self, dnt): - name = self.module.__name__ - try: - fname = win32api.GetFullPathName(self.module.__file__) - except win32api.error: - fname = self.module.__file__ - except AttributeError: - fname = name - if dnt == axdebug.DOCUMENTNAMETYPE_APPNODE: - return name.split(".")[-1] - elif dnt == axdebug.DOCUMENTNAMETYPE_TITLE: - return fname - elif dnt == axdebug.DOCUMENTNAMETYPE_FILE_TAIL: - return os.path.split(fname)[1] - elif dnt == axdebug.DOCUMENTNAMETYPE_URL: - return "file:%s" % fname - else: - raise Exception(scode=winerror.E_UNEXPECTED) - - -if __name__ == "__main__": - import sys - - sys.path.append(".") - import ttest - - sc = SourceModuleContainer(ttest) - # sc = 
SourceCodeContainer(open(sys.argv[1], "rb").read(), sys.argv[1]) - attrs = sc.GetSyntaxColorAttributes() - attrlen = 0 - for attr in attrs: - if type(attr) == type(()): - attrlen = attrlen + attr[1] - else: - attrlen = attrlen + 1 - text = sc.GetText() - if attrlen != len(text): - print("Lengths dont match!!! (%d/%d)" % (attrlen, len(text))) - - # print "Attributes:" - # print attrs - print("GetLineOfPos=", sc.GetLineOfPosition(0)) - print("GetLineOfPos=", sc.GetLineOfPosition(4)) - print("GetLineOfPos=", sc.GetLineOfPosition(10)) diff --git a/lib/win32comext/axdebug/contexts.py b/lib/win32comext/axdebug/contexts.py deleted file mode 100644 index 85816639..00000000 --- a/lib/win32comext/axdebug/contexts.py +++ /dev/null @@ -1,62 +0,0 @@ -""" A module for managing the AXDebug I*Contexts - -""" -import pythoncom -import win32com.server.util - -from . import adb, axdebug, gateways - -# Utility function for wrapping object created by this module. -from .util import _wrap, _wrap_remove, trace - - -class DebugCodeContext(gateways.DebugCodeContext, gateways.DebugDocumentContext): - # NOTE: We also implement the IDebugDocumentContext interface for Simple Hosts. - # Thus, debugDocument may be NULL when we have smart hosts - but in that case, we - # wont be called upon to provide it. - _public_methods_ = ( - gateways.DebugCodeContext._public_methods_ - + gateways.DebugDocumentContext._public_methods_ - ) - _com_interfaces_ = ( - gateways.DebugCodeContext._com_interfaces_ - + gateways.DebugDocumentContext._com_interfaces_ - ) - - def __init__(self, lineNo, charPos, len, codeContainer, debugSite): - self.debugSite = debugSite - self.offset = charPos - self.length = len - self.breakPointState = 0 - self.lineno = lineNo - gateways.DebugCodeContext.__init__(self) - self.codeContainer = codeContainer - - def _Close(self): - self.debugSite = None - - def GetDocumentContext(self): - if self.debugSite is not None: - # We have a smart host - let him give it to us. 
- return self.debugSite.GetDocumentContextFromPosition( - self.codeContainer.sourceContext, self.offset, self.length - ) - else: - # Simple host - Fine - Ill do it myself! - return _wrap(self, axdebug.IID_IDebugDocumentContext) - - def SetBreakPoint(self, bps): - self.breakPointState = bps - adb.OnSetBreakPoint(self, bps, self.lineno) - - # The DebugDocumentContext methods for simple hosts. - def GetDocument(self): - return self.codeContainer.debugDocument - - def EnumCodeContexts(self): - return _wrap(EnumDebugCodeContexts([self]), axdebug.IID_IEnumDebugCodeContexts) - - -class EnumDebugCodeContexts(gateways.EnumDebugCodeContexts): - def _wrap(self, obj): - return _wrap(obj, axdebug.IID_IDebugCodeContext) diff --git a/lib/win32comext/axdebug/debugger.py b/lib/win32comext/axdebug/debugger.py deleted file mode 100644 index 3157586b..00000000 --- a/lib/win32comext/axdebug/debugger.py +++ /dev/null @@ -1,250 +0,0 @@ -import os -import string -import sys - -import pythoncom -import win32api -from win32com.axdebug import ( - adb, - axdebug, - codecontainer, - contexts, - documents, - expressions, - gateways, -) -from win32com.axdebug.util import _wrap, _wrap_remove, trace -from win32com.axscript import axscript - -currentDebugger = None - - -class ModuleTreeNode: - """Helper class for building a module tree""" - - def __init__(self, module): - modName = module.__name__ - self.moduleName = modName - self.module = module - self.realNode = None - self.cont = codecontainer.SourceModuleContainer(module) - - def __repr__(self): - return "" % (self.module) - - def Attach(self, parentRealNode): - self.realNode.Attach(parentRealNode) - - def Close(self): - self.module = None - self.cont = None - self.realNode = None - - -def BuildModule(module, built_nodes, rootNode, create_node_fn, create_node_args): - if module: - keep = module.__name__ - keep = keep and (built_nodes.get(module) is None) - if keep and hasattr(module, "__file__"): - keep = 
string.lower(os.path.splitext(module.__file__)[1]) not in [ - ".pyd", - ".dll", - ] - # keep = keep and module.__name__=='__main__' - if module and keep: - # print "keeping", module.__name__ - node = ModuleTreeNode(module) - built_nodes[module] = node - realNode = create_node_fn(*(node,) + create_node_args) - node.realNode = realNode - - # Split into parent nodes. - parts = string.split(module.__name__, ".") - if parts[-1][:8] == "__init__": - parts = parts[:-1] - parent = string.join(parts[:-1], ".") - parentNode = rootNode - if parent: - parentModule = sys.modules[parent] - BuildModule( - parentModule, built_nodes, rootNode, create_node_fn, create_node_args - ) - if parentModule in built_nodes: - parentNode = built_nodes[parentModule].realNode - node.Attach(parentNode) - - -def RefreshAllModules(builtItems, rootNode, create_node, create_node_args): - for module in list(sys.modules.values()): - BuildModule(module, builtItems, rootNode, create_node, create_node_args) - - -# realNode = pdm.CreateDebugDocumentHelper(None) # DebugDocumentHelper node? -# app.CreateApplicationNode() # doc provider node. - - -class CodeContainerProvider(documents.CodeContainerProvider): - def __init__(self, axdebugger): - self.axdebugger = axdebugger - documents.CodeContainerProvider.__init__(self) - self.currentNumModules = len(sys.modules) - self.nodes = {} - self.axdebugger.RefreshAllModules(self.nodes, self) - - def FromFileName(self, fname): - ### It appears we cant add modules during a debug session! 
- # if self.currentNumModules != len(sys.modules): - # self.axdebugger.RefreshAllModules(self.nodes, self) - # self.currentNumModules = len(sys.modules) - # for key in self.ccsAndNodes.keys(): - # print "File:", key - return documents.CodeContainerProvider.FromFileName(self, fname) - - def Close(self): - documents.CodeContainerProvider.Close(self) - self.axdebugger = None - print("Closing %d nodes" % (len(self.nodes))) - for node in self.nodes.values(): - node.Close() - self.nodes = {} - - -class OriginalInterfaceMaker: - def MakeInterfaces(self, pdm): - app = self.pdm.CreateApplication() - self.cookie = pdm.AddApplication(app) - root = app.GetRootNode() - return app, root - - def CloseInterfaces(self, pdm): - pdm.RemoveApplication(self.cookie) - - -class SimpleHostStyleInterfaceMaker: - def MakeInterfaces(self, pdm): - app = pdm.GetDefaultApplication() - root = app.GetRootNode() - return app, root - - def CloseInterfaces(self, pdm): - pass - - -class AXDebugger: - def __init__(self, interfaceMaker=None, processName=None): - if processName is None: - processName = "Python Process" - if interfaceMaker is None: - interfaceMaker = SimpleHostStyleInterfaceMaker() - - self.pydebugger = adb.Debugger() - - self.pdm = pythoncom.CoCreateInstance( - axdebug.CLSID_ProcessDebugManager, - None, - pythoncom.CLSCTX_ALL, - axdebug.IID_IProcessDebugManager, - ) - - self.app, self.root = interfaceMaker.MakeInterfaces(self.pdm) - self.app.SetName(processName) - self.interfaceMaker = interfaceMaker - - expressionProvider = _wrap( - expressions.ProvideExpressionContexts(), - axdebug.IID_IProvideExpressionContexts, - ) - self.expressionCookie = self.app.AddGlobalExpressionContextProvider( - expressionProvider - ) - - contProvider = CodeContainerProvider(self) - self.pydebugger.AttachApp(self.app, contProvider) - - def Break(self): - # Get the frame we start debugging from - this is the frame 1 level up - try: - 1 + "" - except: - frame = sys.exc_info()[2].tb_frame.f_back - - # 
Get/create the debugger, and tell it to break. - self.app.StartDebugSession() - # self.app.CauseBreak() - - self.pydebugger.SetupAXDebugging(None, frame) - self.pydebugger.set_trace() - - def Close(self): - self.pydebugger.ResetAXDebugging() - self.interfaceMaker.CloseInterfaces(self.pdm) - self.pydebugger.CloseApp() - self.app.RemoveGlobalExpressionContextProvider(self.expressionCookie) - self.expressionCookie = None - - self.pdm = None - self.app = None - self.pydebugger = None - self.root = None - - def RefreshAllModules(self, nodes, containerProvider): - RefreshAllModules( - nodes, self.root, self.CreateApplicationNode, (containerProvider,) - ) - - def CreateApplicationNode(self, node, containerProvider): - realNode = self.app.CreateApplicationNode() - - document = documents.DebugDocumentText(node.cont) - document = _wrap(document, axdebug.IID_IDebugDocument) - - node.cont.debugDocument = document - - provider = documents.DebugDocumentProvider(document) - provider = _wrap(provider, axdebug.IID_IDebugDocumentProvider) - realNode.SetDocumentProvider(provider) - - containerProvider.AddCodeContainer(node.cont, realNode) - return realNode - - -def _GetCurrentDebugger(): - global currentDebugger - if currentDebugger is None: - currentDebugger = AXDebugger() - return currentDebugger - - -def Break(): - _GetCurrentDebugger().Break() - - -brk = Break -set_trace = Break - - -def dosomethingelse(): - a = 2 - b = "Hi there" - - -def dosomething(): - a = 1 - b = 2 - dosomethingelse() - - -def test(): - Break() - input("Waiting...") - dosomething() - print("Done") - - -if __name__ == "__main__": - print("About to test the debugging interfaces!") - test() - print( - " %d/%d com objects still alive" - % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount()) - ) diff --git a/lib/win32comext/axdebug/documents.py b/lib/win32comext/axdebug/documents.py deleted file mode 100644 index d039cf40..00000000 --- a/lib/win32comext/axdebug/documents.py +++ /dev/null @@ -1,140 +0,0 
@@ -""" Management of documents for AXDebugging. -""" - - -import pythoncom -import win32api -from win32com.server.exception import Exception -from win32com.server.util import unwrap - -from . import axdebug, codecontainer, contexts, gateways -from .util import RaiseNotImpl, _wrap, _wrap_remove, trace - -# def trace(*args): -# pass - - -def GetGoodFileName(fname): - if fname[0] != "<": - return win32api.GetFullPathName(fname) - return fname - - -class DebugDocumentProvider(gateways.DebugDocumentProvider): - def __init__(self, doc): - self.doc = doc - - def GetName(self, dnt): - return self.doc.GetName(dnt) - - def GetDocumentClassId(self): - return self.doc.GetDocumentClassId() - - def GetDocument(self): - return self.doc - - -class DebugDocumentText( - gateways.DebugDocumentInfo, gateways.DebugDocumentText, gateways.DebugDocument -): - _com_interfaces_ = ( - gateways.DebugDocumentInfo._com_interfaces_ - + gateways.DebugDocumentText._com_interfaces_ - + gateways.DebugDocument._com_interfaces_ - ) - _public_methods_ = ( - gateways.DebugDocumentInfo._public_methods_ - + gateways.DebugDocumentText._public_methods_ - + gateways.DebugDocument._public_methods_ - ) - - # A class which implements a DebugDocumentText, using the functionality - # provided by a codeContainer - def __init__(self, codeContainer): - gateways.DebugDocumentText.__init__(self) - gateways.DebugDocumentInfo.__init__(self) - gateways.DebugDocument.__init__(self) - self.codeContainer = codeContainer - - def _Close(self): - self.docContexts = None - # self.codeContainer._Close() - self.codeContainer = None - - # IDebugDocumentInfo - def GetName(self, dnt): - return self.codeContainer.GetName(dnt) - - def GetDocumentClassId(self): - return "{DF630910-1C1D-11d0-AE36-8C0F5E000000}" - - # IDebugDocument has no methods! - # - - # IDebugDocumentText methods. 
- # def GetDocumentAttributes - def GetSize(self): - # trace("GetSize") - return self.codeContainer.GetNumLines(), self.codeContainer.GetNumChars() - - def GetPositionOfLine(self, cLineNumber): - return self.codeContainer.GetPositionOfLine(cLineNumber) - - def GetLineOfPosition(self, charPos): - return self.codeContainer.GetLineOfPosition(charPos) - - def GetText(self, charPos, maxChars, wantAttr): - # Get all the attributes, else the tokenizer will get upset. - # XXX - not yet! - # trace("GetText", charPos, maxChars, wantAttr) - cont = self.codeContainer - attr = cont.GetSyntaxColorAttributes() - return cont.GetText(), attr - - def GetPositionOfContext(self, context): - trace("GetPositionOfContext", context) - context = unwrap(context) - return context.offset, context.length - - # Return a DebugDocumentContext. - def GetContextOfPosition(self, charPos, maxChars): - # Make one - doc = _wrap(self, axdebug.IID_IDebugDocument) - rc = self.codeContainer.GetCodeContextAtPosition(charPos) - return rc.QueryInterface(axdebug.IID_IDebugDocumentContext) - - -class CodeContainerProvider: - """An abstract Python class which provides code containers! - - Given a Python file name (as the debugger knows it by) this will - return a CodeContainer interface suitable for use. - - This provides a simple base imlpementation that simply supports - a dictionary of nodes and providers. 
- """ - - def __init__(self): - self.ccsAndNodes = {} - - def AddCodeContainer(self, cc, node=None): - fname = GetGoodFileName(cc.fileName) - self.ccsAndNodes[fname] = cc, node - - def FromFileName(self, fname): - cc, node = self.ccsAndNodes.get(GetGoodFileName(fname), (None, None)) - # if cc is None: - # print "FromFileName for %s returning None" % fname - return cc - - def Close(self): - for cc, node in self.ccsAndNodes.values(): - try: - # Must close the node before closing the provider - # as node may make calls on provider (eg Reset breakpoints etc) - if node is not None: - node.Close() - cc._Close() - except pythoncom.com_error: - pass - self.ccsAndNodes = {} diff --git a/lib/win32comext/axdebug/dump.py b/lib/win32comext/axdebug/dump.py deleted file mode 100644 index 26ee51e0..00000000 --- a/lib/win32comext/axdebug/dump.py +++ /dev/null @@ -1,61 +0,0 @@ -import traceback - -import pythoncom -from win32com.axdebug import axdebug -from win32com.client.util import Enumerator - - -def DumpDebugApplicationNode(node, level=0): - # Recursive dump of a DebugApplicationNode - spacer = " " * level - for desc, attr in [ - ("Node Name", axdebug.DOCUMENTNAMETYPE_APPNODE), - ("Title", axdebug.DOCUMENTNAMETYPE_TITLE), - ("Filename", axdebug.DOCUMENTNAMETYPE_FILE_TAIL), - ("URL", axdebug.DOCUMENTNAMETYPE_URL), - ]: - try: - info = node.GetName(attr) - except pythoncom.com_error: - info = "" - print("%s%s: %s" % (spacer, desc, info)) - try: - doc = node.GetDocument() - except pythoncom.com_error: - doc = None - if doc: - doctext = doc.QueryInterface(axdebug.IID_IDebugDocumentText) - numLines, numChars = doctext.GetSize() - # text, attr = doctext.GetText(0, 20, 1) - text, attr = doctext.GetText(0, numChars, 1) - print( - "%sText is %s, %d bytes long" % (spacer, repr(text[:40] + "..."), len(text)) - ) - else: - print("%s%s" % (spacer, "")) - - for child in Enumerator(node.EnumChildren()): - DumpDebugApplicationNode(child, level + 1) - - -def dumpall(): - dm = 
pythoncom.CoCreateInstance( - axdebug.CLSID_MachineDebugManager, - None, - pythoncom.CLSCTX_ALL, - axdebug.IID_IMachineDebugManager, - ) - e = Enumerator(dm.EnumApplications()) - for app in e: - print("Application: %s" % app.GetName()) - node = ( - app.GetRootNode() - ) # of type PyIDebugApplicationNode->PyIDebugDocumentProvider->PyIDebugDocumentInfo - DumpDebugApplicationNode(node) - - -if __name__ == "__main__": - try: - dumpall() - except: - traceback.print_exc() diff --git a/lib/win32comext/axdebug/expressions.py b/lib/win32comext/axdebug/expressions.py deleted file mode 100644 index 28c7b904..00000000 --- a/lib/win32comext/axdebug/expressions.py +++ /dev/null @@ -1,214 +0,0 @@ -import io -import string -import sys -import traceback -from pprint import pprint - -import winerror -from win32com.server.exception import COMException - -from . import axdebug, gateways -from .util import RaiseNotImpl, _wrap, _wrap_remove - - -# Given an object, return a nice string -def MakeNiceString(ob): - stream = io.StringIO() - pprint(ob, stream) - return string.strip(stream.getvalue()) - - -class ProvideExpressionContexts(gateways.ProvideExpressionContexts): - pass - - -class ExpressionContext(gateways.DebugExpressionContext): - def __init__(self, frame): - self.frame = frame - - def ParseLanguageText(self, code, radix, delim, flags): - return _wrap( - Expression(self.frame, code, radix, delim, flags), - axdebug.IID_IDebugExpression, - ) - - def GetLanguageInfo(self): - # print "GetLanguageInfo" - return "Python", "{DF630910-1C1D-11d0-AE36-8C0F5E000000}" - - -class Expression(gateways.DebugExpression): - def __init__(self, frame, code, radix, delim, flags): - self.callback = None - self.frame = frame - self.code = code - self.radix = radix - self.delim = delim - self.flags = flags - self.isComplete = 0 - self.result = None - self.hresult = winerror.E_UNEXPECTED - - def Start(self, callback): - try: - try: - try: - self.result = eval( - self.code, self.frame.f_globals, 
self.frame.f_locals - ) - except SyntaxError: - exec(self.code, self.frame.f_globals, self.frame.f_locals) - self.result = "" - self.hresult = 0 - except: - l = traceback.format_exception_only( - sys.exc_info()[0], sys.exc_info()[1] - ) - # l is a list of strings with trailing "\n" - self.result = string.join(map(lambda s: s[:-1], l), "\n") - self.hresult = winerror.E_FAIL - finally: - self.isComplete = 1 - callback.onComplete() - - def Abort(self): - print("** ABORT **") - - def QueryIsComplete(self): - return self.isComplete - - def GetResultAsString(self): - # print "GetStrAsResult returning", self.result - return self.hresult, MakeNiceString(self.result) - - def GetResultAsDebugProperty(self): - result = _wrap( - DebugProperty(self.code, self.result, None, self.hresult), - axdebug.IID_IDebugProperty, - ) - return self.hresult, result - - -def MakeEnumDebugProperty(object, dwFieldSpec, nRadix, iid, stackFrame=None): - name_vals = [] - if hasattr(object, "items") and hasattr(object, "keys"): # If it is a dict. 
- name_vals = iter(object.items()) - dictionary = object - elif hasattr(object, "__dict__"): # object with dictionary, module - name_vals = iter(object.__dict__.items()) - dictionary = object.__dict__ - infos = [] - for name, val in name_vals: - infos.append( - GetPropertyInfo(name, val, dwFieldSpec, nRadix, 0, dictionary, stackFrame) - ) - return _wrap(EnumDebugPropertyInfo(infos), axdebug.IID_IEnumDebugPropertyInfo) - - -def GetPropertyInfo( - obname, obvalue, dwFieldSpec, nRadix, hresult=0, dictionary=None, stackFrame=None -): - # returns a tuple - name = typ = value = fullname = attrib = dbgprop = None - if dwFieldSpec & axdebug.DBGPROP_INFO_VALUE: - value = MakeNiceString(obvalue) - if dwFieldSpec & axdebug.DBGPROP_INFO_NAME: - name = obname - if dwFieldSpec & axdebug.DBGPROP_INFO_TYPE: - if hresult: - typ = "Error" - else: - try: - typ = type(obvalue).__name__ - except AttributeError: - typ = str(type(obvalue)) - if dwFieldSpec & axdebug.DBGPROP_INFO_FULLNAME: - fullname = obname - if dwFieldSpec & axdebug.DBGPROP_INFO_ATTRIBUTES: - if hasattr(obvalue, "has_key") or hasattr( - obvalue, "__dict__" - ): # If it is a dict or object - attrib = axdebug.DBGPROP_ATTRIB_VALUE_IS_EXPANDABLE - else: - attrib = 0 - if dwFieldSpec & axdebug.DBGPROP_INFO_DEBUGPROP: - dbgprop = _wrap( - DebugProperty(name, obvalue, None, hresult, dictionary, stackFrame), - axdebug.IID_IDebugProperty, - ) - return name, typ, value, fullname, attrib, dbgprop - - -from win32com.server.util import ListEnumeratorGateway - - -class EnumDebugPropertyInfo(ListEnumeratorGateway): - """A class to expose a Python sequence as an EnumDebugCodeContexts - - Create an instance of this class passing a sequence (list, tuple, or - any sequence protocol supporting object) and it will automatically - support the EnumDebugCodeContexts interface for the object. 
- - """ - - _public_methods_ = ListEnumeratorGateway._public_methods_ + ["GetCount"] - _com_interfaces_ = [axdebug.IID_IEnumDebugPropertyInfo] - - def GetCount(self): - return len(self._list_) - - def _wrap(self, ob): - return ob - - -class DebugProperty: - _com_interfaces_ = [axdebug.IID_IDebugProperty] - _public_methods_ = [ - "GetPropertyInfo", - "GetExtendedInfo", - "SetValueAsString", - "EnumMembers", - "GetParent", - ] - - def __init__( - self, name, value, parent=None, hresult=0, dictionary=None, stackFrame=None - ): - self.name = name - self.value = value - self.parent = parent - self.hresult = hresult - self.dictionary = dictionary - self.stackFrame = stackFrame - - def GetPropertyInfo(self, dwFieldSpec, nRadix): - return GetPropertyInfo( - self.name, - self.value, - dwFieldSpec, - nRadix, - self.hresult, - dictionary, - stackFrame, - ) - - def GetExtendedInfo(self): ### Note - not in the framework. - RaiseNotImpl("DebugProperty::GetExtendedInfo") - - def SetValueAsString(self, value, radix): - if self.stackFrame and self.dictionary: - self.dictionary[self.name] = eval( - value, self.stackFrame.f_globals, self.stackFrame.f_locals - ) - else: - RaiseNotImpl("DebugProperty::SetValueAsString") - - def EnumMembers(self, dwFieldSpec, nRadix, iid): - # Returns IEnumDebugPropertyInfo - return MakeEnumDebugProperty( - self.value, dwFieldSpec, nRadix, iid, self.stackFrame - ) - - def GetParent(self): - # return IDebugProperty - RaiseNotImpl("DebugProperty::GetParent") diff --git a/lib/win32comext/axdebug/gateways.py b/lib/win32comext/axdebug/gateways.py deleted file mode 100644 index d87e85b3..00000000 --- a/lib/win32comext/axdebug/gateways.py +++ /dev/null @@ -1,583 +0,0 @@ -# Classes which describe interfaces. 
- -import pythoncom -import win32com.server.connect -import winerror -from win32com.axdebug import axdebug -from win32com.axdebug.util import RaiseNotImpl, _wrap -from win32com.server.exception import Exception -from win32com.server.util import ListEnumeratorGateway - - -class EnumDebugCodeContexts(ListEnumeratorGateway): - """A class to expose a Python sequence as an EnumDebugCodeContexts - - Create an instance of this class passing a sequence (list, tuple, or - any sequence protocol supporting object) and it will automatically - support the EnumDebugCodeContexts interface for the object. - - """ - - _com_interfaces_ = [axdebug.IID_IEnumDebugCodeContexts] - - -class EnumDebugStackFrames(ListEnumeratorGateway): - """A class to expose a Python sequence as an EnumDebugStackFrames - - Create an instance of this class passing a sequence (list, tuple, or - any sequence protocol supporting object) and it will automatically - support the EnumDebugStackFrames interface for the object. - - """ - - _com_interfaces_ = [axdebug.IID_IEnumDebugStackFrames] - - -class EnumDebugApplicationNodes(ListEnumeratorGateway): - """A class to expose a Python sequence as an EnumDebugStackFrames - - Create an instance of this class passing a sequence (list, tuple, or - any sequence protocol supporting object) and it will automatically - support the EnumDebugApplicationNodes interface for the object. 
- - """ - - _com_interfaces_ = [axdebug.IID_IEnumDebugApplicationNodes] - - -class EnumRemoteDebugApplications(ListEnumeratorGateway): - _com_interfaces_ = [axdebug.IID_IEnumRemoteDebugApplications] - - -class EnumRemoteDebugApplicationThreads(ListEnumeratorGateway): - _com_interfaces_ = [axdebug.IID_IEnumRemoteDebugApplicationThreads] - - -class DebugDocumentInfo: - _public_methods_ = ["GetName", "GetDocumentClassId"] - _com_interfaces_ = [axdebug.IID_IDebugDocumentInfo] - - def __init__(self): - pass - - def GetName(self, dnt): - """Get the one of the name of the document - dnt -- int DOCUMENTNAMETYPE - """ - RaiseNotImpl("GetName") - - def GetDocumentClassId(self): - """ - Result must be an IID object (or string representing one). - """ - RaiseNotImpl("GetDocumentClassId") - - -class DebugDocumentProvider(DebugDocumentInfo): - _public_methods_ = DebugDocumentInfo._public_methods_ + ["GetDocument"] - _com_interfaces_ = DebugDocumentInfo._com_interfaces_ + [ - axdebug.IID_IDebugDocumentProvider - ] - - def GetDocument(self): - RaiseNotImpl("GetDocument") - - -class DebugApplicationNode(DebugDocumentProvider): - """Provides the functionality of IDebugDocumentProvider, plus a context within a project tree.""" - - _public_methods_ = ( - """EnumChildren GetParent SetDocumentProvider - Close Attach Detach""".split() - + DebugDocumentProvider._public_methods_ - ) - _com_interfaces_ = [ - axdebug.IID_IDebugDocumentProvider - ] + DebugDocumentProvider._com_interfaces_ - - def __init__(self): - DebugDocumentProvider.__init__(self) - - def EnumChildren(self): - # Result is type PyIEnumDebugApplicationNodes - RaiseNotImpl("EnumChildren") - - def GetParent(self): - # result is type PyIDebugApplicationNode - RaiseNotImpl("GetParent") - - def SetDocumentProvider(self, pddp): # PyIDebugDocumentProvider pddp - # void result. - RaiseNotImpl("SetDocumentProvider") - - def Close(self): - # void result. 
- RaiseNotImpl("Close") - - def Attach(self, parent): # PyIDebugApplicationNode - # void result. - RaiseNotImpl("Attach") - - def Detach(self): - # void result. - RaiseNotImpl("Detach") - - -class DebugApplicationNodeEvents: - """Event interface for DebugApplicationNode object.""" - - _public_methods_ = "onAddChild onRemoveChild onDetach".split() - _com_interfaces_ = [axdebug.IID_IDebugApplicationNodeEvents] - - def __init__(self): - pass - - def onAddChild(self, child): # PyIDebugApplicationNode - # void result. - RaiseNotImpl("onAddChild") - - def onRemoveChild(self, child): # PyIDebugApplicationNode - # void result. - RaiseNotImpl("onRemoveChild") - - def onDetach(self): - # void result. - RaiseNotImpl("onDetach") - - def onAttach(self, parent): # PyIDebugApplicationNode - # void result. - RaiseNotImpl("onAttach") - - -class DebugDocument(DebugDocumentInfo): - """The base interface to all debug documents.""" - - _public_methods_ = DebugDocumentInfo._public_methods_ - _com_interfaces_ = [axdebug.IID_IDebugDocument] + DebugDocumentInfo._com_interfaces_ - - -class DebugDocumentText(DebugDocument): - """The interface to a text only debug document.""" - - _com_interfaces_ = [axdebug.IID_IDebugDocumentText] + DebugDocument._com_interfaces_ - _public_methods_ = [ - "GetDocumentAttributes", - "GetSize", - "GetPositionOfLine", - "GetLineOfPosition", - "GetText", - "GetPositionOfContext", - "GetContextOfPosition", - ] + DebugDocument._public_methods_ - - def __init__(self): - pass - - # IDebugDocumentText - def GetDocumentAttributes(self): - # Result is int (TEXT_DOC_ATTR) - RaiseNotImpl("GetDocumentAttributes") - - def GetSize(self): - # Result is (numLines, numChars) - RaiseNotImpl("GetSize") - - def GetPositionOfLine(self, cLineNumber): - # Result is int char position - RaiseNotImpl("GetPositionOfLine") - - def GetLineOfPosition(self, charPos): - # Result is int, int (lineNo, offset) - RaiseNotImpl("GetLineOfPosition") - - def GetText(self, charPos, maxChars, 
wantAttr): - """Params - charPos -- integer - maxChars -- integer - wantAttr -- Should the function compute attributes. - - Return value must be (string, attribtues). attributes may be - None if(not wantAttr) - """ - RaiseNotImpl("GetText") - - def GetPositionOfContext(self, debugDocumentContext): - """Params - debugDocumentContext -- a PyIDebugDocumentContext object. - - Return value must be (charPos, numChars) - """ - RaiseNotImpl("GetPositionOfContext") - - def GetContextOfPosition(self, charPos, maxChars): - """Params are integers. - Return value must be PyIDebugDocumentContext object - """ - print(self) - RaiseNotImpl("GetContextOfPosition") - - -class DebugDocumentTextExternalAuthor: - """Allow external editors to edit file-based debugger documents, and to notify the document when the source file has been changed.""" - - _public_methods_ = ["GetPathName", "GetFileName", "NotifyChanged"] - _com_interfaces_ = [axdebug.IID_IDebugDocumentTextExternalAuthor] - - def __init__(self): - pass - - def GetPathName(self): - """Return the full path (including file name) to the document's source file. - - Result must be (filename, fIsOriginal), where - - if fIsOriginalPath is TRUE if the path refers to the original file for the document. - - if fIsOriginalPath is FALSE if the path refers to a newly created temporary file. - - raise Exception(winerror.E_FAIL) if no source file can be created/determined. - """ - RaiseNotImpl("GetPathName") - - def GetFileName(self): - """Return just the name of the document, with no path information. (Used for "Save As...") - - Result is a string - """ - RaiseNotImpl("GetFileName") - - def NotifyChanged(self): - """Notify the host that the document's source file has been saved and - that its contents should be refreshed. 
- """ - RaiseNotImpl("NotifyChanged") - - -class DebugDocumentTextEvents: - _public_methods_ = """onDestroy onInsertText onRemoveText - onReplaceText onUpdateTextAttributes - onUpdateDocumentAttributes""".split() - _com_interfaces_ = [axdebug.IID_IDebugDocumentTextEvents] - - def __init__(self): - pass - - def onDestroy(self): - # Result is void. - RaiseNotImpl("onDestroy") - - def onInsertText(self, cCharacterPosition, cNumToInsert): - # Result is void. - RaiseNotImpl("onInsertText") - - def onRemoveText(self, cCharacterPosition, cNumToRemove): - # Result is void. - RaiseNotImpl("onRemoveText") - - def onReplaceText(self, cCharacterPosition, cNumToReplace): - # Result is void. - RaiseNotImpl("onReplaceText") - - def onUpdateTextAttributes(self, cCharacterPosition, cNumToUpdate): - # Result is void. - RaiseNotImpl("onUpdateTextAttributes") - - def onUpdateDocumentAttributes(self, textdocattr): # TEXT_DOC_ATTR - # Result is void. - RaiseNotImpl("onUpdateDocumentAttributes") - - -class DebugDocumentContext: - _public_methods_ = ["GetDocument", "EnumCodeContexts"] - _com_interfaces_ = [axdebug.IID_IDebugDocumentContext] - - def __init__(self): - pass - - def GetDocument(self): - """Return value must be a PyIDebugDocument object""" - RaiseNotImpl("GetDocument") - - def EnumCodeContexts(self): - """Return value must be a PyIEnumDebugCodeContexts object""" - RaiseNotImpl("EnumCodeContexts") - - -class DebugCodeContext: - _public_methods_ = ["GetDocumentContext", "SetBreakPoint"] - _com_interfaces_ = [axdebug.IID_IDebugCodeContext] - - def __init__(self): - pass - - def GetDocumentContext(self): - """Return value must be a PyIDebugDocumentContext object""" - RaiseNotImpl("GetDocumentContext") - - def SetBreakPoint(self, bps): - """bps -- an integer with flags.""" - RaiseNotImpl("SetBreakPoint") - - -class DebugStackFrame: - """Abstraction representing a logical stack frame on the stack of a thread.""" - - _public_methods_ = [ - "GetCodeContext", - "GetDescriptionString", 
- "GetLanguageString", - "GetThread", - "GetDebugProperty", - ] - _com_interfaces_ = [axdebug.IID_IDebugStackFrame] - - def __init__(self): - pass - - def GetCodeContext(self): - """Returns the current code context associated with the stack frame. - - Return value must be a IDebugCodeContext object - """ - RaiseNotImpl("GetCodeContext") - - def GetDescriptionString(self, fLong): - """Returns a textual description of the stack frame. - - fLong -- A flag indicating if the long name is requested. - """ - RaiseNotImpl("GetDescriptionString") - - def GetLanguageString(self): - """Returns a short or long textual description of the language. - - fLong -- A flag indicating if the long name is requested. - """ - RaiseNotImpl("GetLanguageString") - - def GetThread(self): - """Returns the thread associated with this stack frame. - - Result must be a IDebugApplicationThread - """ - RaiseNotImpl("GetThread") - - def GetDebugProperty(self): - RaiseNotImpl("GetDebugProperty") - - -class DebugDocumentHost: - """The interface from the IDebugDocumentHelper back to - the smart host or language engine. This interface - exposes host specific functionality such as syntax coloring. - """ - - _public_methods_ = [ - "GetDeferredText", - "GetScriptTextAttributes", - "OnCreateDocumentContext", - "GetPathName", - "GetFileName", - "NotifyChanged", - ] - _com_interfaces_ = [axdebug.IID_IDebugDocumentHost] - - def __init__(self): - pass - - def GetDeferredText(self, dwTextStartCookie, maxChars, bWantAttr): - RaiseNotImpl("GetDeferredText") - - def GetScriptTextAttributes(self, codeText, delimterText, flags): - # Result must be an attribute sequence of same "length" as the code. - RaiseNotImpl("GetScriptTextAttributes") - - def OnCreateDocumentContext(self): - # Result must be a PyIUnknown - RaiseNotImpl("OnCreateDocumentContext") - - def GetPathName(self): - # Result must be (string, int) where the int is a BOOL - # - TRUE if the path refers to the original file for the document. 
- # - FALSE if the path refers to a newly created temporary file. - # - raise Exception(scode=E_FAIL) if no source file can be created/determined. - RaiseNotImpl("GetPathName") - - def GetFileName(self): - # Result is a string with just the name of the document, no path information. - RaiseNotImpl("GetFileName") - - def NotifyChanged(self): - RaiseNotImpl("NotifyChanged") - - -# Additional gateway related functions. - - -class DebugDocumentTextConnectServer: - _public_methods_ = ( - win32com.server.connect.IConnectionPointContainer_methods - + win32com.server.connect.IConnectionPoint_methods - ) - _com_interfaces_ = [ - pythoncom.IID_IConnectionPoint, - pythoncom.IID_IConnectionPointContainer, - ] - - # IConnectionPoint interfaces - def __init__(self): - self.cookieNo = -1 - self.connections = {} - - def EnumConnections(self): - RaiseNotImpl("EnumConnections") - - def GetConnectionInterface(self): - RaiseNotImpl("GetConnectionInterface") - - def GetConnectionPointContainer(self): - return _wrap(self) - - def Advise(self, pUnk): - # Creates a connection to the client. Simply allocate a new cookie, - # find the clients interface, and store it in a dictionary. - interface = pUnk.QueryInterface(axdebug.IID_IDebugDocumentTextEvents, 1) - self.cookieNo = self.cookieNo + 1 - self.connections[self.cookieNo] = interface - return self.cookieNo - - def Unadvise(self, cookie): - # Destroy a connection - simply delete interface from the map. - try: - del self.connections[cookie] - except KeyError: - return Exception(scode=winerror.E_UNEXPECTED) - - # IConnectionPointContainer interfaces - def EnumConnectionPoints(self): - RaiseNotImpl("EnumConnectionPoints") - - def FindConnectionPoint(self, iid): - # Find a connection we support. Only support the single event interface. - if iid == axdebug.IID_IDebugDocumentTextEvents: - return _wrap(self) - raise Exception(scode=winerror.E_NOINTERFACE) # ?? 
- - -class RemoteDebugApplicationEvents: - _public_methods_ = [ - "OnConnectDebugger", - "OnDisconnectDebugger", - "OnSetName", - "OnDebugOutput", - "OnClose", - "OnEnterBreakPoint", - "OnLeaveBreakPoint", - "OnCreateThread", - "OnDestroyThread", - "OnBreakFlagChange", - ] - _com_interfaces_ = [axdebug.IID_IRemoteDebugApplicationEvents] - - def OnConnectDebugger(self, appDebugger): - """appDebugger -- a PyIApplicationDebugger""" - RaiseNotImpl("OnConnectDebugger") - - def OnDisconnectDebugger(self): - RaiseNotImpl("OnDisconnectDebugger") - - def OnSetName(self, name): - RaiseNotImpl("OnSetName") - - def OnDebugOutput(self, string): - RaiseNotImpl("OnDebugOutput") - - def OnClose(self): - RaiseNotImpl("OnClose") - - def OnEnterBreakPoint(self, rdat): - """rdat -- PyIRemoteDebugApplicationThread""" - RaiseNotImpl("OnEnterBreakPoint") - - def OnLeaveBreakPoint(self, rdat): - """rdat -- PyIRemoteDebugApplicationThread""" - RaiseNotImpl("OnLeaveBreakPoint") - - def OnCreateThread(self, rdat): - """rdat -- PyIRemoteDebugApplicationThread""" - RaiseNotImpl("OnCreateThread") - - def OnDestroyThread(self, rdat): - """rdat -- PyIRemoteDebugApplicationThread""" - RaiseNotImpl("OnDestroyThread") - - def OnBreakFlagChange(self, abf, rdat): - """abf -- int - one of the axdebug.APPBREAKFLAGS constants - rdat -- PyIRemoteDebugApplicationThread - RaiseNotImpl("OnBreakFlagChange") - """ - - -class DebugExpressionContext: - _public_methods_ = ["ParseLanguageText", "GetLanguageInfo"] - _com_interfaces_ = [axdebug.IID_IDebugExpressionContext] - - def __init__(self): - pass - - def ParseLanguageText(self, code, radix, delim, flags): - """ - result is IDebugExpression - """ - RaiseNotImpl("ParseLanguageText") - - def GetLanguageInfo(self): - """ - result is (string langName, iid langId) - """ - RaiseNotImpl("GetLanguageInfo") - - -class DebugExpression: - _public_methods_ = [ - "Start", - "Abort", - "QueryIsComplete", - "GetResultAsString", - "GetResultAsDebugProperty", - ] - 
_com_interfaces_ = [axdebug.IID_IDebugExpression] - - def Start(self, callback): - """ - callback -- an IDebugExpressionCallback - - result - void - """ - RaiseNotImpl("Start") - - def Abort(self): - """ - no params - result -- void - """ - RaiseNotImpl("Abort") - - def QueryIsComplete(self): - """ - no params - result -- void - """ - RaiseNotImpl("QueryIsComplete") - - def GetResultAsString(self): - RaiseNotImpl("GetResultAsString") - - def GetResultAsDebugProperty(self): - RaiseNotImpl("GetResultAsDebugProperty") - - -class ProvideExpressionContexts: - _public_methods_ = ["EnumExpressionContexts"] - _com_interfaces_ = [axdebug.IID_IProvideExpressionContexts] - - def EnumExpressionContexts(self): - RaiseNotImpl("EnumExpressionContexts") diff --git a/lib/win32comext/axdebug/stackframe.py b/lib/win32comext/axdebug/stackframe.py deleted file mode 100644 index edc3fe8c..00000000 --- a/lib/win32comext/axdebug/stackframe.py +++ /dev/null @@ -1,179 +0,0 @@ -"""Support for stack-frames. - -Provides Implements a nearly complete wrapper for a stack frame. -""" - -import pythoncom -from win32com.server.exception import COMException - -from . import axdebug, expressions, gateways -from .util import RaiseNotImpl, _wrap, trace - -# def trace(*args): -# pass - - -class EnumDebugStackFrames(gateways.EnumDebugStackFrames): - """A class that given a debugger object, can return an enumerator - of DebugStackFrame objects. - """ - - def __init__(self, debugger): - infos = [] - frame = debugger.currentframe - # print "Stack check" - while frame: - # print " Checking frame", frame.f_code.co_filename, frame.f_lineno-1, frame.f_trace, - # Get a DebugCodeContext for the stack frame. If we fail, then it - # is not debuggable, and therefore not worth displaying. 
- cc = debugger.codeContainerProvider.FromFileName(frame.f_code.co_filename) - if cc is not None: - try: - address = frame.f_locals["__axstack_address__"] - except KeyError: - # print "Couldnt find stack address for",frame.f_code.co_filename, frame.f_lineno-1 - # Use this one, even tho it is wrong :-( - address = axdebug.GetStackAddress() - frameInfo = ( - DebugStackFrame(frame, frame.f_lineno - 1, cc), - address, - address + 1, - 0, - None, - ) - infos.append(frameInfo) - # print "- Kept!" - # else: - # print "- rejected" - frame = frame.f_back - - gateways.EnumDebugStackFrames.__init__(self, infos, 0) - - # def __del__(self): - # print "EnumDebugStackFrames dieing" - - def Next(self, count): - return gateways.EnumDebugStackFrames.Next(self, count) - - # def _query_interface_(self, iid): - # from win32com.util import IIDToInterfaceName - # print "EnumDebugStackFrames QI with %s (%s)" % (IIDToInterfaceName(iid), str(iid)) - # return 0 - def _wrap(self, obj): - # This enum returns a tuple, with 2 com objects in it. 
- obFrame, min, lim, fFinal, obFinal = obj - obFrame = _wrap(obFrame, axdebug.IID_IDebugStackFrame) - if obFinal: - obFinal = _wrap(obFinal, pythoncom.IID_IUnknown) - return obFrame, min, lim, fFinal, obFinal - - -class DebugStackFrame(gateways.DebugStackFrame): - def __init__(self, frame, lineno, codeContainer): - self.frame = frame - self.lineno = lineno - self.codeContainer = codeContainer - self.expressionContext = None - - # def __del__(self): - # print "DSF dieing" - def _query_interface_(self, iid): - if iid == axdebug.IID_IDebugExpressionContext: - if self.expressionContext is None: - self.expressionContext = _wrap( - expressions.ExpressionContext(self.frame), - axdebug.IID_IDebugExpressionContext, - ) - return self.expressionContext - # from win32com.util import IIDToInterfaceName - # print "DebugStackFrame QI with %s (%s)" % (IIDToInterfaceName(iid), str(iid)) - return 0 - - # - # The following need implementation - def GetThread(self): - """Returns the thread associated with this stack frame. 
- - Result must be a IDebugApplicationThread - """ - RaiseNotImpl("GetThread") - - def GetCodeContext(self): - offset = self.codeContainer.GetPositionOfLine(self.lineno) - return self.codeContainer.GetCodeContextAtPosition(offset) - - # - # The following are usefully implemented - def GetDescriptionString(self, fLong): - filename = self.frame.f_code.co_filename - s = "" - if 0: # fLong: - s = s + filename - if self.frame.f_code.co_name: - s = s + self.frame.f_code.co_name - else: - s = s + "" - return s - - def GetLanguageString(self, fLong): - if fLong: - return "Python ActiveX Scripting Engine" - else: - return "Python" - - def GetDebugProperty(self): - return _wrap(StackFrameDebugProperty(self.frame), axdebug.IID_IDebugProperty) - - -class DebugStackFrameSniffer: - _public_methods_ = ["EnumStackFrames"] - _com_interfaces_ = [axdebug.IID_IDebugStackFrameSniffer] - - def __init__(self, debugger): - self.debugger = debugger - trace("DebugStackFrameSniffer instantiated") - - # def __del__(self): - # print "DSFS dieing" - def EnumStackFrames(self): - trace("DebugStackFrameSniffer.EnumStackFrames called") - return _wrap( - EnumDebugStackFrames(self.debugger), axdebug.IID_IEnumDebugStackFrames - ) - - -# A DebugProperty for a stack frame. -class StackFrameDebugProperty: - _com_interfaces_ = [axdebug.IID_IDebugProperty] - _public_methods_ = [ - "GetPropertyInfo", - "GetExtendedInfo", - "SetValueAsString", - "EnumMembers", - "GetParent", - ] - - def __init__(self, frame): - self.frame = frame - - def GetPropertyInfo(self, dwFieldSpec, nRadix): - RaiseNotImpl("StackFrameDebugProperty::GetPropertyInfo") - - def GetExtendedInfo(self): ### Note - not in the framework. - RaiseNotImpl("StackFrameDebugProperty::GetExtendedInfo") - - def SetValueAsString(self, value, radix): - # - RaiseNotImpl("DebugProperty::SetValueAsString") - - def EnumMembers(self, dwFieldSpec, nRadix, iid): - print("EnumMembers", dwFieldSpec, nRadix, iid) - from . 
import expressions - - return expressions.MakeEnumDebugProperty( - self.frame.f_locals, dwFieldSpec, nRadix, iid, self.frame - ) - - def GetParent(self): - # return IDebugProperty - RaiseNotImpl("DebugProperty::GetParent") diff --git a/lib/win32comext/axdebug/util.py b/lib/win32comext/axdebug/util.py deleted file mode 100644 index c3d4f733..00000000 --- a/lib/win32comext/axdebug/util.py +++ /dev/null @@ -1,141 +0,0 @@ -# Utility function for wrapping objects. Centralising allows me to turn -# debugging on and off for the entire package in a single spot. - -import os -import sys - -import win32api -import win32com.server.util -import winerror -from win32com.server.exception import Exception - -try: - os.environ["DEBUG_AXDEBUG"] - debugging = 1 -except KeyError: - debugging = 0 - - -def trace(*args): - if not debugging: - return - print(str(win32api.GetCurrentThreadId()) + ":", end=" ") - for arg in args: - print(arg, end=" ") - print() - - -# The AXDebugging implementation assumes that the returned COM pointers are in -# some cases identical. Eg, from a C++ perspective: -# p->GetSomeInterface( &p1 ); -# p->GetSomeInterface( &p2 ); -# p1==p2 -# By default, this is _not_ true for Python. -# (Now this is only true for Document objects, and Python -# now does ensure this. - -all_wrapped = {} - - -def _wrap_nodebug(object, iid): - return win32com.server.util.wrap(object, iid) - - -def _wrap_debug(object, iid): - import win32com.server.policy - - dispatcher = win32com.server.policy.DispatcherWin32trace - return win32com.server.util.wrap(object, iid, useDispatcher=dispatcher) - - -if debugging: - _wrap = _wrap_debug -else: - _wrap = _wrap_nodebug - - -def _wrap_remove(object, iid=None): - # Old - no longer used or necessary! 
- return - - -def _dump_wrapped(): - from win32com.server.util import unwrap - - print("Wrapped items:") - for key, items in all_wrapped.items(): - print(key, end=" ") - try: - ob = unwrap(key) - print(ob, sys.getrefcount(ob)) - except: - print("") - - -def RaiseNotImpl(who=None): - if who is not None: - print("********* Function %s Raising E_NOTIMPL ************" % (who)) - - # Print a sort-of "traceback", dumping all the frames leading to here. - try: - 1 / 0 - except: - frame = sys.exc_info()[2].tb_frame - while frame: - print("File: %s, Line: %d" % (frame.f_code.co_filename, frame.f_lineno)) - frame = frame.f_back - - # and raise the exception for COM - raise Exception(scode=winerror.E_NOTIMPL) - - -import win32com.server.policy - - -class Dispatcher(win32com.server.policy.DispatcherWin32trace): - def __init__(self, policyClass, object): - win32com.server.policy.DispatcherTrace.__init__(self, policyClass, object) - import win32traceutil # Sets up everything. - - # print "Object with win32trace dispatcher created (object=%s)" % `object` - - def _QueryInterface_(self, iid): - rc = win32com.server.policy.DispatcherBase._QueryInterface_(self, iid) - # if not rc: - # self._trace_("in _QueryInterface_ with unsupported IID %s (%s)\n" % (IIDToInterfaceName(iid),iid)) - return rc - - def _Invoke_(self, dispid, lcid, wFlags, args): - print( - "In Invoke with", - dispid, - lcid, - wFlags, - args, - "with object", - self.policy._obj_, - ) - try: - rc = win32com.server.policy.DispatcherBase._Invoke_( - self, dispid, lcid, wFlags, args - ) - # print "Invoke of", dispid, "returning", rc - return rc - except Exception: - t, v, tb = sys.exc_info() - tb = None # A cycle - scode = v.scode - try: - desc = " (" + str(v.description) + ")" - except AttributeError: - desc = "" - print( - "*** Invoke of %s raised COM exception 0x%x%s" % (dispid, scode, desc) - ) - except: - print("*** Invoke of %s failed:" % dispid) - typ, val, tb = sys.exc_info() - import traceback - - 
traceback.print_exception(typ, val, tb) - raise diff --git a/lib/win32comext/axscript/Demos/client/asp/CreateObject.asp b/lib/win32comext/axscript/Demos/client/asp/CreateObject.asp deleted file mode 100644 index 5338c0a0..00000000 --- a/lib/win32comext/axscript/Demos/client/asp/CreateObject.asp +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/asp/caps.asp b/lib/win32comext/axscript/Demos/client/asp/caps.asp deleted file mode 100644 index 6b0342aa..00000000 --- a/lib/win32comext/axscript/Demos/client/asp/caps.asp +++ /dev/null @@ -1,52 +0,0 @@ -<%@ Language=Python %> - - - - - - -Python test - - - - - - - - - -<% -import sys -print sys.path -from win32com.axscript.asputil import * -print "Hello" -print "There" -print "How are you" -%> - -<%bc = Server.CreateObject("MSWC.BrowserType")%> - - - - - - - - - -
    Browser <%=bc.browser %> -
    Version <%=bc.version %>
    Frames -<%Response.Write( iif(bc.frames, "TRUE", "FALSE")) %>
    Tables -<%Response.Write( iif (bc.tables, "TRUE", "FALSE")) %>
    BackgroundSounds -<%Response.Write( iif(bc.BackgroundSounds, "TRUE", "FALSE"))%>
    VBScript -<%Response.Write( iif(bc.vbscript, "TRUE", "FALSE"))%>
    JavaScript -<%Response.Write( iif(bc.javascript, "TRUE", "FALSE"))%>
    - - - diff --git a/lib/win32comext/axscript/Demos/client/asp/interrupt/test.asp b/lib/win32comext/axscript/Demos/client/asp/interrupt/test.asp deleted file mode 100644 index ee77703f..00000000 --- a/lib/win32comext/axscript/Demos/client/asp/interrupt/test.asp +++ /dev/null @@ -1,4 +0,0 @@ -<%@ language=python%> - -<%Response.Redirect("test1.html")%> - diff --git a/lib/win32comext/axscript/Demos/client/asp/interrupt/test.html b/lib/win32comext/axscript/Demos/client/asp/interrupt/test.html deleted file mode 100644 index 0b7f0559..00000000 --- a/lib/win32comext/axscript/Demos/client/asp/interrupt/test.html +++ /dev/null @@ -1,10 +0,0 @@ - - - -GOT There - - - - diff --git a/lib/win32comext/axscript/Demos/client/asp/interrupt/test1.asp b/lib/win32comext/axscript/Demos/client/asp/interrupt/test1.asp deleted file mode 100644 index a936dd99..00000000 --- a/lib/win32comext/axscript/Demos/client/asp/interrupt/test1.asp +++ /dev/null @@ -1,6 +0,0 @@ -<%@ language =Python%> - - -<%Response.Redirect("test.html")%> - - diff --git a/lib/win32comext/axscript/Demos/client/asp/interrupt/test1.html b/lib/win32comext/axscript/Demos/client/asp/interrupt/test1.html deleted file mode 100644 index 6f1d9b2f..00000000 --- a/lib/win32comext/axscript/Demos/client/asp/interrupt/test1.html +++ /dev/null @@ -1,11 +0,0 @@ - - - -GOT HERE - - - - - diff --git a/lib/win32comext/axscript/Demos/client/asp/tut1.asp b/lib/win32comext/axscript/Demos/client/asp/tut1.asp deleted file mode 100644 index dcbb047c..00000000 --- a/lib/win32comext/axscript/Demos/client/asp/tut1.asp +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/MarqueeText1.htm b/lib/win32comext/axscript/Demos/client/ie/MarqueeText1.htm deleted file mode 100644 index 85a676f8..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/MarqueeText1.htm +++ /dev/null @@ -1,25 +0,0 @@ - - - - Internet Workshop - - - - -

    -
    -

    Python AX Script Engine -
    Demo using the Marquee Control -
    Mark Hammond. - -

    This is really quite a boring demo, as the Marquee control does everything. However, there is Python code behind the buttons that change the speed. This code is all of 2 lines per button!!! - -

    For more information on Python as an ActiveX scripting language, see - -

    Python -
    http://www.python.org - -
    - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/calc.htm b/lib/win32comext/axscript/Demos/client/ie/calc.htm deleted file mode 100644 index 7c7fb48d..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/calc.htm +++ /dev/null @@ -1,116 +0,0 @@ - -Python Script sample: Calculator - - - - -

    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Calculator

    - -
    - diff --git a/lib/win32comext/axscript/Demos/client/ie/dbgtest.htm b/lib/win32comext/axscript/Demos/client/ie/dbgtest.htm deleted file mode 100644 index 7ee9468d..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/dbgtest.htm +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/demo.htm b/lib/win32comext/axscript/Demos/client/ie/demo.htm deleted file mode 100644 index a828bac2..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/demo.htm +++ /dev/null @@ -1,26 +0,0 @@ - - -Python AXScript Demos - - - - - - - - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/demo_check.htm b/lib/win32comext/axscript/Demos/client/ie/demo_check.htm deleted file mode 100644 index f0e09f9d..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/demo_check.htm +++ /dev/null @@ -1,42 +0,0 @@ - -

    Engine Registration

    - - - -

    The Python ActiveX Scripting Engine is not currently registered.

    - -

    Due to a privacy -concern discovered in the engine, the use of Python inside IE has been disabled.

    - -Before any of the supplied demos will work, the engine must be successfully registered. - -

    To install a version of the engine, that does work with IE, you can execute the Python program -win32com\axscript\client\pyscript_rexec.py must be run. You can either do this manually, or follow the instructions below.

    - -

    Register the engine now!

    - -

    If you have read about the privacy -concern and still wish to register the engine, just follow the process outlined below:

    -
      -
    1. Click on the link below -
    2. A dialog will be presented asking if the file should be opened or saved to disk. Select "Open it". -
    3. A Console program will briefly open, while the server is registered. -
    - -

    Register the engine now - -

    Checking the registration

    -After the registration is complete, simply hit the Reload button. If the -registration was successful, the page will change to the Python/AvtiveX Demo Page. - - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/demo_intro.htm b/lib/win32comext/axscript/Demos/client/ie/demo_intro.htm deleted file mode 100644 index b8c811d8..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/demo_intro.htm +++ /dev/null @@ -1,38 +0,0 @@ - - - -

    - Python ActiveX Scripting Demonstation - -

    - -

    Congratulations on installing the Python ActiveX Scripting Engine

    - -

    Be warned that there is a privacy -concern with this engine. Please read this information, including how to disable the feature.

    - - -

    Object model

    -

    Except as described below, the object module exposed should be similar to that exposed -by Visual Basic, etc. Due to the nature of ActiveX Scripting, the details for each -host are different, but Python should work "correctly". - -

    The object model exposed via Python for MSIE is not as seamless as VB. The biggest limitation is -the concept of a "local" namespace. For example, in VB, you can -code text="Hi there", but in Python, you must code -MyForm.ThisButton.Text="Hi There". See the foo2 sample -for futher details. - -

    Known bugs and problems

    -
      -
    • This release seems to have broken Aaron's mouse-trace sample. No idea why, and Im supposed to be looking into it. -

    • Builtin objects such as MARQUEE are giving me grief. Objects accessed via forms are generally -no problem. -

    • If you are trying to use Python with the Windows Scripting Host, note that -.pys files are not correct registered - you will need to explicitely -specify either cscript.exe or wscript.exe on the command line. -

    - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/demo_menu.htm b/lib/win32comext/axscript/Demos/client/ie/demo_menu.htm deleted file mode 100644 index ba23a434..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/demo_menu.htm +++ /dev/null @@ -1,16 +0,0 @@ - - -

    Scripting Demos

    -

    An Introduction to the -scripting engine. - -

    The Calculator Demo is a very -cool sample written by Aaron Watters. - -

    Mouse track is another of -Aaron's samples, and shows how fast the Python engine is! - -

    The foo2 sample is mainly used -for debugging and testing, but does show some forms in action. - - diff --git a/lib/win32comext/axscript/Demos/client/ie/docwrite.htm b/lib/win32comext/axscript/Demos/client/ie/docwrite.htm deleted file mode 100644 index c95b7902..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/docwrite.htm +++ /dev/null @@ -1,25 +0,0 @@ - - -A page generated by Python - - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/foo2.htm b/lib/win32comext/axscript/Demos/client/ie/foo2.htm deleted file mode 100644 index d5e0c4a6..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/foo2.htm +++ /dev/null @@ -1,105 +0,0 @@ - - - - - - - -

    The caption on the first button is set by the Window Load code. Clicking -that button changes the text in the first edit box. - -

    The second button changes its own text when clicked. - -

    The fourth button calls a global function, defined in the global 'script' scope, -rather than the 'MyForm' scope. - -

    - - - - - - - - - - - -

    -

    -And here is a second form -

    -

    - - - - - - - -

    - - - - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/form.htm b/lib/win32comext/axscript/Demos/client/ie/form.htm deleted file mode 100644 index 97a239f3..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/form.htm +++ /dev/null @@ -1,25 +0,0 @@ - - - -
    - Name
    - Address
    - - - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/marqueeDemo.htm b/lib/win32comext/axscript/Demos/client/ie/marqueeDemo.htm deleted file mode 100644 index 33847c1d..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/marqueeDemo.htm +++ /dev/null @@ -1,60 +0,0 @@ - - - - Internet Workshop - - - - -

    -
    -

    Marquee Demo - -

    - - - - - - - - - - -

    - - - - - - - - -

      -


    -Notes: -

    - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/mousetrack.htm b/lib/win32comext/axscript/Demos/client/ie/mousetrack.htm deleted file mode 100644 index d307a4a6..00000000 --- a/lib/win32comext/axscript/Demos/client/ie/mousetrack.htm +++ /dev/null @@ -1,83 +0,0 @@ - - -Python Scripting sample: Mouse tracking - - - - -
    - - -
     
    -
    - -

    -A mouse tracking demo. Move the mouse over the image above... - - - -

    - - - - - - diff --git a/lib/win32comext/axscript/Demos/client/ie/pycom_blowing.gif b/lib/win32comext/axscript/Demos/client/ie/pycom_blowing.gif deleted file mode 100644 index 0d65a292..00000000 Binary files a/lib/win32comext/axscript/Demos/client/ie/pycom_blowing.gif and /dev/null differ diff --git a/lib/win32comext/axscript/Demos/client/wsh/blank.pys b/lib/win32comext/axscript/Demos/client/wsh/blank.pys deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/win32comext/axscript/Demos/client/wsh/excel.pys b/lib/win32comext/axscript/Demos/client/wsh/excel.pys deleted file mode 100644 index 9c8fe751..00000000 --- a/lib/win32comext/axscript/Demos/client/wsh/excel.pys +++ /dev/null @@ -1,34 +0,0 @@ -#app=WScript.Application -#app._print_details_() # Use this to see what Python knows about a COM object. - -g_index = 1 -# A procedure, using a global. -def Show(desc, value = None): - global g_index # Need global for g_index, as I locally assign. - # No global needed to "xl" object, as only referenced. - # Also note "xl" is assigned later in the script - ie, Python is very late bound. - xl.Cells(g_index, 1).Value = desc - if value: xl.Cells(g_index, 2).Value = value - g_index = g_index + 1 - -xl = WScript.CreateObject("Excel.Application") -import sys - -xl.Visible = 1 -#xl.Workbooks().Add() # Excel versions before 98 -xl.Workbooks.Add() - -# Show the WScript properties. 
-Show("Application Friendly Name", WScript.Name) -Show("Application Version", WScript.Version) -Show("Application Context: Fully Qualified Name", WScript.FullName) -Show("Application Context: Path Only", WScript.Path) -Show("State of Interactive Mode", WScript.Interactive) - -Show("All script arguments:") -args = WScript.Arguments - -for i in range(0,args.Count()): - Show("Arg %d" % i, args(i)) - - diff --git a/lib/win32comext/axscript/Demos/client/wsh/registry.pys b/lib/win32comext/axscript/Demos/client/wsh/registry.pys deleted file mode 100644 index 2d9d1e77..00000000 --- a/lib/win32comext/axscript/Demos/client/wsh/registry.pys +++ /dev/null @@ -1,45 +0,0 @@ -""" Windows Script Host Sample Script -' Ported to Python -' -' ------------------------------------------------------------------------ -' Copyright (C) 1996 Microsoft Corporation -' -' You have a royalty-free right to use, modify, reproduce and distribute -' the Sample Application Files (and/or any modified version) in any way -' you find useful, provided that you agree that Microsoft has no warranty, -' obligations or liability for any Sample Application Files. -' ------------------------------------------------------------------------ -' -' This sample demonstrates how to write/delete from the registry. 
-""" - -WshShell = WScript.CreateObject("WScript.Shell") - -WshShell.Popup("This script shows how to use registry related methods.", 2) - -WshShell.Popup("Create key HKCU\\Foo with value 'Top level key'") -WshShell.RegWrite("HKCU\\Foo\\", "Top level key") - -WshShell.Popup("Create key HKCU\\Foo\\Bar with value 'Second level key'") -WshShell.RegWrite( "HKCU\\Foo\\Bar\\", "Second level key") - -WshShell.Popup ("Set value HKCU\\Foo\\Value to REG_SZ 1") -WshShell.RegWrite( "HKCU\\Foo\\Value", 1) - -WshShell.Popup ("Set value HKCU\\Foo\\Bar to REG_DWORD 2") -WshShell.RegWrite ("HKCU\\Foo\\Bar", 2, "REG_DWORD") - -WshShell.Popup ("Set value HKCU\\Foo\\Bar to REG_EXPAND_SZ '3'") -WshShell.RegWrite ("HKCU\\Foo\\Bar\\Baz", "%SystemRoot%\\Foo") - -WshShell.Popup ("Delete value HKCU\\Foo\\Bar\\Baz") -WshShell.RegDelete ("HKCU\\Foo\\Bar\\Baz") - -WshShell.Popup ("Delete key HKCU\\Foo\\Bar") -WshShell.RegDelete ("HKCU\\Foo\\Bar\\") - -WshShell.Popup ("Delete key HKCU\\Foo") -WshShell.RegDelete ("HKCU\\Foo\\") - -WScript.Echo ("Done") - diff --git a/lib/win32comext/axscript/Demos/client/wsh/test.pys b/lib/win32comext/axscript/Demos/client/wsh/test.pys deleted file mode 100644 index 4038321e..00000000 --- a/lib/win32comext/axscript/Demos/client/wsh/test.pys +++ /dev/null @@ -1,15 +0,0 @@ -# Testall - test core AX support. - -# Test "Restricted Execution" (ie, IObjectSafety). -# This will fail if in a "restricted execution" environment, but -# will silenty do nothing of not restricted. This same line in an MSIE -# script would cause an exception. 
-print("Importing win32api...") -import win32api -if 1==1: - print("Hi") - -WScript.Echo("Hello from WScript") - -#fail - diff --git a/lib/win32comext/axscript/__init__.py b/lib/win32comext/axscript/__init__.py deleted file mode 100644 index 8d5db8b9..00000000 --- a/lib/win32comext/axscript/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# See if we have a special directory for the binaries (for developers) -import win32com - -win32com.__PackageSupportBuildPath__(__path__) diff --git a/lib/win32comext/axscript/asputil.py b/lib/win32comext/axscript/asputil.py deleted file mode 100644 index b61c3537..00000000 --- a/lib/win32comext/axscript/asputil.py +++ /dev/null @@ -1,13 +0,0 @@ -"""A utility module for ASP (Active Server Pages on MS Internet Info Server. - -Contains: - iif -- A utility function to avoid using "if" statements in ASP <% tags - -""" - - -def iif(cond, t, f): - if cond: - return t - else: - return f diff --git a/lib/win32comext/axscript/axscript.pyd b/lib/win32comext/axscript/axscript.pyd deleted file mode 100644 index a29615a5..00000000 Binary files a/lib/win32comext/axscript/axscript.pyd and /dev/null differ diff --git a/lib/win32comext/axscript/client/__init__.py b/lib/win32comext/axscript/client/__init__.py deleted file mode 100644 index 7858ad6b..00000000 --- a/lib/win32comext/axscript/client/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# This is a Python package diff --git a/lib/win32comext/axscript/client/debug.py b/lib/win32comext/axscript/client/debug.py deleted file mode 100644 index 2e305f0b..00000000 --- a/lib/win32comext/axscript/client/debug.py +++ /dev/null @@ -1,240 +0,0 @@ -import os -import sys - -import pythoncom -import win32api -import win32com.client.connect -import win32com.server.util -import winerror -from win32com.axdebug import adb, axdebug, contexts, documents, gateways, stackframe -from win32com.axdebug.codecontainer import SourceCodeContainer -from win32com.axdebug.util import _wrap, _wrap_remove -from win32com.client.util import 
Enumerator -from win32com.server.exception import COMException -from win32com.util import IIDToInterfaceName - -from .framework import trace - -try: - os.environ["DEBUG_AXDEBUG"] - debuggingTrace = 1 # Should we print "trace" output? -except KeyError: - debuggingTrace = 0 - - -def trace(*args): - """A function used instead of "print" for debugging output.""" - if not debuggingTrace: - return - print(win32api.GetCurrentThreadId(), end=" ") - for arg in args: - print(arg, end=" ") - print() - - -# Note that the DebugManager is not a COM gateway class for the -# debugger - but it does create and manage them. -class DebugManager: - _debugger_interfaces_ = [axdebug.IID_IActiveScriptDebug] - - def __init__(self, scriptEngine): - self.scriptEngine = scriptEngine - self.adb = adb.Debugger() - self.rootNode = None - self.debugApplication = None - self.ccProvider = documents.CodeContainerProvider() - try: - self.scriptSiteDebug = scriptEngine.GetScriptSite( - axdebug.IID_IActiveScriptSiteDebug - ) - except pythoncom.com_error: - # No debugger interface (ie, dumb host). Do the extra work. - trace("Scripting site has no debugger interface") - self.scriptSiteDebug = None - # Get the debug application object. - self.debugApplication = None - if self.scriptSiteDebug is not None: - # Spec says that we should test for this, and if it fails revert to - # PDM application. - try: - self.debugApplication = self.scriptSiteDebug.GetApplication() - self.rootNode = self.scriptSiteDebug.GetRootApplicationNode() - except pythoncom.com_error: - self.debugApplication = None - - if self.debugApplication is None: - # Try to get/create the default one - # NOTE - Dont catch exceptions here - let the parent do it, - # so it knows debug support is available. 
- pdm = pythoncom.CoCreateInstance( - axdebug.CLSID_ProcessDebugManager, - None, - pythoncom.CLSCTX_ALL, - axdebug.IID_IProcessDebugManager, - ) - self.debugApplication = pdm.GetDefaultApplication() - self.rootNode = self.debugApplication.GetRootNode() - - assert ( - self.debugApplication is not None - ), "Need to have a DebugApplication object by now!" - self.activeScriptDebug = None - - if self.debugApplication is not None: - self.adb.AttachApp(self.debugApplication, self.ccProvider) - self.codeContainers = {} - self.activeScriptDebug = _wrap( - ActiveScriptDebug(self, self.codeContainers), axdebug.IID_IActiveScriptDebug - ) - - def Close(self): - # Called by the language engine when it receives a close request - if self.activeScriptDebug is not None: - _wrap_remove(self.activeScriptDebug) - self.activeScriptDebug = None - self.scriptEngine = None - self.rootNode = None - self.debugApplication = None - self.scriptSiteDebug = None - if self.ccProvider is not None: - self.ccProvider.Close() - self.ccProvider = None - self.codeContainers = {} - if self.adb: - self.adb.CloseApp() - self.adb = None - - # print "Close complete" - - def IsAnyHost(self): - "Do we have _any_ debugging interfaces installed?" - return self.debugApplication is not None - - def IsSimpleHost(self): - return self.scriptSiteDebug is None - - def HandleRuntimeError(self): - """Called by the engine when a runtime error occurs. If we have a debugger, - we let it know. - - The result is a boolean which indicates if the error handler should call - IActiveScriptSite::OnScriptError() - """ - # if self.IsAnyHost: - # site = _wrap(self, axdebug.IID_IActiveScriptSite) - # breakResume, errorResume, fCallOnError = self.debugApplication(activeScriptErrorDebug, site) - # Do something with these! 
- # else: - trace("HandleRuntimeError") - fCallOnError = 1 - return fCallOnError - - def _query_interface_for_debugger_(self, iid): - if iid in self._debugger_interfaces_: - return self.activeScriptDebug - trace("DebugManager QI - unknown IID", iid) - return 0 - - def OnEnterScript(self): - trace("OnEnterScript") - try: - 1 / 0 - except: - # Bit of a hack - reach into engine. - baseFrame = sys.exc_info()[2].tb_frame.f_back - self.adb.SetupAXDebugging(baseFrame) - - def OnLeaveScript(self): - trace("OnLeaveScript") - self.adb.ResetAXDebugging() - - def AddScriptBlock(self, codeBlock): - # If we dont have debugging support, dont bother. - cc = DebugCodeBlockContainer(codeBlock, self.scriptSiteDebug) - if self.IsSimpleHost(): - document = documents.DebugDocumentText(cc) - document = _wrap(document, axdebug.IID_IDebugDocument) - provider = documents.DebugDocumentProvider(document) - provider = _wrap(provider, axdebug.IID_IDebugDocumentProvider) - cc.debugDocument = document - newNode = self.debugApplication.CreateApplicationNode() - newNode.SetDocumentProvider(provider) - newNode.Attach(self.rootNode) - else: - newNode = None # Managed by smart host. 
- self.codeContainers[cc.sourceContext] = cc - self.ccProvider.AddCodeContainer(cc, newNode) - - -class DebugCodeBlockContainer(SourceCodeContainer): - def __init__(self, codeBlock, site): - self.codeBlock = codeBlock - SourceCodeContainer.__init__( - self, - codeBlock.codeText, - codeBlock.GetFileName(), - codeBlock.sourceContextCookie, - codeBlock.startLineNumber, - site, - ) - - def GetName(self, dnt): - if dnt == axdebug.DOCUMENTNAMETYPE_APPNODE: - return self.codeBlock.GetDisplayName() - elif dnt == axdebug.DOCUMENTNAMETYPE_TITLE: - return self.codeBlock.GetDisplayName() - # elif dnt==axdebug.DOCUMENTNAMETYPE_FILE_TAIL: - # elif dnt==axdebug.DOCUMENTNAMETYPE_URL: - else: - raise COMException(scode=winerror.S_FALSE) - - -class EnumDebugCodeContexts(gateways.EnumDebugCodeContexts): - def _wrap(self, ob): - return ob - - -class ActiveScriptDebug: - """The class which implements the IActiveScriptDebug interface for the Active Script engine. - - Only ever used by smart hosts. - """ - - _public_methods_ = [ - "GetScriptTextAttributes", - "GetScriptletTextAttributes", - "EnumCodeContextsOfPosition", - ] - _com_interfaces_ = [axdebug.IID_IActiveScriptDebug] - - def __init__(self, debugMgr, codeContainers): - self.debugMgr = debugMgr - self.scriptSiteDebug = debugMgr.scriptSiteDebug - self.codeContainers = codeContainers - - def _Close(self): - self.debugMgr = None - self.scriptSiteDebug = None - self.codeContainers = {} - - def _query_interface_(self, iid): - trace("DebuggerQI with", iid) - return _wrap(self.debugMgr.scriptEngine, iid) - - def GetScriptTextAttributes(self, code, delim, flags): - container = SourceCodeContainer(code, "") - return container.GetSyntaxColorAttributes() - - def GetScriptletTextAttributes(self, code, delim, flags): - trace("GetScriptletTextAttributes", code, delim, flags) - container = SourceCodeContainer(code, "") - return container.GetSyntaxColorAttributes() - - def EnumCodeContextsOfPosition(self, context, charOffset, numChars): - 
trace("EnumCodeContextsOfPosition", context, charOffset, numChars) - try: - context = self.codeContainers[context].GetCodeContextAtPosition(charOffset) - except KeyError: - raise COMException(scode=winerror.E_UNEXPECTED) - enum = EnumDebugCodeContexts([context]) - return _wrap(enum, axdebug.IID_IEnumDebugCodeContexts) diff --git a/lib/win32comext/axscript/client/error.py b/lib/win32comext/axscript/client/error.py deleted file mode 100644 index 1dfa54fa..00000000 --- a/lib/win32comext/axscript/client/error.py +++ /dev/null @@ -1,273 +0,0 @@ -"""Exception and error handling. - - This contains the core exceptions that the implementations should raise - as well as the IActiveScriptError interface code. - -""" - -import re -import sys -import traceback - -import pythoncom -import win32com.server.exception -import win32com.server.util -import winerror -from win32com.axscript import axscript - -debugging = 0 - - -def FormatForAX(text): - """Format a string suitable for an AX Host""" - # Replace all " with ', so it works OK in HTML (ie, ASP) - return ExpandTabs(AddCR(text)) - - -def ExpandTabs(text): - return re.sub("\t", " ", text) - - -def AddCR(text): - return re.sub("\n", "\r\n", text) - - -class IActiveScriptError: - """An implementation of IActiveScriptError - - The ActiveX Scripting host calls this client whenever we report - an exception to it. This interface provides the exception details - for the host to report to the user. - """ - - _com_interfaces_ = [axscript.IID_IActiveScriptError] - _public_methods_ = ["GetSourceLineText", "GetSourcePosition", "GetExceptionInfo"] - - def _query_interface_(self, iid): - print("IActiveScriptError QI - unknown IID", iid) - return 0 - - def _SetExceptionInfo(self, exc): - self.exception = exc - - def GetSourceLineText(self): - return self.exception.linetext - - def GetSourcePosition(self): - ctx = self.exception.sourceContext - # Zero based in the debugger (but our columns are too!) 
- return ( - ctx, - self.exception.lineno + self.exception.startLineNo - 1, - self.exception.colno, - ) - - def GetExceptionInfo(self): - return self.exception - - -class AXScriptException(win32com.server.exception.COMException): - """A class used as a COM exception. - - Note this has attributes which conform to the standard attributes - for COM exceptions, plus a few others specific to our IActiveScriptError - object. - """ - - def __init__(self, site, codeBlock, exc_type, exc_value, exc_traceback): - # set properties base class shares via base ctor... - win32com.server.exception.COMException.__init__( - self, - description="Unknown Exception", - scode=winerror.DISP_E_EXCEPTION, - source="Python ActiveX Scripting Engine", - ) - - # And my other values... - if codeBlock is None: - self.sourceContext = 0 - self.startLineNo = 0 - else: - self.sourceContext = codeBlock.sourceContextCookie - self.startLineNo = codeBlock.startLineNumber - self.linetext = "" - - self.__BuildFromException(site, exc_type, exc_value, exc_traceback) - - def __BuildFromException(self, site, type, value, tb): - if debugging: - import linecache - - linecache.clearcache() - try: - if issubclass(type, SyntaxError): - self._BuildFromSyntaxError(site, value, tb) - else: - self._BuildFromOther(site, type, value, tb) - except: # Error extracting traceback info!!! - traceback.print_exc() - # re-raise. - raise - - def _BuildFromSyntaxError(self, site, exc, tb): - value = exc.args - # All syntax errors should have a message as element 0 - try: - msg = value[0] - except: - msg = "Unknown Error (%s)" % (value,) - try: - (filename, lineno, offset, line) = value[1] - # Some of these may be None, which upsets us! 
- if offset is None: - offset = 0 - if line is None: - line = "" - except: - msg = "Unknown" - lineno = 0 - offset = 0 - line = "Unknown" - self.description = FormatForAX(msg) - self.lineno = lineno - self.colno = offset - 1 - self.linetext = ExpandTabs(line.rstrip()) - - def _BuildFromOther(self, site, exc_type, value, tb): - self.colno = -1 - self.lineno = 0 - if debugging: # Full traceback if debugging. - list = traceback.format_exception(exc_type, value, tb) - self.description = ExpandTabs("".join(list)) - return - # Run down the traceback list, looking for the first "" - # Hide traceback above this. In addition, keep going down - # looking for a "_*_" attribute, and below hide these also. - hide_names = [ - "r_import", - "r_reload", - "r_open", - ] # hide from these functions down in the traceback. - depth = None - tb_top = tb - while tb_top: - filename, lineno, name, line = self.ExtractTracebackInfo(tb_top, site) - if filename[:7] == " all items in the list are utf8 courtesy of Python magically - # > converting unicode to utf8 before compilation. - # but that is likely just confusion from early unicode days; - # Python isn't doing it, pywin32 probably was, so 'mbcs' would - # be the default encoding. We should never hit this these days - # anyway, but on py3k, we *never* will, and str objects there - # don't have a decode method... - if sys.version_info < (3,): - for i in range(len(bits)): - if type(bits[i]) is str: - # assert type(bits[i]) is str, type(bits[i]) - bits[i] = bits[i].decode("utf8") - - self.description = ExpandTabs("".join(bits)) - # Clear tracebacks etc. 
- tb = tb_top = tb_look = None - - def ExtractTracebackInfo(self, tb, site): - import linecache - - f = tb.tb_frame - lineno = tb.tb_lineno - co = f.f_code - filename = co.co_filename - name = co.co_name - line = linecache.getline(filename, lineno) - if not line: - try: - codeBlock = site.scriptCodeBlocks[filename] - except KeyError: - codeBlock = None - if codeBlock: - # Note: 'line' will now be unicode. - line = codeBlock.GetLineNo(lineno) - if line: - line = line.strip() - else: - line = None - return filename, lineno, name, line - - def __repr__(self): - return "AXScriptException Object with description:" + self.description - - -def ProcessAXScriptException(scriptingSite, debugManager, exceptionInstance): - """General function to handle any exception in AX code - - This function creates an instance of our IActiveScriptError interface, and - gives it to the host, along with out exception class. The host will - likely call back on the IActiveScriptError interface to get the source text - and other information not normally in COM exceptions. - """ - # traceback.print_exc() - instance = IActiveScriptError() - instance._SetExceptionInfo(exceptionInstance) - gateway = win32com.server.util.wrap(instance, axscript.IID_IActiveScriptError) - if debugManager: - fCallOnError = debugManager.HandleRuntimeError() - if not fCallOnError: - return None - - try: - result = scriptingSite.OnScriptError(gateway) - except pythoncom.com_error as details: - print("**OnScriptError failed:", details) - print("Exception description:'%s'" % (repr(exceptionInstance.description))) - print("Exception text:'%s'" % (repr(exceptionInstance.linetext))) - result = winerror.S_FALSE - - if result == winerror.S_OK: - # If the above returns NOERROR, it is assumed the error has been - # correctly registered and the value SCRIPT_E_REPORTED is returned. 
- ret = win32com.server.exception.COMException(scode=axscript.SCRIPT_E_REPORTED) - return ret - else: - # The error is taken to be unreported and is propagated up the call stack - # via the IDispatch::Invoke's EXCEPINFO parameter (hr returned is DISP_E_EXCEPTION. - return exceptionInstance diff --git a/lib/win32comext/axscript/client/framework.py b/lib/win32comext/axscript/client/framework.py deleted file mode 100644 index d7b56a7c..00000000 --- a/lib/win32comext/axscript/client/framework.py +++ /dev/null @@ -1,1270 +0,0 @@ -"""AXScript Client Framework - - This module provides a core framework for an ActiveX Scripting client. - Derived classes actually implement the AX Client itself, including the - scoping rules, etc. - - There are classes defined for the engine itself, and for ScriptItems -""" -import re -import sys - -import pythoncom # Need simple connection point support -import win32api -import win32com.client.connect -import win32com.server.util -import winerror -from win32com.axscript import axscript - - -def RemoveCR(text): - # No longer just "RemoveCR" - should be renamed to - # FixNewlines, or something. Idea is to fix arbitary newlines into - # something Python can compile... - return re.sub("(\r\n)|\r|(\n\r)", "\n", text) - - -SCRIPTTEXT_FORCEEXECUTION = -2147483648 # 0x80000000 -SCRIPTTEXT_ISEXPRESSION = 0x00000020 -SCRIPTTEXT_ISPERSISTENT = 0x00000040 - -from win32com.server.exception import Exception, IsCOMServerException - -from . 
import error # ax.client.error - -state_map = { - axscript.SCRIPTSTATE_UNINITIALIZED: "SCRIPTSTATE_UNINITIALIZED", - axscript.SCRIPTSTATE_INITIALIZED: "SCRIPTSTATE_INITIALIZED", - axscript.SCRIPTSTATE_STARTED: "SCRIPTSTATE_STARTED", - axscript.SCRIPTSTATE_CONNECTED: "SCRIPTSTATE_CONNECTED", - axscript.SCRIPTSTATE_DISCONNECTED: "SCRIPTSTATE_DISCONNECTED", - axscript.SCRIPTSTATE_CLOSED: "SCRIPTSTATE_CLOSED", -} - - -def profile(fn, *args): - import profile - - prof = profile.Profile() - try: - # roll on 1.6 :-) - # return prof.runcall(fn, *args) - return prof.runcall(*(fn,) + args) - finally: - import pstats - - # Damn - really want to send this to Excel! - # width, list = pstats.Stats(prof).strip_dirs().get_print_list([]) - pstats.Stats(prof).strip_dirs().sort_stats("time").print_stats() - - -class SafeOutput: - softspace = 1 - - def __init__(self, redir=None): - if redir is None: - redir = sys.stdout - self.redir = redir - - def write(self, message): - try: - self.redir.write(message) - except: - win32api.OutputDebugString(message) - - def flush(self): - pass - - def close(self): - pass - - -# Make sure we have a valid sys.stdout/stderr, otherwise out -# print and trace statements may raise an exception -def MakeValidSysOuts(): - if not isinstance(sys.stdout, SafeOutput): - sys.stdout = sys.stderr = SafeOutput() - # and for the sake of working around something I can't understand... - # prevent keyboard interrupts from killing IIS - import signal - - def noOp(a, b): - # it would be nice to get to the bottom of this, so a warning to - # the debug console can't hurt. - print("WARNING: Ignoring keyboard interrupt from ActiveScripting engine") - - # If someone else has already redirected, then assume they know what they are doing! - if signal.getsignal(signal.SIGINT) == signal.default_int_handler: - try: - signal.signal(signal.SIGINT, noOp) - except ValueError: - # Not the main thread - can't do much. 
- pass - - -def trace(*args): - """A function used instead of "print" for debugging output.""" - for arg in args: - print(arg, end=" ") - print() - - -def RaiseAssert(scode, desc): - """A debugging function that raises an exception considered an "Assertion".""" - print("**************** ASSERTION FAILED *******************") - print(desc) - raise Exception(desc, scode) - - -class AXScriptCodeBlock: - """An object which represents a chunk of code in an AX Script""" - - def __init__(self, name, codeText, sourceContextCookie, startLineNumber, flags): - self.name = name - self.codeText = codeText - self.codeObject = None - self.sourceContextCookie = sourceContextCookie - self.startLineNumber = startLineNumber - self.flags = flags - self.beenExecuted = 0 - - def GetFileName(self): - # Gets the "file name" for Python - uses <...> so Python doesnt think - # it is a real file. - return "<%s>" % self.name - - def GetDisplayName(self): - return self.name - - def GetLineNo(self, no): - pos = -1 - for i in range(no - 1): - pos = self.codeText.find("\n", pos + 1) - if pos == -1: - pos = len(self.codeText) - epos = self.codeText.find("\n", pos + 1) - if epos == -1: - epos = len(self.codeText) - return self.codeText[pos + 1 : epos].strip() - - -class Event: - """A single event for a ActiveX named object.""" - - def __init__(self): - self.name = "" - - def __repr__(self): - return "<%s at %d: %s>" % (self.__class__.__name__, id(self), self.name) - - def Reset(self): - pass - - def Close(self): - pass - - def Build(self, typeinfo, funcdesc): - self.dispid = funcdesc[0] - self.name = typeinfo.GetNames(self.dispid)[0] - - -# print "Event.Build() - Event Name is ", self.name - - -class EventSink: - """A set of events against an item. 
Note this is a COM client for connection points.""" - - _public_methods_ = [] - - def __init__(self, myItem, coDispatch): - self.events = {} - self.connection = None - self.coDispatch = coDispatch - self.myScriptItem = myItem - self.myInvokeMethod = myItem.GetEngine().ProcessScriptItemEvent - self.iid = None - - def Reset(self): - self.Disconnect() - - def Close(self): - self.iid = None - self.myScriptItem = None - self.myInvokeMethod = None - self.coDispatch = None - for event in self.events.values(): - event.Reset() - self.events = {} - self.Disconnect() - - # COM Connection point methods. - def _query_interface_(self, iid): - if iid == self.iid: - return win32com.server.util.wrap(self) - - def _invoke_(self, dispid, lcid, wFlags, args): - try: - event = self.events[dispid] - except: - raise Exception(scode=winerror.DISP_E_MEMBERNOTFOUND) - # print "Invoke for ", event, "on", self.myScriptItem, " - calling", self.myInvokeMethod - return self.myInvokeMethod(self.myScriptItem, event, lcid, wFlags, args) - - def GetSourceTypeInfo(self, typeinfo): - """Gets the typeinfo for the Source Events for the passed typeinfo""" - attr = typeinfo.GetTypeAttr() - cFuncs = attr[6] - typeKind = attr[5] - if typeKind not in [pythoncom.TKIND_COCLASS, pythoncom.TKIND_INTERFACE]: - RaiseAssert( - winerror.E_UNEXPECTED, "The typeKind of the object is unexpected" - ) - cImplType = attr[8] - for i in range(cImplType): - # Look for the [source, default] interface on the coclass - # that isn't marked as restricted. - flags = typeinfo.GetImplTypeFlags(i) - flagsNeeded = ( - pythoncom.IMPLTYPEFLAG_FDEFAULT | pythoncom.IMPLTYPEFLAG_FSOURCE - ) - if (flags & (flagsNeeded | pythoncom.IMPLTYPEFLAG_FRESTRICTED)) == ( - flagsNeeded - ): - # Get the handle to the implemented interface. - href = typeinfo.GetRefTypeOfImplType(i) - return typeinfo.GetRefTypeInfo(href) - - def BuildEvents(self): - # See if it is an extender object. 
- try: - mainTypeInfo = self.coDispatch.QueryInterface( - axscript.IID_IProvideMultipleClassInfo - ) - isMulti = 1 - numTypeInfos = mainTypeInfo.GetMultiTypeInfoCount() - except pythoncom.com_error: - isMulti = 0 - numTypeInfos = 1 - try: - mainTypeInfo = self.coDispatch.QueryInterface( - pythoncom.IID_IProvideClassInfo - ) - except pythoncom.com_error: - numTypeInfos = 0 - # Create an event handler for the item. - for item in range(numTypeInfos): - if isMulti: - typeinfo, flags = mainTypeInfo.GetInfoOfIndex( - item, axscript.MULTICLASSINFO_GETTYPEINFO - ) - else: - typeinfo = mainTypeInfo.GetClassInfo() - sourceType = self.GetSourceTypeInfo(typeinfo) - cFuncs = 0 - if sourceType: - attr = sourceType.GetTypeAttr() - self.iid = attr[0] - cFuncs = attr[6] - for i in range(cFuncs): - funcdesc = sourceType.GetFuncDesc(i) - event = Event() - event.Build(sourceType, funcdesc) - self.events[event.dispid] = event - - def Connect(self): - if self.connection is not None or self.iid is None: - return - # trace("Connect for sink item", self.myScriptItem.name, "with IID",str(self.iid)) - self.connection = win32com.client.connect.SimpleConnection( - self.coDispatch, self, self.iid - ) - - def Disconnect(self): - if self.connection: - try: - self.connection.Disconnect() - except pythoncom.com_error: - pass # Ignore disconnection errors. 
- self.connection = None - - -class ScriptItem: - """An item (or subitem) that is exposed to the ActiveX script""" - - def __init__(self, parentItem, name, dispatch, flags): - self.parentItem = parentItem - self.dispatch = dispatch - self.name = name - self.flags = flags - self.eventSink = None - self.subItems = {} - self.createdConnections = 0 - self.isRegistered = 0 - - # trace("Creating ScriptItem", name, "of parent", parentItem,"with dispatch", dispatch) - - def __repr__(self): - flagsDesc = "" - if self.flags is not None and self.flags & axscript.SCRIPTITEM_GLOBALMEMBERS: - flagsDesc = "/Global" - return "<%s at %d: %s%s>" % ( - self.__class__.__name__, - id(self), - self.name, - flagsDesc, - ) - - def _dump_(self, level): - flagDescs = [] - if self.flags is not None and self.flags & axscript.SCRIPTITEM_GLOBALMEMBERS: - flagDescs.append("GLOBAL!") - if self.flags is None or self.flags & axscript.SCRIPTITEM_ISVISIBLE == 0: - flagDescs.append("NOT VISIBLE") - if self.flags is not None and self.flags & axscript.SCRIPTITEM_ISSOURCE: - flagDescs.append("EVENT SINK") - if self.flags is not None and self.flags & axscript.SCRIPTITEM_CODEONLY: - flagDescs.append("CODE ONLY") - print(" " * level, "Name=", self.name, ", flags=", "/".join(flagDescs), self) - for subItem in self.subItems.values(): - subItem._dump_(level + 1) - - def Reset(self): - self.Disconnect() - if self.eventSink: - self.eventSink.Reset() - self.isRegistered = 0 - for subItem in self.subItems.values(): - subItem.Reset() - - def Close(self): - self.Reset() - self.dispatch = None - self.parentItem = None - if self.eventSink: - self.eventSink.Close() - self.eventSink = None - for subItem in self.subItems.values(): - subItem.Close() - self.subItems = [] - self.createdConnections = 0 - - def Register(self): - if self.isRegistered: - return - # Get the type info to use to build this item. 
- # if not self.dispatch: - # id = self.parentItem.dispatch.GetIDsOfNames(self.name) - # print "DispID of me is", id - # result = self.parentItem.dispatch.Invoke(id, 0, pythoncom.DISPATCH_PROPERTYGET,1) - # if type(result)==pythoncom.TypeIIDs[pythoncom.IID_IDispatch]: - # self.dispatch = result - # else: - # print "*** No dispatch" - # return - # print "**** Made dispatch" - self.isRegistered = 1 - # Register the sub-items. - for item in self.subItems.values(): - if not item.isRegistered: - item.Register() - - def IsGlobal(self): - return self.flags & axscript.SCRIPTITEM_GLOBALMEMBERS - - def IsVisible(self): - return ( - self.flags & (axscript.SCRIPTITEM_ISVISIBLE | axscript.SCRIPTITEM_ISSOURCE) - ) != 0 - - def GetEngine(self): - item = self - while item.parentItem.__class__ == self.__class__: - item = item.parentItem - return item.parentItem - - def _GetFullItemName(self): - ret = self.name - if self.parentItem: - try: - ret = self.parentItem._GetFullItemName() + "." + ret - except AttributeError: - pass - return ret - - def GetSubItemClass(self): - return self.__class__ - - def GetSubItem(self, name): - return self.subItems[name.lower()] - - def GetCreateSubItem(self, parentItem, name, dispatch, flags): - keyName = name.lower() - try: - rc = self.subItems[keyName] - # No changes allowed to existing flags. - if not rc.flags is None and not flags is None and rc.flags != flags: - raise Exception(scode=winerror.E_INVALIDARG) - # Existing item must not have a dispatch. - if not rc.dispatch is None and not dispatch is None: - raise Exception(scode=winerror.E_INVALIDARG) - rc.flags = flags # Setup the real flags. - rc.dispatch = dispatch - except KeyError: - rc = self.subItems[keyName] = self.GetSubItemClass()( - parentItem, name, dispatch, flags - ) - return rc - - # if self.dispatch is None: - # RaiseAssert(winerror.E_UNEXPECTED, "??") - - def CreateConnections(self): - # Create (but do not connect to) the connection points. 
- if self.createdConnections: - return - self.createdConnections = 1 - # Nothing to do unless this is an event source - # This flags means self, _and_ children, are connectable. - if self.flags & axscript.SCRIPTITEM_ISSOURCE: - self.BuildEvents() - self.FindBuildSubItemEvents() - - def Connect(self): - # Connect to the already created connection points. - if self.eventSink: - self.eventSink.Connect() - for subItem in self.subItems.values(): - subItem.Connect() - - def Disconnect(self): - # Disconnect from the connection points. - if self.eventSink: - self.eventSink.Disconnect() - for subItem in self.subItems.values(): - subItem.Disconnect() - - def BuildEvents(self): - if self.eventSink is not None or self.dispatch is None: - RaiseAssert( - winerror.E_UNEXPECTED, - "Item already has built events, or no dispatch available?", - ) - - # trace("BuildEvents for named item", self._GetFullItemName()) - self.eventSink = EventSink(self, self.dispatch) - self.eventSink.BuildEvents() - - def FindBuildSubItemEvents(self): - # Called during connection to event source. Seeks out and connects to - # all children. 
As per the AX spec, this is not recursive - # (ie, children sub-items are not seeked) - try: - multiTypeInfo = self.dispatch.QueryInterface( - axscript.IID_IProvideMultipleClassInfo - ) - numTypeInfos = multiTypeInfo.GetMultiTypeInfoCount() - except pythoncom.com_error: - return - for item in range(numTypeInfos): - typeinfo, flags = multiTypeInfo.GetInfoOfIndex( - item, axscript.MULTICLASSINFO_GETTYPEINFO - ) - defaultType = self.GetDefaultSourceTypeInfo(typeinfo) - index = 0 - while 1: - try: - fdesc = defaultType.GetFuncDesc(index) - except pythoncom.com_error: - break # No more funcs - index = index + 1 - dispid = fdesc[0] - funckind = fdesc[3] - invkind = fdesc[4] - elemdesc = fdesc[8] - funcflags = fdesc[9] - try: - isSubObject = ( - not (funcflags & pythoncom.FUNCFLAG_FRESTRICTED) - and funckind == pythoncom.FUNC_DISPATCH - and invkind == pythoncom.INVOKE_PROPERTYGET - and elemdesc[0][0] == pythoncom.VT_PTR - and elemdesc[0][1][0] == pythoncom.VT_USERDEFINED - ) - except: - isSubObject = 0 - if isSubObject: - try: - # We found a sub-object. - names = typeinfo.GetNames(dispid) - result = self.dispatch.Invoke( - dispid, 0x0, pythoncom.DISPATCH_PROPERTYGET, 1 - ) - # IE has an interesting problem - there are lots of synonyms for the same object. Eg - # in a simple form, "window.top", "window.window", "window.parent", "window.self" - # all refer to the same object. Our event implementation code does not differentiate - # eg, "window_onload" will fire for *all* objects named "window". Thus, - # "window" and "window.window" will fire the same event handler :( - # One option would be to check if the sub-object is indeed the - # parent object - however, this would stop "top_onload" from firing, - # as no event handler for "top" would work. - # I think we simply need to connect to a *single* event handler. - # As use in IE is deprecated, I am not solving this now. 
- if type(result) == pythoncom.TypeIIDs[pythoncom.IID_IDispatch]: - name = names[0] - subObj = self.GetCreateSubItem( - self, name, result, axscript.SCRIPTITEM_ISVISIBLE - ) - # print "subobj", name, "flags are", subObj.flags, "mydisp=", self.dispatch, "result disp=", result, "compare=", self.dispatch==result - subObj.BuildEvents() - subObj.Register() - except pythoncom.com_error: - pass - - def GetDefaultSourceTypeInfo(self, typeinfo): - """Gets the typeinfo for the Default Dispatch for the passed typeinfo""" - attr = typeinfo.GetTypeAttr() - cFuncs = attr[6] - typeKind = attr[5] - if typeKind not in [pythoncom.TKIND_COCLASS, pythoncom.TKIND_INTERFACE]: - RaiseAssert( - winerror.E_UNEXPECTED, "The typeKind of the object is unexpected" - ) - cImplType = attr[8] - for i in range(cImplType): - # Look for the [source, default] interface on the coclass - # that isn't marked as restricted. - flags = typeinfo.GetImplTypeFlags(i) - if ( - flags - & ( - pythoncom.IMPLTYPEFLAG_FDEFAULT - | pythoncom.IMPLTYPEFLAG_FSOURCE - | pythoncom.IMPLTYPEFLAG_FRESTRICTED - ) - ) == pythoncom.IMPLTYPEFLAG_FDEFAULT: - # Get the handle to the implemented interface. - href = typeinfo.GetRefTypeOfImplType(i) - defTypeInfo = typeinfo.GetRefTypeInfo(href) - attr = defTypeInfo.GetTypeAttr() - typeKind = attr[5] - typeFlags = attr[11] - if ( - typeKind == pythoncom.TKIND_INTERFACE - and typeFlags & pythoncom.TYPEFLAG_FDUAL - ): - # Get corresponding Disp interface - # -1 is a special value which does this for us. 
- href = typeinfo.GetRefTypeOfImplType(-1) - return defTypeInfo.GetRefTypeInfo(href) - else: - return defTypeInfo - - -IActiveScriptMethods = [ - "SetScriptSite", - "GetScriptSite", - "SetScriptState", - "GetScriptState", - "Close", - "AddNamedItem", - "AddTypeLib", - "GetScriptDispatch", - "GetCurrentScriptThreadID", - "GetScriptThreadID", - "GetScriptThreadState", - "InterruptScriptThread", - "Clone", -] -IActiveScriptParseMethods = ["InitNew", "AddScriptlet", "ParseScriptText"] -IObjectSafetyMethods = ["GetInterfaceSafetyOptions", "SetInterfaceSafetyOptions"] - -# ActiveScriptParseProcedure is a new interface with IIS4/IE4. -IActiveScriptParseProcedureMethods = ["ParseProcedureText"] - - -class COMScript: - """An ActiveX Scripting engine base class. - - This class implements the required COM interfaces for ActiveX scripting. - """ - - _public_methods_ = ( - IActiveScriptMethods - + IActiveScriptParseMethods - + IObjectSafetyMethods - + IActiveScriptParseProcedureMethods - ) - _com_interfaces_ = [ - axscript.IID_IActiveScript, - axscript.IID_IActiveScriptParse, - axscript.IID_IObjectSafety, - ] # , axscript.IID_IActiveScriptParseProcedure] - - def __init__(self): - # Make sure we can print/trace wihout an exception! 
- MakeValidSysOuts() - # trace("AXScriptEngine object created", self) - self.baseThreadId = -1 - self.debugManager = None - self.threadState = axscript.SCRIPTTHREADSTATE_NOTINSCRIPT - self.scriptState = axscript.SCRIPTSTATE_UNINITIALIZED - self.scriptSite = None - self.safetyOptions = 0 - self.lcid = 0 - self.subItems = {} - self.scriptCodeBlocks = {} - - def _query_interface_(self, iid): - if self.debugManager: - return self.debugManager._query_interface_for_debugger_(iid) - # trace("ScriptEngine QI - unknown IID", iid) - return 0 - - # IActiveScriptParse - def InitNew(self): - if self.scriptSite is not None: - self.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED) - - def AddScriptlet( - self, - defaultName, - code, - itemName, - subItemName, - eventName, - delimiter, - sourceContextCookie, - startLineNumber, - ): - # trace ("AddScriptlet", defaultName, code, itemName, subItemName, eventName, delimiter, sourceContextCookie, startLineNumber) - self.DoAddScriptlet( - defaultName, - code, - itemName, - subItemName, - eventName, - delimiter, - sourceContextCookie, - startLineNumber, - ) - - def ParseScriptText( - self, - code, - itemName, - context, - delimiter, - sourceContextCookie, - startLineNumber, - flags, - bWantResult, - ): - # trace ("ParseScriptText", code[:20],"...", itemName, context, delimiter, sourceContextCookie, startLineNumber, flags, bWantResult) - if ( - bWantResult - or self.scriptState == axscript.SCRIPTSTATE_STARTED - or self.scriptState == axscript.SCRIPTSTATE_CONNECTED - or self.scriptState == axscript.SCRIPTSTATE_DISCONNECTED - ): - flags = flags | SCRIPTTEXT_FORCEEXECUTION - else: - flags = flags & (~SCRIPTTEXT_FORCEEXECUTION) - - if flags & SCRIPTTEXT_FORCEEXECUTION: - # About to execute the code. 
- self.RegisterNewNamedItems() - return self.DoParseScriptText( - code, sourceContextCookie, startLineNumber, bWantResult, flags - ) - - # - # IActiveScriptParseProcedure - def ParseProcedureText( - self, - code, - formalParams, - procName, - itemName, - unkContext, - delimiter, - contextCookie, - startingLineNumber, - flags, - ): - trace( - "ParseProcedureText", - code, - formalParams, - procName, - itemName, - unkContext, - delimiter, - contextCookie, - startingLineNumber, - flags, - ) - # NOTE - this is never called, as we have disabled this interface. - # Problem is, once enabled all even code comes via here, rather than AddScriptlet. - # However, the "procName" is always an empty string - ie, itemName is the object whose event we are handling, - # but no idea what the specific event is!? - # Problem is disabling this block is that AddScriptlet is _not_ passed - #