Move Windows libs to libs/windows

Labrys of Knossos 2018-12-16 13:36:03 -05:00
commit 3a692c94a5
684 changed files with 4 additions and 1 deletions

BIN
libs/win/PyWin32.chm Normal file

Binary file not shown.


@@ -0,0 +1,56 @@
"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO
Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole
* http://sourceforge.net/projects/adodbapi
"""
import sys
import time
if sys.version_info < (3,0):  # in Python 2, define all symbols, just like the bad old way
    from apibase import *
    VariantConversionMap = MultiMap  # old name. Should use apibase.MultiMap
    from .ado_consts import *
    _makeByteBuffer = buffer
else:
    # but if the user is running Python 3, then keep the dictionary clean
    from .apibase import apilevel, threadsafety, paramstyle
    from .apibase import Warning, Error, InterfaceError, DatabaseError, DataError, OperationalError, IntegrityError
    from .apibase import InternalError, ProgrammingError, NotSupportedError, FetchFailedError
    from .apibase import NUMBER, STRING, BINARY, DATETIME, ROWID
    _makeByteBuffer = bytes

from .adodbapi import connect, Connection, __version__, dateconverter, Cursor

def Binary(aString):
    """This function constructs an object capable of holding a binary (long) string value. """
    return _makeByteBuffer(aString)

def Date(year, month, day):
    "This function constructs an object holding a date value. "
    return dateconverter.Date(year, month, day)

def Time(hour, minute, second):
    "This function constructs an object holding a time value. "
    return dateconverter.Time(hour, minute, second)

def Timestamp(year, month, day, hour, minute, second):
    "This function constructs an object holding a time stamp value. "
    return dateconverter.Timestamp(year, month, day, hour, minute, second)

def DateFromTicks(ticks):
    """This function constructs an object holding a date value from the given ticks value
    (number of seconds since the epoch; see the documentation of the standard Python time module for details). """
    return Date(*time.gmtime(ticks)[:3])

def TimeFromTicks(ticks):
    """This function constructs an object holding a time value from the given ticks value
    (number of seconds since the epoch; see the documentation of the standard Python time module for details). """
    return Time(*time.gmtime(ticks)[3:6])

def TimestampFromTicks(ticks):
    """This function constructs an object holding a time stamp value from the given
    ticks value (number of seconds since the epoch;
    see the documentation of the standard Python time module for details). """
    return Timestamp(*time.gmtime(ticks)[:6])
version = 'adodbapi v' + __version__
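For orientation, a minimal usage sketch (not part of the committed file; the connection string, table, and column names are placeholders) showing how the module-level connect() and the DB-API type constructors defined above fit together:

    import adodbapi

    con = adodbapi.connect("Provider=Microsoft.Jet.OLEDB.4.0;Data Source=test.mdb")
    cur = con.cursor()
    # Date() builds a DB-API date object via whichever dateconverter is configured
    cur.execute("select name, birthday from students where birthday < ?",
                [adodbapi.Date(2000, 1, 1)])
    for row in cur.fetchall():
        print(row)
    con.close()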


@@ -0,0 +1,276 @@
# ADO enumerated constants documented on MSDN:
# http://msdn.microsoft.com/en-us/library/ms678353(VS.85).aspx
# IsolationLevelEnum
adXactUnspecified = -1
adXactBrowse = 0x100
adXactChaos = 0x10
adXactCursorStability = 0x1000
adXactIsolated = 0x100000
adXactReadCommitted = 0x1000
adXactReadUncommitted = 0x100
adXactRepeatableRead = 0x10000
adXactSerializable = 0x100000
# CursorLocationEnum
adUseClient = 3
adUseServer = 2
# CursorTypeEnum
adOpenDynamic = 2
adOpenForwardOnly = 0
adOpenKeyset = 1
adOpenStatic = 3
adOpenUnspecified = -1
# CommandTypeEnum
adCmdText = 1
adCmdStoredProc = 4
adSchemaTables = 20
# ParameterDirectionEnum
adParamInput = 1
adParamInputOutput = 3
adParamOutput = 2
adParamReturnValue = 4
adParamUnknown = 0
directions = {
0: 'Unknown',
1: 'Input',
2: 'Output',
3: 'InputOutput',
4: 'Return',
}
def ado_direction_name(ado_dir):
    try:
        return 'adParam' + directions[ado_dir]
    except:
        return 'unknown direction ('+str(ado_dir)+')'
# ObjectStateEnum
adStateClosed = 0
adStateOpen = 1
adStateConnecting = 2
adStateExecuting = 4
adStateFetching = 8
# FieldAttributeEnum
adFldMayBeNull = 0x40
# ConnectModeEnum
adModeUnknown = 0
adModeRead = 1
adModeWrite = 2
adModeReadWrite = 3
adModeShareDenyRead = 4
adModeShareDenyWrite = 8
adModeShareExclusive = 12
adModeShareDenyNone = 16
adModeRecursive = 0x400000
# XactAttributeEnum
adXactCommitRetaining = 131072
adXactAbortRetaining = 262144
ado_error_TIMEOUT = -2147217871
# DataTypeEnum - ADO Data types documented at:
# http://msdn2.microsoft.com/en-us/library/ms675318.aspx
adArray = 0x2000
adEmpty = 0x0
adBSTR = 0x8
adBigInt = 0x14
adBinary = 0x80
adBoolean = 0xb
adChapter = 0x88
adChar = 0x81
adCurrency = 0x6
adDBDate = 0x85
adDBTime = 0x86
adDBTimeStamp = 0x87
adDate = 0x7
adDecimal = 0xe
adDouble = 0x5
adError = 0xa
adFileTime = 0x40
adGUID = 0x48
adIDispatch = 0x9
adIUnknown = 0xd
adInteger = 0x3
adLongVarBinary = 0xcd
adLongVarChar = 0xc9
adLongVarWChar = 0xcb
adNumeric = 0x83
adPropVariant = 0x8a
adSingle = 0x4
adSmallInt = 0x2
adTinyInt = 0x10
adUnsignedBigInt = 0x15
adUnsignedInt = 0x13
adUnsignedSmallInt = 0x12
adUnsignedTinyInt = 0x11
adUserDefined = 0x84
adVarBinary = 0xCC
adVarChar = 0xC8
adVarNumeric = 0x8B
adVarWChar = 0xCA
adVariant = 0xC
adWChar = 0x82
# Additional constants used by introspection but not ADO itself
AUTO_FIELD_MARKER = -1000
adTypeNames = {
adBSTR: 'adBSTR',
adBigInt: 'adBigInt',
adBinary: 'adBinary',
adBoolean: 'adBoolean',
adChapter: 'adChapter',
adChar: 'adChar',
adCurrency: 'adCurrency',
adDBDate: 'adDBDate',
adDBTime: 'adDBTime',
adDBTimeStamp: 'adDBTimeStamp',
adDate: 'adDate',
adDecimal: 'adDecimal',
adDouble: 'adDouble',
adEmpty: 'adEmpty',
adError: 'adError',
adFileTime: 'adFileTime',
adGUID: 'adGUID',
adIDispatch: 'adIDispatch',
adIUnknown: 'adIUnknown',
adInteger: 'adInteger',
adLongVarBinary: 'adLongVarBinary',
adLongVarChar: 'adLongVarChar',
adLongVarWChar: 'adLongVarWChar',
adNumeric: 'adNumeric',
adPropVariant: 'adPropVariant',
adSingle: 'adSingle',
adSmallInt: 'adSmallInt',
adTinyInt: 'adTinyInt',
adUnsignedBigInt: 'adUnsignedBigInt',
adUnsignedInt: 'adUnsignedInt',
adUnsignedSmallInt: 'adUnsignedSmallInt',
adUnsignedTinyInt: 'adUnsignedTinyInt',
adUserDefined: 'adUserDefined',
adVarBinary: 'adVarBinary',
adVarChar: 'adVarChar',
adVarNumeric: 'adVarNumeric',
adVarWChar: 'adVarWChar',
adVariant: 'adVariant',
adWChar: 'adWChar',
}
def ado_type_name(ado_type):
    return adTypeNames.get(ado_type, 'unknown type ('+str(ado_type)+')')
# here in decimal, sorted by value
#adEmpty 0 Specifies no value (DBTYPE_EMPTY).
#adSmallInt 2 Indicates a two-byte signed integer (DBTYPE_I2).
#adInteger 3 Indicates a four-byte signed integer (DBTYPE_I4).
#adSingle 4 Indicates a single-precision floating-point value (DBTYPE_R4).
#adDouble 5 Indicates a double-precision floating-point value (DBTYPE_R8).
#adCurrency 6 Indicates a currency value (DBTYPE_CY). Currency is a fixed-point number
# with four digits to the right of the decimal point. It is stored in an eight-byte signed integer scaled by 10,000.
#adDate 7 Indicates a date value (DBTYPE_DATE). A date is stored as a double, the whole part of which is
# the number of days since December 30, 1899, and the fractional part of which is the fraction of a day.
#adBSTR 8 Indicates a null-terminated character string (Unicode) (DBTYPE_BSTR).
#adIDispatch 9 Indicates a pointer to an IDispatch interface on a COM object (DBTYPE_IDISPATCH).
#adError 10 Indicates a 32-bit error code (DBTYPE_ERROR).
#adBoolean 11 Indicates a boolean value (DBTYPE_BOOL).
#adVariant 12 Indicates an Automation Variant (DBTYPE_VARIANT).
#adIUnknown 13 Indicates a pointer to an IUnknown interface on a COM object (DBTYPE_IUNKNOWN).
#adDecimal 14 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_DECIMAL).
#adTinyInt 16 Indicates a one-byte signed integer (DBTYPE_I1).
#adUnsignedTinyInt 17 Indicates a one-byte unsigned integer (DBTYPE_UI1).
#adUnsignedSmallInt 18 Indicates a two-byte unsigned integer (DBTYPE_UI2).
#adUnsignedInt 19 Indicates a four-byte unsigned integer (DBTYPE_UI4).
#adBigInt 20 Indicates an eight-byte signed integer (DBTYPE_I8).
#adUnsignedBigInt 21 Indicates an eight-byte unsigned integer (DBTYPE_UI8).
#adFileTime 64 Indicates a 64-bit value representing the number of 100-nanosecond intervals since
# January 1, 1601 (DBTYPE_FILETIME).
#adGUID 72 Indicates a globally unique identifier (GUID) (DBTYPE_GUID).
#adBinary 128 Indicates a binary value (DBTYPE_BYTES).
#adChar 129 Indicates a string value (DBTYPE_STR).
#adWChar 130 Indicates a null-terminated Unicode character string (DBTYPE_WSTR).
#adNumeric 131 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_NUMERIC).
#adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT).
#adDBDate 133 Indicates a date value (yyyymmdd) (DBTYPE_DBDATE).
#adDBTime 134 Indicates a time value (hhmmss) (DBTYPE_DBTIME).
#adDBTimeStamp 135 Indicates a date/time stamp (yyyymmddhhmmss plus a fraction in billionths) (DBTYPE_DBTIMESTAMP).
#adChapter 136 Indicates a four-byte chapter value that identifies rows in a child rowset (DBTYPE_HCHAPTER).
#adPropVariant 138 Indicates an Automation PROPVARIANT (DBTYPE_PROP_VARIANT).
#adVarNumeric 139 Indicates a numeric value (Parameter object only).
#adVarChar 200 Indicates a string value (Parameter object only).
#adLongVarChar 201 Indicates a long string value (Parameter object only).
#adVarWChar 202 Indicates a null-terminated Unicode character string (Parameter object only).
#adLongVarWChar 203 Indicates a long null-terminated Unicode string value (Parameter object only).
#adVarBinary 204 Indicates a binary value (Parameter object only).
#adLongVarBinary 205 Indicates a long binary value (Parameter object only).
#adArray (Does not apply to ADOX.) 0x2000 A flag value, always combined with another data type constant,
# that indicates an array of that other data type.
# Error codes to names
adoErrors= {
0xe7b :'adErrBoundToCommand',
0xe94 :'adErrCannotComplete',
0xea4 :'adErrCantChangeConnection',
0xc94 :'adErrCantChangeProvider',
0xe8c :'adErrCantConvertvalue',
0xe8d :'adErrCantCreate',
0xea3 :'adErrCatalogNotSet',
0xe8e :'adErrColumnNotOnThisRow',
0xd5d :'adErrDataConversion',
0xe89 :'adErrDataOverflow',
0xe9a :'adErrDelResOutOfScope',
0xea6 :'adErrDenyNotSupported',
0xea7 :'adErrDenyTypeNotSupported',
0xcb3 :'adErrFeatureNotAvailable',
0xea5 :'adErrFieldsUpdateFailed',
0xc93 :'adErrIllegalOperation',
0xcae :'adErrInTransaction',
0xe87 :'adErrIntegrityViolation',
0xbb9 :'adErrInvalidArgument',
0xe7d :'adErrInvalidConnection',
0xe7c :'adErrInvalidParamInfo',
0xe82 :'adErrInvalidTransaction',
0xe91 :'adErrInvalidURL',
0xcc1 :'adErrItemNotFound',
0xbcd :'adErrNoCurrentRecord',
0xe83 :'adErrNotExecuting',
0xe7e :'adErrNotReentrant',
0xe78 :'adErrObjectClosed',
0xd27 :'adErrObjectInCollection',
0xd5c :'adErrObjectNotSet',
0xe79 :'adErrObjectOpen',
0xbba :'adErrOpeningFile',
0xe80 :'adErrOperationCancelled',
0xe96 :'adErrOutOfSpace',
0xe88 :'adErrPermissionDenied',
0xe9e :'adErrPropConflicting',
0xe9b :'adErrPropInvalidColumn',
0xe9c :'adErrPropInvalidOption',
0xe9d :'adErrPropInvalidValue',
0xe9f :'adErrPropNotAllSettable',
0xea0 :'adErrPropNotSet',
0xea1 :'adErrPropNotSettable',
0xea2 :'adErrPropNotSupported',
0xbb8 :'adErrProviderFailed',
0xe7a :'adErrProviderNotFound',
0xbbb :'adErrReadFile',
0xe93 :'adErrResourceExists',
0xe92 :'adErrResourceLocked',
0xe97 :'adErrResourceOutOfScope',
0xe8a :'adErrSchemaViolation',
0xe8b :'adErrSignMismatch',
0xe81 :'adErrStillConnecting',
0xe7f :'adErrStillExecuting',
0xe90 :'adErrTreePermissionDenied',
0xe8f :'adErrURLDoesNotExist',
0xe99 :'adErrURLNamedRowDoesNotExist',
0xe98 :'adErrUnavailable',
0xe84 :'adErrUnsafeOperation',
0xe95 :'adErrVolumeNotFound',
0xbbc :'adErrWriteFile'
}
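A brief illustration (not part of the committed file) of how these lookup tables turn raw ADO numeric codes into readable names for logging and error messages:

    from adodbapi import ado_consts as adc

    print(adc.ado_type_name(adc.adVarWChar))         # 'adVarWChar'
    print(adc.ado_direction_name(adc.adParamInput))  # 'adParamInput'
    print(adc.adoErrors.get(0xe78, 'unknown'))       # 'adErrObjectClosed'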

File diff suppressed because it is too large


@@ -0,0 +1,70 @@
""" db_print.py -- a simple demo for ADO database reads."""
from __future__ import with_statement #needed for Python 2.5
import sys
import adodbapi.ado_consts as adc
cmd_args = ('proxy_host', 'proxy_port', 'filename', 'table_name')
if 'help' in sys.argv:
    print(('possible settings keywords are:', cmd_args))
    sys.exit()

kw_args = {}  # pick up filename and proxy address from command line (optionally)
for arg in sys.argv:
    s = arg.split("=")
    if len(s) > 1:
        if s[0] in cmd_args:
            kw_args[s[0]] = s[1]
kw_args.setdefault('filename', "test.mdb")  # assumes server is running from examples folder
kw_args.setdefault('table_name', 'Products')  # the name of the demo table

# the server needs to select the provider based on his Python installation
provider_switch = ['provider', 'Microsoft.ACE.OLEDB.12.0', "Microsoft.Jet.OLEDB.4.0"]

# ------------------------ START HERE -------------------------------------
# create the connection
constr = "Provider=%(provider)s;Data Source=%(filename)s"
if 'proxy_host' in kw_args:
    import adodbapi.remote as db
else:
    import adodbapi as db
con = db.connect(constr, kw_args, macro_is64bit=provider_switch)

if kw_args['table_name'] == '?':
    print('The tables in your database are:')
    for name in con.get_table_names():
        print(name)
else:
    # make a cursor on the connection
    with con.cursor() as c:
        # run an SQL statement on the cursor
        sql = 'select * from %s' % kw_args['table_name']
        print(('performing query="%s"' % sql))
        c.execute(sql)
        # check the results
        print(('result rowcount shows as= %d. (Note: -1 means "not known")' % (c.rowcount,)))
        print('')
        print('result data description is:')
        print(' NAME Type DispSize IntrnlSz Prec Scale Null?')
        for d in c.description:
            print((('%16s %-12s %8s %8d %4d %5d %s') %
                   (d[0], adc.adTypeNames[d[1]], d[2], d[3], d[4], d[5], bool(d[6]))))
        print('')
        print('str() of first five records are...')
        # get the results
        db = c.fetchmany(5)
        # print them
        for rec in db:
            print(rec)
        print('')
        print('repr() of next row is...')
        print((repr(c.fetchone())))
        print('')
con.close()


@@ -0,0 +1,19 @@
""" db_table_names.py -- a simple demo for ADO database table listing."""
import sys
import adodbapi
try:
    databasename = sys.argv[1]
except IndexError:
    databasename = "test.mdb"

provider = ['prv', "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"]
constr = "Provider=%(prv)s;Data Source=%(db)s"

# create the connection
con = adodbapi.connect(constr, db=databasename, macro_is64bit=provider)

print(('Table names in= %s' % databasename))
for table in con.get_table_names():
    print(table)


@@ -0,0 +1,38 @@
import sys
import adodbapi
try:
    import adodbapi.is64bit as is64bit
    is64 = is64bit.Python()
except ImportError:
    is64 = False

if is64:
    driver = "Microsoft.ACE.OLEDB.12.0"
else:
    driver = "Microsoft.Jet.OLEDB.4.0"
extended = 'Extended Properties="Excel 8.0;HDR=Yes;IMEX=1;"'

try:  # first command line argument will be xls file name -- default to the one written by xls_write.py
    filename = sys.argv[1]
except IndexError:
    filename = 'xx.xls'
constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended)
conn = adodbapi.connect(constr)

try:  # second command line argument will be worksheet name -- default to first worksheet
    sheet = sys.argv[2]
except IndexError:
    # use ADO feature to get the name of the first worksheet
    sheet = conn.get_table_names()[0]

print(('Spreadsheet=%s Worksheet=%s' % (filename, sheet)))
print('------------------------------------------------------------')
crsr = conn.cursor()
sql = "SELECT * from [%s]" % sheet
crsr.execute(sql)
for row in crsr.fetchmany(10):
    print((repr(row)))
crsr.close()
conn.close()


@@ -0,0 +1,32 @@
from __future__ import with_statement # needed only if running Python 2.5
import adodbapi
try:
    import adodbapi.is64bit as is64bit
    is64 = is64bit.Python()
except ImportError:
    is64 = False  # in case the user has an old version of adodbapi

if is64:
    driver = "Microsoft.ACE.OLEDB.12.0"
else:
    driver = "Microsoft.Jet.OLEDB.4.0"

filename = 'xx.xls'  # file will be created if it does not exist
extended = 'Extended Properties="Excel 8.0;Readonly=False;"'
constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended)

conn = adodbapi.connect(constr)
with conn:  # will auto commit if no errors
    with conn.cursor() as crsr:
        try:
            crsr.execute('drop table SheetOne')
        except:
            pass  # just in case there is one already there
        # create the sheet and the header row and set the types for the columns
        crsr.execute('create table SheetOne (Header1 text, Header2 text, Header3 text, Header4 text, Header5 text)')
        sql = "INSERT INTO SheetOne (Header1, Header2 ,Header3, Header4, Header5) values (?,?,?,?,?)"
        data = (1, 2, 3, 4, 5)
        crsr.execute(sql, data)  # write the first row of data
        crsr.execute(sql, (6, 7, 8, 9, 10))  # another row of data
conn.close()
print(('Created spreadsheet=%s worksheet=%s' % (filename, 'SheetOne')))


@@ -0,0 +1,33 @@
"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version"""
import sys
def Python():
    if sys.platform == 'cli':  # IronPython
        import System
        return System.IntPtr.Size == 8
    else:
        try:
            return sys.maxsize > 2147483647
        except AttributeError:
            return sys.maxint > 2147483647

def os():
    import platform
    pm = platform.machine()
    if pm != '..' and pm.endswith('64'):  # recent Python (not Iron)
        return True
    else:
        import os
        if 'PROCESSOR_ARCHITEW6432' in os.environ:
            return True  # 32 bit program running on 64 bit Windows
        try:
            # 64 bit Windows 64 bit program
            return os.environ['PROCESSOR_ARCHITECTURE'].endswith('64')
        except KeyError:  # a missing environment variable raises KeyError, not IndexError
            pass  # not Windows
        try:
            return '64' in platform.architecture()[0]  # this often works in Linux
        except:
            return False  # is an older version of Python, assume also an older os (best we can guess)

if __name__ == "__main__":
    print(("is64bit.Python() =", Python(), "is64bit.os() =", os()))


@@ -0,0 +1,506 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!


@@ -0,0 +1,92 @@
Project
-------
adodbapi
A Python DB-API 2.0 (PEP-249) module that makes it easy to use Microsoft ADO
for connecting with databases and other data sources
using either CPython or IronPython.
Home page: <http://sourceforge.net/projects/adodbapi>
Features:
* 100% DB-API 2.0 (PEP-249) compliant (including most extensions and recommendations).
* Includes pyunit testcases that describe how to use the module.
* Fully implemented in Python -- runs on Python 2.5+, Python 3.0+, and IronPython 2.6+.
* Licensed under the LGPL license, which means that it can be used freely even in commercial programs subject to certain restrictions.
* Includes SERVER and REMOTE modules so that a Windows proxy can serve ADO databases to a Linux client using PyRO.
* The user can choose between paramstyles: 'qmark', 'named', 'format', 'pyformat', 'dynamic'.
* Supports data retrieval by column name e.g.:
    for row in myCursor.execute("select name,age from students"):
        print("Student", row.name, "is", row.age, "years old.")
* Supports user-definable system-to-Python data conversion functions (selected by ADO data type, or by column).
Prerequisites:
* CPython 2.5 or higher, plus pywin32 (Mark Hammond's Python for Windows extensions),
  or
* IronPython 2.6 or higher (works in IPy2.0 for all data types except BUFFER).
Installation:
* (C-Python on Windows): Download pywin32 from http://sf.net/projects/pywin32 and install from .msi (adodbapi is included)
* ((to use Windows as a server, also download and install Pyro4 (requires Python 2.6 or later))) https://pypi.python.org/pypi/Pyro4
* (IronPython on Windows): Download adodbapi from http://sf.net/projects/adodbapi. Unpack the zip.
Open a command window as an administrator. CD to the folder containing the unzipped files.
Run "setup.py install" using the IronPython of your choice.
* (Linux, as a client): download and install from PyPi: "pip install adodbapi Pyro4"
NOTE: ...........
If you do not like the new default operation of returning Numeric columns as decimal.Decimal,
you can select other options by the user defined conversion feature.
Try:
adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtString
or:
adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtFloat
or:
adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = write_your_own_conversion_function
............
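A hedged sketch of what such a user-defined conversion function might look like (the function name and rounding rule below are invented for illustration; the assignment line follows the pattern shown above):

    import decimal
    import adodbapi

    def numeric_as_two_place_decimal(value):
        # round incoming ADO numeric values to two decimal places
        return decimal.Decimal(str(value)).quantize(decimal.Decimal('0.01'))

    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = numeric_as_two_place_decimal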
What's new in version 2.6
A cursor.prepare() method and support for prepared SQL statements.
Lots of refactoring, especially of the Remote and Server modules (still to be treated as Beta code).
The quick start document 'quick_reference.odt' will export as a nice-looking pdf.
Added paramstyles 'pyformat' and 'dynamic'. If your 'paramstyle' is 'named' you _must_ pass a dictionary of
parameters to your .execute() method. If your 'paramstyle' is 'format' 'pyformat' or 'dynamic', you _may_
pass a dictionary of parameters -- provided your SQL operation string is formatted correctly.
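For example (a sketch only -- the connection string, table, and column names are invented), the same query under the default 'qmark' style and under the 'named' style might look like this:

    import adodbapi

    # 'qmark' (the default): positional parameters passed as a sequence
    con = adodbapi.connect("Provider=SQLOLEDB;Data Source=myserver;Initial Catalog=test;")
    crsr = con.cursor()
    crsr.execute("select * from engineers where grade = ? and city = ?", ('V', 'Oslo'))

    # 'named': parameters passed as a dictionary and referenced as :name in the SQL
    con2 = adodbapi.connect("Provider=SQLOLEDB;Data Source=myserver;Initial Catalog=test;",
                            paramstyle='named')
    crsr2 = con2.cursor()
    crsr2.execute("select * from engineers where grade = :g and city = :c",
                  {'g': 'V', 'c': 'Oslo'})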
What's new in version 2.5
Remote module: (works on Linux!) allows a Windows computer to serve ADO databases via PyRO
Server module: PyRO server for ADO. Run using a command like: C:>python -m adodbapi.server
(server has simple connection string macros: is64bit, getuser, sql_provider, auto_security)
Brief documentation included. See adodbapi/examples folder adodbapi.rtf
New connection method conn.get_table_names() --> list of names of tables in database
Vastly refactored. Data conversion things have been moved to the new adodbapi.apibase module.
Many former module-level attributes are now class attributes. (Should be more thread-safe)
Connection objects are now context managers for transactions and will commit or rollback.
Cursor objects are context managers and will automatically close themselves.
Autocommit can be switched on and off.
Keyword and positional arguments on the connect() method work as documented in PEP 249.
Keyword arguments from the connect call can be formatted into the connection string.
New keyword arguments defined, such as: autocommit, paramstyle, remote_proxy, remote_port.
*** Breaking change: variantConversion lookups are simplified: the following will raise KeyError:
oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes]
Refactor as: oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes[0]]
(( More information like this in older_whatsnew.txt ))
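A short sketch (the database file and table name are placeholders) of the context-manager behaviour described above -- the connection commits or rolls back the enclosed work, and the cursor closes itself:

    import adodbapi

    con = adodbapi.connect("Provider=Microsoft.Jet.OLEDB.4.0;Data Source=test.mdb")
    print(con.get_table_names())       # new in 2.5: list the tables in the database

    with con:                          # commit on success, rollback on error
        with con.cursor() as crsr:     # cursor closes itself on exit
            crsr.execute("insert into Products (ProductName) values (?)", ["widget"])
    con.close()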
License
-------
LGPL, see http://www.opensource.org/licenses/lgpl-license.php
Documentation
-------------
Start with:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
read the examples in adodbapi/examples
and look at the test cases in adodbapi/test directory.
Mailing lists
-------------
The adodbapi mailing lists have been deactivated. Submit comments to the
pywin32 or IronPython mailing lists.
-- the bug tracker on sourceforge.net/projects/adodbapi will be checked (infrequently).


@@ -0,0 +1,14 @@
"""call using an open ADO connection --> list of table names"""
from . import adodbapi
def names(connection_object):
    ado = connection_object.adoConn
    schema = ado.OpenSchema(20)  # constant = adSchemaTables

    tables = []
    while not schema.EOF:
        name = adodbapi.getIndexedValue(schema.Fields, 'TABLE_NAME').Value
        tables.append(name)
        schema.MoveNext()
    del schema
    return tables
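For reference, a hedged usage sketch (the connection string is a placeholder): the helper expects an already-open adodbapi connection and returns a plain list of table names, much like Connection.get_table_names():

    import adodbapi
    from adodbapi import schema_table

    con = adodbapi.connect("Provider=Microsoft.Jet.OLEDB.4.0;Data Source=test.mdb")
    print(schema_table.names(con))   # e.g. ['Products', ...]
    con.close()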

File diff suppressed because it is too large


@@ -0,0 +1,161 @@
# Configure this to _YOUR_ environment in order to run the testcases.
"testADOdbapiConfig.py v 2.6.0.A00"
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# # TESTERS:
# #
# # You will need to make numerous modifications to this file
# # to adapt it to your own testing environment.
# #
# # Skip down to the next "# #" line --
# # -- the things you need to change are below it.
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
import platform
import sys
import random
import is64bit
import setuptestframework
if sys.version_info >= (3,0):
    import tryconnection3 as tryconnection
else:
    import tryconnection2 as tryconnection
print((sys.version))
node = platform.node()
try: print(('node=%s: is64bit.os()= %s, is64bit.Python()= %s' % (node, is64bit.os(), is64bit.Python())))
except: pass
try:
    onWindows = bool(sys.getwindowsversion())  # seems to work on all versions of Python
except:
    onWindows = False
# create a random name for temporary table names
_alphabet = "PYFGCRLAOEUIDHTNTQJKXBMWVZ1234567890" # yes, I do use a dvorak keyboard!
tmp = ''.join([random.choice(_alphabet) for x in range(9)])
mdb_name = 'xx_' + tmp + '.mdb'
testfolder = setuptestframework.maketemp()
if '--package' in sys.argv:
    pth = setuptestframework.makeadopackage(testfolder)
else:
    pth = setuptestframework.find_ado_path()
if pth not in sys.path:
    sys.path.insert(1, pth)

# function to clean up the temporary folder -- calling program must run this function before exit.
cleanup = setuptestframework.getcleanupfunction()

import adodbapi  # will (hopefully) be imported using the "pth" discovered above
try:
    print((adodbapi.version))  # show version
except:
    print('"adodbapi.version" not present or not working.')
print(__doc__)

verbose = False
for a in sys.argv:
    if a.startswith('--verbose'):
        arg = True
        try: arg = int(a.split("=")[1])
        except IndexError: pass
        adodbapi.adodbapi.verbose = arg
        verbose = arg
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # start your environment setup here v v v
SQL_HOST_NODE = 'Vpad'
doAllTests = '--all' in sys.argv
doAccessTest = not ('--nojet' in sys.argv)
doSqlServerTest = node == SQL_HOST_NODE or '--mssql' in sys.argv or doAllTests
doMySqlTest = '--mysql' in sys.argv or doAllTests
doPostgresTest = '--pg' in sys.argv or doAllTests
iterateOverTimeTests = ('--time' in sys.argv or doAllTests) and onWindows
THE_PROXY_HOST = '25.44.77.176' if node != SQL_HOST_NODE or not onWindows else '::1' # -- change this
try:  # If mx extensions are installed, use mxDateTime
    import mx.DateTime
    doMxDateTimeTest = True
except:
    doMxDateTimeTest = False  # Requires eGenixMXExtensions
doTimeTest = True # obsolete python time format
if doAccessTest:
    if onWindows and (node == SQL_HOST_NODE or not is64bit.Python()):
        c = {'mdb': setuptestframework.makemdb(testfolder, mdb_name)}
    else:
        c = {'macro_find_temp_test_path': ['mdb', 'server_test.mdb'],
             'proxy_host': THE_PROXY_HOST}
    # macro definition for keyword "driver" using macro "is64bit" -- see documentation
    c['macro_is64bit'] = ['driver', "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"]
    connStrAccess = "Provider=%(driver)s;Data Source=%(mdb)s"
    print(' ...Testing ACCESS connection...')
    doAccessTest, connStrAccess, dbAccessconnect = tryconnection.try_connection(verbose, connStrAccess, 10, **c)

if doSqlServerTest:
    c = {'macro_getnode': ['host', r"%s\SQLExpress"],  # name of computer with SQL Server
         # 'host':'25.44.77.176;'  # Network Library=dbmssocn',
         'database': "adotest",
         'user': 'adotestuser',  # None implies Windows security
         'password': "12345678",
         # macro definition for keyword "security" using macro "auto_security"
         'macro_auto_security': 'security',
         'provider': 'SQLNCLI11; MARS Connection=True'
         }
    connStr = "Provider=%(provider)s; Initial Catalog=%(database)s; Data Source=%(host)s; %(security)s;"
    if node != SQL_HOST_NODE:
        if THE_PROXY_HOST:
            c["proxy_host"] = THE_PROXY_HOST  # the SQL server runs a proxy for this test
        else:
            c["pyro_connection"] = "PYRONAME:ado.connection"
    print(' ...Testing MS-SQL login...')
    doSqlServerTest, connStrSQLServer, dbSqlServerconnect = tryconnection.try_connection(verbose, connStr, 30, **c)

if doMySqlTest:
    c = {'host': "25.223.161.222",
         'database': 'test',
         'user': 'adotest',
         'password': '12345678',
         'driver': "MySQL ODBC 5.3 Unicode Driver"}  # or _driver="MySQL ODBC 3.51 Driver"
    if not onWindows:
        if THE_PROXY_HOST:
            c["proxy_host"] = THE_PROXY_HOST
        else:
            c["pyro_connection"] = "PYRONAME:ado.connection"
    c['macro_is64bit'] = ['provider', 'Provider=MSDASQL;']
    cs = '%(provider)sDriver={%(driver)s};Server=%(host)s;Port=3306;' + \
         'Database=%(database)s;user=%(user)s;password=%(password)s;Option=3;'
    print(' ...Testing MySql login...')
    doMySqlTest, connStrMySql, dbMySqlconnect = tryconnection.try_connection(verbose, cs, 5, **c)

if doPostgresTest:
    _computername = "25.223.161.222"
    _databasename = 'adotest'
    _username = 'adotestuser'
    _password = '12345678'
    kws = {'timeout': 4}
    kws['macro_is64bit'] = ['prov_drv', 'Provider=MSDASQL;Driver={PostgreSQL Unicode(x64)}',
                            'Driver=PostgreSQL Unicode']
    if not onWindows:
        if THE_PROXY_HOST:
            kws['proxy_host'] = THE_PROXY_HOST
        else:
            kws['pyro_connection'] = 'PYRONAME:ado.connection'
    # get driver from http://www.postgresql.org/ftp/odbc/versions/
    # test using positional and keyword arguments (bad example for real code)
    print(' ...Testing PostgreSQL login...')
    doPostgresTest, connStrPostgres, dbPostgresConnect = tryconnection.try_connection(verbose,
        '%(prov_drv)s;Server=%(host)s;Database=%(database)s;uid=%(user)s;pwd=%(password)s;',
        _username, _password, _computername, _databasename, **kws)
assert doAccessTest or doSqlServerTest or doMySqlTest or doPostgresTest, 'No database engine found for testing'


@@ -0,0 +1,899 @@
#!/usr/bin/env python
''' Python DB API 2.0 driver compliance unit test suite.
This software is Public Domain and may be used without restrictions.
"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."
-- Ian Bicking
'''
__version__ = '$Revision: 1.14.3 $'[11:-2]
__author__ = 'Stuart Bishop <stuart@stuartbishop.net>'
import unittest
import time
import sys
if sys.version[0] >= '3':  # python 3.x
    _BaseException = Exception
    def _failUnless(self, expr, msg=None):
        self.assertTrue(expr, msg)
else:  # python 2.x
    from exceptions import Exception as _BaseException
    def _failUnless(self, expr, msg=None):
        self.failUnless(expr, msg)  ## deprecated since Python 2.6
# set this to "True" to follow API 2.0 to the letter
TEST_FOR_NON_IDEMPOTENT_CLOSE = True
# Revision 1.14 2013/05/20 11:02:05 kf7xm
# Add a literal string to the format insertion test to catch trivial re-format algorithms
# Revision 1.13 2013/05/08 14:31:50 kf7xm
# Quick switch to Turn off IDEMPOTENT_CLOSE test. Also: Silence teardown failure
# Revision 1.12 2009/02/06 03:35:11 kf7xm
# Tested okay with Python 3.0, includes last minute patches from Mark H.
#
# Revision 1.1.1.1.2.1 2008/09/20 19:54:59 rupole
# Include latest changes from main branch
# Updates for py3k
#
# Revision 1.11 2005/01/02 02:41:01 zenzen
# Update author email address
#
# Revision 1.10 2003/10/09 03:14:14 zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9 2003/08/13 01:16:36 zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8 2003/04/10 00:13:25 zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7 2003/02/26 23:33:37 zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6 2003/02/21 03:04:33 zenzen
# Stuff from Henrik Ekelund:
# added test_None
# added test_nextset & hooks
#
# Revision 1.5 2003/02/17 22:08:43 zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4 2003/02/15 00:16:33 zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
# to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception hierarchy correctly
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propagates
# - VARCHAR columns now have a width, which will hopefully make the
# DDL even more portable (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
# are exhausted (already checking for empty lists if select retrieved
# nothing)
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
def str2bytes(sval):
if sys.version_info < (3,0) and isinstance(sval, str):
sval = sval.decode("latin1")
return sval.encode("latin1") #python 3 make unicode into bytes
class DatabaseAPI20Test(unittest.TestCase):
''' Test a database self.driver for DB API 2.0 compatibility.
This implementation tests Gadfly, but the TestCase
is structured so that other self.drivers can subclass this
test case to ensure compliance with the DB-API. It is
expected that this TestCase may be expanded in the future
if ambiguities or edge conditions are discovered.
The 'Optional Extensions' are not yet being tested.
self.drivers should subclass this test, overriding setUp, tearDown,
self.driver, connect_args and connect_kw_args. Class specification
should be as follows:
import dbapi20
class mytest(dbapi20.DatabaseAPI20Test):
[...]
Don't 'import DatabaseAPI20Test from dbapi20', or you will
confuse the unit tester - just 'import dbapi20'.
'''
# The self.driver module. This should be the module where the 'connect'
# method is to be found
driver = None
connect_args = () # List of arguments to pass to connect
connect_kw_args = {} # Keyword arguments for connect
table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables
ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
ddl2 = 'create table %sbarflys (name varchar(20), drink varchar(30))' % table_prefix
xddl1 = 'drop table %sbooze' % table_prefix
xddl2 = 'drop table %sbarflys' % table_prefix
lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase
# Some drivers may need to override these helpers, for example adding
# a 'commit' after the execute.
def executeDDL1(self,cursor):
cursor.execute(self.ddl1)
def executeDDL2(self,cursor):
cursor.execute(self.ddl2)
def setUp(self):
''' self.drivers should override this method to perform required setup
if any is necessary, such as creating the database.
'''
pass
def tearDown(self):
''' self.drivers should override this method to perform required cleanup
if any is necessary, such as deleting the test database.
The default drops the tables that may be created.
'''
try:
con = self._connect()
try:
cur = con.cursor()
for ddl in (self.xddl1,self.xddl2):
try:
cur.execute(ddl)
con.commit()
except self.driver.Error:
# Assume table didn't exist. Other tests will check if
# execute is busted.
pass
finally:
con.close()
except _BaseException:
pass
def _connect(self):
try:
r = self.driver.connect(
*self.connect_args,**self.connect_kw_args
)
except AttributeError:
self.fail("No connect method found in self.driver module")
return r
def test_connect(self):
con = self._connect()
con.close()
def test_apilevel(self):
try:
# Must exist
apilevel = self.driver.apilevel
# Must equal 2.0
self.assertEqual(apilevel,'2.0')
except AttributeError:
self.fail("Driver doesn't define apilevel")
def test_threadsafety(self):
try:
# Must exist
threadsafety = self.driver.threadsafety
# Must be a valid value
_failUnless(self, threadsafety in (0,1,2,3))
except AttributeError:
self.fail("Driver doesn't define threadsafety")
def test_paramstyle(self):
try:
# Must exist
paramstyle = self.driver.paramstyle
# Must be a valid value
_failUnless(self, paramstyle in (
'qmark','numeric','named','format','pyformat'
))
except AttributeError:
self.fail("Driver doesn't define paramstyle")
def test_Exceptions(self):
# Make sure required exceptions exist, and are in the
# defined heirarchy.
if sys.version[0] == '3': # under Python 3, StandardError no longer exists
self.assertTrue(issubclass(self.driver.Warning,Exception))
self.assertTrue(issubclass(self.driver.Error,Exception))
else:
self.failUnless(issubclass(self.driver.Warning,Exception))
self.failUnless(issubclass(self.driver.Error,Exception))
_failUnless(self,
issubclass(self.driver.InterfaceError,self.driver.Error)
)
_failUnless(self,
issubclass(self.driver.DatabaseError,self.driver.Error)
)
_failUnless(self,
issubclass(self.driver.OperationalError,self.driver.Error)
)
_failUnless(self,
issubclass(self.driver.IntegrityError,self.driver.Error)
)
_failUnless(self,
issubclass(self.driver.InternalError,self.driver.Error)
)
_failUnless(self,
issubclass(self.driver.ProgrammingError,self.driver.Error)
)
_failUnless(self,
issubclass(self.driver.NotSupportedError,self.driver.Error)
)
def test_ExceptionsAsConnectionAttributes(self):
# OPTIONAL EXTENSION
# Test for the optional DB API 2.0 extension, where the exceptions
# are exposed as attributes on the Connection object
# I figure this optional extension will be implemented by any
# driver author who is using this test suite, so it is enabled
# by default.
con = self._connect()
drv = self.driver
_failUnless(self,con.Warning is drv.Warning)
_failUnless(self,con.Error is drv.Error)
_failUnless(self,con.InterfaceError is drv.InterfaceError)
_failUnless(self,con.DatabaseError is drv.DatabaseError)
_failUnless(self,con.OperationalError is drv.OperationalError)
_failUnless(self,con.IntegrityError is drv.IntegrityError)
_failUnless(self,con.InternalError is drv.InternalError)
_failUnless(self,con.ProgrammingError is drv.ProgrammingError)
_failUnless(self,con.NotSupportedError is drv.NotSupportedError)
def test_commit(self):
con = self._connect()
try:
# Commit must work, even if it doesn't do anything
con.commit()
finally:
con.close()
def test_rollback(self):
con = self._connect()
# If rollback is defined, it should either work or throw
# the documented exception
if hasattr(con,'rollback'):
try:
con.rollback()
except self.driver.NotSupportedError:
pass
def test_cursor(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
def test_cursor_isolation(self):
con = self._connect()
try:
# Make sure cursors created from the same connection have
# the documented transaction isolation level
cur1 = con.cursor()
cur2 = con.cursor()
self.executeDDL1(cur1)
cur1.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
cur2.execute("select name from %sbooze" % self.table_prefix)
booze = cur2.fetchall()
self.assertEqual(len(booze),1)
self.assertEqual(len(booze[0]),1)
self.assertEqual(booze[0][0],'Victoria Bitter')
finally:
con.close()
def test_description(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.description,None,
'cursor.description should be none after executing a '
'statement that can return no rows (such as DDL)'
)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(len(cur.description),1,
'cursor.description describes too many columns'
)
self.assertEqual(len(cur.description[0]),7,
'cursor.description[x] tuples must have 7 elements'
)
self.assertEqual(cur.description[0][0].lower(),'name',
'cursor.description[x][0] must return column name'
)
self.assertEqual(cur.description[0][1],self.driver.STRING,
'cursor.description[x][1] must return column type. Got %r'
% cur.description[0][1]
)
# Make sure self.description gets reset
self.executeDDL2(cur)
self.assertEqual(cur.description,None,
'cursor.description not being set to None when executing '
'no-result statements (eg. DDL)'
)
finally:
con.close()
def test_rowcount(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
_failUnless(self,cur.rowcount in (-1,0), # Bug #543885
'cursor.rowcount should be -1 or 0 after executing no-result '
'statements'
)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
_failUnless(self,cur.rowcount in (-1,1),
'cursor.rowcount should == number of rows inserted, or '
'set to -1 after executing an insert statement'
)
cur.execute("select name from %sbooze" % self.table_prefix)
_failUnless(self,cur.rowcount in (-1,1),
'cursor.rowcount should == number of rows returned, or '
'set to -1 after executing a select statement'
)
self.executeDDL2(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount not being reset to -1 after executing '
'no-result statements'
)
finally:
con.close()
lower_func = 'lower'
def test_callproc(self):
con = self._connect()
try:
cur = con.cursor()
if self.lower_func and hasattr(cur,'callproc'):
r = cur.callproc(self.lower_func,('FOO',))
self.assertEqual(len(r),1)
self.assertEqual(r[0],'FOO')
r = cur.fetchall()
self.assertEqual(len(r),1,'callproc produced no result set')
self.assertEqual(len(r[0]),1,
'callproc produced invalid result set'
)
self.assertEqual(r[0][0],'foo',
'callproc produced invalid results'
)
finally:
con.close()
def test_close(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
# cursor.execute should raise an Error if called after connection
# closed
self.assertRaises(self.driver.Error,self.executeDDL1,cur)
# connection.commit should raise an Error if called after connection
# is closed.
self.assertRaises(self.driver.Error,con.commit)
# connection.close should raise an Error if called more than once
#!!! reasonable persons differ about the usefulness of this test and this feature !!!
if TEST_FOR_NON_IDEMPOTENT_CLOSE:
self.assertRaises(self.driver.Error,con.close)
def test_execute(self):
con = self._connect()
try:
cur = con.cursor()
self._paraminsert(cur)
finally:
con.close()
def _paraminsert(self,cur):
self.executeDDL2(cur)
cur.execute("insert into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')" % (
self.table_prefix
))
_failUnless(self,cur.rowcount in (-1,1))
if self.driver.paramstyle == 'qmark':
cur.execute(
"insert into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'numeric':
cur.execute(
"insert into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'named':
cur.execute(
"insert into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix,
{'beer':"Cooper's"}
)
elif self.driver.paramstyle == 'format':
cur.execute(
"insert into %sbarflys values (%%s, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'pyformat':
cur.execute(
"insert into %sbarflys values (%%(beer)s, 'thi%%s :may ca%%(u)se? troub:1e')" % self.table_prefix,
{'beer':"Cooper's"}
)
else:
self.fail('Invalid paramstyle')
_failUnless(self,cur.rowcount in (-1,1))
cur.execute('select name, drink from %sbarflys' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Cooper's",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
self.assertEqual(beers[1],"Victoria Bitter",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
trouble = "thi%s :may ca%(u)se? troub:1e"
self.assertEqual(res[0][1], trouble,
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly. Got=%s, Expected=%s' % (repr(res[0][1]), repr(trouble)))
self.assertEqual(res[1][1], trouble,
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly. Got=%s, Expected=%s' % (repr(res[1][1]), repr(trouble)
))
def test_executemany(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
largs = [ ("Cooper's",) , ("Boag's",) ]
margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
if self.driver.paramstyle == 'qmark':
cur.executemany(
'insert into %sbooze values (?)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'numeric':
cur.executemany(
'insert into %sbooze values (:1)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'named':
cur.executemany(
'insert into %sbooze values (:beer)' % self.table_prefix,
margs
)
elif self.driver.paramstyle == 'format':
cur.executemany(
'insert into %sbooze values (%%s)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'pyformat':
cur.executemany(
'insert into %sbooze values (%%(beer)s)' % (
self.table_prefix
),
margs
)
else:
self.fail('Unknown paramstyle')
_failUnless(self,cur.rowcount in (-1,2),
'insert using cursor.executemany set cursor.rowcount to '
'incorrect value %r' % cur.rowcount
)
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,
'cursor.fetchall retrieved incorrect number of rows'
)
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Boag's",'incorrect data "%s" retrieved' % beers[0])
self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
finally:
con.close()
def test_fetchone(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchone should raise an Error if called before
# executing a select-type query
self.assertRaises(self.driver.Error,cur.fetchone)
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
self.executeDDL1(cur)
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if a query retrieves '
'no rows'
)
_failUnless(self,cur.rowcount in (-1,0))
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchone()
self.assertEqual(len(r),1,
'cursor.fetchone should have retrieved a single row'
)
self.assertEqual(r[0],'Victoria Bitter',
'cursor.fetchone retrieved incorrect data'
)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if no more rows available'
)
_failUnless(self,cur.rowcount in (-1,1))
finally:
con.close()
samples = [
'Carlton Cold',
'Carlton Draft',
'Mountain Goat',
'Redback',
'Victoria Bitter',
'XXXX'
]
def _populate(self):
''' Return a list of sql commands to setup the DB for the fetch
tests.
'''
populate = [
"insert into %sbooze values ('%s')" % (self.table_prefix,s)
for s in self.samples
]
return populate
def test_fetchmany(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchmany should raise an Error if called without
#issuing a query
self.assertRaises(self.driver.Error,cur.fetchmany,4)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany()
self.assertEqual(len(r),1,
'cursor.fetchmany retrieved incorrect number of rows, '
'default of arraysize is one.'
)
cur.arraysize=10
r = cur.fetchmany(3) # Should get 3 rows
self.assertEqual(len(r),3,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should get 2 more
self.assertEqual(len(r),2,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should be an empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence after '
'results are exhausted'
)
_failUnless(self,cur.rowcount in (-1,6))
# Same as above, using cursor.arraysize
cur.arraysize=4
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany() # Should get 4 rows
self.assertEqual(len(r),4,
'cursor.arraysize not being honoured by fetchmany'
)
r = cur.fetchmany() # Should get 2 more
self.assertEqual(len(r),2)
r = cur.fetchmany() # Should be an empty sequence
self.assertEqual(len(r),0)
_failUnless(self,cur.rowcount in (-1,6))
cur.arraysize=6
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchmany() # Should get all rows
_failUnless(self,cur.rowcount in (-1,6))
self.assertEqual(len(rows),6)
rows = [r[0] for r in rows]
rows.sort()
# Make sure we get the right data back out
for i in range(0,6):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved by cursor.fetchmany'
)
rows = cur.fetchmany() # Should return an empty list
self.assertEqual(len(rows),0,
'cursor.fetchmany should return an empty sequence if '
'called after the whole result set has been fetched'
)
_failUnless(self,cur.rowcount in (-1,6))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
r = cur.fetchmany() # Should get empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence if '
'query retrieved no rows'
)
_failUnless(self,cur.rowcount in (-1,0))
finally:
con.close()
def test_fetchall(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchall should raise an Error if called
# without executing a query that may return rows (such
# as a select)
self.assertRaises(self.driver.Error, cur.fetchall)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
# cursor.fetchall should raise an Error if called
# after executing a statement that cannot return rows
self.assertRaises(self.driver.Error,cur.fetchall)
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchall()
_failUnless(self,cur.rowcount in (-1,len(self.samples)))
self.assertEqual(len(rows),len(self.samples),
'cursor.fetchall did not retrieve all rows'
)
rows = [r[0] for r in rows]
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'cursor.fetchall retrieved incorrect rows'
)
rows = cur.fetchall()
self.assertEqual(
len(rows),0,
'cursor.fetchall should return an empty list if called '
'after the whole result set has been fetched'
)
_failUnless(self,cur.rowcount in (-1,len(self.samples)))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
rows = cur.fetchall()
_failUnless(self,cur.rowcount in (-1,0))
self.assertEqual(len(rows),0,
'cursor.fetchall should return an empty list if '
'a select query returns no rows'
)
finally:
con.close()
def test_mixedfetch(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
rows1 = cur.fetchone()
rows23 = cur.fetchmany(2)
rows4 = cur.fetchone()
rows56 = cur.fetchall()
_failUnless(self,cur.rowcount in (-1,6))
self.assertEqual(len(rows23),2,
'fetchmany returned incorrect number of rows'
)
self.assertEqual(len(rows56),2,
'fetchall returned incorrect number of rows'
)
rows = [rows1[0]]
rows.extend([rows23[0][0],rows23[1][0]])
rows.append(rows4[0])
rows.extend([rows56[0][0],rows56[1][0]])
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved or inserted'
)
finally:
con.close()
def help_nextset_setUp(self,cur):
''' Should create a procedure called deleteme
that returns two result sets, first the
number of rows in booze then "name from booze"
'''
raise NotImplementedError('Helper not implemented')
#sql="""
# create procedure deleteme as
# begin
# select count(*) from booze
# select name from booze
# end
#"""
#cur.execute(sql)
def help_nextset_tearDown(self,cur):
'If cleaning up is needed after nextSetTest'
raise NotImplementedError('Helper not implemented')
#cur.execute("drop procedure deleteme")
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
if not hasattr(cur,'nextset'):
return
try:
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== len(self.samples)
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
self.help_nextset_tearDown(cur)
finally:
con.close()
def test_nextset(self):
raise NotImplementedError('Drivers need to override this test')
def test_arraysize(self):
# Not much here - rest of the tests for this are in test_fetchmany
con = self._connect()
try:
cur = con.cursor()
_failUnless(self,hasattr(cur,'arraysize'),
'cursor.arraysize must be defined'
)
finally:
con.close()
def test_setinputsizes(self):
con = self._connect()
try:
cur = con.cursor()
cur.setinputsizes( (25,) )
self._paraminsert(cur) # Make sure cursor still works
finally:
con.close()
def test_setoutputsize_basic(self):
# Basic test is to make sure setoutputsize doesn't blow up
con = self._connect()
try:
cur = con.cursor()
cur.setoutputsize(1000)
cur.setoutputsize(2000,0)
self._paraminsert(cur) # Make sure the cursor still works
finally:
con.close()
def test_setoutputsize(self):
# Real test for setoutputsize is driver dependant
raise NotImplementedError('Driver needed to override this test')
def test_None(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
cur.execute('insert into %sbooze values (NULL)' % self.table_prefix)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchall()
self.assertEqual(len(r),1)
self.assertEqual(len(r[0]),1)
self.assertEqual(r[0][0],None,'NULL value not returned as None')
finally:
con.close()
def test_Date(self):
d1 = self.driver.Date(2002,12,25)
d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(d1),str(d2))
def test_Time(self):
t1 = self.driver.Time(13,45,30)
t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Timestamp(self):
t1 = self.driver.Timestamp(2002,12,25,13,45,30)
t2 = self.driver.TimestampFromTicks(
time.mktime((2002,12,25,13,45,30,0,0,0))
)
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Binary(self):
b = self.driver.Binary(str2bytes('Something'))
b = self.driver.Binary(str2bytes(''))
def test_STRING(self):
_failUnless(self, hasattr(self.driver,'STRING'),
'module.STRING must be defined'
)
def test_BINARY(self):
_failUnless(self, hasattr(self.driver,'BINARY'),
'module.BINARY must be defined.'
)
def test_NUMBER(self):
_failUnless(self, hasattr(self.driver,'NUMBER'),
'module.NUMBER must be defined.'
)
def test_DATETIME(self):
_failUnless(self, hasattr(self.driver,'DATETIME'),
'module.DATETIME must be defined.'
)
def test_ROWID(self):
_failUnless(self, hasattr(self.driver,'ROWID'),
'module.ROWID must be defined.'
)
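The class docstring above describes how a driver's test module subclasses DatabaseAPI20Test; adodbapitest.py later in this commit is the real adodbapi example. A condensed sketch of the pattern, assuming a hypothetical DB-API module importable as "mydriver" and illustrative connect arguments:

import unittest
import dbapi20
import mydriver  # hypothetical driver module under test

class MyDriverTest(dbapi20.DatabaseAPI20Test):
    driver = mydriver
    connect_args = ()                      # positional arguments for mydriver.connect()
    connect_kw_args = {'dsn': 'test_dsn'}  # keyword arguments; the value is illustrative only

    # Optional tests a driver cannot run are simply overridden, as adodbapitest.py does below.
    def test_nextset(self): pass
    def test_setoutputsize(self): pass

if __name__ == '__main__':
    unittest.main()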

View file

@ -0,0 +1,33 @@
"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version"""
import sys
def Python():
if sys.platform == 'cli': #IronPython
import System
return System.IntPtr.Size == 8
else:
try:
return sys.maxsize > 2147483647
except AttributeError:
return sys.maxint > 2147483647
def os():
import platform
pm = platform.machine()
if pm != '..' and pm.endswith('64'): # recent Python (not Iron)
return True
else:
import os
if 'PROCESSOR_ARCHITEW6432' in os.environ:
return True # 32 bit program running on 64 bit Windows
try:
return os.environ['PROCESSOR_ARCHITECTURE'].endswith('64') # 64 bit Windows 64 bit program
except (KeyError, IndexError):  # a missing environment variable raises KeyError
pass # not Windows
try:
return '64' in platform.architecture()[0] # this often works in Linux
except:
return False # is an older version of Python, assume also an older os (best we can guess)
if __name__ == "__main__":
print(("is64bit.Python() =", Python(), "is64bit.os() =", os()))

View file

@ -0,0 +1,113 @@
#!/usr/bin/python2
# Configure this in order to run the testcases.
"setuptestframework.py v 2.5.0.c9"
import os
import sys
import tempfile
import shutil
try:
OSErrors = (WindowsError, OSError)
except NameError: # not running on Windows
OSErrors = OSError
def maketemp():
temphome = tempfile.gettempdir()
tempdir = os.path.join(temphome, 'adodbapi_test')
## try: ## if running simultaneous tests, don't erase the other thread's work
## shutil.rmtree(tempdir) # kill off an old copy
## except: pass
try: os.mkdir(tempdir)
except: pass
return tempdir
def _cleanup_function(testfolder, mdb_name):
try: os.unlink(os.path.join(testfolder, mdb_name))
except: pass # mdb database not present
try: shutil.rmtree(os.path.join(testfolder, 'adodbapi'))
except: pass # test package not present
def getcleanupfunction():
return _cleanup_function
def find_ado_path():
adoName = os.path.normpath(os.getcwd() + '/../../adodbapi.py')
adoPackage = os.path.dirname(adoName)
return adoPackage
# make a new package directory for the test copy of ado
def makeadopackage(testfolder):
adoName = os.path.normpath(os.getcwd() + '/../adodbapi.py')
adoPath = os.path.dirname(adoName)
if os.path.exists(adoName):
newpackage = os.path.join(testfolder,'adodbapi')
try:
os.mkdir(newpackage)
except OSErrors:
print('*Note: temporary adodbapi package already exists: may be two versions running?')
for f in os.listdir(adoPath):
if f.endswith('.py'):
shutil.copy(os.path.join(adoPath, f), newpackage)
if sys.version_info >= (3,0): # only when running Py3.n
save = sys.stdout
sys.stdout = None
from lib2to3.main import main # use 2to3 to make test package
main("lib2to3.fixes",args=['-n','-w', newpackage])
sys.stdout = save
return testfolder
else:
raise EnvironmentError('Cannot find source of adodbapi to test.')
def makemdb(testfolder, mdb_name):
# following setup code borrowed from pywin32 odbc test suite
# kindly contributed by Frank Millman.
import os
_accessdatasource = os.path.join(testfolder, mdb_name)
if not os.path.isfile(_accessdatasource):
try:
from win32com.client.gencache import EnsureDispatch
from win32com.client import constants
win32 = True
except ImportError: #perhaps we are running IronPython
win32 = False #iron Python
try:
from System import Activator, Type
except:
pass
# Create a brand-new database - what is the story with these?
dbe = None
for suffix in (".36", ".35", ".30"):
try:
if win32:
dbe = EnsureDispatch("DAO.DBEngine" + suffix)
else:
type = Type.GetTypeFromProgID("DAO.DBEngine" + suffix)
dbe = Activator.CreateInstance(type)
break
except:
pass
if dbe:
print((' ...Creating ACCESS db at '+_accessdatasource))
if win32:
workspace = dbe.Workspaces(0)
newdb = workspace.CreateDatabase(_accessdatasource,
constants.dbLangGeneral,
constants.dbEncrypt)
else:
newdb = dbe.CreateDatabase(_accessdatasource,';LANGID=0x0409;CP=1252;COUNTRY=0')
newdb.Close()
else:
print((' ...copying test ACCESS db to '+_accessdatasource))
mdbName = os.path.normpath(os.getcwd() + '/../examples/test.mdb')
import shutil
shutil.copy(mdbName, _accessdatasource)
return _accessdatasource
if __name__ == "__main__":
print('Setting up a Jet database for server to use for remote testing...')
temp = maketemp()
makemdb(temp, 'server_test.mdb')

View file

@ -0,0 +1,181 @@
print("This module depends on the dbapi20 compliance tests created by Stuart Bishop")
print("(see db-sig mailing list history for info)")
import platform
import unittest
import sys
import dbapi20
import setuptestframework
testfolder = setuptestframework.maketemp()
if '--package' in sys.argv:
pth = setuptestframework.makeadopackage(testfolder)
sys.argv.remove('--package')
else:
pth = setuptestframework.find_ado_path()
if pth not in sys.path:
sys.path.insert(1,pth)
# function to clean up the temporary folder -- calling program must run this function before exit.
cleanup = setuptestframework.getcleanupfunction()
import adodbapi
import adodbapi.is64bit as is64bit
db = adodbapi
if '--verbose' in sys.argv:
db.adodbapi.verbose = 3
print((adodbapi.version))
print(("Tested with dbapi20 %s" % dbapi20.__version__))
try:
onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python
except:
onWindows = False
node = platform.node()
conn_kws = {}
host = None # if None, will use macro to fill in node name
instance = r'%s\SQLExpress'
conn_kws['name'] = 'adotest'
if host is None:
conn_kws['macro_getnode'] = ['host', instance]
else:
conn_kws['host'] = host
conn_kws['provider'] = 'Provider=SQLNCLI11;DataTypeCompatibility=80;MARS Connection=True;'
connStr = "%(provider)s; Integrated Security=SSPI; Initial Catalog=%(name)s;Data Source=%(host)s"
if onWindows and node != "z-PC":
pass # default should make a local SQL Server connection
elif node == "xxx": # try Postgres database
_computername = "25.223.161.222"
_databasename='adotest'
_username = 'adotestuser'
_password = '12345678'
_driver="PostgreSQL Unicode"
_provider = ''
connStr = '%sDriver={%s};Server=%s;Database=%s;uid=%s;pwd=%s;' % \
(_provider,_driver,_computername,_databasename,_username,_password)
elif node == "yyy": # ACCESS data base is known to fail some tests.
if is64bit.Python():
driver = "Microsoft.ACE.OLEDB.12.0"
else:
driver = "Microsoft.Jet.OLEDB.4.0"
testmdb = setuptestframework.makemdb(testfolder, 'test.mdb')  # makemdb() requires a file name; 'test.mdb' is an arbitrary choice
connStr = r"Provider=%s;Data Source=%s" % (driver, testmdb)
else: # try a remote connection to an SQL server
conn_kws['proxy_host'] = '25.44.77.176'
import adodbapi.remote
db = adodbapi.remote
print(('Using Connection String like=%s' % connStr))
print(('Keywords=%s' % repr(conn_kws)))
class test_adodbapi(dbapi20.DatabaseAPI20Test):
driver = db
connect_args = (connStr,)
connect_kw_args = conn_kws
def __init__(self,arg):
dbapi20.DatabaseAPI20Test.__init__(self,arg)
def testMethodName(self):
return self.id().split('.')[-1]
def setUp(self):
# Call superclass setUp In case this does something in the
# future
dbapi20.DatabaseAPI20Test.setUp(self)
if self.testMethodName()=='test_callproc':
con = self._connect()
engine = con.dbms_name
## print('Using database Engine=%s' % engine) ##
if engine != 'MS Jet':
sql="""
create procedure templower
@theData varchar(50)
as
select lower(@theData)
"""
else: # Jet
sql="""
create procedure templower
(theData varchar(50))
as
select lower(theData);
"""
cur = con.cursor()
try:
cur.execute(sql)
con.commit()
except:
pass
cur.close()
con.close()
self.lower_func='templower'
def tearDown(self):
if self.testMethodName()=='test_callproc':
con = self._connect()
cur = con.cursor()
try:
cur.execute("drop procedure templower")
except:
pass
con.commit()
dbapi20.DatabaseAPI20Test.tearDown(self)
def help_nextset_setUp(self,cur):
'Should create a procedure called deleteme '
'that returns two result sets, first the number of rows in booze then "name from booze"'
sql="""
create procedure deleteme as
begin
select count(*) from %sbooze
select name from %sbooze
end
""" %(self.table_prefix,self.table_prefix)
cur.execute(sql)
def help_nextset_tearDown(self,cur):
'If cleaning up is needed after nextSetTest'
try:
cur.execute("drop procedure deleteme")
except:
pass
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
stmts=[self.ddl1] + self._populate()
for sql in stmts:
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== 6
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
try:
self.help_nextset_tearDown(cur)
finally:
con.close()
def test_setoutputsize(self): pass
if __name__ == '__main__':
unittest.main()
cleanup(testfolder, None)

View file

@ -0,0 +1,46 @@
# This module may be retired as soon as Python 2.5 support is dropped.
#
# It exists only to allow trapping exceptions using the "except [exception list], e" format
# which is a syntax error in Python 3
def try_connection(verbose, *args, **kwargs):
import adodbapi
if "proxy_host" in kwargs or 'pyro_connection' in kwargs or 'proxy_host' in args:
import adodbapi.remote
import Pyro4
pyroError = Pyro4.errors.PyroError
dbconnect = adodbapi.remote.connect
remote = True
else:
dbconnect = adodbapi.connect
pyroError = NotImplementedError # (will not occur)
remote = False
try:
s = dbconnect(*args, **kwargs) # connect to server
if verbose:
print('Connected to:', s.connection_string)
print('which has tables:', s.get_table_names())
s.close() # thanks, it worked, goodbye
except (adodbapi.DatabaseError, pyroError) as inst:
print(inst.args[0]) # should be the error message
print('***Failed getting connection using=', repr(args), repr(kwargs))
if remote:
print('** Is your Python2 ado.connection server running?')
print('* Have you run "setuptestframework.py" to create server_test.mdb?')
return False, (args, kwargs), None
if remote:
print(" (remote)", end=' ')
print(" (successful)")
return True, (args, kwargs, remote), dbconnect
def try_operation_with_expected_exception(expected_exceptions, some_function, args, kwargs):
try:
some_function(*args, **kwargs)
except expected_exceptions as e:
return True, e
except:
raise # an exception other than the expected occurred
return False, 'The expected exception did not occur'

View file

@ -0,0 +1,48 @@
from __future__ import print_function
# This module may be retired as soon as Python 2.5 support is dropped.
#
# It exists only to allow trapping exceptions using the "except [exception list] as e" format
# which is a syntax error in Python 2.5
def try_connection(verbose, *args, **kwargs):
import adodbapi
if "proxy_host" in kwargs or "pyro_connection" in kwargs or "proxy_host" in args:
import adodbapi.remote
import Pyro4
pyroError = Pyro4.errors.PyroError
dbconnect = adodbapi.remote.connect
remote = True
else:
dbconnect = adodbapi.connect
pyroError = NotImplementedError # (will not occur)
remote = False
try:
s = dbconnect(*args, **kwargs) # connect to server
if verbose:
print('Connected to:', s.connection_string)
print('which has tables:', s.get_table_names())
s.close() # thanks, it worked, goodbye
except adodbapi.DatabaseError as inst:
print(inst.args[0]) # should be the error message
print('***Failed getting connection using=',repr(args),repr(kwargs))
if remote:
print('** Are you running a *Python3* ado.connection server? **')
print('* Have you run "setuptestframework.py" to create server_test.mdb?')
return False, (args, kwargs), None
if remote:
print(" (remote)",end="")
print(" (successful)")
return True, (args, kwargs, remote), dbconnect
def try_operation_with_expected_exception(expected_exception_list, some_function, *args, **kwargs):
try:
some_function(*args, **kwargs)
except expected_exception_list as e:
return True, e
except:
raise # an exception other than the expected occurred
return False, 'The expected exception did not occur'
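try_operation_with_expected_exception() is not exercised anywhere in this excerpt; a self-contained sketch of a call, assuming this file is importable as 'tryconnection' (the failing function below is made up purely for illustration):

from tryconnection import try_operation_with_expected_exception

def divide(a, b):
    return a / b  # raises ZeroDivisionError when b == 0

ok, detail = try_operation_with_expected_exception((ZeroDivisionError,), divide, 1, 0)
print(ok, detail)  # -> True and the ZeroDivisionError instance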

BIN
libs/win/bin/enver.exe Normal file

Binary file not shown.


BIN
libs/win/bin/gclip.exe Normal file

Binary file not shown.

BIN
libs/win/bin/mklink.exe Normal file

Binary file not shown.

BIN
libs/win/bin/pclip.exe Normal file

Binary file not shown.

View file

@ -0,0 +1,620 @@
# postinstall script for pywin32
#
# copies PyWinTypesxx.dll and PythonCOMxx.dll into the system directory,
# and creates a pth file
import os, sys, glob, shutil, time
import winreg as winreg
# Send output somewhere so it can be found if necessary...
import tempfile
tee_f = open(os.path.join(tempfile.gettempdir(), 'pywin32_postinstall.log'), "w")
class Tee:
def __init__(self, file):
self.f = file
def write(self, what):
if self.f is not None:
try:
self.f.write(what.replace("\n", "\r\n"))
except IOError:
pass
tee_f.write(what)
def flush(self):
if self.f is not None:
try:
self.f.flush()
except IOError:
pass
tee_f.flush()
# For some unknown reason, when running under bdist_wininst we will start up
# with sys.stdout as None but stderr is hooked up. This work-around allows
# bdist_wininst to see the output we write and display it at the end of
# the install.
if sys.stdout is None:
sys.stdout = sys.stderr
sys.stderr = Tee(sys.stderr)
sys.stdout = Tee(sys.stdout)
com_modules = [
# module_name, class_names
("win32com.servers.interp", "Interpreter"),
("win32com.servers.dictionary", "DictionaryPolicy"),
("win32com.axscript.client.pyscript","PyScript"),
]
# Is this a 'silent' install - ie, avoid all dialogs.
# Different than 'verbose'
silent = 0
# Verbosity of output messages.
verbose = 1
ver_string = "%d.%d" % (sys.version_info[0], sys.version_info[1])
root_key_name = "Software\\Python\\PythonCore\\" + ver_string
try:
# When this script is run from inside the bdist_wininst installer,
# file_created() and directory_created() are additional builtin
# functions which write lines to Python23\pywin32-install.log. This is
# a list of actions for the uninstaller, the format is inspired by what
# the Wise installer also creates.
file_created
is_bdist_wininst = True
except NameError:
is_bdist_wininst = False # we know what it is not - but not what it is :)
def file_created(file):
pass
def directory_created(directory):
pass
def get_root_hkey():
try:
winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
root_key_name, 0, winreg.KEY_CREATE_SUB_KEY)
return winreg.HKEY_LOCAL_MACHINE
except OSError as details:
# Either not exist, or no permissions to create subkey means
# must be HKCU
return winreg.HKEY_CURRENT_USER
try:
create_shortcut
except NameError:
# Create a function with the same signature as create_shortcut provided
# by bdist_wininst
def create_shortcut(path, description, filename,
arguments="", workdir="", iconpath="", iconindex=0):
import pythoncom
from win32com.shell import shell, shellcon
ilink = pythoncom.CoCreateInstance(shell.CLSID_ShellLink, None,
pythoncom.CLSCTX_INPROC_SERVER,
shell.IID_IShellLink)
ilink.SetPath(path)
ilink.SetDescription(description)
if arguments:
ilink.SetArguments(arguments)
if workdir:
ilink.SetWorkingDirectory(workdir)
if iconpath or iconindex:
ilink.SetIconLocation(iconpath, iconindex)
# now save it.
ipf = ilink.QueryInterface(pythoncom.IID_IPersistFile)
ipf.Save(filename, 0)
# Support the same list of "path names" as bdist_wininst.
def get_special_folder_path(path_name):
import pythoncom
from win32com.shell import shell, shellcon
for maybe in """
CSIDL_COMMON_STARTMENU CSIDL_STARTMENU CSIDL_COMMON_APPDATA
CSIDL_LOCAL_APPDATA CSIDL_APPDATA CSIDL_COMMON_DESKTOPDIRECTORY
CSIDL_DESKTOPDIRECTORY CSIDL_COMMON_STARTUP CSIDL_STARTUP
CSIDL_COMMON_PROGRAMS CSIDL_PROGRAMS CSIDL_PROGRAM_FILES_COMMON
CSIDL_PROGRAM_FILES CSIDL_FONTS""".split():
if maybe == path_name:
csidl = getattr(shellcon, maybe)
return shell.SHGetSpecialFolderPath(0, csidl, False)
raise ValueError("%s is an unknown path ID" % (path_name,))
def CopyTo(desc, src, dest):
import win32api, win32con
while 1:
try:
win32api.CopyFile(src, dest, 0)
return
except win32api.error as details:
if details.winerror==5: # access denied - user not admin.
raise
if silent:
# Running silent mode - just re-raise the error.
raise
tb = None
full_desc = "Error %s\n\n" \
"If you have any Python applications running, " \
"please close them now\nand select 'Retry'\n\n%s" \
% (desc, details.strerror)
rc = win32api.MessageBox(0,
full_desc,
"Installation Error",
win32con.MB_ABORTRETRYIGNORE)
if rc == win32con.IDABORT:
raise
elif rc == win32con.IDIGNORE:
return
# else retry - around we go again.
# We need to import win32api to determine the Windows system directory,
# so we can copy our system files there - but importing win32api will
# load the pywintypes.dll already in the system directory preventing us
# from updating them!
# So, we pull the same trick pywintypes.py does, but it loads from
# our pywintypes_system32 directory.
def LoadSystemModule(lib_dir, modname):
# See if this is a debug build.
import imp
for suffix_item in imp.get_suffixes():
if suffix_item[0]=='_d.pyd':
suffix = '_d'
break
else:
suffix = ""
filename = "%s%d%d%s.dll" % \
(modname, sys.version_info[0], sys.version_info[1], suffix)
filename = os.path.join(lib_dir, "pywin32_system32", filename)
mod = imp.load_dynamic(modname, filename)
def SetPyKeyVal(key_name, value_name, value):
root_hkey = get_root_hkey()
root_key = winreg.OpenKey(root_hkey, root_key_name)
try:
my_key = winreg.CreateKey(root_key, key_name)
try:
winreg.SetValueEx(my_key, value_name, 0, winreg.REG_SZ, value)
finally:
my_key.Close()
finally:
root_key.Close()
if verbose:
print("-> %s\\%s[%s]=%r" % (root_key_name, key_name, value_name, value))
def RegisterCOMObjects(register = 1):
import win32com.server.register
if register:
func = win32com.server.register.RegisterClasses
else:
func = win32com.server.register.UnregisterClasses
flags = {}
if not verbose:
flags['quiet']=1
for module, klass_name in com_modules:
__import__(module)
mod = sys.modules[module]
flags["finalize_register"] = getattr(mod, "DllRegisterServer", None)
flags["finalize_unregister"] = getattr(mod, "DllUnregisterServer", None)
klass = getattr(mod, klass_name)
func(klass, **flags)
def RegisterPythonwin(register=True):
""" Add (or remove) Pythonwin to context menu for python scripts.
??? Should probably also add Edit command for pys files also.
Also need to remove these keys on uninstall, but there's no function
like file_created to add registry entries to uninstall log ???
"""
import os, distutils.sysconfig
lib_dir = distutils.sysconfig.get_python_lib(plat_specific=1)
classes_root=get_root_hkey()
## Installer executable doesn't seem to pass anything to postinstall script indicating if it's a debug build,
pythonwin_exe = os.path.join(lib_dir, "Pythonwin", "Pythonwin.exe")
pythonwin_edit_command=pythonwin_exe + ' /edit "%1"'
keys_vals = [
('Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\Pythonwin.exe', '', pythonwin_exe),
('Software\\Classes\\Python.File\\shell\\Edit with Pythonwin', 'command', pythonwin_edit_command),
('Software\\Classes\\Python.NoConFile\\shell\\Edit with Pythonwin', 'command', pythonwin_edit_command),
]
try:
if register:
for key, sub_key, val in keys_vals:
## Since winreg only uses the character Api functions, this can fail if Python
## is installed to a path containing non-ascii characters
hkey = winreg.CreateKey(classes_root, key)
if sub_key:
hkey = winreg.CreateKey(hkey, sub_key)
winreg.SetValueEx(hkey, None, 0, winreg.REG_SZ, val)
hkey.Close()
else:
for key, sub_key, val in keys_vals:
try:
winreg.DeleteKey(classes_root, key)
except OSError as why:
winerror = getattr(why, 'winerror', why.errno)
if winerror != 2: # file not found
raise
finally:
# tell windows about the change
from win32com.shell import shell, shellcon
shell.SHChangeNotify(shellcon.SHCNE_ASSOCCHANGED, shellcon.SHCNF_IDLIST, None, None)
def get_shortcuts_folder():
if get_root_hkey()==winreg.HKEY_LOCAL_MACHINE:
try:
fldr = get_special_folder_path("CSIDL_COMMON_PROGRAMS")
except OSError:
# No CSIDL_COMMON_PROGRAMS on this platform
fldr = get_special_folder_path("CSIDL_PROGRAMS")
else:
# non-admin install - always goes in this user's start menu.
fldr = get_special_folder_path("CSIDL_PROGRAMS")
try:
install_group = winreg.QueryValue(get_root_hkey(),
root_key_name + "\\InstallPath\\InstallGroup")
except OSError:
vi = sys.version_info
install_group = "Python %d.%d" % (vi[0], vi[1])
return os.path.join(fldr, install_group)
# Get the system directory, which may be the Wow64 directory if we are a 32bit
# python on a 64bit OS.
def get_system_dir():
import win32api # we assume this exists.
try:
import pythoncom
import win32process
from win32com.shell import shell, shellcon
try:
if win32process.IsWow64Process():
return shell.SHGetSpecialFolderPath(0,shellcon.CSIDL_SYSTEMX86)
return shell.SHGetSpecialFolderPath(0,shellcon.CSIDL_SYSTEM)
except (pythoncom.com_error, win32process.error):
return win32api.GetSystemDirectory()
except ImportError:
return win32api.GetSystemDirectory()
def fixup_dbi():
# We used to have a dbi.pyd with our .pyd files, but now have a .py file.
# If the user didn't uninstall, they will find the .pyd which will cause
# problems - so handle that.
import win32api, win32con
pyd_name = os.path.join(os.path.dirname(win32api.__file__), "dbi.pyd")
pyd_d_name = os.path.join(os.path.dirname(win32api.__file__), "dbi_d.pyd")
py_name = os.path.join(os.path.dirname(win32con.__file__), "dbi.py")
for this_pyd in (pyd_name, pyd_d_name):
this_dest = this_pyd + ".old"
if os.path.isfile(this_pyd) and os.path.isfile(py_name):
try:
if os.path.isfile(this_dest):
print("Old dbi '%s' already exists - deleting '%s'" % (this_dest, this_pyd))
os.remove(this_pyd)
else:
os.rename(this_pyd, this_dest)
print("renamed '%s'->'%s.old'" % (this_pyd, this_pyd))
file_created(this_pyd+".old")
except os.error as exc:
print("FAILED to rename '%s': %s" % (this_pyd, exc))
def install():
import distutils.sysconfig
import traceback
# The .pth file is now installed as a regular file.
# Create the .pth file in the site-packages dir, and use only relative paths
lib_dir = distutils.sysconfig.get_python_lib(plat_specific=1)
# We used to write a .pth directly to sys.prefix - clobber it.
if os.path.isfile(os.path.join(sys.prefix, "pywin32.pth")):
os.unlink(os.path.join(sys.prefix, "pywin32.pth"))
# The .pth may be new and therefore not loaded in this session.
# Setup the paths just in case.
for name in "win32 win32\\lib Pythonwin".split():
sys.path.append(os.path.join(lib_dir, name))
# It is possible people with old versions installed with still have
# pywintypes and pythoncom registered. We no longer need this, and stale
# entries hurt us.
for name in "pythoncom pywintypes".split():
keyname = "Software\\Python\\PythonCore\\" + sys.winver + "\\Modules\\" + name
for root in winreg.HKEY_LOCAL_MACHINE, winreg.HKEY_CURRENT_USER:
try:
winreg.DeleteKey(root, keyname + "\\Debug")
except WindowsError:
pass
try:
winreg.DeleteKey(root, keyname)
except WindowsError:
pass
LoadSystemModule(lib_dir, "pywintypes")
LoadSystemModule(lib_dir, "pythoncom")
import win32api
# and now we can get the system directory:
files = glob.glob(os.path.join(lib_dir, "pywin32_system32\\*.*"))
if not files:
raise RuntimeError("No system files to copy!!")
# Try the system32 directory first - if that fails due to "access denied",
# it implies a non-admin user, and we use sys.prefix
for dest_dir in [get_system_dir(), sys.prefix]:
# and copy some files over there
worked = 0
try:
for fname in files:
base = os.path.basename(fname)
dst = os.path.join(dest_dir, base)
CopyTo("installing %s" % base, fname, dst)
if verbose:
print("Copied %s to %s" % (base, dst))
# Register the files with the uninstaller
file_created(dst)
worked = 1
# If this isn't sys.prefix (ie, System32), then nuke
# any versions that may exist in sys.prefix - having
# duplicates causes major headaches.
if dest_dir != sys.prefix:
bad_fname = os.path.join(sys.prefix, base)
if os.path.exists(bad_fname):
# let exceptions go here - delete must succeed
os.unlink(bad_fname)
if worked:
break
except win32api.error as details:
if details.winerror==5:
# access denied - user not admin - try sys.prefix dir,
# but first check that a version doesn't already exist
# in that place - otherwise that one will still get used!
if os.path.exists(dst):
msg = "The file '%s' exists, but can not be replaced " \
"due to insufficient permissions. You must " \
"reinstall this software as an Administrator" \
% dst
print(msg)
raise RuntimeError(msg)
continue
raise
else:
raise RuntimeError(
"You don't have enough permissions to install the system files")
# Pythonwin 'compiles' config files - record them for uninstall.
pywin_dir = os.path.join(lib_dir, "Pythonwin", "pywin")
for fname in glob.glob(os.path.join(pywin_dir, "*.cfg")):
file_created(fname[:-1] + "c") # .cfg->.cfc
# Register our demo COM objects.
try:
try:
RegisterCOMObjects()
except win32api.error as details:
if details.winerror!=5: # ERROR_ACCESS_DENIED
raise
print("You do not have the permissions to install COM objects.")
print("The sample COM objects were not registered.")
except:
print("FAILED to register the Python COM objects")
traceback.print_exc()
# There may be no main Python key in HKCU if, eg, an admin installed
# python itself.
winreg.CreateKey(get_root_hkey(), root_key_name)
# Register the .chm help file.
chm_file = os.path.join(lib_dir, "PyWin32.chm")
if os.path.isfile(chm_file):
# This isn't recursive, so if 'Help' doesn't exist, we croak
SetPyKeyVal("Help", None, None)
SetPyKeyVal("Help\\Pythonwin Reference", None, chm_file)
else:
print("NOTE: PyWin32.chm can not be located, so has not " \
"been registered")
# misc other fixups.
fixup_dbi()
# Register Pythonwin in context menu
try:
RegisterPythonwin()
except:
print('Failed to register pythonwin as editor')
traceback.print_exc()
else:
if verbose:
print('Pythonwin has been registered in context menu')
# Create the win32com\gen_py directory.
make_dir = os.path.join(lib_dir, "win32com", "gen_py")
if not os.path.isdir(make_dir):
if verbose:
print("Creating directory", make_dir)
directory_created(make_dir)
os.mkdir(make_dir)
try:
# create shortcuts
# CSIDL_COMMON_PROGRAMS is only available on NT/2000/XP, and
# will fail there if the user has no admin rights.
fldr = get_shortcuts_folder()
# If the group doesn't exist, then we don't make shortcuts - it's
# possible that this isn't a "normal" install.
if os.path.isdir(fldr):
dst = os.path.join(fldr, "PythonWin.lnk")
create_shortcut(os.path.join(lib_dir, "Pythonwin\\Pythonwin.exe"),
"The Pythonwin IDE", dst, "", sys.prefix)
file_created(dst)
if verbose:
print("Shortcut for Pythonwin created")
# And the docs.
dst = os.path.join(fldr, "Python for Windows Documentation.lnk")
doc = "Documentation for the PyWin32 extensions"
create_shortcut(chm_file, doc, dst)
file_created(dst)
if verbose:
print("Shortcut to documentation created")
else:
if verbose:
print("Can't install shortcuts - %r is not a folder" % (fldr,))
except Exception as details:
print(details)
# importing win32com.client ensures the gen_py dir created - not strictly
# necessary to do now, but this makes the installation "complete"
try:
import win32com.client
except ImportError:
# Don't let this error sound fatal
pass
print("The pywin32 extensions were successfully installed.")
def uninstall():
import distutils.sysconfig
lib_dir = distutils.sysconfig.get_python_lib(plat_specific=1)
# First ensure our system modules are loaded from pywin32_system, so
# we can remove the ones we copied...
LoadSystemModule(lib_dir, "pywintypes")
LoadSystemModule(lib_dir, "pythoncom")
try:
RegisterCOMObjects(False)
except Exception as why:
print("Failed to unregister COM objects:", why)
try:
RegisterPythonwin(False)
except Exception as why:
print("Failed to unregister Pythonwin:", why)
else:
if verbose:
print('Unregistered Pythonwin')
try:
# remove gen_py directory.
gen_dir = os.path.join(lib_dir, "win32com", "gen_py")
if os.path.isdir(gen_dir):
shutil.rmtree(gen_dir)
if verbose:
print("Removed directory", gen_dir)
# Remove pythonwin compiled "config" files.
pywin_dir = os.path.join(lib_dir, "Pythonwin", "pywin")
for fname in glob.glob(os.path.join(pywin_dir, "*.cfc")):
os.remove(fname)
# The dbi.pyd.old files we may have created.
try:
os.remove(os.path.join(lib_dir, "win32", "dbi.pyd.old"))
except os.error:
pass
try:
os.remove(os.path.join(lib_dir, "win32", "dbi_d.pyd.old"))
except os.error:
pass
except Exception as why:
print("Failed to remove misc files:", why)
try:
fldr = get_shortcuts_folder()
for link in ("PythonWin.lnk", "Python for Windows Documentation.lnk"):
fqlink = os.path.join(fldr, link)
if os.path.isfile(fqlink):
os.remove(fqlink)
if verbose:
print("Removed", link)
except Exception as why:
print("Failed to remove shortcuts:", why)
# Now remove the system32 files.
files = glob.glob(os.path.join(lib_dir, "pywin32_system32\\*.*"))
# Try the system32 directory first - if that fails due to "access denied",
# it implies a non-admin user, and we use sys.prefix
try:
for dest_dir in [get_system_dir(), sys.prefix]:
# and copy some files over there
worked = 0
for fname in files:
base = os.path.basename(fname)
dst = os.path.join(dest_dir, base)
if os.path.isfile(dst):
try:
os.remove(dst)
worked = 1
if verbose:
print("Removed file %s" % (dst))
except Exception:
print("FAILED to remove", dst)
if worked:
break
except Exception as why:
print("FAILED to remove system files:", why)
def usage():
msg = \
"""%s: A post-install script for the pywin32 extensions.
Typical usage:
> python pywin32_postinstall.py -install
If you installed pywin32 via a .exe installer, this should be run
automatically after installation, but if it fails you can run it again.
If you installed pywin32 via PIP, you almost certainly need to run this to
setup the environment correctly.
Execute this script with a '-install' parameter, to ensure the environment
is setup correctly.
Options:
-install : Configure the Python environment correctly for pywin32.
-remove : Try and remove everything that was installed or copied.
-wait pid : Wait for the specified process to terminate before starting.
-silent : Don't display the "Abort/Retry/Ignore" dialog for files in use.
-quiet : Don't display progress messages.
"""
print(msg.strip() % os.path.basename(sys.argv[0]))
# NOTE: If this script is run from inside the bdist_wininst created
# binary installer or uninstaller, the command line args are either
# '-install' or '-remove'.
# Important: From inside the binary installer this script MUST NOT
# call sys.exit() or raise SystemExit, otherwise not only this script
# but also the installer will terminate! (Is there a way to prevent
# this from the bdist_wininst C code?)
if __name__=='__main__':
if len(sys.argv)==1:
usage()
sys.exit(1)
arg_index = 1
while arg_index < len(sys.argv):
arg = sys.argv[arg_index]
# Hack for installing while we are in use. Just a simple wait so the
# parent process can terminate.
if arg == "-wait":
arg_index += 1
pid = int(sys.argv[arg_index])
try:
os.waitpid(pid, 0)
except AttributeError:
# Python 2.2 - no waitpid - just sleep.
time.sleep(3)
except os.error:
# child already dead
pass
elif arg == "-install":
install()
elif arg == "-silent":
silent = 1
elif arg == "-quiet":
verbose = 0
elif arg == "-remove":
# bdist_msi calls us before uninstall, so we can undo what we
# previously did. Sadly, bdist_wininst calls us *after*, so
# we can't do much at all.
if not is_bdist_wininst:
uninstall()
else:
print("Unknown option:", arg)
usage()
sys.exit(0)
arg_index += 1

View file

@ -0,0 +1,88 @@
"""A test runner for pywin32"""
import sys
import os
import distutils.sysconfig
import win32api
# locate the dirs based on where this script is - it may be either in the
# source tree, or in an installed Python 'Scripts' tree.
this_dir = os.path.dirname(__file__)
site_packages = distutils.sysconfig.get_python_lib(plat_specific=1)
if hasattr(os, 'popen3'):
def run_test(script, cmdline_rest=""):
dirname, scriptname = os.path.split(script)
# some tests prefer to be run from their directory.
cwd = os.getcwd()
os.chdir(dirname)
try:
executable = win32api.GetShortPathName(sys.executable)
cmd = '%s "%s" %s' % (sys.executable, scriptname, cmdline_rest)
print(script)
stdin, stdout, stderr = os.popen3(cmd)
stdin.close()
while 1:
char = stderr.read(1)
if not char:
break
sys.stdout.write(char)
for line in stdout.readlines():
print(line)
stdout.close()
result = stderr.close()
if result is not None:
print("****** %s failed: %s" % (script, result))
finally:
os.chdir(cwd)
else:
# a subprocess version - but we prefer the popen one if we can as we can
# see test results as they are run (whereas this one waits until the test
# is finished...)
import subprocess
def run_test(script, cmdline_rest=""):
dirname, scriptname = os.path.split(script)
# some tests prefer to be run from their directory.
cmd = [sys.executable, "-u", scriptname] + cmdline_rest.split()
print(script)
popen = subprocess.Popen(cmd, shell=True, cwd=dirname,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
data = popen.communicate()[0]
sys.stdout.buffer.write(data)
if popen.returncode:
print("****** %s failed: %s" % (script, popen.returncode))
def find_and_run(possible_locations, script, cmdline_rest=""):
for maybe in possible_locations:
if os.path.isfile(os.path.join(maybe, script)):
run_test(os.path.abspath(os.path.join(maybe, script)), cmdline_rest)
break
else:
raise RuntimeError("Failed to locate the test script '%s' in one of %s"
% (script, possible_locations))
if __name__=='__main__':
# win32
maybes = [os.path.join(this_dir, "win32", "test"),
os.path.join(site_packages, "win32", "test"),
]
find_and_run(maybes, 'testall.py')
# win32com
maybes = [os.path.join(this_dir, "com", "win32com", "test"),
os.path.join(site_packages, "win32com", "test"),
]
find_and_run(maybes, 'testall.py', "2")
# adodbapi
maybes = [os.path.join(this_dir, "adodbapi", "tests"),
os.path.join(site_packages, "adodbapi", "tests"),
]
find_and_run(maybes, 'adodbapitest.py')
# This script has a hard-coded sql server name in it, (and markh typically
# doesn't have a different server to test on) so don't bother trying to
# run it...
# find_and_run(maybes, 'test_adodbapi_dbapi20.py')
if sys.version_info > (3,):
print("** The tests have some issues on py3k - not all failures are a problem...")

BIN
libs/win/bin/xmouse.exe Normal file

Binary file not shown.

View file

@ -0,0 +1,17 @@
from .api import distribution, Distribution, PackageNotFoundError # noqa: F401
from .api import metadata, entry_points, resolve, version, read_text
# Import for installation side-effects.
from . import _hooks # noqa: F401
__all__ = [
'metadata',
'entry_points',
'resolve',
'version',
'read_text',
]
__version__ = version(__name__)

View file

@ -0,0 +1,148 @@
from __future__ import unicode_literals, absolute_import
import re
import sys
import itertools
from .api import Distribution
from zipfile import ZipFile
if sys.version_info >= (3,): # pragma: nocover
from contextlib import suppress
from pathlib import Path
else: # pragma: nocover
from contextlib2 import suppress # noqa
from itertools import imap as map # type: ignore
from pathlib2 import Path
FileNotFoundError = IOError, OSError
__metaclass__ = type
def install(cls):
"""Class decorator for installation on sys.meta_path."""
sys.meta_path.append(cls)
return cls
class NullFinder:
@staticmethod
def find_spec(*args, **kwargs):
return None
# In Python 2, the import system requires finders
# to have a find_module() method, but this usage
# is deprecated in Python 3 in favor of find_spec().
# For the purposes of this finder (i.e. being present
# on sys.meta_path but having no other import
# system functionality), the two methods are identical.
find_module = find_spec
@install
class MetadataPathFinder(NullFinder):
"""A degenerate finder for distribution packages on the file system.
This finder supplies only a find_distribution() method for versions
of Python that do not have a PathFinder find_distribution().
"""
search_template = r'{name}(-.*)?\.(dist|egg)-info'
@classmethod
def find_distribution(cls, name):
paths = cls._search_paths(name)
dists = map(PathDistribution, paths)
return next(dists, None)
@classmethod
def _search_paths(cls, name):
"""
Find metadata directories in sys.path heuristically.
"""
return itertools.chain.from_iterable(
cls._search_path(path, name)
for path in map(Path, sys.path)
)
@classmethod
def _search_path(cls, root, name):
if not root.is_dir():
return ()
normalized = name.replace('-', '_')
return (
item
for item in root.iterdir()
if item.is_dir()
and re.match(
cls.search_template.format(name=normalized),
str(item.name),
flags=re.IGNORECASE,
)
)
class PathDistribution(Distribution):
def __init__(self, path):
"""Construct a distribution from a path to the metadata directory."""
self._path = path
def read_text(self, filename):
with suppress(FileNotFoundError):
with self._path.joinpath(filename).open(encoding='utf-8') as fp:
return fp.read()
return None
read_text.__doc__ = Distribution.read_text.__doc__
@install
class WheelMetadataFinder(NullFinder):
"""A degenerate finder for distribution packages in wheels.
This finder supplies only a find_distribution() method for versions
of Python that do not have a PathFinder find_distribution().
"""
search_template = r'{name}(-.*)?\.whl'
@classmethod
def find_distribution(cls, name):
paths = cls._search_paths(name)
dists = map(WheelDistribution, paths)
return next(dists, None)
@classmethod
def _search_paths(cls, name):
return (
item
for item in map(Path, sys.path)
if re.match(
cls.search_template.format(name=name),
str(item.name),
flags=re.IGNORECASE,
)
)
class WheelDistribution(Distribution):
def __init__(self, archive):
self._archive = archive
name, version = archive.name.split('-')[0:2]
self._dist_info = '{}-{}.dist-info'.format(name, version)
def read_text(self, filename):
with ZipFile(_path_to_filename(self._archive)) as zf:
with suppress(KeyError):
as_bytes = zf.read('{}/{}'.format(self._dist_info, filename))
return as_bytes.decode('utf-8')
return None
read_text.__doc__ = Distribution.read_text.__doc__
def _path_to_filename(path): # pragma: nocover
"""
On non-compliant systems, ensure a path-like object is
a string.
"""
try:
return path.__fspath__()
except AttributeError:
return str(path)

View file

@ -0,0 +1,146 @@
import io
import abc
import sys
import email
from importlib import import_module
if sys.version_info > (3,): # pragma: nocover
from configparser import ConfigParser
else: # pragma: nocover
from ConfigParser import SafeConfigParser as ConfigParser
try:
BaseClass = ModuleNotFoundError
except NameError: # pragma: nocover
BaseClass = ImportError # type: ignore
__metaclass__ = type
class PackageNotFoundError(BaseClass):
"""The package was not found."""
class Distribution:
"""A Python distribution package."""
@abc.abstractmethod
def read_text(self, filename):
"""Attempt to load metadata file given by the name.
:param filename: The name of the file in the distribution info.
:return: The text if found, otherwise None.
"""
@classmethod
def from_name(cls, name):
"""Return the Distribution for the given package name.
:param name: The name of the distribution package to search for.
:return: The Distribution instance (or subclass thereof) for the named
package, if found.
:raises PackageNotFoundError: When the named package's distribution
metadata cannot be found.
"""
for resolver in cls._discover_resolvers():
resolved = resolver(name)
if resolved is not None:
return resolved
else:
raise PackageNotFoundError(name)
@staticmethod
def _discover_resolvers():
"""Search the meta_path for resolvers."""
declared = (
getattr(finder, 'find_distribution', None)
for finder in sys.meta_path
)
return filter(None, declared)
@property
def metadata(self):
"""Return the parsed metadata for this Distribution.
The returned object will have keys that name the various bits of
metadata. See PEP 566 for details.
"""
return email.message_from_string(
self.read_text('METADATA') or self.read_text('PKG-INFO')
)
@property
def version(self):
"""Return the 'Version' metadata for the distribution package."""
return self.metadata['Version']
def distribution(package):
"""Get the ``Distribution`` instance for the given package.
:param package: The name of the package as a string.
:return: A ``Distribution`` instance (or subclass thereof).
"""
return Distribution.from_name(package)
def metadata(package):
"""Get the metadata for the package.
:param package: The name of the distribution package to query.
:return: An email.Message containing the parsed metadata.
"""
return Distribution.from_name(package).metadata
def version(package):
"""Get the version string for the named package.
:param package: The name of the distribution package to query.
:return: The version string for the package as defined in the package's
"Version" metadata key.
"""
return distribution(package).version
def entry_points(name):
"""Return the entry points for the named distribution package.
:param name: The name of the distribution package to query.
:return: A ConfigParser instance where the sections and keys are taken
from the entry_points.txt ini-style contents.
"""
as_string = read_text(name, 'entry_points.txt')
# 2018-09-10(barry): Should we provide any options here, or let the caller
# send options to the underlying ConfigParser? For now, YAGNI.
config = ConfigParser()
try:
config.read_string(as_string)
except AttributeError: # pragma: nocover
# Python 2 has no read_string
config.readfp(io.StringIO(as_string))
return config
def resolve(entry_point):
"""Resolve an entry point string into the named callable.
:param entry_point: An entry point string of the form
`path.to.module:callable`.
:return: The actual callable object `path.to.module.callable`
:raises ValueError: When `entry_point` doesn't have the proper format.
"""
path, colon, name = entry_point.rpartition(':')
if colon != ':':
raise ValueError('Not an entry point: {}'.format(entry_point))
module = import_module(path)
return getattr(module, name)
def read_text(package, filename):
"""
Read the text of the file in the distribution info directory.
"""
return distribution(package).read_text(filename)

View file

@ -0,0 +1,57 @@
=========================
importlib_metadata NEWS
=========================
0.7 (2018-11-27)
================
* Fixed issue where packages with dashes in their names would
not be discovered. Closes #21.
* Distribution lookup is now case-insensitive. Closes #20.
* Wheel distributions can no longer be discovered by their module
name. Like Path distributions, they must be indicated by their
distribution package name.
0.6 (2018-10-07)
================
* Removed ``importlib_metadata.distribution`` function. Now
the public interface is primarily the utility functions exposed
in ``importlib_metadata.__all__``. Closes #14.
* Added two new utility functions ``read_text`` and
``metadata``.
0.5 (2018-09-18)
================
* Updated README and removed details about Distribution
class, now considered private. Closes #15.
* Added test suite support for Python 3.4+.
* Fixed SyntaxErrors on Python 3.4 and 3.5. !12
* Fixed errors on Windows joining Path elements. !15
0.4 (2018-09-14)
================
* Housekeeping.
0.3 (2018-09-14)
================
* Added usage documentation. Closes #8
* Add support for getting metadata from wheels on ``sys.path``. Closes #9
0.2 (2018-09-11)
================
* Added ``importlib_metadata.entry_points()``. Closes #1
* Added ``importlib_metadata.resolve()``. Closes #12
* Add support for Python 2.7. Closes #4
0.1 (2018-09-10)
================
* Initial release.
..
Local Variables:
mode: change-log-mode
indent-tabs-mode: nil
sentence-end-double-space: t
fill-column: 78
coding: utf-8
End:

View file

@ -0,0 +1,180 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# flake8: noqa
#
# importlib_metadata documentation build configuration file, created by
# sphinx-quickstart on Thu Nov 30 10:21:00 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'importlib_metadata'
copyright = '2017-2018, Jason Coombs, Barry Warsaw'
author = 'Jason Coombs, Barry Warsaw'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'importlib_metadatadoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'importlib_metadata.tex', 'importlib\\_metadata Documentation',
'Brett Cannon, Barry Warsaw', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
author, 'importlib_metadata', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None),
}

View file

@ -0,0 +1,53 @@
===============================
Welcome to importlib_metadata
===============================
``importlib_metadata`` is a library which provides an API for accessing an
installed package's `metadata`_, such as its entry points or its top-level
name. This functionality intends to replace most uses of ``pkg_resources``
`entry point API`_ and `metadata API`_. Along with ``importlib.resources`` in
`Python 3.7 and newer`_ (backported as `importlib_resources`_ for older
versions of Python), this can eliminate the need to use the older and less
efficient ``pkg_resources`` package.
``importlib_metadata`` is a backport of Python 3.8's standard library
`importlib.metadata`_ module for Python 2.7, and 3.4 through 3.7. Users of
Python 3.8 and beyond are encouraged to use the standard library module, and
in fact for these versions, ``importlib_metadata`` just shadows that module.
Developers looking for detailed API descriptions should refer to the Python
3.8 standard library documentation.
The documentation here includes a general :ref:`usage <using>` guide.
.. toctree::
:maxdepth: 2
:caption: Contents:
using.rst
changelog.rst
Project details
===============
* Project home: https://gitlab.com/python-devs/importlib_metadata
* Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues
* Code hosting: https://gitlab.com/python-devs/importlib_metadata.git
* Documentation: http://importlib_metadata.readthedocs.io/
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. _`metadata`: https://www.python.org/dev/peps/pep-0566/
.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
.. _`Python 3.7 and newer`: https://docs.python.org/3/library/importlib.html#module-importlib.resources
.. _`importlib_resources`: https://importlib-resources.readthedocs.io/en/latest/index.html
.. _`importlib.metadata`: TBD

View file

@ -0,0 +1,133 @@
.. _using:
==========================
Using importlib_metadata
==========================
``importlib_metadata`` is a library that provides for access to installed
package metadata. Built in part on Python's import system, this library
intends to replace similar functionality in ``pkg_resources`` `entry point
API`_ and `metadata API`_. Along with ``importlib.resources`` in `Python 3.7
and newer`_ (backported as `importlib_resources`_ for older versions of
Python), this can eliminate the need to use the older and less efficient
``pkg_resources`` package.
By "installed package" we generally mean a third party package installed into
Python's ``site-packages`` directory via tools such as ``pip``. Specifically,
it means a package with either a discoverable ``dist-info`` or ``egg-info``
directory, and metadata defined by `PEP 566`_ or its older specifications.
By default, package metadata can live on the file system or in wheels on
``sys.path``. Through an extension mechanism, the metadata can live almost
anywhere.
Overview
========
Let's say you wanted to get the version string for a package you've installed
using ``pip``. We start by creating a virtual environment and installing
something into it::
$ python3 -m venv example
$ source example/bin/activate
(example) $ pip install importlib_metadata
(example) $ pip install wheel
You can get the version string for ``wheel`` by running the following::
(example) $ python
>>> from importlib_metadata import version
>>> version('wheel')
'0.31.1'
You can also get the set of entry points for the ``wheel`` package. Since the
``entry_points.txt`` file is an ``.ini``-style file, the ``entry_points()``
function returns a `ConfigParser instance`_. To get the list of command line
entry points, extract the ``console_scripts`` section::
    >>> from importlib_metadata import entry_points
    >>> cp = entry_points('wheel')
>>> cp.options('console_scripts')
['wheel']
You can also get the callable that the entry point is mapped to::
>>> cp.get('console_scripts', 'wheel')
'wheel.tool:main'
Even more conveniently, you can resolve this entry point to the actual
callable::
>>> from importlib_metadata import resolve
>>> ep = cp.get('console_scripts', 'wheel')
>>> resolve(ep)
<function main at 0x111b91bf8>
Distributions
=============
While the above API is the most common and convenient usage, you can get all
of that information from the ``Distribution`` class. A ``Distribution`` is an
abstract object that represents the metadata for a Python package. You can
get the ``Distribution`` instance::
>>> from importlib_metadata import distribution
>>> dist = distribution('wheel')
Thus, an alternative way to get the version number is through the
``Distribution`` instance::
>>> dist.version
'0.31.1'
There are all kinds of additional metadata available on the ``Distribution``
instance::
    >>> dist.metadata['Requires-Python']
    '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
    >>> dist.metadata['License']
'MIT'
The full set of available metadata is not described here. See PEP 566 for
additional details.
Extending the search algorithm
==============================
Because package metadata is not available through ``sys.path`` searches, or
package loaders directly, the metadata for a package is found through import
system `finders`_. To find a distribution package's metadata,
``importlib_metadata`` queries the list of `meta path finders`_ on
`sys.meta_path`_.
By default ``importlib_metadata`` installs a finder for packages found on the
file system. This finder doesn't actually find any *packages*, but it can
find the package's metadata.
The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the
interface expected of finders by Python's import system.
``importlib_metadata`` extends this protocol by looking for an optional
``find_distribution()`` ``@classmethod`` on the finders from
``sys.meta_path``. If the finder has this method, it takes a single argument
which is the name of the distribution package to find. The method returns
``None`` if it cannot find the distribution package, otherwise it returns an
instance of the ``Distribution`` abstract class.
What this means in practice is that to support finding distribution package
metadata in locations other than the file system, you subclass
``Distribution`` and implement its abstract ``read_text()`` method, which
takes the name of a file within the distribution's metadata (for example
``METADATA`` or ``entry_points.txt``) and returns its text, or ``None`` if
the file is not present. An instance of that ``Distribution`` subclass is
what your finder's ``find_distribution()`` method should return.
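A minimal sketch of such an extension might look like this. Everything in it
- the in-memory storage, the class names and the ``example-pkg`` distribution
- is hypothetical; only the ``find_distribution()`` and ``read_text()``
protocol described above comes from ``importlib_metadata``::

    import sys
    import importlib_metadata

    class DictDistribution(importlib_metadata.Distribution):
        """A Distribution whose metadata files live in a plain dict."""
        def __init__(self, files):
            self._files = files

        def read_text(self, filename):
            # Return the text of the named metadata file, or None.
            return self._files.get(filename)

    class InMemoryFinder:
        """A degenerate finder that knows about exactly one package."""
        _dists = {
            'example-pkg': DictDistribution({'METADATA': 'Version: 1.0\n'}),
        }

        @classmethod
        def find_distribution(cls, name):
            return cls._dists.get(name)

        # Present only so the import system tolerates this finder on
        # sys.meta_path; it never finds actual modules.
        @staticmethod
        def find_spec(*args, **kwargs):
            return None
        find_module = find_spec

    sys.meta_path.append(InMemoryFinder)
    print(importlib_metadata.version('example-pkg'))   # prints '1.0'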
.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
.. _`Python 3.7 and newer`: https://docs.python.org/3/library/importlib.html#module-importlib.resources
.. _`importlib_resources`: https://importlib-resources.readthedocs.io/en/latest/index.html
.. _`PEP 566`: https://www.python.org/dev/peps/pep-0566/
.. _`ConfigParser instance`: https://docs.python.org/3/library/configparser.html#configparser.ConfigParser
.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders
.. _`meta path finders`: https://docs.python.org/3/glossary.html#term-meta-path-finder
.. _`sys.meta_path`: https://docs.python.org/3/library/sys.html#sys.meta_path

View file

@ -0,0 +1,44 @@
import re
import unittest
import importlib_metadata
class APITests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
version = importlib_metadata.version('importlib_metadata')
assert isinstance(version, str)
assert re.match(self.version_pattern, version)
def test_retrieves_version_of_pip(self):
# Assume pip is installed and retrieve the version of pip.
version = importlib_metadata.version('pip')
assert isinstance(version, str)
assert re.match(self.version_pattern, version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFoundError):
importlib_metadata.distribution('does-not-exist')
def test_for_top_level(self):
distribution = importlib_metadata.distribution('importlib_metadata')
self.assertEqual(
distribution.read_text('top_level.txt').strip(),
'importlib_metadata')
def test_entry_points(self):
parser = importlib_metadata.entry_points('pip')
# We should probably not be dependent on a third party package's
# internal API staying stable.
entry_point = parser.get('console_scripts', 'pip')
self.assertEqual(entry_point, 'pip._internal:main')
def test_metadata_for_this_package(self):
md = importlib_metadata.metadata('importlib_metadata')
assert md['author'] == 'Barry Warsaw'
assert md['LICENSE'] == 'Apache Software License'
assert md['Name'] == 'importlib-metadata'
classifiers = md.get_all('Classifier')
assert 'Topic :: Software Development :: Libraries' in classifiers

View file

@ -0,0 +1,121 @@
from __future__ import unicode_literals
import re
import sys
import shutil
import tempfile
import unittest
import importlib
import contextlib
import importlib_metadata
try:
from contextlib import ExitStack
except ImportError:
from contextlib2 import ExitStack
try:
import pathlib
except ImportError:
import pathlib2 as pathlib
from importlib_metadata import _hooks
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_pip(self):
# Assume pip is installed and retrieve the version of pip.
dist = importlib_metadata.Distribution.from_name('pip')
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFoundError):
importlib_metadata.Distribution.from_name('does-not-exist')
def test_new_style_classes(self):
self.assertIsInstance(importlib_metadata.Distribution, type)
self.assertIsInstance(_hooks.MetadataPathFinder, type)
self.assertIsInstance(_hooks.WheelMetadataFinder, type)
self.assertIsInstance(_hooks.WheelDistribution, type)
class ImportTests(unittest.TestCase):
def test_import_nonexistent_module(self):
# Ensure that the MetadataPathFinder does not crash an import of a
# non-existent module.
with self.assertRaises(ImportError):
importlib.import_module('does_not_exist')
def test_resolve(self):
entry_points = importlib_metadata.entry_points('pip')
main = importlib_metadata.resolve(
entry_points.get('console_scripts', 'pip'))
import pip._internal
self.assertEqual(main, pip._internal.main)
def test_resolve_invalid(self):
self.assertRaises(ValueError, importlib_metadata.resolve, 'bogus.ep')
class NameNormalizationTests(unittest.TestCase):
@staticmethod
def pkg_with_dashes(site_dir):
"""
Create minimal metadata for a package with dashes
in the name (and thus underscores in the filename).
"""
metadata_dir = site_dir / 'my_pkg.dist-info'
metadata_dir.mkdir()
metadata = metadata_dir / 'METADATA'
with metadata.open('w') as strm:
strm.write('Version: 1.0\n')
return 'my-pkg'
@staticmethod
@contextlib.contextmanager
def site_dir():
tmpdir = tempfile.mkdtemp()
sys.path[:0] = [tmpdir]
try:
yield pathlib.Path(tmpdir)
finally:
sys.path.remove(tmpdir)
shutil.rmtree(tmpdir)
def setUp(self):
self.fixtures = ExitStack()
self.addCleanup(self.fixtures.close)
self.site_dir = self.fixtures.enter_context(self.site_dir())
def test_dashes_in_dist_name_found_as_underscores(self):
"""
For a package with a dash in the name, the dist-info metadata
uses underscores in the name. Ensure the metadata loads.
"""
pkg_name = self.pkg_with_dashes(self.site_dir)
assert importlib_metadata.version(pkg_name) == '1.0'
@staticmethod
def pkg_with_mixed_case(site_dir):
"""
Create minimal metadata for a package with mixed case
in the name.
"""
metadata_dir = site_dir / 'CherryPy.dist-info'
metadata_dir.mkdir()
metadata = metadata_dir / 'METADATA'
with metadata.open('w') as strm:
strm.write('Version: 1.0\n')
return 'CherryPy'
def test_dist_name_found_as_any_case(self):
"""
Ensure the metadata loads when queried with any case.
"""
pkg_name = self.pkg_with_mixed_case(self.site_dir)
assert importlib_metadata.version(pkg_name) == '1.0'
assert importlib_metadata.version(pkg_name.lower()) == '1.0'
assert importlib_metadata.version(pkg_name.upper()) == '1.0'

View file

@ -0,0 +1,42 @@
import sys
import unittest
import importlib_metadata
try:
from contextlib import ExitStack
except ImportError:
from contextlib2 import ExitStack
from importlib_resources import path
class BespokeLoader:
archive = 'bespoke'
class TestZip(unittest.TestCase):
def setUp(self):
# Find the path to the example.*.whl so we can add it to the front of
# sys.path, where we'll then try to find the metadata thereof.
self.resources = ExitStack()
self.addCleanup(self.resources.close)
wheel = self.resources.enter_context(
path('importlib_metadata.tests.data',
'example-21.12-py3-none-any.whl'))
sys.path.insert(0, str(wheel))
self.resources.callback(sys.path.pop, 0)
def test_zip_version(self):
self.assertEqual(importlib_metadata.version('example'), '21.12')
def test_zip_entry_points(self):
parser = importlib_metadata.entry_points('example')
entry_point = parser.get('console_scripts', 'example')
self.assertEqual(entry_point, 'example:main')
def test_missing_metadata(self):
distribution = importlib_metadata.distribution('example')
self.assertIsNone(distribution.read_text('does not exist'))
def test_case_insensitive(self):
self.assertEqual(importlib_metadata.version('Example'), '21.12')

View file

@ -0,0 +1 @@
0.7

Binary file not shown.

View file

@ -0,0 +1,7 @@
A Python ISAPI extension, contributed by Phillip Frantz.
Copyright 2002-2003 by Blackdog Software Pty Ltd.
See the 'samples' directory, and particularly samples\README.txt
You can find documentation in the PyWin32.chm file that comes with pywin32 -
you can open this from Pythonwin->Help, or from the start menu.

View file

@ -0,0 +1,33 @@
# The Python ISAPI package.
# Exceptions thrown by the DLL framework.
class ISAPIError(Exception):
def __init__(self, errno, strerror = None, funcname = None):
# named attributes match IOError etc.
self.errno = errno
self.strerror = strerror
self.funcname = funcname
Exception.__init__(self, errno, strerror, funcname)
def __str__(self):
if self.strerror is None:
try:
import win32api
self.strerror = win32api.FormatMessage(self.errno).strip()
except:
self.strerror = "no error message is available"
# str() looks like a win32api error.
return str( (self.errno, self.strerror, self.funcname) )
class FilterError(ISAPIError):
pass
class ExtensionError(ISAPIError):
pass
# A little development aid - a filter or extension callback function can
# raise one of these exceptions, and the handler module will be reloaded.
# This means you can change your code without restarting IIS.
# After a reload, your filter/extension will have the GetFilterVersion/
# GetExtensionVersion function called, but with None as the first arg.
class InternalReloadException(Exception):
pass
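# For example (a sketch only - the 'config_changed' helper is hypothetical),
# an extension's request handler could force a reload of its own module with:
#
#   if config_changed():
#       raise InternalReloadException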

View file

@ -0,0 +1,92 @@
<!-- NOTE: This HTML is displayed inside the CHM file - hence some hrefs
will only work in that environment
-->
<HTML>
<BODY>
<TITLE>Introduction to Python ISAPI support</TITLE>
<h2>Introduction to Python ISAPI support</h2>
<h3>See also</h3>
<ul>
<li><a href="/isapi_modules.html">The isapi related modules</a>
</li>
<li><a href="/isapi_objects.html">The isapi related objects</a>
</li>
</ul>
<p><i>Note: if you are viewing this documentation directly from disk,
most links in this document will fail - you can also find this document in the
CHM file that comes with pywin32, where the links will work</i>
<h3>Introduction</h3>
This documents Python support for hosting ISAPI extensions and filters inside
Microsoft Internet Information Server (IIS). It assumes a basic understanding
of the ISAPI filter and extension mechanism.
<p>
In summary, to implement a filter or extension, you provide a Python module
which defines a Filter and/or Extension class. Once your class has been
loaded, IIS/ISAPI will, via an extension DLL, call methods on your class.
<p>
A filter or extension class need only provide 3 methods - for filters they
are called <code>GetFilterVersion</code>, <code>HttpFilterProc</code> and
<code>TerminateFilter</code>. For extensions they
are named <code>GetExtensionVersion</code>, <code>HttpExtensionProc</code> and
<code>TerminateExtension</code>. If you are familiar with writing ISAPI
extensions in C/C++, these names and their purpose will be familiar.
<p>
Most of the work is done in the <code>HttpFilterProc</code> and
<code>HttpExtensionProc</code> methods. These both take a single
parameter - an <a href="/HTTP_FILTER_CONTEXT.html">HTTP_FILTER_CONTEXT</a> and
<a href="/EXTENSION_CONTROL_BLOCK.html">EXTENSION_CONTROL_BLOCK</a>
object respectively.
<p>
In addition to these components, there is an 'isapi' package, containing
support facilities (base-classes, exceptions, etc) which can be leveraged
by the extension.
<h4>Base classes</h4>
There are a number of base classes provided to make writing extensions a little
simpler. Of particular note is <code>isapi.threaded_extension.ThreadPoolExtension</code>.
This implements a thread-pool and informs IIS that the request is progressing
in the background. Your sub-class need only provide a <code>Dispatch</code>
method, which is called on one of the worker threads rather than the thread
that the request came in on.
<p>
There is a base-class for a filter in <code>isapi.simple</code>, but there is no
equivalent threaded filter - filters work under a different model, where
background processing is not possible.
<h4>Samples</h4>
Please see the <code>isapi/samples</code> directory for some sample filters
and extensions.
<H3>Implementation</H3>
A Python ISAPI filter extension consists of 2 main components:
<UL>
<LI>A DLL used by ISAPI to interface with Python.</LI>
<LI>A Python script used by that DLL to implement the filter or extension
functionality</LI>
</UL>
<h4>Extension DLL</h4>
The DLL is usually managed automatically by the isapi.install module. As the
Python script for the extension is installed, a generic DLL provided with
the isapi package is installed next to the script, and IIS configured to
use this DLL.
<p>
The name of the DLL always has the same base name as the Python script, but
with a leading underscore (_), and an extension of .dll. For example, the
sample "redirector.py" will, when installed, have "_redirector.dll" created
in the same directory.
<p/>
The Python script may provide 2 entry points - methods named __FilterFactory__
and __ExtensionFactory__, both taking no arguments and returning a filter or
extension object.
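<p><i>For illustration, a minimal extension script might look like the sketch
below. The script name and response text are hypothetical; the
<code>ThreadPoolExtension</code> base class, its <code>Dispatch</code> method
and the <code>__ExtensionFactory__</code> entry point are those described
above, and the EXTENSION_CONTROL_BLOCK methods used are assumed to be
available as documented elsewhere in this help file.</i>
<pre>
# hello.py - a minimal Python ISAPI extension (sketch)
from isapi import threaded_extension

class Extension(threaded_extension.ThreadPoolExtension):
    "Respond to every request with a short text message."
    def Dispatch(self, ecb):
        # ecb is the EXTENSION_CONTROL_BLOCK for this request.
        ecb.SendResponseHeaders("200 OK", "Content-Type: text/plain\r\n\r\n", False)
        ecb.WriteClient("Hello from a Python ISAPI extension!")
        ecb.DoneWithSession()

def __ExtensionFactory__():
    return Extension()
</pre>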
<h3>Using py2exe and the isapi package</h3>
You can instruct py2exe to create a 'frozen' Python ISAPI filter/extension.
In this case, py2exe will create a package with everything you need in one
directory, and the Python source file embedded in the .zip file.
<p>
In general, you will want to build a separate installation executable along
with the ISAPI extension. This executable will be built from the same script.
See the ISAPI sample in the py2exe distribution.

730
libs/win/isapi/install.py Normal file
View file

@ -0,0 +1,730 @@
"""Installation utilities for Python ISAPI filters and extensions."""
# this code adapted from "Tomcat JK2 ISAPI redirector", part of Apache
# Created July 2004, Mark Hammond.
import sys, os, imp, shutil, stat
import operator
from win32com.client import GetObject, Dispatch
from win32com.client.gencache import EnsureModule, EnsureDispatch
import win32api
import pythoncom
import winerror
import traceback
_APP_INPROC = 0
_APP_OUTPROC = 1
_APP_POOLED = 2
_IIS_OBJECT = "IIS://LocalHost/W3SVC"
_IIS_SERVER = "IIsWebServer"
_IIS_WEBDIR = "IIsWebDirectory"
_IIS_WEBVIRTUALDIR = "IIsWebVirtualDir"
_IIS_FILTERS = "IIsFilters"
_IIS_FILTER = "IIsFilter"
_DEFAULT_SERVER_NAME = "Default Web Site"
_DEFAULT_HEADERS = "X-Powered-By: Python"
_DEFAULT_PROTECTION = _APP_POOLED
# Default is for 'execute' only access - ie, only the extension
# can be used. This can be overridden via your install script.
_DEFAULT_ACCESS_EXECUTE = True
_DEFAULT_ACCESS_READ = False
_DEFAULT_ACCESS_WRITE = False
_DEFAULT_ACCESS_SCRIPT = False
_DEFAULT_CONTENT_INDEXED = False
_DEFAULT_ENABLE_DIR_BROWSING = False
_DEFAULT_ENABLE_DEFAULT_DOC = False
_extensions = [ext for ext, _, _ in imp.get_suffixes()]
is_debug_build = '_d.pyd' in _extensions
this_dir = os.path.abspath(os.path.dirname(__file__))
class FilterParameters:
Name = None
Description = None
Path = None
Server = None
# Params that control if/how AddExtensionFile is called.
AddExtensionFile = True
AddExtensionFile_Enabled = True
AddExtensionFile_GroupID = None # defaults to Name
AddExtensionFile_CanDelete = True
AddExtensionFile_Description = None # defaults to Description.
def __init__(self, **kw):
self.__dict__.update(kw)
class VirtualDirParameters:
Name = None # Must be provided.
Description = None # defaults to Name
AppProtection = _DEFAULT_PROTECTION
Headers = _DEFAULT_HEADERS
Path = None # defaults to WWW root.
Type = _IIS_WEBVIRTUALDIR
AccessExecute = _DEFAULT_ACCESS_EXECUTE
AccessRead = _DEFAULT_ACCESS_READ
AccessWrite = _DEFAULT_ACCESS_WRITE
AccessScript = _DEFAULT_ACCESS_SCRIPT
ContentIndexed = _DEFAULT_CONTENT_INDEXED
EnableDirBrowsing = _DEFAULT_ENABLE_DIR_BROWSING
EnableDefaultDoc = _DEFAULT_ENABLE_DEFAULT_DOC
DefaultDoc = None # Only set in IIS if not None
ScriptMaps = []
ScriptMapUpdate = "end" # can be 'start', 'end', 'replace'
Server = None
def __init__(self, **kw):
self.__dict__.update(kw)
def is_root(self):
"This virtual directory is a root directory if parent and name are blank"
parent, name = self.split_path()
return not parent and not name
def split_path(self):
return split_path(self.Name)
class ScriptMapParams:
Extension = None
Module = None
Flags = 5
Verbs = ""
# Params that control if/how AddExtensionFile is called.
AddExtensionFile = True
AddExtensionFile_Enabled = True
AddExtensionFile_GroupID = None # defaults to Name
AddExtensionFile_CanDelete = True
AddExtensionFile_Description = None # defaults to Description.
def __init__(self, **kw):
self.__dict__.update(kw)
def __str__(self):
"Format this parameter suitable for IIS"
items = [self.Extension, self.Module, self.Flags]
# IIS gets upset if there is a trailing verb comma, but no verbs
if self.Verbs:
items.append(self.Verbs)
items = [str(item) for item in items]
return ','.join(items)
class ISAPIParameters:
ServerName = _DEFAULT_SERVER_NAME
# Description = None
Filters = []
VirtualDirs = []
def __init__(self, **kw):
self.__dict__.update(kw)
verbose = 1 # The level - 0 is quiet.
def log(level, what):
if verbose >= level:
print(what)
# Convert an ADSI COM exception to the Win32 error code embedded in it.
def _GetWin32ErrorCode(com_exc):
hr = com_exc.hresult
# If we have more details in the 'excepinfo' struct, use it.
if com_exc.excepinfo:
hr = com_exc.excepinfo[-1]
if winerror.HRESULT_FACILITY(hr) != winerror.FACILITY_WIN32:
raise
return winerror.SCODE_CODE(hr)
class InstallationError(Exception): pass
class ItemNotFound(InstallationError): pass
class ConfigurationError(InstallationError): pass
def FindPath(options, server, name):
if name.lower().startswith("iis://"):
return name
else:
if name and name[0] != "/":
name = "/"+name
return FindWebServer(options, server)+"/ROOT"+name
def LocateWebServerPath(description):
"""
Find an IIS web server whose name or comment matches the provided
description (case-insensitive).
>>> LocateWebServerPath('Default Web Site') # doctest: +SKIP
or
>>> LocateWebServerPath('1') #doctest: +SKIP
"""
assert len(description) >= 1, "Server name or comment is required"
iis = GetObject(_IIS_OBJECT)
description = description.lower().strip()
for site in iis:
# Name is generally a number, but no need to assume that.
site_attributes = [getattr(site, attr, "").lower().strip()
for attr in ("Name", "ServerComment")]
if description in site_attributes:
return site.AdsPath
msg = "No web sites match the description '%s'" % description
raise ItemNotFound(msg)
def GetWebServer(description = None):
"""
Load the web server instance (COM object) for a given instance
or description.
If None is specified, the default website is retrieved (indicated
by the identifier 1).
"""
description = description or "1"
path = LocateWebServerPath(description)
server = LoadWebServer(path)
return server
def LoadWebServer(path):
try:
server = GetObject(path)
except pythoncom.com_error as details:
msg = details.strerror
if details.excepinfo and details.excepinfo[2]:
    msg = details.excepinfo[2]
msg = "WebServer %s: %s" % (path, msg)
raise ItemNotFound(msg)
return server
def FindWebServer(options, server_desc):
"""
Legacy function to allow options to define a .server property
to override the other parameter. Use GetWebServer instead.
"""
# options takes precedence
server_desc = options.server or server_desc
# make sure server_desc is unicode (could be mbcs if passed in
# sys.argv).
if server_desc and not isinstance(server_desc, str):
server_desc = server_desc.decode('mbcs')
# get the server (if server_desc is None, the default site is acquired)
server = GetWebServer(server_desc)
return server.adsPath
def split_path(path):
"""
Get the parent path and basename.
>>> split_path('/')
['', '']
>>> split_path('')
['', '']
>>> split_path('foo')
['', 'foo']
>>> split_path('/foo')
['', 'foo']
>>> split_path('/foo/bar')
['/foo', 'bar']
>>> split_path('foo/bar')
['/foo', 'bar']
"""
if not path.startswith('/'): path = '/' + path
return path.rsplit('/', 1)
def _CreateDirectory(iis_dir, name, params):
# We used to go to lengths to keep an existing virtual directory
# in place. However, in some cases the existing directories got
# into a bad state, and an update failed to get them working.
# So we nuke it first. If this is a problem, we could consider adding
# a --keep-existing option.
try:
# Also seen the Class change to a generic IISObject - so nuke
# *any* existing object, regardless of Class
assert name.strip("/"), "mustn't delete the root!"
iis_dir.Delete('', name)
log(2, "Deleted old directory '%s'" % (name,))
except pythoncom.com_error:
pass
newDir = iis_dir.Create(params.Type, name)
log(2, "Creating new directory '%s' in %s..." % (name,iis_dir.Name))
friendly = params.Description or params.Name
newDir.AppFriendlyName = friendly
# Note that the new directory won't be visible in the IIS UI
# unless the directory exists on the filesystem.
try:
path = params.Path or iis_dir.Path
newDir.Path = path
except AttributeError:
# If params.Type is IIS_WEBDIRECTORY, an exception is thrown
pass
newDir.AppCreate2(params.AppProtection)
# XXX - note that these Headers only work in IIS6 and earlier. IIS7
# only supports them on the w3svc node - not even on individual sites,
# let alone individual extensions in the site!
if params.Headers:
newDir.HttpCustomHeaders = params.Headers
log(2, "Setting directory options...")
newDir.AccessExecute = params.AccessExecute
newDir.AccessRead = params.AccessRead
newDir.AccessWrite = params.AccessWrite
newDir.AccessScript = params.AccessScript
newDir.ContentIndexed = params.ContentIndexed
newDir.EnableDirBrowsing = params.EnableDirBrowsing
newDir.EnableDefaultDoc = params.EnableDefaultDoc
if params.DefaultDoc is not None:
newDir.DefaultDoc = params.DefaultDoc
newDir.SetInfo()
return newDir
def CreateDirectory(params, options):
_CallHook(params, "PreInstall", options)
if not params.Name:
raise ConfigurationError("No Name param")
parent, name = params.split_path()
target_dir = GetObject(FindPath(options, params.Server, parent))
if not params.is_root():
target_dir = _CreateDirectory(target_dir, name, params)
AssignScriptMaps(params.ScriptMaps, target_dir, params.ScriptMapUpdate)
_CallHook(params, "PostInstall", options, target_dir)
log(1, "Configured Virtual Directory: %s" % (params.Name,))
return target_dir
def AssignScriptMaps(script_maps, target, update='replace'):
"""Updates IIS with the supplied script map information.
script_maps is a list of ScriptMapParameter objects
target is an IIS Virtual Directory to assign the script maps to
update is a string indicating how to update the maps, one of ('start',
'end', or 'replace')
"""
# determine which function to use to assign script maps
script_map_func = '_AssignScriptMaps' + update.capitalize()
try:
script_map_func = eval(script_map_func)
except NameError:
msg = "Unknown ScriptMapUpdate option '%s'" % update
raise ConfigurationError(msg)
# use the str method to format the script maps for IIS
script_maps = [str(s) for s in script_maps]
# call the correct function
script_map_func(target, script_maps)
target.SetInfo()
def get_unique_items(sequence, reference):
"Return items in sequence that can't be found in reference."
return tuple([item for item in sequence if item not in reference])
def _AssignScriptMapsReplace(target, script_maps):
target.ScriptMaps = script_maps
def _AssignScriptMapsEnd(target, script_maps):
unique_new_maps = get_unique_items(script_maps, target.ScriptMaps)
target.ScriptMaps = target.ScriptMaps + unique_new_maps
def _AssignScriptMapsStart(target, script_maps):
unique_new_maps = get_unique_items(script_maps, target.ScriptMaps)
target.ScriptMaps = unique_new_maps + target.ScriptMaps
def CreateISAPIFilter(filterParams, options):
server = FindWebServer(options, filterParams.Server)
_CallHook(filterParams, "PreInstall", options)
try:
filters = GetObject(server+"/Filters")
except pythoncom.com_error as exc:
# Brand new sites don't have the '/Filters' collection - create it.
# Any errors other than 'not found' we shouldn't ignore.
if winerror.HRESULT_FACILITY(exc.hresult) != winerror.FACILITY_WIN32 or \
winerror.HRESULT_CODE(exc.hresult) != winerror.ERROR_PATH_NOT_FOUND:
raise
server_ob = GetObject(server)
filters = server_ob.Create(_IIS_FILTERS, "Filters")
filters.FilterLoadOrder = ""
filters.SetInfo()
# As for VirtualDir, delete an existing one.
assert filterParams.Name.strip("/"), "mustn't delete the root!"
try:
filters.Delete(_IIS_FILTER, filterParams.Name)
log(2, "Deleted old filter '%s'" % (filterParams.Name,))
except pythoncom.com_error:
pass
newFilter = filters.Create(_IIS_FILTER, filterParams.Name)
log(2, "Created new ISAPI filter...")
assert os.path.isfile(filterParams.Path)
newFilter.FilterPath = filterParams.Path
newFilter.FilterDescription = filterParams.Description
newFilter.SetInfo()
load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b]
if filterParams.Name not in load_order:
load_order.append(filterParams.Name)
filters.FilterLoadOrder = ",".join(load_order)
filters.SetInfo()
_CallHook(filterParams, "PostInstall", options, newFilter)
log (1, "Configured Filter: %s" % (filterParams.Name,))
return newFilter
def DeleteISAPIFilter(filterParams, options):
_CallHook(filterParams, "PreRemove", options)
server = FindWebServer(options, filterParams.Server)
ob_path = server+"/Filters"
try:
filters = GetObject(ob_path)
except pythoncom.com_error as details:
# failure to open the filters just means a totally clean IIS install
# (IIS5 at least has no 'Filters' key when freshly installed).
log(2, "ISAPI filter path '%s' did not exist." % (ob_path,))
return
try:
assert filterParams.Name.strip("/"), "mustn't delete the root!"
filters.Delete(_IIS_FILTER, filterParams.Name)
log(2, "Deleted ISAPI filter '%s'" % (filterParams.Name,))
except pythoncom.com_error as details:
rc = _GetWin32ErrorCode(details)
if rc != winerror.ERROR_PATH_NOT_FOUND:
raise
log(2, "ISAPI filter '%s' did not exist." % (filterParams.Name,))
# Remove from the load order
load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b]
if filterParams.Name in load_order:
load_order.remove(filterParams.Name)
filters.FilterLoadOrder = ",".join(load_order)
filters.SetInfo()
_CallHook(filterParams, "PostRemove", options)
log (1, "Deleted Filter: %s" % (filterParams.Name,))
def _AddExtensionFile(module, def_groupid, def_desc, params, options):
group_id = params.AddExtensionFile_GroupID or def_groupid
desc = params.AddExtensionFile_Description or def_desc
try:
ob = GetObject(_IIS_OBJECT)
ob.AddExtensionFile(module,
params.AddExtensionFile_Enabled,
group_id,
params.AddExtensionFile_CanDelete,
desc)
log(2, "Added extension file '%s' (%s)" % (module, desc))
except (pythoncom.com_error, AttributeError) as details:
# IIS5 always fails. Probably should upgrade this to
# complain more loudly if IIS6 fails.
log(2, "Failed to add extension file '%s': %s" % (module, details))
def AddExtensionFiles(params, options):
"""Register the modules used by the filters/extensions as a trusted
'extension module' - required by the default IIS6 security settings."""
# Add each module only once.
added = {}
for vd in params.VirtualDirs:
for smp in vd.ScriptMaps:
if smp.Module not in added and smp.AddExtensionFile:
_AddExtensionFile(smp.Module, vd.Name, vd.Description, smp,
options)
added[smp.Module] = True
for fd in params.Filters:
if fd.Path not in added and fd.AddExtensionFile:
_AddExtensionFile(fd.Path, fd.Name, fd.Description, fd, options)
added[fd.Path] = True
def _DeleteExtensionFileRecord(module, options):
try:
ob = GetObject(_IIS_OBJECT)
ob.DeleteExtensionFileRecord(module)
log(2, "Deleted extension file record for '%s'" % module)
except (pythoncom.com_error, AttributeError) as details:
log(2, "Failed to remove extension file '%s': %s" % (module, details))
def DeleteExtensionFileRecords(params, options):
deleted = {} # only remove each .dll once.
for vd in params.VirtualDirs:
for smp in vd.ScriptMaps:
if smp.Module not in deleted and smp.AddExtensionFile:
_DeleteExtensionFileRecord(smp.Module, options)
deleted[smp.Module] = True
for filter_def in params.Filters:
if filter_def.Path not in deleted and filter_def.AddExtensionFile:
_DeleteExtensionFileRecord(filter_def.Path, options)
deleted[filter_def.Path] = True
def CheckLoaderModule(dll_name):
suffix = ""
if is_debug_build: suffix = "_d"
template = os.path.join(this_dir,
"PyISAPI_loader" + suffix + ".dll")
if not os.path.isfile(template):
raise ConfigurationError(
"Template loader '%s' does not exist" % (template,))
# We can't do a simple "is newer" check, as the DLL is specific to the
# Python version. So we check the date-time and size are identical,
# and skip the copy in that case.
src_stat = os.stat(template)
try:
dest_stat = os.stat(dll_name)
except os.error:
same = 0
else:
same = src_stat[stat.ST_SIZE]==dest_stat[stat.ST_SIZE] and \
src_stat[stat.ST_MTIME]==dest_stat[stat.ST_MTIME]
if not same:
log(2, "Updating %s->%s" % (template, dll_name))
shutil.copyfile(template, dll_name)
shutil.copystat(template, dll_name)
else:
log(2, "%s is up to date." % (dll_name,))
def _CallHook(ob, hook_name, options, *extra_args):
func = getattr(ob, hook_name, None)
if func is not None:
args = (ob,options) + extra_args
func(*args)
def Install(params, options):
_CallHook(params, "PreInstall", options)
for vd in params.VirtualDirs:
CreateDirectory(vd, options)
for filter_def in params.Filters:
CreateISAPIFilter(filter_def, options)
AddExtensionFiles(params, options)
_CallHook(params, "PostInstall", options)
def RemoveDirectory(params, options):
if params.is_root():
return
try:
directory = GetObject(FindPath(options, params.Server, params.Name))
except pythoncom.com_error as details:
rc = _GetWin32ErrorCode(details)
if rc != winerror.ERROR_PATH_NOT_FOUND:
raise
log(2, "VirtualDirectory '%s' did not exist" % params.Name)
directory = None
if directory is not None:
# Be robust should IIS get upset about unloading.
try:
directory.AppUnLoad()
except:
exc_val = sys.exc_info()[1]
log(2, "AppUnLoad() for %s failed: %s" % (params.Name, exc_val))
# Continue trying to delete it.
try:
parent = GetObject(directory.Parent)
parent.Delete(directory.Class, directory.Name)
log (1, "Deleted Virtual Directory: %s" % (params.Name,))
except:
exc_val = sys.exc_info()[1]
log(1, "Failed to remove directory %s: %s" % (params.Name, exc_val))
def RemoveScriptMaps(vd_params, options):
"Remove script maps from the already installed virtual directory"
parent, name = vd_params.split_path()
target_dir = GetObject(FindPath(options, vd_params.Server, parent))
installed_maps = list(target_dir.ScriptMaps)
for _map in map(str, vd_params.ScriptMaps):
if _map in installed_maps:
installed_maps.remove(_map)
target_dir.ScriptMaps = installed_maps
target_dir.SetInfo()
def Uninstall(params, options):
_CallHook(params, "PreRemove", options)
DeleteExtensionFileRecords(params, options)
for vd in params.VirtualDirs:
_CallHook(vd, "PreRemove", options)
RemoveDirectory(vd, options)
if vd.is_root():
# if this is installed to the root virtual directory, we can't delete it
# so remove the script maps.
RemoveScriptMaps(vd, options)
_CallHook(vd, "PostRemove", options)
for filter_def in params.Filters:
DeleteISAPIFilter(filter_def, options)
_CallHook(params, "PostRemove", options)
# Patch up any missing module names in the params, replacing them with
# the DLL name that hosts this extension/filter.
def _PatchParamsModule(params, dll_name, file_must_exist = True):
if file_must_exist:
if not os.path.isfile(dll_name):
raise ConfigurationError("%s does not exist" % (dll_name,))
# Patch up all references to the DLL.
for f in params.Filters:
if f.Path is None: f.Path = dll_name
for d in params.VirtualDirs:
for sm in d.ScriptMaps:
if sm.Module is None: sm.Module = dll_name
def GetLoaderModuleName(mod_name, check_module = None):
# find the name of the DLL hosting us.
# By default, this is "_{module_base_name}.dll"
if hasattr(sys, "frozen"):
# What to do? The .dll knows its name, but this is likely to be
# executed via a .exe, which does not know.
base, ext = os.path.splitext(mod_name)
path, base = os.path.split(base)
# handle the common case of 'foo.exe'/'foow.exe'
if base.endswith('w'):
base = base[:-1]
# For py2exe, we have '_foo.dll' as the standard pyisapi loader - but
# 'foo.dll' is what we use (it just delegates).
# So no leading '_' on the installed name.
dll_name = os.path.abspath(os.path.join(path, base + ".dll"))
else:
base, ext = os.path.splitext(mod_name)
path, base = os.path.split(base)
dll_name = os.path.abspath(os.path.join(path, "_" + base + ".dll"))
# Check we actually have it.
if check_module is None: check_module = not hasattr(sys, "frozen")
if check_module:
CheckLoaderModule(dll_name)
return dll_name
# Note the 'log' params to these 'builtin' args - old versions of pywin32
# didn't log at all in this function (by intent; anyone calling this was
# responsible). So existing code that calls this function with the old
# signature (ie, without a 'log' param) still gets the same behaviour as
# before...
def InstallModule(conf_module_name, params, options, log=lambda *args:None):
"Install the extension"
if not hasattr(sys, "frozen"):
conf_module_name = os.path.abspath(conf_module_name)
if not os.path.isfile(conf_module_name):
raise ConfigurationError("%s does not exist" % (conf_module_name,))
loader_dll = GetLoaderModuleName(conf_module_name)
_PatchParamsModule(params, loader_dll)
Install(params, options)
log(1, "Installation complete.")
def UninstallModule(conf_module_name, params, options, log=lambda *args:None):
"Remove the extension"
loader_dll = GetLoaderModuleName(conf_module_name, False)
_PatchParamsModule(params, loader_dll, False)
Uninstall(params, options)
log(1, "Uninstallation complete.")
standard_arguments = {
"install" : InstallModule,
"remove" : UninstallModule,
}
def build_usage(handler_map):
docstrings = [handler.__doc__ for handler in handler_map.values()]
all_args = dict(zip(iter(handler_map.keys()), docstrings))
arg_names = "|".join(iter(all_args.keys()))
usage_string = "%prog [options] [" + arg_names + "]\n"
usage_string += "commands:\n"
for arg, desc in all_args.items():
usage_string += " %-10s: %s" % (arg, desc) + "\n"
return usage_string[:-1]
def MergeStandardOptions(options, params):
"""
Take an options object generated by the command line and merge
the values into the IISParameters object.
"""
pass
# We support 2 ways of extending our command-line/install support.
# * Many of the installation items allow you to specify "PreInstall",
# "PostInstall", "PreRemove" and "PostRemove" hooks
# All hooks are called with the 'params' object being operated on, and
# the 'optparser' options for this session (ie, the command-line options)
# PostInstall for VirtualDirectories and Filters both have an additional
# param - the ADSI object just created.
# * You can pass your own option parser for us to use, and/or define a map
# with your own custom arg handlers. It is a map of 'arg'->function.
# The function is called with (options, log_fn, arg). The function's
# docstring is used in the usage output.
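# A typical install script built on this module therefore looks something
# like the sketch below (illustrative only - the virtual directory name and
# script-map values are hypothetical; see the isapi 'samples' directory for
# working examples):
#
#   from isapi.install import *
#
#   sm = [ScriptMapParams(Extension="*", Flags=0)]
#   vd = VirtualDirParameters(Name="MyPythonApp",
#                             Description="A sample Python ISAPI extension",
#                             ScriptMaps=sm,
#                             ScriptMapUpdate="replace")
#   params = ISAPIParameters(VirtualDirs=[vd])
#
#   if __name__ == '__main__':
#       # "install" is the default command; pass "remove" to uninstall.
#       HandleCommandLine(params)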
def HandleCommandLine(params, argv=None, conf_module_name = None,
default_arg = "install",
opt_parser = None, custom_arg_handlers = {}):
"""Perform installation or removal of an ISAPI filter or extension.
This module handles standard command-line options and configuration
information, and installs, removes or updates the configuration of an
ISAPI filter or extension.
You must pass your configuration information in params - all other
arguments are optional, and allow you to configure the installation
process.
"""
global verbose
from optparse import OptionParser
argv = argv or sys.argv
if not conf_module_name:
conf_module_name = sys.argv[0]
# convert to a long name so that if we were somehow registered with
# the "short" version but unregistered with the "long" version we
# still work (that will depend on exactly how the installer was
# started)
try:
conf_module_name = win32api.GetLongPathName(conf_module_name)
except win32api.error as exc:
log(2, "Couldn't determine the long name for %r: %s" %
(conf_module_name, exc))
if opt_parser is None:
# Build our own parser.
parser = OptionParser(usage='')
else:
# The caller is providing their own filter, presumably with their
# own options all setup.
parser = opt_parser
# build a usage string if we don't have one.
if not parser.get_usage():
all_handlers = standard_arguments.copy()
all_handlers.update(custom_arg_handlers)
parser.set_usage(build_usage(all_handlers))
# allow the user to use uninstall as a synonym for remove if it wasn't
# defined by the custom arg handlers.
all_handlers.setdefault('uninstall', all_handlers['remove'])
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose", default=True,
help="don't print status messages to stdout")
parser.add_option("-v", "--verbosity", action="count",
dest="verbose", default=1,
help="increase the verbosity of status messages")
parser.add_option("", "--server", action="store",
help="Specifies the IIS server to install/uninstall on." \
" Default is '%s/1'" % (_IIS_OBJECT,))
(options, args) = parser.parse_args(argv[1:])
MergeStandardOptions(options, params)
verbose = options.verbose
if not args:
args = [default_arg]
try:
for arg in args:
handler = all_handlers[arg]
handler(conf_module_name, params, options, log)
except (ItemNotFound, InstallationError) as details:
if options.verbose > 1:
traceback.print_exc()
print("%s: %s" % (details.__class__.__name__, details))
except KeyError:
parser.error("Invalid arg '%s'" % arg)

120
libs/win/isapi/isapicon.py Normal file
View file

@ -0,0 +1,120 @@
"""Constants needed by ISAPI filters and extensions."""
# ======================================================================
# Copyright 2002-2003 by Blackdog Software Pty Ltd.
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Blackdog Software not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# BLACKDOG SOFTWARE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL BLACKDOG SOFTWARE BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================
# HTTP reply codes
HTTP_CONTINUE = 100
HTTP_SWITCHING_PROTOCOLS = 101
HTTP_PROCESSING = 102
HTTP_OK = 200
HTTP_CREATED = 201
HTTP_ACCEPTED = 202
HTTP_NON_AUTHORITATIVE = 203
HTTP_NO_CONTENT = 204
HTTP_RESET_CONTENT = 205
HTTP_PARTIAL_CONTENT = 206
HTTP_MULTI_STATUS = 207
HTTP_MULTIPLE_CHOICES = 300
HTTP_MOVED_PERMANENTLY = 301
HTTP_MOVED_TEMPORARILY = 302
HTTP_SEE_OTHER = 303
HTTP_NOT_MODIFIED = 304
HTTP_USE_PROXY = 305
HTTP_TEMPORARY_REDIRECT = 307
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_PAYMENT_REQUIRED = 402
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
HTTP_METHOD_NOT_ALLOWED = 405
HTTP_NOT_ACCEPTABLE = 406
HTTP_PROXY_AUTHENTICATION_REQUIRED= 407
HTTP_REQUEST_TIME_OUT = 408
HTTP_CONFLICT = 409
HTTP_GONE = 410
HTTP_LENGTH_REQUIRED = 411
HTTP_PRECONDITION_FAILED = 412
HTTP_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_REQUEST_URI_TOO_LARGE = 414
HTTP_UNSUPPORTED_MEDIA_TYPE = 415
HTTP_RANGE_NOT_SATISFIABLE = 416
HTTP_EXPECTATION_FAILED = 417
HTTP_UNPROCESSABLE_ENTITY = 422
HTTP_INTERNAL_SERVER_ERROR = 500
HTTP_NOT_IMPLEMENTED = 501
HTTP_BAD_GATEWAY = 502
HTTP_SERVICE_UNAVAILABLE = 503
HTTP_GATEWAY_TIME_OUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
HTTP_VARIANT_ALSO_VARIES = 506
HSE_STATUS_SUCCESS = 1
HSE_STATUS_SUCCESS_AND_KEEP_CONN = 2
HSE_STATUS_PENDING = 3
HSE_STATUS_ERROR = 4
SF_NOTIFY_SECURE_PORT = 0x00000001
SF_NOTIFY_NONSECURE_PORT = 0x00000002
SF_NOTIFY_READ_RAW_DATA = 0x00008000
SF_NOTIFY_PREPROC_HEADERS = 0x00004000
SF_NOTIFY_AUTHENTICATION = 0x00002000
SF_NOTIFY_URL_MAP = 0x00001000
SF_NOTIFY_ACCESS_DENIED = 0x00000800
SF_NOTIFY_SEND_RESPONSE = 0x00000040
SF_NOTIFY_SEND_RAW_DATA = 0x00000400
SF_NOTIFY_LOG = 0x00000200
SF_NOTIFY_END_OF_REQUEST = 0x00000080
SF_NOTIFY_END_OF_NET_SESSION = 0x00000100
SF_NOTIFY_ORDER_HIGH = 0x00080000
SF_NOTIFY_ORDER_MEDIUM = 0x00040000
SF_NOTIFY_ORDER_LOW = 0x00020000
SF_NOTIFY_ORDER_DEFAULT = SF_NOTIFY_ORDER_LOW
SF_NOTIFY_ORDER_MASK = (SF_NOTIFY_ORDER_HIGH | \
SF_NOTIFY_ORDER_MEDIUM | \
SF_NOTIFY_ORDER_LOW)
SF_STATUS_REQ_FINISHED = 134217728 # 0x8000000
SF_STATUS_REQ_FINISHED_KEEP_CONN = 134217728 + 1
SF_STATUS_REQ_NEXT_NOTIFICATION = 134217728 + 2
SF_STATUS_REQ_HANDLED_NOTIFICATION = 134217728 + 3
SF_STATUS_REQ_ERROR = 134217728 + 4
SF_STATUS_REQ_READ_NEXT = 134217728 + 5
HSE_IO_SYNC = 0x00000001 # for WriteClient
HSE_IO_ASYNC = 0x00000002 # for WriteClient/TF/EU
HSE_IO_DISCONNECT_AFTER_SEND = 0x00000004 # for TF
HSE_IO_SEND_HEADERS = 0x00000008 # for TF
HSE_IO_NODELAY = 0x00001000 # turn off nagling
# These two are only used by VectorSend
HSE_IO_FINAL_SEND = 0x00000010
HSE_IO_CACHE_RESPONSE = 0x00000020
HSE_EXEC_URL_NO_HEADERS = 0x02
HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR = 0x04
HSE_EXEC_URL_IGNORE_VALIDATION_AND_RANGE = 0x10
HSE_EXEC_URL_DISABLE_CUSTOM_ERROR = 0x20
HSE_EXEC_URL_SSI_CMD = 0x40
HSE_EXEC_URL_HTTP_CACHE_ELIGIBLE = 0x80

View file

@ -0,0 +1,20 @@
In this directory you will find examples of ISAPI filters and extensions.
The filter loading mechanism works like this:
* IIS loads the special Python "loader" DLL. This DLL will generally have a
leading underscore as part of its name.
* This loader DLL looks for a Python module, by removing the first letter of
the DLL base name.
This means that an ISAPI extension module consists of 2 key files - the loader
DLL (eg, "_MyIISModule.dll", and a Python module (which for this example
would be "MyIISModule.py")
When you install an ISAPI extension, the installation code checks to see if
there is a loader DLL for your implementation file - if one does not exist,
or the standard loader is different, it is copied and renamed accordingly.
We use this mechanism to provide the maximum separation between different
Python extensions installed on the same server - otherwise filter order and
other tricky IIS semantics would need to be replicated. Also, each filter
gets its own thread-pool, etc.
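As a rough sketch of the naming rule described above (illustrative only - the
real lookup is performed by the loader DLL itself):
    import os
    def implementation_module_for(loader_dll):
        # "_MyIISModule.dll" -> "MyIISModule" (drop the leading underscore)
        base = os.path.splitext(os.path.basename(loader_dll))[0]
        return base[1:]
    print(implementation_module_for("_MyIISModule.dll"))  # MyIISModule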

View file

@ -0,0 +1,196 @@
# This extension demonstrates some advanced features of the Python ISAPI
# framework.
# We demonstrate:
# * Reloading your Python module without shutting down IIS (eg, when your
# .py implementation file changes.)
# * Custom command-line handling - both additional options and commands.
# * Using a query string - any part of the URL after a '?' is assumed to
# be "variable names" separated by '&' - we will print the values of
# these server variables.
# * If the tail portion of the URL is "ReportUnhealthy", IIS will be
# notified we are unhealthy via a HSE_REQ_REPORT_UNHEALTHY request.
# Whether this is acted upon depends on if the IIS health-checking
# tools are installed, but you should always see the reason written
# to the Windows event log - see the IIS documentation for more.
from isapi import isapicon
from isapi.simple import SimpleExtension
import sys, os, stat
if hasattr(sys, "isapidllhandle"):
import win32traceutil
# Notes on reloading
# If your HttpFilterProc or HttpExtensionProc functions raises
# 'isapi.InternalReloadException', the framework will not treat it
# as an error but instead will terminate your extension, reload your
# extension module, re-initialize the instance, and re-issue the request.
# The Initialize functions are called with None as their param. The
# return code from the terminate function is ignored.
#
# This is all the framework does to help you. It is up to your code
# when you raise this exception. This sample uses a Win32 "find
# notification". Whenever windows tells us one of the files in the
# directory has changed, we check if the time of our source-file has
# changed, and set a flag. On the next incoming request, we check the flag and
# raise the special exception if set.
#
# The end result is that the module is automatically reloaded whenever
# the source-file changes - you need take no further action to see your
# changes reflected in the running server.
# The framework only reloads your module - if you have libraries you
# depend on and also want reloaded, you must arrange for this yourself.
# One way of doing this would be to special case the import of these
# modules. Eg:
# --
# try:
# my_module = reload(my_module) # module already imported - reload it
# except NameError:
# import my_module # first time around - import it.
# --
# When your module is imported for the first time, the NameError will
# be raised, and the module imported. When the ISAPI framework reloads
# your module, the existing module will avoid the NameError, and allow
# you to reload that module.
from isapi import InternalReloadException
import win32event, win32file, winerror, win32con, threading
try:
reload_counter += 1
except NameError:
reload_counter = 0
# A watcher thread that checks for __file__ changing.
# When it detects it, it simply sets "change_detected" to true.
class ReloadWatcherThread(threading.Thread):
def __init__(self):
self.change_detected = False
self.filename = __file__
if self.filename.endswith("c") or self.filename.endswith("o"):
self.filename = self.filename[:-1]
self.handle = win32file.FindFirstChangeNotification(
os.path.dirname(self.filename),
False, # watch tree?
win32con.FILE_NOTIFY_CHANGE_LAST_WRITE)
threading.Thread.__init__(self)
def run(self):
last_time = os.stat(self.filename)[stat.ST_MTIME]
while 1:
try:
rc = win32event.WaitForSingleObject(self.handle,
win32event.INFINITE)
win32file.FindNextChangeNotification(self.handle)
except win32event.error as details:
# handle closed - thread should terminate.
if details.winerror != winerror.ERROR_INVALID_HANDLE:
raise
break
this_time = os.stat(self.filename)[stat.ST_MTIME]
if this_time != last_time:
print("Detected file change - flagging for reload.")
self.change_detected = True
last_time = this_time
def stop(self):
win32file.FindCloseChangeNotification(self.handle)
# The ISAPI extension - handles requests in our virtual dir, and sends the
# response to the client.
class Extension(SimpleExtension):
"Python advanced sample Extension"
def __init__(self):
self.reload_watcher = ReloadWatcherThread()
self.reload_watcher.start()
def HttpExtensionProc(self, ecb):
# NOTE: If you use a ThreadPoolExtension, you must still perform
# this check in HttpExtensionProc - raising the exception from
# The "Dispatch" method will just cause the exception to be
# rendered to the browser.
if self.reload_watcher.change_detected:
print("Doing reload")
raise InternalReloadException
url = ecb.GetServerVariable("UNICODE_URL")
if url.endswith("ReportUnhealthy"):
ecb.ReportUnhealthy("I'm a little sick")
ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0)
print("<HTML><BODY>", file=ecb)
qs = ecb.GetServerVariable("QUERY_STRING")
if qs:
queries = qs.split("&")
print("<PRE>", file=ecb)
for q in queries:
val = ecb.GetServerVariable(q, '<no such variable>')
print("%s=%r" % (q, val), file=ecb)
print("</PRE><P/>", file=ecb)
print("This module has been imported", file=ecb)
print("%d times" % (reload_counter,), file=ecb)
print("</BODY></HTML>", file=ecb)
ecb.close()
return isapicon.HSE_STATUS_SUCCESS
def TerminateExtension(self, status):
self.reload_watcher.stop()
# The entry points for the ISAPI extension.
def __ExtensionFactory__():
return Extension()
# Our special command line customization.
# Pre-install hook for our virtual directory.
def PreInstallDirectory(params, options):
# If the user used our special '--description' option,
# then we override our default.
if options.description:
params.Description = options.description
# Post install hook for our entire script
def PostInstall(params, options):
print()
print("The sample has been installed.")
print("Point your browser to /AdvancedPythonSample")
print("If you modify the source file and reload the page,")
print("you should see the reload counter increment")
# Handler for our custom 'status' argument.
def status_handler(options, log, arg):
"Query the status of something"
print("Everything seems to be fine!")
custom_arg_handlers = {"status": status_handler}
if __name__=='__main__':
# If run from the command-line, install ourselves.
from isapi.install import *
params = ISAPIParameters(PostInstall = PostInstall)
# Setup the virtual directories - this is a list of directories our
# extension uses - in this case only 1.
# Each extension has a "script map" - this is the mapping of ISAPI
# extensions.
sm = [
ScriptMapParams(Extension="*", Flags=0)
]
vd = VirtualDirParameters(Name="AdvancedPythonSample",
Description = Extension.__doc__,
ScriptMaps = sm,
ScriptMapUpdate = "replace",
# specify the pre-install hook.
PreInstall = PreInstallDirectory
)
params.VirtualDirs = [vd]
# Setup our custom option parser.
from optparse import OptionParser
parser = OptionParser('') # blank usage, so isapi sets it.
parser.add_option("", "--description",
action="store",
help="custom description to use for the virtual directory")
HandleCommandLine(params, opt_parser=parser,
custom_arg_handlers = custom_arg_handlers)

View file

@ -0,0 +1,109 @@
# This is a sample ISAPI extension written in Python.
#
# Please see README.txt in this directory, and specifically the
# information about the "loader" DLL - installing this sample will create
# "_redirector.dll" in the current directory. The readme explains this.
# Executing this script (or any server config script) will install the extension
# into your web server. As the server executes, the PyISAPI framework will load
# this module and create your Extension and Filter objects.
# This is the simplest possible redirector (or proxy) we can write. The
# extension installs with a mask of '*' in the root of the site.
# As an added bonus though, we optionally show how, on IIS6 and later, we
# can use HSE_REQ_EXEC_URL to ignore certain requests - in IIS5 and earlier
# we can only do this with an ISAPI filter - see redirector_with_filter for
# an example. If this sample is run on IIS5 or earlier it simply ignores
# any excludes.
from isapi import isapicon, threaded_extension
import sys
import traceback
try:
from urllib.request import urlopen
except ImportError:
# py2k spelling...
from urllib2 import urlopen
import win32api
# sys.isapidllhandle will exist when we are loaded by the IIS framework.
# In this case we redirect our output to the win32traceutil collector.
if hasattr(sys, "isapidllhandle"):
import win32traceutil
# The site we are proxying.
proxy = "http://www.python.org"
# Urls we exclude (ie, allow IIS to handle itself) - all are lowered,
# and these entries exist by default on Vista...
excludes = ["/iisstart.htm", "/welcome.png"]
# An "io completion" function, called when ecb.ExecURL completes...
def io_callback(ecb, url, cbIO, errcode):
# Get the status of our ExecURL
httpstatus, substatus, win32 = ecb.GetExecURLStatus()
print("ExecURL of %r finished with http status %d.%d, win32 status %d (%s)" % (
url, httpstatus, substatus, win32, win32api.FormatMessage(win32).strip()))
# nothing more to do!
ecb.DoneWithSession()
# The ISAPI extension - handles all requests in the site.
class Extension(threaded_extension.ThreadPoolExtension):
"Python sample Extension"
def Dispatch(self, ecb):
# Note that our ThreadPoolExtension base class will catch exceptions
# in our Dispatch method, and write the traceback to the client.
# That is perfect for this sample, so we don't catch our own.
#print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),)
url = ecb.GetServerVariable("URL").decode("ascii")
for exclude in excludes:
if url.lower().startswith(exclude):
print("excluding %s" % url)
if ecb.Version < 0x60000:
print("(but this is IIS5 or earlier - can't do 'excludes')")
else:
ecb.IOCompletion(io_callback, url)
ecb.ExecURL(None, None, None, None, None, isapicon.HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR)
return isapicon.HSE_STATUS_PENDING
new_url = proxy + url
print("Opening %s" % new_url)
fp = urlopen(new_url)
headers = fp.info()
# subtle py3k breakage: in py3k, str(headers) has normalized \r\n
# back to \n and also stuck an extra \n term. py2k leaves the
# \r\n from the server intact and finishes with a single term.
if sys.version_info < (3,0):
header_text = str(headers) + "\r\n"
else:
# take *all* trailing \n off, replace remaining with
# \r\n, then add the 2 trailing \r\n.
header_text = str(headers).rstrip('\n').replace('\n', '\r\n') + '\r\n\r\n'
ecb.SendResponseHeaders("200 OK", header_text, False)
ecb.WriteClient(fp.read())
ecb.DoneWithSession()
print("Returned data from '%s'" % (new_url,))
return isapicon.HSE_STATUS_SUCCESS
# The entry points for the ISAPI extension.
def __ExtensionFactory__():
return Extension()
if __name__=='__main__':
# If run from the command-line, install ourselves.
from isapi.install import *
params = ISAPIParameters()
# Setup the virtual directories - this is a list of directories our
# extension uses - in this case only 1.
# Each extension has a "script map" - this is the mapping of ISAPI
# extensions.
sm = [
ScriptMapParams(Extension="*", Flags=0)
]
vd = VirtualDirParameters(Name="/",
Description = Extension.__doc__,
ScriptMaps = sm,
ScriptMapUpdate = "replace"
)
params.VirtualDirs = [vd]
HandleCommandLine(params)

View file

@ -0,0 +1,78 @@
# This is a sample ISAPI extension written in Python.
# This is like the other 'redirector' samples, but uses asynch IO when writing
# back to the client (it does *not* use asynch io talking to the remote
# server!)
from isapi import isapicon, threaded_extension
import sys
import traceback
import urllib.request, urllib.parse, urllib.error
# sys.isapidllhandle will exist when we are loaded by the IIS framework.
# In this case we redirect our output to the win32traceutil collector.
if hasattr(sys, "isapidllhandle"):
import win32traceutil
# The site we are proxying.
proxy = "http://www.python.org"
# We synchronously read chunks of this size then asynchronously write them.
CHUNK_SIZE=8192
# The callback made when IIS completes the asynch write.
def io_callback(ecb, fp, cbIO, errcode):
print("IO callback", ecb, fp, cbIO, errcode)
chunk = fp.read(CHUNK_SIZE)
if chunk:
ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC)
# and wait for the next callback to say this chunk is done.
else:
# eof - say we are complete.
fp.close()
ecb.DoneWithSession()
# The ISAPI extension - handles all requests in the site.
class Extension(threaded_extension.ThreadPoolExtension):
"Python sample proxy server - asynch version."
def Dispatch(self, ecb):
print('IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),))
url = ecb.GetServerVariable("URL")
new_url = proxy + url
print("Opening %s" % new_url)
fp = urllib.request.urlopen(new_url)
headers = fp.info()
ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False)
# now send the first chunk asynchronously
ecb.ReqIOCompletion(io_callback, fp)
chunk = fp.read(CHUNK_SIZE)
if chunk:
ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC)
return isapicon.HSE_STATUS_PENDING
# no data - just close things now.
ecb.DoneWithSession()
return isapicon.HSE_STATUS_SUCCESS
# The entry points for the ISAPI extension.
def __ExtensionFactory__():
return Extension()
if __name__=='__main__':
# If run from the command-line, install ourselves.
from isapi.install import *
params = ISAPIParameters()
# Setup the virtual directories - this is a list of directories our
# extension uses - in this case only 1.
# Each extension has a "script map" - this is the mapping of ISAPI
# extensions.
sm = [
ScriptMapParams(Extension="*", Flags=0)
]
vd = VirtualDirParameters(Name="/",
Description = Extension.__doc__,
ScriptMaps = sm,
ScriptMapUpdate = "replace"
)
params.VirtualDirs = [vd]
HandleCommandLine(params)

View file

@ -0,0 +1,155 @@
# This is a sample configuration file for an ISAPI filter and extension
# written in Python.
#
# Please see README.txt in this directory, and specifically the
# information about the "loader" DLL - installing this sample will create
# "_redirector_with_filter.dll" in the current directory. The readme explains
# this.
# Executing this script (or any server config script) will install the extension
# into your web server. As the server executes, the PyISAPI framework will load
# this module and create your Extension and Filter objects.
# This sample provides a redirector:
# It is implemented by a filter and an extension, so that some requests can
# be ignored. Compare with 'redirector_simple' which avoids the filter, but
# is unable to selectively ignore certain requests.
# The process this sample uses is:
# * The filter is installed globally, as all filters are.
# * A Virtual Directory named "python" is setup. This dir has our ISAPI
# extension as the only application, mapped to file-extension '*'. Thus, our
# extension handles *all* requests in this directory.
# The basic process is that the filter does URL rewriting, redirecting every
# URL to our Virtual Directory. Our extension then handles this request,
# forwarding the data from the proxied site.
# For example:
# * URL of "index.html" comes in.
# * Filter rewrites this to "/python/index.html"
# * Our extension sees the full "/python/index.html", removes the leading
# portion, and opens and forwards the remote URL.
# This sample is very small - it avoids most error handling, etc. It is for
# demonstration purposes only.
from isapi import isapicon, threaded_extension
from isapi.simple import SimpleFilter
import sys
import traceback
import urllib.request, urllib.parse, urllib.error
# sys.isapidllhandle will exist when we are loaded by the IIS framework.
# In this case we redirect our output to the win32traceutil collector.
if hasattr(sys, "isapidllhandle"):
import win32traceutil
# The site we are proxying.
proxy = "http://www.python.org"
# The name of the virtual directory we install in, and redirect from.
virtualdir = "/python"
# The key feature of this redirector over the simple redirector is that it
# can choose to ignore certain responses by having the filter not rewrite them
# to our virtual dir. For this sample, we just exclude the IIS help directory.
# The ISAPI extension - handles requests in our virtual dir, and sends the
# response to the client.
class Extension(threaded_extension.ThreadPoolExtension):
"Python sample Extension"
def Dispatch(self, ecb):
# Note that our ThreadPoolExtension base class will catch exceptions
# in our Dispatch method, and write the traceback to the client.
# That is perfect for this sample, so we don't catch our own.
#print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),)
url = ecb.GetServerVariable("URL")
if url.startswith(virtualdir):
new_url = proxy + url[len(virtualdir):]
print("Opening", new_url)
fp = urllib.request.urlopen(new_url)
headers = fp.info()
ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False)
ecb.WriteClient(fp.read())
ecb.DoneWithSession()
print("Returned data from '%s'!" % (new_url,))
else:
# this should never happen - we should only see requests that
# start with our virtual directory name.
print("Not proxying '%s'" % (url,))
# The ISAPI filter.
class Filter(SimpleFilter):
"Sample Python Redirector"
filter_flags = isapicon.SF_NOTIFY_PREPROC_HEADERS | \
isapicon.SF_NOTIFY_ORDER_DEFAULT
def HttpFilterProc(self, fc):
#print "Filter Dispatch"
nt = fc.NotificationType
if nt != isapicon.SF_NOTIFY_PREPROC_HEADERS:
return isapicon.SF_STATUS_REQ_NEXT_NOTIFICATION
pp = fc.GetData()
url = pp.GetHeader("url")
#print "URL is '%s'" % (url,)
prefix = virtualdir
if not url.startswith(prefix):
new_url = prefix + url
print("New proxied URL is '%s'" % (new_url,))
pp.SetHeader("url", new_url)
# For the sake of demonstration, show how the FilterContext
# attribute is used. It always starts out life as None, and
# any assignments made are automatically decref'd by the
# framework during a SF_NOTIFY_END_OF_NET_SESSION notification.
if fc.FilterContext is None:
fc.FilterContext = 0
fc.FilterContext += 1
print("This is request number %d on this connection" % fc.FilterContext)
return isapicon.SF_STATUS_REQ_HANDLED_NOTIFICATION
else:
print("Filter ignoring URL '%s'" % (url,))
# Some older code that handled SF_NOTIFY_URL_MAP.
#~ print "Have URL_MAP notify"
#~ urlmap = fc.GetData()
#~ print "URI is", urlmap.URL
#~ print "Path is", urlmap.PhysicalPath
#~ if urlmap.URL.startswith("/UC/"):
#~ # Find the /UC/ in the physical path, and nuke it (except
#~ # as the path is physical, it is \)
#~ p = urlmap.PhysicalPath
#~ pos = p.index("\\UC\\")
#~ p = p[:pos] + p[pos+3:]
#~ p = r"E:\src\pyisapi\webroot\PyTest\formTest.htm"
#~ print "New path is", p
#~ urlmap.PhysicalPath = p
# The entry points for the ISAPI extension.
def __FilterFactory__():
return Filter()
def __ExtensionFactory__():
return Extension()
if __name__=='__main__':
# If run from the command-line, install ourselves.
from isapi.install import *
params = ISAPIParameters()
# Setup all filters - these are global to the site.
params.Filters = [
FilterParameters(Name="PythonRedirector",
Description=Filter.__doc__),
]
# Setup the virtual directories - this is a list of directories our
# extension uses - in this case only 1.
# Each extension has a "script map" - this is the mapping of ISAPI
# extensions.
sm = [
ScriptMapParams(Extension="*", Flags=0)
]
vd = VirtualDirParameters(Name=virtualdir[1:],
Description = Extension.__doc__,
ScriptMaps = sm,
ScriptMapUpdate = "replace"
)
params.VirtualDirs = [vd]
HandleCommandLine(params)

View file

@ -0,0 +1,154 @@
# This extension is used mainly for testing purposes - it is not
# designed to be a simple sample, but instead is a hotch-potch of things
# that attempts to exercise the framework.
from isapi import isapicon
from isapi.simple import SimpleExtension
import sys, os, stat
if hasattr(sys, "isapidllhandle"):
import win32traceutil
# We use the same reload support as 'advanced.py' demonstrates.
from isapi import InternalReloadException
import win32event, win32file, winerror, win32con, threading
# A watcher thread that checks for __file__ changing.
# When it detects it, it simply sets "change_detected" to true.
class ReloadWatcherThread(threading.Thread):
def __init__(self):
self.change_detected = False
self.filename = __file__
if self.filename.endswith("c") or self.filename.endswith("o"):
self.filename = self.filename[:-1]
self.handle = win32file.FindFirstChangeNotification(
os.path.dirname(self.filename),
False, # watch tree?
win32con.FILE_NOTIFY_CHANGE_LAST_WRITE)
threading.Thread.__init__(self)
def run(self):
last_time = os.stat(self.filename)[stat.ST_MTIME]
while 1:
try:
rc = win32event.WaitForSingleObject(self.handle,
win32event.INFINITE)
win32file.FindNextChangeNotification(self.handle)
except win32event.error as details:
# handle closed - thread should terminate.
if details.winerror != winerror.ERROR_INVALID_HANDLE:
raise
break
this_time = os.stat(self.filename)[stat.ST_MTIME]
if this_time != last_time:
print("Detected file change - flagging for reload.")
self.change_detected = True
last_time = this_time
def stop(self):
win32file.FindCloseChangeNotification(self.handle)
def TransmitFileCallback(ecb, hFile, cbIO, errCode):
print("Transmit complete!")
ecb.close()
# The ISAPI extension - handles requests in our virtual dir, and sends the
# response to the client.
class Extension(SimpleExtension):
"Python test Extension"
def __init__(self):
self.reload_watcher = ReloadWatcherThread()
self.reload_watcher.start()
def HttpExtensionProc(self, ecb):
# NOTE: If you use a ThreadPoolExtension, you must still perform
# this check in HttpExtensionProc - raising the exception from
# The "Dispatch" method will just cause the exception to be
# rendered to the browser.
if self.reload_watcher.change_detected:
print("Doing reload")
raise InternalReloadException
if ecb.GetServerVariable("UNICODE_URL").endswith("test.py"):
file_flags = win32con.FILE_FLAG_SEQUENTIAL_SCAN | win32con.FILE_FLAG_OVERLAPPED
hfile = win32file.CreateFile(__file__, win32con.GENERIC_READ,
0, None, win32con.OPEN_EXISTING,
file_flags, None)
flags = isapicon.HSE_IO_ASYNC | isapicon.HSE_IO_DISCONNECT_AFTER_SEND | \
isapicon.HSE_IO_SEND_HEADERS
# We pass hFile to the callback simply as a way of keeping it alive
# for the duration of the transmission
try:
ecb.TransmitFile(TransmitFileCallback, hfile,
int(hfile),
"200 OK",
0, 0, None, None, flags)
except:
# Errors keep this source file open!
hfile.Close()
raise
else:
# default response
ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0)
print("<HTML><BODY>", file=ecb)
print("The root of this site is at", ecb.MapURLToPath("/"), file=ecb)
print("</BODY></HTML>", file=ecb)
ecb.close()
return isapicon.HSE_STATUS_SUCCESS
def TerminateExtension(self, status):
self.reload_watcher.stop()
# The entry points for the ISAPI extension.
def __ExtensionFactory__():
return Extension()
# Our special command line customization.
# Pre-install hook for our virtual directory.
def PreInstallDirectory(params, options):
# If the user used our special '--description' option,
# then we override our default.
if options.description:
params.Description = options.description
# Post install hook for our entire script
def PostInstall(params, options):
print()
print("The sample has been installed.")
print("Point your browser to /PyISAPITest")
# Handler for our custom 'status' argument.
def status_handler(options, log, arg):
"Query the status of something"
print("Everything seems to be fine!")
custom_arg_handlers = {"status": status_handler}
if __name__=='__main__':
# If run from the command-line, install ourselves.
from isapi.install import *
params = ISAPIParameters(PostInstall = PostInstall)
# Setup the virtual directories - this is a list of directories our
# extension uses - in this case only 1.
# Each extension has a "script map" - this is the mapping of ISAPI
# extensions.
sm = [
ScriptMapParams(Extension="*", Flags=0)
]
vd = VirtualDirParameters(Name="PyISAPITest",
Description = Extension.__doc__,
ScriptMaps = sm,
ScriptMapUpdate = "replace",
# specify the pre-install hook.
PreInstall = PreInstallDirectory
)
params.VirtualDirs = [vd]
# Setup our custom option parser.
from optparse import OptionParser
parser = OptionParser('') # blank usage, so isapi sets it.
parser.add_option("", "--description",
action="store",
help="custom description to use for the virtual directory")
HandleCommandLine(params, opt_parser=parser,
custom_arg_handlers = custom_arg_handlers)

68
libs/win/isapi/simple.py Normal file
View file

@ -0,0 +1,68 @@
"""Simple base-classes for extensions and filters.
None of the filter and extension functions are considered 'optional' by the
framework. These base-classes provide simple implementations for the
Initialize and Terminate functions, allowing you to omit them.
It is not necessary to use these base-classes - but if you don't, you
must ensure each of the required methods is implemented.
"""
class SimpleExtension:
"Base class for a simple ISAPI extension"
def __init__(self):
pass
def GetExtensionVersion(self, vi):
"""Called by the ISAPI framework to get the extension version
The default implementation uses the classes docstring to
set the extension description."""
# nod to our reload capability - vi is None when we are reloaded.
if vi is not None:
vi.ExtensionDesc = self.__doc__
def HttpExtensionProc(self, control_block):
"""Called by the ISAPI framework for each extension request.
sub-classes must provide an implementation for this method.
"""
raise NotImplementedError("sub-classes should override HttpExtensionProc")
def TerminateExtension(self, status):
"""Called by the ISAPI framework as the extension terminates.
"""
pass
class SimpleFilter:
"Base class for a a simple ISAPI filter"
filter_flags = None
def __init__(self):
pass
def GetFilterVersion(self, fv):
"""Called by the ISAPI framework to get the extension version
The default implementation uses the classes docstring to
set the extension description, and uses the classes
filter_flags attribute to set the ISAPI filter flags - you
must specify filter_flags in your class.
"""
if self.filter_flags is None:
raise RuntimeError("You must specify the filter flags")
# nod to our reload capability - fv is None when we are reloaded.
if fv is not None:
fv.Flags = self.filter_flags
fv.FilterDesc = self.__doc__
def HttpFilterProc(self, fc):
"""Called by the ISAPI framework for each filter request.
sub-classes must provide an implementation for this method.
"""
raise NotImplementedError("sub-classes should override HttpExtensionProc")
def TerminateFilter(self, status):
"""Called by the ISAPI framework as the filter terminates.
"""
pass
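# Illustrative sub-class (a sketch, not part of the original module; the
# response text is invented). The only method you must supply yourself is
# HttpExtensionProc:
#
#   from isapi import isapicon
#
#   class HelloExtension(SimpleExtension):
#       "Minimal hello-world extension"
#       def HttpExtensionProc(self, ecb):
#           ecb.SendResponseHeaders("200 OK", "Content-Type: text/plain\r\n\r\n", False)
#           ecb.WriteClient("hello")
#           ecb.DoneWithSession()
#           return isapicon.HSE_STATUS_SUCCESS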

View file

@ -0,0 +1,3 @@
This is a directory for tests of the PyISAPI framework.
For demos, please see the pyisapi 'samples' directory.

View file

@ -0,0 +1,111 @@
# This is an ISAPI extension purely for testing purposes. It is NOT
# a 'demo' (even though it may be useful!)
#
# Install this extension, then point your browser to:
# "http://localhost/pyisapi_test/test1"
# This will execute the method 'test1' below. See below for the list of
# test methods that are acceptable.
from isapi import isapicon, threaded_extension, ExtensionError
from isapi.simple import SimpleFilter
import traceback
import urllib.request, urllib.parse, urllib.error
import winerror
# If we have no console (eg, we are running from inside IIS), redirect output
# somewhere useful - in this case, the standard win32 trace collector.
import win32api
try:
win32api.GetConsoleTitle()
except win32api.error:
# No console - redirect
import win32traceutil
# The ISAPI extension - handles requests in our virtual dir, and sends the
# response to the client.
class Extension(threaded_extension.ThreadPoolExtension):
"Python ISAPI Tester"
def Dispatch(self, ecb):
print('Tester dispatching "%s"' % (ecb.GetServerVariable("URL"),))
url = ecb.GetServerVariable("URL")
test_name = url.split("/")[-1]
meth = getattr(self, test_name, None)
if meth is None:
raise AttributeError("No test named '%s'" % (test_name,))
result = meth(ecb)
if result is None:
# This means the test finalized everything
return
ecb.SendResponseHeaders("200 OK", "Content-type: text/html\r\n\r\n",
False)
print("<HTML><BODY>Finished running test <i>", test_name, "</i>", file=ecb)
print("<pre>", file=ecb)
print(result, file=ecb)
print("</pre>", file=ecb)
print("</BODY></HTML>", file=ecb)
ecb.DoneWithSession()
def test1(self, ecb):
try:
ecb.GetServerVariable("foo bar")
raise RuntimeError("should have failed!")
except ExtensionError as err:
assert err.errno == winerror.ERROR_INVALID_INDEX, err
return "worked!"
def test_long_vars(self, ecb):
qs = ecb.GetServerVariable("QUERY_STRING")
# Our implementation has a default buffer size of 8k - so we test
# the code that handles an overflow by ensuring there are more
# than 8k worth of chars in the URL.
expected_query = ('x' * 8500)
if len(qs)==0:
# Just the URL with no query part - redirect to myself, but with
# a huge query portion.
me = ecb.GetServerVariable("URL")
headers = "Location: " + me + "?" + expected_query + "\r\n\r\n"
ecb.SendResponseHeaders("301 Moved", headers)
ecb.DoneWithSession()
return None
if qs == expected_query:
return "Total length of variable is %d - test worked!" % (len(qs),)
else:
return "Unexpected query portion! Got %d chars, expected %d" % \
(len(qs), len(expected_query))
def test_unicode_vars(self, ecb):
# We need to check that we are running IIS6! This seems the only
# effective way from an extension.
ver = float(ecb.GetServerVariable("SERVER_SOFTWARE").split('/')[1])
if ver < 6.0:
return "This is IIS version %g - unicode only works in IIS6 and later" % ver
us = ecb.GetServerVariable("UNICODE_SERVER_NAME")
if not isinstance(us, str):
raise RuntimeError("unexpected type!")
if us != str(ecb.GetServerVariable("SERVER_NAME")):
raise RuntimeError("Unicode and non-unicode values were not the same")
return "worked!"
# The entry points for the ISAPI extension.
def __ExtensionFactory__():
return Extension()
if __name__=='__main__':
# If run from the command-line, install ourselves.
from isapi.install import *
params = ISAPIParameters()
# Setup the virtual directories - this is a list of directories our
# extension uses - in this case only 1.
# Each extension has a "script map" - this is the mapping of ISAPI
# extensions.
sm = [
ScriptMapParams(Extension="*", Flags=0)
]
vd = VirtualDirParameters(Name="pyisapi_test",
Description = Extension.__doc__,
ScriptMaps = sm,
ScriptMapUpdate = "replace"
)
params.VirtualDirs = [vd]
HandleCommandLine(params)

View file

@ -0,0 +1,171 @@
"""An ISAPI extension base class implemented using a thread-pool."""
# $Id$
import sys
import time
from isapi import isapicon, ExtensionError
import isapi.simple
from win32file import GetQueuedCompletionStatus, CreateIoCompletionPort, \
PostQueuedCompletionStatus, CloseHandle
from win32security import SetThreadToken
from win32event import INFINITE
from pywintypes import OVERLAPPED
import threading
import traceback
ISAPI_REQUEST = 1
ISAPI_SHUTDOWN = 2
class WorkerThread(threading.Thread):
def __init__(self, extension, io_req_port):
self.running = False
self.io_req_port = io_req_port
self.extension = extension
threading.Thread.__init__(self)
# We wait 15 seconds for a thread to terminate, but if it fails to,
# we don't want the process to hang at exit waiting for it...
self.setDaemon(True)
def run(self):
self.running = True
while self.running:
errCode, bytes, key, overlapped = \
GetQueuedCompletionStatus(self.io_req_port, INFINITE)
if key == ISAPI_SHUTDOWN and overlapped is None:
break
# Let the parent extension handle the command.
dispatcher = self.extension.dispatch_map.get(key)
if dispatcher is None:
raise RuntimeError("Bad request '%s'" % (key,))
dispatcher(errCode, bytes, key, overlapped)
def call_handler(self, cblock):
self.extension.Dispatch(cblock)
# A generic thread-pool based extension, using IO Completion Ports.
# Sub-classes can override one method to implement a simple extension, or
# may leverage the CompletionPort to queue their own requests, and implement a
# fully asynch extension.
class ThreadPoolExtension(isapi.simple.SimpleExtension):
"Base class for an ISAPI extension based around a thread-pool"
max_workers = 20
worker_shutdown_wait = 15000 # 15 seconds for workers to quit...
def __init__(self):
self.workers = []
# extensible dispatch map, for sub-classes that need to post their
# own requests to the completion port.
# Each of these functions is called with the result of
# GetQueuedCompletionStatus for our port.
self.dispatch_map = {
ISAPI_REQUEST: self.DispatchConnection,
}
def GetExtensionVersion(self, vi):
isapi.simple.SimpleExtension.GetExtensionVersion(self, vi)
# As per Q192800, the CompletionPort should be created with the number
# of processors, even if the number of worker threads is much larger.
# Passing 0 means the system picks the number.
self.io_req_port = CreateIoCompletionPort(-1, None, 0, 0)
# start up the workers
self.workers = []
for i in range(self.max_workers):
worker = WorkerThread(self, self.io_req_port)
worker.start()
self.workers.append(worker)
def HttpExtensionProc(self, control_block):
overlapped = OVERLAPPED()
overlapped.object = control_block
PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_REQUEST, overlapped)
return isapicon.HSE_STATUS_PENDING
def TerminateExtension(self, status):
for worker in self.workers:
worker.running = False
for worker in self.workers:
PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_SHUTDOWN, None)
# wait for them to terminate - pity we aren't using 'native' threads
# as then we could do a smart wait - but now we need to poll....
end_time = time.time() + self.worker_shutdown_wait/1000
alive = self.workers
while alive:
if time.time() > end_time:
# xxx - might be nice to log something here.
break
time.sleep(0.2)
alive = [w for w in alive if w.isAlive()]
self.dispatch_map = {} # break circles
CloseHandle(self.io_req_port)
# This is the one operation the base class supports - a simple
# Connection request. We setup the thread-token, and dispatch to the
# sub-class's 'Dispatch' method.
def DispatchConnection(self, errCode, bytes, key, overlapped):
control_block = overlapped.object
# setup the correct user for this request
hRequestToken = control_block.GetImpersonationToken()
SetThreadToken(None, hRequestToken)
try:
try:
self.Dispatch(control_block)
except:
self.HandleDispatchError(control_block)
finally:
# reset the security context
SetThreadToken(None, None)
def Dispatch(self, ecb):
"""Overridden by the sub-class to handle connection requests.
This class creates a thread-pool using a Windows completion port,
and dispatches requests via this port. Sub-classes can generally
implement each connection request using blocking reads and writes, and
the thread-pool will still provide decent response to the end user.
The sub-class can set a max_workers attribute (default is 20). Note
that this generally does *not* mean 20 threads will all be concurrently
running, via the magic of Windows completion ports.
There is no default implementation - sub-classes must implement this.
"""
raise NotImplementedError("sub-classes should override Dispatch")
def HandleDispatchError(self, ecb):
"""Handles errors in the Dispatch method.
When a Dispatch method call fails, this method is called to handle
the exception. The default implementation formats the traceback
in the browser.
"""
ecb.HttpStatusCode = isapicon.HSE_STATUS_ERROR
#control_block.LogData = "we failed!"
exc_typ, exc_val, exc_tb = sys.exc_info()
limit = None
try:
try:
import cgi
ecb.SendResponseHeaders("200 OK", "Content-type: text/html\r\n\r\n",
False)
print(file=ecb)
print("<H3>Traceback (most recent call last):</H3>", file=ecb)
list = traceback.format_tb(exc_tb, limit) + \
traceback.format_exception_only(exc_typ, exc_val)
print("<PRE>%s<B>%s</B></PRE>" % (
cgi.escape("".join(list[:-1])), cgi.escape(list[-1]),), file=ecb)
except ExtensionError:
# The client disconnected without reading the error body -
# it's probably not a real browser at the other end, ignore it.
pass
except:
print("FAILED to render the error message!")
traceback.print_exc()
print("ORIGINAL extension error:")
traceback.print_exception(exc_typ, exc_val, exc_tb)
finally:
# holding tracebacks in a local of a frame that may itself be
# part of a traceback used to be evil and cause leaks!
exc_tb = None
ecb.DoneWithSession()
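# Illustrative sub-class (a sketch, not part of the original module; the
# response text is invented). Blocking calls inside Dispatch are acceptable
# because each request runs on one of the pool's worker threads:
#
#   class EchoExtension(ThreadPoolExtension):
#       "Echo the requested URL back to the client"
#       def Dispatch(self, ecb):
#           url = ecb.GetServerVariable("URL")
#           ecb.SendResponseHeaders("200 OK", "Content-Type: text/plain\r\n\r\n", False)
#           ecb.WriteClient("You asked for: %s" % (url,))
#           ecb.DoneWithSession()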

View file

@ -0,0 +1 @@
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('jaraco',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('jaraco', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('jaraco', [os.path.dirname(p)])));m = m or sys.modules.setdefault('jaraco', types.ModuleType('jaraco'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)

View file

@ -0,0 +1 @@
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('jaraco',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('jaraco', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('jaraco', [os.path.dirname(p)])));m = m or sys.modules.setdefault('jaraco', types.ModuleType('jaraco'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)

View file

@ -0,0 +1 @@
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('jaraco',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('jaraco', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('jaraco', [os.path.dirname(p)])));m = m or sys.modules.setdefault('jaraco', types.ModuleType('jaraco'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)

View file

@ -0,0 +1 @@
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('jaraco',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('jaraco', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('jaraco', [os.path.dirname(p)])));m = m or sys.modules.setdefault('jaraco', types.ModuleType('jaraco'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)

View file

@ -0,0 +1 @@
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('jaraco',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('jaraco', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('jaraco', [os.path.dirname(p)])));m = m or sys.modules.setdefault('jaraco', types.ModuleType('jaraco'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)

View file

@ -0,0 +1 @@
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('jaraco',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('jaraco', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('jaraco', [os.path.dirname(p)])));m = m or sys.modules.setdefault('jaraco', types.ModuleType('jaraco'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)

View file

@ -0,0 +1 @@
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('jaraco',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('jaraco', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('jaraco', [os.path.dirname(p)])));m = m or sys.modules.setdefault('jaraco', types.ModuleType('jaraco'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)

View file

View file

@ -0,0 +1,75 @@
"""
Routines for obtaining the class names
of an object and its parent classes.
"""
from __future__ import unicode_literals
def all_bases(c):
"""
return a tuple of all base classes the class c has as a parent.
>>> object in all_bases(list)
True
"""
return c.mro()[1:]
def all_classes(c):
"""
return a tuple of all classes to which c belongs
>>> list in all_classes(list)
True
"""
return c.mro()
# borrowed from
# http://code.activestate.com/recipes/576949-find-all-subclasses-of-a-given-class/
def iter_subclasses(cls, _seen=None):
"""
Generator over all subclasses of a given class, in depth-first order.
>>> bool in list(iter_subclasses(int))
True
>>> class A(object): pass
>>> class B(A): pass
>>> class C(A): pass
>>> class D(B,C): pass
>>> class E(D): pass
>>>
>>> for cls in iter_subclasses(A):
... print(cls.__name__)
B
D
E
C
>>> # get ALL (new-style) classes currently defined
>>> res = [cls.__name__ for cls in iter_subclasses(object)]
>>> 'type' in res
True
>>> 'tuple' in res
True
>>> len(res) > 100
True
"""
if not isinstance(cls, type):
raise TypeError(
'iter_subclasses must be called with '
'new-style classes, not %.100r' % cls
)
if _seen is None:
_seen = set()
try:
subs = cls.__subclasses__()
except TypeError: # fails only when cls is type
subs = cls.__subclasses__(cls)
for sub in subs:
if sub in _seen:
continue
_seen.add(sub)
yield sub
for sub in iter_subclasses(sub, _seen):
yield sub

View file

@ -0,0 +1,41 @@
"""
meta.py
Some useful metaclasses.
"""
from __future__ import unicode_literals
class LeafClassesMeta(type):
"""
A metaclass for classes that keeps track of all of them that
aren't base classes.
"""
_leaf_classes = set()
def __init__(cls, name, bases, attrs):
if not hasattr(cls, '_leaf_classes'):
cls._leaf_classes = set()
leaf_classes = getattr(cls, '_leaf_classes')
leaf_classes.add(cls)
# remove any base classes
leaf_classes -= set(bases)
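# Illustrative usage (a sketch, not part of the original module; Python 3
# metaclass syntax shown). With this metaclass, only classes that have no
# registered subclasses remain in _leaf_classes:
#
#   class Base(metaclass=LeafClassesMeta):
#       pass
#   class Child(Base):
#       pass
#   # in this isolated example, Base was removed when Child was created
#   assert Base._leaf_classes == {Child}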
class TagRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all classes by a class attribute, indicated by attr_name.
"""
attr_name = 'tag'
def __init__(cls, name, bases, namespace):
super(TagRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
meta = cls.__class__
attr = getattr(cls, meta.attr_name, None)
if attr:
cls._registry[attr] = cls
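# Illustrative usage (a sketch, not part of the original module; Python 3
# metaclass syntax shown). The base class accumulates a registry keyed by
# each subclass's 'tag' attribute:
#
#   class Handler(metaclass=TagRegistered):
#       pass
#   class JSONHandler(Handler):
#       tag = 'json'
#   assert Handler._registry['json'] is JSONHandler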

View file

@ -0,0 +1,67 @@
from __future__ import unicode_literals
import six
__metaclass__ = type
class NonDataProperty:
"""Much like the property builtin, but only implements __get__,
making it a non-data property, and can be subsequently reset.
See http://users.rcn.com/python/download/Descriptor.htm for more
information.
>>> class X(object):
... @NonDataProperty
... def foo(self):
... return 3
>>> x = X()
>>> x.foo
3
>>> x.foo = 4
>>> x.foo
4
"""
def __init__(self, fget):
assert fget is not None, "fget cannot be none"
assert six.callable(fget), "fget must be callable"
self.fget = fget
def __get__(self, obj, objtype=None):
if obj is None:
return self
return self.fget(obj)
# from http://stackoverflow.com/a/5191224
class ClassPropertyDescriptor:
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
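# Illustrative usage (a sketch, not part of the original module): classproperty
# makes a method readable as an attribute on both the class and its instances.
#
#   class Config:
#       _name = 'default'
#       @classproperty
#       def name(cls):
#           return cls._name
#
#   assert Config.name == 'default'
#   assert Config().name == 'default'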

View file

@ -0,0 +1,906 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, division
import re
import operator
import collections
import itertools
import copy
import functools
try:
import collections.abc
except ImportError:
# Python 2.7
collections.abc = collections
import six
from jaraco.classes.properties import NonDataProperty
import jaraco.text
class Projection(collections.abc.Mapping):
"""
Project a set of keys over a mapping
>>> sample = {'a': 1, 'b': 2, 'c': 3}
>>> prj = Projection(['a', 'c', 'd'], sample)
>>> prj == {'a': 1, 'c': 3}
True
Keys should only appear if they were specified and exist in the space.
>>> sorted(list(prj.keys()))
['a', 'c']
Use the projection to update another dict.
>>> target = {'a': 2, 'b': 2}
>>> target.update(prj)
>>> target == {'a': 1, 'b': 2, 'c': 3}
True
Also note that Projection keeps a reference to the original dict, so
if you modify the original dict, that could modify the Projection.
>>> del sample['a']
>>> dict(prj)
{'c': 3}
"""
def __init__(self, keys, space):
self._keys = tuple(keys)
self._space = space
def __getitem__(self, key):
if key not in self._keys:
raise KeyError(key)
return self._space[key]
def __iter__(self):
return iter(set(self._keys).intersection(self._space))
def __len__(self):
return len(tuple(iter(self)))
class DictFilter(object):
"""
Takes a dict, and simulates a sub-dict based on the keys.
>>> sample = {'a': 1, 'b': 2, 'c': 3}
>>> filtered = DictFilter(sample, ['a', 'c'])
>>> filtered == {'a': 1, 'c': 3}
True
One can also filter by a regular expression pattern
>>> sample['d'] = 4
>>> sample['ef'] = 5
Here we filter for only single-character keys
>>> filtered = DictFilter(sample, include_pattern='.$')
>>> filtered == {'a': 1, 'b': 2, 'c': 3, 'd': 4}
True
Also note that DictFilter keeps a reference to the original dict, so
if you modify the original dict, that could modify the filtered dict.
>>> del sample['d']
>>> del sample['a']
>>> filtered == {'b': 2, 'c': 3}
True
"""
def __init__(self, dict, include_keys=[], include_pattern=None):
self.dict = dict
self.specified_keys = set(include_keys)
if include_pattern is not None:
self.include_pattern = re.compile(include_pattern)
else:
# for performance, replace the pattern_keys property
self.pattern_keys = set()
def get_pattern_keys(self):
keys = filter(self.include_pattern.match, self.dict.keys())
return set(keys)
pattern_keys = NonDataProperty(get_pattern_keys)
@property
def include_keys(self):
return self.specified_keys.union(self.pattern_keys)
def keys(self):
return self.include_keys.intersection(self.dict.keys())
def values(self):
keys = self.keys()
values = map(self.dict.get, keys)
return values
def __getitem__(self, i):
if i not in self.include_keys:
raise KeyError(i)
return self.dict[i]
def items(self):
keys = self.keys()
values = map(self.dict.get, keys)
return zip(keys, values)
def __eq__(self, other):
return dict(self) == other
def __ne__(self, other):
return dict(self) != other
def dict_map(function, dictionary):
"""
dict_map is much like the built-in function map. It takes a dictionary
and applies a function to the values of that dictionary, returning a
new dictionary with the mapped values in the original keys.
>>> d = dict_map(lambda x:x+1, dict(a=1, b=2))
>>> d == dict(a=2,b=3)
True
"""
return dict((key, function(value)) for key, value in dictionary.items())
class RangeMap(dict):
"""
A dictionary-like object that uses the keys as bounds for a range.
Inclusion of the value for that range is determined by the
key_match_comparator, which defaults to less-than-or-equal.
A value is returned for a key if it is the first key that matches in
the sorted list of keys.
One may supply keyword parameters to be passed to the sort function used
to sort keys (i.e. cmp [python 2 only], key, reverse) as sort_params.
Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b'
>>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy
>>> r[1], r[2], r[3], r[4], r[5], r[6]
('a', 'a', 'a', 'b', 'b', 'b')
Even float values should work so long as the comparison operator
supports it.
>>> r[4.5]
'b'
But you'll notice that the way rangemap is defined, it must be open-ended
on one side.
>>> r[0]
'a'
>>> r[-1]
'a'
One can close the open-end of the RangeMap by using undefined_value
>>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
>>> r[0]
Traceback (most recent call last):
...
KeyError: 0
One can get the first or last elements in the range by using RangeMap.Item
>>> last_item = RangeMap.Item(-1)
>>> r[last_item]
'b'
.last_item is a shortcut for Item(-1)
>>> r[RangeMap.last_item]
'b'
Sometimes it's useful to find the bounds for a RangeMap
>>> r.bounds()
(0, 6)
RangeMap supports .get(key, default)
>>> r.get(0, 'not found')
'not found'
>>> r.get(7, 'not found')
'not found'
"""
def __init__(self, source, sort_params={}, key_match_comparator=operator.le):
dict.__init__(self, source)
self.sort_params = sort_params
self.match = key_match_comparator
def __getitem__(self, item):
sorted_keys = sorted(self.keys(), **self.sort_params)
if isinstance(item, RangeMap.Item):
result = self.__getitem__(sorted_keys[item])
else:
key = self._find_first_match_(sorted_keys, item)
result = dict.__getitem__(self, key)
if result is RangeMap.undefined_value:
raise KeyError(key)
return result
def get(self, key, default=None):
"""
Return the value for key if key is in the dictionary, else default.
If default is not given, it defaults to None, so that this method
never raises a KeyError.
"""
try:
return self[key]
except KeyError:
return default
def _find_first_match_(self, keys, item):
is_match = functools.partial(self.match, item)
matches = list(filter(is_match, keys))
if matches:
return matches[0]
raise KeyError(item)
def bounds(self):
sorted_keys = sorted(self.keys(), **self.sort_params)
return (
sorted_keys[RangeMap.first_item],
sorted_keys[RangeMap.last_item],
)
# some special values for the RangeMap
undefined_value = type(str('RangeValueUndefined'), (object,), {})()
class Item(int):
"RangeMap Item"
first_item = Item(0)
last_item = Item(-1)
def __identity(x):
return x
def sorted_items(d, key=__identity, reverse=False):
"""
Return the items of the dictionary sorted by the keys
>>> sample = dict(foo=20, bar=42, baz=10)
>>> tuple(sorted_items(sample))
(('bar', 42), ('baz', 10), ('foo', 20))
>>> reverse_string = lambda s: ''.join(reversed(s))
>>> tuple(sorted_items(sample, key=reverse_string))
(('foo', 20), ('bar', 42), ('baz', 10))
>>> tuple(sorted_items(sample, reverse=True))
(('foo', 20), ('baz', 10), ('bar', 42))
"""
# wrap the key func so it operates on the first element of each item
def pairkey_key(item):
return key(item[0])
return sorted(d.items(), key=pairkey_key, reverse=reverse)
class KeyTransformingDict(dict):
"""
A dict subclass that transforms the keys before they're used.
Subclasses may override the default transform_key to customize behavior.
"""
@staticmethod
def transform_key(key):
return key
def __init__(self, *args, **kargs):
super(KeyTransformingDict, self).__init__()
# build a dictionary using the default constructs
d = dict(*args, **kargs)
# build this dictionary using transformed keys.
for item in d.items():
self.__setitem__(*item)
def __setitem__(self, key, val):
key = self.transform_key(key)
super(KeyTransformingDict, self).__setitem__(key, val)
def __getitem__(self, key):
key = self.transform_key(key)
return super(KeyTransformingDict, self).__getitem__(key)
def __contains__(self, key):
key = self.transform_key(key)
return super(KeyTransformingDict, self).__contains__(key)
def __delitem__(self, key):
key = self.transform_key(key)
return super(KeyTransformingDict, self).__delitem__(key)
def get(self, key, *args, **kwargs):
key = self.transform_key(key)
return super(KeyTransformingDict, self).get(key, *args, **kwargs)
def setdefault(self, key, *args, **kwargs):
key = self.transform_key(key)
return super(KeyTransformingDict, self).setdefault(key, *args, **kwargs)
def pop(self, key, *args, **kwargs):
key = self.transform_key(key)
return super(KeyTransformingDict, self).pop(key, *args, **kwargs)
def matching_key_for(self, key):
"""
Given a key, return the actual key stored in self that matches.
Raise KeyError if the key isn't found.
"""
try:
return next(e_key for e_key in self.keys() if e_key == key)
except StopIteration:
raise KeyError(key)
class FoldedCaseKeyedDict(KeyTransformingDict):
"""
A case-insensitive dictionary (keys are compared case-insensitively
if they are strings).
>>> d = FoldedCaseKeyedDict()
>>> d['heLlo'] = 'world'
>>> list(d.keys()) == ['heLlo']
True
>>> list(d.values()) == ['world']
True
>>> d['hello'] == 'world'
True
>>> 'hello' in d
True
>>> 'HELLO' in d
True
>>> print(repr(FoldedCaseKeyedDict({'heLlo': 'world'})).replace("u'", "'"))
{'heLlo': 'world'}
>>> d = FoldedCaseKeyedDict({'heLlo': 'world'})
>>> print(d['hello'])
world
>>> print(d['Hello'])
world
>>> list(d.keys())
['heLlo']
>>> d = FoldedCaseKeyedDict({'heLlo': 'world', 'Hello': 'world'})
>>> list(d.values())
['world']
>>> key, = d.keys()
>>> key in ['heLlo', 'Hello']
True
>>> del d['HELLO']
>>> d
{}
get should work
>>> d['Sumthin'] = 'else'
>>> d.get('SUMTHIN')
'else'
>>> d.get('OTHER', 'thing')
'thing'
>>> del d['sumthin']
setdefault should also work
>>> d['This'] = 'that'
>>> print(d.setdefault('this', 'other'))
that
>>> len(d)
1
>>> print(d['this'])
that
>>> print(d.setdefault('That', 'other'))
other
>>> print(d['THAT'])
other
Make it pop!
>>> print(d.pop('THAT'))
other
To retrieve the key in its originally-supplied form, use matching_key_for
>>> print(d.matching_key_for('this'))
This
"""
@staticmethod
def transform_key(key):
return jaraco.text.FoldedCase(key)
class DictAdapter(object):
"""
Provide a getitem interface for attributes of an object.
Let's say you want to get at the string.ascii_lowercase property in a formatted
string. It's easy with DictAdapter.
>>> import string
>>> print("lowercase is %(ascii_lowercase)s" % DictAdapter(string))
lowercase is abcdefghijklmnopqrstuvwxyz
"""
def __init__(self, wrapped_ob):
self.object = wrapped_ob
def __getitem__(self, name):
return getattr(self.object, name)
class ItemsAsAttributes(object):
"""
Mix-in class to enable a mapping object to provide items as
attributes.
>>> C = type(str('C'), (dict, ItemsAsAttributes), dict())
>>> i = C()
>>> i['foo'] = 'bar'
>>> i.foo
'bar'
Natural attribute access takes precedence
>>> i.foo = 'henry'
>>> i.foo
'henry'
But as you might expect, the mapping functionality is preserved.
>>> i['foo']
'bar'
A normal attribute error should be raised if an attribute is
requested that doesn't exist.
>>> i.missing
Traceback (most recent call last):
...
AttributeError: 'C' object has no attribute 'missing'
It also works on dicts that customize __getitem__
>>> missing_func = lambda self, key: 'missing item'
>>> C = type(
... str('C'),
... (dict, ItemsAsAttributes),
... dict(__missing__ = missing_func),
... )
>>> i = C()
>>> i.missing
'missing item'
>>> i.foo
'missing item'
"""
def __getattr__(self, key):
try:
return getattr(super(ItemsAsAttributes, self), key)
except AttributeError as e:
# attempt to get the value from the mapping (return self[key])
# but be careful not to lose the original exception context.
noval = object()
def _safe_getitem(cont, key, missing_result):
try:
return cont[key]
except KeyError:
return missing_result
result = _safe_getitem(self, key, noval)
if result is not noval:
return result
# raise the original exception, but use the original class
# name, not 'super'.
message, = e.args
message = message.replace('super', self.__class__.__name__, 1)
e.args = message,
raise
def invert_map(map):
"""
Given a dictionary, return another dictionary with keys and values
switched. If any of the values resolve to the same key, raises
a ValueError.
>>> numbers = dict(a=1, b=2, c=3)
>>> letters = invert_map(numbers)
>>> letters[1]
'a'
>>> numbers['d'] = 3
>>> invert_map(numbers)
Traceback (most recent call last):
...
ValueError: Key conflict in inverted mapping
"""
res = dict((v, k) for k, v in map.items())
if not len(res) == len(map):
raise ValueError('Key conflict in inverted mapping')
return res
class IdentityOverrideMap(dict):
"""
A dictionary that by default maps each key to itself, but otherwise
acts like a normal dictionary.
>>> d = IdentityOverrideMap()
>>> d[42]
42
>>> d['speed'] = 'speedo'
>>> print(d['speed'])
speedo
"""
def __missing__(self, key):
return key
class DictStack(list, collections.abc.Mapping):
"""
A stack of dictionaries that behaves as a view on those dictionaries,
giving preference to the last.
>>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
>>> stack['a']
2
>>> stack['b']
2
>>> stack['c']
2
>>> stack.push(dict(a=3))
>>> stack['a']
3
>>> set(stack.keys()) == set(['a', 'b', 'c'])
True
>>> d = stack.pop()
>>> stack['a']
2
>>> d = stack.pop()
>>> stack['a']
1
>>> stack.get('b', None)
"""
def keys(self):
return list(set(itertools.chain.from_iterable(c.keys() for c in self)))
def __getitem__(self, key):
for scope in reversed(self):
if key in scope:
return scope[key]
raise KeyError(key)
push = list.append
class BijectiveMap(dict):
"""
A Bijective Map (two-way mapping).
Implemented as a simple dictionary of 2x the size, mapping values back
to keys.
Note, this implementation may be incomplete. If there's not a test for
your use case below, it's likely to fail, so please test and send pull
requests or patches for additional functionality needed.
>>> m = BijectiveMap()
>>> m['a'] = 'b'
>>> m == {'a': 'b', 'b': 'a'}
True
>>> print(m['b'])
a
>>> m['c'] = 'd'
>>> len(m)
2
Some weird things happen if you map an item to itself or overwrite a
single key of a pair, so it's disallowed.
>>> m['e'] = 'e'
Traceback (most recent call last):
ValueError: Key cannot map to itself
>>> m['d'] = 'e'
Traceback (most recent call last):
ValueError: Key/Value pairs may not overlap
>>> m['e'] = 'd'
Traceback (most recent call last):
ValueError: Key/Value pairs may not overlap
>>> print(m.pop('d'))
c
>>> 'c' in m
False
>>> m = BijectiveMap(dict(a='b'))
>>> len(m)
1
>>> print(m['b'])
a
>>> m = BijectiveMap()
>>> m.update(a='b')
>>> m['b']
'a'
>>> del m['b']
>>> len(m)
0
>>> 'a' in m
False
"""
def __init__(self, *args, **kwargs):
super(BijectiveMap, self).__init__()
self.update(*args, **kwargs)
def __setitem__(self, item, value):
if item == value:
raise ValueError("Key cannot map to itself")
overlap = (
item in self and self[item] != value
or
value in self and self[value] != item
)
if overlap:
raise ValueError("Key/Value pairs may not overlap")
super(BijectiveMap, self).__setitem__(item, value)
super(BijectiveMap, self).__setitem__(value, item)
def __delitem__(self, item):
self.pop(item)
def __len__(self):
return super(BijectiveMap, self).__len__() // 2
def pop(self, key, *args, **kwargs):
mirror = self[key]
super(BijectiveMap, self).__delitem__(mirror)
return super(BijectiveMap, self).pop(key, *args, **kwargs)
def update(self, *args, **kwargs):
# build a dictionary using the default constructs
d = dict(*args, **kwargs)
# build this dictionary using transformed keys.
for item in d.items():
self.__setitem__(*item)
class FrozenDict(collections.abc.Mapping, collections.abc.Hashable):
"""
An immutable mapping.
>>> a = FrozenDict(a=1, b=2)
>>> b = FrozenDict(a=1, b=2)
>>> a == b
True
>>> a == dict(a=1, b=2)
True
>>> dict(a=1, b=2) == a
True
>>> a['c'] = 3
Traceback (most recent call last):
...
TypeError: 'FrozenDict' object does not support item assignment
>>> a.update(y=3)
Traceback (most recent call last):
...
AttributeError: 'FrozenDict' object has no attribute 'update'
Copies should compare equal
>>> copy.copy(a) == a
True
Copies should be the same type
>>> isinstance(copy.copy(a), FrozenDict)
True
FrozenDict supplies .copy(), even though
collections.abc.Mapping doesn't demand it.
>>> a.copy() == a
True
>>> a.copy() is not a
True
"""
__slots__ = ['__data']
def __new__(cls, *args, **kwargs):
self = super(FrozenDict, cls).__new__(cls)
self.__data = dict(*args, **kwargs)
return self
# Container
def __contains__(self, key):
return key in self.__data
# Hashable
def __hash__(self):
return hash(tuple(sorted(self.__data.items())))
# Mapping
def __iter__(self):
return iter(self.__data)
def __len__(self):
return len(self.__data)
def __getitem__(self, key):
return self.__data[key]
# override get for efficiency provided by dict
def get(self, *args, **kwargs):
return self.__data.get(*args, **kwargs)
# override eq to recognize underlying implementation
def __eq__(self, other):
if isinstance(other, FrozenDict):
other = other.__data
return self.__data.__eq__(other)
def copy(self):
"Return a shallow copy of self"
return copy.copy(self)
class Enumeration(ItemsAsAttributes, BijectiveMap):
"""
A convenient way to provide enumerated values
>>> e = Enumeration('a b c')
>>> e['a']
0
>>> e.a
0
>>> e[1]
'b'
>>> set(e.names) == set('abc')
True
>>> set(e.codes) == set(range(3))
True
>>> e.get('d') is None
True
Codes need not start with 0
>>> e = Enumeration('a b c', range(1, 4))
>>> e['a']
1
>>> e[3]
'c'
"""
def __init__(self, names, codes=None):
if isinstance(names, six.string_types):
names = names.split()
if codes is None:
codes = itertools.count()
super(Enumeration, self).__init__(zip(names, codes))
@property
def names(self):
return (key for key in self if isinstance(key, six.string_types))
@property
def codes(self):
return (self[name] for name in self.names)
class Everything(object):
"""
A collection "containing" every possible thing.
>>> 'foo' in Everything()
True
>>> import random
>>> random.randint(1, 999) in Everything()
True
>>> random.choice([None, 'foo', 42, ('a', 'b', 'c')]) in Everything()
True
"""
def __contains__(self, other):
return True
class InstrumentedDict(six.moves.UserDict):
"""
Instrument an existing dictionary with additional
functionality, but always reference and mutate
the original dictionary.
>>> orig = {'a': 1, 'b': 2}
>>> inst = InstrumentedDict(orig)
>>> inst['a']
1
>>> inst['c'] = 3
>>> orig['c']
3
>>> inst.keys() == orig.keys()
True
"""
def __init__(self, data):
six.moves.UserDict.__init__(self)
self.data = data
class Least(object):
"""
A value that is always lesser than any other
>>> least = Least()
>>> 3 < least
False
>>> 3 > least
True
>>> least < 3
True
>>> least <= 3
True
>>> least > 3
False
>>> 'x' > least
True
>>> None > least
True
"""
def __le__(self, other):
return True
__lt__ = __le__
def __ge__(self, other):
return False
__gt__ = __ge__
class Greatest(object):
"""
A value that is always greater than any other
>>> greatest = Greatest()
>>> 3 < greatest
True
>>> 3 > greatest
False
>>> greatest < 3
False
>>> greatest > 3
True
>>> greatest >= 3
True
>>> 'x' > greatest
False
>>> None > greatest
False
"""
def __ge__(self, other):
return True
__gt__ = __ge__
def __le__(self, other):
return False
__lt__ = __le__

View file

@ -0,0 +1,459 @@
from __future__ import (
absolute_import, unicode_literals, print_function, division,
)
import functools
import time
import warnings
import inspect
import collections
from itertools import count
__metaclass__ = type
try:
from functools import lru_cache
except ImportError:
try:
from backports.functools_lru_cache import lru_cache
except ImportError:
try:
from functools32 import lru_cache
except ImportError:
warnings.warn("No lru_cache available")
import more_itertools.recipes
def compose(*funcs):
"""
Compose any number of unary functions into a single unary function.
>>> import textwrap
>>> from six import text_type
>>> stripped = text_type.strip(textwrap.dedent(compose.__doc__))
>>> compose(text_type.strip, textwrap.dedent)(compose.__doc__) == stripped
True
Compose also allows the innermost function to take arbitrary arguments.
>>> round_three = lambda x: round(x, ndigits=3)
>>> f = compose(round_three, int.__truediv__)
>>> [f(3*x, x+1) for x in range(1,10)]
[1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
"""
def compose_two(f1, f2):
return lambda *args, **kwargs: f1(f2(*args, **kwargs))
return functools.reduce(compose_two, funcs)
def method_caller(method_name, *args, **kwargs):
"""
Return a function that will call a named method on the
target object with optional positional and keyword
arguments.
>>> lower = method_caller('lower')
>>> lower('MyString')
'mystring'
"""
def call_method(target):
func = getattr(target, method_name)
return func(*args, **kwargs)
return call_method
def once(func):
"""
Decorate func so it's only ever called the first time.
This decorator can ensure that an expensive or non-idempotent function
will not be expensive on subsequent calls and is idempotent.
>>> add_three = once(lambda a: a+3)
>>> add_three(3)
6
>>> add_three(9)
6
>>> add_three('12')
6
To reset the stored value, simply clear the property ``saved_result``.
>>> del add_three.saved_result
>>> add_three(9)
12
>>> add_three(8)
12
Or invoke 'reset()' on it.
>>> add_three.reset()
>>> add_three(-3)
0
>>> add_three(0)
0
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not hasattr(wrapper, 'saved_result'):
wrapper.saved_result = func(*args, **kwargs)
return wrapper.saved_result
wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
return wrapper
def method_cache(method, cache_wrapper=None):
"""
Wrap lru_cache to support storing the cache data in the object instances.
Abstracts the common paradigm where the method explicitly saves an
underscore-prefixed protected property on first call and returns that
subsequently.
>>> class MyClass:
... calls = 0
...
... @method_cache
... def method(self, value):
... self.calls += 1
... return value
>>> a = MyClass()
>>> a.method(3)
3
>>> for x in range(75):
... res = a.method(x)
>>> a.calls
75
Note that the apparent behavior will be exactly like that of lru_cache
except that the cache is stored on each instance, so values in one
instance will not flush values from another, and when an instance is
deleted, so are the cached values for that instance.
>>> b = MyClass()
>>> for x in range(35):
... res = b.method(x)
>>> b.calls
35
>>> a.method(0)
0
>>> a.calls
75
Note that if method had been decorated with ``functools.lru_cache()``,
a.calls would have been 76 (due to the cached value of 0 having been
flushed by the 'b' instance).
Clear the cache with ``.cache_clear()``
>>> a.method.cache_clear()
Another cache wrapper may be supplied:
>>> cache = lru_cache(maxsize=2)
>>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
>>> a = MyClass()
>>> a.method2()
3
Caution - do not subsequently wrap the method with another decorator, such
as ``@property``, which changes the semantics of the function.
See also
http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
for another implementation and additional justification.
"""
cache_wrapper = cache_wrapper or lru_cache()
def wrapper(self, *args, **kwargs):
# it's the first call, replace the method with a cached, bound method
bound_method = functools.partial(method, self)
cached_method = cache_wrapper(bound_method)
setattr(self, method.__name__, cached_method)
return cached_method(*args, **kwargs)
return _special_method_cache(method, cache_wrapper) or wrapper
def _special_method_cache(method, cache_wrapper):
"""
Because Python treats special methods differently, it's not
possible to use instance attributes to implement the cached
methods.
Instead, install the wrapper method under a different name
and return a simple proxy to that wrapper.
https://github.com/jaraco/jaraco.functools/issues/5
"""
name = method.__name__
special_names = '__getattr__', '__getitem__'
if name not in special_names:
return
wrapper_name = '__cached' + name
def proxy(self, *args, **kwargs):
if wrapper_name not in vars(self):
bound = functools.partial(method, self)
cache = cache_wrapper(bound)
setattr(self, wrapper_name, cache)
else:
cache = getattr(self, wrapper_name)
return cache(*args, **kwargs)
return proxy
def apply(transform):
"""
Decorate a function with a transform function that is
invoked on results returned from the decorated function.
>>> @apply(reversed)
... def get_numbers(start):
... return range(start, start+3)
>>> list(get_numbers(4))
[6, 5, 4]
"""
def wrap(func):
return compose(transform, func)
return wrap
def result_invoke(action):
r"""
Decorate a function with an action function that is
invoked on the results returned from the decorated
function (for its side-effect), then return the original
result.
>>> @result_invoke(print)
... def add_two(a, b):
... return a + b
>>> x = add_two(2, 3)
5
"""
def wrap(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
action(result)
return result
return wrapper
return wrap
def call_aside(f, *args, **kwargs):
"""
Call a function for its side effect after initialization.
>>> @call_aside
... def func(): print("called")
called
>>> func()
called
Use functools.partial to pass parameters to the initial call
>>> @functools.partial(call_aside, name='bingo')
... def func(name): print("called with", name)
called with bingo
"""
f(*args, **kwargs)
return f
class Throttler:
"""
Rate-limit a function (or other callable)
"""
def __init__(self, func, max_rate=float('Inf')):
if isinstance(func, Throttler):
func = func.func
self.func = func
self.max_rate = max_rate
self.reset()
def reset(self):
self.last_called = 0
def __call__(self, *args, **kwargs):
self._wait()
return self.func(*args, **kwargs)
def _wait(self):
"ensure at least 1/max_rate seconds from last call"
elapsed = time.time() - self.last_called
must_wait = 1 / self.max_rate - elapsed
time.sleep(max(0, must_wait))
self.last_called = time.time()
def __get__(self, obj, type=None):
return first_invoke(self._wait, functools.partial(self.func, obj))
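def _throttler_sketch():
    """
    A minimal usage sketch: ``ping`` is a hypothetical callable; wrapping it
    in a Throttler with ``max_rate=2`` lets it run at most twice per second,
    sleeping between calls as needed. Not exercised on import.
    """
    def ping():
        print('ping')
    throttled_ping = Throttler(ping, max_rate=2)
    for _ in range(4):
        throttled_ping()  # successive calls are spaced roughly 0.5s apart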
def first_invoke(func1, func2):
"""
Return a function that when invoked will invoke func1 without
any parameters (for its side-effect) and then invoke func2
with whatever parameters were passed, returning its result.
"""
def wrapper(*args, **kwargs):
func1()
return func2(*args, **kwargs)
return wrapper
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
"""
Given a callable func, trap the indicated exceptions
for up to 'retries' times, invoking cleanup on the
exception. On the final attempt, allow any exceptions
to propagate.
"""
attempts = count() if retries == float('inf') else range(retries)
for attempt in attempts:
try:
return func()
except trap:
cleanup()
return func()
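def _retry_call_sketch():
    """
    A minimal sketch of ``retry_call``: ``fetch`` is a hypothetical flaky
    callable that fails twice before succeeding; trapping ``IOError`` with
    ``retries=3`` lets the third attempt return normally.
    """
    attempts = []
    def fetch():
        attempts.append(1)
        if len(attempts) < 3:
            raise IOError("transient failure")
        return "payload"
    return retry_call(fetch, retries=3, trap=IOError)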
def retry(*r_args, **r_kwargs):
"""
Decorator wrapper for retry_call. Accepts arguments to retry_call
except func and then returns a decorator for the decorated function.
Ex:
>>> @retry(retries=3)
... def my_func(a, b):
... "this is my funk"
... print(a, b)
>>> my_func.__doc__
'this is my funk'
"""
def decorate(func):
@functools.wraps(func)
def wrapper(*f_args, **f_kwargs):
bound = functools.partial(func, *f_args, **f_kwargs)
return retry_call(bound, *r_args, **r_kwargs)
return wrapper
return decorate
def print_yielded(func):
"""
Convert a generator into a function that prints all yielded elements
>>> @print_yielded
... def x():
... yield 3; yield None
>>> x()
3
None
"""
print_all = functools.partial(map, print)
print_results = compose(more_itertools.recipes.consume, print_all, func)
return functools.wraps(func)(print_results)
def pass_none(func):
"""
Wrap func so it's not called if its first param is None
>>> print_text = pass_none(print)
>>> print_text('text')
text
>>> print_text(None)
"""
@functools.wraps(func)
def wrapper(param, *args, **kwargs):
if param is not None:
return func(param, *args, **kwargs)
return wrapper
def assign_params(func, namespace):
"""
Assign parameters from namespace where func solicits.
>>> def func(x, y=3):
... print(x, y)
>>> assigned = assign_params(func, dict(x=2, z=4))
>>> assigned()
2 3
The usual errors are raised if a function doesn't receive
its required parameters:
>>> assigned = assign_params(func, dict(y=3, z=4))
>>> assigned()
Traceback (most recent call last):
TypeError: func() ...argument...
"""
try:
sig = inspect.signature(func)
params = sig.parameters.keys()
except AttributeError:
spec = inspect.getargspec(func)
params = spec.args
call_ns = {
k: namespace[k]
for k in params
if k in namespace
}
return functools.partial(func, **call_ns)
def save_method_args(method):
"""
Wrap a method such that when it is called, the args and kwargs are
saved on the method.
>>> class MyClass:
... @save_method_args
... def method(self, a, b):
... print(a, b)
>>> my_ob = MyClass()
>>> my_ob.method(1, 2)
1 2
>>> my_ob._saved_method.args
(1, 2)
>>> my_ob._saved_method.kwargs
{}
>>> my_ob.method(a=3, b='foo')
3 foo
>>> my_ob._saved_method.args
()
>>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
True
The arguments are stored on the instance, allowing for
different instances to save different args.
>>> your_ob = MyClass()
>>> your_ob.method({str('x'): 3}, b=[4])
{'x': 3} [4]
>>> your_ob._saved_method.args
({'x': 3},)
>>> my_ob._saved_method.args
()
"""
args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
attr_name = '_saved_' + method.__name__
attr = args_and_kwargs(args, kwargs)
setattr(self, attr_name, attr)
return method(self, *args, **kwargs)
return wrapper

View file

View file

@ -0,0 +1,151 @@
from __future__ import absolute_import, unicode_literals
import numbers
from functools import reduce
def get_bit_values(number, size=32):
"""
Get bit values as a list for a given number
>>> get_bit_values(1) == [0]*31 + [1]
True
>>> get_bit_values(0xDEADBEEF)
[1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1]
You may override the default word size of 32-bits to match your actual
application.
>>> get_bit_values(0x3, 2)
[1, 1]
>>> get_bit_values(0x3, 4)
[0, 0, 1, 1]
"""
number += 2**size
return list(map(int, bin(number)[-size:]))
def gen_bit_values(number):
"""
Return a zero or one for each bit of a numeric value up to the most
significant 1 bit, beginning with the least significant bit.
>>> list(gen_bit_values(16))
[0, 0, 0, 0, 1]
"""
digits = bin(number)[2:]
return map(int, reversed(digits))
def coalesce(bits):
"""
Take a sequence of bits, most significant first, and
coalesce them into a number.
>>> coalesce([1,0,1])
5
"""
operation = lambda a, b: (a << 1 | b)
return reduce(operation, bits)
class Flags(object):
"""
Subclasses should define _names, a list of flag names beginning
with the least-significant bit.
>>> class MyFlags(Flags):
... _names = 'a', 'b', 'c'
>>> mf = MyFlags.from_number(5)
>>> mf['a']
1
>>> mf['b']
0
>>> mf['c'] == mf[2]
True
>>> mf['b'] = 1
>>> mf['a'] = 0
>>> mf.number
6
"""
def __init__(self, values):
self._values = list(values)
if hasattr(self, '_names'):
n_missing_bits = len(self._names) - len(self._values)
self._values.extend([0] * n_missing_bits)
@classmethod
def from_number(cls, number):
return cls(gen_bit_values(number))
@property
def number(self):
return coalesce(reversed(self._values))
def __setitem__(self, key, value):
# first try by index, then by name
try:
self._values[key] = value
except TypeError:
index = self._names.index(key)
self._values[index] = value
def __getitem__(self, key):
# first try by index, then by name
try:
return self._values[key]
except TypeError:
index = self._names.index(key)
return self._values[index]
class BitMask(type):
"""
A metaclass to create a bitmask with attributes. Subclass an int and
set this as the metaclass to use.
Here's how to create such a class on Python 3:
class MyBits(int, metaclass=BitMask):
a = 0x1
b = 0x4
c = 0x3
For testing purposes, construct explicitly to support Python 2
>>> ns = dict(a=0x1, b=0x4, c=0x3)
>>> MyBits = BitMask(str('MyBits'), (int,), ns)
>>> b1 = MyBits(3)
>>> b1.a, b1.b, b1.c
(True, False, True)
>>> b2 = MyBits(8)
>>> any([b2.a, b2.b, b2.c])
False
If the class namespace defines methods or properties, they won't be
wrapped in bit-mask properties.
>>> ns['get_value'] = classmethod(lambda cls: 'some value')
>>> ns['prop'] = property(lambda self: 'a property')
>>> MyBits = BitMask(str('MyBits'), (int,), ns)
>>> MyBits(3).get_value()
'some value'
>>> MyBits(3).prop
'a property'
"""
def __new__(cls, name, bases, attrs):
def make_property(name, value):
if name.startswith('_') or not isinstance(value, numbers.Number):
return value
return property(lambda self, value=value: bool(self & value))
newattrs = dict(
(name, make_property(name, value))
for name, value in attrs.items()
)
return type.__new__(cls, name, bases, newattrs)

452
libs/win/jaraco/text.py Normal file
View file

@ -0,0 +1,452 @@
from __future__ import absolute_import, unicode_literals, print_function
import sys
import re
import inspect
import itertools
import textwrap
import functools
import six
import jaraco.collections
from jaraco.functools import compose
def substitution(old, new):
"""
Return a function that will perform a substitution on a string
"""
return lambda s: s.replace(old, new)
def multi_substitution(*substitutions):
"""
Take a sequence of pairs specifying substitutions, and create
a function that performs those substitutions.
>>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
'baz'
"""
substitutions = itertools.starmap(substitution, substitutions)
# compose function applies last function first, so reverse the
# substitutions to get the expected order.
substitutions = reversed(tuple(substitutions))
return compose(*substitutions)
class FoldedCase(six.text_type):
"""
A case insensitive string class; behaves just like str
except compares equal when the only variation is case.
>>> s = FoldedCase('hello world')
>>> s == 'Hello World'
True
>>> 'Hello World' == s
True
>>> s != 'Hello World'
False
>>> s.index('O')
4
>>> s.split('O')
['hell', ' w', 'rld']
>>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
['alpha', 'Beta', 'GAMMA']
Sequence membership is straightforward.
>>> "Hello World" in [s]
True
>>> s in ["Hello World"]
True
You may test for set inclusion, but candidate and elements
must both be folded.
>>> FoldedCase("Hello World") in {s}
True
>>> s in {FoldedCase("Hello World")}
True
String inclusion works as long as the FoldedCase object
is on the right.
>>> "hello" in FoldedCase("Hello World")
True
But not if the FoldedCase object is on the left:
>>> FoldedCase('hello') in 'Hello World'
False
In that case, use in_:
>>> FoldedCase('hello').in_('Hello World')
True
"""
def __lt__(self, other):
return self.lower() < other.lower()
def __gt__(self, other):
return self.lower() > other.lower()
def __eq__(self, other):
return self.lower() == other.lower()
def __ne__(self, other):
return self.lower() != other.lower()
def __hash__(self):
return hash(self.lower())
def __contains__(self, other):
return super(FoldedCase, self).lower().__contains__(other.lower())
def in_(self, other):
"Does self appear in other?"
return self in FoldedCase(other)
# cache lower since it's likely to be called frequently.
@jaraco.functools.method_cache
def lower(self):
return super(FoldedCase, self).lower()
def index(self, sub):
return self.lower().index(sub.lower())
def split(self, splitter=' ', maxsplit=0):
pattern = re.compile(re.escape(splitter), re.I)
return pattern.split(self, maxsplit)
def local_format(string):
"""
format the string using variables in the caller's local namespace.
>>> a = 3
>>> local_format("{a:5}")
' 3'
"""
context = inspect.currentframe().f_back.f_locals
if sys.version_info < (3, 2):
return string.format(**context)
return string.format_map(context)
def global_format(string):
"""
format the string using variables in the caller's global namespace.
>>> a = 3
>>> fmt = "The func name: {global_format.__name__}"
>>> global_format(fmt)
'The func name: global_format'
"""
context = inspect.currentframe().f_back.f_globals
if sys.version_info < (3, 2):
return string.format(**context)
return string.format_map(context)
def namespace_format(string):
"""
Format the string using variables in the caller's scope (locals + globals).
>>> a = 3
>>> fmt = "A is {a} and this func is {namespace_format.__name__}"
>>> namespace_format(fmt)
'A is 3 and this func is namespace_format'
"""
context = jaraco.collections.DictStack()
context.push(inspect.currentframe().f_back.f_globals)
context.push(inspect.currentframe().f_back.f_locals)
if sys.version_info < (3, 2):
return string.format(**context)
return string.format_map(context)
def is_decodable(value):
r"""
Return True if the supplied value is decodable (using the default
encoding).
>>> is_decodable(b'\xff')
False
>>> is_decodable(b'\x32')
True
"""
# TODO: This code could be expressed more concisely and directly
# with a jaraco.context.ExceptionTrap, but that adds an unfortunate
# long dependency tree, so for now, use boolean literals.
try:
value.decode()
except UnicodeDecodeError:
return False
return True
def is_binary(value):
"""
Return True if the value appears to be binary (that is, it's a byte
string and isn't decodable).
"""
return isinstance(value, bytes) and not is_decodable(value)
def trim(s):
r"""
Trim something like a docstring to remove the whitespace that
is common due to indentation and formatting.
>>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
'foo = bar\n\tbar = baz'
"""
return textwrap.dedent(s).strip()
class Splitter(object):
"""object that will split a string with the given arguments for each call
>>> s = Splitter(',')
>>> s('hello, world, this is your, master calling')
['hello', ' world', ' this is your', ' master calling']
"""
def __init__(self, *args):
self.args = args
def __call__(self, s):
return s.split(*self.args)
def indent(string, prefix=' ' * 4):
return prefix + string
class WordSet(tuple):
"""
Given a Python identifier, return the words that identifier represents,
whether in camel case, underscore-separated, etc.
>>> WordSet.parse("camelCase")
('camel', 'Case')
>>> WordSet.parse("under_sep")
('under', 'sep')
Acronyms should be retained
>>> WordSet.parse("firstSNL")
('first', 'SNL')
>>> WordSet.parse("you_and_I")
('you', 'and', 'I')
>>> WordSet.parse("A simple test")
('A', 'simple', 'test')
Multiple caps should not interfere with the first cap of another word.
>>> WordSet.parse("myABCClass")
('my', 'ABC', 'Class')
The result is a WordSet, so you can get the form you need.
>>> WordSet.parse("myABCClass").underscore_separated()
'my_ABC_Class'
>>> WordSet.parse('a-command').camel_case()
'ACommand'
>>> WordSet.parse('someIdentifier').lowered().space_separated()
'some identifier'
Slices of the result should return another WordSet.
>>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
'out_of_context'
>>> WordSet.from_class_name(WordSet()).lowered().space_separated()
'word set'
"""
_pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')
def capitalized(self):
return WordSet(word.capitalize() for word in self)
def lowered(self):
return WordSet(word.lower() for word in self)
def camel_case(self):
return ''.join(self.capitalized())
def headless_camel_case(self):
words = iter(self)
first = next(words).lower()
new_words = itertools.chain((first,), WordSet(words).camel_case())
return ''.join(new_words)
def underscore_separated(self):
return '_'.join(self)
def dash_separated(self):
return '-'.join(self)
def space_separated(self):
return ' '.join(self)
def __getitem__(self, item):
result = super(WordSet, self).__getitem__(item)
if isinstance(item, slice):
result = WordSet(result)
return result
# for compatibility with Python 2
def __getslice__(self, i, j):
return self.__getitem__(slice(i, j))
@classmethod
def parse(cls, identifier):
matches = cls._pattern.finditer(identifier)
return WordSet(match.group(0) for match in matches)
@classmethod
def from_class_name(cls, subject):
return cls.parse(subject.__class__.__name__)
# for backward compatibility
words = WordSet.parse
def simple_html_strip(s):
r"""
Remove HTML from the string `s`.
>>> str(simple_html_strip(''))
''
>>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
A stormy day in paradise
>>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
Somebody tell the truth.
>>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
What about
multiple lines?
"""
html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
texts = (
match.group(3) or ''
for match
in html_stripper.finditer(s)
)
return ''.join(texts)
class SeparatedValues(six.text_type):
"""
A string separated by a separator. Overrides __iter__ for getting
the values.
>>> list(SeparatedValues('a,b,c'))
['a', 'b', 'c']
Whitespace is stripped and empty values are discarded.
>>> list(SeparatedValues(' a, b , c, '))
['a', 'b', 'c']
"""
separator = ','
def __iter__(self):
parts = self.split(self.separator)
return six.moves.filter(None, (part.strip() for part in parts))
class Stripper:
r"""
Given a series of lines, find the common prefix and strip it from them.
>>> lines = [
... 'abcdefg\n',
... 'abc\n',
... 'abcde\n',
... ]
>>> res = Stripper.strip_prefix(lines)
>>> res.prefix
'abc'
>>> list(res.lines)
['defg\n', '\n', 'de\n']
If no prefix is common, nothing should be stripped.
>>> lines = [
... 'abcd\n',
... '1234\n',
... ]
>>> res = Stripper.strip_prefix(lines)
>>> res.prefix
''
>>> list(res.lines)
['abcd\n', '1234\n']
"""
def __init__(self, prefix, lines):
self.prefix = prefix
self.lines = map(self, lines)
@classmethod
def strip_prefix(cls, lines):
prefix_lines, lines = itertools.tee(lines)
prefix = functools.reduce(cls.common_prefix, prefix_lines)
return cls(prefix, lines)
def __call__(self, line):
if not self.prefix:
return line
null, prefix, rest = line.partition(self.prefix)
return rest
@staticmethod
def common_prefix(s1, s2):
"""
Return the common prefix of two lines.
"""
index = min(len(s1), len(s2))
while s1[:index] != s2[:index]:
index -= 1
return s1[:index]
def remove_prefix(text, prefix):
"""
Remove the prefix from the text if it exists.
>>> remove_prefix('underwhelming performance', 'underwhelming ')
'performance'
>>> remove_prefix('something special', 'sample')
'something special'
"""
null, prefix, rest = text.rpartition(prefix)
return rest
def remove_suffix(text, suffix):
"""
Remove the suffix from the text if it exists.
>>> remove_suffix('name.git', '.git')
'name'
>>> remove_suffix('something special', 'sample')
'something special'
"""
rest, suffix, null = text.partition(suffix)
return rest

View file

View file

@ -0,0 +1,77 @@
import argparse
import six
from jaraco.classes import meta
from jaraco import text
@six.add_metaclass(meta.LeafClassesMeta)
class Command(object):
"""
A general-purpose base class for creating commands for a command-line
program using argparse. Each subclass of Command represents a separate
sub-command of a program.
For example, one might use Command subclasses to implement the Mercurial
command set::
class Commit(Command):
@staticmethod
def add_arguments(cls, parser):
parser.add_argument('-m', '--message')
@classmethod
def run(cls, args):
"Run the 'commit' command with args (parsed)"
class Merge(Command): pass
class Pull(Command): pass
...
Then one could create an entry point for Mercurial like so::
def hg_command():
Command.invoke()
"""
@classmethod
def add_subparsers(cls, parser):
subparsers = parser.add_subparsers()
[cmd_class.add_parser(subparsers) for cmd_class in cls._leaf_classes]
@classmethod
def add_parser(cls, subparsers):
cmd_string = text.words(cls.__name__).lowered().dash_separated()
parser = subparsers.add_parser(cmd_string)
parser.set_defaults(action=cls)
cls.add_arguments(parser)
return parser
@classmethod
def add_arguments(cls, parser):
pass
@classmethod
def invoke(cls):
"""
Invoke the command using ArgumentParser
"""
parser = argparse.ArgumentParser()
cls.add_subparsers(parser)
args = parser.parse_args()
args.action.run(args)
class Extend(argparse.Action):
"""
Argparse action to take an nargs=* argument
and add any values to the existing value.
>>> parser = argparse.ArgumentParser()
>>> _ = parser.add_argument('--foo', nargs='*', default=[], action=Extend)
>>> args = parser.parse_args(['--foo', 'a=1', '--foo', 'b=2', 'c=3'])
>>> args.foo
['a=1', 'b=2', 'c=3']
"""
def __call__(self, parser, namespace, values, option_string=None):
getattr(namespace, self.dest).extend(values)

View file

@ -0,0 +1,108 @@
from __future__ import unicode_literals, absolute_import
import tempfile
import os
import sys
import subprocess
import mimetypes
import collections
import io
import difflib
import six
class EditProcessException(RuntimeError): pass
class EditableFile(object):
"""
EditableFile saves some data to a temporary file, launches a
platform editor for interactive editing, and then reloads the data,
setting .changed to True if the data was edited.
e.g.::
x = EditableFile('foo')
x.edit()
if x.changed:
print(x.data)
The EDITOR environment variable can define which executable to use
(also XML_EDITOR if the content-type to edit includes 'xml'). If no
EDITOR is defined, defaults to 'notepad' on Windows, 'vi' on Linux, and
'edit' on other platforms.
"""
platform_default_editors = collections.defaultdict(
lambda: 'edit',
win32 = 'notepad',
linux2 = 'vi',
)
encoding = 'utf-8'
def __init__(self, data='', content_type='text/plain'):
self.data = six.text_type(data)
self.content_type = content_type
def __enter__(self):
extension = mimetypes.guess_extension(self.content_type) or ''
fobj, self.name = tempfile.mkstemp(extension)
os.write(fobj, self.data.encode(self.encoding))
os.close(fobj)
return self
def read(self):
with open(self.name, 'rb') as f:
return f.read().decode(self.encoding)
def __exit__(self, *tb_info):
os.remove(self.name)
def edit(self):
"""
Edit the file
"""
self.changed = False
with self:
editor = self.get_editor()
cmd = [editor, self.name]
try:
res = subprocess.call(cmd)
except Exception as e:
print("Error launching editor %(editor)s" % locals())
print(e)
return
if res != 0:
msg = '%(editor)s returned error status %(res)d' % locals()
raise EditProcessException(msg)
new_data = self.read()
if new_data != self.data:
self.changed = self._save_diff(self.data, new_data)
self.data = new_data
@staticmethod
def _search_env(keys):
"""
Search the environment for the supplied keys, returning the first
one found or None if none was found.
"""
matches = (os.environ[key] for key in keys if key in os.environ)
return next(matches, None)
def get_editor(self):
"""
Give preference to an XML_EDITOR or EDITOR defined in the
environment. Otherwise use a default editor based on platform.
"""
env_search = ['EDITOR']
if 'xml' in self.content_type:
env_search.insert(0, 'XML_EDITOR')
default_editor = self.platform_default_editors[sys.platform]
return self._search_env(env_search) or default_editor
@staticmethod
def _save_diff(*versions):
def get_lines(content):
return list(io.StringIO(content))
lines = map(get_lines, versions)
diff = difflib.context_diff(*lines)
return tuple(diff)

View file

@ -0,0 +1,26 @@
"""
This module currently provides a cross-platform getch function
"""
try:
# Windows
from msvcrt import getch
except ImportError:
pass
try:
# Unix
import sys
import tty
import termios
def getch():
fd = sys.stdin.fileno()
old = termios.tcgetattr(fd)
try:
tty.setraw(fd)
return sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old)
except ImportError:
pass
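def _read_until_q():
    """
    A minimal usage sketch: collect single keypresses until 'q' is pressed.
    Assumes one of the platform-specific ``getch`` definitions above is
    available; the msvcrt variant returns bytes, the Unix variant text.
    """
    keys = []
    while True:
        key = getch()
        if key in ('q', b'q'):
            break
        keys.append(key)
    return keys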

View file

@ -0,0 +1,34 @@
from __future__ import print_function, absolute_import, unicode_literals
import itertools
import six
class Menu(object):
"""
A simple command-line based menu
"""
def __init__(self, choices=None, formatter=str):
self.choices = choices or list()
self.formatter = formatter
def get_choice(self, prompt="> "):
n = len(self.choices)
number_width = len(str(n)) + 1
menu_fmt = '{number:{number_width}}) {choice}'
formatted_choices = map(self.formatter, self.choices)
for number, choice in zip(itertools.count(1), formatted_choices):
print(menu_fmt.format(**locals()))
print()
try:
answer = int(six.moves.input(prompt))
result = self.choices[answer - 1]
except ValueError:
print('invalid selection')
result = None
except IndexError:
print('invalid selection')
result = None
except KeyboardInterrupt:
result = None
return result
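def _menu_sketch():
    """
    A minimal usage sketch: offer three hypothetical choices and return the
    selection, or ``None`` for invalid input or Ctrl-C.
    """
    menu = Menu(['red', 'green', 'blue'])
    return menu.get_choice(prompt='color> ')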

View file

@ -0,0 +1,152 @@
# deprecated -- use TQDM
from __future__ import (print_function, absolute_import, unicode_literals,
division)
import time
import sys
import itertools
import abc
import datetime
import six
@six.add_metaclass(abc.ABCMeta)
class AbstractProgressBar(object):
def __init__(self, unit='', size=70):
"""
Size is the nominal size in characters
"""
self.unit = unit
self.size = size
def report(self, amt):
sys.stdout.write('\r%s' % self.get_bar(amt))
sys.stdout.flush()
@abc.abstractmethod
def get_bar(self, amt):
"Return the string to be printed. Should be size >= self.size"
def summary(self, str):
return ' (' + self.unit_str(str) + ')'
def unit_str(self, str):
if self.unit:
str += ' ' + self.unit
return str
def finish(self):
print()
def __enter__(self):
self.report(0)
return self
def __exit__(self, exc, exc_val, tb):
if exc is None:
self.finish()
else:
print()
def iterate(self, iterable):
"""
Report the status as the iterable is consumed.
"""
with self:
for n, item in enumerate(iterable, 1):
self.report(n)
yield item
class SimpleProgressBar(AbstractProgressBar):
_PROG_DISPGLYPH = itertools.cycle(['|', '/', '-', '\\'])
def get_bar(self, amt):
bar = next(self._PROG_DISPGLYPH)
template = ' [{bar:^{bar_len}}]'
summary = self.summary('{amt}')
template += summary
empty = template.format(
bar='',
bar_len=0,
amt=amt,
)
bar_len = self.size - len(empty)
return template.format(**locals())
@classmethod
def demo(cls):
bar3 = cls(unit='cubes', size=30)
with bar3:
for x in six.moves.range(1, 759):
bar3.report(x)
time.sleep(0.01)
class TargetProgressBar(AbstractProgressBar):
def __init__(self, total=None, unit='', size=70):
"""
Size is the nominal size in characters
"""
self.total = total
super(TargetProgressBar, self).__init__(unit, size)
def get_bar(self, amt):
template = ' [{bar:<{bar_len}}]'
completed = amt / self.total
percent = int(completed * 100)
percent_str = ' {percent:3}%'
template += percent_str
summary = self.summary('{amt}/{total}')
template += summary
empty = template.format(
total=self.total,
bar='',
bar_len=0,
**locals()
)
bar_len = self.size - len(empty)
bar = '=' * int(completed * bar_len)
return template.format(total=self.total, **locals())
@classmethod
def demo(cls):
bar1 = cls(100, 'blocks')
with bar1:
for x in six.moves.range(1, 101):
bar1.report(x)
time.sleep(0.05)
bar2 = cls(758, size=50)
with bar2:
for x in six.moves.range(1, 759):
bar2.report(x)
time.sleep(0.01)
def finish(self):
self.report(self.total)
super(TargetProgressBar, self).finish()
def countdown(template, duration=datetime.timedelta(seconds=5)):
"""
Do a countdown for duration, printing the template (which may accept one
positional argument). Template should be something like
``countdown complete in {} seconds.``
"""
now = datetime.datetime.now()
deadline = now + duration
remaining = deadline - datetime.datetime.now()
while remaining:
remaining = deadline - datetime.datetime.now()
remaining = max(datetime.timedelta(), remaining)
msg = template.format(remaining.total_seconds())
print(msg, end=' '*10)
sys.stdout.flush()
time.sleep(.1)
print('\b'*80, end='')
sys.stdout.flush()
print()

View file

@ -0,0 +1,13 @@
#!/usr/bin/env python
# $Id$
"""
jaraco.windows
A lightweight wrapper to provide a pythonic API to the Windows Platform.
This package attempts to provide interfaces similar or compatible
with Mark Hammond's pywin32 library, but avoids the use of extension
modules by utilizing ctypes.
"""

View file

View file

@ -0,0 +1,53 @@
import ctypes.wintypes
# Clipboard Formats
CF_TEXT = 1
CF_BITMAP = 2
CF_METAFILEPICT = 3
CF_SYLK = 4
CF_DIF = 5
CF_TIFF = 6
CF_OEMTEXT = 7
CF_DIB = 8
CF_PALETTE = 9
CF_PENDATA = 10
CF_RIFF = 11
CF_WAVE = 12
CF_UNICODETEXT = 13
CF_ENHMETAFILE = 14
CF_HDROP = 15
CF_LOCALE = 16
CF_DIBV5 = 17
CF_MAX = 18
CF_OWNERDISPLAY = 0x0080
CF_DSPTEXT = 0x0081
CF_DSPBITMAP = 0x0082
CF_DSPMETAFILEPICT = 0x0083
CF_DSPENHMETAFILE = 0x008E
CF_PRIVATEFIRST = 0x0200
CF_PRIVATELAST = 0x02FF
CF_GDIOBJFIRST = 0x0300
CF_GDIOBJLAST = 0x03FF
RegisterClipboardFormat = ctypes.windll.user32.RegisterClipboardFormatW
RegisterClipboardFormat.argtypes = ctypes.wintypes.LPWSTR,
RegisterClipboardFormat.restype = ctypes.wintypes.UINT
CF_HTML = RegisterClipboardFormat('HTML Format')
EnumClipboardFormats = ctypes.windll.user32.EnumClipboardFormats
EnumClipboardFormats.argtypes = ctypes.wintypes.UINT,
EnumClipboardFormats.restype = ctypes.wintypes.UINT
GetClipboardData = ctypes.windll.user32.GetClipboardData
GetClipboardData.argtypes = ctypes.wintypes.UINT,
GetClipboardData.restype = ctypes.wintypes.HANDLE
SetClipboardData = ctypes.windll.user32.SetClipboardData
SetClipboardData.argtypes = ctypes.wintypes.UINT, ctypes.wintypes.HANDLE
SetClipboardData.restype = ctypes.wintypes.HANDLE
OpenClipboard = ctypes.windll.user32.OpenClipboard
OpenClipboard.argtypes = ctypes.wintypes.HANDLE,
OpenClipboard.restype = ctypes.wintypes.BOOL
ctypes.windll.user32.CloseClipboard.restype = ctypes.wintypes.BOOL
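def _enumerate_clipboard_formats():
    """
    A minimal usage sketch (assumes a Windows host): open the clipboard,
    collect the format identifiers currently on offer, and close it again.
    """
    if not OpenClipboard(None):
        raise ctypes.WinError()
    formats = []
    try:
        fmt = EnumClipboardFormats(0)
        while fmt:
            formats.append(fmt)
            fmt = EnumClipboardFormats(fmt)
    finally:
        ctypes.windll.user32.CloseClipboard()
    return formats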

View file

@ -0,0 +1,62 @@
"""
Support for Credential Vault
"""
import ctypes
from ctypes.wintypes import DWORD, LPCWSTR, BOOL, LPWSTR, FILETIME
try:
from ctypes.wintypes import LPBYTE
except ImportError:
LPBYTE = ctypes.POINTER(ctypes.wintypes.BYTE)
class CredentialAttribute(ctypes.Structure):
_fields_ = []
class Credential(ctypes.Structure):
_fields_ = [
('flags', DWORD),
('type', DWORD),
('target_name', LPWSTR),
('comment', LPWSTR),
('last_written', FILETIME),
('credential_blob_size', DWORD),
('credential_blob', LPBYTE),
('persist', DWORD),
('attribute_count', DWORD),
('attributes', ctypes.POINTER(CredentialAttribute)),
('target_alias', LPWSTR),
('user_name', LPWSTR),
]
def __del__(self):
ctypes.windll.advapi32.CredFree(ctypes.byref(self))
PCREDENTIAL = ctypes.POINTER(Credential)
CredRead = ctypes.windll.advapi32.CredReadW
CredRead.argtypes = (
LPCWSTR, # TargetName
DWORD, # Type
DWORD, # Flags
ctypes.POINTER(PCREDENTIAL), # Credential
)
CredRead.restype = BOOL
CredWrite = ctypes.windll.advapi32.CredWriteW
CredWrite.argtypes = (
PCREDENTIAL, # Credential
DWORD, # Flags
)
CredWrite.restype = BOOL
CredDelete = ctypes.windll.advapi32.CredDeleteW
CredDelete.argtypes = (
LPCWSTR, # TargetName
DWORD, # Type
DWORD, # Flags
)
CredDelete.restype = BOOL
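# CRED_TYPE_GENERIC comes from wincred.h; it is not defined above and is
# included only for the sketch below.
CRED_TYPE_GENERIC = 1

def _read_generic_credential(target_name):
    """
    A minimal sketch (assumes a Windows host): read a generic credential by
    target name and return the stored user name, raising on failure.
    """
    cred_pointer = PCREDENTIAL()
    ok = CredRead(target_name, CRED_TYPE_GENERIC, 0, ctypes.byref(cred_pointer))
    if not ok:
        raise ctypes.WinError()
    return cred_pointer.contents.user_name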

View file

@ -0,0 +1,13 @@
import ctypes.wintypes
SetEnvironmentVariable = ctypes.windll.kernel32.SetEnvironmentVariableW
SetEnvironmentVariable.restype = ctypes.wintypes.BOOL
SetEnvironmentVariable.argtypes = [ctypes.wintypes.LPCWSTR] * 2
GetEnvironmentVariable = ctypes.windll.kernel32.GetEnvironmentVariableW
GetEnvironmentVariable.restype = ctypes.wintypes.BOOL
GetEnvironmentVariable.argtypes = [
ctypes.wintypes.LPCWSTR,
ctypes.wintypes.LPWSTR,
ctypes.wintypes.DWORD,
]
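def _roundtrip_environment_variable(name, value):
    """
    A minimal sketch (assumes a Windows host): set a process environment
    variable through the Win32 API and read it back into a Python string.
    The return values are treated only as success flags here.
    """
    if not SetEnvironmentVariable(name, value):
        raise ctypes.WinError()
    buf = ctypes.create_unicode_buffer(2**10)
    if not GetEnvironmentVariable(name, buf, len(buf)):
        raise ctypes.WinError()
    return buf.value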

View file

@ -0,0 +1,7 @@
# from error.h
NO_ERROR = 0
ERROR_INSUFFICIENT_BUFFER = 122
ERROR_BUFFER_OVERFLOW = 111
ERROR_NO_DATA = 232
ERROR_INVALID_PARAMETER = 87
ERROR_NOT_SUPPORTED = 50

View file

@ -0,0 +1,38 @@
from ctypes import windll, POINTER
from ctypes.wintypes import (
LPWSTR, DWORD, LPVOID, HANDLE, BOOL,
)
CreateEvent = windll.kernel32.CreateEventW
CreateEvent.argtypes = (
LPVOID, # LPSECURITY_ATTRIBUTES
BOOL,
BOOL,
LPWSTR,
)
CreateEvent.restype = HANDLE
SetEvent = windll.kernel32.SetEvent
SetEvent.argtypes = HANDLE,
SetEvent.restype = BOOL
WaitForSingleObject = windll.kernel32.WaitForSingleObject
WaitForSingleObject.argtypes = HANDLE, DWORD
WaitForSingleObject.restype = DWORD
_WaitForMultipleObjects = windll.kernel32.WaitForMultipleObjects
_WaitForMultipleObjects.argtypes = DWORD, POINTER(HANDLE), BOOL, DWORD
_WaitForMultipleObjects.restype = DWORD
def WaitForMultipleObjects(handles, wait_all=False, timeout=0):
n_handles = len(handles)
handle_array = (HANDLE * n_handles)()
for index, handle in enumerate(handles):
handle_array[index] = handle
return _WaitForMultipleObjects(n_handles, handle_array, wait_all, timeout)
WAIT_OBJECT_0 = 0
INFINITE = -1
WAIT_TIMEOUT = 0x102
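def _signal_and_wait_sketch():
    """
    A minimal sketch (assumes a Windows host): create an unnamed manual-reset
    event, signal it, and confirm that a zero-timeout wait sees it signaled.
    """
    handle = CreateEvent(None, True, False, None)
    if not handle:
        raise RuntimeError("CreateEvent failed")
    try:
        SetEvent(handle)
        return WaitForSingleObject(handle, 0) == WAIT_OBJECT_0
    finally:
        windll.kernel32.CloseHandle(handle)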

View file

@ -0,0 +1,317 @@
import ctypes.wintypes
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.argtypes = (
ctypes.wintypes.LPWSTR,
ctypes.wintypes.LPWSTR,
ctypes.wintypes.DWORD,
)
CreateSymbolicLink.restype = ctypes.wintypes.BOOLEAN
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.argtypes = (
ctypes.wintypes.LPWSTR,
ctypes.wintypes.LPWSTR,
ctypes.wintypes.LPVOID, # reserved for LPSECURITY_ATTRIBUTES
)
CreateHardLink.restype = ctypes.wintypes.BOOLEAN
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
GetFileAttributes.argtypes = ctypes.wintypes.LPWSTR,
GetFileAttributes.restype = ctypes.wintypes.DWORD
SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
SetFileAttributes.restype = ctypes.wintypes.BOOL
MAX_PATH = 260
GetFinalPathNameByHandle = ctypes.windll.kernel32.GetFinalPathNameByHandleW
GetFinalPathNameByHandle.argtypes = (
ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
)
GetFinalPathNameByHandle.restype = ctypes.wintypes.DWORD
class SECURITY_ATTRIBUTES(ctypes.Structure):
_fields_ = (
('length', ctypes.wintypes.DWORD),
('p_security_descriptor', ctypes.wintypes.LPVOID),
('inherit_handle', ctypes.wintypes.BOOLEAN),
)
LPSECURITY_ATTRIBUTES = ctypes.POINTER(SECURITY_ATTRIBUTES)
CreateFile = ctypes.windll.kernel32.CreateFileW
CreateFile.argtypes = (
ctypes.wintypes.LPWSTR,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
LPSECURITY_ATTRIBUTES,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.wintypes.HANDLE,
)
CreateFile.restype = ctypes.wintypes.HANDLE
FILE_SHARE_READ = 1
FILE_SHARE_WRITE = 2
FILE_SHARE_DELETE = 4
FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
FILE_FLAG_BACKUP_SEMANTICS = 0x2000000
NULL = 0
OPEN_EXISTING = 3
FILE_ATTRIBUTE_READONLY = 0x1
FILE_ATTRIBUTE_HIDDEN = 0x2
FILE_ATTRIBUTE_DIRECTORY = 0x10
FILE_ATTRIBUTE_NORMAL = 0x80
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
GENERIC_READ = 0x80000000
FILE_READ_ATTRIBUTES = 0x80
INVALID_HANDLE_VALUE = ctypes.wintypes.HANDLE(-1).value
INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
ERROR_NO_MORE_FILES = 0x12
VOLUME_NAME_DOS = 0
CloseHandle = ctypes.windll.kernel32.CloseHandle
CloseHandle.argtypes = (ctypes.wintypes.HANDLE,)
CloseHandle.restype = ctypes.wintypes.BOOLEAN
class WIN32_FIND_DATA(ctypes.wintypes.WIN32_FIND_DATAW):
"""
_fields_ = [
("dwFileAttributes", DWORD),
("ftCreationTime", FILETIME),
("ftLastAccessTime", FILETIME),
("ftLastWriteTime", FILETIME),
("nFileSizeHigh", DWORD),
("nFileSizeLow", DWORD),
("dwReserved0", DWORD),
("dwReserved1", DWORD),
("cFileName", WCHAR * MAX_PATH),
("cAlternateFileName", WCHAR * 14)]
]
"""
@property
def file_attributes(self):
return self.dwFileAttributes
@property
def creation_time(self):
return self.ftCreationTime
@property
def last_access_time(self):
return self.ftLastAccessTime
@property
def last_write_time(self):
return self.ftLastWriteTime
@property
def file_size_words(self):
return [self.nFileSizeHigh, self.nFileSizeLow]
@property
def reserved(self):
return [self.dwReserved0, self.dwReserved1]
@property
def filename(self):
return self.cFileName
@property
def alternate_filename(self):
return self.cAlternateFileName
@property
def file_size(self):
return (self.nFileSizeHigh << 32) + self.nFileSizeLow
LPWIN32_FIND_DATA = ctypes.POINTER(ctypes.wintypes.WIN32_FIND_DATAW)
FindFirstFile = ctypes.windll.kernel32.FindFirstFileW
FindFirstFile.argtypes = (ctypes.wintypes.LPWSTR, LPWIN32_FIND_DATA)
FindFirstFile.restype = ctypes.wintypes.HANDLE
FindNextFile = ctypes.windll.kernel32.FindNextFileW
FindNextFile.argtypes = (ctypes.wintypes.HANDLE, LPWIN32_FIND_DATA)
FindNextFile.restype = ctypes.wintypes.BOOLEAN
ctypes.windll.kernel32.FindClose.argtypes = ctypes.wintypes.HANDLE,
SCS_32BIT_BINARY = 0 # A 32-bit Windows-based application
SCS_64BIT_BINARY = 6 # A 64-bit Windows-based application
SCS_DOS_BINARY = 1 # An MS-DOS-based application
SCS_OS216_BINARY = 5 # A 16-bit OS/2-based application
SCS_PIF_BINARY = 3 # A PIF file that executes an MS-DOS-based application
SCS_POSIX_BINARY = 4 # A POSIX-based application
SCS_WOW_BINARY = 2 # A 16-bit Windows-based application
_GetBinaryType = ctypes.windll.kernel32.GetBinaryTypeW
_GetBinaryType.argtypes = (
ctypes.wintypes.LPWSTR, ctypes.POINTER(ctypes.wintypes.DWORD),
)
_GetBinaryType.restype = ctypes.wintypes.BOOL
FILEOP_FLAGS = ctypes.wintypes.WORD
class BY_HANDLE_FILE_INFORMATION(ctypes.Structure):
_fields_ = [
('file_attributes', ctypes.wintypes.DWORD),
('creation_time', ctypes.wintypes.FILETIME),
('last_access_time', ctypes.wintypes.FILETIME),
('last_write_time', ctypes.wintypes.FILETIME),
('volume_serial_number', ctypes.wintypes.DWORD),
('file_size_high', ctypes.wintypes.DWORD),
('file_size_low', ctypes.wintypes.DWORD),
('number_of_links', ctypes.wintypes.DWORD),
('file_index_high', ctypes.wintypes.DWORD),
('file_index_low', ctypes.wintypes.DWORD),
]
@property
def file_size(self):
return (self.file_size_high << 32) + self.file_size_low
@property
def file_index(self):
return (self.file_index_high << 32) + self.file_index_low
GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle
GetFileInformationByHandle.restype = ctypes.wintypes.BOOL
GetFileInformationByHandle.argtypes = (
ctypes.wintypes.HANDLE,
ctypes.POINTER(BY_HANDLE_FILE_INFORMATION),
)
class SHFILEOPSTRUCT(ctypes.Structure):
_fields_ = [
('status_dialog', ctypes.wintypes.HWND),
('operation', ctypes.wintypes.UINT),
('from_', ctypes.wintypes.LPWSTR),
('to', ctypes.wintypes.LPWSTR),
('flags', FILEOP_FLAGS),
('operations_aborted', ctypes.wintypes.BOOL),
('name_mapping_handles', ctypes.wintypes.LPVOID),
('progress_title', ctypes.wintypes.LPWSTR),
]
_SHFileOperation = ctypes.windll.shell32.SHFileOperationW
_SHFileOperation.argtypes = [ctypes.POINTER(SHFILEOPSTRUCT)]
_SHFileOperation.restype = ctypes.c_int
FOF_ALLOWUNDO = 64
FOF_NOCONFIRMATION = 16
FO_DELETE = 3
ReplaceFile = ctypes.windll.kernel32.ReplaceFileW
ReplaceFile.restype = ctypes.wintypes.BOOL
ReplaceFile.argtypes = [
ctypes.wintypes.LPWSTR,
ctypes.wintypes.LPWSTR,
ctypes.wintypes.LPWSTR,
ctypes.wintypes.DWORD,
ctypes.wintypes.LPVOID,
ctypes.wintypes.LPVOID,
]
REPLACEFILE_WRITE_THROUGH = 0x1
REPLACEFILE_IGNORE_MERGE_ERRORS = 0x2
REPLACEFILE_IGNORE_ACL_ERRORS = 0x4
class STAT_STRUCT(ctypes.Structure):
_fields_ = [
('dev', ctypes.c_uint),
('ino', ctypes.c_ushort),
('mode', ctypes.c_ushort),
('nlink', ctypes.c_short),
('uid', ctypes.c_short),
('gid', ctypes.c_short),
('rdev', ctypes.c_uint),
# the following 4 fields are ctypes.c_uint64 for _stat64
('size', ctypes.c_uint),
('atime', ctypes.c_uint),
('mtime', ctypes.c_uint),
('ctime', ctypes.c_uint),
]
_wstat = ctypes.windll.msvcrt._wstat
_wstat.argtypes = [ctypes.wintypes.LPWSTR, ctypes.POINTER(STAT_STRUCT)]
_wstat.restype = ctypes.c_int
FILE_NOTIFY_CHANGE_LAST_WRITE = 0x10
FindFirstChangeNotification = (
ctypes.windll.kernel32.FindFirstChangeNotificationW)
FindFirstChangeNotification.argtypes = (
ctypes.wintypes.LPWSTR, ctypes.wintypes.BOOL, ctypes.wintypes.DWORD,
)
FindFirstChangeNotification.restype = ctypes.wintypes.HANDLE
FindCloseChangeNotification = (
ctypes.windll.kernel32.FindCloseChangeNotification)
FindCloseChangeNotification.argtypes = ctypes.wintypes.HANDLE,
FindCloseChangeNotification.restype = ctypes.wintypes.BOOL
FindNextChangeNotification = ctypes.windll.kernel32.FindNextChangeNotification
FindNextChangeNotification.argtypes = ctypes.wintypes.HANDLE,
FindNextChangeNotification.restype = ctypes.wintypes.BOOL
FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
IO_REPARSE_TAG_SYMLINK = 0xA000000C
FSCTL_GET_REPARSE_POINT = 0x900a8
LPDWORD = ctypes.POINTER(ctypes.wintypes.DWORD)
LPOVERLAPPED = ctypes.wintypes.LPVOID
DeviceIoControl = ctypes.windll.kernel32.DeviceIoControl
DeviceIoControl.argtypes = [
ctypes.wintypes.HANDLE,
ctypes.wintypes.DWORD,
ctypes.wintypes.LPVOID,
ctypes.wintypes.DWORD,
ctypes.wintypes.LPVOID,
ctypes.wintypes.DWORD,
LPDWORD,
LPOVERLAPPED,
]
DeviceIoControl.restype = ctypes.wintypes.BOOL
class REPARSE_DATA_BUFFER(ctypes.Structure):
_fields_ = [
('tag', ctypes.c_ulong),
('data_length', ctypes.c_ushort),
('reserved', ctypes.c_ushort),
('substitute_name_offset', ctypes.c_ushort),
('substitute_name_length', ctypes.c_ushort),
('print_name_offset', ctypes.c_ushort),
('print_name_length', ctypes.c_ushort),
('flags', ctypes.c_ulong),
('path_buffer', ctypes.c_byte * 1),
]
def get_print_name(self):
wchar_size = ctypes.sizeof(ctypes.wintypes.WCHAR)
arr_typ = ctypes.wintypes.WCHAR * (self.print_name_length // wchar_size)
data = ctypes.byref(self.path_buffer, self.print_name_offset)
return ctypes.cast(data, ctypes.POINTER(arr_typ)).contents.value
def get_substitute_name(self):
wchar_size = ctypes.sizeof(ctypes.wintypes.WCHAR)
arr_typ = ctypes.wintypes.WCHAR * (self.substitute_name_length // wchar_size)
data = ctypes.byref(self.path_buffer, self.substitute_name_offset)
return ctypes.cast(data, ctypes.POINTER(arr_typ)).contents.value
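# --- Hedged usage sketch (an illustrative addition, not part of the original
# module): given a handle opened with FILE_FLAG_OPEN_REPARSE_POINT (and
# FILE_FLAG_BACKUP_SEMANTICS for directories), read the reparse data and
# return the symlink's print name. The 16 KiB buffer size and the helper
# name are assumptions.
def _read_symlink_print_name(handle):
    out = ctypes.create_string_buffer(16 * 1024)
    returned = ctypes.wintypes.DWORD()
    res = DeviceIoControl(
        handle, FSCTL_GET_REPARSE_POINT, None, 0,
        out, len(out), returned, None)
    if not res:
        raise ctypes.WinError()
    rdb = ctypes.cast(out, ctypes.POINTER(REPARSE_DATA_BUFFER)).contents
    if rdb.tag != IO_REPARSE_TAG_SYMLINK:
        raise ValueError('not a symlink reparse point')
    return rdb.get_print_name()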

View file

@@ -0,0 +1,217 @@
import struct
import ctypes.wintypes
from ctypes.wintypes import DWORD, WCHAR, BYTE, BOOL
# from mprapi.h
MAX_INTERFACE_NAME_LEN = 2**8
# from iprtrmib.h
MAXLEN_PHYSADDR = 2**3
MAXLEN_IFDESCR = 2**8
# from iptypes.h
MAX_ADAPTER_ADDRESS_LENGTH = 8
MAX_DHCPV6_DUID_LENGTH = 130
class MIB_IFROW(ctypes.Structure):
_fields_ = (
('name', WCHAR * MAX_INTERFACE_NAME_LEN),
('index', DWORD),
('type', DWORD),
('MTU', DWORD),
('speed', DWORD),
('physical_address_length', DWORD),
('physical_address_raw', BYTE * MAXLEN_PHYSADDR),
('admin_status', DWORD),
('operational_status', DWORD),
('last_change', DWORD),
('octets_received', DWORD),
('unicast_packets_received', DWORD),
('non_unicast_packets_received', DWORD),
('incoming_discards', DWORD),
('incoming_errors', DWORD),
('incoming_unknown_protocols', DWORD),
('octets_sent', DWORD),
('unicast_packets_sent', DWORD),
('non_unicast_packets_sent', DWORD),
('outgoing_discards', DWORD),
('outgoing_errors', DWORD),
('outgoing_queue_length', DWORD),
('description_length', DWORD),
('description_raw', ctypes.c_char * MAXLEN_IFDESCR),
)
def _get_binary_property(self, name):
val_prop = '{0}_raw'.format(name)
val = getattr(self, val_prop)
len_prop = '{0}_length'.format(name)
length = getattr(self, len_prop)
        # tobytes() so the slice is the raw field contents, not the memoryview's repr
        return memoryview(val).tobytes()[:length]
@property
def physical_address(self):
return self._get_binary_property('physical_address')
@property
def description(self):
return self._get_binary_property('description')
class MIB_IFTABLE(ctypes.Structure):
_fields_ = (
('num_entries', DWORD), # dwNumEntries
('entries', MIB_IFROW * 0), # table
)
class MIB_IPADDRROW(ctypes.Structure):
_fields_ = (
('address_num', DWORD),
('index', DWORD),
('mask', DWORD),
('broadcast_address', DWORD),
('reassembly_size', DWORD),
('unused', ctypes.c_ushort),
('type', ctypes.c_ushort),
)
    @property
    def address(self):
        "The address in big-endian (network) byte order"
        packed = struct.pack('<L', self.address_num)  # '<L' guarantees a 4-byte field
        return struct.unpack('!L', packed)[0]
class MIB_IPADDRTABLE(ctypes.Structure):
_fields_ = (
('num_entries', DWORD),
('entries', MIB_IPADDRROW * 0),
)
class SOCKADDR(ctypes.Structure):
_fields_ = (
('family', ctypes.c_ushort),
('data', ctypes.c_byte * 14),
)
LPSOCKADDR = ctypes.POINTER(SOCKADDR)
class SOCKET_ADDRESS(ctypes.Structure):
_fields_ = [
('address', LPSOCKADDR),
('length', ctypes.c_int),
]
class _IP_ADAPTER_ADDRESSES_METRIC(ctypes.Structure):
_fields_ = [
('length', ctypes.c_ulong),
('interface_index', DWORD),
]
class _IP_ADAPTER_ADDRESSES_U1(ctypes.Union):
_fields_ = [
('alignment', ctypes.c_ulonglong),
('metric', _IP_ADAPTER_ADDRESSES_METRIC),
]
class IP_ADAPTER_ADDRESSES(ctypes.Structure):
pass
LP_IP_ADAPTER_ADDRESSES = ctypes.POINTER(IP_ADAPTER_ADDRESSES)
# for now, just use void * for pointers to unused structures
PIP_ADAPTER_UNICAST_ADDRESS = ctypes.c_void_p
PIP_ADAPTER_ANYCAST_ADDRESS = ctypes.c_void_p
PIP_ADAPTER_MULTICAST_ADDRESS = ctypes.c_void_p
PIP_ADAPTER_DNS_SERVER_ADDRESS = ctypes.c_void_p
PIP_ADAPTER_PREFIX = ctypes.c_void_p
PIP_ADAPTER_WINS_SERVER_ADDRESS_LH = ctypes.c_void_p
PIP_ADAPTER_GATEWAY_ADDRESS_LH = ctypes.c_void_p
PIP_ADAPTER_DNS_SUFFIX = ctypes.c_void_p
IF_OPER_STATUS = ctypes.c_uint # this is an enum, consider
# http://code.activestate.com/recipes/576415/
IF_LUID = ctypes.c_uint64
NET_IF_COMPARTMENT_ID = ctypes.c_uint32
GUID = ctypes.c_byte * 16
NET_IF_NETWORK_GUID = GUID
NET_IF_CONNECTION_TYPE = ctypes.c_uint # enum
TUNNEL_TYPE = ctypes.c_uint # enum
IP_ADAPTER_ADDRESSES._fields_ = [
# ('u', _IP_ADAPTER_ADDRESSES_U1),
('length', ctypes.c_ulong),
('interface_index', DWORD),
('next', LP_IP_ADAPTER_ADDRESSES),
('adapter_name', ctypes.c_char_p),
('first_unicast_address', PIP_ADAPTER_UNICAST_ADDRESS),
('first_anycast_address', PIP_ADAPTER_ANYCAST_ADDRESS),
('first_multicast_address', PIP_ADAPTER_MULTICAST_ADDRESS),
('first_dns_server_address', PIP_ADAPTER_DNS_SERVER_ADDRESS),
('dns_suffix', ctypes.c_wchar_p),
('description', ctypes.c_wchar_p),
('friendly_name', ctypes.c_wchar_p),
    ('byte', BYTE * MAX_ADAPTER_ADDRESS_LENGTH),  # PhysicalAddress in the SDK struct
('physical_address_length', DWORD),
('flags', DWORD),
('mtu', DWORD),
('interface_type', DWORD),
('oper_status', IF_OPER_STATUS),
('ipv6_interface_index', DWORD),
    ('zone_indices', DWORD * 16),  # the SDK declares ZoneIndices[16]
('first_prefix', PIP_ADAPTER_PREFIX),
('transmit_link_speed', ctypes.c_uint64),
('receive_link_speed', ctypes.c_uint64),
('first_wins_server_address', PIP_ADAPTER_WINS_SERVER_ADDRESS_LH),
('first_gateway_address', PIP_ADAPTER_GATEWAY_ADDRESS_LH),
('ipv4_metric', ctypes.c_ulong),
('ipv6_metric', ctypes.c_ulong),
('luid', IF_LUID),
('dhcpv4_server', SOCKET_ADDRESS),
('compartment_id', NET_IF_COMPARTMENT_ID),
('network_guid', NET_IF_NETWORK_GUID),
('connection_type', NET_IF_CONNECTION_TYPE),
('tunnel_type', TUNNEL_TYPE),
('dhcpv6_server', SOCKET_ADDRESS),
('dhcpv6_client_duid', ctypes.c_byte * MAX_DHCPV6_DUID_LENGTH),
('dhcpv6_client_duid_length', ctypes.c_ulong),
('dhcpv6_iaid', ctypes.c_ulong),
('first_dns_suffix', PIP_ADAPTER_DNS_SUFFIX),
]
# define some parameters to the API Functions
GetIfTable = ctypes.windll.iphlpapi.GetIfTable
GetIfTable.argtypes = [
ctypes.POINTER(MIB_IFTABLE),
ctypes.POINTER(ctypes.c_ulong),
BOOL,
]
GetIfTable.restype = DWORD
GetIpAddrTable = ctypes.windll.iphlpapi.GetIpAddrTable
GetIpAddrTable.argtypes = [
ctypes.POINTER(MIB_IPADDRTABLE),
ctypes.POINTER(ctypes.c_ulong),
BOOL,
]
GetIpAddrTable.restype = DWORD
GetAdaptersAddresses = ctypes.windll.iphlpapi.GetAdaptersAddresses
GetAdaptersAddresses.argtypes = [
ctypes.c_ulong,
ctypes.c_ulong,
ctypes.c_void_p,
ctypes.POINTER(IP_ADAPTER_ADDRESSES),
ctypes.POINTER(ctypes.c_ulong),
]
GetAdaptersAddresses.restype = ctypes.c_ulong
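# Hedged usage sketch (an illustrative addition, not part of the original
# module): the usual two-call pattern for GetIfTable -- ask for the required
# buffer size, allocate it, then reinterpret the raw bytes as a MIB_IFTABLE.
# The helper name is an assumption.
def _interface_names():
    size = ctypes.c_ulong()
    GetIfTable(None, ctypes.byref(size), False)  # first call only reports the size
    buf = ctypes.create_string_buffer(size.value)
    table = ctypes.cast(buf, ctypes.POINTER(MIB_IFTABLE)).contents
    error = GetIfTable(table, ctypes.byref(size), False)
    if error:
        raise ctypes.WinError(error)
    rows = ctypes.cast(
        table.entries, ctypes.POINTER(MIB_IFROW * table.num_entries)).contents
    return [row.name for row in rows]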

View file

@@ -0,0 +1,9 @@
import ctypes.wintypes
GetModuleFileName = ctypes.windll.kernel32.GetModuleFileNameW
GetModuleFileName.argtypes = (
ctypes.wintypes.HANDLE,
ctypes.wintypes.LPWSTR,
ctypes.wintypes.DWORD,
)
GetModuleFileName.restype = ctypes.wintypes.DWORD
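# Hedged usage sketch (illustrative, not part of the original module): a NULL
# module handle asks for the current process's executable path. The fixed
# 260-character (MAX_PATH) buffer is an assumption; very long paths would
# need a larger one.
def _current_executable():
    buf = ctypes.create_unicode_buffer(260)
    length = GetModuleFileName(None, buf, len(buf))
    return buf[:length]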

View file

@@ -0,0 +1,45 @@
import ctypes.wintypes
GMEM_MOVEABLE = 0x2
GlobalAlloc = ctypes.windll.kernel32.GlobalAlloc
GlobalAlloc.argtypes = ctypes.wintypes.UINT, ctypes.c_size_t
GlobalAlloc.restype = ctypes.wintypes.HANDLE
GlobalLock = ctypes.windll.kernel32.GlobalLock
GlobalLock.argtypes = ctypes.wintypes.HGLOBAL,
GlobalLock.restype = ctypes.wintypes.LPVOID
GlobalUnlock = ctypes.windll.kernel32.GlobalUnlock
GlobalUnlock.argtypes = ctypes.wintypes.HGLOBAL,
GlobalUnlock.restype = ctypes.wintypes.BOOL
GlobalSize = ctypes.windll.kernel32.GlobalSize
GlobalSize.argtypes = ctypes.wintypes.HGLOBAL,
GlobalSize.restype = ctypes.c_size_t
CreateFileMapping = ctypes.windll.kernel32.CreateFileMappingW
CreateFileMapping.argtypes = [
ctypes.wintypes.HANDLE,
ctypes.c_void_p,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.wintypes.LPWSTR,
]
CreateFileMapping.restype = ctypes.wintypes.HANDLE
MapViewOfFile = ctypes.windll.kernel32.MapViewOfFile
MapViewOfFile.restype = ctypes.wintypes.HANDLE
UnmapViewOfFile = ctypes.windll.kernel32.UnmapViewOfFile
UnmapViewOfFile.argtypes = ctypes.wintypes.HANDLE,
RtlMoveMemory = ctypes.windll.kernel32.RtlMoveMemory
RtlMoveMemory.argtypes = (
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_size_t,
)
ctypes.windll.kernel32.LocalFree.argtypes = ctypes.wintypes.HLOCAL,
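# Hedged usage sketch (illustrative, not part of the original module): the
# allocate/lock/copy/unlock dance these bindings exist for, e.g. when handing
# UTF-16 text to the clipboard. The helper name and encoding are assumptions;
# the caller owns the returned handle.
def _global_alloc_text(text):
    data = (text + '\0').encode('utf-16-le')
    handle = GlobalAlloc(GMEM_MOVEABLE, len(data))
    locked = GlobalLock(handle)
    try:
        RtlMoveMemory(locked, data, len(data))
    finally:
        GlobalUnlock(handle)
    return handle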

View file

@@ -0,0 +1,54 @@
#!/usr/bin/env python
"""
jaraco.windows.message
Windows Messaging support
"""
import ctypes
from ctypes.wintypes import HWND, UINT, WPARAM, LPARAM, DWORD, LPVOID
import six
LRESULT = LPARAM
class LPARAM_wstr(LPARAM):
"""
A special instance of LPARAM that can be constructed from a string
instance (for functions such as SendMessage, whose LPARAM may point to
a unicode string).
"""
@classmethod
def from_param(cls, param):
if isinstance(param, six.string_types):
return LPVOID.from_param(six.text_type(param))
return LPARAM.from_param(param)
SendMessage = ctypes.windll.user32.SendMessageW
SendMessage.argtypes = (HWND, UINT, WPARAM, LPARAM_wstr)
SendMessage.restype = LRESULT
HWND_BROADCAST = 0xFFFF
WM_SETTINGCHANGE = 0x1A
# constants from http://msdn.microsoft.com
# /en-us/library/ms644952%28v=vs.85%29.aspx
SMTO_ABORTIFHUNG = 0x02
SMTO_BLOCK = 0x01
SMTO_NORMAL = 0x00
SMTO_NOTIMEOUTIFNOTHUNG = 0x08
SMTO_ERRORONEXIT = 0x20
SendMessageTimeout = ctypes.windll.user32.SendMessageTimeoutW
SendMessageTimeout.argtypes = SendMessage.argtypes + (
UINT, UINT, ctypes.POINTER(DWORD)
)
SendMessageTimeout.restype = LRESULT
def unicode_as_lparam(source):
pointer = ctypes.cast(ctypes.c_wchar_p(source), ctypes.c_void_p)
return LPARAM(pointer.value)
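# Hedged usage sketch (illustrative, not part of the original module):
# broadcast WM_SETTINGCHANGE with the string "Environment" so running
# applications re-read the environment, using the timeout variant to avoid
# hanging on unresponsive windows. The 5-second timeout is an assumption.
def _notify_environment_changed():
    result = DWORD()
    SendMessageTimeout(
        HWND_BROADCAST, WM_SETTINGCHANGE, 0, 'Environment',
        SMTO_ABORTIFHUNG, 5000, result)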

View file

@@ -0,0 +1,30 @@
import ctypes.wintypes
# MPR - Multiple Provider Router
mpr = ctypes.windll.mpr
RESOURCETYPE_ANY = 0
class NETRESOURCE(ctypes.Structure):
_fields_ = [
('scope', ctypes.wintypes.DWORD),
('type', ctypes.wintypes.DWORD),
('display_type', ctypes.wintypes.DWORD),
('usage', ctypes.wintypes.DWORD),
('local_name', ctypes.wintypes.LPWSTR),
('remote_name', ctypes.wintypes.LPWSTR),
('comment', ctypes.wintypes.LPWSTR),
('provider', ctypes.wintypes.LPWSTR),
]
LPNETRESOURCE = ctypes.POINTER(NETRESOURCE)
WNetAddConnection2 = mpr.WNetAddConnection2W
WNetAddConnection2.argtypes = (
LPNETRESOURCE,
ctypes.wintypes.LPCWSTR,
ctypes.wintypes.LPCWSTR,
ctypes.wintypes.DWORD,
)

View file

@@ -0,0 +1,37 @@
import ctypes.wintypes
class SYSTEM_POWER_STATUS(ctypes.Structure):
_fields_ = (
('ac_line_status', ctypes.wintypes.BYTE),
('battery_flag', ctypes.wintypes.BYTE),
('battery_life_percent', ctypes.wintypes.BYTE),
('reserved', ctypes.wintypes.BYTE),
('battery_life_time', ctypes.wintypes.DWORD),
('battery_full_life_time', ctypes.wintypes.DWORD),
)
    @property
    def ac_line_status_string(self):
        # wintypes.BYTE is signed, so mask to recover the documented 0/1/255 codes
        return {
            0: 'offline', 1: 'online', 255: 'unknown'}[self.ac_line_status & 0xff]
LPSYSTEM_POWER_STATUS = ctypes.POINTER(SYSTEM_POWER_STATUS)
GetSystemPowerStatus = ctypes.windll.kernel32.GetSystemPowerStatus
GetSystemPowerStatus.argtypes = LPSYSTEM_POWER_STATUS,
GetSystemPowerStatus.restype = ctypes.wintypes.BOOL
SetThreadExecutionState = ctypes.windll.kernel32.SetThreadExecutionState
SetThreadExecutionState.argtypes = [ctypes.c_uint]
SetThreadExecutionState.restype = ctypes.c_uint
class ES:
"""
Execution state constants
"""
continuous = 0x80000000
system_required = 1
display_required = 2
awaymode_required = 0x40
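# Hedged usage sketch (illustrative, not part of the original module): fill a
# SYSTEM_POWER_STATUS and report the decoded AC line state along with the
# battery percentage (255, i.e. "unknown", appears as -1 here because
# wintypes.BYTE is signed).
def _power_summary():
    status = SYSTEM_POWER_STATUS()
    if not GetSystemPowerStatus(status):
        raise ctypes.WinError()
    return status.ac_line_status_string, status.battery_life_percent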

View file

@@ -0,0 +1,117 @@
import ctypes.wintypes
class LUID(ctypes.Structure):
_fields_ = [
('low_part', ctypes.wintypes.DWORD),
('high_part', ctypes.wintypes.LONG),
]
def __eq__(self, other):
return (
self.high_part == other.high_part and
self.low_part == other.low_part
)
def __ne__(self, other):
return not (self == other)
LookupPrivilegeValue = ctypes.windll.advapi32.LookupPrivilegeValueW
LookupPrivilegeValue.argtypes = (
ctypes.wintypes.LPWSTR, # system name
ctypes.wintypes.LPWSTR, # name
ctypes.POINTER(LUID),
)
LookupPrivilegeValue.restype = ctypes.wintypes.BOOL
class TOKEN_INFORMATION_CLASS:
TokenUser = 1
TokenGroups = 2
TokenPrivileges = 3
# ... see http://msdn.microsoft.com/en-us/library/aa379626%28VS.85%29.aspx
SE_PRIVILEGE_ENABLED_BY_DEFAULT = 0x00000001
SE_PRIVILEGE_ENABLED = 0x00000002
SE_PRIVILEGE_REMOVED = 0x00000004
SE_PRIVILEGE_USED_FOR_ACCESS = 0x80000000
class LUID_AND_ATTRIBUTES(ctypes.Structure):
_fields_ = [
('LUID', LUID),
('attributes', ctypes.wintypes.DWORD),
]
def is_enabled(self):
return bool(self.attributes & SE_PRIVILEGE_ENABLED)
def enable(self):
self.attributes |= SE_PRIVILEGE_ENABLED
def get_name(self):
size = ctypes.wintypes.DWORD(10240)
buf = ctypes.create_unicode_buffer(size.value)
res = LookupPrivilegeName(None, self.LUID, buf, size)
if res == 0:
            raise RuntimeError("cannot look up privilege name")
return buf[:size.value]
def __str__(self):
res = self.get_name()
if self.is_enabled():
res += ' (enabled)'
return res
LookupPrivilegeName = ctypes.windll.advapi32.LookupPrivilegeNameW
LookupPrivilegeName.argtypes = (
ctypes.wintypes.LPWSTR, # lpSystemName
ctypes.POINTER(LUID), # lpLuid
ctypes.wintypes.LPWSTR, # lpName
ctypes.POINTER(ctypes.wintypes.DWORD), # cchName
)
LookupPrivilegeName.restype = ctypes.wintypes.BOOL
class TOKEN_PRIVILEGES(ctypes.Structure):
_fields_ = [
('count', ctypes.wintypes.DWORD),
('privileges', LUID_AND_ATTRIBUTES * 0),
]
def get_array(self):
array_type = LUID_AND_ATTRIBUTES * self.count
privileges = ctypes.cast(
self.privileges, ctypes.POINTER(array_type)).contents
return privileges
def __iter__(self):
return iter(self.get_array())
PTOKEN_PRIVILEGES = ctypes.POINTER(TOKEN_PRIVILEGES)
GetTokenInformation = ctypes.windll.advapi32.GetTokenInformation
GetTokenInformation.argtypes = [
ctypes.wintypes.HANDLE, # TokenHandle
ctypes.c_uint, # TOKEN_INFORMATION_CLASS value
ctypes.c_void_p, # TokenInformation
ctypes.wintypes.DWORD, # TokenInformationLength
ctypes.POINTER(ctypes.wintypes.DWORD), # ReturnLength
]
GetTokenInformation.restype = ctypes.wintypes.BOOL
# http://msdn.microsoft.com/en-us/library/aa375202%28VS.85%29.aspx
AdjustTokenPrivileges = ctypes.windll.advapi32.AdjustTokenPrivileges
AdjustTokenPrivileges.restype = ctypes.wintypes.BOOL
AdjustTokenPrivileges.argtypes = [
ctypes.wintypes.HANDLE, # TokenHandle
ctypes.wintypes.BOOL, # DisableAllPrivileges
PTOKEN_PRIVILEGES, # NewState (optional)
ctypes.wintypes.DWORD, # BufferLength of PreviousState
PTOKEN_PRIVILEGES, # PreviousState (out, optional)
ctypes.POINTER(ctypes.wintypes.DWORD), # ReturnLength
]

View file

@@ -0,0 +1,11 @@
import ctypes.wintypes
TOKEN_ALL_ACCESS = 0xf01ff
GetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess
GetCurrentProcess.restype = ctypes.wintypes.HANDLE
OpenProcessToken = ctypes.windll.advapi32.OpenProcessToken
OpenProcessToken.argtypes = (
ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD,
ctypes.POINTER(ctypes.wintypes.HANDLE))
OpenProcessToken.restype = ctypes.wintypes.BOOL
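# Hedged usage sketch (illustrative, not part of the original module): open
# the current process's access token with full access; the caller is
# responsible for closing the returned handle.
def _current_process_token():
    token = ctypes.wintypes.HANDLE()
    if not OpenProcessToken(GetCurrentProcess(), TOKEN_ALL_ACCESS, token):
        raise ctypes.WinError()
    return token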

View file

@@ -0,0 +1,139 @@
import ctypes.wintypes
# from WinNT.h
READ_CONTROL = 0x00020000
STANDARD_RIGHTS_REQUIRED = 0x000F0000
STANDARD_RIGHTS_READ = READ_CONTROL
STANDARD_RIGHTS_WRITE = READ_CONTROL
STANDARD_RIGHTS_EXECUTE = READ_CONTROL
STANDARD_RIGHTS_ALL = 0x001F0000
# from NTSecAPI.h
POLICY_VIEW_LOCAL_INFORMATION = 0x00000001
POLICY_VIEW_AUDIT_INFORMATION = 0x00000002
POLICY_GET_PRIVATE_INFORMATION = 0x00000004
POLICY_TRUST_ADMIN = 0x00000008
POLICY_CREATE_ACCOUNT = 0x00000010
POLICY_CREATE_SECRET = 0x00000020
POLICY_CREATE_PRIVILEGE = 0x00000040
POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080
POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100
POLICY_AUDIT_LOG_ADMIN = 0x00000200
POLICY_SERVER_ADMIN = 0x00000400
POLICY_LOOKUP_NAMES = 0x00000800
POLICY_NOTIFICATION = 0x00001000
POLICY_ALL_ACCESS = (
STANDARD_RIGHTS_REQUIRED |
POLICY_VIEW_LOCAL_INFORMATION |
POLICY_VIEW_AUDIT_INFORMATION |
POLICY_GET_PRIVATE_INFORMATION |
POLICY_TRUST_ADMIN |
POLICY_CREATE_ACCOUNT |
POLICY_CREATE_SECRET |
POLICY_CREATE_PRIVILEGE |
POLICY_SET_DEFAULT_QUOTA_LIMITS |
POLICY_SET_AUDIT_REQUIREMENTS |
POLICY_AUDIT_LOG_ADMIN |
POLICY_SERVER_ADMIN |
POLICY_LOOKUP_NAMES)
POLICY_READ = (
STANDARD_RIGHTS_READ |
POLICY_VIEW_AUDIT_INFORMATION |
POLICY_GET_PRIVATE_INFORMATION)
POLICY_WRITE = (
STANDARD_RIGHTS_WRITE |
POLICY_TRUST_ADMIN |
POLICY_CREATE_ACCOUNT |
POLICY_CREATE_SECRET |
POLICY_CREATE_PRIVILEGE |
POLICY_SET_DEFAULT_QUOTA_LIMITS |
POLICY_SET_AUDIT_REQUIREMENTS |
POLICY_AUDIT_LOG_ADMIN |
POLICY_SERVER_ADMIN)
POLICY_EXECUTE = (
STANDARD_RIGHTS_EXECUTE |
POLICY_VIEW_LOCAL_INFORMATION |
POLICY_LOOKUP_NAMES)
class TokenAccess:
TOKEN_QUERY = 0x8
class TokenInformationClass:
TokenUser = 1
class TOKEN_USER(ctypes.Structure):
num = 1
_fields_ = [
('SID', ctypes.c_void_p),
('ATTRIBUTES', ctypes.wintypes.DWORD),
]
class SECURITY_DESCRIPTOR(ctypes.Structure):
"""
typedef struct _SECURITY_DESCRIPTOR
{
UCHAR Revision;
UCHAR Sbz1;
SECURITY_DESCRIPTOR_CONTROL Control;
PSID Owner;
PSID Group;
PACL Sacl;
PACL Dacl;
} SECURITY_DESCRIPTOR;
"""
SECURITY_DESCRIPTOR_CONTROL = ctypes.wintypes.USHORT
REVISION = 1
_fields_ = [
('Revision', ctypes.c_ubyte),
('Sbz1', ctypes.c_ubyte),
('Control', SECURITY_DESCRIPTOR_CONTROL),
('Owner', ctypes.c_void_p),
('Group', ctypes.c_void_p),
('Sacl', ctypes.c_void_p),
('Dacl', ctypes.c_void_p),
]
class SECURITY_ATTRIBUTES(ctypes.Structure):
"""
typedef struct _SECURITY_ATTRIBUTES {
DWORD nLength;
LPVOID lpSecurityDescriptor;
BOOL bInheritHandle;
} SECURITY_ATTRIBUTES;
"""
_fields_ = [
('nLength', ctypes.wintypes.DWORD),
('lpSecurityDescriptor', ctypes.c_void_p),
('bInheritHandle', ctypes.wintypes.BOOL),
]
def __init__(self, *args, **kwargs):
super(SECURITY_ATTRIBUTES, self).__init__(*args, **kwargs)
self.nLength = ctypes.sizeof(SECURITY_ATTRIBUTES)
@property
def descriptor(self):
return self._descriptor
@descriptor.setter
def descriptor(self, value):
self._descriptor = value
self.lpSecurityDescriptor = ctypes.addressof(value)
ctypes.windll.advapi32.SetSecurityDescriptorOwner.argtypes = (
ctypes.POINTER(SECURITY_DESCRIPTOR),
ctypes.c_void_p,
ctypes.wintypes.BOOL,
)

View file

@@ -0,0 +1,130 @@
import ctypes.wintypes
BOOL = ctypes.wintypes.BOOL
class SHELLSTATE(ctypes.Structure):
_fields_ = [
('show_all_objects', BOOL, 1),
('show_extensions', BOOL, 1),
('no_confirm_recycle', BOOL, 1),
('show_sys_files', BOOL, 1),
('show_comp_color', BOOL, 1),
('double_click_in_web_view', BOOL, 1),
('desktop_HTML', BOOL, 1),
('win95_classic', BOOL, 1),
('dont_pretty_path', BOOL, 1),
('show_attrib_col', BOOL, 1),
('map_network_drive_button', BOOL, 1),
('show_info_tip', BOOL, 1),
('hide_icons', BOOL, 1),
('web_view', BOOL, 1),
('filter', BOOL, 1),
('show_super_hidden', BOOL, 1),
('no_net_crawling', BOOL, 1),
('win95_unused', ctypes.wintypes.DWORD),
('param_sort', ctypes.wintypes.LONG),
('sort_direction', ctypes.c_int),
('version', ctypes.wintypes.UINT),
('not_used', ctypes.wintypes.UINT),
('sep_process', BOOL, 1),
('start_panel_on', BOOL, 1),
('show_start_page', BOOL, 1),
('auto_check_select', BOOL, 1),
('icons_only', BOOL, 1),
('show_type_overlay', BOOL, 1),
('spare_flags', ctypes.wintypes.UINT, 13),
]
SSF_SHOWALLOBJECTS = 0x00000001
"The fShowAllObjects member is being requested."
SSF_SHOWEXTENSIONS = 0x00000002
"The fShowExtensions member is being requested."
SSF_HIDDENFILEEXTS = 0x00000004
"Not used."
SSF_SERVERADMINUI = 0x00000004
"Not used."
SSF_SHOWCOMPCOLOR = 0x00000008
"The fShowCompColor member is being requested."
SSF_SORTCOLUMNS = 0x00000010
"The lParamSort and iSortDirection members are being requested."
SSF_SHOWSYSFILES = 0x00000020
"The fShowSysFiles member is being requested."
SSF_DOUBLECLICKINWEBVIEW = 0x00000080
"The fDoubleClickInWebView member is being requested."
SSF_SHOWATTRIBCOL = 0x00000100
"The fShowAttribCol member is being requested. (Windows Vista: Not used.)"
SSF_DESKTOPHTML = 0x00000200
"""
The fDesktopHTML member is being requested. Set is not available.
Instead, for versions of Microsoft Windows prior to Windows XP,
enable Desktop HTML by IActiveDesktop. The use of IActiveDesktop
for this purpose, however, is not recommended for Windows XP and
later versions of Windows, and is deprecated in Windows Vista.
"""
SSF_WIN95CLASSIC = 0x00000400
"The fWin95Classic member is being requested."
SSF_DONTPRETTYPATH = 0x00000800
"The fDontPrettyPath member is being requested."
SSF_MAPNETDRVBUTTON = 0x00001000
"The fMapNetDrvBtn member is being requested."
SSF_SHOWINFOTIP = 0x00002000
"The fShowInfoTip member is being requested."
SSF_HIDEICONS = 0x00004000
"The fHideIcons member is being requested."
SSF_NOCONFIRMRECYCLE = 0x00008000
"The fNoConfirmRecycle member is being requested."
SSF_FILTER = 0x00010000
"The fFilter member is being requested. (Windows Vista: Not used.)"
SSF_WEBVIEW = 0x00020000
"The fWebView member is being requested."
SSF_SHOWSUPERHIDDEN = 0x00040000
"The fShowSuperHidden member is being requested."
SSF_SEPPROCESS = 0x00080000
"The fSepProcess member is being requested."
SSF_NONETCRAWLING = 0x00100000
"Windows XP and later. The fNoNetCrawling member is being requested."
SSF_STARTPANELON = 0x00200000
"Windows XP and later. The fStartPanelOn member is being requested."
SSF_SHOWSTARTPAGE = 0x00400000
"Not used."
SSF_AUTOCHECKSELECT = 0x00800000
"Windows Vista and later. The fAutoCheckSelect member is being requested."
SSF_ICONSONLY = 0x01000000
"Windows Vista and later. The fIconsOnly member is being requested."
SSF_SHOWTYPEOVERLAY = 0x02000000
"Windows Vista and later. The fShowTypeOverlay member is being requested."
SHGetSetSettings = ctypes.windll.shell32.SHGetSetSettings
SHGetSetSettings.argtypes = [
ctypes.POINTER(SHELLSTATE),
ctypes.wintypes.DWORD,
ctypes.wintypes.BOOL, # get or set (True: set)
]
SHGetSetSettings.restype = None
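# Hedged usage sketch (illustrative, not part of the original module): query
# (rather than set) two Explorer settings; the final False argument selects
# "get".
def _explorer_shows_hidden_and_extensions():
    state = SHELLSTATE()
    SHGetSetSettings(state, SSF_SHOWALLOBJECTS | SSF_SHOWEXTENSIONS, False)
    return bool(state.show_all_objects), bool(state.show_extensions)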

View file

@@ -0,0 +1,14 @@
import ctypes.wintypes
SystemParametersInfo = ctypes.windll.user32.SystemParametersInfoW
SystemParametersInfo.argtypes = (
ctypes.wintypes.UINT,
ctypes.wintypes.UINT,
ctypes.c_void_p,
ctypes.wintypes.UINT,
)
SPI_GETACTIVEWINDOWTRACKING = 0x1000
SPI_SETACTIVEWINDOWTRACKING = 0x1001
SPI_GETACTIVEWNDTRKTIMEOUT = 0x2002
SPI_SETACTIVEWNDTRKTIMEOUT = 0x2003
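# Hedged usage sketch (illustrative, not part of the original module): read
# the current "active window tracking" (focus follows mouse) setting; a zero
# return signals failure.
def _get_active_window_tracking():
    value = ctypes.wintypes.BOOL()
    if not SystemParametersInfo(
            SPI_GETACTIVEWINDOWTRACKING, 0, ctypes.byref(value), 0):
        raise ctypes.WinError()
    return bool(value.value)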

View file

@@ -0,0 +1,10 @@
import ctypes.wintypes
try:
from ctypes.wintypes import LPDWORD
except ImportError:
LPDWORD = ctypes.POINTER(ctypes.wintypes.DWORD)
GetUserName = ctypes.windll.advapi32.GetUserNameW
GetUserName.argtypes = ctypes.wintypes.LPWSTR, LPDWORD
GetUserName.restype = ctypes.wintypes.DWORD
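# Hedged usage sketch (illustrative, not part of the original module): the
# 256-character buffer is an assumption; GetUserName updates the length,
# which includes the terminating NUL.
def _current_user():
    size = ctypes.wintypes.DWORD(256)
    buf = ctypes.create_unicode_buffer(size.value)
    if not GetUserName(buf, size):
        raise ctypes.WinError()
    return buf[:size.value - 1]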

Some files were not shown because too many files have changed in this diff