Mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-21 05:43:16 -07:00)

Merge pull request #1428 from clinton-hall/libs/requirements

Update requirements

Commit 8dbb1a2451

75 changed files with 2171 additions and 3848 deletions
libs/backports/__init__.py (new file, 1 addition)

@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
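This one-liner makes `backports` a pkgutil-style namespace package: every directory on `sys.path` that ships a `backports/__init__.py` with this line is merged into one logical package. A minimal sketch of the effect (the printed result depends on what is installed; nothing here is vendored by this PR):

    import backports

    # __path__ now lists every directory that contributes to the namespace,
    # so independently installed backports.* distributions can coexist.
    print(backports.__path__)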
libs/concurrent/futures/__init__.py

@@ -14,5 +14,10 @@ from concurrent.futures._base import (FIRST_COMPLETED,
                                       Executor,
                                       wait,
                                       as_completed)
-from concurrent.futures.process import ProcessPoolExecutor
 from concurrent.futures.thread import ThreadPoolExecutor
+
+try:
+    from concurrent.futures.process import ProcessPoolExecutor
+except ImportError:
+    # some platforms don't have multiprocessing
+    pass
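With the guarded import, the package now loads even where `multiprocessing` is missing and simply omits `ProcessPoolExecutor`. Callers can feature-test for it; a small sketch under that assumption (the thread-pool fallback is illustrative, not part of this change):

    import concurrent.futures as futures

    # getattr() avoids an AttributeError on platforms where the
    # process pool could not be imported.
    executor_cls = getattr(futures, 'ProcessPoolExecutor',
                           futures.ThreadPoolExecutor)
    pool = executor_cls(4)
    print(list(pool.map(abs, [-1, -2, -3])))  # [1, 2, 3]
    pool.shutdown()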
libs/concurrent/futures/_base.py

@@ -1,15 +1,12 @@
 # Copyright 2009 Brian Quinlan. All Rights Reserved.
 # Licensed to PSF under a Contributor Agreement.
 
-from __future__ import with_statement
+import collections
 import logging
 import threading
+import itertools
 import time
-
-try:
-    from collections import namedtuple
-except ImportError:
-    from concurrent.futures._compat import namedtuple
+import types
 
 __author__ = 'Brian Quinlan (brian@sweetapp.com)'
 
@@ -175,6 +172,29 @@ def _create_and_install_waiters(fs, return_when):
 
     return waiter
 
 
+def _yield_finished_futures(fs, waiter, ref_collect):
+    """
+    Iterate on the list *fs*, yielding finished futures one by one in
+    reverse order.
+    Before yielding a future, *waiter* is removed from its waiters
+    and the future is removed from each set in the collection of sets
+    *ref_collect*.
+
+    The aim of this function is to avoid keeping stale references after
+    the future is yielded and before the iterator resumes.
+    """
+    while fs:
+        f = fs[-1]
+        for futures_set in ref_collect:
+            futures_set.remove(f)
+        with f._condition:
+            f._waiters.remove(waiter)
+        del f
+        # Careful not to keep a reference to the popped value
+        yield fs.pop()
+
+
 def as_completed(fs, timeout=None):
     """An iterator over the given futures that yields each as it completes.
@@ -186,7 +206,8 @@ def as_completed(fs, timeout=None):
 
     Returns:
         An iterator that yields the given Futures as they complete (finished or
-        cancelled).
+        cancelled). If any given Futures are duplicated, they will be returned
+        once.
 
     Raises:
         TimeoutError: If the entire result iterator could not be generated
@@ -195,16 +216,20 @@ def as_completed(fs, timeout=None):
     if timeout is not None:
         end_time = timeout + time.time()
 
+    fs = set(fs)
+    total_futures = len(fs)
     with _AcquireFutures(fs):
         finished = set(
                 f for f in fs
                 if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
-        pending = set(fs) - finished
+        pending = fs - finished
         waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
-
+    finished = list(finished)
     try:
-        for future in finished:
-            yield future
+        for f in _yield_finished_futures(finished, waiter,
+                                         ref_collect=(fs,)):
+            f = [f]
+            yield f.pop()
 
         while pending:
             if timeout is None:
@@ -214,7 +239,7 @@ def as_completed(fs, timeout=None):
                 if wait_timeout < 0:
                     raise TimeoutError(
                             '%d (of %d) futures unfinished' % (
-                            len(pending), len(fs)))
+                            len(pending), total_futures))
 
             waiter.event.wait(wait_timeout)
 
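Taken together, these hunks make `as_completed()` snapshot its input once: duplicates collapse through `fs = set(fs)`, and timeout messages count against the original `total_futures`. A small usage sketch (illustrative values):

    from concurrent.futures import ThreadPoolExecutor, as_completed

    pool = ThreadPoolExecutor(2)
    f = pool.submit(pow, 2, 10)
    done = list(as_completed([f, f]))  # the duplicate is yielded only once
    assert done == [f] and f.result() == 1024
    pool.shutdown()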
@@ -223,15 +248,20 @@ def as_completed(fs, timeout=None):
                 waiter.finished_futures = []
                 waiter.event.clear()
 
-            for future in finished:
-                yield future
-                pending.remove(future)
+            # reverse to keep finishing order
+            finished.reverse()
+            for f in _yield_finished_futures(finished, waiter,
+                                             ref_collect=(fs, pending)):
+                f = [f]
+                yield f.pop()
 
     finally:
+        # Remove waiter from unfinished futures
        for f in fs:
-            f._waiters.remove(waiter)
+            with f._condition:
+                f._waiters.remove(waiter)
 
-DoneAndNotDoneFutures = namedtuple(
+DoneAndNotDoneFutures = collections.namedtuple(
         'DoneAndNotDoneFutures', 'done not_done')
 def wait(fs, timeout=None, return_when=ALL_COMPLETED):
     """Wait for the futures in the given sequence to complete.
@@ -276,7 +306,8 @@ def wait(fs, timeout=None, return_when=ALL_COMPLETED):
 
         waiter.event.wait(timeout)
     for f in fs:
-        f._waiters.remove(waiter)
+        with f._condition:
+            f._waiters.remove(waiter)
 
     done.update(waiter.finished_futures)
     return DoneAndNotDoneFutures(done, set(fs) - done)
@@ -290,6 +321,7 @@ class Future(object):
         self._state = PENDING
         self._result = None
         self._exception = None
+        self._traceback = None
         self._waiters = []
         self._done_callbacks = []
 
@@ -299,22 +331,41 @@ class Future(object):
                 callback(self)
-            except Exception:
-                LOGGER.exception('exception calling callback for %r', self)
+            except BaseException:
+                # Explicitly let all other new-style exceptions through so
+                # that we can catch all old-style exceptions with a simple
+                # "except:" clause below.
+                #
+                # All old-style exception objects are instances of
+                # types.InstanceType, but "except types.InstanceType:" does
+                # not catch old-style exceptions for some reason. Thus, the
+                # only way to catch all old-style exceptions without catching
+                # any new-style exceptions is to filter out the new-style
+                # exceptions, which all derive from BaseException.
+                raise
+            except:
+                # Because of the BaseException clause above, this handler only
+                # executes for old-style exception objects.
+                LOGGER.exception('exception calling callback for %r', self)
 
     def __repr__(self):
         with self._condition:
             if self._state == FINISHED:
                 if self._exception:
-                    return '<Future at %s state=%s raised %s>' % (
-                        hex(id(self)),
+                    return '<%s at %#x state=%s raised %s>' % (
+                        self.__class__.__name__,
+                        id(self),
                         _STATE_TO_DESCRIPTION_MAP[self._state],
                         self._exception.__class__.__name__)
                 else:
-                    return '<Future at %s state=%s returned %s>' % (
-                        hex(id(self)),
+                    return '<%s at %#x state=%s returned %s>' % (
+                        self.__class__.__name__,
+                        id(self),
                         _STATE_TO_DESCRIPTION_MAP[self._state],
                         self._result.__class__.__name__)
-            return '<Future at %s state=%s>' % (
-                    hex(id(self)),
+            return '<%s at %#x state=%s>' % (
+                    self.__class__.__name__,
+                    id(self),
                     _STATE_TO_DESCRIPTION_MAP[self._state])
 
     def cancel(self):
@@ -337,7 +388,7 @@ class Future(object):
         return True
 
     def cancelled(self):
-        """Return True if the future has cancelled."""
+        """Return True if the future was cancelled."""
         with self._condition:
             return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
 
@@ -353,7 +404,14 @@ class Future(object):
 
     def __get_result(self):
         if self._exception:
-            raise self._exception
+            if isinstance(self._exception, types.InstanceType):
+                # The exception is an instance of an old-style class, which
+                # means type(self._exception) returns types.ClassType instead
+                # of the exception's actual class type.
+                exception_type = self._exception.__class__
+            else:
+                exception_type = type(self._exception)
+            raise exception_type, self._exception, self._traceback
         else:
             return self._result
 
@@ -405,6 +463,39 @@ class Future(object):
         else:
             raise TimeoutError()
 
+    def exception_info(self, timeout=None):
+        """Return a tuple of (exception, traceback) raised by the call that the
+        future represents.
+
+        Args:
+            timeout: The number of seconds to wait for the exception if the
+                future isn't done. If None, then there is no limit on the wait
+                time.
+
+        Returns:
+            The exception raised by the call that the future represents or None
+            if the call completed without raising.
+
+        Raises:
+            CancelledError: If the future was cancelled.
+            TimeoutError: If the future didn't finish executing before the given
+                timeout.
+        """
+        with self._condition:
+            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+                raise CancelledError()
+            elif self._state == FINISHED:
+                return self._exception, self._traceback
+
+            self._condition.wait(timeout)
+
+            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+                raise CancelledError()
+            elif self._state == FINISHED:
+                return self._exception, self._traceback
+            else:
+                raise TimeoutError()
+
     def exception(self, timeout=None):
         """Return the exception raised by the call that the future represents.
@@ -422,21 +513,7 @@ class Future(object):
             TimeoutError: If the future didn't finish executing before the given
                 timeout.
         """
-
-        with self._condition:
-            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
-                raise CancelledError()
-            elif self._state == FINISHED:
-                return self._exception
-
-            self._condition.wait(timeout)
-
-            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
-                raise CancelledError()
-            elif self._state == FINISHED:
-                return self._exception
-            else:
-                raise TimeoutError()
+        return self.exception_info(timeout)[0]
 
     # The following methods should only be used by Executors and in tests.
     def set_running_or_notify_cancel(self):
@@ -475,8 +552,8 @@ class Future(object):
             return True
         else:
             LOGGER.critical('Future %s in unexpected state: %s',
-                            id(self.future),
-                            self.future._state)
+                            id(self),
+                            self._state)
             raise RuntimeError('Future in unexpected state')
 
     def set_result(self, result):
@@ -492,19 +569,28 @@ class Future(object):
         self._condition.notify_all()
         self._invoke_callbacks()
 
-    def set_exception(self, exception):
-        """Sets the result of the future as being the given exception.
+    def set_exception_info(self, exception, traceback):
+        """Sets the result of the future as being the given exception
+        and traceback.
 
         Should only be used by Executor implementations and unit tests.
         """
         with self._condition:
             self._exception = exception
+            self._traceback = traceback
             self._state = FINISHED
             for waiter in self._waiters:
                 waiter.add_exception(self)
             self._condition.notify_all()
         self._invoke_callbacks()
 
+    def set_exception(self, exception):
+        """Sets the result of the future as being the given exception.
+
+        Should only be used by Executor implementations and unit tests.
+        """
+        self.set_exception_info(exception, None)
+
 class Executor(object):
     """This is an abstract base class for concrete asynchronous executors."""
 
@@ -520,7 +606,7 @@ class Executor(object):
         raise NotImplementedError()
 
     def map(self, fn, *iterables, **kwargs):
-        """Returns a iterator equivalent to map(fn, iter).
+        """Returns an iterator equivalent to map(fn, iter).
 
         Args:
             fn: A callable that will take as many arguments as there are
@@ -541,17 +627,24 @@ class Executor(object):
         if timeout is not None:
             end_time = timeout + time.time()
 
-        fs = [self.submit(fn, *args) for args in zip(*iterables)]
+        fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)]
 
-        try:
-            for future in fs:
-                if timeout is None:
-                    yield future.result()
-                else:
-                    yield future.result(end_time - time.time())
-        finally:
-            for future in fs:
-                future.cancel()
+        # Yield must be hidden in closure so that the futures are submitted
+        # before the first iterator value is required.
+        def result_iterator():
+            try:
+                # reverse to keep finishing order
+                fs.reverse()
+                while fs:
+                    # Careful not to keep a reference to the popped future
+                    if timeout is None:
+                        yield fs.pop().result()
+                    else:
+                        yield fs.pop().result(end_time - time.time())
+            finally:
+                for future in fs:
+                    future.cancel()
+        return result_iterator()
 
     def shutdown(self, wait=True):
         """Clean-up the resources associated with the Executor.
libs/concurrent/futures/_compat.py (deleted)

@@ -1,101 +0,0 @@
-from keyword import iskeyword as _iskeyword
-from operator import itemgetter as _itemgetter
-import sys as _sys
-
-
-def namedtuple(typename, field_names):
-    """Returns a new subclass of tuple with named fields.
-
-    >>> Point = namedtuple('Point', 'x y')
-    >>> Point.__doc__                   # docstring for the new class
-    'Point(x, y)'
-    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
-    >>> p[0] + p[1]                     # indexable like a plain tuple
-    33
-    >>> x, y = p                        # unpack like a regular tuple
-    >>> x, y
-    (11, 22)
-    >>> p.x + p.y                       # fields also accessable by name
-    33
-    >>> d = p._asdict()                 # convert to a dictionary
-    >>> d['x']
-    11
-    >>> Point(**d)                      # convert from a dictionary
-    Point(x=11, y=22)
-    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
-    Point(x=100, y=22)
-
-    """
-
-    # Parse and validate the field names.  Validation serves two purposes,
-    # generating informative error messages and preventing template injection attacks.
-    if isinstance(field_names, basestring):
-        field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
-    field_names = tuple(map(str, field_names))
-    for name in (typename,) + field_names:
-        if not all(c.isalnum() or c=='_' for c in name):
-            raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
-        if _iskeyword(name):
-            raise ValueError('Type names and field names cannot be a keyword: %r' % name)
-        if name[0].isdigit():
-            raise ValueError('Type names and field names cannot start with a number: %r' % name)
-    seen_names = set()
-    for name in field_names:
-        if name.startswith('_'):
-            raise ValueError('Field names cannot start with an underscore: %r' % name)
-        if name in seen_names:
-            raise ValueError('Encountered duplicate field name: %r' % name)
-        seen_names.add(name)
-
-    # Create and fill-in the class template
-    numfields = len(field_names)
-    argtxt = repr(field_names).replace("'", "")[1:-1]   # tuple repr without parens or quotes
-    reprtxt = ', '.join('%s=%%r' % name for name in field_names)
-    dicttxt = ', '.join('%r: t[%d]' % (name, pos) for pos, name in enumerate(field_names))
-    template = '''class %(typename)s(tuple):
-        '%(typename)s(%(argtxt)s)' \n
-        __slots__ = () \n
-        _fields = %(field_names)r \n
-        def __new__(_cls, %(argtxt)s):
-            return _tuple.__new__(_cls, (%(argtxt)s)) \n
-        @classmethod
-        def _make(cls, iterable, new=tuple.__new__, len=len):
-            'Make a new %(typename)s object from a sequence or iterable'
-            result = new(cls, iterable)
-            if len(result) != %(numfields)d:
-                raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
-            return result \n
-        def __repr__(self):
-            return '%(typename)s(%(reprtxt)s)' %% self \n
-        def _asdict(t):
-            'Return a new dict which maps field names to their values'
-            return {%(dicttxt)s} \n
-        def _replace(_self, **kwds):
-            'Return a new %(typename)s object replacing specified fields with new values'
-            result = _self._make(map(kwds.pop, %(field_names)r, _self))
-            if kwds:
-                raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
-            return result \n
-        def __getnewargs__(self):
-            return tuple(self) \n\n''' % locals()
-    for i, name in enumerate(field_names):
-        template += '        %s = _property(_itemgetter(%d))\n' % (name, i)
-
-    # Execute the template string in a temporary namespace and
-    # support tracing utilities by setting a value for frame.f_globals['__name__']
-    namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
-                     _property=property, _tuple=tuple)
-    try:
-        exec(template, namespace)
-    except SyntaxError:
-        e = _sys.exc_info()[1]
-        raise SyntaxError(e.message + ':\n' + template)
-    result = namespace[typename]
-
-    # For pickling to work, the __module__ variable needs to be set to the frame
-    # where the named tuple is created.  Bypass this step in enviroments where
-    # sys._getframe is not defined (Jython for example).
-    if hasattr(_sys, '_getframe'):
-        result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
-
-    return result
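The shim can go because `collections.namedtuple` has shipped in the standard library since Python 2.6, which covers everything this codebase supports; `_base.py` now calls it directly:

    import collections

    # Same call shape the deleted fallback provided.
    DoneAndNotDoneFutures = collections.namedtuple(
        'DoneAndNotDoneFutures', 'done not_done')
    print(DoneAndNotDoneFutures(done=set(), not_done=set()))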
libs/concurrent/futures/process.py

@@ -43,20 +43,14 @@ Process #1..n:
   _ResultItems in "Request Q"
 """
 
-from __future__ import with_statement
 import atexit
+from concurrent.futures import _base
+import Queue as queue
 import multiprocessing
 import threading
 import weakref
 import sys
 
-from concurrent.futures import _base
-
-try:
-    import queue
-except ImportError:
-    import Queue as queue
-
 __author__ = 'Brian Quinlan (brian@sweetapp.com)'
 
 # Workers are created as daemon threads and processes. This is done to allow the
@@ -79,11 +73,11 @@ _shutdown = False
 def _python_exit():
     global _shutdown
     _shutdown = True
-    items = list(_threads_queues.items())
+    items = list(_threads_queues.items()) if _threads_queues else ()
     for t, q in items:
         q.put(None)
     for t, q in items:
-        t.join()
+        t.join(sys.maxint)
 
 # Controls how many more calls than processes will be queued in the call queue.
 # A smaller number will mean that processes spend more time idle waiting for
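Joining with `sys.maxint` instead of no argument is a Python 2 idiom: a bare `Thread.join()` blocks uninterruptibly in C, while a timed join polls and stays responsive to `KeyboardInterrupt`. The idiom in isolation (illustrative):

    import sys
    import threading
    import time

    t = threading.Thread(target=time.sleep, args=(0.2,))
    t.start()
    # Effectively "join forever", but Ctrl-C can still get through
    # because a timed join polls rather than blocking in C.
    t.join(sys.maxint)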
@@ -132,7 +126,7 @@ def _process_worker(call_queue, result_queue):
             return
         try:
             r = call_item.fn(*call_item.args, **call_item.kwargs)
-        except BaseException:
+        except:
             e = sys.exc_info()[1]
             result_queue.put(_ResultItem(call_item.work_id,
                                          exception=e))
@@ -220,6 +214,8 @@ def _queue_management_worker(executor_reference,
                 work_item.future.set_exception(result_item.exception)
             else:
                 work_item.future.set_result(result_item.result)
+            # Delete references to object. See issue16284
+            del work_item
         # Check whether we should start shutting down.
         executor = executor_reference()
         # No more work items can be added if:
@@ -266,6 +262,7 @@ def _check_system_limits():
         _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
         raise NotImplementedError(_system_limited)
 
+
 class ProcessPoolExecutor(_base.Executor):
     def __init__(self, max_workers=None):
         """Initializes a new ProcessPoolExecutor instance.
@@ -280,6 +277,9 @@ class ProcessPoolExecutor(_base.Executor):
         if max_workers is None:
             self._max_workers = multiprocessing.cpu_count()
         else:
+            if max_workers <= 0:
+                raise ValueError("max_workers must be greater than 0")
+
             self._max_workers = max_workers
 
         # Make the call queue slightly larger than the number of processes to
@@ -351,7 +351,7 @@ class ProcessPoolExecutor(_base.Executor):
             # Wake up queue management thread
             self._result_queue.put(None)
             if wait:
-                self._queue_management_thread.join()
+                self._queue_management_thread.join(sys.maxint)
         # To reduce the risk of openning too many files, remove references to
         # objects that use file descriptors.
         self._queue_management_thread = None
libs/concurrent/futures/thread.py

@@ -3,18 +3,20 @@
 
 """Implements ThreadPoolExecutor."""
 
-from __future__ import with_statement
 import atexit
+from concurrent.futures import _base
+import itertools
+import Queue as queue
 import threading
 import weakref
 import sys
 
-from concurrent.futures import _base
-
 try:
-    import queue
+    from multiprocessing import cpu_count
 except ImportError:
-    import Queue as queue
+    # some platforms don't have multiprocessing
+    def cpu_count():
+        return None
 
 __author__ = 'Brian Quinlan (brian@sweetapp.com)'
 
@@ -38,11 +40,11 @@ _shutdown = False
 def _python_exit():
     global _shutdown
     _shutdown = True
-    items = list(_threads_queues.items())
+    items = list(_threads_queues.items()) if _threads_queues else ()
     for t, q in items:
         q.put(None)
     for t, q in items:
-        t.join()
+        t.join(sys.maxint)
 
 atexit.register(_python_exit)
 
@@ -59,9 +61,9 @@ class _WorkItem(object):
 
         try:
             result = self.fn(*self.args, **self.kwargs)
-        except BaseException:
-            e = sys.exc_info()[1]
-            self.future.set_exception(e)
+        except:
+            e, tb = sys.exc_info()[1:]
+            self.future.set_exception_info(e, tb)
         else:
             self.future.set_result(result)
 
@@ -71,6 +73,8 @@ def _worker(executor_reference, work_queue):
             work_item = work_queue.get(block=True)
             if work_item is not None:
                 work_item.run()
+                # Delete references to object. See issue16284
+                del work_item
                 continue
             executor = executor_reference()
             # Exit if:
@@ -82,22 +86,37 @@ def _worker(executor_reference, work_queue):
                 work_queue.put(None)
             return
         del executor
-    except BaseException:
+    except:
         _base.LOGGER.critical('Exception in worker', exc_info=True)
 
 
 class ThreadPoolExecutor(_base.Executor):
-    def __init__(self, max_workers):
+
+    # Used to assign unique thread names when thread_name_prefix is not supplied.
+    _counter = itertools.count().next
+
+    def __init__(self, max_workers=None, thread_name_prefix=''):
         """Initializes a new ThreadPoolExecutor instance.
 
         Args:
             max_workers: The maximum number of threads that can be used to
                 execute the given calls.
+            thread_name_prefix: An optional name prefix to give our threads.
         """
+        if max_workers is None:
+            # Use this number because ThreadPoolExecutor is often
+            # used to overlap I/O instead of CPU work.
+            max_workers = (cpu_count() or 1) * 5
+        if max_workers <= 0:
+            raise ValueError("max_workers must be greater than 0")
+
         self._max_workers = max_workers
         self._work_queue = queue.Queue()
         self._threads = set()
         self._shutdown = False
         self._shutdown_lock = threading.Lock()
+        self._thread_name_prefix = (thread_name_prefix or
+                                    ("ThreadPoolExecutor-%d" % self._counter()))
 
     def submit(self, fn, *args, **kwargs):
         with self._shutdown_lock:
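`max_workers` is now optional and worker threads get recognizable names, which helps when reading thread dumps. A usage sketch of the new constructor (names and counts are illustrative):

    import threading
    from concurrent.futures import ThreadPoolExecutor

    # Default worker count is (cpu_count() or 1) * 5, since thread pools
    # usually overlap I/O rather than CPU-bound work.
    pool = ThreadPoolExecutor(thread_name_prefix='downloader')
    pool.submit(lambda: None).result()
    print([t.name for t in threading.enumerate()
           if t.name.startswith('downloader')])  # e.g. ['downloader_0']
    pool.shutdown()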
@@ -119,8 +138,11 @@ class ThreadPoolExecutor(_base.Executor):
             q.put(None)
         # TODO(bquinlan): Should avoid creating new threads if there are more
         # idle threads than items in the work queue.
-        if len(self._threads) < self._max_workers:
-            t = threading.Thread(target=_worker,
+        num_threads = len(self._threads)
+        if num_threads < self._max_workers:
+            thread_name = '%s_%d' % (self._thread_name_prefix or self,
+                                     num_threads)
+            t = threading.Thread(name=thread_name, target=_worker,
                                  args=(weakref.ref(self, weakref_cb),
                                        self._work_queue))
             t.daemon = True
@@ -134,5 +156,5 @@ class ThreadPoolExecutor(_base.Executor):
             self._work_queue.put(None)
         if wait:
             for t in self._threads:
-                t.join()
+                t.join(sys.maxint)
     shutdown.__doc__ = _base.Executor.shutdown.__doc__
LICENSE (rarfile, deleted)

@@ -1,15 +0,0 @@
-
-Copyright (c) 2005-2016 Marko Kreen <markokr@gmail.com>
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
MANIFEST.in (rarfile, deleted)

@@ -1,3 +0,0 @@
-include README.rst Makefile MANIFEST.in LICENSE dumprar.py
-include doc/*.rst doc/Makefile doc/conf.py doc/make.bat
-include test/Makefile test/*.sh test/files/*.rar test/files/*.exp
Makefile (rarfile, deleted)

@@ -1,31 +0,0 @@
-
-prefix = /usr/local
-
-all:
-	python setup.py build
-
-install:
-	python setup.py install --prefix=$(prefix)
-
-tgz: clean
-	python setup.py sdist
-
-clean:
-	rm -rf __pycache__ build dist
-	rm -f *.pyc MANIFEST *.orig *.rej *.html *.class
-	rm -rf doc/_build doc/_static doc/_templates
-	make -C test clean
-
-html:
-	rst2html README.rst > README.html
-	make -C doc html
-
-lint:
-	pylint -E rarfile.py
-
-rbuild:
-	curl -X POST https://readthedocs.org/build/6715
-
-upload:
-	python setup.py sdist upload
-
PKG-INFO (rarfile, deleted)

@@ -1,56 +0,0 @@
-Metadata-Version: 1.1
-Name: rarfile
-Version: 2.8
-Summary: RAR archive reader for Python
-Home-page: https://github.com/markokr/rarfile
-Author: Marko Kreen
-Author-email: markokr@gmail.com
-License: ISC
-Description: rarfile - RAR archive reader for Python
-        =======================================
-        
-        This is Python module for RAR_ archive reading. The interface
-        is made as zipfile_ like as possible. Licensed under ISC_
-        license.
-        
-        Features:
-        
-         - Supports both RAR2 and RAR3 archives (WinRAR 2.x .. WinRAR 4.x).
-         - Supports multi volume archives.
-         - Supports Unicode filenames.
-         - Supports password-protected archives.
-         - Supports archive and file comments.
-         - Archive parsing and non-compressed files are handled in pure Python code.
-         - Compressed files are extracted by executing external tool: either ``unrar``
-           from RARLAB_ or ``bsdtar`` from libarchive_.
-         - Works with both Python 2.7 and 3.x.
-        
-        Notes:
-        
-         - Does not support the RAR5 format introduced in WinRAR 5.0.
-         - ``bsdtar`` does not support all RAR3 features.
-        
-        Links:
-        
-         - `Documentation`_
-         - `Downloads`_
-         - `Git`_ repo
-        
-        .. _RAR: https://en.wikipedia.org/wiki/RAR_%28file_format%29
-        .. _zipfile: https://docs.python.org/2/library/zipfile.html
-        .. _ISC: https://en.wikipedia.org/wiki/ISC_license
-        .. _Git: https://github.com/markokr/rarfile
-        .. _Downloads: https://pypi.python.org/pypi/rarfile
-        .. _Documentation: https://rarfile.readthedocs.io/
-        .. _libarchive: https://github.com/libarchive/libarchive
-        .. _RARLAB: http://www.rarlab.com/
-Keywords: rar,unrar,archive
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: ISC License (ISCL)
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 3
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: System :: Archiving :: Compression
README.rst (rarfile, deleted)

@@ -1,39 +0,0 @@
-
-rarfile - RAR archive reader for Python
-=======================================
-
-This is Python module for RAR_ archive reading. The interface
-is made as zipfile_ like as possible. Licensed under ISC_
-license.
-
-Features:
-
- - Supports both RAR2 and RAR3 archives (WinRAR 2.x .. WinRAR 4.x).
- - Supports multi volume archives.
- - Supports Unicode filenames.
- - Supports password-protected archives.
- - Supports archive and file comments.
- - Archive parsing and non-compressed files are handled in pure Python code.
- - Compressed files are extracted by executing external tool: either ``unrar``
-   from RARLAB_ or ``bsdtar`` from libarchive_.
- - Works with both Python 2.7 and 3.x.
-
-Notes:
-
- - Does not support the RAR5 format introduced in WinRAR 5.0.
- - ``bsdtar`` does not support all RAR3 features.
-
-Links:
-
- - `Documentation`_
- - `Downloads`_
- - `Git`_ repo
-
-.. _RAR: https://en.wikipedia.org/wiki/RAR_%28file_format%29
-.. _zipfile: https://docs.python.org/2/library/zipfile.html
-.. _ISC: https://en.wikipedia.org/wiki/ISC_license
-.. _Git: https://github.com/markokr/rarfile
-.. _Downloads: https://pypi.python.org/pypi/rarfile
-.. _Documentation: https://rarfile.readthedocs.io/
-.. _libarchive: https://github.com/libarchive/libarchive
-.. _RARLAB: http://www.rarlab.com/
doc/Makefile (rarfile, deleted)

@@ -1,153 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = _build
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-
-clean:
-	-rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/RarFile.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/RarFile.qhc"
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# mkdir -p $$HOME/.local/share/devhelp/RarFile"
-	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/RarFile"
-	@echo "# devhelp"
-
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
doc/api.rst (rarfile, deleted)

@@ -1,111 +0,0 @@
-
-rarfile API documentation
-=========================
-
-.. contents:: Table Of Contents
-
-Introduction
-------------
-
-.. automodule:: rarfile
-
-RarFile class
--------------
-
-.. autoclass:: RarFile
-   :members:
-   :inherited-members:
-
-RarInfo class
--------------
-
-.. autoclass:: RarInfo
-   :members:
-   :inherited-members:
-
-RarExtFile class
-----------------
-
-.. autoclass:: RarExtFile
-   :members:
-   :inherited-members:
-
-Functions
----------
-
-.. autofunction:: is_rarfile
-
-Module Configuration
---------------------
-
-.. autodata:: UNRAR_TOOL
-.. autodata:: DEFAULT_CHARSET
-.. autodata:: TRY_ENCODINGS
-.. autodata:: USE_DATETIME
-.. autodata:: PATH_SEP
-.. autodata:: NEED_COMMENTS
-.. autodata:: UNICODE_COMMENTS
-.. autodata:: USE_EXTRACT_HACK
-.. autodata:: HACK_SIZE_LIMIT
-
-Constants
----------
-
-.. py:data:: RAR_M0
-
-   No compression.
-
-.. py:data:: RAR_M1
-
-   Compression level `-m1` - Fastest compression.
-
-.. py:data:: RAR_M2
-
-   Compression level `-m2`.
-
-.. py:data:: RAR_M3
-
-   Compression level `-m3`.
-
-.. py:data:: RAR_M4
-
-   Compression level `-m4`.
-
-.. py:data:: RAR_M5
-
-   Compression level `-m5` - Maximum compression.
-
-.. py:data:: RAR_OS_MSDOS
-.. py:data:: RAR_OS_OS2
-.. py:data:: RAR_OS_WIN32
-.. py:data:: RAR_OS_UNIX
-.. py:data:: RAR_OS_MACOS
-.. py:data:: RAR_OS_BEOS
-
-Exceptions
-----------
-
-.. autoclass:: Error
-.. autoclass:: BadRarFile
-.. autoclass:: NotRarFile
-.. autoclass:: BadRarName
-.. autoclass:: NoRarEntry
-.. autoclass:: PasswordRequired
-.. autoclass:: NeedFirstVolume
-.. autoclass:: NoCrypto
-.. autoclass:: RarExecError
-.. autoclass:: RarWarning
-.. autoclass:: RarFatalError
-.. autoclass:: RarCRCError
-.. autoclass:: RarLockedArchiveError
-.. autoclass:: RarWriteError
-.. autoclass:: RarOpenError
-.. autoclass:: RarUserError
-.. autoclass:: RarMemoryError
-.. autoclass:: RarCreateError
-.. autoclass:: RarNoFilesError
-.. autoclass:: RarUserBreak
-.. autoclass:: RarUnknownError
-.. autoclass:: RarSignalExit
-
doc/conf.py (rarfile, deleted)

@@ -1,249 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# RarFile documentation build configuration file, created by
-# sphinx-quickstart on Sun Mar 24 13:29:46 2013.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys, os, os.path
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-import rarfile
-
-# -- General configuration -----------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
-
-autodoc_member_order = 'bysource'
-autoclass_content = 'both'
-autodoc_default_flags = ['show-inheritance']
-
-intersphinx_mapping = {'python': ('http://docs.python.org/2', None)}
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'RarFile'
-copyright = u'2005-2016, Marko Kreen'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = rarfile.__version__
-# The full version, including alpha/beta/rc tags.
-release = rarfile.__version__
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ['_build']
-
-# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-html_theme = 'default'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# "<project> v<release> documentation".
-#html_title = None
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-html_show_sphinx = False
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
-# Output file base name for HTML help builder.
-#htmlhelp_basename = 'RarFiledoc'
-
-
-# -- Options for LaTeX output --------------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
-  ('index', 'RarFile.tex', u'RarFile Documentation',
-   u'Marko Kreen', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output --------------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-#man_pages = [
-#    ('index', 'rarfile', u'RarFile Documentation',
-#     [u'Marko Kreen'], 1)
-#]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output ------------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-  ('index', 'RarFile', u'RarFile Documentation',
-   u'Marko Kreen', 'RarFile', 'One line description of project.',
-   'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
doc/faq.rst (rarfile, deleted)

@@ -1,87 +0,0 @@
-
-rarfile FAQ
-===========
-
-.. contents:: Table of Contents
-
-What are the dependencies?
---------------------------
-
-It depends on ``unrar`` command-line utility to do the actual decompression.
-Note that by default it expect it to be in ``PATH``.  If unrar
-launching fails, you need to fix this.
-
-Alternatively, :mod:`rarfile` can use bsdtar_ from libarchive_ as
-decompression backend, but that is a bit problematic as bsdtar_ does not support
-all RAR features.
-
-.. _bsdtar: https://github.com/libarchive/libarchive/wiki/ManPageBsdtar1
-.. _libarchive: http://www.libarchive.org/
-
-It depends on cryptography_ or PyCrypto_ modules to process
-archives with password-protected headers.
-
-.. _cryptography: https://pypi.python.org/pypi/cryptography
-.. _PyCrypto: https://pypi.python.org/pypi/pycrypto
-
-Does it parse ``unrar`` output to get archive contents?
--------------------------------------------------------
-
-No, :mod:`rarfile` parses RAR structure in Python code.  Also it can
-read uncompressed files from archive without external utility.
-
-Will rarfile support wrapping unrarlib/unrar.dll/unrar.so in the future?
-------------------------------------------------------------------------
-
-No.  The current architecture - parsing in Python and decompression with
-command line tools work well across all interesting operating systems
-(Windows/Linux/MacOS), wrapping a library does not bring any advantages.
-
-Simple execution of command-line tools is also legally simpler situation
-than linking with external library.
-
-How can I get it work on Windows?
----------------------------------
-
-On Windows the ``unrar.exe`` is not in ``PATH`` so simple ``Popen("unrar ..")`` does not work.
-It can be solved several ways:
-
-1. Add location of ``unrar.exe`` to PATH.
-2. Set :data:`rarfile.UNRAR_TOOL` to full path of ``unrar.exe``.
-3. Copy ``unrar.exe`` to your program directory.
-4. Copy ``unrar.exe`` to system directory that is in PATH, eg. ``C:\Windows``.
-
-How to avoid the need for user to manually install rarfile/unrar?
------------------------------------------------------------------
-
-Include ``rarfile.py`` and/or ``unrar`` with your application.
-
-Will it support creating RAR archives?
---------------------------------------
-
-No.  RARLAB_ is not interested in RAR becoming open format
-and specifically discourages writing RAR creation software.
-
-In the meantime use either Zip_ (better compatibility) or 7z_ (better compression)
-format for your own archives.
-
-.. _RARLAB: http://www.rarlab.com/
-.. _Zip: https://en.wikipedia.org/wiki/ZIP_%28file_format%29
-.. _7z: https://en.wikipedia.org/wiki/7z
-
-What is the USE_EXTRACT_HACK?
------------------------------
-
-RarFile uses ``unrar`` to extract compressed files.  But when extracting
-single file from archive containing many entries, ``unrar`` needs to parse
-whole archive until it finds the right entry.  This makes random-access
-to entries slow.  To avoid that, RarFile remembers location of compressed
-data for each entry and on read it copies it to temporary archive containing
-only data for that one file, thus making ``unrar`` fast.
-
-The logic is only activated for entries smaller than :data:`rarfile.HACK_SIZE_LIMIT`
-(20M by default).  Bigger files are accessed directly from RAR.
-
-Note - it only works for non-solid archives.  So if you care about
-random access to files in your archive, do not create solid archives.
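Although these docs are leaving the vendored tree, the setting they describe still applies to the `rarfile` module itself. A hedged sketch of the Windows advice above (the `.exe` path and archive name are examples, not fixed locations):

    import rarfile

    # Point rarfile at an unrar binary that is not on PATH.
    rarfile.UNRAR_TOOL = r"C:\Program Files\WinRAR\UnRAR.exe"

    rf = rarfile.RarFile('archive.rar')
    for info in rf.infolist():
        print(info.filename, info.file_size)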
doc/index.rst (rarfile, deleted)

@@ -1,42 +0,0 @@
-
-rarfile - RAR archive reader for Python
-=======================================
-
-This is Python module for RAR_ archive reading.  The interface
-is made as zipfile_ like as possible.  Licensed under ISC_
-license.
-
-.. _RAR: http://en.wikipedia.org/wiki/RAR
-.. _zipfile: http://docs.python.org/library/zipfile.html
-.. _ISC: http://en.wikipedia.org/wiki/ISC_license
-
-Features:
-
- - Supports both RAR 2.x and 3.x archives.
- - Supports multi volume archives.
- - Supports Unicode filenames.
- - Supports password-protected archives.
- - Supports archive and file comments.
- - Archive parsing and non-compressed files are handled in pure Python code.
- - For compressed files runs ``unrar`` utility.
- - Works with both Python 2.x and 3.x.
-
-
-
-Documentation:
-
-.. toctree::
-   :maxdepth: 1
-
-   Module Documentation <api>
-   FAQs <faq>
-   Release News <news>
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
-
@@ -1,190 +0,0 @@
@ECHO OFF

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)

if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\RarFile.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\RarFile.qhc
	goto end
)

if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)

if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)

if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)

if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)

if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

:end

@@ -1,243 +0,0 @@

rarfile history
===============

.. py:currentmodule:: rarfile

Version 2.8 (2016-06-07)
------------------------

* Fix: support solid archives from in-memory file object.
  The full archive will be written out to a temp file.
  [`#21 <https://github.com/markokr/rarfile/issues/21>`_]

* Fix: ask unrar to stop switch scanning,
  to handle archive names starting with "-".
  (Alexander Shadchin)
  [`#12 <https://github.com/markokr/rarfile/pull/12>`_]

* Fix: add missing _parse_error variable to RarFile object.
  (Gregory Mazzola)
  [`#20 <https://github.com/markokr/rarfile/pull/20>`_]

* Fix: return proper boolean from :meth:`RarInfo.needs_password`.
  [`#22 <https://github.com/markokr/rarfile/issues/22>`_]

* Fix: do not insert non-string rarfile into exception string.
  (Tim Muller)
  [`#23 <https://github.com/markokr/rarfile/pull/23>`_]

* Fix: make :meth:`RarFile.extract` and :meth:`RarFile.testrar`
  support in-memory archives.

* Use the cryptography_ module as the preferred crypto backend.
  PyCrypto_ will be used as a fallback.

* Cleanup: remove compat code for Python 2.4/2.5/2.6.

.. _cryptography: https://pypi.python.org/pypi/cryptography
.. _PyCrypto: https://pypi.python.org/pypi/pycrypto

Version 2.7 (2014-11-23)
------------------------

* Allow use of bsdtar_ as decompression backend. It sits
  on top of libarchive_, which has support for reading RAR archives.

  Limitations of the ``libarchive`` RAR backend:

  - Does not support solid archives.
  - Does not support password-protected archives.
  - Does not support "parsing filters" used for audio/image/executable data,
    so a few non-solid, non-encrypted archives also fail.

  Now :mod:`rarfile` checks for ``unrar`` and, if that is missing, tries
  ``bsdtar``. If that works, it keeps using it; otherwise the configuration
  stays with ``unrar``, which will then appear in error messages.

.. _bsdtar: https://github.com/libarchive/libarchive/wiki/ManPageBsdtar1
.. _libarchive: http://www.libarchive.org/

* Both :class:`RarFile` and :func:`is_rarfile` now accept a file-like
  object, e.g. :class:`io.BytesIO`. The only requirement is that the object
  must be seekable. This mirrors similar functionality in zipfile
  (a short sketch follows at the end of this section).

  Based on patch by Chase Zhang.

* Uniform error handling. :class:`RarFile` accepts an ``errors="strict"``
  argument.

  Allows the user to tune whether parsing and missing-file errors will raise
  an exception. If no error is raised, the error string can be queried
  with the :meth:`RarFile.strerror` method.

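A minimal sketch of the file-like usage above, assuming the archive
bytes have already been read into memory::

    import io
    import rarfile

    data = open('test.rar', 'rb').read()
    rf = rarfile.RarFile(io.BytesIO(data))
    print(rf.namelist())
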
Version 2.6 (2013-04-10)
------------------------

* Add context manager support for the :class:`RarFile` class.
  Both :class:`RarFile` and :class:`RarExtFile` now support
  the :keyword:`with` statement (see the sketch at the end of this section).
  (Wentao Han)
* :meth:`RarFile.volumelist` method, returns filenames of archive volumes.
* Re-throw clearer error in case ``unrar`` is not found in ``PATH``.
* Sync new unrar4.x error code from ``rar.txt``.
* Use Sphinx for documentation, push docs to rtfd.org_.

.. _rtfd.org: https://rarfile.readthedocs.org/

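A minimal context-manager sketch (the archive name is an assumed example)::

    import rarfile

    with rarfile.RarFile('test.rar') as rf:
        with rf.open(rf.namelist()[0]) as f:
            data = f.read()
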
Version 2.5 (2012-01-19)
------------------------

Fixes:

* :meth:`RarExtFile.read` and :meth:`RarExtFile.readinto` now do a looping read
  to work properly on short reads. Important for Python 3.2+, where a read from
  a pipe can return a short result even on a blocking file descriptor.
* Proper error reporting in :meth:`RarFile.extract`, :meth:`RarFile.extractall`
  and :meth:`RarFile.testrar`.
* :meth:`RarExtFile.read` from unrar pipe: prefer to return the unrar error code;
  if that's not available, do our own error checks.
* Avoid string addition in :meth:`RarExtFile.read`; instead always use list+join to
  merge multi-part reads.
* dumprar: don't re-encode byte strings (Python 2.x). This avoids
  unnecessary failure when printing invalid unicode.

Version 2.4 (2011-11-05)
------------------------

Fixes:

* :data:`USE_DATETIME`: survive bad values from RAR
* Fix bug in corrupt unicode filename handling
* dumprar: make unicode chars work with both pipe and console

Version 2.3 (2011-07-03)
------------------------

Features:

* Support .seek() method on file streams. (Kristian Larsson)
* Support .readinto() method on file streams. An optimized implementation
  is available on Python 2.6+ where :class:`memoryview` is available.
* Support file comments - :attr:`RarInfo.comment` contains decompressed data if available.
* File objects returned by :meth:`RarFile.open()` are :class:`io.RawIOBase`-compatible.
  They can be further wrapped with :class:`io.BufferedReader` and :class:`io.TextIOWrapper`.
* Now .getinfo() uses dict lookup instead of sequential scan when
  searching for an archive entry. This speeds up processing for archives that
  have many entries.
* Option :data:`UNICODE_COMMENTS` to decode both archive and file comments to unicode.
  It uses :data:`TRY_ENCODINGS` for the list of encodings to try. If off, comments are
  left as byte strings. Default: 0
* Option :data:`PATH_SEP` to change the path separator. Default: ``r'\'``,
  set ``rarfile.PATH_SEP='/'`` to be compatible with zipfile
  (see the sketch after this list).
* Option :data:`USE_DATETIME` to convert timestamps to datetime objects.
  Default: 0, timestamps are tuples.
* Option :data:`TRY_ENCODINGS` to allow tuning the attempted encoding list.
* Reorder :class:`RarInfo` fields to better show zipfile-compatible fields.
* Standard regtests to make sure various features work.

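A short sketch of the module-level options named above (values as documented)::

    import rarfile

    rarfile.PATH_SEP = '/'          # zipfile-compatible path separator
    rarfile.USE_DATETIME = 1        # timestamps as datetime objects
    rarfile.UNICODE_COMMENTS = 1    # decode comments to unicode
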
Compatibility:

* Drop :attr:`RarInfo.unicode_filename`, plain :attr:`RarInfo.filename` is already unicode since 2.0.
* .read(-1) now reads until EOF. Previously it returned an empty buffer.

Fixes:

* Make encrypted headers work with Python 3.x bytes() and with the old 2.x 'sha' module.
* Simplify :class:`subprocess.Popen` usage when launching ``unrar``. Previously
  it tried to optimize and work around OS/Python bugs, but this is not
  maintainable.
* Use the temp rar file hack on multi-volume archives too.
* Always .wait() on unrar, to avoid zombies.
* Convert struct.error to BadRarFile.
* Plug some fd leaks. Affected: Jython, PyPy.
* Broken archives are handled more robustly.

Version 2.2 (2010-08-19)
------------------------

Fixes:

* Relaxed volume naming. Now it just calculates the new volume name by finding the number
  in the old one and increasing it, without any expectations of what that number should be.
* Files with 4G of compressed data in one volume were handled wrong. Fix.
* DOS timestamp seconds need to be multiplied by 2.
* Correct EXTTIME parsing.

Cleanups:

* Compressed size is per-volume; sum the parts together so that the user sees the complete
  compressed size for files split over several volumes.
* dumprar: Show unknown bits.
* Use :class:`struct.Struct` to cache unpack formats.
* Support missing :data:`os.devnull`. (Python 2.3)

Version 2.1 (2010-07-31)
------------------------

Features:

* Minimal implementation for :meth:`RarFile.extract`, :meth:`RarFile.extractall`, :meth:`RarFile.testrar`.
  They are simple shortcuts to ``unrar`` invocation.
* Accept a :class:`RarInfo` object where a filename is expected.
* Include ``dumprar.py`` in .tgz. It can be used to visualize RAR structure
  and to test the module.
* Support for encrypted file headers.

Fixes:

* Don't read past ENDARC, there could be non-RAR data there.
* RAR 2.x: it does not write ENDARC, but our volume code expected it. Fix that.
* RAR 2.x: support more than 200 old-style volumes.

Cleanups:

* Load comment only when requested.
* Cleanup of internal config variables. They should now have final names.
* :meth:`RarFile.open`: add mode=r argument to match zipfile.
* Doc and comments cleanup, minimize duplication.
* Common wrappers for both compressed and uncompressed files;
  now :meth:`RarFile.open` also does CRC-checking.

Version 2.0 (2010-04-29)
------------------------

Features:

* Python 3 support. Still works with 2.x.
* Parses extended time fields. (.mtime, .ctime, .atime)
* :meth:`RarFile.open` method. This makes it possible to process large
  entries that do not fit into memory.
* Supports password-protected archives.
* Supports archive comments.

Cleanups:

* Uses the :mod:`subprocess` module to launch ``unrar``.
* .filename is always a Unicode string; .unicode_filename is now deprecated.
* .CRC is unsigned again, as python3 crc32() is unsigned.

Version 1.1 (2008-08-31)
------------------------

Fixes:

* Replace :func:`os.tempnam` with :func:`tempfile.mkstemp`. (Jason Moiron)
* Fix infinite loop in _extract_hack on unexpected EOF.
* :attr:`RarInfo.CRC` is now a signed value to match crc32().
* :meth:`RarFile.read` now checks the file CRC.

Cleanups:

* more docstrings
* throw proper exceptions (subclasses of :exc:`rarfile.Error`)
* RarInfo has fields pre-initialized, so they appear in help()
* rename RarInfo.data to RarInfo.header_data
* don't use "print" when header parsing fails
* use try/finally to delete the temp rar

Version 1.0 (2005-08-08)
------------------------

* First release.

@@ -1,361 +0,0 @@
#! /usr/bin/env python

"""Dump archive contents, test extraction."""

import io
import sys
import rarfile as rf

from binascii import crc32, hexlify
from datetime import datetime

try:
    bytearray
except NameError:
    import array
    def bytearray(v):
        return array.array('B', v)

rf.UNICODE_COMMENTS = 1
rf.USE_DATETIME = 1

usage = """
dumprar [switches] [ARC1 ARC2 ...] [@ARCLIST]
switches:
  @file      read archive names from file
  -pPSW      set password
  -Ccharset  set fallback charset
  -v         increase verbosity
  -t         attempt to read all files
  -x         write read files out
  -c         show archive comment
  -h         show usage
  --         stop switch parsing
""".strip()

os_list = ['DOS', 'OS2', 'WIN', 'UNIX', 'MACOS', 'BEOS']

block_strs = ['MARK', 'MAIN', 'FILE', 'OLD_COMMENT', 'OLD_EXTRA',
              'OLD_SUB', 'OLD_RECOVERY', 'OLD_AUTH', 'SUB', 'ENDARC']

def rarType(type):
    if type < rf.RAR_BLOCK_MARK or type > rf.RAR_BLOCK_ENDARC:
        return "*UNKNOWN*"
    return block_strs[type - rf.RAR_BLOCK_MARK]

main_bits = (
    (rf.RAR_MAIN_VOLUME, "VOL"),
    (rf.RAR_MAIN_COMMENT, "COMMENT"),
    (rf.RAR_MAIN_LOCK, "LOCK"),
    (rf.RAR_MAIN_SOLID, "SOLID"),
    (rf.RAR_MAIN_NEWNUMBERING, "NEWNR"),
    (rf.RAR_MAIN_AUTH, "AUTH"),
    (rf.RAR_MAIN_RECOVERY, "RECOVERY"),
    (rf.RAR_MAIN_PASSWORD, "PASSWORD"),
    (rf.RAR_MAIN_FIRSTVOLUME, "FIRSTVOL"),
    (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"),
    (rf.RAR_LONG_BLOCK, "LONG"),
)

endarc_bits = (
    (rf.RAR_ENDARC_NEXT_VOLUME, "NEXTVOL"),
    (rf.RAR_ENDARC_DATACRC, "DATACRC"),
    (rf.RAR_ENDARC_REVSPACE, "REVSPACE"),
    (rf.RAR_ENDARC_VOLNR, "VOLNR"),
    (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"),
    (rf.RAR_LONG_BLOCK, "LONG"),
)

file_bits = (
    (rf.RAR_FILE_SPLIT_BEFORE, "SPLIT_BEFORE"),
    (rf.RAR_FILE_SPLIT_AFTER, "SPLIT_AFTER"),
    (rf.RAR_FILE_PASSWORD, "PASSWORD"),
    (rf.RAR_FILE_COMMENT, "COMMENT"),
    (rf.RAR_FILE_SOLID, "SOLID"),
    (rf.RAR_FILE_LARGE, "LARGE"),
    (rf.RAR_FILE_UNICODE, "UNICODE"),
    (rf.RAR_FILE_SALT, "SALT"),
    (rf.RAR_FILE_VERSION, "VERSION"),
    (rf.RAR_FILE_EXTTIME, "EXTTIME"),
    (rf.RAR_FILE_EXTFLAGS, "EXTFLAGS"),
    (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"),
    (rf.RAR_LONG_BLOCK, "LONG"),
)

generic_bits = (
    (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"),
    (rf.RAR_LONG_BLOCK, "LONG"),
)

file_parms = ("D64", "D128", "D256", "D512",
              "D1024", "D2048", "D4096", "DIR")

def xprint(m, *args):
    if sys.hexversion < 0x3000000:
        m = m.decode('utf8')
    if args:
        m = m % args
    if sys.hexversion < 0x3000000:
        m = m.encode('utf8')
    sys.stdout.write(m)
    sys.stdout.write('\n')

def render_flags(flags, bit_list):
    res = []
    known = 0
    for bit in bit_list:
        known = known | bit[0]
        if flags & bit[0]:
            res.append(bit[1])
    unknown = flags & ~known
    n = 0
    while unknown:
        if unknown & 1:
            res.append("UNK_%04x" % (1 << n))
        unknown = unknown >> 1
        n += 1

    return ",".join(res)

def get_file_flags(flags):
    res = render_flags(flags & ~rf.RAR_FILE_DICTMASK, file_bits)

    xf = (flags & rf.RAR_FILE_DICTMASK) >> 5
    res += "," + file_parms[xf]
    return res

def get_main_flags(flags):
    return render_flags(flags, main_bits)

def get_endarc_flags(flags):
    return render_flags(flags, endarc_bits)

def get_generic_flags(flags):
    return render_flags(flags, generic_bits)

def fmt_time(t):
    if isinstance(t, datetime):
        return t.isoformat(' ')
    return "%04d-%02d-%02d %02d:%02d:%02d" % t

def show_item(h):
    st = rarType(h.type)
    unknown = h.header_size - h.header_base
    xprint("%s: hdrlen=%d datlen=%d hdr_unknown=%d", st, h.header_size,
           h.add_size, unknown)
    if unknown > 0 and cf_verbose > 1:
        dat = h.header_data[h.header_base:]
        xprint("  unknown: %s", hexlify(dat))
    if h.type in (rf.RAR_BLOCK_FILE, rf.RAR_BLOCK_SUB):
        if h.host_os == rf.RAR_OS_UNIX:
            s_mode = "0%o" % h.mode
        else:
            s_mode = "0x%x" % h.mode
        xprint("  flags=0x%04x:%s", h.flags, get_file_flags(h.flags))
        if h.host_os >= 0 and h.host_os < len(os_list):
            s_os = os_list[h.host_os]
        else:
            s_os = "?"
        xprint("  os=%d:%s ver=%d mode=%s meth=%c cmp=%d dec=%d vol=%d",
               h.host_os, s_os,
               h.extract_version, s_mode, h.compress_type,
               h.compress_size, h.file_size, h.volume)
        ucrc = (h.CRC + (1 << 32)) & ((1 << 32) - 1)
        xprint("  crc=0x%08x (%d) time=%s", ucrc, h.CRC, fmt_time(h.date_time))
        xprint("  name=%s", h.filename)
        if h.mtime:
            xprint("  mtime=%s", fmt_time(h.mtime))
        if h.ctime:
            xprint("  ctime=%s", fmt_time(h.ctime))
        if h.atime:
            xprint("  atime=%s", fmt_time(h.atime))
        if h.arctime:
            xprint("  arctime=%s", fmt_time(h.arctime))
    elif h.type == rf.RAR_BLOCK_MAIN:
        xprint("  flags=0x%04x:%s", h.flags, get_main_flags(h.flags))
    elif h.type == rf.RAR_BLOCK_ENDARC:
        xprint("  flags=0x%04x:%s", h.flags, get_endarc_flags(h.flags))
    elif h.type == rf.RAR_BLOCK_MARK:
        xprint("  flags=0x%04x:", h.flags)
    else:
        xprint("  flags=0x%04x:%s", h.flags, get_generic_flags(h.flags))

    if h.comment is not None:
        cm = repr(h.comment)
        if cm[0] == 'u':
            cm = cm[1:]
        xprint("  comment=%s", cm)

cf_show_comment = 0
cf_verbose = 0
cf_charset = None
cf_extract = 0
cf_test_read = 0
cf_test_unrar = 0
cf_test_memory = 0

def check_crc(f, inf):
    ucrc = f.CRC
    if ucrc < 0:
        # plain int shift works on both Python 2.x and 3.x, unlike long()
        ucrc += (1 << 32)
    if ucrc != inf.CRC:
        print('crc error')

def test_read_long(r, inf):
    f = r.open(inf.filename)
    total = 0
    while 1:
        data = f.read(8192)
        if not data:
            break
        total += len(data)
    if total != inf.file_size:
        xprint("\n *** %s has corrupt file: %s ***", r.rarfile, inf.filename)
        xprint(" *** short read: got=%d, need=%d ***\n", total, inf.file_size)
    check_crc(f, inf)

    # test .seek() & .readinto()
    if cf_test_read > 1:
        f.seek(0, 0)

        # hack: re-enable crc calc
        f.crc_check = 1
        f.CRC = 0

        total = 0
        buf = bytearray(rf.ZERO * 4096)
        while 1:
            res = f.readinto(buf)
            if not res:
                break
            total += res
        if inf.file_size != total:
            xprint(" *** readinto failed: got=%d, need=%d ***\n", total, inf.file_size)
        check_crc(f, inf)
    f.close()

def test_read(r, inf):
    test_read_long(r, inf)


def test_real(fn, psw):
    xprint("Archive: %s", fn)

    cb = None
    if cf_verbose > 1:
        cb = show_item

    rfarg = fn
    if cf_test_memory:
        rfarg = io.BytesIO(open(fn, 'rb').read())

    # check if rar
    if not rf.is_rarfile(rfarg):
        xprint(" --- %s is not a RAR file ---", fn)
        return

    # open
    r = rf.RarFile(rfarg, charset=cf_charset, info_callback=cb)
    # set password
    if r.needs_password():
        if psw:
            r.setpassword(psw)
        else:
            xprint(" --- %s requires password ---", fn)
            return

    # show comment
    if cf_show_comment and r.comment:
        for ln in r.comment.split('\n'):
            xprint("    %s", ln)
    elif cf_verbose == 1 and r.comment:
        cm = repr(r.comment)
        if cm[0] == 'u':
            cm = cm[1:]
        xprint("  comment=%s", cm)

    # process
    for n in r.namelist():
        inf = r.getinfo(n)
        if inf.isdir():
            continue
        if cf_verbose == 1:
            show_item(inf)
        if cf_test_read:
            test_read(r, inf)

    if cf_extract:
        r.extractall()
        for inf in r.infolist():
            r.extract(inf)

    if cf_test_unrar:
        r.testrar()

def test(fn, psw):
    try:
        test_real(fn, psw)
    except rf.NeedFirstVolume:
        xprint(" --- %s is middle part of multi-vol archive ---", fn)
    except rf.Error:
        exc, msg, tb = sys.exc_info()
        xprint("\n *** %s: %s ***\n", exc.__name__, msg)
        del tb
    except IOError:
        exc, msg, tb = sys.exc_info()
        xprint("\n *** %s: %s ***\n", exc.__name__, msg)
        del tb

def main():
    global cf_verbose, cf_show_comment, cf_charset
    global cf_extract, cf_test_read, cf_test_unrar
    global cf_test_memory

    # parse args
    args = []
    psw = None
    noswitch = False
    for a in sys.argv[1:]:
        if noswitch:
            args.append(a)
        elif a[0] == "@":
            for ln in open(a[1:], 'r'):
                fn = ln[:-1]
                args.append(fn)
        elif a[0] != '-':
            args.append(a)
        elif a[1] == 'p':
            psw = a[2:]
        elif a == '--':
            noswitch = True
        elif a == '-h':
            xprint(usage)
            return
        elif a == '-v':
            cf_verbose += 1
        elif a == '-c':
            cf_show_comment = 1
        elif a == '-x':
            cf_extract = 1
        elif a == '-t':
            cf_test_read += 1
        elif a == '-T':
            cf_test_unrar = 1
        elif a == '-M':
            cf_test_memory = 1
        elif a[1] == 'C':
            cf_charset = a[2:]
        else:
            raise Exception("unknown switch: " + a)
    if not args:
        xprint(usage)

    for fn in args:
        test(fn, psw)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass

@@ -1,33 +0,0 @@
#! /usr/bin/env python

from distutils.core import setup

import rarfile

ver = rarfile.__version__
ldesc = open("README.rst").read().strip()
sdesc = ldesc.split('\n')[0].split(' - ')[1].strip()

setup(
    name = "rarfile",
    version = ver,
    description = sdesc,
    long_description = ldesc,
    author = "Marko Kreen",
    license = "ISC",
    author_email = "markokr@gmail.com",
    url = "https://github.com/markokr/rarfile",
    py_modules = ['rarfile'],
    keywords = ['rar', 'unrar', 'archive'],
    classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: ISC License (ISCL)",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: System :: Archiving :: Compression",
    ]
)

@@ -1,9 +0,0 @@
test:
	./test1.sh
	./test2.sh

clean:
	rm -rf __pycache__
	rm -f files/*.rar.[pj]* *.pyc *.class *.diffs
	rm -f rarfile.py

Binary file not shown.

@@ -1,7 +0,0 @@
Archive: files/ctime0.rar
FILE: hdrlen=46 datlen=0 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
  crc=0x00000000 (0) time=2011-05-10 21:28:47.899345
  name=afile.txt
  mtime=2011-05-10 21:28:47.899345

Binary file not shown.

@@ -1,8 +0,0 @@
Archive: files/ctime1.rar
FILE: hdrlen=50 datlen=0 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
  crc=0x00000000 (0) time=2011-05-10 21:28:47.899345
  name=afile.txt
  mtime=2011-05-10 21:28:47.899345
  ctime=2011-05-10 21:28:47

Binary file not shown.

@@ -1,8 +0,0 @@
Archive: files/ctime2.rar
FILE: hdrlen=51 datlen=0 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
  crc=0x00000000 (0) time=2011-05-10 21:28:47.899345
  name=afile.txt
  mtime=2011-05-10 21:28:47.899345
  ctime=2011-05-10 21:28:47.897843

Binary file not shown.

@@ -1,8 +0,0 @@
Archive: files/ctime3.rar
FILE: hdrlen=52 datlen=0 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
  crc=0x00000000 (0) time=2011-05-10 21:28:47.899345
  name=afile.txt
  mtime=2011-05-10 21:28:47.899345
  ctime=2011-05-10 21:28:47.899327

Binary file not shown.

@@ -1,8 +0,0 @@
Archive: files/ctime4.rar
FILE: hdrlen=53 datlen=0 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
  crc=0x00000000 (0) time=2011-05-10 21:28:47.899345
  name=afile.txt
  mtime=2011-05-10 21:28:47.899345
  ctime=2011-05-10 21:28:47.899345

Binary file not shown.

@@ -1,14 +0,0 @@
Archive: files/rar15-comment-lock.rar
  comment='RARcomment -----'
FILE: hdrlen=72 datlen=7 hdr_unknown=31
  flags=0x8008:COMMENT,LONG,D64
  os=0:DOS ver=15 mode=0x20 meth=3 cmp=7 dec=7 vol=0
  crc=0xe27f07a9 (3799975849) time=2010-11-03 19:49:32
  name=FILE1.TXT
  comment='file1comment -----'
FILE: hdrlen=72 datlen=8 hdr_unknown=31
  flags=0x8008:COMMENT,LONG,D64
  os=0:DOS ver=15 mode=0x20 meth=0 cmp=8 dec=8 vol=0
  crc=0x3c4306f7 (1011025655) time=2010-11-03 19:49:38
  name=FILE2.TXT
  comment='file2comment -----'

Binary file not shown.

@@ -1,14 +0,0 @@
Archive: files/rar15-comment.rar
  comment='RARcomment -----'
FILE: hdrlen=72 datlen=7 hdr_unknown=31
  flags=0x8008:COMMENT,LONG,D64
  os=0:DOS ver=15 mode=0x20 meth=3 cmp=7 dec=7 vol=0
  crc=0xe27f07a9 (3799975849) time=2010-11-03 19:49:32
  name=FILE1.TXT
  comment='file1comment -----'
FILE: hdrlen=72 datlen=8 hdr_unknown=31
  flags=0x8008:COMMENT,LONG,D64
  os=0:DOS ver=15 mode=0x20 meth=0 cmp=8 dec=8 vol=0
  crc=0x3c4306f7 (1011025655) time=2010-11-03 19:49:38
  name=FILE2.TXT
  comment='file2comment -----'

Binary file not shown.

@@ -1,14 +0,0 @@
Archive: files/rar202-comment-nopsw.rar
  comment='RARcomment'
FILE: hdrlen=66 datlen=7 hdr_unknown=25
  flags=0x8008:COMMENT,LONG,D64
  os=0:DOS ver=20 mode=0x20 meth=0 cmp=7 dec=7 vol=0
  crc=0x7a197dba (2048490938) time=2010-11-03 00:27:28
  name=FILE1.TXT
  comment='file1comment'
FILE: hdrlen=66 datlen=7 hdr_unknown=25
  flags=0x8008:COMMENT,LONG,D64
  os=0:DOS ver=20 mode=0x20 meth=0 cmp=7 dec=7 vol=0
  crc=0x785fc3e3 (2019541987) time=2010-11-03 00:27:34
  name=FILE2.TXT
  comment='file2comment'

Binary file not shown.

@@ -1,14 +0,0 @@
Archive: files/rar202-comment-psw.rar
  comment='RARcomment'
FILE: hdrlen=66 datlen=32 hdr_unknown=25
  flags=0x800c:PASSWORD,COMMENT,LONG,D64
  os=0:DOS ver=20 mode=0x20 meth=3 cmp=32 dec=7 vol=0
  crc=0x7a197dba (2048490938) time=2010-11-03 00:27:28
  name=FILE1.TXT
  comment='file1comment'
FILE: hdrlen=66 datlen=32 hdr_unknown=25
  flags=0x800c:PASSWORD,COMMENT,LONG,D64
  os=0:DOS ver=20 mode=0x20 meth=3 cmp=32 dec=7 vol=0
  crc=0x785fc3e3 (2019541987) time=2010-11-03 00:27:34
  name=FILE2.TXT
  comment='file2comment'

Binary file not shown.

@@ -1,16 +0,0 @@
Archive: files/rar3-comment-hpsw.rar
  comment='RARcomment\n'
FILE: hdrlen=51 datlen=16 hdr_unknown=0
  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
  crc=0x00000000 (0) time=2010-11-02 10:03:25
  name=file1.txt
  mtime=2010-11-02 10:03:25
  comment='Comment1v2\n'
FILE: hdrlen=51 datlen=16 hdr_unknown=0
  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
  crc=0x00000000 (0) time=2010-11-02 10:03:25
  name=file2.txt
  mtime=2010-11-02 10:03:25
  comment='Comment2v2\n'

Binary file not shown.

@@ -1,16 +0,0 @@
Archive: files/rar3-comment-plain.rar
  comment='RARcomment\n'
FILE: hdrlen=43 datlen=8 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=8 dec=0 vol=0
  crc=0x00000000 (0) time=2010-11-02 10:03:25
  name=file1.txt
  mtime=2010-11-02 10:03:25
  comment='Comment1v2\n'
FILE: hdrlen=43 datlen=8 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=8 dec=0 vol=0
  crc=0x00000000 (0) time=2010-11-02 10:03:25
  name=file2.txt
  mtime=2010-11-02 10:03:25
  comment='Comment2v2\n'

Binary file not shown.

@@ -1,16 +0,0 @@
Archive: files/rar3-comment-psw.rar
  comment='RARcomment\n'
FILE: hdrlen=51 datlen=16 hdr_unknown=0
  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
  crc=0x00000000 (0) time=2010-11-02 10:03:25
  name=file1.txt
  mtime=2010-11-02 10:03:25
  comment='Comment1v2\n'
FILE: hdrlen=51 datlen=16 hdr_unknown=0
  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
  crc=0x00000000 (0) time=2010-11-02 10:03:25
  name=file2.txt
  mtime=2010-11-02 10:03:25
  comment='Comment2v2\n'

Binary file not shown.

@@ -1,13 +0,0 @@
Archive: files/seektest.rar
FILE: hdrlen=44 datlen=90 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=3:UNIX ver=29 mode=0100644 meth=5 cmp=90 dec=2048 vol=0
  crc=0xc5b7e6a2 (3317163682) time=2011-06-12 12:53:33
  name=stest1.txt
  mtime=2011-06-12 12:53:33
FILE: hdrlen=44 datlen=2048 hdr_unknown=0
  flags=0x9020:EXTTIME,LONG,D128
  os=3:UNIX ver=20 mode=0100644 meth=0 cmp=2048 dec=2048 vol=0
  crc=0xc5b7e6a2 (3317163682) time=2011-06-12 12:53:33
  name=stest2.txt
  mtime=2011-06-12 12:53:33

Binary file not shown.

@@ -1,11 +0,0 @@
Archive: files/unicode.rar
FILE: hdrlen=54 datlen=17 hdr_unknown=0
  flags=0x8080:LONG,D1024
  os=3:UNIX ver=29 mode=0100644 meth=5 cmp=17 dec=2 vol=0
  crc=0x6751fc53 (1733426259) time=2011-07-06 16:48:04
  name=уииоотивл.txt
FILE: hdrlen=52 datlen=13 hdr_unknown=0
  flags=0x8090:SOLID,LONG,D1024
  os=3:UNIX ver=29 mode=0100644 meth=5 cmp=13 dec=2 vol=0
  crc=0x6751fc53 (1733426259) time=2011-07-06 16:48:04
  name=𝐀𝐁𝐁𝐂.txt

@@ -1,32 +0,0 @@
#! /bin/sh

PYTHONPATH=..:$PYTHONPATH
export PYTHONPATH

JAVA_OPTIONS="-Dpython.path=`pwd`/.."
export JAVA_OPTIONS

plist="python2.7 python3.2 python3.3 python3.4 python3.5 python3.6 pypy jython jython2.7"

rm -f test.diffs

for py in $plist; do
	if which $py > /dev/null; then
		for f in files/*.rar; do
			printf "%s -> %-30s .. " $py $f
			$py ../dumprar.py -t -t -v -ppassword $f > $f.$py
			if diff -uw $f.exp $f.$py > /dev/null; then
				echo "ok"
			else
				echo "FAIL"
				echo "#### $py ####" >> test.diffs
				diff -uw $f.exp $f.$py >> test.diffs
			fi
		done
		echo ""
	else
		echo $py not available
		echo ""
	fi
done

@@ -1,19 +0,0 @@
#! /bin/sh

cp ../rarfile.py .

#ulimit -n 16

plist="python2.7 python3.2 python3.3 python3.4 python3.5 python3.6 pypy jython jython2.7"

for py in $plist; do
	if which $py > /dev/null; then
		echo "== $py =="
		$py ./testseek.py
		$py ./testio.py
		$py ./testcorrupt.py --quick
	fi
done

rm -f rarfile.py

@@ -1,85 +0,0 @@
#! /usr/bin/env python

import rarfile
import sys, os, time
import tempfile

def progress():
    sys.stdout.write('.')
    sys.stdout.flush()

def try_read(tmpfn):
    #progress()
    try:
        rf = rarfile.RarFile(tmpfn)
        if rf.needs_password():
            rf.setpassword('password')
    except rarfile.Error:
        return
    for fn in rf.namelist():
        try:
            data = rf.read(fn)
        except rarfile.Error:
            pass

def test_rar(rarfn):
    data = open(rarfn, "rb").read()

    fd, tmpfn = tempfile.mkstemp('.rar')
    os.close(fd)

    print('testcorrupt 1')
    for n in range(len(data)):
        bad = data[:n]
        f = open(tmpfn, 'wb')
        f.write(bad)
        f.close()

        try_read(tmpfn)

    print('testcorrupt 2')
    crap = rarfile.RAR_ID
    for n in range(1, len(data)):
        for i in range(len(crap)):
            c = crap[i:i+1]
            bad = data[:n - 1] + c + data[n:]
            f = open(tmpfn, 'wb')
            f.write(bad)
            f.close()
            try_read(tmpfn)

    os.unlink(tmpfn)

test_rar_list = [
    "files/ctime0.rar",
    "files/ctime1.rar",
    "files/ctime2.rar",
    "files/ctime3.rar",
    "files/ctime4.rar",
    "files/seektest.rar",
    "files/rar15-comment-lock.rar",
    "files/rar15-comment.rar",
    "files/rar202-comment-nopsw.rar",
    "files/rar202-comment-psw.rar",
    "files/rar3-comment-hpsw.rar",
    "files/rar3-comment-plain.rar",
    "files/rar3-comment-psw.rar",
    "files/unicode.rar",
]

def main():
    if sys.argv[-1] == '--quick':
        test_rar("files/rar3-comment-plain.rar")
        return
    for rar in test_rar_list:
        print(rar)
        test_rar(rar)

if __name__ == '__main__':
    try:
        main()
    except OSError:
        print('OSError: pid = %d' % os.getpid())
        time.sleep(80000)

@@ -1,35 +0,0 @@
#! /usr/bin/env python

import rarfile, os, os.path, time, sys

try:
    from io import BufferedReader, TextIOWrapper
except ImportError:
    print('no io module')
    sys.exit(0)
    # fallback shims (never reached after sys.exit above)
    def BufferedReader(x): return x
    def TextIOWrapper(x): return x

def test_readline(rf, fn):
    f = rf.open(fn)
    tr = TextIOWrapper(BufferedReader(f))
    while 1:
        ln = tr.readline()
        if not ln:
            break
    tr.close()

def main():
    files = ['stest1.txt', 'stest2.txt']
    arc = 'files/seektest.rar'

    rf = rarfile.RarFile(arc, crc_check=0)
    for fn in files:
        sys.stdout.write('test/readline: %s .. ' % fn)
        sys.stdout.flush()
        test_readline(rf, fn)
        print('ok')

if __name__ == '__main__':
    main()

@@ -1,103 +0,0 @@
#! /usr/bin/env python

import rarfile, os, os.path, time, sys

def show_fds():
    fdir = "/proc/%d/fd" % os.getpid()
    if os.path.isdir(fdir):
        os.system('printf "fds = "; ls -l %s | wc -l' % fdir)

def do_seek(f, pos, lim):
    ofs = pos*4
    fsize = lim*4

    if ofs < 0:
        exp = 0
    elif ofs > fsize:
        exp = fsize
    else:
        exp = ofs

    f.seek(ofs)

    got = f.tell()

    if got != exp:
        raise Exception('seek failed (got=%d, exp=%d)' % (got, exp))
    ln = f.read(4)
    if got == fsize and ln:
        raise Exception('unexpected read')
    if not ln and got < fsize:
        raise Exception('unexpected read failure')
    if ln:
        spos = int(ln)
        if spos*4 != got:
            raise Exception('unexpected pos: spos=%d pos=%d' % (spos, pos))

def test_seek(rf, fn):
    inf = rf.getinfo(fn)
    cnt = int(inf.file_size / 4)
    f = rf.open(fn)

    do_seek(f, int(cnt/2), cnt)
    do_seek(f, 0, cnt)

    for i in range(int(cnt/2)):
        do_seek(f, i*2, cnt)

    for i in range(cnt):
        do_seek(f, i*2 - int(cnt / 2), cnt)

    for i in range(cnt + 10):
        do_seek(f, cnt - i - 5, cnt)

    f.close()

    print('OK')

def test_arc(arc, desc):
    files = ['stest1.txt', 'stest2.txt']
    rf = rarfile.RarFile(arc, crc_check=0)
    for fn in files:
        sys.stdout.write('%s | test/seek %s .. ' % (desc, fn))
        sys.stdout.flush()
        test_seek(rf, fn)

def main():
    arc = 'files/seektest.rar'
    data = open(arc, 'rb').read()

    # filename
    test_arc(arc, "fn")

    # filelike: cStringIO
    try:
        import cStringIO
        test_arc(cStringIO.StringIO(data), "cStringIO")
    except ImportError:
        pass

    # filelike: io.BytesIO, io.open()
    try:
        import io
        test_arc(io.BytesIO(data), "io.BytesIO")
        test_arc(io.open(arc, 'rb'), "io.open")
    except ImportError:
        pass

    # filelike: StringIO
    try:
        import StringIO
        test_arc(StringIO.StringIO(data), "StringIO")
    except ImportError:
        pass

    # filelike: file()
    test_arc(open(arc, 'rb'), "file")

    time.sleep(1)
    show_fds()

if __name__ == '__main__':
    main()

@@ -2,25 +2,25 @@
# Copyright (c) 2008-2013 Erik Svensson <erik.public@gmail.com>
# Licensed under the MIT license.

import re, time, operator, warnings, os
import base64
import json
import operator
import os
import re
import time
import warnings

from six import PY3, integer_types, iteritems, string_types
from six.moves.urllib_parse import urlparse
from six.moves.urllib_request import urlopen
from transmissionrpc.constants import DEFAULT_PORT, DEFAULT_TIMEOUT
from transmissionrpc.error import TransmissionError, HTTPHandlerError
from transmissionrpc.utils import LOGGER, get_arguments, make_rpc_name, argument_value_convert, rpc_bool
from transmissionrpc.httphandler import DefaultHTTPHandler
from transmissionrpc.torrent import Torrent
from transmissionrpc.session import Session

from .constants import DEFAULT_PORT, DEFAULT_TIMEOUT
from .error import HTTPHandlerError, TransmissionError
from .httphandler import DefaultHTTPHandler
from .session import Session
from .torrent import Torrent
from .utils import LOGGER, argument_value_convert, get_arguments, make_rpc_name, rpc_bool
from six import PY3, integer_types, string_types, iteritems

if PY3:
    from urllib.parse import urlparse
    from urllib.request import urlopen
else:
    from urlparse import urlparse
    from urllib2 import urlopen

def debug_httperror(error):
    """

@@ -45,7 +45,6 @@ def debug_httperror(error):
        )
    )


def parse_torrent_id(arg):
    """Parse a torrent id or torrent hashString."""
    torrent_id = None

@@ -59,7 +58,7 @@ def parse_torrent_id(arg):
    elif isinstance(arg, string_types):
        try:
            torrent_id = int(arg)
            if torrent_id >= 2 ** 31:
            if torrent_id >= 2**31:
                torrent_id = None
        except (ValueError, TypeError):
            pass

@@ -72,7 +71,6 @@ def parse_torrent_id(arg):
            pass
    return torrent_id


def parse_torrent_ids(args):
    """
    Take things and make them valid torrent identifiers.

@@ -100,20 +98,19 @@ def parse_torrent_ids(args):
            except ValueError:
                pass
            if not addition:
                raise ValueError('Invalid torrent id, {item!r}'.format(item=item))
                raise ValueError('Invalid torrent id, \"%s\"' % item)
            ids.extend(addition)
    elif isinstance(args, (list, tuple)):
        for item in args:
            ids.extend(parse_torrent_ids(item))
    else:
        torrent_id = parse_torrent_id(args)
        if torrent_id is None:
        if torrent_id == None:
            raise ValueError('Invalid torrent id')
        else:
            ids = [torrent_id]
    return ids


"""
Torrent ids

@@ -128,27 +125,26 @@ possible to provide an argument called ``timeout``. Timeout is only effective
when using Python 2.6 or later and the default timeout is 30 seconds.
"""
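# A brief usage sketch for the Client class defined below (host and
# credentials are assumed placeholders; add_torrent is defined later in
# this file):
#
#   import transmissionrpc
#   client = transmissionrpc.Client('localhost', port=9091,
#                                   user='admin', password='secret', timeout=30.0)
#   client.add_torrent('http://example.com/example.torrent')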
class Client(object):
    """
    Client is the class handling the Transmission JSON-RPC client protocol.
    """

    def __init__(self, address='localhost', port=DEFAULT_PORT, user=None, password=None, http_handler=None,
                 timeout=None):
    def __init__(self, address='localhost', port=DEFAULT_PORT, user=None, password=None, http_handler=None, timeout=None):
        if isinstance(timeout, (integer_types, float)):
            self._query_timeout = float(timeout)
        else:
            self._query_timeout = DEFAULT_TIMEOUT
        urlo = urlparse(address)
        if not urlo.scheme:
            self.url = 'http://{host}:{port}/transmission/rpc/'.format(host=address, port=port)
        if urlo.scheme == '':
            base_url = 'http://' + address + ':' + str(port)
            self.url = base_url + '/transmission/rpc'
        else:
            if urlo.port:
                self.url = '{url.scheme}://{url.hostname}:{url.port}{url.path}'.format(url=urlo)
                self.url = urlo.scheme + '://' + urlo.hostname + ':' + str(urlo.port) + urlo.path
            else:
                self.url = '{url.scheme}://{url.hostname}{url.path}'.format(url=urlo)
            LOGGER.info('Using custom URL {url!r}.'.format(url=self.url))
                self.url = urlo.scheme + '://' + urlo.hostname + urlo.path
            LOGGER.info('Using custom URL "' + self.url + '".')
        if urlo.username and urlo.password:
            user = urlo.username
            password = urlo.password

@@ -204,8 +200,7 @@ class Client(object):
        if timeout is None:
            timeout = self._query_timeout
        while True:
            LOGGER.debug(
                json.dumps({'url': self.url, 'headers': headers, 'query': query, 'timeout': timeout}, indent=2))
            LOGGER.debug(json.dumps({'url': self.url, 'headers': headers, 'query': query, 'timeout': timeout}, indent=2))
            try:
                result = self.http_handler.request(self.url, query, headers, timeout)
                break

@@ -245,25 +240,26 @@ class Client(object):
        elif require_ids:
            raise ValueError('request require ids')

        query = json.dumps({'tag': self._sequence, 'method': method, 'arguments': arguments})
        query = json.dumps({'tag': self._sequence, 'method': method
                            , 'arguments': arguments})
        self._sequence += 1
        start = time.time()
        http_data = self._http_query(query, timeout)
        elapsed = time.time() - start
        LOGGER.info('http request took {time:.3f} s'.format(time=elapsed))
        LOGGER.info('http request took %.3f s' % (elapsed))

        try:
            data = json.loads(http_data)
        except ValueError as error:
            LOGGER.error('Error: {msg}'.format(msg=error))
            LOGGER.error('Request: {request!r}'.format(request=query))
            LOGGER.error('HTTP data: {data!r}'.format(data=http_data))
            LOGGER.error('Error: ' + str(error))
            LOGGER.error('Request: \"%s\"' % (query))
            LOGGER.error('HTTP data: \"%s\"' % (http_data))
            raise

        LOGGER.debug(json.dumps(data, indent=2))
        if 'result' in data:
            if data['result'] != 'success':
                raise TransmissionError('Query failed with result {result!r}.'.format(result=data['result']))
                raise TransmissionError('Query failed with result \"%s\".' % (data['result']))
        else:
            raise TransmissionError('Query failed without result.')

@@ -347,9 +343,8 @@ class Client(object):
        Add a warning to the log if the Transmission RPC version is lower than the provided version.
        """
        if self.rpc_version < version:
            LOGGER.warning('Using feature not supported by server. '
                           'RPC version for server {x}, feature introduced in {y}.'.format
                           (x=self.rpc_version, y=version))
            LOGGER.warning('Using feature not supported by server. RPC version for server %d, feature introduced in %d.'
                           % (self.rpc_version, version))

    def add_torrent(self, torrent, timeout=None, **kwargs):
        """

@@ -409,8 +404,11 @@ class Client(object):
            pass
        if might_be_base64:
            torrent_data = torrent

        args = {'metainfo': torrent_data} if torrent_data else {'filename': torrent}
        args = {}
        if torrent_data:
            args = {'metainfo': torrent_data}
        else:
            args = {'filename': torrent}
        for key, value in iteritems(kwargs):
            argument = make_rpc_name(key)
            (arg, val) = argument_value_convert('torrent-add', argument, value, self.rpc_version)

@@ -474,7 +472,7 @@ class Client(object):
        """
        self._rpc_version_warning(3)
        self._request('torrent-remove',
                      {'delete-local-data': rpc_bool(delete_data)}, ids, True, timeout=timeout)
                      {'delete-local-data':rpc_bool(delete_data)}, ids, True, timeout=timeout)

    def remove(self, ids, delete_data=False, timeout=None):
        """

@@ -604,34 +602,34 @@ class Client(object):
        the new methods. list returns a dictionary indexed by torrent id.
        """
        warnings.warn('list has been deprecated, please use get_torrent or get_torrents instead.', DeprecationWarning)
        fields = ['id', 'hashString', 'name', 'sizeWhenDone', 'leftUntilDone',
                  'eta', 'status', 'rateUpload', 'rateDownload', 'uploadedEver',
                  'downloadedEver', 'uploadRatio', 'queuePosition']
        fields = ['id', 'hashString', 'name', 'sizeWhenDone', 'leftUntilDone'
                  , 'eta', 'status', 'rateUpload', 'rateDownload', 'uploadedEver'
                  , 'downloadedEver', 'uploadRatio', 'queuePosition']
        return self._request('torrent-get', {'fields': fields}, timeout=timeout)

    def get_files(self, ids=None, timeout=None):
        """
        Get list of files for provided torrent id(s). If ids is empty,
        information for all torrents is fetched. This function returns a dictionary
        for each requested torrent id holding the information about the files.

        ::

            {
                <torrent id>: {
                    <file id>: {
                        'name': <file name>,
                        'size': <file size in bytes>,
                        'completed': <bytes completed>,
                        'priority': <priority ('high'|'normal'|'low')>,
                        'selected': <selected for download (True|False)>
                    }

                    ...
                }

                ...
            }
        """
        fields = ['id', 'name', 'hashString', 'files', 'priorities', 'wanted']
        request_result = self._request('torrent-get', {'fields': fields}, ids, timeout=timeout)

@@ -643,22 +641,22 @@ class Client(object):
    def set_files(self, items, timeout=None):
        """
        Set file properties. Takes a dictionary with similar contents as the result
        of `get_files`.

        ::

            {
                <torrent id>: {
                    <file id>: {
                        'priority': <priority ('high'|'normal'|'low')>,
                        'selected': <selected for download (True|False)>
                    }

                    ...
                }

                ...
            }
        """
        if not isinstance(items, dict):
            raise ValueError('Invalid file description')

@@ -701,8 +699,8 @@ class Client(object):

    def change_torrent(self, ids, timeout=None, **kwargs):
        """
        Change torrent parameters for the torrent(s) with the supplied id's. The
        parameters are:

        ============================ ===== =============== =======================================================================================
        Argument                     RPC   Replaced by     Description

@@ -734,13 +732,13 @@ class Client(object):
        ``uploadLimited``            5     -               Enable upload speed limiter.
        ============================ ===== =============== =======================================================================================

        .. NOTE::
           transmissionrpc will try to automatically fix argument errors.
        """
        args = {}
        for key, value in iteritems(kwargs):
            argument = make_rpc_name(key)
            (arg, val) = argument_value_convert('torrent-set', argument, value, self.rpc_version)
            (arg, val) = argument_value_convert('torrent-set' , argument, value, self.rpc_version)
            args[arg] = val

        if len(args) > 0:

@@ -801,7 +799,7 @@ class Client(object):
            raise ValueError("Target name cannot contain a path delimiter")
        args = {'path': location, 'name': name}
        result = self._request('torrent-rename-path', args, torrent_id, True, timeout=timeout)
        return result['path'], result['name']
        return (result['path'], result['name'])

    def queue_top(self, ids, timeout=None):
        """Move transfer to the top of the queue."""

@@ -886,14 +884,14 @@ class Client(object):
        ================================ ===== ================= ==========================================================================================================================

        .. NOTE::
           transmissionrpc will try to automatically fix argument errors.
        """
        args = {}
        for key, value in iteritems(kwargs):
            if key == 'encryption' and value not in ['required', 'preferred', 'tolerated']:
                raise ValueError('Invalid encryption value')
            argument = make_rpc_name(key)
            (arg, val) = argument_value_convert('session-set', argument, value, self.rpc_version)
            (arg, val) = argument_value_convert('session-set' , argument, value, self.rpc_version)
            args[arg] = val
        if len(args) > 0:
            self._request('session-set', args, timeout=timeout)

@@ -3,13 +3,11 @@
# Licensed under the MIT license.

import logging

from six import iteritems

LOGGER = logging.getLogger('transmissionrpc')
LOGGER.setLevel(logging.ERROR)


def mirror_dict(source):
    """
    Creates a dictionary with all values as keys and all keys as values.

@@ -17,39 +15,38 @@ def mirror_dict(source):
    source.update(dict((value, key) for key, value in iteritems(source)))
    return source
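# Illustrative example of mirror_dict (the input value is assumed):
#   mirror_dict({'low': -1}) returns {'low': -1, -1: 'low'}
# so the PRIORITY/RATIO_LIMIT/IDLE_LIMIT maps below can be looked up
# by name or by number.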
|
||||
|
||||
DEFAULT_PORT = 9091
|
||||
|
||||
DEFAULT_TIMEOUT = 30.0
|
||||
|
||||
TR_PRI_LOW = -1
|
||||
TR_PRI_NORMAL = 0
|
||||
TR_PRI_HIGH = 1
|
||||
TR_PRI_LOW = -1
|
||||
TR_PRI_NORMAL = 0
|
||||
TR_PRI_HIGH = 1
|
||||
|
||||
PRIORITY = mirror_dict({
|
||||
'low': TR_PRI_LOW,
|
||||
'normal': TR_PRI_NORMAL,
|
||||
'high': TR_PRI_HIGH
|
||||
'low' : TR_PRI_LOW,
|
||||
'normal' : TR_PRI_NORMAL,
|
||||
'high' : TR_PRI_HIGH
|
||||
})
|
||||
|
||||
TR_RATIOLIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_RATIOLIMIT_SINGLE = 1 # override the global settings, seeding until a certain ratio
|
||||
TR_RATIOLIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of ratio
|
||||
TR_RATIOLIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_RATIOLIMIT_SINGLE = 1 # override the global settings, seeding until a certain ratio
|
||||
TR_RATIOLIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of ratio
|
||||
|
||||
RATIO_LIMIT = mirror_dict({
|
||||
'global': TR_RATIOLIMIT_GLOBAL,
|
||||
'single': TR_RATIOLIMIT_SINGLE,
|
||||
'unlimited': TR_RATIOLIMIT_UNLIMITED
|
||||
'global' : TR_RATIOLIMIT_GLOBAL,
|
||||
'single' : TR_RATIOLIMIT_SINGLE,
|
||||
'unlimited' : TR_RATIOLIMIT_UNLIMITED
|
||||
})
|
||||
|
||||
TR_IDLELIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_IDLELIMIT_SINGLE = 1 # override the global settings, seeding until a certain idle time
|
||||
TR_IDLELIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of activity
|
||||
TR_IDLELIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_IDLELIMIT_SINGLE = 1 # override the global settings, seeding until a certain idle time
|
||||
TR_IDLELIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of activity
|
||||
|
||||
IDLE_LIMIT = mirror_dict({
|
||||
'global': TR_RATIOLIMIT_GLOBAL,
|
||||
'single': TR_RATIOLIMIT_SINGLE,
|
||||
'unlimited': TR_RATIOLIMIT_UNLIMITED
|
||||
'global' : TR_RATIOLIMIT_GLOBAL,
|
||||
'single' : TR_RATIOLIMIT_SINGLE,
|
||||
'unlimited' : TR_RATIOLIMIT_UNLIMITED
|
||||
})

# A note on argument maps

@@ -63,266 +60,236 @@ IDLE_LIMIT = mirror_dict({

# Arguments for torrent methods
TORRENT_ARGS = {
    'get': {
        'activityDate': ('number', 1, None, None, None, 'Last time of upload or download activity.'),
        'addedDate': ('number', 1, None, None, None, 'The date when this torrent was first added.'),
        'announceResponse': ('string', 1, 7, None, None, 'The announce message from the tracker.'),
        'announceURL': ('string', 1, 7, None, None, 'Current announce URL.'),
        'bandwidthPriority': ('number', 5, None, None, None, 'Bandwidth priority. Low (-1), Normal (0) or High (1).'),
        'comment': ('string', 1, None, None, None, 'Torrent comment.'),
        'corruptEver': ('number', 1, None, None, None, 'Number of bytes of corrupt data downloaded.'),
        'creator': ('string', 1, None, None, None, 'Torrent creator.'),
        'dateCreated': ('number', 1, None, None, None, 'Torrent creation date.'),
        'desiredAvailable': ('number', 1, None, None, None, 'Number of bytes avalable and left to be downloaded.'),
        'doneDate': ('number', 1, None, None, None, 'The date when the torrent finished downloading.'),
        'downloadDir': ('string', 4, None, None, None, 'The directory path where the torrent is downloaded to.'),
        'downloadedEver': ('number', 1, None, None, None, 'Number of bytes of good data downloaded.'),
        'downloaders': ('number', 4, 7, None, None, 'Number of downloaders.'),
        'downloadLimit': ('number', 1, None, None, None, 'Download limit in Kbps.'),
        'downloadLimited': ('boolean', 5, None, None, None, 'Download limit is enabled'),
        'downloadLimitMode': (
            'number', 1, 5, None, None, 'Download limit mode. 0 means global, 1 means signle, 2 unlimited.'),
        'error': ('number', 1, None, None, None,
                  'Kind of error. 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.'),
        'errorString': ('number', 1, None, None, None, 'Error message.'),
        'eta': ('number', 1, None, None, None,
                'Estimated number of seconds left when downloading or seeding. -1 means not available and -2 means unknown.'),
        'etaIdle': ('number', 15, None, None, None,
                    'Estimated number of seconds left until the idle time limit is reached. -1 means not available and -2 means unknown.'),
        'files': (
            'array', 1, None, None, None, 'Array of file object containing key, bytesCompleted, length and name.'),
        'fileStats': (
            'array', 5, None, None, None, 'Aray of file statistics containing bytesCompleted, wanted and priority.'),
        'hashString': ('string', 1, None, None, None, 'Hashstring unique for the torrent even between sessions.'),
        'haveUnchecked': ('number', 1, None, None, None, 'Number of bytes of partial pieces.'),
        'haveValid': ('number', 1, None, None, None, 'Number of bytes of checksum verified data.'),
        'honorsSessionLimits': ('boolean', 5, None, None, None, 'True if session upload limits are honored'),
        'id': ('number', 1, None, None, None, 'Session unique torrent id.'),
        'isFinished': ('boolean', 9, None, None, None, 'True if the torrent is finished. Downloaded and seeded.'),
        'isPrivate': ('boolean', 1, None, None, None, 'True if the torrent is private.'),
        'isStalled': ('boolean', 14, None, None, None, 'True if the torrent has stalled (been idle for a long time).'),
        'lastAnnounceTime': ('number', 1, 7, None, None, 'The time of the last announcement.'),
        'lastScrapeTime': ('number', 1, 7, None, None, 'The time af the last successful scrape.'),
        'leechers': ('number', 1, 7, None, None, 'Number of leechers.'),
        'leftUntilDone': ('number', 1, None, None, None, 'Number of bytes left until the download is done.'),
        'magnetLink': ('string', 7, None, None, None, 'The magnet link for this torrent.'),
        'manualAnnounceTime': ('number', 1, None, None, None, 'The time until you manually ask for more peers.'),
        'maxConnectedPeers': ('number', 1, None, None, None, 'Maximum of connected peers.'),
        'metadataPercentComplete': ('number', 7, None, None, None, 'Download progress of metadata. 0.0 to 1.0.'),
        'name': ('string', 1, None, None, None, 'Torrent name.'),
        'nextAnnounceTime': ('number', 1, 7, None, None, 'Next announce time.'),
        'nextScrapeTime': ('number', 1, 7, None, None, 'Next scrape time.'),
        'peer-limit': ('number', 5, None, None, None, 'Maximum number of peers.'),
        'peers': ('array', 2, None, None, None, 'Array of peer objects.'),
        'peersConnected': ('number', 1, None, None, None, 'Number of peers we are connected to.'),
        'peersFrom': (
            'object', 1, None, None, None, 'Object containing download peers counts for different peer types.'),
        'peersGettingFromUs': ('number', 1, None, None, None, 'Number of peers we are sending data to.'),
        'peersKnown': ('number', 1, 13, None, None, 'Number of peers that the tracker knows.'),
        'peersSendingToUs': ('number', 1, None, None, None, 'Number of peers sending to us'),
        'percentDone': ('double', 5, None, None, None, 'Download progress of selected files. 0.0 to 1.0.'),
        'pieces': ('string', 5, None, None, None, 'String with base64 encoded bitfield indicating finished pieces.'),
        'pieceCount': ('number', 1, None, None, None, 'Number of pieces.'),
        'pieceSize': ('number', 1, None, None, None, 'Number of bytes in a piece.'),
        'priorities': ('array', 1, None, None, None, 'Array of file priorities.'),
        'queuePosition': ('number', 14, None, None, None, 'The queue position.'),
        'rateDownload': ('number', 1, None, None, None, 'Download rate in bps.'),
        'rateUpload': ('number', 1, None, None, None, 'Upload rate in bps.'),
        'recheckProgress': ('double', 1, None, None, None, 'Progress of recheck. 0.0 to 1.0.'),
        'secondsDownloading': ('number', 15, None, None, None, ''),
        'secondsSeeding': ('number', 15, None, None, None, ''),
        'scrapeResponse': ('string', 1, 7, None, None, 'Scrape response message.'),
        'scrapeURL': ('string', 1, 7, None, None, 'Current scrape URL'),
        'seeders': ('number', 1, 7, None, None, 'Number of seeders reported by the tracker.'),
        'seedIdleLimit': ('number', 10, None, None, None, 'Idle limit in minutes.'),
        'seedIdleMode': ('number', 10, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
        'seedRatioLimit': ('double', 5, None, None, None, 'Seed ratio limit.'),
        'seedRatioMode': ('number', 5, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
        'sizeWhenDone': ('number', 1, None, None, None, 'Size of the torrent download in bytes.'),
        'startDate': ('number', 1, None, None, None, 'The date when the torrent was last started.'),
        'status': ('number', 1, None, None, None, 'Current status, see source'),
        'swarmSpeed': ('number', 1, 7, None, None, 'Estimated speed in Kbps in the swarm.'),
        'timesCompleted': ('number', 1, 7, None, None, 'Number of successful downloads reported by the tracker.'),
        'trackers': ('array', 1, None, None, None, 'Array of tracker objects.'),
        'trackerStats': ('object', 7, None, None, None, 'Array of object containing tracker statistics.'),
        'totalSize': ('number', 1, None, None, None, 'Total size of the torrent in bytes'),
        'torrentFile': ('string', 5, None, None, None, 'Path to .torrent file.'),
        'uploadedEver': ('number', 1, None, None, None, 'Number of bytes uploaded, ever.'),
        'uploadLimit': ('number', 1, None, None, None, 'Upload limit in Kbps'),
        'uploadLimitMode': (
            'number', 1, 5, None, None, 'Upload limit mode. 0 means global, 1 means signle, 2 unlimited.'),
        'uploadLimited': ('boolean', 5, None, None, None, 'Upload limit enabled.'),
        'uploadRatio': ('double', 1, None, None, None, 'Seed ratio.'),
        'wanted': ('array', 1, None, None, None, 'Array of booleans indicated wanted files.'),
        'webseeds': ('array', 1, None, None, None, 'Array of webseeds objects'),
        'webseedsSendingToUs': ('number', 1, None, None, None, 'Number of webseeds seeding to us.'),
    'get' : {
        'activityDate': ('number', 1, None, None, None, 'Last time of upload or download activity.'),
        'addedDate': ('number', 1, None, None, None, 'The date when this torrent was first added.'),
        'announceResponse': ('string', 1, 7, None, None, 'The announce message from the tracker.'),
        'announceURL': ('string', 1, 7, None, None, 'Current announce URL.'),
        'bandwidthPriority': ('number', 5, None, None, None, 'Bandwidth priority. Low (-1), Normal (0) or High (1).'),
        'comment': ('string', 1, None, None, None, 'Torrent comment.'),
        'corruptEver': ('number', 1, None, None, None, 'Number of bytes of corrupt data downloaded.'),
        'creator': ('string', 1, None, None, None, 'Torrent creator.'),
        'dateCreated': ('number', 1, None, None, None, 'Torrent creation date.'),
        'desiredAvailable': ('number', 1, None, None, None, 'Number of bytes avalable and left to be downloaded.'),
        'doneDate': ('number', 1, None, None, None, 'The date when the torrent finished downloading.'),
        'downloadDir': ('string', 4, None, None, None, 'The directory path where the torrent is downloaded to.'),
        'downloadedEver': ('number', 1, None, None, None, 'Number of bytes of good data downloaded.'),
        'downloaders': ('number', 4, 7, None, None, 'Number of downloaders.'),
        'downloadLimit': ('number', 1, None, None, None, 'Download limit in Kbps.'),
        'downloadLimited': ('boolean', 5, None, None, None, 'Download limit is enabled'),
        'downloadLimitMode': ('number', 1, 5, None, None, 'Download limit mode. 0 means global, 1 means signle, 2 unlimited.'),
        'error': ('number', 1, None, None, None, 'Kind of error. 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.'),
        'errorString': ('number', 1, None, None, None, 'Error message.'),
        'eta': ('number', 1, None, None, None, 'Estimated number of seconds left when downloading or seeding. -1 means not available and -2 means unknown.'),
        'etaIdle': ('number', 15, None, None, None, 'Estimated number of seconds left until the idle time limit is reached. -1 means not available and -2 means unknown.'),
        'files': ('array', 1, None, None, None, 'Array of file object containing key, bytesCompleted, length and name.'),
        'fileStats': ('array', 5, None, None, None, 'Aray of file statistics containing bytesCompleted, wanted and priority.'),
        'hashString': ('string', 1, None, None, None, 'Hashstring unique for the torrent even between sessions.'),
        'haveUnchecked': ('number', 1, None, None, None, 'Number of bytes of partial pieces.'),
        'haveValid': ('number', 1, None, None, None, 'Number of bytes of checksum verified data.'),
        'honorsSessionLimits': ('boolean', 5, None, None, None, 'True if session upload limits are honored'),
        'id': ('number', 1, None, None, None, 'Session unique torrent id.'),
        'isFinished': ('boolean', 9, None, None, None, 'True if the torrent is finished. Downloaded and seeded.'),
        'isPrivate': ('boolean', 1, None, None, None, 'True if the torrent is private.'),
        'isStalled': ('boolean', 14, None, None, None, 'True if the torrent has stalled (been idle for a long time).'),
        'lastAnnounceTime': ('number', 1, 7, None, None, 'The time of the last announcement.'),
        'lastScrapeTime': ('number', 1, 7, None, None, 'The time af the last successful scrape.'),
        'leechers': ('number', 1, 7, None, None, 'Number of leechers.'),
        'leftUntilDone': ('number', 1, None, None, None, 'Number of bytes left until the download is done.'),
        'magnetLink': ('string', 7, None, None, None, 'The magnet link for this torrent.'),
        'manualAnnounceTime': ('number', 1, None, None, None, 'The time until you manually ask for more peers.'),
        'maxConnectedPeers': ('number', 1, None, None, None, 'Maximum of connected peers.'),
        'metadataPercentComplete': ('number', 7, None, None, None, 'Download progress of metadata. 0.0 to 1.0.'),
        'name': ('string', 1, None, None, None, 'Torrent name.'),
        'nextAnnounceTime': ('number', 1, 7, None, None, 'Next announce time.'),
        'nextScrapeTime': ('number', 1, 7, None, None, 'Next scrape time.'),
        'peer-limit': ('number', 5, None, None, None, 'Maximum number of peers.'),
        'peers': ('array', 2, None, None, None, 'Array of peer objects.'),
        'peersConnected': ('number', 1, None, None, None, 'Number of peers we are connected to.'),
        'peersFrom': ('object', 1, None, None, None, 'Object containing download peers counts for different peer types.'),
        'peersGettingFromUs': ('number', 1, None, None, None, 'Number of peers we are sending data to.'),
        'peersKnown': ('number', 1, 13, None, None, 'Number of peers that the tracker knows.'),
        'peersSendingToUs': ('number', 1, None, None, None, 'Number of peers sending to us'),
        'percentDone': ('double', 5, None, None, None, 'Download progress of selected files. 0.0 to 1.0.'),
        'pieces': ('string', 5, None, None, None, 'String with base64 encoded bitfield indicating finished pieces.'),
        'pieceCount': ('number', 1, None, None, None, 'Number of pieces.'),
        'pieceSize': ('number', 1, None, None, None, 'Number of bytes in a piece.'),
        'priorities': ('array', 1, None, None, None, 'Array of file priorities.'),
        'queuePosition': ('number', 14, None, None, None, 'The queue position.'),
        'rateDownload': ('number', 1, None, None, None, 'Download rate in bps.'),
        'rateUpload': ('number', 1, None, None, None, 'Upload rate in bps.'),
        'recheckProgress': ('double', 1, None, None, None, 'Progress of recheck. 0.0 to 1.0.'),
        'secondsDownloading': ('number', 15, None, None, None, ''),
        'secondsSeeding': ('number', 15, None, None, None, ''),
        'scrapeResponse': ('string', 1, 7, None, None, 'Scrape response message.'),
        'scrapeURL': ('string', 1, 7, None, None, 'Current scrape URL'),
        'seeders': ('number', 1, 7, None, None, 'Number of seeders reported by the tracker.'),
        'seedIdleLimit': ('number', 10, None, None, None, 'Idle limit in minutes.'),
        'seedIdleMode': ('number', 10, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
        'seedRatioLimit': ('double', 5, None, None, None, 'Seed ratio limit.'),
        'seedRatioMode': ('number', 5, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
        'sizeWhenDone': ('number', 1, None, None, None, 'Size of the torrent download in bytes.'),
        'startDate': ('number', 1, None, None, None, 'The date when the torrent was last started.'),
        'status': ('number', 1, None, None, None, 'Current status, see source'),
        'swarmSpeed': ('number', 1, 7, None, None, 'Estimated speed in Kbps in the swarm.'),
        'timesCompleted': ('number', 1, 7, None, None, 'Number of successful downloads reported by the tracker.'),
        'trackers': ('array', 1, None, None, None, 'Array of tracker objects.'),
        'trackerStats': ('object', 7, None, None, None, 'Array of object containing tracker statistics.'),
        'totalSize': ('number', 1, None, None, None, 'Total size of the torrent in bytes'),
        'torrentFile': ('string', 5, None, None, None, 'Path to .torrent file.'),
        'uploadedEver': ('number', 1, None, None, None, 'Number of bytes uploaded, ever.'),
        'uploadLimit': ('number', 1, None, None, None, 'Upload limit in Kbps'),
        'uploadLimitMode': ('number', 1, 5, None, None, 'Upload limit mode. 0 means global, 1 means signle, 2 unlimited.'),
        'uploadLimited': ('boolean', 5, None, None, None, 'Upload limit enabled.'),
        'uploadRatio': ('double', 1, None, None, None, 'Seed ratio.'),
        'wanted': ('array', 1, None, None, None, 'Array of booleans indicated wanted files.'),
        'webseeds': ('array', 1, None, None, None, 'Array of webseeds objects'),
        'webseedsSendingToUs': ('number', 1, None, None, None, 'Number of webseeds seeding to us.'),
    },
    'set': {
        'bandwidthPriority': ('number', 5, None, None, None, 'Priority for this transfer.'),
        'downloadLimit': ('number', 5, None, 'speed-limit-down', None, 'Set the speed limit for download in Kib/s.'),
        'downloadLimited': ('boolean', 5, None, 'speed-limit-down-enabled', None, 'Enable download speed limiter.'),
        'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
        'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
        'honorsSessionLimits': ('boolean', 5, None, None, None,
                                "Enables or disables the transfer to honour the upload limit set in the session."),
        'location': ('array', 1, None, None, None, 'Local download location.'),
        'peer-limit': ('number', 1, None, None, None, 'The peer limit for the torrents.'),
        'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
        'priority-low': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
        'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
        'queuePosition': ('number', 14, None, None, None, 'Position of this transfer in its queue.'),
        'seedIdleLimit': ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
        'seedIdleMode': ('number', 10, None, None, None,
                         'Seed inactivity mode. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
        'seedRatioLimit': ('double', 5, None, None, None, 'Seeding ratio.'),
        'seedRatioMode': ('number', 5, None, None, None,
                          'Which ratio to use. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
        'speed-limit-down': ('number', 1, 5, None, 'downloadLimit', 'Set the speed limit for download in Kib/s.'),
        'speed-limit-down-enabled': ('boolean', 1, 5, None, 'downloadLimited', 'Enable download speed limiter.'),
        'speed-limit-up': ('number', 1, 5, None, 'uploadLimit', 'Set the speed limit for upload in Kib/s.'),
        'speed-limit-up-enabled': ('boolean', 1, 5, None, 'uploadLimited', 'Enable upload speed limiter.'),
        'trackerAdd': ('array', 10, None, None, None, 'Array of string with announce URLs to add.'),
        'trackerRemove': ('array', 10, None, None, None, 'Array of ids of trackers to remove.'),
        'trackerReplace': (
            'array', 10, None, None, None, 'Array of (id, url) tuples where the announce URL should be replaced.'),
        'uploadLimit': ('number', 5, None, 'speed-limit-up', None, 'Set the speed limit for upload in Kib/s.'),
        'uploadLimited': ('boolean', 5, None, 'speed-limit-up-enabled', None, 'Enable upload speed limiter.'),
        'bandwidthPriority': ('number', 5, None, None, None, 'Priority for this transfer.'),
        'downloadLimit': ('number', 5, None, 'speed-limit-down', None, 'Set the speed limit for download in Kib/s.'),
        'downloadLimited': ('boolean', 5, None, 'speed-limit-down-enabled', None, 'Enable download speed limiter.'),
        'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
        'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
        'honorsSessionLimits': ('boolean', 5, None, None, None, "Enables or disables the transfer to honour the upload limit set in the session."),
        'location': ('array', 1, None, None, None, 'Local download location.'),
        'peer-limit': ('number', 1, None, None, None, 'The peer limit for the torrents.'),
        'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
        'priority-low': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
        'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
        'queuePosition': ('number', 14, None, None, None, 'Position of this transfer in its queue.'),
        'seedIdleLimit': ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
        'seedIdleMode': ('number', 10, None, None, None, 'Seed inactivity mode. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
        'seedRatioLimit': ('double', 5, None, None, None, 'Seeding ratio.'),
        'seedRatioMode': ('number', 5, None, None, None, 'Which ratio to use. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
        'speed-limit-down': ('number', 1, 5, None, 'downloadLimit', 'Set the speed limit for download in Kib/s.'),
        'speed-limit-down-enabled': ('boolean', 1, 5, None, 'downloadLimited', 'Enable download speed limiter.'),
        'speed-limit-up': ('number', 1, 5, None, 'uploadLimit', 'Set the speed limit for upload in Kib/s.'),
        'speed-limit-up-enabled': ('boolean', 1, 5, None, 'uploadLimited', 'Enable upload speed limiter.'),
        'trackerAdd': ('array', 10, None, None, None, 'Array of string with announce URLs to add.'),
        'trackerRemove': ('array', 10, None, None, None, 'Array of ids of trackers to remove.'),
        'trackerReplace': ('array', 10, None, None, None, 'Array of (id, url) tuples where the announce URL should be replaced.'),
        'uploadLimit': ('number', 5, None, 'speed-limit-up', None, 'Set the speed limit for upload in Kib/s.'),
        'uploadLimited': ('boolean', 5, None, 'speed-limit-up-enabled', None, 'Enable upload speed limiter.'),
    },
    'add': {
        'bandwidthPriority': ('number', 8, None, None, None, 'Priority for this transfer.'),
        'download-dir': (
            'string', 1, None, None, None, 'The directory where the downloaded contents will be saved in.'),
        'cookies': ('string', 13, None, None, None, 'One or more HTTP cookie(s).'),
        'filename': ('string', 1, None, None, None, "A file path or URL to a torrent file or a magnet link."),
        'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
        'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
        'metainfo': ('string', 1, None, None, None, 'The content of a torrent file, base64 encoded.'),
        'paused': ('boolean', 1, None, None, None, 'If True, does not start the transfer when added.'),
        'peer-limit': ('number', 1, None, None, None, 'Maximum number of peers allowed.'),
        'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
        'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
        'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
        'bandwidthPriority': ('number', 8, None, None, None, 'Priority for this transfer.'),
        'download-dir': ('string', 1, None, None, None, 'The directory where the downloaded contents will be saved in.'),
        'cookies': ('string', 13, None, None, None, 'One or more HTTP cookie(s).'),
        'filename': ('string', 1, None, None, None, "A file path or URL to a torrent file or a magnet link."),
        'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
        'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
        'metainfo': ('string', 1, None, None, None, 'The content of a torrent file, base64 encoded.'),
        'paused': ('boolean', 1, None, None, None, 'If True, does not start the transfer when added.'),
        'peer-limit': ('number', 1, None, None, None, 'Maximum number of peers allowed.'),
        'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
        'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
        'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
    }
}
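Each entry above is a 6-tuple. Judging from pairs such as 'speed-limit-down'/'downloadLimit' here (and 'peer-limit'/'peer-limit-global' in SESSION_ARGS below), the fields read, as a hedged sketch:

    # (argument type, RPC version where added, RPC version where removed,
    #  previous argument name, next argument name, description)
    TORRENT_ARGS['set']['speed-limit-down']
    # -> ('number', 1, 5, None, 'downloadLimit', 'Set the speed limit for download in Kib/s.')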

# Arguments for session methods
SESSION_ARGS = {
    'get': {
        "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
        "alt-speed-enabled": (
            'boolean', 5, None, None, None, 'True if alternate global download speed limiter is ebabled.'),
        "alt-speed-time-begin": (
            'number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
        "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'True if alternate speeds scheduling is enabled.'),
        "alt-speed-time-end": (
            'number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
        "alt-speed-time-day": ('number', 5, None, None, None, 'Days alternate speeds scheduling is enabled.'),
        "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s)'),
        "blocklist-enabled": ('boolean', 5, None, None, None, 'True when blocklist is enabled.'),
        "blocklist-size": ('number', 5, None, None, None, 'Number of rules in the blocklist'),
        "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
        "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
        "config-dir": ('string', 8, None, None, None, 'location of transmissions configuration directory'),
        "dht-enabled": ('boolean', 6, None, None, None, 'True if DHT enabled.'),
        "download-dir": ('string', 1, None, None, None, 'The download directory.'),
        "download-dir-free-space": ('number', 12, None, None, None, 'Free space in the download directory, in bytes'),
        "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
        "download-queue-enabled": ('boolean', 14, None, None, None, 'True if the download queue is enabled.'),
        "encryption": (
            'string', 1, None, None, None, 'Encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
        "idle-seeding-limit": ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
        "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'True if the seed activity limit is enabled.'),
        "incomplete-dir": (
            'string', 7, None, None, None, 'The path to the directory for incomplete torrent transfer data.'),
        "incomplete-dir-enabled": ('boolean', 7, None, None, None, 'True if the incomplete dir is enabled.'),
        "lpd-enabled": ('boolean', 9, None, None, None, 'True if local peer discovery is enabled.'),
        "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
        "peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
        "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
        "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'True if PEX is allowed.'),
        "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'True if PEX is enabled.'),
        "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
        "peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
        "peer-port-random-on-start": (
            'boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
        "port-forwarding-enabled": ('boolean', 1, None, None, None, 'True if port forwarding is enabled.'),
        "queue-stalled-minutes": (
            'number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
        "queue-stalled-enabled": ('boolean', 14, None, None, None, 'True if stalled tracking of transfers is enabled.'),
        "rename-partial-files": ('boolean', 8, None, None, None, 'True if ".part" is appended to incomplete files'),
        "rpc-version": ('number', 4, None, None, None, 'Transmission RPC API Version.'),
        "rpc-version-minimum": ('number', 4, None, None, None, 'Minimum accepted RPC API Version.'),
        "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'True if the done script is enabled.'),
        "script-torrent-done-filename": (
            'string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
        "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
        "seedRatioLimited": ('boolean', 5, None, None, None, 'True if seed ration limit is enabled.'),
        "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
        "seed-queue-enabled": ('boolean', 14, None, None, None, 'True if upload queue is enabled.'),
        "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
        "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'True if the download speed is limited.'),
        "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
        "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'True if the upload speed is limited.'),
        "start-added-torrents": ('boolean', 9, None, None, None, 'When true uploaded torrents will start right away.'),
        "trash-original-torrent-files": (
            'boolean', 9, None, None, None, 'When true added .torrent files will be deleted.'),
        'units': ('object', 10, None, None, None, 'An object containing units for size and speed.'),
        'utp-enabled': ('boolean', 13, None, None, None, 'True if Micro Transport Protocol (UTP) is enabled.'),
        "version": ('string', 3, None, None, None, 'Transmission version.'),
        "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
        "alt-speed-enabled": ('boolean', 5, None, None, None, 'True if alternate global download speed limiter is ebabled.'),
        "alt-speed-time-begin": ('number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
        "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'True if alternate speeds scheduling is enabled.'),
        "alt-speed-time-end": ('number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
        "alt-speed-time-day": ('number', 5, None, None, None, 'Days alternate speeds scheduling is enabled.'),
        "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s)'),
        "blocklist-enabled": ('boolean', 5, None, None, None, 'True when blocklist is enabled.'),
        "blocklist-size": ('number', 5, None, None, None, 'Number of rules in the blocklist'),
        "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
        "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
        "config-dir": ('string', 8, None, None, None, 'location of transmissions configuration directory'),
        "dht-enabled": ('boolean', 6, None, None, None, 'True if DHT enabled.'),
        "download-dir": ('string', 1, None, None, None, 'The download directory.'),
        "download-dir-free-space": ('number', 12, None, None, None, 'Free space in the download directory, in bytes'),
        "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
        "download-queue-enabled": ('boolean', 14, None, None, None, 'True if the download queue is enabled.'),
        "encryption": ('string', 1, None, None, None, 'Encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
        "idle-seeding-limit": ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
        "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'True if the seed activity limit is enabled.'),
        "incomplete-dir": ('string', 7, None, None, None, 'The path to the directory for incomplete torrent transfer data.'),
        "incomplete-dir-enabled": ('boolean', 7, None, None, None, 'True if the incomplete dir is enabled.'),
        "lpd-enabled": ('boolean', 9, None, None, None, 'True if local peer discovery is enabled.'),
        "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
        "peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
        "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
        "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'True if PEX is allowed.'),
        "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'True if PEX is enabled.'),
        "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
        "peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
        "peer-port-random-on-start": ('boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
        "port-forwarding-enabled": ('boolean', 1, None, None, None, 'True if port forwarding is enabled.'),
        "queue-stalled-minutes": ('number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
        "queue-stalled-enabled": ('boolean', 14, None, None, None, 'True if stalled tracking of transfers is enabled.'),
        "rename-partial-files": ('boolean', 8, None, None, None, 'True if ".part" is appended to incomplete files'),
        "rpc-version": ('number', 4, None, None, None, 'Transmission RPC API Version.'),
        "rpc-version-minimum": ('number', 4, None, None, None, 'Minimum accepted RPC API Version.'),
        "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'True if the done script is enabled.'),
        "script-torrent-done-filename": ('string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
        "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
        "seedRatioLimited": ('boolean', 5, None, None, None, 'True if seed ration limit is enabled.'),
        "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
        "seed-queue-enabled": ('boolean', 14, None, None, None, 'True if upload queue is enabled.'),
        "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
        "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'True if the download speed is limited.'),
        "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
        "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'True if the upload speed is limited.'),
        "start-added-torrents": ('boolean', 9, None, None, None, 'When true uploaded torrents will start right away.'),
        "trash-original-torrent-files": ('boolean', 9, None, None, None, 'When true added .torrent files will be deleted.'),
        'units': ('object', 10, None, None, None, 'An object containing units for size and speed.'),
        'utp-enabled': ('boolean', 13, None, None, None, 'True if Micro Transport Protocol (UTP) is enabled.'),
        "version": ('string', 3, None, None, None, 'Transmission version.'),
    },
    'set': {
        "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
        "alt-speed-enabled": ('boolean', 5, None, None, None, 'Enables alternate global download speed limiter.'),
        "alt-speed-time-begin": (
            'number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
        "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'Enables alternate speeds scheduling.'),
        "alt-speed-time-end": (
            'number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
        "alt-speed-time-day": ('number', 5, None, None, None, 'Enables alternate speeds scheduling these days.'),
        "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s).'),
        "blocklist-enabled": ('boolean', 5, None, None, None, 'Enables the block list'),
        "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
        "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
        "dht-enabled": ('boolean', 6, None, None, None, 'Enables DHT.'),
        "download-dir": ('string', 1, None, None, None, 'Set the session download directory.'),
        "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
        "download-queue-enabled": ('boolean', 14, None, None, None, 'Enables download queue.'),
        "encryption": ('string', 1, None, None, None,
                       'Set the session encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
        "idle-seeding-limit": ('number', 10, None, None, None, 'The default seed inactivity limit in minutes.'),
        "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'Enables the default seed inactivity limit'),
        "incomplete-dir": ('string', 7, None, None, None, 'The path to the directory of incomplete transfer data.'),
        "incomplete-dir-enabled": ('boolean', 7, None, None, None,
                                   'Enables the incomplete transfer data directory. Otherwise data for incomplete transfers are stored in the download target.'),
        "lpd-enabled": ('boolean', 9, None, None, None, 'Enables local peer discovery for public torrents.'),
        "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
        "peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
        "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
        "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'Allowing PEX in public torrents.'),
        "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'Allowing PEX in public torrents.'),
        "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
        "peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
        "peer-port-random-on-start": (
            'boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
        "port-forwarding-enabled": ('boolean', 1, None, None, None, 'Enables port forwarding.'),
        "rename-partial-files": ('boolean', 8, None, None, None, 'Appends ".part" to incomplete files'),
        "queue-stalled-minutes": (
            'number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
        "queue-stalled-enabled": ('boolean', 14, None, None, None, 'Enable tracking of stalled transfers.'),
        "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'Whether or not to call the "done" script.'),
        "script-torrent-done-filename": (
            'string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
        "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
        "seed-queue-enabled": ('boolean', 14, None, None, None, 'Enables upload queue.'),
        "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
        "seedRatioLimited": ('boolean', 5, None, None, None, 'Enables seed ration limit.'),
        "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
        "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'Enables download speed limiting.'),
        "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
        "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'Enables upload speed limiting.'),
        "start-added-torrents": ('boolean', 9, None, None, None, 'Added torrents will be started right away.'),
        "trash-original-torrent-files": (
            'boolean', 9, None, None, None, 'The .torrent file of added torrents will be deleted.'),
        'utp-enabled': ('boolean', 13, None, None, None, 'Enables Micro Transport Protocol (UTP).'),
        "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
        "alt-speed-enabled": ('boolean', 5, None, None, None, 'Enables alternate global download speed limiter.'),
        "alt-speed-time-begin": ('number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
        "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'Enables alternate speeds scheduling.'),
        "alt-speed-time-end": ('number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
        "alt-speed-time-day": ('number', 5, None, None, None, 'Enables alternate speeds scheduling these days.'),
        "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s).'),
        "blocklist-enabled": ('boolean', 5, None, None, None, 'Enables the block list'),
        "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
        "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
        "dht-enabled": ('boolean', 6, None, None, None, 'Enables DHT.'),
        "download-dir": ('string', 1, None, None, None, 'Set the session download directory.'),
        "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
        "download-queue-enabled": ('boolean', 14, None, None, None, 'Enables download queue.'),
        "encryption": ('string', 1, None, None, None, 'Set the session encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
        "idle-seeding-limit": ('number', 10, None, None, None, 'The default seed inactivity limit in minutes.'),
        "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'Enables the default seed inactivity limit'),
        "incomplete-dir": ('string', 7, None, None, None, 'The path to the directory of incomplete transfer data.'),
        "incomplete-dir-enabled": ('boolean', 7, None, None, None, 'Enables the incomplete transfer data directory. Otherwise data for incomplete transfers are stored in the download target.'),
        "lpd-enabled": ('boolean', 9, None, None, None, 'Enables local peer discovery for public torrents.'),
        "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
        "peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
        "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
        "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'Allowing PEX in public torrents.'),
        "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'Allowing PEX in public torrents.'),
        "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
        "peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
        "peer-port-random-on-start": ('boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
        "port-forwarding-enabled": ('boolean', 1, None, None, None, 'Enables port forwarding.'),
        "rename-partial-files": ('boolean', 8, None, None, None, 'Appends ".part" to incomplete files'),
        "queue-stalled-minutes": ('number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
        "queue-stalled-enabled": ('boolean', 14, None, None, None, 'Enable tracking of stalled transfers.'),
        "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'Whether or not to call the "done" script.'),
        "script-torrent-done-filename": ('string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
        "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
        "seed-queue-enabled": ('boolean', 14, None, None, None, 'Enables upload queue.'),
        "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
        "seedRatioLimited": ('boolean', 5, None, None, None, 'Enables seed ration limit.'),
        "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
        "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'Enables download speed limiting.'),
        "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
        "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'Enables upload speed limiting.'),
        "start-added-torrents": ('boolean', 9, None, None, None, 'Added torrents will be started right away.'),
        "trash-original-torrent-files": ('boolean', 9, None, None, None, 'The .torrent file of added torrents will be deleted.'),
        'utp-enabled': ('boolean', 13, None, None, None, 'Enables Micro Transport Protocol (UTP).'),
    },
}
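One practical consequence of the previous/next-name fields: an argument can be renamed to whatever a given RPC version expects. A minimal sketch, with resolve_arg a hypothetical helper rather than part of the library:

    def resolve_arg(args, name, rpc_version):
        # Follow the "next name" chain until the argument is valid for rpc_version.
        arg_type, added, removed, previous, replacement, doc = args[name]
        if removed is not None and rpc_version >= removed:
            return resolve_arg(args, replacement, rpc_version)
        return name

    resolve_arg(SESSION_ARGS['get'], 'peer-limit', 7)  # -> 'peer-limit-global'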

@@ -2,15 +2,13 @@
# Copyright (c) 2008-2013 Erik Svensson <erik.public@gmail.com>
# Licensed under the MIT license.

from six import integer_types, string_types

from six import string_types, integer_types

class TransmissionError(Exception):
    """
    This exception is raised when there has occurred an error related to
    communication with Transmission. It is a subclass of Exception.
    This exception is raised when there has occurred an error related to
    communication with Transmission. It is a subclass of Exception.
    """

    def __init__(self, message='', original=None):
        Exception.__init__(self)
        self.message = message

@@ -19,17 +17,15 @@ class TransmissionError(Exception):
    def __str__(self):
        if self.original:
            original_name = type(self.original).__name__
            return '{0} Original exception: {1}, "{2}"'.format(self.message, original_name, str(self.original))
            return '%s Original exception: %s, "%s"' % (self.message, original_name, str(self.original))
        else:
            return self.message
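A short usage sketch of the wrapping behaviour above, with hypothetical values:

    try:
        raise ValueError('bad torrent id')
    except ValueError as err:
        wrapped = TransmissionError('Request failed.', original=err)
    print(wrapped)
    # -> Request failed. Original exception: ValueError, "bad torrent id"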

class HTTPHandlerError(Exception):
    """
    This exception is raised when there has occurred an error related to
    the HTTP handler. It is a subclass of Exception.
    This exception is raised when there has occurred an error related to
    the HTTP handler. It is a subclass of Exception.
    """

    def __init__(self, httpurl=None, httpcode=None, httpmsg=None, httpheaders=None, httpdata=None):
        Exception.__init__(self)
        self.url = ''

@@ -49,10 +45,10 @@ class HTTPHandlerError(Exception):
        self.data = httpdata

    def __repr__(self):
        return '<HTTPHandlerError {0:d}, {1}>'.format(self.code, self.message)
        return '<HTTPHandlerError %d, %s>' % (self.code, self.message)

    def __str__(self):
        return 'HTTPHandlerError {0:d}: {1}'.format(self.code, self.message)
        return 'HTTPHandlerError %d: %s' % (self.code, self.message)

    def __unicode__(self):
        return 'HTTPHandlerError {0:d}: {1}'.format(self.code, self.message)
        return 'HTTPHandlerError %d: %s' % (self.code, self.message)


@@ -4,24 +4,25 @@

import sys

from six.moves.http_client import BadStatusLine
from six.moves.urllib_error import HTTPError, URLError
from six.moves.urllib_request import (
    HTTPBasicAuthHandler,
    HTTPDigestAuthHandler,
    HTTPPasswordMgrWithDefaultRealm,
    Request,
    build_opener,
)
from transmissionrpc.error import HTTPHandlerError

from .error import HTTPHandlerError
from six import PY3

if PY3:
    from urllib.request import Request, build_opener, \
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler
    from urllib.error import HTTPError, URLError
    from http.client import BadStatusLine
else:
    from urllib2 import Request, build_opener, \
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler
    from urllib2 import HTTPError, URLError
    from httplib import BadStatusLine

class HTTPHandler(object):
    """
    Prototype for HTTP handling.
    """

    def set_authentication(self, uri, login, password):
        """
        Transmission use basic authentication in earlier versions and digest

@@ -44,12 +45,10 @@ class HTTPHandler(object):
        """
        raise NotImplementedError("Bad HTTPHandler, failed to implement request.")


class DefaultHTTPHandler(HTTPHandler):
    """
    The default HTTP handler provided with transmissionrpc.
    """

    def __init__(self):
        HTTPHandler.__init__(self)
        self.http_opener = build_opener()

@@ -77,7 +76,7 @@ class DefaultHTTPHandler(HTTPHandler):
            if hasattr(error.reason, 'args') and isinstance(error.reason.args, tuple) and len(error.reason.args) == 2:
                raise HTTPHandlerError(httpcode=error.reason.args[0], httpmsg=error.reason.args[1])
            else:
                raise HTTPHandlerError(httpmsg='urllib2.URLError: {error.reason}'.format(error=error))
                raise HTTPHandlerError(httpmsg='urllib2.URLError: %s' % (error.reason))
        except BadStatusLine as error:
            raise HTTPHandlerError(httpmsg='httplib.BadStatusLine: {error.line}'.format(error=error))
            raise HTTPHandlerError(httpmsg='httplib.BadStatusLine: %s' % (error.line))
        return response.read().decode('utf-8')

@@ -2,10 +2,9 @@
# Copyright (c) 2008-2013 Erik Svensson <erik.public@gmail.com>
# Licensed under the MIT license.

from six import integer_types, iteritems

from .utils import Field
from transmissionrpc.utils import Field

from six import iteritems, integer_types

class Session(object):
    """

@@ -27,12 +26,12 @@ class Session(object):
        try:
            return self._fields[name].value
        except KeyError:
            raise AttributeError('No attribute {0}'.format(name))
            raise AttributeError('No attribute %s' % name)

    def __str__(self):
        text = ''
        for key in sorted(self._fields.keys()):
            text += "{0:32}: {1}\n".format(key[-32:], self._fields[key].value)
            text += "% 32s: %s\n" % (key[-32:], self._fields[key].value)
        return text

    def _update_fields(self, other):


@@ -2,27 +2,25 @@
# Copyright (c) 2008-2013 Erik Svensson <erik.public@gmail.com>
# Licensed under the MIT license.

import datetime
import sys
import sys, datetime

from six import integer_types, iteritems, string_types, text_type
from transmissionrpc.constants import PRIORITY, RATIO_LIMIT, IDLE_LIMIT
from transmissionrpc.utils import Field, format_timedelta

from .constants import IDLE_LIMIT, PRIORITY, RATIO_LIMIT
from .utils import Field, format_timedelta
from six import integer_types, string_types, text_type, iteritems


def get_status_old(code):
    """Get the torrent status using old status codes"""
    mapping = {
        (1 << 0): 'check pending',
        (1 << 1): 'checking',
        (1 << 2): 'downloading',
        (1 << 3): 'seeding',
        (1 << 4): 'stopped',
        (1<<0): 'check pending',
        (1<<1): 'checking',
        (1<<2): 'downloading',
        (1<<3): 'seeding',
        (1<<4): 'stopped',
    }
    return mapping[code]
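The old status codes are bit flags, so the table above can be checked directly:

    assert get_status_old(1 << 2) == 'downloading'
    assert get_status_old(16) == 'stopped'  # 16 == 1 << 4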


def get_status_new(code):
    """Get the torrent status using new status codes"""
    mapping = {

@@ -36,7 +34,6 @@ def get_status_new(code):
    }
    return mapping[code]


class Torrent(object):
    """
    Torrent is a class holding the data received from Transmission regarding a bittorrent transfer.

@@ -74,14 +71,14 @@ class Torrent(object):
        tid = self._fields['id'].value
        name = self._get_name_string()
        if isinstance(name, str):
            return '<Torrent {0:d} \"{1}\">'.format(tid, name)
            return '<Torrent %d \"%s\">' % (tid, name)
        else:
            return '<Torrent {0:d}>'.format(tid)
            return '<Torrent %d>' % (tid)

    def __str__(self):
        name = self._get_name_string()
        if isinstance(name, str):
            return 'Torrent \"{0}\"'.format(name)
            return 'Torrent \"%s\"' % (name)
        else:
            return 'Torrent'

@@ -92,7 +89,7 @@ class Torrent(object):
        try:
            return self._fields[name].value
        except KeyError:
            raise AttributeError('No attribute {0}'.format(name))
            raise AttributeError('No attribute %s' % name)

    def _rpc_version(self):
        """Get the Transmission RPC API version."""

@@ -102,9 +99,8 @@ class Torrent(object):

    def _dirty_fields(self):
        """Enumerate changed fields"""
        outgoing_keys = ['bandwidthPriority', 'downloadLimit', 'downloadLimited', 'peer_limit', 'queuePosition',
                         'seedIdleLimit', 'seedIdleMode', 'seedRatioLimit', 'seedRatioMode', 'uploadLimit',
                         'uploadLimited']
        outgoing_keys = ['bandwidthPriority', 'downloadLimit', 'downloadLimited', 'peer_limit', 'queuePosition'
            , 'seedIdleLimit', 'seedIdleMode', 'seedRatioLimit', 'seedRatioMode', 'uploadLimit', 'uploadLimited']
        fields = []
        for key in outgoing_keys:
            if key in self._fields and self._fields[key].dirty:

@@ -125,6 +121,7 @@ class Torrent(object):
        """
        Update the torrent data from a Transmission JSON-RPC arguments dictionary
        """
        fields = None
        if isinstance(other, dict):
            for key, value in iteritems(other):
                self._fields[key.replace('-', '_')] = Field(value, False)

@@ -267,14 +264,13 @@ class Torrent(object):
            self._fields['downloadLimited'] = Field(True, True)
            self._fields['downloadLimit'] = Field(limit, True)
            self._push()
        elif limit is None:
        elif limit == None:
            self._fields['downloadLimited'] = Field(False, True)
            self._push()
        else:
            raise ValueError("Not a valid limit")

    download_limit = property(_get_download_limit, _set_download_limit, None,
                              "Download limit in Kbps or None. This is a mutator.")
    download_limit = property(_get_download_limit, _set_download_limit, None, "Download limit in Kbps or None. This is a mutator.")
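Usage sketch for the mutator property above, with torrent a hypothetical Torrent instance fetched through the client:

    torrent.download_limit = 250    # sets downloadLimited and downloadLimit, then pushes both
    torrent.download_limit = None   # clears downloadLimited
    # torrent.download_limit = 'x'  # would raise ValueError("Not a valid limit")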
|
||||
|
||||
def _get_peer_limit(self):
|
||||
"""
|
||||
|
@ -311,7 +307,7 @@ class Torrent(object):
|
|||
self._push()
|
||||
|
||||
priority = property(_get_priority, _set_priority, None
|
||||
, "Bandwidth priority as string. Can be one of 'low', 'normal', 'high'. This is a mutator.")
|
||||
, "Bandwidth priority as string. Can be one of 'low', 'normal', 'high'. This is a mutator.")
|
||||
|
||||
def _get_seed_idle_limit(self):
|
||||
"""
|
||||
|
@ -330,7 +326,7 @@ class Torrent(object):
|
|||
raise ValueError("Not a valid limit")
|
||||
|
||||
seed_idle_limit = property(_get_seed_idle_limit, _set_seed_idle_limit, None
|
||||
, "Torrent seed idle limit in minutes. Also see seed_idle_mode. This is a mutator.")
|
||||
, "Torrent seed idle limit in minutes. Also see seed_idle_mode. This is a mutator.")
|
||||
|
||||
def _get_seed_idle_mode(self):
|
||||
"""
|
||||
|
@ -349,7 +345,7 @@ class Torrent(object):
|
|||
raise ValueError("Not a valid limit")
|
||||
|
||||
seed_idle_mode = property(_get_seed_idle_mode, _set_seed_idle_mode, None,
|
||||
"""
|
||||
"""
|
||||
Seed idle mode as string. Can be one of 'global', 'single' or 'unlimited'.
|
||||
|
||||
* global, use session seed idle limit.
|
||||
|
@ -358,7 +354,7 @@ class Torrent(object):
|
|||
|
||||
This is a mutator.
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
def _get_seed_ratio_limit(self):
|
||||
"""
|
||||
|
@ -377,7 +373,7 @@ class Torrent(object):
|
|||
raise ValueError("Not a valid limit")
|
||||
|
||||
seed_ratio_limit = property(_get_seed_ratio_limit, _set_seed_ratio_limit, None
|
||||
, "Torrent seed ratio limit as float. Also see seed_ratio_mode. This is a mutator.")
|
||||
, "Torrent seed ratio limit as float. Also see seed_ratio_mode. This is a mutator.")
|
||||
|
||||
def _get_seed_ratio_mode(self):
|
||||
"""
|
||||
|
@ -396,7 +392,7 @@ class Torrent(object):
|
|||
raise ValueError("Not a valid limit")
|
||||
|
||||
seed_ratio_mode = property(_get_seed_ratio_mode, _set_seed_ratio_mode, None,
|
||||
"""
|
||||
"""
|
||||
Seed ratio mode as string. Can be one of 'global', 'single' or 'unlimited'.
|
||||
|
||||
* global, use session seed ratio limit.
|
||||
|
@ -405,7 +401,7 @@ class Torrent(object):
|
|||
|
||||
This is a mutator.
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
def _get_upload_limit(self):
|
||||
"""
|
||||
|
@ -426,14 +422,13 @@ class Torrent(object):
|
|||
self._fields['uploadLimited'] = Field(True, True)
|
||||
self._fields['uploadLimit'] = Field(limit, True)
|
||||
self._push()
|
||||
elif limit is None:
|
||||
elif limit == None:
|
||||
self._fields['uploadLimited'] = Field(False, True)
|
||||
self._push()
|
||||
else:
|
||||
raise ValueError("Not a valid limit")
|
||||
|
||||
upload_limit = property(_get_upload_limit, _set_upload_limit, None,
|
||||
"Upload limit in Kbps or None. This is a mutator.")
|
||||
upload_limit = property(_get_upload_limit, _set_upload_limit, None, "Upload limit in Kbps or None. This is a mutator.")
|
||||
|
||||
def _get_queue_position(self):
|
||||
"""Get the queue position for this torrent."""
|
||||
|
|
|
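Editor's note: the paired _get_*/_set_* methods above are exposed through property objects, so a plain attribute assignment both updates the local Field cache and pushes the change over JSON-RPC. A minimal sketch of the same pattern, using simplified Field/_push stand-ins rather than the library's real internals:

    from collections import namedtuple

    Field = namedtuple('Field', ['value', 'dirty'])

    class TorrentSketch(object):
        def __init__(self):
            self._fields = {'downloadLimit': Field(None, False)}

        def _push(self):
            pass  # the real class sends all dirty fields over JSON-RPC here

        def _get_download_limit(self):
            return self._fields['downloadLimit'].value

        def _set_download_limit(self, limit):
            if isinstance(limit, int):
                self._fields['downloadLimited'] = Field(True, True)
                self._fields['downloadLimit'] = Field(limit, True)
                self._push()
            elif limit is None:
                self._fields['downloadLimited'] = Field(False, True)
                self._push()
            else:
                raise ValueError("Not a valid limit")

        download_limit = property(_get_download_limit, _set_download_limit, None,
                                  "Download limit in Kbps or None. This is a mutator.")

    t = TorrentSketch()
    t.download_limit = 250   # marks the fields dirty and triggers a push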
@@ -2,19 +2,15 @@
# Copyright (c) 2008-2013 Erik Svensson <erik.public@gmail.com>
# Licensed under the MIT license.

import datetime
import logging
import socket
import socket, datetime, logging
from collections import namedtuple
import transmissionrpc.constants as constants
from transmissionrpc.constants import LOGGER

from six import iteritems, string_types

from . import constants
from .constants import LOGGER
from six import string_types, iteritems

UNITS = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB']


def format_size(size):
"""
Format byte size into IEC prefixes, B, KiB, MiB ...

@@ -24,16 +20,14 @@ def format_size(size):
while size >= 1024.0 and i < len(UNITS):
i += 1
size /= 1024.0
return size, UNITS[i]

return (size, UNITS[i])

def format_speed(size):
"""
Format bytes per second speed into IEC prefixes, B/s, KiB/s, MiB/s ...
"""
(size, unit) = format_size(size)
return size, '{unit}/s'.format(unit=unit)

return (size, unit + '/s')

def format_timedelta(delta):
"""

@@ -41,8 +35,7 @@ def format_timedelta(delta):
"""
minutes, seconds = divmod(delta.seconds, 60)
hours, minutes = divmod(minutes, 60)
return '{0:d} {1:02d}:{2:02d}:{3:02d}'.format(delta.days, hours, minutes, seconds)

return '%d %02d:%02d:%02d' % (delta.days, hours, minutes, seconds)

def format_timestamp(timestamp, utc=False):
"""

@@ -57,14 +50,12 @@ def format_timestamp(timestamp, utc=False):
else:
return '-'


class INetAddressError(Exception):
"""
Error parsing / generating an internet address.
"""
pass


def inet_address(address, default_port, default_address='localhost'):
"""
Parse internet address.

@@ -81,19 +72,18 @@ def inet_address(address, default_port, default_address='localhost'):
try:
port = int(addr[1])
except ValueError:
raise INetAddressError('Invalid address "{0}".'.format(address))
raise INetAddressError('Invalid address "%s".' % address)
if len(addr[0]) == 0:
addr = default_address
else:
addr = addr[0]
else:
raise INetAddressError('Invalid address "{0}".'.format(address))
raise INetAddressError('Invalid address "%s".' % address)
try:
socket.getaddrinfo(addr, port, socket.AF_INET, socket.SOCK_STREAM)
except socket.gaierror:
raise INetAddressError('Cannot look up address "{0}".'.format(address))
return addr, port

raise INetAddressError('Cannot look up address "%s".' % address)
return (addr, port)

def rpc_bool(arg):
"""

@@ -106,31 +96,27 @@ def rpc_bool(arg):
arg = arg.lower() in ['true', 'yes']
return 1 if bool(arg) else 0


TR_TYPE_MAP = {
'number': int,
'string': str,
'number' : int,
'string' : str,
'double': float,
'boolean': rpc_bool,
'boolean' : rpc_bool,
'array': list,
'object': dict
}


def make_python_name(name):
"""
Convert Transmission RPC name to python compatible name.
"""
return name.replace('-', '_')


def make_rpc_name(name):
"""
Convert python compatible name to Transmission RPC name.
"""
return name.replace('_', '-')


def argument_value_convert(method, argument, value, rpc_version):
"""
Check and fix Transmission RPC issues with regards to methods, arguments and values.

@@ -140,7 +126,7 @@ def argument_value_convert(method, argument, value, rpc_version):
elif method in ('session-get', 'session-set'):
args = constants.SESSION_ARGS[method[-3:]]
else:
return ValueError('Method "{0}" not supported'.format(method))
return ValueError('Method "%s" not supported' % (method))
if argument in args:
info = args[argument]
invalid_version = True

@@ -156,18 +142,19 @@ def argument_value_convert(method, argument, value, rpc_version):
if invalid_version:
if replacement:
LOGGER.warning(
'Replacing requested argument "{0}" with "{1}".'.format(argument, replacement))
'Replacing requested argument "%s" with "%s".'
% (argument, replacement))
argument = replacement
info = args[argument]
else:
raise ValueError(
'Method "{0}" Argument "{1}" does not exist in version {2:d}.'.format(method, argument, rpc_version))
return argument, TR_TYPE_MAP[info[0]](value)
'Method "%s" Argument "%s" does not exist in version %d.'
% (method, argument, rpc_version))
return (argument, TR_TYPE_MAP[info[0]](value))
else:
raise ValueError('Argument "%s" does not exist for method "%s".' %
(argument, method))


def get_arguments(method, rpc_version):
"""
Get arguments for method in specified Transmission RPC version.

@@ -177,7 +164,7 @@ def get_arguments(method, rpc_version):
elif method in ('session-get', 'session-set'):
args = constants.SESSION_ARGS[method[-3:]]
else:
return ValueError('Method "{0}" not supported'.format(method))
return ValueError('Method "%s" not supported' % (method))
accessible = []
for argument, info in iteritems(args):
valid_version = True

@@ -189,7 +176,6 @@ def get_arguments(method, rpc_version):
accessible.append(argument)
return accessible


def add_stdout_logger(level='debug'):
"""
Add a stdout target for the transmissionrpc logging.

@@ -204,7 +190,6 @@ def add_stdout_logger(level='debug'):
loghandler.setLevel(loglevel)
trpc_logger.addHandler(loghandler)


def add_file_logger(filepath, level='debug'):
"""
Add a file target for the transmissionrpc logging.

@@ -219,5 +204,4 @@ def add_file_logger(filepath, level='debug'):
loghandler.setLevel(loglevel)
trpc_logger.addHandler(loghandler)


Field = namedtuple('Field', ['value', 'dirty'])
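Editor's note: for orientation, a brief usage sketch of the helpers in this file (values illustrative, assuming the behaviour shown in the hunks above):

    size, unit = format_size(123456789)                 # -> roughly (117.7, 'MiB')
    speed, unit = format_speed(2048)                    # -> (2.0, 'KiB/s')
    addr, port = inet_address('localhost:9091', 9091)  # -> ('localhost', 9091)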
@@ -25,7 +25,7 @@ Typical usage:
Note: see the rox.Options module for a higher-level API for managing options.
"""

import os
import os, stat

_home = os.path.expanduser('~')
xdg_data_home = os.environ.get('XDG_DATA_HOME') or \

@@ -131,15 +131,30 @@ def get_runtime_dir(strict=True):

import getpass
fallback = '/tmp/pyxdg-runtime-dir-fallback-' + getpass.getuser()
create = False

try:
os.mkdir(fallback, 0o700)
# This must be a real directory, not a symlink, so attackers can't
# point it elsewhere. So we use lstat to check it.
st = os.lstat(fallback)
except OSError as e:
import errno
if e.errno == errno.EEXIST:
# Already exists - set 700 permissions again.
import stat
os.chmod(fallback, stat.S_IRUSR|stat.S_IWUSR|stat.S_IXUSR)
else: # pragma: no cover
if e.errno == errno.ENOENT:
create = True
else:
raise
else:
# The fallback must be a directory
if not stat.S_ISDIR(st.st_mode):
os.unlink(fallback)
create = True
# Must be owned by the user and not accessible by anyone else
elif (st.st_uid != os.getuid()) \
or (st.st_mode & (stat.S_IRWXG | stat.S_IRWXO)):
os.rmdir(fallback)
create = True

if create:
os.mkdir(fallback, 0o700)

return fallback
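Editor's note: the hunk above hardens the fallback directory. os.lstat (rather than os.stat) is used so a symlink planted by another user is detected instead of silently followed. A standalone sketch of the same ownership-and-mode check:

    import os
    import stat

    def is_private_dir(path):
        """Sketch of the check above: a real directory, owned by us,
        with no group/other permission bits set."""
        st = os.lstat(path)  # lstat: a symlink fails the S_ISDIR test below
        return (stat.S_ISDIR(st.st_mode)
                and st.st_uid == os.getuid()
                and not (st.st_mode & (stat.S_IRWXG | stat.S_IRWXO)))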
@@ -1,5 +1,5 @@
"""
Complete implementation of the XDG Desktop Entry Specification Version 0.9.4
Complete implementation of the XDG Desktop Entry Specification
http://standards.freedesktop.org/desktop-entry-spec/

Not supported:

@@ -13,6 +13,7 @@ Not supported:
from xdg.IniFile import IniFile, is_ascii
import xdg.Locale
from xdg.Exceptions import ParsingError
from xdg.util import which
import os.path
import re
import warnings

@@ -23,7 +24,7 @@ class DesktopEntry(IniFile):
defaultGroup = 'Desktop Entry'

def __init__(self, filename=None):
"""Create a new DesktopEntry
"""Create a new DesktopEntry.

If filename exists, it will be parsed as a desktop entry file. If not,
or if filename is None, a blank DesktopEntry is created.

@@ -38,9 +39,23 @@ class DesktopEntry(IniFile):
return self.getName()

def parse(self, file):
"""Parse a desktop entry file."""
"""Parse a desktop entry file.

This can raise :class:`~xdg.Exceptions.ParsingError`,
:class:`~xdg.Exceptions.DuplicateGroupError` or
:class:`~xdg.Exceptions.DuplicateKeyError`.
"""
IniFile.parse(self, file, ["Desktop Entry", "KDE Desktop Entry"])

def findTryExec(self):
"""Looks in the PATH for the executable given in the TryExec field.

Returns the full path to the executable if it is found, None if not.
Raises :class:`~xdg.Exceptions.NoKeyError` if TryExec is not present.
"""
tryexec = self.get('TryExec', strict=True)
return which(tryexec)

# start standard keys
def getType(self):
return self.get('Type')

@@ -140,10 +155,11 @@ class DesktopEntry(IniFile):

# desktop entry edit stuff
def new(self, filename):
"""Make this instance into a new desktop entry.
"""Make this instance into a new, blank desktop entry.

If filename has a .desktop extension, Type is set to Application. If it
has a .directory extension, Type is Directory.
has a .directory extension, Type is Directory. Other extensions will
cause :class:`~xdg.Exceptions.ParsingError` to be raised.
"""
if os.path.splitext(filename)[1] == ".desktop":
type = "Application"

@@ -185,7 +201,7 @@ class DesktopEntry(IniFile):
def checkGroup(self, group):
# check if group header is valid
if not (group == self.defaultGroup \
or re.match("^Desktop Action [a-zA-Z0-9\-]+$", group) \
or re.match("^Desktop Action [a-zA-Z0-9-]+$", group) \
or (re.match("^X-", group) and is_ascii(group))):
self.errors.append("Invalid Group name: %s" % group)
else:
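Editor's note: a short usage sketch of the new strict parsing and findTryExec behaviour added above (the .desktop path is hypothetical):

    from xdg.DesktopEntry import DesktopEntry
    from xdg.Exceptions import ParsingError, NoKeyError

    try:
        entry = DesktopEntry('/usr/share/applications/example.desktop')  # hypothetical path
        print(entry.getName())
        exe = entry.findTryExec()  # full path, or None; NoKeyError if TryExec absent
    except (ParsingError, NoKeyError) as e:
        print(e)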
@@ -5,6 +5,7 @@ Exception Classes for the xdg package
debug = False

class Error(Exception):
"""Base class for exceptions defined here."""
def __init__(self, msg):
self.msg = msg
Exception.__init__(self, msg)

@@ -12,40 +13,72 @@ class Error(Exception):
return self.msg

class ValidationError(Error):
"""Raised when a file fails to validate.

The filename is the .file attribute.
"""
def __init__(self, msg, file):
self.msg = msg
self.file = file
Error.__init__(self, "ValidationError in file '%s': %s " % (file, msg))

class ParsingError(Error):
"""Raised when a file cannot be parsed.

The filename is the .file attribute.
"""
def __init__(self, msg, file):
self.msg = msg
self.file = file
Error.__init__(self, "ParsingError in file '%s', %s" % (file, msg))

class NoKeyError(Error):
"""Raised when trying to access a nonexistent key in an INI-style file.

Attributes are .key, .group and .file.
"""
def __init__(self, key, group, file):
Error.__init__(self, "No key '%s' in group %s of file %s" % (key, group, file))
self.key = key
self.group = group
self.file = file

class DuplicateKeyError(Error):
"""Raised when the same key occurs twice in an INI-style file.

Attributes are .key, .group and .file.
"""
def __init__(self, key, group, file):
Error.__init__(self, "Duplicate key '%s' in group %s of file %s" % (key, group, file))
self.key = key
self.group = group
self.file = file

class NoGroupError(Error):
"""Raised when trying to access a nonexistent group in an INI-style file.

Attributes are .group and .file.
"""
def __init__(self, group, file):
Error.__init__(self, "No group: %s in file %s" % (group, file))
self.group = group
self.file = file

class DuplicateGroupError(Error):
"""Raised when the same group occurs twice in an INI-style file.

Attributes are .group and .file.
"""
def __init__(self, group, file):
Error.__init__(self, "Duplicate group: %s in file %s" % (group, file))
self.group = group
self.file = file

class NoThemeError(Error):
"""Raised when trying to access a nonexistent icon theme.

The name of the theme is the .theme attribute.
"""
def __init__(self, theme):
Error.__init__(self, "No such icon-theme: %s" % theme)
self.theme = theme
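Editor's note: each exception above keeps its context as attributes, so callers can report more than the message text. A brief sketch:

    from xdg.Exceptions import NoKeyError

    try:
        raise NoKeyError('Exec', 'Desktop Entry', 'example.desktop')
    except NoKeyError as e:
        print(e.key, e.group, e.file)   # -> Exec Desktop Entry example.desktop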
@@ -1,5 +1,5 @@
"""
Complete implementation of the XDG Icon Spec Version 0.8
Complete implementation of the XDG Icon Spec
http://standards.freedesktop.org/icon-theme-spec/
"""

@@ -37,6 +37,8 @@ class IconTheme(IniFile):
return self.get('Inherits', list=True)
def getDirectories(self):
return self.get('Directories', list=True)
def getScaledDirectories(self):
return self.get('ScaledDirectories', list=True)
def getHidden(self):
return self.get('Hidden', type="boolean")
def getExample(self):

@@ -72,6 +74,10 @@ class IconTheme(IniFile):
else:
return 2

def getScale(self, directory):
value = self.get('Scale', type="integer", group=directory)
return value or 1

# validation stuff
def checkExtras(self):
# header

@@ -125,7 +131,7 @@ class IconTheme(IniFile):
self.name = self.content[group]["Size"]
except KeyError:
self.errors.append("Key 'Size' in Group '%s' is missing" % group)
elif not (re.match("^\[X-", group) and is_ascii(group)):
elif not (re.match(r"^\[X-", group) and is_ascii(group)):
self.errors.append("Invalid Group name: %s" % group)

def checkKey(self, key, value, group):

@@ -139,6 +145,8 @@ class IconTheme(IniFile):
self.checkValue(key, value, list=True)
elif key == "Directories":
self.checkValue(key, value, list=True)
elif key == "ScaledDirectories":
self.checkValue(key, value, list=True)
elif key == "Hidden":
self.checkValue(key, value, type="boolean")
elif key == "Example":

@@ -168,6 +176,8 @@ class IconTheme(IniFile):
self.checkValue(key, value, type="integer")
if self.type != "Threshold":
self.errors.append("Key 'Threshold' given, but Type is %s" % self.type)
elif key == "Scale":
self.checkValue(key, value, type="integer")
elif re.match("^X-[a-zA-Z0-9-]+", key):
pass
else:

@@ -211,7 +221,7 @@ class IconData(IniFile):
def checkGroup(self, group):
# check if group header is valid
if not (group == self.defaultGroup \
or (re.match("^\[X-", group) and is_ascii(group))):
or (re.match(r"^\[X-", group) and is_ascii(group))):
self.errors.append("Invalid Group name: %s" % group.encode("ascii", "replace"))

def checkKey(self, key, value, group):
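Editor's note: the r prefixes added above stop the patterns from relying on Python passing unknown backslash escapes through unchanged (which later Pythons flag with a DeprecationWarning). A quick illustration:

    import re
    # Both currently match the same text, but only the raw string is
    # future-proof: '\[' in a plain string works only because Python
    # happens to leave invalid escape sequences alone.
    assert re.match("^\[X-", "[X-Foo]")
    assert re.match(r"^\[X-", "[X-Foo]")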
@@ -102,7 +102,7 @@ class IniFile:
raise ParsingError("[%s]-Header missing" % headers[0], filename)

# start stuff to access the keys
def get(self, key, group=None, locale=False, type="string", list=False):
def get(self, key, group=None, locale=False, type="string", list=False, strict=False):
# set default group
if not group:
group = self.defaultGroup

@@ -114,7 +114,7 @@ class IniFile:
else:
value = self.content[group][key]
else:
if debug:
if strict or debug:
if group not in self.content:
raise NoGroupError(group, self.filename)
elif key not in self.content[group]:

@@ -192,8 +192,8 @@ class IniFile:

# start validation stuff
def validate(self, report="All"):
"""Validate the contents, raising ``ValidationError`` if there
is anything amiss.
"""Validate the contents, raising :class:`~xdg.Exceptions.ValidationError`
if there is anything amiss.

report can be 'All' / 'Warnings' / 'Errors'
"""
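Editor's note: with the new strict flag, a lookup can be made to fail loudly even when the module-level debug flag is off. A sketch, assuming pyxdg's default of returning an empty value for missing keys:

    from xdg.DesktopEntry import DesktopEntry
    from xdg.Exceptions import NoKeyError

    entry = DesktopEntry()
    entry.new('example.desktop')     # hypothetical blank entry
    print(entry.get('TryExec'))      # missing keys are silent by default
    try:
        entry.get('TryExec', strict=True)
    except NoKeyError as e:
        print('missing:', e.key)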
@@ -9,7 +9,7 @@ http://cvs.sourceforge.net/viewcvs.py/rox/ROX-Lib2/python/rox/i18n.py?rev=1.3&vi
import os
from locale import normalize

regex = "(\[([a-zA-Z]+)(_[a-zA-Z]+)?(\.[a-zA-Z\-0-9]+)?(@[a-zA-Z]+)?\])?"
regex = r"(\[([a-zA-Z]+)(_[a-zA-Z]+)?(\.[a-zA-Z0-9-]+)?(@[a-zA-Z]+)?\])?"

def _expand_lang(locale):
locale = normalize(locale)
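Editor's note: the regex recognizes the optional [lang_COUNTRY.ENCODING@MODIFIER] suffix used on localized keys; for example:

    import re
    regex = r"(\[([a-zA-Z]+)(_[a-zA-Z]+)?(\.[a-zA-Z0-9-]+)?(@[a-zA-Z]+)?\])?"
    m = re.match(regex, "[sr_YU@Latn]")
    print(m.group(2), m.group(3), m.group(5))   # -> sr _YU @Latn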
1530
libs/xdg/Menu.py
File diff suppressed because it is too large
@@ -1,14 +1,14 @@
""" Class to edit XDG Menus """

from xdg.Menu import *
from xdg.BaseDirectory import *
from xdg.Exceptions import *
from xdg.DesktopEntry import *
from xdg.Config import *

import xml.dom.minidom
import os
import re
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree

from xdg.Menu import Menu, MenuEntry, Layout, Separator, XMLMenuBuilder
from xdg.BaseDirectory import xdg_config_dirs, xdg_data_dirs
from xdg.Exceptions import ParsingError
from xdg.Config import setRootMode

# XML-Cleanups: Move / Exclude
# FIXME: proper revert/delete

@@ -20,28 +20,31 @@ import re
# FIXME: Advanced MenuEditing Stuff: LegacyDir/MergeFile
# Complex Rules/Deleted/OnlyAllocated/AppDirs/DirectoryDirs

class MenuEditor:

class MenuEditor(object):

def __init__(self, menu=None, filename=None, root=False):
self.menu = None
self.filename = None
self.doc = None
self.tree = None
self.parser = XMLMenuBuilder()
self.parse(menu, filename, root)

# fix for creating two menus with the same name on the fly
self.filenames = []

def parse(self, menu=None, filename=None, root=False):
if root == True:
if root:
setRootMode(True)

if isinstance(menu, Menu):
self.menu = menu
elif menu:
self.menu = parse(menu)
self.menu = self.parser.parse(menu)
else:
self.menu = parse()
self.menu = self.parser.parse()

if root == True:
if root:
self.filename = self.menu.Filename
elif filename:
self.filename = filename

@@ -49,13 +52,21 @@ class MenuEditor:
self.filename = os.path.join(xdg_config_dirs[0], "menus", os.path.split(self.menu.Filename)[1])

try:
self.doc = xml.dom.minidom.parse(self.filename)
self.tree = etree.parse(self.filename)
except IOError:
self.doc = xml.dom.minidom.parseString('<!DOCTYPE Menu PUBLIC "-//freedesktop//DTD Menu 1.0//EN" "http://standards.freedesktop.org/menu-spec/menu-1.0.dtd"><Menu><Name>Applications</Name><MergeFile type="parent">'+self.menu.Filename+'</MergeFile></Menu>')
except xml.parsers.expat.ExpatError:
root = etree.fromstring("""
<!DOCTYPE Menu PUBLIC "-//freedesktop//DTD Menu 1.0//EN" "http://standards.freedesktop.org/menu-spec/menu-1.0.dtd">
<Menu>
<Name>Applications</Name>
<MergeFile type="parent">%s</MergeFile>
</Menu>
""" % self.menu.Filename)
self.tree = etree.ElementTree(root)
except ParsingError:
raise ParsingError('Not a valid .menu file', self.filename)

self.__remove_whilespace_nodes(self.doc)
#FIXME: is this needed with etree ?
self.__remove_whitespace_nodes(self.tree)

def save(self):
self.__saveEntries(self.menu)

@@ -67,7 +78,7 @@ class MenuEditor:

self.__addEntry(parent, menuentry, after, before)

sort(self.menu)
self.menu.sort()

return menuentry

@@ -83,7 +94,7 @@ class MenuEditor:

self.__addEntry(parent, menu, after, before)

sort(self.menu)
self.menu.sort()

return menu

@@ -92,7 +103,7 @@ class MenuEditor:

self.__addEntry(parent, separator, after, before)

sort(self.menu)
self.menu.sort()

return separator

@@ -100,7 +111,7 @@ class MenuEditor:
self.__deleteEntry(oldparent, menuentry, after, before)
self.__addEntry(newparent, menuentry, after, before)

sort(self.menu)
self.menu.sort()

return menuentry

@@ -112,7 +123,7 @@ class MenuEditor:
if oldparent.getPath(True) != newparent.getPath(True):
self.__addXmlMove(root_menu, os.path.join(oldparent.getPath(True), menu.Name), os.path.join(newparent.getPath(True), menu.Name))

sort(self.menu)
self.menu.sort()

return menu

@@ -120,14 +131,14 @@ class MenuEditor:
self.__deleteEntry(parent, separator, after, before)
self.__addEntry(parent, separator, after, before)

sort(self.menu)
self.menu.sort()

return separator

def copyMenuEntry(self, menuentry, oldparent, newparent, after=None, before=None):
self.__addEntry(newparent, menuentry, after, before)

sort(self.menu)
self.menu.sort()

return menuentry

@@ -137,39 +148,39 @@ class MenuEditor:
if name:
if not deskentry.hasKey("Name"):
deskentry.set("Name", name)
deskentry.set("Name", name, locale = True)
deskentry.set("Name", name, locale=True)
if comment:
if not deskentry.hasKey("Comment"):
deskentry.set("Comment", comment)
deskentry.set("Comment", comment, locale = True)
deskentry.set("Comment", comment, locale=True)
if genericname:
if not deskentry.hasKey("GnericNe"):
if not deskentry.hasKey("GenericName"):
deskentry.set("GenericName", genericname)
deskentry.set("GenericName", genericname, locale = True)
deskentry.set("GenericName", genericname, locale=True)
if command:
deskentry.set("Exec", command)
if icon:
deskentry.set("Icon", icon)

if terminal == True:
if terminal:
deskentry.set("Terminal", "true")
elif terminal == False:
elif not terminal:
deskentry.set("Terminal", "false")

if nodisplay == True:
if nodisplay is True:
deskentry.set("NoDisplay", "true")
elif nodisplay == False:
elif nodisplay is False:
deskentry.set("NoDisplay", "false")

if hidden == True:
if hidden is True:
deskentry.set("Hidden", "true")
elif hidden == False:
elif hidden is False:
deskentry.set("Hidden", "false")

menuentry.updateAttributes()

if len(menuentry.Parents) > 0:
sort(self.menu)
self.menu.sort()

return menuentry

@@ -195,56 +206,58 @@ class MenuEditor:
if name:
if not deskentry.hasKey("Name"):
deskentry.set("Name", name)
deskentry.set("Name", name, locale = True)
deskentry.set("Name", name, locale=True)
if genericname:
if not deskentry.hasKey("GenericName"):
deskentry.set("GenericName", genericname)
deskentry.set("GenericName", genericname, locale = True)
deskentry.set("GenericName", genericname, locale=True)
if comment:
if not deskentry.hasKey("Comment"):
deskentry.set("Comment", comment)
deskentry.set("Comment", comment, locale = True)
deskentry.set("Comment", comment, locale=True)
if icon:
deskentry.set("Icon", icon)

if nodisplay == True:
if nodisplay is True:
deskentry.set("NoDisplay", "true")
elif nodisplay == False:
elif nodisplay is False:
deskentry.set("NoDisplay", "false")

if hidden == True:
if hidden is True:
deskentry.set("Hidden", "true")
elif hidden == False:
elif hidden is False:
deskentry.set("Hidden", "false")

menu.Directory.updateAttributes()

if isinstance(menu.Parent, Menu):
sort(self.menu)
self.menu.sort()

return menu

def hideMenuEntry(self, menuentry):
self.editMenuEntry(menuentry, nodisplay = True)
self.editMenuEntry(menuentry, nodisplay=True)

def unhideMenuEntry(self, menuentry):
self.editMenuEntry(menuentry, nodisplay = False, hidden = False)
self.editMenuEntry(menuentry, nodisplay=False, hidden=False)

def hideMenu(self, menu):
self.editMenu(menu, nodisplay = True)
self.editMenu(menu, nodisplay=True)

def unhideMenu(self, menu):
self.editMenu(menu, nodisplay = False, hidden = False)
xml_menu = self.__getXmlMenu(menu.getPath(True,True), False)
for node in self.__getXmlNodesByName(["Deleted", "NotDeleted"], xml_menu):
node.parentNode.removeChild(node)
self.editMenu(menu, nodisplay=False, hidden=False)
xml_menu = self.__getXmlMenu(menu.getPath(True, True), False)
deleted = xml_menu.findall('Deleted')
not_deleted = xml_menu.findall('NotDeleted')
for node in deleted + not_deleted:
xml_menu.remove(node)

def deleteMenuEntry(self, menuentry):
if self.getAction(menuentry) == "delete":
self.__deleteFile(menuentry.DesktopEntry.filename)
for parent in menuentry.Parents:
self.__deleteEntry(parent, menuentry)
sort(self.menu)
self.menu.sort()
return menuentry

def revertMenuEntry(self, menuentry):

@@ -257,7 +270,7 @@ class MenuEditor:
index = parent.MenuEntries.index(menuentry)
parent.MenuEntries[index] = menuentry.Original
menuentry.Original.Parents.append(parent)
sort(self.menu)
self.menu.sort()
return menuentry

def deleteMenu(self, menu):

@@ -265,21 +278,22 @@ class MenuEditor:
self.__deleteFile(menu.Directory.DesktopEntry.filename)
self.__deleteEntry(menu.Parent, menu)
xml_menu = self.__getXmlMenu(menu.getPath(True, True))
xml_menu.parentNode.removeChild(xml_menu)
sort(self.menu)
parent = self.__get_parent_node(xml_menu)
parent.remove(xml_menu)
self.menu.sort()
return menu

def revertMenu(self, menu):
if self.getAction(menu) == "revert":
self.__deleteFile(menu.Directory.DesktopEntry.filename)
menu.Directory = menu.Directory.Original
sort(self.menu)
self.menu.sort()
return menu

def deleteSeparator(self, separator):
self.__deleteEntry(separator.Parent, separator, after=True)

sort(self.menu)
self.menu.sort()

return separator

@@ -290,8 +304,9 @@ class MenuEditor:
return "none"
elif entry.Directory.getType() == "Both":
return "revert"
elif entry.Directory.getType() == "User" \
and (len(entry.Submenus) + len(entry.MenuEntries)) == 0:
elif entry.Directory.getType() == "User" and (
len(entry.Submenus) + len(entry.MenuEntries)
) == 0:
return "delete"

elif isinstance(entry, MenuEntry):

@@ -318,9 +333,7 @@ class MenuEditor:
def __saveMenu(self):
if not os.path.isdir(os.path.dirname(self.filename)):
os.makedirs(os.path.dirname(self.filename))
fd = open(self.filename, 'w')
fd.write(re.sub("\n[\s]*([^\n<]*)\n[\s]*</", "\\1</", self.doc.toprettyxml().replace('<?xml version="1.0" ?>\n', '')))
fd.close()
self.tree.write(self.filename, encoding='utf-8')

def __getFileName(self, name, extension):
postfix = 0

@@ -333,8 +346,9 @@ class MenuEditor:
dir = "applications"
elif extension == ".directory":
dir = "desktop-directories"
if not filename in self.filenames and not \
os.path.isfile(os.path.join(xdg_data_dirs[0], dir, filename)):
if not filename in self.filenames and not os.path.isfile(
os.path.join(xdg_data_dirs[0], dir, filename)
):
self.filenames.append(filename)
break
else:

@@ -343,8 +357,11 @@ class MenuEditor:
return filename

def __getXmlMenu(self, path, create=True, element=None):
# FIXME: we should also return the menu's parent,
# to avoid looking for it later on
# @see Element.getiterator()
if not element:
element = self.doc
element = self.tree

if "/" in path:
(name, path) = path.split("/", 1)

@@ -353,17 +370,16 @@ class MenuEditor:
path = ""

found = None
for node in self.__getXmlNodesByName("Menu", element):
for child in self.__getXmlNodesByName("Name", node):
if child.childNodes[0].nodeValue == name:
if path:
found = self.__getXmlMenu(path, create, node)
else:
found = node
break
for node in element.findall("Menu"):
name_node = node.find('Name')
if name_node.text == name:
if path:
found = self.__getXmlMenu(path, create, node)
else:
found = node
if found:
break
if not found and create == True:
if not found and create:
node = self.__addXmlMenuElement(element, name)
if path:
found = self.__getXmlMenu(path, create, node)

@@ -373,58 +389,62 @@ class MenuEditor:
return found

def __addXmlMenuElement(self, element, name):
node = self.doc.createElement('Menu')
self.__addXmlTextElement(node, 'Name', name)
return element.appendChild(node)
menu_node = etree.SubElement(element, 'Menu')
name_node = etree.SubElement(menu_node, 'Name')
name_node.text = name
return menu_node

def __addXmlTextElement(self, element, name, text):
node = self.doc.createElement(name)
text = self.doc.createTextNode(text)
node.appendChild(text)
return element.appendChild(node)
node = etree.SubElement(element, name)
node.text = text
return node

def __addXmlFilename(self, element, filename, type = "Include"):
def __addXmlFilename(self, element, filename, type_="Include"):
# remove old filenames
for node in self.__getXmlNodesByName(["Include", "Exclude"], element):
if node.childNodes[0].nodeName == "Filename" and node.childNodes[0].childNodes[0].nodeValue == filename:
element.removeChild(node)
includes = element.findall('Include')
excludes = element.findall('Exclude')
rules = includes + excludes
for rule in rules:
#FIXME: this finds only Rules whose FIRST child is a Filename element
if rule[0].tag == "Filename" and rule[0].text == filename:
element.remove(rule)
# shouldn't it remove all occurrences, like the following:
#filename_nodes = rule.findall('.//Filename'):
#for fn in filename_nodes:
#if fn.text == filename:
##element.remove(rule)
#parent = self.__get_parent_node(fn)
#parent.remove(fn)

# add new filename
node = self.doc.createElement(type)
node.appendChild(self.__addXmlTextElement(node, 'Filename', filename))
return element.appendChild(node)
node = etree.SubElement(element, type_)
self.__addXmlTextElement(node, 'Filename', filename)
return node

def __addXmlMove(self, element, old, new):
node = self.doc.createElement("Move")
node.appendChild(self.__addXmlTextElement(node, 'Old', old))
node.appendChild(self.__addXmlTextElement(node, 'New', new))
return element.appendChild(node)
node = etree.SubElement(element, "Move")
self.__addXmlTextElement(node, 'Old', old)
self.__addXmlTextElement(node, 'New', new)
return node

def __addXmlLayout(self, element, layout):
# remove old layout
for node in self.__getXmlNodesByName("Layout", element):
element.removeChild(node)
for node in element.findall("Layout"):
element.remove(node)

# add new layout
node = self.doc.createElement("Layout")
node = etree.SubElement(element, "Layout")
for order in layout.order:
if order[0] == "Separator":
child = self.doc.createElement("Separator")
node.appendChild(child)
child = etree.SubElement(node, "Separator")
elif order[0] == "Filename":
child = self.__addXmlTextElement(node, "Filename", order[1])
elif order[0] == "Menuname":
child = self.__addXmlTextElement(node, "Menuname", order[1])
elif order[0] == "Merge":
child = self.doc.createElement("Merge")
child.setAttribute("type", order[1])
node.appendChild(child)
return element.appendChild(node)

def __getXmlNodesByName(self, name, element):
for child in element.childNodes:
if child.nodeType == xml.dom.Node.ELEMENT_NODE and child.nodeName in name:
yield child
child = etree.SubElement(node, "Merge")
child.attrib["type"] = order[1]
return node

def __addLayout(self, parent):
layout = Layout()

@@ -498,14 +518,24 @@ class MenuEditor:
except ValueError:
pass

def __remove_whilespace_nodes(self, node):
remove_list = []
for child in node.childNodes:
if child.nodeType == xml.dom.minidom.Node.TEXT_NODE:
child.data = child.data.strip()
if not child.data.strip():
remove_list.append(child)
elif child.hasChildNodes():
def __remove_whitespace_nodes(self, node):
for child in node:
text = child.text.strip()
if not text:
child.text = ''
tail = child.tail.strip()
if not tail:
child.tail = ''
if len(child):
self.__remove_whilespace_nodes(child)
for node in remove_list:
node.parentNode.removeChild(node)

def __get_parent_node(self, node):
# elements in ElementTree don't hold a reference to their parent
for parent, child in self.__iter_parent():
if child is node:
return parent

def __iter_parent(self):
for parent in self.tree.getiterator():
for child in parent:
yield parent, child
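Editor's note: the minidom-to-ElementTree migration above replaces createElement/appendChild pairs with etree.SubElement. Note that SubElement takes the parent first and the tag second; a minimal sketch of building the same <Menu><Name> structure:

    import xml.etree.ElementTree as etree

    root = etree.Element('Menu')
    menu = etree.SubElement(root, 'Menu')   # parent first, then tag
    name = etree.SubElement(menu, 'Name')
    name.text = 'Applications'
    print(etree.tostring(root).decode())
    # <Menu><Menu><Name>Applications</Name></Menu></Menu>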
700
libs/xdg/Mime.py
@@ -20,6 +20,7 @@ information about the format of these files.
"""

import os
import re
import stat
import sys
import fnmatch

@@ -46,25 +47,42 @@ def _get_node_data(node):
return ''.join([n.nodeValue for n in node.childNodes]).strip()

def lookup(media, subtype = None):
"""Get the MIMEtype object for this type, creating a new one if needed.
"""Get the MIMEtype object for the given type.

This remains for backwards compatibility; calling MIMEtype now does
the same thing.

The name can either be passed as one part ('text/plain'), or as two
('text', 'plain').
"""
if subtype is None and '/' in media:
media, subtype = media.split('/', 1)
if (media, subtype) not in types:
types[(media, subtype)] = MIMEtype(media, subtype)
return types[(media, subtype)]
return MIMEtype(media, subtype)

class MIMEtype:
"""Type holding data about a MIME type"""
def __init__(self, media, subtype):
"Don't use this constructor directly; use mime.lookup() instead."
assert media and '/' not in media
assert subtype and '/' not in subtype
assert (media, subtype) not in types
class MIMEtype(object):
"""Class holding data about a MIME type.

Calling the class will return a cached instance, so there is only one
instance for each MIME type. The name can either be passed as one part
('text/plain'), or as two ('text', 'plain').
"""
def __new__(cls, media, subtype=None):
if subtype is None and '/' in media:
media, subtype = media.split('/', 1)
assert '/' not in subtype
media = media.lower()
subtype = subtype.lower()

try:
return types[(media, subtype)]
except KeyError:
mtype = super(MIMEtype, cls).__new__(cls)
mtype._init(media, subtype)
types[(media, subtype)] = mtype
return mtype

# If this is done in __init__, it is automatically called again each time
# the MIMEtype is returned by __new__, which we don't want. So we call it
# explicitly only when we construct a new instance.
def _init(self, media, subtype):
self.media = media
self.subtype = subtype
self._comment = None
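Editor's note: moving the cache lookup into __new__ makes MIMEtype instances interned: constructing the same type twice yields the same object, so identity comparison works. A sketch of the observable behaviour:

    from xdg.Mime import MIMEtype, lookup

    a = MIMEtype('text/plain')
    b = MIMEtype('text', 'plain')
    c = lookup('text/plain')   # kept for backwards compatibility
    assert a is b is c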
@@ -109,100 +127,106 @@ class MIMEtype:
return self.media + '/' + self.subtype

def __repr__(self):
return '<%s: %s>' % (self, self._comment or '(comment not loaded)')
return 'MIMEtype(%r, %r)' % (self.media, self.subtype)

def __hash__(self):
return hash(self.media) ^ hash(self.subtype)

class UnknownMagicRuleFormat(ValueError):
pass

class DiscardMagicRules(Exception):
"Raised when __NOMAGIC__ is found, and caught to discard previous rules."
pass

class MagicRule:
def __init__(self, f):
self.next=None
self.prev=None
also = None

def __init__(self, start, value, mask, word, range):
self.start = start
self.value = value
self.mask = mask
self.word = word
self.range = range

rule_ending_re = re.compile(br'(?:~(\d+))?(?:\+(\d+))?\n$')

@classmethod
def from_file(cls, f):
"""Read a rule from the binary magics file. Returns a 2-tuple of
the nesting depth and the MagicRule."""
line = f.readline()
#print line
ind=b''
while True:
c=f.read(1)
if c == b'>':
break
ind+=c
if not ind:
self.nest=0

# [indent] '>'
nest_depth, line = line.split(b'>', 1)
nest_depth = int(nest_depth) if nest_depth else 0

# start-offset '='
start, line = line.split(b'=', 1)
start = int(start)

if line == b'__NOMAGIC__\n':
raise DiscardMagicRules

# value length (2 bytes, big endian)
if sys.version_info[0] >= 3:
lenvalue = int.from_bytes(line[:2], byteorder='big')
else:
self.nest=int(ind.decode('ascii'))
lenvalue = (ord(line[0])<<8)+ord(line[1])
line = line[2:]

start = b''
while True:
c = f.read(1)
if c == b'=':
break
start += c
self.start = int(start.decode('ascii'))
# value
# This can contain newlines, so we may need to read more lines
while len(line) <= lenvalue:
line += f.readline()
value, line = line[:lenvalue], line[lenvalue:]

hb=f.read(1)
lb=f.read(1)
self.lenvalue = ord(lb)+(ord(hb)<<8)

self.value = f.read(self.lenvalue)

c = f.read(1)
if c == b'&':
self.mask = f.read(self.lenvalue)
c = f.read(1)
# ['&' mask]
if line.startswith(b'&'):
# This can contain newlines, so we may need to read more lines
while len(line) <= lenvalue:
line += f.readline()
mask, line = line[1:lenvalue+1], line[lenvalue+1:]
else:
self.mask=None
mask = None

if c == b'~':
w = b''
while c!=b'+' and c!=b'\n':
c=f.read(1)
if c==b'+' or c==b'\n':
break
w+=c
# ['~' word-size] ['+' range-length]
ending = cls.rule_ending_re.match(line)
if not ending:
# Per the spec, this will be caught and ignored, to allow
# for future extensions.
raise UnknownMagicRuleFormat(repr(line))

self.word=int(w.decode('ascii'))
else:
self.word=1
word, range = ending.groups()
word = int(word) if (word is not None) else 1
range = int(range) if (range is not None) else 1

if c==b'+':
r=b''
while c!=b'\n':
c=f.read(1)
if c==b'\n':
break
r+=c
#print r
self.range = int(r.decode('ascii'))
else:
self.range = 1
return nest_depth, cls(start, value, mask, word, range)

if c != b'\n':
raise ValueError('Malformed MIME magic line')

def getLength(self):
return self.start+self.lenvalue+self.range

def appendRule(self, rule):
if self.nest<rule.nest:
self.next=rule
rule.prev=self

elif self.prev:
self.prev.appendRule(rule)
def maxlen(self):
l = self.start + len(self.value) + self.range
if self.also:
return max(l, self.also.maxlen())
return l

def match(self, buffer):
if self.match0(buffer):
if self.next:
return self.next.match(buffer)
if self.also:
return self.also.match(buffer)
return True

def match0(self, buffer):
l=len(buffer)
lenvalue = len(self.value)
for o in range(self.range):
s=self.start+o
e=s+self.lenvalue
e=s+lenvalue
if l<e:
return False
if self.mask:
test=''
for i in range(self.lenvalue):
for i in range(lenvalue):
if PY3:
c = buffer[s+i] & self.mask[i]
else:
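Editor's note: the rule_ending_re pattern added above replaces the old byte-by-byte scan for the optional word-size and range suffix. A quick illustration (bytes are made up):

    import re
    rule_ending_re = re.compile(br'(?:~(\d+))?(?:\+(\d+))?\n$')
    word, rng = rule_ending_re.match(b'~2+8\n').groups()
    print(int(word), int(rng))    # -> 2 8
    word, rng = rule_ending_re.match(b'\n').groups()
    print(word, rng)              # -> None None (the defaults of 1 apply)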
@@ -215,46 +239,81 @@ class MagicRule:
return True

def __repr__(self):
return '<MagicRule %d>%d=[%d]%r&%r~%d+%d>' % (self.nest,
return 'MagicRule(start=%r, value=%r, mask=%r, word=%r, range=%r)' %(
self.start,
self.lenvalue,
self.value,
self.mask,
self.word,
self.range)

class MagicType:
def __init__(self, mtype):
self.mtype=mtype
self.top_rules=[]
self.last_rule=None

def getLine(self, f):
nrule=MagicRule(f)
class MagicMatchAny(object):
"""Match any of a set of magic rules.

if nrule.nest and self.last_rule:
self.last_rule.appendRule(nrule)
else:
self.top_rules.append(nrule)

self.last_rule=nrule

return nrule
This has a similar interface to MagicRule objects (i.e. its match() and
maxlen() methods), to allow for duck typing.
"""
def __init__(self, rules):
self.rules = rules

def match(self, buffer):
for rule in self.top_rules:
if rule.match(buffer):
return self.mtype
return any(r.match(buffer) for r in self.rules)

def __repr__(self):
return '<MagicType %s>' % self.mtype
def maxlen(self):
return max(r.maxlen() for r in self.rules)

@classmethod
def from_file(cls, f):
"""Read a set of rules from the binary magic file."""
c=f.read(1)
f.seek(-1, 1)
depths_rules = []
while c and c != b'[':
try:
depths_rules.append(MagicRule.from_file(f))
except UnknownMagicRuleFormat:
# Ignored to allow for extensions to the rule format.
pass
c=f.read(1)
if c:
f.seek(-1, 1)

# Build the rule tree
tree = [] # (rule, [(subrule,[subsubrule,...]), ...])
insert_points = {0:tree}
for depth, rule in depths_rules:
subrules = []
insert_points[depth].append((rule, subrules))
insert_points[depth+1] = subrules

return cls.from_rule_tree(tree)

@classmethod
def from_rule_tree(cls, tree):
"""From a nested list of (rule, subrules) pairs, build a MagicMatchAny
instance, recursing down the tree.

Where there's only one top-level rule, this is returned directly,
to simplify the nested structure. Returns None if no rules were read.
"""
rules = []
for rule, subrules in tree:
if subrules:
rule.also = cls.from_rule_tree(subrules)
rules.append(rule)

if len(rules)==0:
return None
if len(rules)==1:
return rules[0]
return cls(rules)

class MagicDB:
def __init__(self):
self.types={} # Indexed by priority, each entry is a list of type rules
self.maxlen=0
self.bytype = defaultdict(list) # mimetype -> [(priority, rule), ...]

def mergeFile(self, fname):
def merge_file(self, fname):
"""Read a magic binary file, and add its rules to this MagicDB."""
with open(fname, 'rb') as f:
line = f.readline()
if line != b'MIME-Magic\0\n':
@@ -262,68 +321,210 @@ class MagicDB:

while True:
shead = f.readline().decode('ascii')
#print shead
#print(shead)
if not shead:
break
if shead[0] != '[' or shead[-2:] != ']\n':
raise ValueError('Malformed section heading')
raise ValueError('Malformed section heading', shead)
pri, tname = shead[1:-2].split(':')
#print shead[1:-2]
pri = int(pri)
mtype = lookup(tname)

try:
ents = self.types[pri]
except:
ents = []
self.types[pri] = ents
rule = MagicMatchAny.from_file(f)
except DiscardMagicRules:
self.bytype.pop(mtype, None)
rule = MagicMatchAny.from_file(f)
if rule is None:
continue
#print rule

magictype = MagicType(mtype)
#print tname
self.bytype[mtype].append((pri, rule))

#rline=f.readline()
c=f.read(1)
f.seek(-1, 1)
while c and c != b'[':
rule=magictype.getLine(f)
#print rule
if rule and rule.getLength() > self.maxlen:
self.maxlen = rule.getLength()
def finalise(self):
"""Prepare the MagicDB for matching.

c = f.read(1)
f.seek(-1, 1)
This should be called after all rules have been merged into it.
"""
maxlen = 0
self.alltypes = [] # (priority, mimetype, rule)

ents.append(magictype)
#self.types[pri]=ents
if not c:
break
for mtype, rules in self.bytype.items():
for pri, rule in rules:
self.alltypes.append((pri, mtype, rule))
maxlen = max(maxlen, rule.maxlen())

def match_data(self, data, max_pri=100, min_pri=0):
for priority in sorted(self.types.keys(), reverse=True):
self.maxlen = maxlen # Number of bytes to read from files
self.alltypes.sort(key=lambda x: x[0], reverse=True)

def match_data(self, data, max_pri=100, min_pri=0, possible=None):
"""Do magic sniffing on some bytes.

max_pri & min_pri can be used to specify the maximum & minimum priority
rules to look for. possible can be a list of mimetypes to check, or None
(the default) to check all mimetypes until one matches.

Returns the MIMEtype found, or None if no entries match.
"""
if possible is not None:
types = []
for mt in possible:
for pri, rule in self.bytype[mt]:
types.append((pri, mt, rule))
types.sort(key=lambda x: x[0])
else:
types = self.alltypes

for priority, mimetype, rule in types:
#print priority, max_pri, min_pri
if priority > max_pri:
continue
if priority < min_pri:
break
for type in self.types[priority]:
m=type.match(data)
if m:
return m

def match(self, path, max_pri=100, min_pri=0):
try:
with open(path, 'rb') as f:
buf = f.read(self.maxlen)
return self.match_data(buf, max_pri, min_pri)
except:
pass
if rule.match(data):
return mimetype

def match(self, path, max_pri=100, min_pri=0, possible=None):
"""Read data from the file and do magic sniffing on it.

max_pri & min_pri can be used to specify the maximum & minimum priority
rules to look for. possible can be a list of mimetypes to check, or None
(the default) to check all mimetypes until one matches.

Returns the MIMEtype found, or None if no entries match. Raises IOError
if the file can't be opened.
"""
with open(path, 'rb') as f:
buf = f.read(self.maxlen)
return self.match_data(buf, max_pri, min_pri, possible)

def __repr__(self):
return '<MagicDB %s>' % self.types
return '<MagicDB (%d types)>' % len(self.alltypes)

class GlobDB(object):
def __init__(self):
"""Prepare the GlobDB. It can't actually be used until .finalise() is
called, but merge_file() can be used to add data before that.
"""
# Maps mimetype to {(weight, glob, flags), ...}
self.allglobs = defaultdict(set)

def merge_file(self, path):
"""Loads name matching information from a globs2 file."""
allglobs = self.allglobs
with open(path) as f:
for line in f:
if line.startswith('#'): continue # Comment

fields = line[:-1].split(':')
weight, type_name, pattern = fields[:3]
weight = int(weight)
mtype = lookup(type_name)
if len(fields) > 3:
flags = fields[3].split(',')
else:
flags = ()

if pattern == '__NOGLOBS__':
# This signals to discard any previous globs
allglobs.pop(mtype, None)
continue

allglobs[mtype].add((weight, pattern, tuple(flags)))

def finalise(self):
"""Prepare the GlobDB for matching.

This should be called after all files have been merged into it.
"""
self.exts = defaultdict(list) # Maps extensions to [(type, weight),...]
self.cased_exts = defaultdict(list)
self.globs = [] # List of (regex, type, weight) triplets
self.literals = {} # Maps literal names to (type, weight)
self.cased_literals = {}

for mtype, globs in self.allglobs.items():
mtype = mtype.canonical()
for weight, pattern, flags in globs:

cased = 'cs' in flags

if pattern.startswith('*.'):
# *.foo -- extension pattern
rest = pattern[2:]
if not ('*' in rest or '[' in rest or '?' in rest):
if cased:
self.cased_exts[rest].append((mtype, weight))
else:
self.exts[rest.lower()].append((mtype, weight))
continue

if ('*' in pattern or '[' in pattern or '?' in pattern):
# Translate the glob pattern to a regex & compile it
re_flags = 0 if cased else re.I
pattern = re.compile(fnmatch.translate(pattern), flags=re_flags)
self.globs.append((pattern, mtype, weight))
else:
# No wildcards - literal pattern
if cased:
self.cased_literals[pattern] = (mtype, weight)
else:
self.literals[pattern.lower()] = (mtype, weight)

# Sort globs by weight & length
self.globs.sort(reverse=True, key=lambda x: (x[2], len(x[0].pattern)) )

def first_match(self, path):
"""Return the first match found for a given path, or None if no match
is found."""
try:
return next(self._match_path(path))[0]
except StopIteration:
return None

def all_matches(self, path):
"""Return a list of (MIMEtype, glob weight) pairs for the path."""
return list(self._match_path(path))

def _match_path(self, path):
"""Yields pairs of (mimetype, glob weight)."""
leaf = os.path.basename(path)

# Literals (no wildcards)
if leaf in self.cased_literals:
yield self.cased_literals[leaf]

lleaf = leaf.lower()
if lleaf in self.literals:
yield self.literals[lleaf]

# Extensions
ext = leaf
while 1:
p = ext.find('.')
if p < 0: break
ext = ext[p + 1:]
if ext in self.cased_exts:
for res in self.cased_exts[ext]:
yield res
ext = lleaf
while 1:
p = ext.find('.')
if p < 0: break
ext = ext[p+1:]
if ext in self.exts:
for res in self.exts[ext]:
yield res

# Other globs
for (regex, mime_type, weight) in self.globs:
if regex.match(leaf):
yield (mime_type, weight)

# Some well-known types
text = lookup('text', 'plain')
octet_stream = lookup('application', 'octet-stream')
inode_block = lookup('inode', 'blockdevice')
inode_char = lookup('inode', 'chardevice')
inode_dir = lookup('inode', 'directory')
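Editor's note: finalise() above compiles wildcard patterns via fnmatch.translate, while bare extensions and literal names go into plain dict lookups for speed. A quick illustration of the translation step:

    import fnmatch
    import re

    pattern = re.compile(fnmatch.translate('*.tar.[gb]z'), flags=re.I)
    print(bool(pattern.match('backup.tar.gz')))   # -> True
    print(bool(pattern.match('backup.tar.xz')))   # -> False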
@ -336,44 +537,12 @@ app_exe = lookup('application', 'executable')
|
|||
_cache_uptodate = False
|
||||
|
||||
def _cache_database():
|
||||
global exts, globs, literals, magic, aliases, inheritance, _cache_uptodate
|
||||
global globs, magic, aliases, inheritance, _cache_uptodate
|
||||
|
||||
_cache_uptodate = True
|
||||
|
||||
exts = {} # Maps extensions to types
|
||||
globs = [] # List of (glob, type) pairs
|
||||
literals = {} # Maps literal names to types
|
||||
aliases = {} # Maps alias Mime types to canonical names
|
||||
inheritance = defaultdict(set) # Maps to sets of parent mime types.
|
||||
magic = MagicDB()
|
||||
|
||||
def _import_glob_file(path):
|
||||
"""Loads name matching information from a MIME directory."""
|
||||
with open(path) as f:
|
||||
for line in f:
|
||||
if line.startswith('#'): continue
|
||||
line = line[:-1]
|
||||
|
||||
type_name, pattern = line.split(':', 1)
|
||||
mtype = lookup(type_name)
|
||||
|
||||
if pattern.startswith('*.'):
|
||||
rest = pattern[2:]
|
||||
if not ('*' in rest or '[' in rest or '?' in rest):
|
||||
exts[rest] = mtype
|
||||
continue
|
||||
if '*' in pattern or '[' in pattern or '?' in pattern:
|
||||
globs.append((pattern, mtype))
|
||||
else:
|
||||
literals[pattern] = mtype
|
||||
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'globs')):
|
||||
_import_glob_file(path)
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'magic')):
|
||||
magic.mergeFile(path)
|
||||
|
||||
# Sort globs by length
|
||||
globs.sort(key=lambda x: len(x[0]) )
|
||||
|
||||
# Load aliases
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'aliases')):
|
||||
|
@ -382,6 +551,18 @@ def _cache_database():
|
|||
alias, canonical = line.strip().split(None, 1)
|
||||
aliases[alias] = canonical
|
||||
|
||||
# Load filename patterns (globs)
|
||||
globs = GlobDB()
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'globs2')):
|
||||
globs.merge_file(path)
|
||||
globs.finalise()
|
||||
|
||||
# Load magic sniffing data
|
||||
magic = MagicDB()
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'magic')):
|
||||
magic.merge_file(path)
|
||||
magic.finalise()
|
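For orientation, load_data_paths() resolves its arguments against XDG_DATA_HOME and XDG_DATA_DIRS, so the files merged above typically come from the user and system databases:

    from xdg import BaseDirectory
    list(BaseDirectory.load_data_paths('mime', 'globs2'))
    # e.g. ['/home/me/.local/share/mime/globs2', '/usr/share/mime/globs2']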
||||
|
||||
# Load subclasses
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'subclasses')):
|
||||
with open(path, 'r') as f:
|
||||
|
@ -396,35 +577,7 @@ def update_cache():
|
|||
def get_type_by_name(path):
|
||||
"""Returns type of file by its name, or None if not known"""
|
||||
update_cache()
|
||||
|
||||
leaf = os.path.basename(path)
|
||||
if leaf in literals:
|
||||
return literals[leaf]
|
||||
|
||||
lleaf = leaf.lower()
|
||||
if lleaf in literals:
|
||||
return literals[lleaf]
|
||||
|
||||
ext = leaf
|
||||
while 1:
|
||||
p = ext.find('.')
|
||||
if p < 0: break
|
||||
ext = ext[p + 1:]
|
||||
if ext in exts:
|
||||
return exts[ext]
|
||||
ext = lleaf
|
||||
while 1:
|
||||
p = ext.find('.')
|
||||
if p < 0: break
|
||||
ext = ext[p+1:]
|
||||
if ext in exts:
|
||||
return exts[ext]
|
||||
for (glob, mime_type) in globs:
|
||||
if fnmatch.fnmatch(leaf, glob):
|
||||
return mime_type
|
||||
if fnmatch.fnmatch(lleaf, glob):
|
||||
return mime_type
|
||||
return None
|
||||
return globs.first_match(path)
|
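A minimal usage sketch (the file need not exist, since only the name is consulted):

    from xdg import Mime
    Mime.get_type_by_name('report.pdf')    # e.g. application/pdf
    Mime.get_type_by_name('no-extension')  # None unless some glob matches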
||||
|
||||
def get_type_by_contents(path, max_pri=100, min_pri=0):
|
||||
"""Returns type of file by its contents, or None if not known"""
|
||||
|
@ -438,15 +591,24 @@ def get_type_by_data(data, max_pri=100, min_pri=0):
|
|||
|
||||
return magic.match_data(data, max_pri, min_pri)
|
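For example, sniffing an in-memory buffer (the result depends on the installed magic database):

    from xdg import Mime
    with open('/bin/sh', 'rb') as f:
        data = f.read(8192)
    print(Mime.get_type_by_data(data))  # likely an ELF type such as application/x-sharedlib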
||||
|
||||
def _get_type_by_stat(st_mode):
|
||||
"""Match special filesystem objects to Mimetypes."""
|
||||
if stat.S_ISDIR(st_mode): return inode_dir
|
||||
elif stat.S_ISCHR(st_mode): return inode_char
|
||||
elif stat.S_ISBLK(st_mode): return inode_block
|
||||
elif stat.S_ISFIFO(st_mode): return inode_fifo
|
||||
elif stat.S_ISLNK(st_mode): return inode_symlink
|
||||
elif stat.S_ISSOCK(st_mode): return inode_socket
|
||||
return inode_door
|
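A quick illustration of the stat-mode checks this relies on (paths are illustrative):

    import os, stat
    stat.S_ISDIR(os.stat('/tmp').st_mode)        # True -> inode/directory
    stat.S_ISCHR(os.stat('/dev/null').st_mode)   # True -> inode/chardevice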
||||
|
||||
def get_type(path, follow=True, name_pri=100):
|
||||
"""Returns type of file indicated by path.
|
||||
|
||||
path :
|
||||
pathname to check (need not exist)
|
||||
follow :
|
||||
when reading file, follow symbolic links
|
||||
name_pri :
|
||||
Priority to do name matches. 100=override magic
|
||||
This function is *deprecated* - :func:`get_type2` is more accurate.
|
||||
|
||||
:param path: pathname to check (need not exist)
|
||||
:param follow: when reading file, follow symbolic links
|
||||
:param name_pri: Priority to do name matches. 100=override magic
|
||||
|
||||
This tries to use the contents of the file, and falls back to the name. It
|
||||
can also handle special filesystem objects like directories and sockets.
|
||||
|
@ -463,6 +625,7 @@ def get_type(path, follow=True, name_pri=100):
|
|||
return t or text
|
||||
|
||||
if stat.S_ISREG(st.st_mode):
|
||||
# Regular file
|
||||
t = get_type_by_contents(path, min_pri=name_pri)
|
||||
if not t: t = get_type_by_name(path)
|
||||
if not t: t = get_type_by_contents(path, max_pri=name_pri)
|
||||
|
@ -472,13 +635,112 @@ def get_type(path, follow=True, name_pri=100):
|
|||
else:
|
||||
return text
|
||||
return t
|
||||
elif stat.S_ISDIR(st.st_mode): return inode_dir
|
||||
elif stat.S_ISCHR(st.st_mode): return inode_char
|
||||
elif stat.S_ISBLK(st.st_mode): return inode_block
|
||||
elif stat.S_ISFIFO(st.st_mode): return inode_fifo
|
||||
elif stat.S_ISLNK(st.st_mode): return inode_symlink
|
||||
elif stat.S_ISSOCK(st.st_mode): return inode_socket
|
||||
return inode_door
|
||||
else:
|
||||
return _get_type_by_stat(st.st_mode)
|
||||
|
||||
def get_type2(path, follow=True):
|
||||
"""Find the MIMEtype of a file using the XDG recommended checking order.
|
||||
|
||||
This first checks the filename, then uses file contents if the name doesn't
|
||||
give an unambiguous MIMEtype. It can also handle special filesystem objects
|
||||
like directories and sockets.
|
||||
|
||||
:param path: file path to examine (need not exist)
|
||||
:param follow: whether to follow symlinks
|
||||
|
||||
:rtype: :class:`MIMEtype`
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
update_cache()
|
||||
|
||||
try:
|
||||
st = os.stat(path) if follow else os.lstat(path)
|
||||
except OSError:
|
||||
return get_type_by_name(path) or octet_stream
|
||||
|
||||
if not stat.S_ISREG(st.st_mode):
|
||||
# Special filesystem objects
|
||||
return _get_type_by_stat(st.st_mode)
|
||||
|
||||
mtypes = sorted(globs.all_matches(path), key=(lambda x: x[1]), reverse=True)
|
||||
if mtypes:
|
||||
max_weight = mtypes[0][1]
|
||||
i = 1
|
||||
for mt, w in mtypes[1:]:
|
||||
if w < max_weight:
|
||||
break
|
||||
i += 1
|
||||
mtypes = mtypes[:i]
|
||||
if len(mtypes) == 1:
|
||||
return mtypes[0][0]
|
||||
|
||||
possible = [mt for mt, w in mtypes]
|
||||
else:
|
||||
possible = None # Try all magic matches
|
||||
|
||||
try:
|
||||
t = magic.match(path, possible=possible)
|
||||
except IOError:
|
||||
t = None
|
||||
|
||||
if t:
|
||||
return t
|
||||
elif mtypes:
|
||||
return mtypes[0][0]
|
||||
elif stat.S_IMODE(st.st_mode) & 0o111:
|
||||
return app_exe
|
||||
else:
|
||||
return text if is_text_file(path) else octet_stream
|
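Typical use (paths and results are illustrative; output depends on the local MIME database):

    from xdg import Mime
    Mime.get_type2('photo.jpeg')    # image/jpeg, from an unambiguous glob
    Mime.get_type2('/etc/passwd')   # likely text/plain, via content sniffing
    Mime.get_type2('/dev/null')     # inode/chardevice, a special filesystem object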
||||
|
||||
def is_text_file(path):
|
||||
"""Guess whether a file contains text or binary data.
|
||||
|
||||
Heuristic: binary if the first 32 bytes include ASCII control characters.
|
||||
This rule may change in future versions.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
try:
|
||||
f = open(path, 'rb')
|
||||
except IOError:
|
||||
return False
|
||||
|
||||
with f:
|
||||
return _is_text(f.read(32))
|
||||
|
||||
if PY3:
|
||||
def _is_text(data):
|
||||
return not any(b <= 0x8 or 0xe <= b < 0x20 or b == 0x7f for b in data)
|
||||
else:
|
||||
def _is_text(data):
|
||||
return not any(b <= '\x08' or '\x0e' <= b < '\x20' or b == '\x7f'
|
||||
for b in data)
|
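A quick check of the heuristic (results depend on the files present):

    from xdg import Mime
    Mime.is_text_file('/etc/hostname')  # True: plain ASCII text
    Mime.is_text_file('/bin/sh')        # False: the ELF header starts with 0x7f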
||||
|
||||
_mime2ext_cache = None
|
||||
_mime2ext_cache_uptodate = False
|
||||
|
||||
def get_extensions(mimetype):
|
||||
"""Retrieve the set of filename extensions matching a given MIMEtype.
|
||||
|
||||
Extensions are returned without a leading dot, e.g. 'py'. If no extensions
|
||||
are registered for the MIMEtype, returns an empty set.
|
||||
|
||||
The extensions are stored in a cache the first time this is called.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
global _mime2ext_cache, _mime2ext_cache_uptodate
|
||||
update_cache()
|
||||
if not _mime2ext_cache_uptodate:
|
||||
_mime2ext_cache = defaultdict(set)
|
||||
for ext, mtypes in globs.exts.items():
|
||||
for mtype, prio in mtypes:
|
||||
_mime2ext_cache[mtype].add(ext)
|
||||
_mime2ext_cache_uptodate = True
|
||||
|
||||
return _mime2ext_cache[mimetype]
|
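For example (assuming the standard shared-mime-info database is installed):

    from xdg import Mime
    Mime.get_extensions(Mime.lookup('text', 'x-python'))    # e.g. {'py'}
    Mime.get_extensions(Mime.lookup('application', 'zip'))  # e.g. {'zip'}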
||||
|
||||
|
||||
def install_mime_info(application, package_file):
|
||||
"""Copy 'package_file' as ``~/.local/share/mime/packages/<application>.xml.``
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
"""
|
||||
Implementation of the XDG Recent File Storage Specification Version 0.2
|
||||
Implementation of the XDG Recent File Storage Specification
|
||||
http://standards.freedesktop.org/recent-file-spec
|
||||
"""
|
||||
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
__all__ = [ "BaseDirectory", "DesktopEntry", "Menu", "Exceptions", "IniFile", "IconTheme", "Locale", "Config", "Mime", "RecentFiles", "MenuEditor" ]
|
||||
|
||||
__version__ = "0.25"
|
||||
__version__ = "0.26"
|
||||
|
|
|
@ -9,3 +9,67 @@ else:
|
|||
# Unicode-like literals
|
||||
def u(s):
|
||||
return s.decode('utf-8')
|
||||
|
||||
try:
|
||||
# which() is available from Python 3.3
|
||||
from shutil import which
|
||||
except ImportError:
|
||||
import os
|
||||
# This is a copy of which() from Python 3.3
|
||||
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
|
||||
"""Given a command, mode, and a PATH string, return the path which
|
||||
conforms to the given mode on the PATH, or None if there is no such
|
||||
file.
|
||||
|
||||
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
|
||||
of os.environ.get("PATH"), or can be overridden with a custom search
|
||||
path.
|
||||
|
||||
"""
|
||||
# Check that a given file can be accessed with the correct mode.
|
||||
# Additionally check that `file` is not a directory, as on Windows
|
||||
# directories pass the os.access check.
|
||||
def _access_check(fn, mode):
|
||||
return (os.path.exists(fn) and os.access(fn, mode)
|
||||
and not os.path.isdir(fn))
|
||||
|
||||
# If we're given a path with a directory part, look it up directly rather
|
||||
# than referring to PATH directories. This includes checking relative to the
|
||||
# current directory, e.g. ./script
|
||||
if os.path.dirname(cmd):
|
||||
if _access_check(cmd, mode):
|
||||
return cmd
|
||||
return None
|
||||
|
||||
path = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep)
|
||||
|
||||
if sys.platform == "win32":
|
||||
# The current directory takes precedence on Windows.
|
||||
if os.curdir not in path:
|
||||
path.insert(0, os.curdir)
|
||||
|
||||
# PATHEXT is necessary to check on Windows.
|
||||
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
|
||||
# See if the given file matches any of the expected path extensions.
|
||||
# This will allow us to short circuit when given "python.exe".
|
||||
# If it does match, only test that one, otherwise we have to try
|
||||
# others.
|
||||
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
|
||||
files = [cmd]
|
||||
else:
|
||||
files = [cmd + ext for ext in pathext]
|
||||
else:
|
||||
# On other platforms you don't have things like PATHEXT to tell you
|
||||
# what file suffixes are executable, so just pass on cmd as-is.
|
||||
files = [cmd]
|
||||
|
||||
seen = set()
|
||||
for dir in path:
|
||||
normdir = os.path.normcase(dir)
|
||||
if normdir not in seen:
|
||||
seen.add(normdir)
|
||||
for thefile in files:
|
||||
name = os.path.join(dir, thefile)
|
||||
if _access_check(name, mode):
|
||||
return name
|
||||
return None
|
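Usage mirrors the Python 3.3 standard-library function (the module path is assumed to be xdg.util):

    from xdg.util import which
    which('ls')                    # e.g. '/bin/ls', or None if not on PATH
    which('ls', path='/usr/bin')   # restrict the search to one directory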
||||
|
|
3
requirements-py2.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
backports.functools-lru-cache
|
||||
enum34
|
||||
futures
|
|
@ -4,6 +4,7 @@ configobj
|
|||
guessit
|
||||
linktastic
|
||||
python-qbittorrent
|
||||
pyxdg
|
||||
rencode
|
||||
requests
|
||||
setuptools
|
||||
|
|