mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-20 13:23:18 -07:00)

Merge pull request #1049 from labrys/code-cleanup-core: Code cleanup core
This commit is contained in commit e10ca1a1c3
41 changed files with 2484 additions and 1622 deletions
@@ -18,7 +18,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
     uniquePath = 1

     if clientAgent != 'manual' and not core.DOWNLOADINFO:
-        logger.debug('Adding TORRENT download info for directory %s to database' % (inputDirectory))
+        logger.debug('Adding TORRENT download info for directory {0} to database'.format(inputDirectory))

         myDB = nzbToMediaDB.DBConnection()
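The change pattern repeated throughout this file is a mechanical swap from %-interpolation to str.format(). A minimal sketch of the equivalence (the variable name is illustrative, not from the codebase):

directory = '/downloads/example'
old = 'Adding TORRENT download info for directory %s to database' % (directory)
new = 'Adding TORRENT download info for directory {0} to database'.format(directory)
assert old == new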
@@ -42,7 +42,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
         }
         myDB.upsert("downloads", newValueDict, controlValueDict)

-    logger.debug("Received Directory: %s | Name: %s | Category: %s" % (inputDirectory, inputName, inputCategory))
+    logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory))

     inputDirectory, inputName, inputCategory, root = core.category_search(inputDirectory, inputName,
                                                                           inputCategory, root,
@@ -58,7 +58,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
         inputDirectory = inputDirectory.encode(core.SYS_ENCODING)
     except: pass

-    logger.debug("Determined Directory: %s | Name: %s | Category: %s" % (inputDirectory, inputName, inputCategory))
+    logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory))

     # auto-detect section
     section = core.CFG.findsection(inputCategory).isenabled()
@@ -66,7 +66,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
         section = core.CFG.findsection("ALL").isenabled()
         if section is None:
             logger.error(
-                'Category:[%s] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.' % (
+                'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format(
                     inputCategory))
             return [-1, ""]
         else:
@@ -74,15 +74,15 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,

     if len(section) > 1:
         logger.error(
-            'Category:[%s] is not unique, %s are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.' % (
+            'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format(
                 usercat, section.keys()))
         return [-1, ""]

     if section:
         sectionName = section.keys()[0]
-        logger.info('Auto-detected SECTION:%s' % (sectionName))
+        logger.info('Auto-detected SECTION:{0}'.format(sectionName))
     else:
-        logger.error("Unable to locate a section with subsection:%s enabled in your autoProcessMedia.cfg, exiting!" % (
+        logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format(
             inputCategory))
         return [-1, ""]
@@ -129,15 +129,15 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
     if outputDestination in inputDirectory:
         outputDestination = inputDirectory

-    logger.info("Output directory set to: %s" % (outputDestination))
+    logger.info("Output directory set to: {0}".format(outputDestination))

     if core.SAFE_MODE and outputDestination == core.TORRENT_DEFAULTDIR:
         logger.error(
-            'The output directory:[%s] is the Download Directory. Edit outputDirectory in autoProcessMedia.cfg. Exiting' % (
+            'The output directory:[{0}] is the Download Directory. Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format(
                 inputDirectory))
         return [-1, ""]

-    logger.debug("Scanning files in directory: %s" % (inputDirectory))
+    logger.debug("Scanning files in directory: {0}".format(inputDirectory))

     if sectionName == 'HeadPhones':
         core.NOFLATTEN.extend(
@@ -149,7 +149,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
         inputFiles = core.listMediaFiles(inputDirectory, archives=False)
     else:
         inputFiles = core.listMediaFiles(inputDirectory)
-    logger.debug("Found %s files in %s" % (str(len(inputFiles)), inputDirectory))
+    logger.debug("Found {0} files in {1}".format(len(inputFiles), inputDirectory))
     for inputFile in inputFiles:
         filePath = os.path.dirname(inputFile)
         fileName, fileExt = os.path.splitext(os.path.basename(inputFile))
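This hunk also drops the explicit str() around len(inputFiles); str.format() applies the str() conversion itself for a plain {0} field. A quick check of that equivalence:

files = ['a.mkv', 'b.srt']
assert "Found {0} files".format(len(files)) == "Found %s files" % str(len(files))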
@@ -161,17 +161,17 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
                 targetFile = core.os.path.join(
                     core.os.path.join(outputDestination, os.path.basename(filePath)), fullFileName)
                 logger.debug(
-                    "Setting outputDestination to %s to preserve folder structure" % (os.path.dirname(targetFile)))
+                    "Setting outputDestination to {0} to preserve folder structure".format(os.path.dirname(targetFile)))
             try:
                 targetFile = targetFile.encode(core.SYS_ENCODING)
             except: pass
             if root == 1:
                 if not foundFile:
-                    logger.debug("Looking for %s in: %s" % (inputName, inputFile))
+                    logger.debug("Looking for {0} in: {1}".format(inputName, inputFile))
                 if (core.sanitizeName(inputName) in core.sanitizeName(inputFile)) or (
                         core.sanitizeName(fileName) in core.sanitizeName(inputName)):
                     foundFile = True
-                    logger.debug("Found file %s that matches Torrent Name %s" % (fullFileName, inputName))
+                    logger.debug("Found file {0} that matches Torrent Name {1}".format(fullFileName, inputName))
                 else:
                     continue
@@ -183,7 +183,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
                 logger.debug("Looking for files with modified/created dates less than 5 minutes old.")
                 if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)):
                     foundFile = True
-                    logger.debug("Found file %s with date modifed/created less than 5 minutes ago." % (fullFileName))
+                    logger.debug("Found file {0} with date modifed/created less than 5 minutes ago.".format(fullFileName))
                 else:
                     continue  # This file has not been recently moved or created, skip it
@@ -192,12 +192,12 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
                 core.copy_link(inputFile, targetFile, core.USELINK)
                 core.rmReadOnly(targetFile)
             except:
-                logger.error("Failed to link: %s to %s" % (inputFile, targetFile))
+                logger.error("Failed to link: {0} to {1}".format(inputFile, targetFile))

     inputName, outputDestination = convert_to_ascii(inputName, outputDestination)

     if extract == 1:
-        logger.debug('Checking for archives to extract in directory: %s' % (inputDirectory))
+        logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
         core.extractFiles(inputDirectory, outputDestination, keep_archive)

     if not inputCategory in core.NOFLATTEN:  # don't flatten hp in case multi cd albums, and we need to copy this back later.
@@ -208,19 +208,19 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
         numVideos = len(
             core.listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False))
         if numVideos > 0:
-            logger.info("Found %s media files in %s" % (numVideos, outputDestination))
+            logger.info("Found {0} media files in {1}".format(numVideos, outputDestination))
             status = 0
         elif extract != 1:
-            logger.info("Found no media files in %s. Sending to %s to process" % (outputDestination, sectionName))
+            logger.info("Found no media files in {0}. Sending to {1} to process".format(outputDestination, sectionName))
             status = 0
         else:
-            logger.warning("Found no media files in %s" % outputDestination)
+            logger.warning("Found no media files in {0}".format(outputDestination))

     # Only these sections can handling failed downloads so make sure everything else gets through without the check for failed
     if not sectionName in ['CouchPotato', 'SickBeard', 'NzbDrone']:
         status = 0

-    logger.info("Calling %s:%s to post-process:%s" % (sectionName, usercat, inputName))
+    logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, usercat, inputName))

     if core.TORRENT_CHMOD_DIRECTORY:
         core.rchmod(outputDestination, core.TORRENT_CHMOD_DIRECTORY)
@@ -262,10 +262,10 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,

     # remove torrent
     if core.USELINK == 'move-sym' and not core.DELETE_ORIGINAL == 1:
-        logger.debug('Checking for sym-links to re-direct in: %s' % (inputDirectory))
+        logger.debug('Checking for sym-links to re-direct in: {0}'.format(inputDirectory))
         for dirpath, dirs, files in os.walk(inputDirectory):
             for file in files:
-                logger.debug('Checking symlink: %s' % (os.path.join(dirpath,file)))
+                logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath,file)))
                 core.replace_links(os.path.join(dirpath,file))
     core.remove_torrent(clientAgent, inputHash, inputID, inputName)
@@ -284,11 +284,11 @@ def main(args):
     clientAgent = core.TORRENT_CLIENTAGENT

     logger.info("#########################################################")
-    logger.info("## ..::[%s]::.. ##" % os.path.basename(__file__))
+    logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__)))
     logger.info("#########################################################")

     # debug command line options
-    logger.debug("Options passed into TorrentToMedia: %s" % (args))
+    logger.debug("Options passed into TorrentToMedia: {0}".format(args))

     # Post-Processing Result
     result = [ 0, "" ]
@@ -310,16 +310,16 @@ def main(args):
             if not core.CFG[section][subsection].isenabled():
                 continue
             for dirName in core.getDirs(section, subsection, link='hard'):
-                logger.info("Starting manual run for %s:%s - Folder:%s" % (section, subsection, dirName))
+                logger.info("Starting manual run for {0}:{1} - Folder:{2}".format(section, subsection, dirName))

-                logger.info("Checking database for download info for %s ..." % (os.path.basename(dirName)))
+                logger.info("Checking database for download info for {0} ...".format(os.path.basename(dirName)))
                 core.DOWNLOADINFO = core.get_downloadInfo(os.path.basename(dirName), 0)
                 if core.DOWNLOADINFO:
                     logger.info(
-                        "Found download info for %s, setting variables now ..." % (os.path.basename(dirName)))
+                        "Found download info for {0}, setting variables now ...".format(os.path.basename(dirName)))
                 else:
                     logger.info(
-                        'Unable to locate download info for %s, continuing to try and process this release ...' % (
+                        'Unable to locate download info for {0}, continuing to try and process this release ...'.format(
                             os.path.basename(dirName))
                     )
@@ -350,14 +350,14 @@ def main(args):
                 results = processTorrent(dirName, inputName, subsection, inputHash, inputID,
                                          clientAgent)
                 if results[0] != 0:
-                    logger.error("A problem was reported when trying to perform a manual run for %s:%s." % (
+                    logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format(
                         section, subsection))
                     result = results

     if result[0] == 0:
-        logger.info("The %s script completed successfully." % (args[0]))
+        logger.info("The {0} script completed successfully.".format(args[0]))
     else:
-        logger.error("A problem was reported in the %s script." % (args[0]))
+        logger.error("A problem was reported in the {0} script.".format(args[0]))
     del core.MYAPP
     return result[0]
515 core/__init__.py
@@ -1,3 +1,7 @@
 # coding=utf-8

+from __future__ import print_function
+
 import locale
 import os
 import re
@@ -6,6 +10,8 @@ import sys
 import platform
 import time

+from six.moves import reload_module
+
 # init libs
 PROGRAM_DIR = os.path.dirname(os.path.normpath(os.path.abspath(os.path.join(__file__, os.pardir))))
 LIBS_DIR = os.path.join(PROGRAM_DIR, 'libs')
@@ -32,15 +38,17 @@ from core.autoProcess.autoProcessMusic import autoProcessMusic
 from core.autoProcess.autoProcessTV import autoProcessTV
 from core import logger, versionCheck, nzbToMediaDB
 from core.nzbToMediaConfig import config
-from core.nzbToMediaUtil import category_search, sanitizeName, copy_link, parse_args, flatten, getDirs, \
-    rmReadOnly,rmDir, pause_torrent, resume_torrent, remove_torrent, listMediaFiles, \
-    extractFiles, cleanDir, update_downloadInfoStatus, get_downloadInfo, WakeUp, makeDir, cleanDir, \
-    create_torrent_class, listMediaFiles, RunningProcess
+from core.nzbToMediaUtil import (
+    category_search, sanitizeName, copy_link, parse_args, flatten, getDirs,
+    rmReadOnly, rmDir, pause_torrent, resume_torrent, remove_torrent, listMediaFiles,
+    extractFiles, cleanDir, update_downloadInfoStatus, get_downloadInfo, WakeUp, makeDir, cleanDir,
+    create_torrent_class, listMediaFiles, RunningProcess,
+)
 from core.transcoder import transcoder
 from core.databases import mainDB

 # Client Agents
-NZB_CLIENTS = ['sabnzbd','nzbget']
+NZB_CLIENTS = ['sabnzbd', 'nzbget']
 TORRENT_CLIENTS = ['transmission', 'deluge', 'utorrent', 'rtorrent', 'other']

 # sabnzbd constants
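The four-line backslash-continued import is replaced by a parenthesized import list, which needs no line continuations and tolerates a trailing comma. A minimal sketch of the two styles with standard-library names:

# backslash continuation: breaks if anything follows the backslash
from os.path import join, split, \
    basename, dirname

# parenthesized form used by this commit
from os.path import (
    join, split,
    basename, dirname,
)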
@@ -61,7 +69,8 @@ FORKS[FORK_FAILED_TORRENT] = {"dir": None, "failed": None, "process_method": Non
 FORKS[FORK_SICKRAGETV] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None}
 FORKS[FORK_SICKRAGE] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None}
 FORKS[FORK_SICKGEAR] = {"dir": None, "failed": None, "process_method": None, "force": None}
-ALL_FORKS = {"dir": None, "dirName": None, "proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None}
+ALL_FORKS = {"dir": None, "dirName": None, "proc_dir": None, "failed": None, "process_method": None, "force": None,
+             "delete_on": None}

 # NZBGet Exit Codes
 NZBGET_POSTPROCESS_PARCHECK = 92
@@ -201,6 +210,7 @@ USER_SCRIPT_RUNONCE = None

 __INITIALIZED__ = False

+
 def initialize(section=None):
     global NZBGET_POSTPROCESS_ERROR, NZBGET_POSTPROCESS_NONE, NZBGET_POSTPROCESS_PARCHECK, NZBGET_POSTPROCESS_SUCCESS, \
         NZBTOMEDIA_TIMEOUT, FORKS, FORK_DEFAULT, FORK_FAILED_TORRENT, FORK_FAILED, \
@@ -223,8 +233,8 @@ def initialize(section=None):

     if __INITIALIZED__:
         return False

-    if os.environ.has_key('NTM_LOGFILE'):
+    if 'NTM_LOGFILE' in os.environ:
         LOG_FILE = os.environ['NTM_LOGFILE']
         LOG_DIR = os.path.split(LOG_FILE)[0]
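dict.has_key() exists only on Python 2 and was removed in Python 3; the membership operator works on both. A sketch:

import os

# Python 2 only: os.environ.has_key('NTM_LOGFILE')
# portable replacement used by this commit:
if 'NTM_LOGFILE' in os.environ:
    log_file = os.environ['NTM_LOGFILE']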
@@ -247,16 +257,16 @@
         SYS_ENCODING = 'UTF-8'

     if not hasattr(sys, "setdefaultencoding"):
-        reload(sys)
+        reload_module(sys)

     try:
         # pylint: disable=E1101
         # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
         sys.setdefaultencoding(SYS_ENCODING)
     except:
-        print 'Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable'
-        print 'or find another way to force Python to use ' + SYS_ENCODING + ' for string encoding.'
-        if os.environ.has_key('NZBOP_SCRIPTDIR'):
+        print('Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable')
+        print('or find another way to force Python to use ' + SYS_ENCODING + ' for string encoding.')
+        if 'NZBOP_SCRIPTDIR' in os.environ:
             sys.exit(NZBGET_POSTPROCESS_ERROR)
         else:
             sys.exit(1)
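Two more Python 3 portability swaps land here: the builtin reload() is gone in Python 3, so six.moves.reload_module is used instead, and the print statement becomes the print() function. A sketch, assuming six is installed:

from __future__ import print_function  # makes print() a function on Python 2
from six.moves import reload_module    # builtin reload() was removed in Python 3

import sys

reload_module(sys)  # same effect as the old Python-2-only reload(sys)
print('valid on both Python 2 and Python 3')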
@@ -266,18 +276,18 @@

     # run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options.
     if not config.migrate():
-        logger.error("Unable to migrate config file %s, exiting ..." % (CONFIG_FILE))
-        if os.environ.has_key('NZBOP_SCRIPTDIR'):
+        logger.error("Unable to migrate config file {0}, exiting ...".format(CONFIG_FILE))
+        if 'NZBOP_SCRIPTDIR' in os.environ:
             pass  # We will try and read config from Environment.
         else:
             sys.exit(-1)

     # run migrate to convert NzbGet data from old cfg style to new cfg style
-    if os.environ.has_key('NZBOP_SCRIPTDIR'):
+    if 'NZBOP_SCRIPTDIR' in os.environ:
         CFG = config.addnzbget()

     else:  # load newly migrated config
-        logger.info("Loading config from [%s]" % (CONFIG_FILE))
+        logger.info("Loading config from [{0}]".format(CONFIG_FILE))
         CFG = config()

     # Enable/Disable DEBUG Logging
@@ -288,7 +298,7 @@

     if LOG_ENV:
         for item in os.environ:
-            logger.info("%s: %s" % (item, os.environ[item]), "ENVIRONMENT")
+            logger.info("{0}: {1}".format(item, os.environ[item]), "ENVIRONMENT")

     # initialize the main SB database
     nzbToMediaDB.upgradeDatabase(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)
@@ -315,7 +325,8 @@
             # restart nzbToMedia
             try:
                 del MYAPP
-            except: pass
+            except:
+                pass
             restart()
         else:
             logger.error("Update wasn't successful, not restarting. Check your log for more information.")
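Many hunks in this commit expand one-line except: pass suites onto their own line; behavior is identical, and PEP 8 prefers the multi-line form. A sketch:

def risky():
    raise ValueError('example failure')

try:
    risky()
except: pass     # old one-line style

try:
    risky()
except:          # new style: suite on its own line
    pass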
@@ -333,8 +344,10 @@
     SABNZBDAPIKEY = CFG["Nzb"]["sabnzbd_apikey"]
     NZB_DEFAULTDIR = CFG["Nzb"]["default_downloadDirectory"]
     GROUPS = CFG["Custom"]["remove_group"]
-    if isinstance(GROUPS, str): GROUPS = GROUPS.split(',')
-    if GROUPS == ['']: GROUPS = None
+    if isinstance(GROUPS, str):
+        GROUPS = GROUPS.split(',')
+    if GROUPS == ['']:
+        GROUPS = None

     TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"]  # utorrent | deluge | transmission | rtorrent | vuze |other
     USELINK = CFG["Torrent"]["useLink"]  # no | hard | sym
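The expanded GROUPS block is the config-normalization idiom used for most list-valued settings in this file: a raw string becomes a list, and an empty value becomes None. A sketch with an illustrative value:

groups = 'group1,group2'        # raw config value may arrive as a string
if isinstance(groups, str):
    groups = groups.split(',')  # -> ['group1', 'group2']
if groups == ['']:              # ''.split(',') yields ['']
    groups = None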
@@ -342,8 +355,10 @@
     TORRENT_DEFAULTDIR = CFG["Torrent"]["default_downloadDirectory"]
     CATEGORIES = (CFG["Torrent"]["categories"])  # music,music_videos,pictures,software
     NOFLATTEN = (CFG["Torrent"]["noFlatten"])
-    if isinstance(NOFLATTEN, str): NOFLATTEN = NOFLATTEN.split(',')
-    if isinstance(CATEGORIES, str): CATEGORIES = CATEGORIES.split(',')
+    if isinstance(NOFLATTEN, str):
+        NOFLATTEN = NOFLATTEN.split(',')
+    if isinstance(CATEGORIES, str):
+        CATEGORIES = CATEGORIES.split(',')
     DELETE_ORIGINAL = int(CFG["Torrent"]["deleteOriginal"])
     TORRENT_CHMOD_DIRECTORY = int(str(CFG["Torrent"]["chmodDirectory"]), 8)
     TORRENT_RESUME_ON_FAILURE = int(CFG["Torrent"]["resumeOnFailure"])
@@ -364,9 +379,12 @@

     REMOTEPATHS = CFG["Network"]["mount_points"] or []
     if REMOTEPATHS:
-        if isinstance(REMOTEPATHS, list): REMOTEPATHS = ','.join(REMOTEPATHS)  # fix in case this imported as list.
-        REMOTEPATHS = [ tuple(item.split(',')) for item in REMOTEPATHS.split('|') ]  # /volume1/Public/,E:\|/volume2/share/,\\NAS\
-        REMOTEPATHS = [ (local.strip(), remote.strip()) for local, remote in REMOTEPATHS ]  # strip trailing and leading whitespaces
+        if isinstance(REMOTEPATHS, list):
+            REMOTEPATHS = ','.join(REMOTEPATHS)  # fix in case this imported as list.
+        REMOTEPATHS = [tuple(item.split(',')) for item in
+                       REMOTEPATHS.split('|')]  # /volume1/Public/,E:\|/volume2/share/,\\NAS\
+        REMOTEPATHS = [(local.strip(), remote.strip()) for local, remote in
+                       REMOTEPATHS]  # strip trailing and leading whitespaces

     PLEXSSL = int(CFG["Plex"]["plex_ssl"])
     PLEXHOST = CFG["Plex"]["plex_host"]
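The mount_points parsing splits '|'-separated pairs, then ','-separated local/remote halves, as the inline comment's sample value shows. A sketch of the same two comprehensions:

raw = '/volume1/Public/,E:\\|/volume2/share/,\\\\NAS\\'
pairs = [tuple(item.split(',')) for item in raw.split('|')]
pairs = [(local.strip(), remote.strip()) for local, remote in pairs]
# -> [('/volume1/Public/', 'E:\\'), ('/volume2/share/', '\\\\NAS\\')]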
@@ -374,62 +392,79 @@
     PLEXTOKEN = CFG["Plex"]["plex_token"]
     PLEXSEC = CFG["Plex"]["plex_sections"] or []
     if PLEXSEC:
-        if isinstance(PLEXSEC, list): PLEXSEC = ','.join(PLEXSEC)  # fix in case this imported as list.
-        PLEXSEC = [ tuple(item.split(',')) for item in PLEXSEC.split('|') ]
+        if isinstance(PLEXSEC, list):
+            PLEXSEC = ','.join(PLEXSEC)  # fix in case this imported as list.
+        PLEXSEC = [tuple(item.split(',')) for item in PLEXSEC.split('|')]

     devnull = open(os.devnull, 'w')
     try:
         subprocess.Popen(["nice"], stdout=devnull, stderr=devnull).communicate()
-        NICENESS.extend(['nice', '-n%s' % (int(CFG["Posix"]["niceness"]))])
-    except: pass
+        NICENESS.extend(['nice', '-n{0}'.format(int(CFG["Posix"]["niceness"]))])
+    except:
+        pass
     try:
         subprocess.Popen(["ionice"], stdout=devnull, stderr=devnull).communicate()
         try:
-            NICENESS.extend(['ionice', '-c%s' % (int(CFG["Posix"]["ionice_class"]))])
-        except: pass
+            NICENESS.extend(['ionice', '-c{0}'.format(int(CFG["Posix"]["ionice_class"]))])
+        except:
+            pass
         try:
             if 'ionice' in NICENESS:
-                NICENESS.extend(['-n%s' % (int(CFG["Posix"]["ionice_classdata"]))])
+                NICENESS.extend(['-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))])
             else:
-                NICENESS.extend(['ionice', '-n%s' % (int(CFG["Posix"]["ionice_classdata"]))])
-        except: pass
-    except: pass
+                NICENESS.extend(['ionice', '-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))])
+        except:
+            pass
+    except:
+        pass
     devnull.close()

     COMPRESSEDCONTAINER = [re.compile('.r\d{2}$', re.I),
-                          re.compile('.part\d+.rar$', re.I),
-                          re.compile('.rar$', re.I)]
-    COMPRESSEDCONTAINER += [re.compile('%s$' % ext, re.I) for ext in CFG["Extensions"]["compressedExtensions"]]
+                           re.compile('.part\d+.rar$', re.I),
+                           re.compile('.rar$', re.I)]
+    COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG["Extensions"]["compressedExtensions"]]
     MEDIACONTAINER = CFG["Extensions"]["mediaExtensions"]
     AUDIOCONTAINER = CFG["Extensions"]["audioExtensions"]
     METACONTAINER = CFG["Extensions"]["metaExtensions"]  # .nfo,.sub,.srt
-    if isinstance(COMPRESSEDCONTAINER, str): COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',')
-    if isinstance(MEDIACONTAINER, str): MEDIACONTAINER = MEDIACONTAINER.split(',')
-    if isinstance(AUDIOCONTAINER, str): AUDIOCONTAINER = AUDIOCONTAINER.split(',')
-    if isinstance(METACONTAINER, str): METACONTAINER = METACONTAINER.split(',')
+    if isinstance(COMPRESSEDCONTAINER, str):
+        COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',')
+    if isinstance(MEDIACONTAINER, str):
+        MEDIACONTAINER = MEDIACONTAINER.split(',')
+    if isinstance(AUDIOCONTAINER, str):
+        AUDIOCONTAINER = AUDIOCONTAINER.split(',')
+    if isinstance(METACONTAINER, str):
+        METACONTAINER = METACONTAINER.split(',')

     GETSUBS = int(CFG["Transcoder"]["getSubs"])
     TRANSCODE = int(CFG["Transcoder"]["transcode"])
     DUPLICATE = int(CFG["Transcoder"]["duplicate"])
     CONCAT = int(CFG["Transcoder"]["concat"])
     IGNOREEXTENSIONS = (CFG["Transcoder"]["ignoreExtensions"])
-    if isinstance(IGNOREEXTENSIONS, str): IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',')
+    if isinstance(IGNOREEXTENSIONS, str):
+        IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',')
     OUTPUTFASTSTART = int(CFG["Transcoder"]["outputFastStart"])
     GENERALOPTS = (CFG["Transcoder"]["generalOptions"])
-    if isinstance(GENERALOPTS, str): GENERALOPTS = GENERALOPTS.split(',')
-    if GENERALOPTS == ['']: GENERALOPTS = []
-    if not '-fflags' in GENERALOPTS: GENERALOPTS.append('-fflags')
-    if not '+genpts' in GENERALOPTS: GENERALOPTS.append('+genpts')
+    if isinstance(GENERALOPTS, str):
+        GENERALOPTS = GENERALOPTS.split(',')
+    if GENERALOPTS == ['']:
+        GENERALOPTS = []
+    if '-fflags' not in GENERALOPTS:
+        GENERALOPTS.append('-fflags')
+    if '+genpts' not in GENERALOPTS:
+        GENERALOPTS.append('+genpts')
     try:
         OUTPUTQUALITYPERCENT = int(CFG["Transcoder"]["outputQualityPercent"])
-    except: pass
+    except:
+        pass
     OUTPUTVIDEOPATH = CFG["Transcoder"]["outputVideoPath"]
     PROCESSOUTPUT = int(CFG["Transcoder"]["processOutput"])
     ALANGUAGE = CFG["Transcoder"]["audioLanguage"]
     AINCLUDE = int(CFG["Transcoder"]["allAudioLanguages"])
     SLANGUAGES = CFG["Transcoder"]["subLanguages"]
-    if isinstance(SLANGUAGES, str): SLANGUAGES = SLANGUAGES.split(',')
-    if SLANGUAGES == ['']: SLANGUAGES = []
+    if isinstance(SLANGUAGES, str):
+        SLANGUAGES = SLANGUAGES.split(',')
+    if SLANGUAGES == ['']:
+        SLANGUAGES = []
     SINCLUDE = int(CFG["Transcoder"]["allSubLanguages"])
     SEXTRACT = int(CFG["Transcoder"]["extractSubs"])
     SEMBED = int(CFG["Transcoder"]["embedSubs"])
@@ -437,169 +472,215 @@
     VEXTENSION = CFG["Transcoder"]["outputVideoExtension"].strip()
     VCODEC = CFG["Transcoder"]["outputVideoCodec"].strip()
     VCODEC_ALLOW = CFG["Transcoder"]["VideoCodecAllow"].strip()
-    if isinstance(VCODEC_ALLOW, str): VCODEC_ALLOW = VCODEC_ALLOW.split(',')
-    if VCODEC_ALLOW == ['']: VCODEC_ALLOW = []
+    if isinstance(VCODEC_ALLOW, str):
+        VCODEC_ALLOW = VCODEC_ALLOW.split(',')
+    if VCODEC_ALLOW == ['']:
+        VCODEC_ALLOW = []
     VPRESET = CFG["Transcoder"]["outputVideoPreset"].strip()
     try:
         VFRAMERATE = float(CFG["Transcoder"]["outputVideoFramerate"].strip())
-    except: pass
+    except:
+        pass
     try:
         VCRF = int(CFG["Transcoder"]["outputVideoCRF"].strip())
-    except: pass
+    except:
+        pass
     try:
         VLEVEL = CFG["Transcoder"]["outputVideoLevel"].strip()
-    except: pass
+    except:
+        pass
     try:
-        VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k','000'))
-    except: pass
+        VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k', '000'))
+    except:
+        pass
     VRESOLUTION = CFG["Transcoder"]["outputVideoResolution"]
     ACODEC = CFG["Transcoder"]["outputAudioCodec"].strip()
     ACODEC_ALLOW = CFG["Transcoder"]["AudioCodecAllow"].strip()
-    if isinstance(ACODEC_ALLOW, str): ACODEC_ALLOW = ACODEC_ALLOW.split(',')
-    if ACODEC_ALLOW == ['']: ACODEC_ALLOW = []
+    if isinstance(ACODEC_ALLOW, str):
+        ACODEC_ALLOW = ACODEC_ALLOW.split(',')
+    if ACODEC_ALLOW == ['']:
+        ACODEC_ALLOW = []
     try:
         ACHANNELS = int(CFG["Transcoder"]["outputAudioChannels"].strip())
-    except: pass
+    except:
+        pass
     try:
-        ABITRATE = int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k','000'))
-    except: pass
+        ABITRATE = int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k', '000'))
+    except:
+        pass
     ACODEC2 = CFG["Transcoder"]["outputAudioTrack2Codec"].strip()
     ACODEC2_ALLOW = CFG["Transcoder"]["AudioCodec2Allow"].strip()
-    if isinstance(ACODEC2_ALLOW, str): ACODEC2_ALLOW = ACODEC2_ALLOW.split(',')
-    if ACODEC2_ALLOW == ['']: ACODEC2_ALLOW = []
+    if isinstance(ACODEC2_ALLOW, str):
+        ACODEC2_ALLOW = ACODEC2_ALLOW.split(',')
+    if ACODEC2_ALLOW == ['']:
+        ACODEC2_ALLOW = []
     try:
         ACHANNELS2 = int(CFG["Transcoder"]["outputAudioTrack2Channels"].strip())
-    except: pass
+    except:
+        pass
     try:
-        ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k','000'))
-    except: pass
+        ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k', '000'))
+    except:
+        pass
     ACODEC3 = CFG["Transcoder"]["outputAudioOtherCodec"].strip()
     ACODEC3_ALLOW = CFG["Transcoder"]["AudioOtherCodecAllow"].strip()
-    if isinstance(ACODEC3_ALLOW, str): ACODEC3_ALLOW = ACODEC3_ALLOW.split(',')
-    if ACODEC3_ALLOW == ['']: ACODEC3_ALLOW = []
+    if isinstance(ACODEC3_ALLOW, str):
+        ACODEC3_ALLOW = ACODEC3_ALLOW.split(',')
+    if ACODEC3_ALLOW == ['']:
+        ACODEC3_ALLOW = []
     try:
         ACHANNELS3 = int(CFG["Transcoder"]["outputAudioOtherChannels"].strip())
-    except: pass
+    except:
+        pass
     try:
-        ABITRATE3 = int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k','000'))
-    except: pass
+        ABITRATE3 = int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k', '000'))
+    except:
+        pass
     SCODEC = CFG["Transcoder"]["outputSubtitleCodec"].strip()
     BURN = int(CFG["Transcoder"]["burnInSubtitle"].strip())
     DEFAULTS = CFG["Transcoder"]["outputDefault"].strip()
     HWACCEL = int(CFG["Transcoder"]["hwAccel"])

-    allow_subs = ['.mkv','.mp4', '.m4v', 'asf', 'wma', 'wmv']
+    allow_subs = ['.mkv', '.mp4', '.m4v', 'asf', 'wma', 'wmv']
     codec_alias = {
-        'libx264':['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4'],
-        'libmp3lame':['libmp3lame', 'mp3'],
-        'libfaac':['libfaac', 'aac', 'faac']
-        }
+        'libx264': ['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4'],
+        'libmp3lame': ['libmp3lame', 'mp3'],
+        'libfaac': ['libfaac', 'aac', 'faac']
+    }
     transcode_defaults = {
-        'iPad':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
-            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'iPad-1080p':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':'1920:1080','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
-            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'iPad-720p':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
-            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'Apple-TV':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
-            'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'iPod':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
-            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'iPhone':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':'460:320','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
-            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'PS3':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
-            'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'xbox':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
-            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'Roku-480p':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
-            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'Roku-720p':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
-            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'Roku-1080p':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
-            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':160000, 'ACHANNELS':2,
-            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
-            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
-            'SCODEC':'mov_text'
-            },
-        'mkv':{
-            'VEXTENSION':'.mkv','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
-            'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8,
-            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
-            'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8,
-            'SCODEC':'mov_text'
-            },
-        'mp4-scene-release':{
-            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':19,'VLEVEL':'3.1',
-            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
-            'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8,
-            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
-            'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8,
-            'SCODEC':'mov_text'
-            }
+        'iPad': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2,
+            'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'iPad-1080p': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': '1920:1080',
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2,
+            'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'iPad-720p': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': '1280:720',
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2,
+            'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'Apple-TV': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': '1280:720',
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6,
+            'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'iPod': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': '1280:720',
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
+            'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'iPhone': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': '460:320',
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
+            'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'PS3': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6,
+            'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'xbox': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6,
+            'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'Roku-480p': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
+            'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'Roku-720p': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
+            'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'Roku-1080p': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
+            'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 160000, 'ACHANNELS': 2,
+            'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
+            'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
+            'SCODEC': 'mov_text'
+        },
+        'mkv': {
+            'VEXTENSION': '.mkv', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': None, 'VLEVEL': None,
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
+            'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8,
+            'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
+            'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None,
+            'ACHANNELS3': 8,
+            'SCODEC': 'mov_text'
+        },
+        'mp4-scene-release': {
+            'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None,
+            'VCRF': 19, 'VLEVEL': '3.1',
+            'VRESOLUTION': None,
+            'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
+            'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8,
+            'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
+            'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None,
+            'ACHANNELS3': 8,
+            'SCODEC': 'mov_text'
+        }
     }
     if DEFAULTS and DEFAULTS in transcode_defaults:
         VEXTENSION = transcode_defaults[DEFAULTS]['VEXTENSION']
         VCODEC = transcode_defaults[DEFAULTS]['VCODEC']
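The transcode_defaults rewrite is purely cosmetic (spaces after colons, one wrapped line per logical group); keys and values are unchanged, so preset lookup behaves the same. A trimmed sketch of that lookup:

transcode_defaults = {
    'iPod': {'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'ABITRATE': 128000},
}
DEFAULTS = 'iPod'
if DEFAULTS and DEFAULTS in transcode_defaults:
    VEXTENSION = transcode_defaults[DEFAULTS]['VEXTENSION']  # '.mp4'
    VCODEC = transcode_defaults[DEFAULTS]['VCODEC']          # 'libx264'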
@@ -629,25 +710,29 @@ def initialize(section=None):

     if VEXTENSION in allow_subs:
         ALLOWSUBS = 1
-    if not VCODEC_ALLOW and VCODEC: VCODEC_ALLOW.extend([VCODEC])
+    if not VCODEC_ALLOW and VCODEC:
+        VCODEC_ALLOW.extend([VCODEC])
     for codec in VCODEC_ALLOW:
         if codec in codec_alias:
-            extra = [ item for item in codec_alias[codec] if item not in VCODEC_ALLOW ]
+            extra = [item for item in codec_alias[codec] if item not in VCODEC_ALLOW]
             VCODEC_ALLOW.extend(extra)
-    if not ACODEC_ALLOW and ACODEC: ACODEC_ALLOW.extend([ACODEC])
+    if not ACODEC_ALLOW and ACODEC:
+        ACODEC_ALLOW.extend([ACODEC])
     for codec in ACODEC_ALLOW:
         if codec in codec_alias:
-            extra = [ item for item in codec_alias[codec] if item not in ACODEC_ALLOW ]
+            extra = [item for item in codec_alias[codec] if item not in ACODEC_ALLOW]
             ACODEC_ALLOW.extend(extra)
-    if not ACODEC2_ALLOW and ACODEC2: ACODEC2_ALLOW.extend([ACODEC2])
+    if not ACODEC2_ALLOW and ACODEC2:
+        ACODEC2_ALLOW.extend([ACODEC2])
     for codec in ACODEC2_ALLOW:
         if codec in codec_alias:
-            extra = [ item for item in codec_alias[codec] if item not in ACODEC2_ALLOW ]
+            extra = [item for item in codec_alias[codec] if item not in ACODEC2_ALLOW]
             ACODEC2_ALLOW.extend(extra)
-    if not ACODEC3_ALLOW and ACODEC3: ACODEC3_ALLOW.extend([ACODEC3])
+    if not ACODEC3_ALLOW and ACODEC3:
+        ACODEC3_ALLOW.extend([ACODEC3])
     for codec in ACODEC3_ALLOW:
         if codec in codec_alias:
-            extra = [ item for item in codec_alias[codec] if item not in ACODEC3_ALLOW ]
+            extra = [item for item in codec_alias[codec] if item not in ACODEC3_ALLOW]
             ACODEC3_ALLOW.extend(extra)
     codec_alias = {}  # clear memory
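Each *_ALLOW list is widened with the aliases of any codec it already names. A sketch of one pass (the original iterates the list it extends; a copy is used here for clarity):

codec_alias = {'libx264': ['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4']}
VCODEC_ALLOW = ['libx264']
for codec in list(VCODEC_ALLOW):
    if codec in codec_alias:
        extra = [item for item in codec_alias[codec] if item not in VCODEC_ALLOW]
        VCODEC_ALLOW.extend(extra)
# VCODEC_ALLOW -> ['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4']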
@@ -673,47 +758,59 @@
     else:
         try:
             SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip()
-        except: pass
-        if not SEVENZIP:
+        except:
+            pass
+        if not SEVENZIP:
             try:
                 SEVENZIP = subprocess.Popen(['which', '7zr'], stdout=subprocess.PIPE).communicate()[0].strip()
-            except: pass
-        if not SEVENZIP:
+            except:
+                pass
+        if not SEVENZIP:
             try:
                 SEVENZIP = subprocess.Popen(['which', '7za'], stdout=subprocess.PIPE).communicate()[0].strip()
-            except: pass
+            except:
+                pass
         if not SEVENZIP:
             SEVENZIP = None
-            logger.warning("Failed to locate 7zip. Transcosing of disk images and extraction of .7z files will not be possible!")
-    if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'), os.X_OK):
+            logger.warning(
+                "Failed to locate 7zip. Transcosing of disk images and extraction of .7z files will not be possible!")
+    if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'),
+                                                                        os.X_OK):
         FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg')
-    elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(os.path.join(FFMPEG_PATH, 'avconv'), os.X_OK):
+    elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(os.path.join(FFMPEG_PATH, 'avconv'),
+                                                                          os.X_OK):
         FFMPEG = os.path.join(FFMPEG_PATH, 'avconv')
     else:
         try:
             FFMPEG = subprocess.Popen(['which', 'ffmpeg'], stdout=subprocess.PIPE).communicate()[0].strip()
-        except: pass
-        if not FFMPEG:
+        except:
+            pass
+        if not FFMPEG:
             try:
                 FFMPEG = subprocess.Popen(['which', 'avconv'], stdout=subprocess.PIPE).communicate()[0].strip()
-            except: pass
+            except:
+                pass
         if not FFMPEG:
             FFMPEG = None
             logger.warning("Failed to locate ffmpeg. Transcoding disabled!")
             logger.warning("Install ffmpeg with x264 support to enable this feature ...")

-    if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'), os.X_OK):
+    if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'),
+                                                                         os.X_OK):
         FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe')
-    elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(os.path.join(FFMPEG_PATH, 'avprobe'), os.X_OK):
+    elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(os.path.join(FFMPEG_PATH, 'avprobe'),
+                                                                           os.X_OK):
         FFPROBE = os.path.join(FFMPEG_PATH, 'avprobe')
     else:
         try:
             FFPROBE = subprocess.Popen(['which', 'ffprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
-        except: pass
-        if not FFPROBE:
+        except:
+            pass
+        if not FFPROBE:
             try:
                 FFPROBE = subprocess.Popen(['which', 'avprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
-            except: pass
+            except:
+                pass
         if not FFPROBE:
             FFPROBE = None
     if CHECK_MEDIA:
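The binary discovery above shells out to which, whose stdout is the located path when the command exists and empty when it does not. A sketch of one probe:

import subprocess

path = subprocess.Popen(['which', 'ffmpeg'],
                        stdout=subprocess.PIPE).communicate()[0].strip()
ffmpeg = path or None  # empty output means `which` found nothing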
@@ -722,7 +819,7 @@

     # check for script-defied section and if None set to allow sections
     SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)]
-    for section,subsections in SECTIONS.items():
+    for section, subsections in SECTIONS.items():
         CATEGORIES.extend([subsection for subsection in subsections if CFG[section][subsection].isenabled()])
     CATEGORIES = list(set(CATEGORIES))
@@ -732,6 +829,7 @@
     # finished initalizing
     return True

+
 def restart():
     install_type = versionCheck.CheckVersion().install_type
@@ -751,11 +849,12 @@

     os._exit(status)

+
 def rchmod(path, mod):
-    logger.log("Changing file mode of %s to %s" % (path, oct(mod)))
+    logger.log("Changing file mode of {0} to {1}".format(path, oct(mod)))
     os.chmod(path, mod)
     if not os.path.isdir(path):
-        return # Skip files
+        return  # Skip files

     for root, dirs, files in os.walk(path):
         for d in dirs:
@@ -0,0 +1 @@
+# coding=utf-8
@@ -169,11 +169,11 @@ class autoProcessMovie(object):
                 if transcoder.isVideoGood(video, status):
                     import_subs(video)
                     good_files += 1
-            if num_files > 0 and good_files == num_files:
+            if num_files and good_files == num_files:
                 if status:
                     logger.info("Status shown as failed from Downloader, but {0} valid video files found. Setting as success.".format(good_files), section)
                     status = 0
-            elif num_files > 0 and good_files < num_files:
+            elif num_files and good_files < num_files:
                 logger.info("Status shown as success from Downloader, but corrupt video files found. Setting as failed.", section)
                 if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                     print('[NZB] MARK=BAD')
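num_files is a non-negative count, so the bare truthiness test reads the same as the explicit > 0 comparison it replaces:

for num_files in (0, 3):
    assert bool(num_files > 0) == bool(num_files)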
@@ -1 +1,2 @@
+# coding=utf-8
 __all__ = ["mainDB"]
@@ -1,10 +1,12 @@
-import core
+# coding=utf-8
+
 from core import logger, nzbToMediaDB
 from core.nzbToMediaUtil import backupVersionedFile

 MIN_DB_VERSION = 1  # oldest db version we support migrating from
 MAX_DB_VERSION = 2

+
 def backupDatabase(version):
     logger.info("Backing up database before upgrade")
     if not backupVersionedFile(nzbToMediaDB.dbFilename(), version):
@@ -12,6 +14,7 @@ def backupDatabase(version):
     else:
         logger.info("Proceeding with upgrade")

+
 # ======================
 # = Main DB Migrations =
 # ======================
@@ -43,21 +46,21 @@ class InitialSchema(nzbToMediaDB.SchemaUpgrade):
                 cur_db_version) + ") is too old to migrate from what this version of nzbToMedia supports (" + \
                 str(MIN_DB_VERSION) + ").\n" + \
                 "Please remove nzbtomedia.db file to begin fresh."
-                                      )
+            )

         if cur_db_version > MAX_DB_VERSION:
             logger.log_error_and_exit("Your database version (" + str(
                 cur_db_version) + ") has been incremented past what this version of nzbToMedia supports (" + \
                 str(MAX_DB_VERSION) + ").\n" + \
                 "If you have used other forks of nzbToMedia, your database may be unusable due to their modifications."
-                                      )
+            )
         if cur_db_version < MAX_DB_VERSION:  # We need to upgrade.
             queries = [
                 "CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));",
                 "INSERT INTO downloads2 SELECT * FROM downloads;",
                 "DROP TABLE IF EXISTS downloads;",
-                    "ALTER TABLE downloads2 RENAME TO downloads;",
+                "ALTER TABLE downloads2 RENAME TO downloads;",
                 "INSERT INTO db_version (db_version) VALUES (2);"
             ]
             for query in queries:
-                    self.connection.action(query)
+                self.connection.action(query)
@@ -0,0 +1 @@
+# coding=utf-8
@@ -1,3 +1,5 @@
+# coding=utf-8
+
 import os
 import platform
 import shutil
@@ -7,6 +9,7 @@ import core
 from subprocess import call, Popen
 import subprocess

+
 def extract(filePath, outputDestination):
     success = 0
     # Using Windows
@@ -21,9 +24,9 @@
     # Using unix
     else:
         required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"]
-        ## Possible future suport:
+        # ## Possible future suport:
         # gunzip: gz (cmd will delete original archive)
-        ## the following do not extract to dest dir
+        # ## the following do not extract to dest dir
         # ".xz": ["xz", "-d --keep"],
         # ".lzma": ["xz", "-d --format=lzma --keep"],
         # ".bz2": ["bzip2", "-d --keep"],
@@ -42,15 +45,18 @@
         if not os.getenv('TR_TORRENT_DIR'):
             devnull = open(os.devnull, 'w')
             for cmd in required_cmds:
-                if call(['which', cmd], stdout=devnull, stderr=devnull): #note, returns 0 if exists, or 1 if doesn't exist.
+                if call(['which', cmd], stdout=devnull,
+                        stderr=devnull):  # note, returns 0 if exists, or 1 if doesn't exist.
                     if cmd == "7zr" and not call(["which", "7z"]):  # we do have "7z" command
                         EXTRACT_COMMANDS[".7z"] = ["7z", "x"]
                     elif cmd == "7zr" and not call(["which", "7za"]):  # we do have "7za" command
                         EXTRACT_COMMANDS[".7z"] = ["7za", "x"]
                     else:
                         for k, v in EXTRACT_COMMANDS.items():
                             if cmd in v[0]:
-                                core.logger.error("EXTRACTOR: %s not found, disabling support for %s" % (cmd, k))
+                                core.logger.error("EXTRACTOR: {cmd} not found, "
+                                                  "disabling support for {feature}".format
+                                                  (cmd=cmd, feature=k))
                                 del EXTRACT_COMMANDS[k]
             devnull.close()
         else:
@ -73,10 +79,11 @@ def extract(filePath, outputDestination):
|
|||
if ext[1] in EXTRACT_COMMANDS:
|
||||
cmd = EXTRACT_COMMANDS[ext[1]]
|
||||
else:
|
||||
core.logger.debug("EXTRACTOR: Unknown file type: %s" % ext[1])
|
||||
core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format
|
||||
(ext=ext[1]))
|
||||
return False
|
||||
|
||||
# Create outputDestination folder
|
||||
# Create outputDestination folder
|
||||
core.makeDir(outputDestination)
|
||||
|
||||
if core.PASSWORDSFILE != "" and os.path.isfile(os.path.normpath(core.PASSWORDSFILE)):
|
||||
|
@ -84,8 +91,10 @@ def extract(filePath, outputDestination):
|
|||
else:
|
||||
passwords = []
|
||||
|
||||
core.logger.info("Extracting %s to %s" % (filePath, outputDestination))
|
||||
core.logger.debug("Extracting %s %s %s" % (cmd, filePath, outputDestination))
|
||||
core.logger.info("Extracting {file} to {destination}".format
|
||||
(file=filePath, destination=outputDestination))
|
||||
core.logger.debug("Extracting {cmd} {file} {destination}".format
|
||||
(cmd=cmd, file=filePath, destination=outputDestination))
|
||||
|
||||
origFiles = []
|
||||
origDirs = []
|
||||
|
@ -98,7 +107,7 @@ def extract(filePath, outputDestination):
|
|||
pwd = os.getcwd() # Get our Present Working Directory
|
||||
os.chdir(outputDestination) # Not all unpack commands accept full paths, so just extract into this directory
|
||||
devnull = open(os.devnull, 'w')
|
||||
|
||||
|
||||
try: # now works same for nt and *nix
|
||||
info = None
|
||||
cmd.append(filePath) # add filePath to final cmd arg.
|
||||
|
@ -112,7 +121,8 @@ def extract(filePath, outputDestination):
|
|||
p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine.
|
||||
res = p.wait()
|
||||
if (res >= 0 and os.name == 'nt') or res == 0: # for windows chp returns process id if successful or -1*Error code. Linux returns 0 for successful.
|
||||
core.logger.info("EXTRACTOR: Extraction was successful for %s to %s" % (filePath, outputDestination))
|
||||
core.logger.info("EXTRACTOR: Extraction was successful for {file} to {destination}".format
|
||||
(file=filePath, destination=outputDestination))
|
||||
success = 1
|
||||
elif len(passwords) > 0:
|
||||
core.logger.info("EXTRACTOR: Attempting to extract with passwords")
|
||||
|
@ -120,20 +130,23 @@ def extract(filePath, outputDestination):
|
|||
if password == "": # if edited in windows or otherwise if blank lines.
|
||||
continue
|
||||
cmd2 = cmd
|
||||
#append password here.
|
||||
# append password here.
|
||||
passcmd = "-p" + password
|
||||
cmd2.append(passcmd)
|
||||
p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine.
|
||||
res = p.wait()
|
||||
if (res >= 0 and platform == 'Windows') or res == 0:
|
||||
core.logger.info("EXTRACTOR: Extraction was successful for %s to %s using password: %s" % (
|
||||
filePath, outputDestination, password))
|
||||
core.logger.info("EXTRACTOR: Extraction was successful "
|
||||
"for {file} to {destination} using password: {pwd}".format
|
||||
(file=filePath, destination=outputDestination, pwd=password))
|
||||
success = 1
|
||||
break
|
||||
else:
|
||||
continue
|
||||
except:
|
||||
core.logger.error("EXTRACTOR: Extraction failed for %s. Could not call command %s" % (filePath, cmd))
|
||||
core.logger.error("EXTRACTOR: Extraction failed for {file}. "
|
||||
"Could not call command {cmd}".format
|
||||
(file=filePath, cmd=cmd))
|
||||
os.chdir(pwd)
|
||||
return False
|
||||
|
||||
|
@ -141,20 +154,24 @@ def extract(filePath, outputDestination):
|
|||
os.chdir(pwd) # Go back to our Original Working Directory
|
||||
if success:
|
||||
# sleep to let files finish writing to disk
|
||||
sleep (3)
|
||||
sleep(3)
|
||||
perms = stat.S_IMODE(os.lstat(os.path.split(filePath)[0]).st_mode)
|
||||
for dir, subdirs, files in os.walk(outputDestination):
|
||||
for subdir in subdirs:
|
||||
if not os.path.join(dir, subdir) in origFiles:
|
||||
try:
|
||||
os.chmod(os.path.join(dir, subdir), perms)
|
||||
except: pass
|
||||
except:
|
||||
pass
|
||||
for file in files:
|
||||
if not os.path.join(dir, file) in origFiles:
|
||||
try:
|
||||
shutil.copymode(filePath, os.path.join(dir, file))
|
||||
except: pass
|
||||
except:
|
||||
pass
|
||||
return True
|
||||
else:
|
||||
core.logger.error("EXTRACTOR: Extraction failed for %s. Result was %s" % (filePath, res))
|
||||
core.logger.error("EXTRACTOR: Extraction failed for {file}. "
|
||||
"Result was {result}".format
|
||||
(file=filePath, result=res))
|
||||
return False
|
||||
|
|
|
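Aside: the detection loop in this file leans on `which` returning exit status 0 when a command is on PATH and non-zero otherwise. A minimal sketch of that probe in isolation, separate from the module's EXTRACT_COMMANDS table (unix-only, since it shells out to `which`):

    import os
    from subprocess import call

    def tool_available(name):
        # `which` exits 0 if the command exists on PATH, non-zero otherwise.
        with open(os.devnull, 'w') as devnull:
            return call(['which', name], stdout=devnull, stderr=devnull) == 0

    # Example: prefer 7z, fall back to 7za, then 7zr, else None.
    seven_zip = next((c for c in ('7z', '7za', '7zr') if tool_available(c)), None)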
@ -1,6 +1,8 @@
import json
# coding=utf-8

import requests


class GitHub(object):
"""
Simple api wrapper for the Github API v3.
@ -0,0 +1 @@
# coding=utf-8
@ -1,3 +1,4 @@
# coding=utf-8
# Linktastic Module
# - A python2/3 compatible module that can create hardlinks/symlinks on windows-based systems
#

@ -29,61 +30,65 @@ if os.name == 'nt':
info = subprocess.STARTUPINFO()
info.dwFlags |= subprocess.STARTF_USESHOWWINDOW


# Prevent spaces from messing with us!
def _escape_param(param):
return '"%s"' % param
return '"{0}"'.format(param)


# Private function to create link on nt-based systems
def _link_windows(src, dest):
try:
subprocess.check_output(
'cmd /C mklink /H %s %s' % (_escape_param(dest), _escape_param(src)),
'cmd /C mklink /H {0} {1}'.format(_escape_param(dest), _escape_param(src)),
stderr=subprocess.STDOUT, startupinfo=info)
except CalledProcessError as err:
raise IOError(err.output.decode('utf-8'))

# TODO, find out what kind of messages Windows sends us from mklink
# print(stdout)
# assume if they ret-coded 0 we're good


def _symlink_windows(src, dest):
try:
subprocess.check_output(
'cmd /C mklink %s %s' % (_escape_param(dest), _escape_param(src)),
'cmd /C mklink {0} {1}'.format(_escape_param(dest), _escape_param(src)),
stderr=subprocess.STDOUT, startupinfo=info)
except CalledProcessError as err:
raise IOError(err.output.decode('utf-8'))

# TODO, find out what kind of messages Windows sends us from mklink
# print(stdout)
# assume if they ret-coded 0 we're good


def _dirlink_windows(src, dest):
try:
subprocess.check_output(
'cmd /C mklink /J %s %s' % (_escape_param(dest), _escape_param(src)),
'cmd /C mklink /J {0} {1}'.format(_escape_param(dest), _escape_param(src)),
stderr=subprocess.STDOUT, startupinfo=info)
except CalledProcessError as err:
raise IOError(err.output.decode('utf-8'))

# TODO, find out what kind of messages Windows sends us from mklink
# print(stdout)
# assume if they ret-coded 0 we're good


def _junctionlink_windows(src, dest):
try:
subprocess.check_output(
'cmd /C mklink /D %s %s' % (_escape_param(dest), _escape_param(src)),
'cmd /C mklink /D {0} {1}'.format(_escape_param(dest), _escape_param(src)),
stderr=subprocess.STDOUT, startupinfo=info)
except CalledProcessError as err:
raise IOError(err.output.decode('utf-8'))

# TODO, find out what kind of messages Windows sends us from mklink
# print(stdout)
# assume if they ret-coded 0 we're good


# Create a hard link to src named as dest
# This version of link, unlike os.link, supports nt systems as well

@ -101,6 +106,7 @@ def symlink(src, dest):
else:
os.symlink(src, dest)


# Create a symlink to src named as dest, but don't fail if you're on nt
def dirlink(src, dest):
if os.name == 'nt':

@ -108,9 +114,10 @@ def dirlink(src, dest):
else:
os.symlink(src, dest)


# Create a symlink to src named as dest, but don't fail if you're on nt
def junctionlink(src, dest):
if os.name == 'nt':
_junctionlink_windows(src, dest)
else:
os.symlink(src, dest)
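Aside: the four Windows helpers above all share one shape: shell out to `cmd /C mklink` with a flag (mklink itself uses /H for a hard link, /J for a junction, /D for a directory symlink, and no flag for a file symlink) and convert a CalledProcessError into IOError. A condensed sketch of that shared shape, assuming the same quoting as `_escape_param` above; `_mklink` is a hypothetical consolidation, not a function from the commit:

    import subprocess
    from subprocess import CalledProcessError

    def _mklink(flag, src, dest):
        # flag is '/H', '/J', '/D', or '' for a plain file symlink;
        # note mklink takes the new link name first, then the target.
        args = 'cmd /C mklink {flag} "{dest}" "{src}"'.format(flag=flag, dest=dest, src=src)
        try:
            subprocess.check_output(args, stderr=subprocess.STDOUT)
        except CalledProcessError as err:
            raise IOError(err.output.decode('utf-8'))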
@ -1,3 +1,4 @@
# coding=utf-8
from __future__ import with_statement

import os

@ -26,6 +27,7 @@ reverseNames = {u'ERROR': ERROR,
u'POSTPROCESS': POSTPROCESS,
u'DB': DB}


class NTMRotatingLogHandler(object):
def __init__(self, log_file, num_files, num_bytes):
self.num_files = num_files

@ -67,7 +69,7 @@ class NTMRotatingLogHandler(object):
if self.cur_handler:
old_handler = self.cur_handler
else:
#Add a new logging levels
# Add a new logging levels
logging.addLevelName(21, 'POSTPROCESS')
logging.addLevelName(5, 'DB')

@ -84,7 +86,7 @@ class NTMRotatingLogHandler(object):
{'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
'db': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S')
},
logging.Formatter('%(message)s'), ))

# add the handler to the root logger

@ -121,7 +123,7 @@ class NTMRotatingLogHandler(object):
{'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
'db': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S')
},
logging.Formatter('%(message)s'), ))

return file_handler

@ -191,9 +193,9 @@ class NTMRotatingLogHandler(object):
self.writes_since_check += 1

try:
message = u"%s: %s" % (str(section).upper(), toLog)
message = u"{0}: {1}".format(str(section).upper(), toLog)
except:
message = u"%s: Message contains non-utf-8 string" % (str(section).upper())
message = u"{0}: Message contains non-utf-8 string".format(str(section).upper())

out_line = message

@ -226,14 +228,15 @@ class NTMRotatingLogHandler(object):
def log_error_and_exit(self, error_msg):
log(error_msg, ERROR)

if os.environ.has_key('NZBOP_SCRIPTDIR'):
if 'NZBOP_SCRIPTDIR' in os.environ:
sys.exit(core.NZBGET_POSTPROCESS_ERROR)
elif not self.console_logging:
sys.exit(error_msg.encode(core.SYS_ENCODING, 'xmlcharrefreplace'))
else:
sys.exit(1)

class DispatchingFormatter:

class DispatchingFormatter(object):
def __init__(self, formatters, default_formatter):
self._formatters = formatters
self._default_formatter = default_formatter

@ -242,31 +245,41 @@ class DispatchingFormatter:
formatter = self._formatters.get(record.name, self._default_formatter)
return formatter.format(record)


ntm_log_instance = NTMRotatingLogHandler(core.LOG_FILE, NUM_LOGS, LOG_SIZE)


def log(toLog, logLevel=MESSAGE, section='MAIN'):
ntm_log_instance.log(toLog, logLevel, section)


def info(toLog, section='MAIN'):
log(toLog, MESSAGE, section)


def error(toLog, section='MAIN'):
log(toLog, ERROR, section)


def warning(toLog, section='MAIN'):
log(toLog, WARNING, section)


def debug(toLog, section='MAIN'):
log(toLog, DEBUG, section)


def postprocess(toLog, section='POSTPROCESS'):
log(toLog, POSTPROCESS, section)


def db(toLog, section='DB'):
log(toLog, DB, section)


def log_error_and_exit(error_msg):
ntm_log_instance.log_error_and_exit(error_msg)


def close():
ntm_log_instance.close_log()
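Aside: the DispatchingFormatter change above only adds an explicit object base (making it a new-style class under Python 2); the class itself picks a Formatter by the record's logger name and falls back to a default. A minimal usage sketch with the standard logging module (handler wiring and format strings here are illustrative, not from the commit):

    import logging

    class DispatchingFormatter(object):
        def __init__(self, formatters, default_formatter):
            self._formatters = formatters
            self._default_formatter = default_formatter

        def format(self, record):
            # Route by logger name, e.g. 'postprocess' records get their own layout.
            formatter = self._formatters.get(record.name, self._default_formatter)
            return formatter.format(record)

    handler = logging.StreamHandler()
    handler.setFormatter(DispatchingFormatter(
        {'postprocess': logging.Formatter('[PP] %(message)s')},
        logging.Formatter('%(levelname)s %(message)s')))
    logging.getLogger('postprocess').addHandler(handler)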
@ -1,103 +1,93 @@
import urllib
import core
# coding=utf-8

import requests

from six import iteritems

import core
from core import logger


def autoFork(section, inputCategory):
# auto-detect correct section
# config settings
try:
host = core.CFG[section][inputCategory]["host"]
port = core.CFG[section][inputCategory]["port"]
except:
host = None
port = None

try:
username = core.CFG[section][inputCategory]["username"]
password = core.CFG[section][inputCategory]["password"]
except:
username = None
password = None
cfg = core.CFG[section][inputCategory]

try:
apikey = core.CFG[section][inputCategory]["apikey"]
except:
apikey = None

try:
ssl = int(core.CFG[section][inputCategory]["ssl"])
except:
ssl = 0

try:
web_root = core.CFG[section][inputCategory]["web_root"]
except:
web_root = ""

try:
fork = core.FORKS.items()[core.FORKS.keys().index(core.CFG[section][inputCategory]["fork"])]
except:
fork = "auto"

if ssl:
protocol = "https://"
else:
protocol = "http://"
host = cfg.get("host")
port = cfg.get("port")
username = cfg.get("username")
password = cfg.get("password")
apikey = cfg.get("apikey")
ssl = int(cfg.get("ssl", 0))
web_root = cfg.get("web_root", "")
fork = core.FORKS.items()[core.FORKS.keys().index(cfg.get("fork", "auto"))]
protocol = "https://" if ssl else "http://"

detected = False
if section == "NzbDrone":
logger.info("Attempting to verify %s fork" % inputCategory)
url = "%s%s:%s%s/api/rootfolder" % (protocol,host,port,web_root)
headers={"X-Api-Key": apikey}
logger.info("Attempting to verify {category} fork".format
(category=inputCategory))
url = "{protocol}{host}:{port}{root}/api/rootfolder".format(
protocol=protocol, host=host, port=port, root=web_root)
headers = {"X-Api-Key": apikey}
try:
r = requests.get(url, headers=headers, stream=True, verify=False)
except requests.ConnectionError:
logger.warning("Could not connect to %s:%s to verify fork!" % (section, inputCategory))
logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, inputCategory))

if not r.ok:
logger.warning("Connection to %s:%s failed! Check your configuration" % (section, inputCategory))
logger.warning("Connection to {section}:{category} failed! "
"Check your configuration".format
(section=section, category=inputCategory))

fork = ['default', {}]

elif fork == "auto":
params = core.ALL_FORKS
rem_params = []
logger.info("Attempting to auto-detect %s fork" % inputCategory)
logger.info("Attempting to auto-detect {category} fork".format(category=inputCategory))
# define the order to test. Default must be first since the default fork doesn't reject parameters.
# then in order of most unique parameters.
url = "%s%s:%s%s/home/postprocess/" % (protocol,host,port,web_root)
url = "{protocol}{host}:{port}{root}/home/postprocess/".format(
protocol=protocol, host=host, port=port, root=web_root)
# attempting to auto-detect fork
try:
if username and password:
s = requests.Session()
login = "%s%s:%s%s/login" % (protocol,host,port,web_root)
login = "{protocol}{host}:{port}{root}/login".format(
protocol=protocol, host=host, port=port, root=web_root)
login_params = {'username': username, 'password': password}
s.post(login, data=login_params, stream=True, verify=False)
r = s.get(url, auth=(username, password), verify=False)
else:
r = requests.get(url, verify=False)
except requests.ConnectionError:
logger.info("Could not connect to %s:%s to perform auto-fork detection!" % (section, inputCategory))
logger.info("Could not connect to {section}:{category} to perform auto-fork detection!".format
(section=section, category=inputCategory))
r = []
if r and r.ok:
for param in params:
if not 'name="%s"' %(param) in r.text:
if not 'name={param!r}'.format(param=param) in r.text:
rem_params.append(param)
for param in rem_params:
params.pop(param)
for fork in sorted(core.FORKS.iteritems(), reverse=False):
params.pop(param)
for fork in sorted(iteritems(core.FORKS), reverse=False):
if params == fork[1]:
detected = True
break
if detected:
logger.info("%s:%s fork auto-detection successful ..." % (section, inputCategory))
logger.info("{section}:{category} fork auto-detection successful ...".format
(section=section, category=inputCategory))
elif rem_params:
logger.info("%s:%s fork auto-detection found custom params %s" % (section, inputCategory, params))
logger.info("{section}:{category} fork auto-detection found custom params {params}".format
(section=section, category=inputCategory, params=params))
fork = ['custom', params]
else:
logger.info("%s:%s fork auto-detection failed" % (section, inputCategory))
logger.info("{section}:{category} fork auto-detection failed".format
(section=section, category=inputCategory))
fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)]

logger.info("%s:%s fork set to %s" % (section, inputCategory, fork[0]))
return fork[0], fork[1]
logger.info("{section}:{category} fork set to {fork}".format
(section=section, category=inputCategory, fork=fork[0]))
return fork[0], fork[1]
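Aside: the bulk of this hunk replaces one try/except block per option with dictionary-style .get() calls carrying defaults, which reads better and no longer swallows unrelated exceptions. The pattern in isolation, under the assumption that cfg behaves like a plain mapping (the literal values below are illustrative):

    cfg = {"host": "localhost", "ssl": "1"}  # stand-in for core.CFG[section][inputCategory]

    host = cfg.get("host")             # None when absent, no try/except needed
    ssl = int(cfg.get("ssl", 0))       # explicit default instead of a bare except
    web_root = cfg.get("web_root", "")
    protocol = "https://" if ssl else "http://"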
@ -1,3 +1,6 @@
# coding=utf-8

from six import iteritems
import os
import shutil
import copy

@ -7,13 +10,15 @@ from core import logger

from itertools import chain

class Section(configobj.Section):

class Section(configobj.Section, object):
def isenabled(section):
# checks if subsection enabled, returns true/false if subsection specified otherwise returns true/false in {}
if not section.sections:
try:
value = list(ConfigObj.find_key(section, 'enabled'))[0]
except:value = 0
except:
value = 0
if int(value) == 1:
return section
else:

@ -22,7 +27,8 @@ class Section(configobj.Section):
for subsection in subsections:
try:
value = list(ConfigObj.find_key(subsections, 'enabled'))[0]
except:value = 0
except:
value = 0

if int(value) != 1:
del to_return[section_name][subsection]

@ -38,7 +44,8 @@ class Section(configobj.Section):
for subsection in to_return:
try:
value = list(ConfigObj.find_key(to_return[subsection], key))[0]
except:value = None
except:
value = None

if not value:
del to_return[subsection]

@ -79,6 +86,7 @@ class Section(configobj.Section):

return to_return


class ConfigObj(configobj.ConfigObj, Section):
def __init__(self, *args, **kw):
if len(args) == 0:

@ -110,16 +118,16 @@ class ConfigObj(configobj.ConfigObj, Section):
if not os.path.isfile(core.CONFIG_FILE):
shutil.copyfile(core.CONFIG_SPEC_FILE, core.CONFIG_FILE)
CFG_OLD = config(core.CONFIG_FILE)
except Exception, e:
logger.debug("Error %s when copying to .cfg" % (e))
except Exception as error:
logger.debug("Error {msg} when copying to .cfg".format(msg=error))

try:
# check for autoProcessMedia.cfg.spec and create if it does not exist
if not os.path.isfile(core.CONFIG_SPEC_FILE):
shutil.copyfile(core.CONFIG_FILE, core.CONFIG_SPEC_FILE)
CFG_NEW = config(core.CONFIG_SPEC_FILE)
except Exception, e:
logger.debug("Error %s when copying to .spec" % (e))
except Exception as error:
logger.debug("Error {msg} when copying to .spec".format(msg=error))

# check for autoProcessMedia.cfg and autoProcessMedia.cfg.spec and if they don't exist return and fail
if CFG_NEW is None or CFG_OLD is None:

@ -144,7 +152,7 @@ class ConfigObj(configobj.ConfigObj, Section):
continue

def cleanup_values(values, section):
for option, value in values.iteritems():
for option, value in iteritems(values):
if section in ['CouchPotato']:
if option == ['outputDirectory']:
CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]

@ -180,7 +188,7 @@ class ConfigObj(configobj.ConfigObj, Section):
CFG_NEW['Posix'][option] = value
values.pop(option)
if option == "remote_path":
if value and not value in ['0', '1', 0, 1]:
if value and value not in ['0', '1', 0, 1]:
value = 1
elif not value:
value = 0

@ -189,7 +197,8 @@ class ConfigObj(configobj.ConfigObj, Section):
if not list(ConfigObj.find_key(CFG_NEW, option)):
try:
values.pop(option)
except: pass
except:
pass

return values

@ -220,7 +229,7 @@ class ConfigObj(configobj.ConfigObj, Section):
subsection = None
if section in list(chain.from_iterable(subsections.values())):
subsection = section
section = ''.join([k for k,v in subsections.iteritems() if subsection in v])
section = ''.join([k for k, v in iteritems(subsections) if subsection in v])
process_section(section, subsection)
elif section in subsections.keys():
subsection = subsections[section]

@ -244,13 +253,15 @@ class ConfigObj(configobj.ConfigObj, Section):
CFG_NEW = config()

try:
if os.environ.has_key('NZBPO_NDCATEGORY') and os.environ.has_key('NZBPO_SBCATEGORY'):
if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']:
logger.warning("%s category is set for SickBeard and NzbDrone. Please check your config in NZBGet" % (os.environ['NZBPO_NDCATEGORY']))
logger.warning("{x} category is set for SickBeard and NzbDrone. "
"Please check your config in NZBGet".format
(x=os.environ['NZBPO_NDCATEGORY']))

section = "Nzb"
key = 'NZBOP_DESTDIR'
if os.environ.has_key(key):
if key in os.environ:
option = 'default_downloadDirectory'
value = os.environ[key]
CFG_NEW[section][option] = value

@ -260,7 +271,7 @@ class ConfigObj(configobj.ConfigObj, Section):
cfgKeys = ['auto_update', 'check_media', 'safe_mode']
for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value

@ -270,19 +281,21 @@ class ConfigObj(configobj.ConfigObj, Section):
cfgKeys = ['mount_points']
for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value

section = "CouchPotato"
envCatKey = 'NZBPO_CPSCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', 'WAIT_FOR', 'WATCH_DIR']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path', 'wait_for', 'watch_dir']
if os.environ.has_key(envCatKey):
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
'wait_for', 'watch_dir']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_CPS' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:

@ -292,12 +305,14 @@ class ConfigObj(configobj.ConfigObj, Section):

section = "SickBeard"
envCatKey = 'NZBPO_SBCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
if os.environ.has_key(envCatKey):
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork',
'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_SB' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:

@ -311,10 +326,10 @@ class ConfigObj(configobj.ConfigObj, Section):
envCatKey = 'NZBPO_HPCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path']
if os.environ.has_key(envCatKey):
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_HP' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:

@ -324,12 +339,14 @@ class ConfigObj(configobj.ConfigObj, Section):

section = "Mylar"
envCatKey = 'NZBPO_MYCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir', 'remote_path']
if os.environ.has_key(envCatKey):
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_MY' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:

@ -341,10 +358,10 @@ class ConfigObj(configobj.ConfigObj, Section):
envCatKey = 'NZBPO_GZCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if os.environ.has_key(envCatKey):
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_GZ' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:

@ -354,12 +371,14 @@ class ConfigObj(configobj.ConfigObj, Section):

section = "NzbDrone"
envCatKey = 'NZBPO_NDCATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if os.environ.has_key(envCatKey):
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_ND' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:

@ -374,7 +393,7 @@ class ConfigObj(configobj.ConfigObj, Section):
cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value

@ -384,25 +403,35 @@ class ConfigObj(configobj.ConfigObj, Section):
cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value

section = "Transcoder"
envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH', 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR', 'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
'OUTPUTVIDEOPRESET', 'OUTPUTVIDEOFRAMERATE', 'OUTPUTVIDEOBITRATE', 'OUTPUTAUDIOCODEC', 'AUDIOCODECALLOW', 'OUTPUTAUDIOBITRATE', 'OUTPUTQUALITYPERCENT', 'GETSUBS',
'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE', 'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS', 'OUTPUTAUDIOOTHERCHANNELS']
cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath', 'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir', 'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
'outputVideoPreset', 'outputVideoFramerate', 'outputVideoBitrate', 'outputAudioCodec', 'AudioCodecAllow', 'outputAudioBitrate', 'outputQualityPercent', 'getSubs',
'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate', 'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels', 'outputAudioOtherChannels']
envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
'OUTPUTVIDEOPRESET', 'OUTPUTVIDEOFRAMERATE', 'OUTPUTVIDEOBITRATE', 'OUTPUTAUDIOCODEC',
'AUDIOCODECALLOW', 'OUTPUTAUDIOBITRATE', 'OUTPUTQUALITYPERCENT', 'GETSUBS',
'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE',
'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS',
'OUTPUTAUDIOOTHERCHANNELS']
cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir',
'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
'outputVideoPreset', 'outputVideoFramerate', 'outputVideoBitrate', 'outputAudioCodec',
'AudioCodecAllow', 'outputAudioBitrate', 'outputQualityPercent', 'getSubs',
'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate',
'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
'outputAudioOtherChannels']
for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value

@ -412,19 +441,21 @@ class ConfigObj(configobj.ConfigObj, Section):
cfgKeys = ['wake', 'host', 'port', 'mac']
for index in range(len(envKeys)):
key = 'NZBPO_WOL' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value

section = "UserScript"
envCatKey = 'NZBPO_USCATEGORY'
envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE', 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce', 'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
if os.environ.has_key(envCatKey):
envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index]
if os.environ.has_key(key):
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:

@ -432,18 +463,19 @@ class ConfigObj(configobj.ConfigObj, Section):
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1

except Exception, e:
logger.debug("Error %s when applying NZBGet config" % (e))
except Exception as error:
logger.debug("Error {msg} when applying NZBGet config".format(msg=error))

try:
# write our new config to autoProcessMedia.cfg
CFG_NEW.filename = core.CONFIG_FILE
CFG_NEW.write()
except Exception, e:
logger.debug("Error %s when writing changes to .cfg" % (e))
except Exception as error:
logger.debug("Error {msg} when writing changes to .cfg".format(msg=error))

return CFG_NEW


configobj.Section = Section
configobj.ConfigObj = ConfigObj
config = ConfigObj
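Aside: each NZBGet section above walks parallel envKeys/cfgKeys lists by index; the same mapping can be expressed with zip, which is the idiom the index loop approximates. A sketch under that assumption (the key names are from the diff, the loop body is simplified to a plain dict):

    import os

    env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT']
    cfg_keys = ['enabled', 'apikey', 'host', 'port']

    settings = {}
    for env_key, option in zip(env_keys, cfg_keys):
        key = 'NZBPO_CPS' + env_key
        if key in os.environ:        # the membership test the diff switches to
            settings[option] = os.environ[key]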
@ -1,4 +1,6 @@
from __future__ import with_statement
# coding=utf-8

from __future__ import print_function, with_statement

import re
import sqlite3

@ -7,6 +9,7 @@ import time
import core
from core import logger


def dbFilename(filename="nzbtomedia.db", suffix=None):
"""
@param filename: The sqlite database filename to use. If not specified,

@ -16,11 +19,11 @@ def dbFilename(filename="nzbtomedia.db", suffix=None):
@return: the correct location of the database file.
"""
if suffix:
filename = "%s.%s" % (filename, suffix)
filename = "{0}.{1}".format(filename, suffix)
return core.os.path.join(core.PROGRAM_DIR, filename)


class DBConnection:
class DBConnection(object):
def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None):

self.filename = filename

@ -34,7 +37,7 @@ class DBConnection:
result = None
try:
result = self.select("SELECT db_version FROM db_version")
except sqlite3.OperationalError, e:
except sqlite3.OperationalError as e:
if "no such table: db_version" in e.args[0]:
return 0

@ -44,7 +47,7 @@ class DBConnection:
return 0

def fetch(self, query, args=None):
if query == None:
if query is None:
return

sqlResult = None

@ -52,7 +55,7 @@ class DBConnection:
while attempt < 5:
try:
if args == None:
if args is None:
logger.log(self.filename + ": " + query, logger.DB)
cursor = self.connection.cursor()
cursor.execute(query)

@ -65,7 +68,7 @@ class DBConnection:
# get out of the connection attempt loop since we were successful
break
except sqlite3.OperationalError, e:
except sqlite3.OperationalError as e:
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
logger.log(u"DB error: " + str(e), logger.WARNING)
attempt += 1

@ -73,14 +76,14 @@ class DBConnection:
else:
logger.log(u"DB error: " + str(e), logger.ERROR)
raise
except sqlite3.DatabaseError, e:
except sqlite3.DatabaseError as e:
logger.log(u"Fatal error executing query: " + str(e), logger.ERROR)
raise

return sqlResult

def mass_action(self, querylist, logTransaction=False):
if querylist == None:
if querylist is None:
return

sqlResult = []

@ -100,7 +103,7 @@ class DBConnection:
self.connection.commit()
logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG)
return sqlResult
except sqlite3.OperationalError, e:
except sqlite3.OperationalError as e:
sqlResult = []
if self.connection:
self.connection.rollback()

@ -111,8 +114,7 @@ class DBConnection:
else:
logger.log(u"DB error: " + str(e), logger.ERROR)
raise
except sqlite3.DatabaseError, e:
sqlResult = []
except sqlite3.DatabaseError as e:
if self.connection:
self.connection.rollback()
logger.log(u"Fatal error executing query: " + str(e), logger.ERROR)

@ -121,7 +123,7 @@ class DBConnection:
return sqlResult

def action(self, query, args=None):
if query == None:
if query is None:
return

sqlResult = None

@ -129,7 +131,7 @@ class DBConnection:
while attempt < 5:
try:
if args == None:
if args is None:
logger.log(self.filename + ": " + query, logger.DB)
sqlResult = self.connection.execute(query)
else:

@ -138,7 +140,7 @@ class DBConnection:
self.connection.commit()
# get out of the connection attempt loop since we were successful
break
except sqlite3.OperationalError, e:
except sqlite3.OperationalError as e:
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
logger.log(u"DB error: " + str(e), logger.WARNING)
attempt += 1

@ -146,18 +148,17 @@ class DBConnection:
else:
logger.log(u"DB error: " + str(e), logger.ERROR)
raise
except sqlite3.DatabaseError, e:
except sqlite3.DatabaseError as e:
logger.log(u"Fatal error executing query: " + str(e), logger.ERROR)
raise

return sqlResult


def select(self, query, args=None):

sqlResults = self.action(query, args).fetchall()

if sqlResults == None:
if sqlResults is None:
return []

return sqlResults

@ -180,7 +181,7 @@ class DBConnection:

def tableInfo(self, tableName):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
cursor = self.connection.execute("PRAGMA table_info(%s)" % tableName)
cursor = self.connection.execute("PRAGMA table_info({0})".format(tableName))
columns = {}
for column in cursor:
columns[column['name']] = {'type': column['type']}

@ -226,8 +227,8 @@ def _processUpgrade(connection, upgradeClass):
logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.MESSAGE)
try:
instance.execute()
except sqlite3.DatabaseError, e:
print "Error in " + str(upgradeClass.__name__) + ": " + str(e)
except sqlite3.DatabaseError as e:
print("Error in " + str(upgradeClass.__name__) + ": " + str(e))
raise
logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG)
else:

@ -243,14 +244,14 @@ class SchemaUpgrade(object):
self.connection = connection

def hasTable(self, tableName):
return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, )).fetchall()) > 0
return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName,)).fetchall()) > 0

def hasColumn(self, tableName, column):
return column in self.connection.tableInfo(tableName)

def addColumn(self, table, column, type="NUMERIC", default=0):
self.connection.action("ALTER TABLE %s ADD %s %s" % (table, column, type))
self.connection.action("UPDATE %s SET %s = ?" % (table, column), (default,))
self.connection.action("ALTER TABLE {0} ADD {1} {2}".format(table, column, type))
self.connection.action("UPDATE {0} SET {1} = ?".format(table, column), (default,))

def checkDBVersion(self):
result = self.connection.select("SELECT db_version FROM db_version")

@ -263,4 +264,3 @@ class SchemaUpgrade(object):
new_version = self.checkDBVersion() + 1
self.connection.action("UPDATE db_version SET db_version = ?", [new_version])
return new_version
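Aside: fetch(), mass_action(), and action() above all share the same retry discipline: up to five attempts, retrying only when SQLite reports a locked or unopenable database, and re-raising anything else. A condensed sketch of that discipline separated from the logging; this is not the class's actual method, just the skeleton it follows:

    import sqlite3

    def run_with_retry(connection, query, args=None, attempts=5):
        for attempt in range(attempts):
            try:
                return connection.execute(query, args or [])
            except sqlite3.OperationalError as e:
                # Retry only transient conditions; anything else is fatal.
                if "database is locked" not in e.args[0] and \
                        "unable to open database file" not in e.args[0]:
                    raise
        raise sqlite3.OperationalError("gave up after {0} attempts".format(attempts))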
@ -1,23 +1,29 @@
# coding=utf-8
import os
import re
import core
import shlex
from core import logger
from core.nzbToMediaUtil import listMediaFiles

reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.", r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.",
r"\.ld[.-]?bew\.", r"\.pir.?(dov|dvd|bew|db|rb)\.", r"\brdvd\.", r"\.vts\.", r"\.reneercs\.", r"\.dcv\.", r"\b(pir|mac)dh\b", r"\.reporp\.", r"\.kcaper\.",
reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.",
r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.",
r"\.ld[.-]?bew\.", r"\.pir.?(dov|dvd|bew|db|rb)\.", r"\brdvd\.", r"\.vts\.", r"\.reneercs\.",
r"\.dcv\.", r"\b(pir|mac)dh\b", r"\.reporp\.", r"\.kcaper\.",
r"\.lanretni\.", r"\b3ca\b", r"\.cstn\."]
reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE)
season_pattern = re.compile(r"(.*\.\d{2}e\d{2}s\.)(.*)", flags=re.IGNORECASE)
word_pattern = re.compile(r"([^A-Z0-9]*[A-Z0-9]+)")
media_list = [r"\.s\d{2}e\d{2}\.", r"\.1080[pi]\.", r"\.720p\.", r"\.576[pi]", r"\.480[pi]\.", r"\.360p\.", r"\.[xh]26[45]\b", r"\.bluray\.", r"\.[hp]dtv\.",
r"\.web[.-]?dl\.", r"\.(vod|dvd|web|bd|br).?rip\.", r"\.dvdr\b", r"\.stv\.", r"\.screener\.", r"\.vcd\.", r"\bhd(cam|rip)\b", r"\.proper\.", r"\.repack\.",
media_list = [r"\.s\d{2}e\d{2}\.", r"\.1080[pi]\.", r"\.720p\.", r"\.576[pi]", r"\.480[pi]\.", r"\.360p\.",
r"\.[xh]26[45]\b", r"\.bluray\.", r"\.[hp]dtv\.",
r"\.web[.-]?dl\.", r"\.(vod|dvd|web|bd|br).?rip\.", r"\.dvdr\b", r"\.stv\.", r"\.screener\.", r"\.vcd\.",
r"\bhd(cam|rip)\b", r"\.proper\.", r"\.repack\.",
r"\.internal\.", r"\bac3\b", r"\.ntsc\.", r"\.pal\.", r"\.secam\.", r"\bdivx\b", r"\bxvid\b"]
media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE)
garbage_name = re.compile(r"^[a-zA-Z0-9]*$")
char_replace = [[r"(\w)1\.(\w)",r"\1i\2"]
]
char_replace = [[r"(\w)1\.(\w)", r"\1i\2"]
]


def process_all_exceptions(name, dirname):
rename_script(dirname)

@ -26,7 +32,7 @@ def process_all_exceptions(name, dirname):
parentDir = os.path.dirname(filename)
head, fileExtension = os.path.splitext(os.path.basename(filename))
if reverse_pattern.search(head) is not None:
exception = reverse_filename
elif garbage_name.search(head) is not None:
exception = replace_filename
else:

@ -37,7 +43,8 @@ def process_all_exceptions(name, dirname):
if core.GROUPS:
newfilename = strip_groups(newfilename)
if newfilename != filename:
rename_file(filename, newfilename)


def strip_groups(filename):
if not core.GROUPS:

@ -47,33 +54,38 @@ def strip_groups(filename):
newname = head.replace(' ', '.')
for group in core.GROUPS:
newname = newname.replace(group, '')
newname = newname.replace('[]', '')
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath


def rename_file(filename, newfilePath):
logger.debug("Replacing file name %s with download name %s" % (filename, newfilePath), "EXCEPTION")
logger.debug("Replacing file name {old} with download name {new}".format
(old=filename, new=newfilePath), "EXCEPTION")
try:
os.rename(filename, newfilePath)
except Exception,e:
logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION")
except Exception as error:
logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION")


def replace_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename))
if media_pattern.search(os.path.basename(dirname).replace(' ','.')) is not None:
if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
newname = os.path.basename(dirname).replace(' ', '.')
logger.debug("Replacing file name %s with directory name %s" % (head, newname), "EXCEPTION")
elif media_pattern.search(name.replace(' ','.').lower()) is not None:
logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION")
elif media_pattern.search(name.replace(' ', '.').lower()) is not None:
newname = name.replace(' ', '.')
logger.debug("Replacing file name %s with download name %s" % (head, newname), "EXCEPTION")
logger.debug("Replacing file name {old} with download name {new}".format
(old=head, new=newname), "EXCEPTION")
else:
logger.warning("No name replacement determined for %s" % (head), "EXCEPTION")
newname = name
logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION")
newname = name
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath


def reverse_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename))
na_parts = season_pattern.search(head)

@ -84,29 +96,31 @@ def reverse_filename(filename, dirname, name):
for wp in word_p:
if wp[0] == ".":
new_words += "."
new_words += re.sub(r"\W","",wp)
new_words += re.sub(r"\W", "", wp)
else:
new_words = na_parts.group(2)
for cr in char_replace:
new_words = re.sub(cr[0],cr[1],new_words)
new_words = re.sub(cr[0], cr[1], new_words)
newname = new_words[::-1] + na_parts.group(1)[::-1]
else:
newname = head[::-1].title()
newname = newname.replace(' ', '.')
logger.debug("Reversing filename %s to %s" % (head, newname), "EXCEPTION")
logger.debug("Reversing filename {old} to {new}".format
(old=head, new=newname), "EXCEPTION")
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath


def rename_script(dirname):
rename_file = ""
for dir, dirs, files in os.walk(dirname):
for file in files:
if re.search('(rename\S*\.(sh|bat)$)',file,re.IGNORECASE):
if re.search('(rename\S*\.(sh|bat)$)', file, re.IGNORECASE):
rename_file = os.path.join(dir, file)
dirname = dir
break
if rename_file:
rename_lines = [line.strip() for line in open(rename_file)]
for line in rename_lines:
if re.search('^(mv|Move)', line, re.IGNORECASE):

@ -118,13 +132,13 @@ def rename_script(dirname):
dest = os.path.join(dirname, cmd[1].split('\\')[-1].split('/')[-1])
if os.path.isfile(dest):
continue
logger.debug("Renaming file %s to %s" % (orig, dest), "EXCEPTION")
logger.debug("Renaming file {source} to {destination}".format
(source=orig, destination=dest), "EXCEPTION")
try:
os.rename(orig, dest)
except Exception,e:
logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION")
except Exception as error:
logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION")

# dict for custom groups
# we can add more to this list
#__customgroups__ = {'Q o Q': process_qoq, '-ECI': process_eci}

# _customgroups = {'Q o Q': process_qoq, '-ECI': process_eci}
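Aside: the reverse_list tokens read oddly until you notice each one is a normal release tag spelled backwards: "yarulb" is "bluray" reversed, and r"\.\d{2}e\d{2}s\." matches a reversed "sNNeNN" episode tag. A two-line demonstration of the relationship (the example name is illustrative):

    token = "Show.S01E02.BluRay".lower()[::-1]      # -> 'yarulb.20e10s.wohs'
    print("yarulb" in token, ".20e10s." in token)   # True True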
@ -1,3 +1,4 @@
|
|||
# coding=utf-8
|
||||
import os
|
||||
import core
|
||||
from subprocess import Popen
|
||||
|
@ -5,45 +6,43 @@ from core.transcoder import transcoder
|
|||
from core.nzbToMediaUtil import import_subs, listMediaFiles, rmDir
|
||||
from core import logger
|
||||
|
||||
|
||||
def external_script(outputDestination, torrentName, torrentLabel, settings):
|
||||
final_result = 0 # start at 0.
|
||||
num_files = 0
|
||||
try:
|
||||
core.USER_SCRIPT_MEDIAEXTENSIONS = settings["user_script_mediaExtensions"]
|
||||
if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str): core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',')
|
||||
if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str):
|
||||
core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',')
|
||||
except:
|
||||
core.USER_SCRIPT_MEDIAEXTENSIONS = []
|
||||
try:
|
||||
core.USER_SCRIPT = settings["user_script_path"]
|
||||
except:
|
||||
core.USER_SCRIPT = None
|
||||
if core.USER_SCRIPT is None or core.USER_SCRIPT == "None": # do nothing and return success.
core.USER_SCRIPT = settings.get("user_script_path")
if not core.USER_SCRIPT or core.USER_SCRIPT == "None": # do nothing and return success.
return [0, ""]
try:
core.USER_SCRIPT_PARAM = settings["user_script_param"]
if isinstance(core.USER_SCRIPT_PARAM, str): core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',')
if isinstance(core.USER_SCRIPT_PARAM, str):
core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',')
except:
core.USER_SCRIPT_PARAM = []
try:
core.USER_SCRIPT_SUCCESSCODES = settings["user_script_successCodes"]
if isinstance(core.USER_SCRIPT_SUCCESSCODES, str): core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',')
if isinstance(core.USER_SCRIPT_SUCCESSCODES, str):
core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',')
except:
core.USER_SCRIPT_SUCCESSCODES = 0
try:
core.USER_SCRIPT_CLEAN = int(settings["user_script_clean"])
except:
core.USER_SCRIPT_CLEAN = 1
try:
core.USER_SCRIPT_RUNONCE = int(settings["user_script_runOnce"])
except:
core.USER_SCRIPT_RUNONCE = 1

core.USER_SCRIPT_CLEAN = int(settings.get("user_script_clean", 1))
core.USER_SCRIPT_RUNONCE = int(settings.get("user_script_runOnce", 1))

if core.CHECK_MEDIA:
for video in listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False):
if transcoder.isVideoGood(video, 0):
import_subs(video)
else:
logger.info("Corrupt video file found %s. Deleting." % (video), "USERSCRIPT")
logger.info("Corrupt video file found {0}. Deleting.".format(video), "USERSCRIPT")
os.unlink(video)

for dirpath, dirnames, filenames in os.walk(outputDestination):

@@ -53,28 +52,28 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
fileName, fileExtension = os.path.splitext(file)

if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or "ALL" in core.USER_SCRIPT_MEDIAEXTENSIONS:
num_files = num_files + 1
num_files += 1
if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files.
continue
command = [core.USER_SCRIPT]
for param in core.USER_SCRIPT_PARAM:
if param == "FN":
command.append('%s' % file)
command.append('{0}'.format(file))
continue
elif param == "FP":
command.append('%s' % filePath)
command.append('{0}'.format(filePath))
continue
elif param == "TN":
command.append('%s' % torrentName)
command.append('{0}'.format(torrentName))
continue
elif param == "TL":
command.append('%s' % torrentLabel)
command.append('{0}'.format(torrentLabel))
continue
elif param == "DN":
if core.USER_SCRIPT_RUNONCE == 1:
command.append('%s' % outputDestination)
command.append('{0}'.format(outputDestination))
else:
command.append('%s' % dirpath)
command.append('{0}'.format(dirpath))
continue
else:
command.append(param)

@@ -82,37 +81,36 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
cmd = ""
for item in command:
cmd = cmd + " " + item
logger.info("Running script %s on file %s." % (cmd, filePath), "USERSCRIPT")
logger.info("Running script {0} on file {1}.".format(cmd, filePath), "USERSCRIPT")
try:
p = Popen(command)
res = p.wait()
if str(res) in core.USER_SCRIPT_SUCCESSCODES: # Linux returns 0 for successful.
logger.info("UserScript %s was successful" % (command[0]))
logger.info("UserScript {0} was successful".format(command[0]))
result = 0
else:
logger.error("UserScript %s has failed with return code: %s" % (command[0], res), "USERSCRIPT")
logger.error("UserScript {0} has failed with return code: {1}".format(command[0], res), "USERSCRIPT")
logger.info(
"If the UserScript completed successfully you should add %s to the user_script_successCodes" % (
"If the UserScript completed successfully you should add {0} to the user_script_successCodes".format(
res), "USERSCRIPT")
result = int(1)
except:
logger.error("UserScript %s has failed" % (command[0]), "USERSCRIPT")
logger.error("UserScript {0} has failed".format(command[0]), "USERSCRIPT")
result = int(1)
final_result = final_result + result
final_result += result

num_files_new = 0
for dirpath, dirnames, filenames in os.walk(outputDestination):
for file in filenames:
filePath = core.os.path.join(dirpath, file)
fileName, fileExtension = os.path.splitext(file)

if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL":
num_files_new = num_files_new + 1
num_files_new += 1

if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0:
logger.info("All files have been processed. Cleaning outputDirectory %s" % (outputDestination))
logger.info("All files have been processed. Cleaning outputDirectory {0}".format(outputDestination))
rmDir(outputDestination)
elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0:
logger.info("%s files were processed, but %s still remain. outputDirectory will not be cleaned." % (
logger.info("{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.".format(
num_files, num_files_new))
return [final_result, '']
File diff suppressed because it is too large
@@ -1,3 +1,4 @@
# coding=utf-8
"""A synchronous implementation of the Deluge RPC protocol
based on gevent-deluge by Christopher Rosell.

@@ -14,10 +15,9 @@ Example usage:
download_location = client.core.get_config_value("download_location").get()
"""

from core.synchronousdeluge.exceptions import DelugeRPCError

__title__ = "synchronous-deluge"
__version__ = "0.1"
__author__ = "Christian Dale"

from core.synchronousdeluge.exceptions import DelugeRPCError
@@ -1,16 +1,15 @@
# coding=utf-8
import os
import platform

from collections import defaultdict
from itertools import imap
from exceptions import DelugeRPCError
from protocol import DelugeRPCRequest, DelugeRPCResponse
from transfer import DelugeTransfer

from .exceptions import DelugeRPCError
from .protocol import DelugeRPCRequest, DelugeRPCResponse
from .transfer import DelugeTransfer

__all__ = ["DelugeClient"]


RPC_RESPONSE = 1
RPC_ERROR = 2
RPC_EVENT = 3

@@ -24,13 +23,13 @@ class DelugeClient(object):
self._request_counter = 0

def _get_local_auth(self):
auth_file = ""
username = password = ""
if platform.system() in ('Windows', 'Microsoft'):
appDataPath = os.environ.get("APPDATA")
if not appDataPath:
import _winreg
hkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders")
hkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
"Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders")
appDataReg = _winreg.QueryValueEx(hkey, "AppData")
appDataPath = appDataReg[0]
_winreg.CloseKey(hkey)

@@ -40,10 +39,9 @@ class DelugeClient(object):
from xdg.BaseDirectory import save_config_path
try:
auth_file = os.path.join(save_config_path("deluge"), "auth")
except OSError, e:
except OSError:
return username, password


if os.path.exists(auth_file):
for line in open(auth_file):
if line.startswith("#"):

@@ -52,7 +50,7 @@ class DelugeClient(object):
line = line.strip()
try:
lsplit = line.split(":")
except Exception, e:
except Exception:
continue

if len(lsplit) == 2:

@@ -63,9 +61,9 @@ class DelugeClient(object):
continue

if username == "localclient":
return (username, password)
return username, password

return ("", "")
return "", ""

def _create_module_method(self, module, method):
fullname = "{0}.{1}".format(module, method)

@@ -107,20 +105,20 @@ class DelugeClient(object):

message_type = message[0]

# if message_type == RPC_EVENT:
# event = message[1]
# values = message[2]
#
# if event in self._event_handlers:
# for handler in self._event_handlers[event]:
# gevent.spawn(handler, *values)
#
# elif message_type in (RPC_RESPONSE, RPC_ERROR):
# if message_type == RPC_EVENT:
# event = message[1]
# values = message[2]
#
# if event in self._event_handlers:
# for handler in self._event_handlers[event]:
# gevent.spawn(handler, *values)
#
# elif message_type in (RPC_RESPONSE, RPC_ERROR):
if message_type in (RPC_RESPONSE, RPC_ERROR):
request_id = message[1]
value = message[2]

if request_id == self._request_counter :
if request_id == self._request_counter:
if message_type == RPC_RESPONSE:
response.set(value)
elif message_type == RPC_ERROR:

@@ -159,4 +157,3 @@ class DelugeClient(object):
def disconnect(self):
"""Disconnects from the daemon."""
self.transfer.disconnect()
@@ -1,5 +1,7 @@
# coding=utf-8
__all__ = ["DelugeRPCError"]


class DelugeRPCError(Exception):
def __init__(self, name, msg, traceback):
self.name = name

@@ -8,4 +10,3 @@ class DelugeRPCError(Exception):

def __str__(self):
return "{0}: {1}: {2}".format(self.__class__.__name__, self.name, self.msg)
@@ -1,5 +1,7 @@
# coding=utf-8
__all__ = ["DelugeRPCRequest", "DelugeRPCResponse"]


class DelugeRPCRequest(object):
def __init__(self, request_id, method, *args, **kwargs):
self.request_id = request_id

@@ -8,7 +10,8 @@ class DelugeRPCRequest(object):
self.kwargs = kwargs

def format(self):
return (self.request_id, self.method, self.args, self.kwargs)
return self.request_id, self.method, self.args, self.kwargs


class DelugeRPCResponse(object):
def __init__(self):

@@ -35,4 +38,3 @@ class DelugeRPCResponse(object):
return self.value
else:
raise self._exception
@@ -1,4 +1,4 @@

# coding=utf-8
"""
rencode -- Web safe object pickling/unpickling.

@@ -9,9 +9,9 @@ BitTorrent project. For complex, heterogeneous data structures with
many small elements, r-encodings take up significantly less space than
b-encodings:

>>> len(rencode.dumps({'a':0, 'b':[1,2], 'c':99}))
>>> len(rencode.dumps({'a': 0, 'b': [1, 2], 'c': 99}))
13
>>> len(bencode.bencode({'a':0, 'b':[1,2], 'c':99}))
>>> len(bencode.bencode({'a': 0, 'b': [1, 2], 'c': 99}))
26

The rencode format is not standardized, and may change with different

@@ -19,6 +19,13 @@ rencode module versions, so you should check that you are using the
same rencode version throughout your project.
"""

import struct
from threading import Lock
from six import PY3

if PY3:
long = int

__version__ = '1.0.1'
__all__ = ['dumps', 'loads']

@@ -62,9 +69,6 @@ __all__ = ['dumps', 'loads']
# (The rencode module is licensed under the above license as well).
#

import struct
from threading import Lock

# Default number of bits for serialized floats, either 32 or 64 (also a parameter for dumps()).
DEFAULT_FLOAT_BITS = 32

@@ -73,19 +77,19 @@ MAX_INT_LENGTH = 64

# The bencode 'typecodes' such as i, d, etc have been extended and
# relocated on the base-256 character set.
CHR_LIST = chr(59)
CHR_DICT = chr(60)
CHR_INT = chr(61)
CHR_INT1 = chr(62)
CHR_INT2 = chr(63)
CHR_INT4 = chr(64)
CHR_INT8 = chr(65)
CHR_LIST = chr(59)
CHR_DICT = chr(60)
CHR_INT = chr(61)
CHR_INT1 = chr(62)
CHR_INT2 = chr(63)
CHR_INT4 = chr(64)
CHR_INT8 = chr(65)
CHR_FLOAT32 = chr(66)
CHR_FLOAT64 = chr(44)
CHR_TRUE = chr(67)
CHR_FALSE = chr(68)
CHR_NONE = chr(69)
CHR_TERM = chr(127)
CHR_TRUE = chr(67)
CHR_FALSE = chr(68)
CHR_NONE = chr(69)
CHR_TERM = chr(127)

# Positive integers with value embedded in typecode.
INT_POS_FIXED_START = 0

@@ -104,9 +108,10 @@ STR_FIXED_START = 128
STR_FIXED_COUNT = 64

# Lists with length embedded in typecode.
LIST_FIXED_START = STR_FIXED_START+STR_FIXED_COUNT
LIST_FIXED_START = STR_FIXED_START + STR_FIXED_COUNT
LIST_FIXED_COUNT = 64


def decode_int(x, f):
f += 1
newf = x.index(CHR_TERM, f)

@@ -119,35 +124,42 @@ def decode_int(x, f):
if x[f] == '-':
if x[f + 1] == '0':
raise ValueError
elif x[f] == '0' and newf != f+1:
elif x[f] == '0' and newf != f + 1:
raise ValueError
return (n, newf+1)
return n, newf + 1


def decode_intb(x, f):
f += 1
return (struct.unpack('!b', x[f:f+1])[0], f+1)
return struct.unpack('!b', x[f:f + 1])[0], f + 1


def decode_inth(x, f):
f += 1
return (struct.unpack('!h', x[f:f+2])[0], f+2)
return struct.unpack('!h', x[f:f + 2])[0], f + 2


def decode_intl(x, f):
f += 1
return (struct.unpack('!l', x[f:f+4])[0], f+4)
return struct.unpack('!l', x[f:f + 4])[0], f + 4


def decode_intq(x, f):
f += 1
return (struct.unpack('!q', x[f:f+8])[0], f+8)
return struct.unpack('!q', x[f:f + 8])[0], f + 8


def decode_float32(x, f):
f += 1
n = struct.unpack('!f', x[f:f+4])[0]
return (n, f+4)
n = struct.unpack('!f', x[f:f + 4])[0]
return n, f + 4


def decode_float64(x, f):
f += 1
n = struct.unpack('!d', x[f:f+8])[0]
return (n, f+8)
n = struct.unpack('!d', x[f:f + 8])[0]
return n, f + 8


def decode_string(x, f):
colon = x.index(':', f)

@@ -155,123 +167,147 @@ def decode_string(x, f):
n = int(x[f:colon])
except (OverflowError, ValueError):
n = long(x[f:colon])
if x[f] == '0' and colon != f+1:
if x[f] == '0' and colon != f + 1:
raise ValueError
colon += 1
s = x[colon:colon+n]
s = x[colon:colon + n]
try:
t = s.decode("utf8")
if len(t) != len(s):
s = t
except UnicodeDecodeError:
pass
return (s, colon+n)
return s, colon + n


def decode_list(x, f):
r, f = [], f+1
r, f = [], f + 1
while x[f] != CHR_TERM:
v, f = decode_func[x[f]](x, f)
r.append(v)
return (tuple(r), f + 1)
return tuple(r), f + 1


def decode_dict(x, f):
r, f = {}, f+1
r, f = {}, f + 1
while x[f] != CHR_TERM:
k, f = decode_func[x[f]](x, f)
r[k], f = decode_func[x[f]](x, f)
return (r, f + 1)
return r, f + 1


def decode_true(x, f):
return (True, f+1)
return True, f + 1


def decode_false(x, f):
return (False, f+1)
return False, f + 1


def decode_none(x, f):
return (None, f+1)
return None, f + 1


decode_func = {
'0': decode_string,
'1': decode_string,
'2': decode_string,
'3': decode_string,
'4': decode_string,
'5': decode_string,
'6': decode_string,
'7': decode_string,
'8': decode_string,
'9': decode_string,
CHR_LIST: decode_list,
CHR_DICT: decode_dict,
CHR_INT: decode_int,
CHR_INT1: decode_intb,
CHR_INT2: decode_inth,
CHR_INT4: decode_intl,
CHR_INT8: decode_intq,
CHR_FLOAT32: decode_float32,
CHR_FLOAT64: decode_float64,
CHR_TRUE: decode_true,
CHR_FALSE: decode_false,
CHR_NONE: decode_none,
}

decode_func = {}
decode_func['0'] = decode_string
decode_func['1'] = decode_string
decode_func['2'] = decode_string
decode_func['3'] = decode_string
decode_func['4'] = decode_string
decode_func['5'] = decode_string
decode_func['6'] = decode_string
decode_func['7'] = decode_string
decode_func['8'] = decode_string
decode_func['9'] = decode_string
decode_func[CHR_LIST ] = decode_list
decode_func[CHR_DICT ] = decode_dict
decode_func[CHR_INT ] = decode_int
decode_func[CHR_INT1 ] = decode_intb
decode_func[CHR_INT2 ] = decode_inth
decode_func[CHR_INT4 ] = decode_intl
decode_func[CHR_INT8 ] = decode_intq
decode_func[CHR_FLOAT32] = decode_float32
decode_func[CHR_FLOAT64] = decode_float64
decode_func[CHR_TRUE ] = decode_true
decode_func[CHR_FALSE ] = decode_false
decode_func[CHR_NONE ] = decode_none


def make_fixed_length_string_decoders():
def make_decoder(slen):
def f(x, f):
s = x[f+1:f+1+slen]
s = x[f + 1:f + 1 + slen]
try:
t = s.decode("utf8")
if len(t) != len(s):
s = t
except UnicodeDecodeError:
pass
return (s, f+1+slen)
return s, f + 1 + slen

return f

for i in range(STR_FIXED_COUNT):
decode_func[chr(STR_FIXED_START+i)] = make_decoder(i)
decode_func[chr(STR_FIXED_START + i)] = make_decoder(i)


make_fixed_length_string_decoders()


def make_fixed_length_list_decoders():
def make_decoder(slen):
def f(x, f):
r, f = [], f+1
r, f = [], f + 1
for i in range(slen):
v, f = decode_func[x[f]](x, f)
r.append(v)
return (tuple(r), f)
return tuple(r), f

return f

for i in range(LIST_FIXED_COUNT):
decode_func[chr(LIST_FIXED_START+i)] = make_decoder(i)
decode_func[chr(LIST_FIXED_START + i)] = make_decoder(i)


make_fixed_length_list_decoders()


def make_fixed_length_int_decoders():
def make_decoder(j):
def f(x, f):
return (j, f+1)
return j, f + 1

return f

for i in range(INT_POS_FIXED_COUNT):
decode_func[chr(INT_POS_FIXED_START+i)] = make_decoder(i)
decode_func[chr(INT_POS_FIXED_START + i)] = make_decoder(i)
for i in range(INT_NEG_FIXED_COUNT):
decode_func[chr(INT_NEG_FIXED_START+i)] = make_decoder(-1-i)
decode_func[chr(INT_NEG_FIXED_START + i)] = make_decoder(-1 - i)


make_fixed_length_int_decoders()


def make_fixed_length_dict_decoders():
def make_decoder(slen):
def f(x, f):
r, f = {}, f+1
r, f = {}, f + 1
for j in range(slen):
k, f = decode_func[x[f]](x, f)
r[k], f = decode_func[x[f]](x, f)
return (r, f)
return r, f

return f

for i in range(DICT_FIXED_COUNT):
decode_func[chr(DICT_FIXED_START+i)] = make_decoder(i)
decode_func[chr(DICT_FIXED_START + i)] = make_decoder(i)


make_fixed_length_dict_decoders()

def encode_dict(x,r):

def encode_dict(x, r):
r.append(CHR_DICT)
for k, v in x.items():
encode_func[type(k)](k, r)

@@ -288,13 +324,15 @@ def loads(x):
raise ValueError
return r


from types import StringType, IntType, LongType, DictType, ListType, TupleType, FloatType, NoneType, UnicodeType


def encode_int(x, r):
if 0 <= x < INT_POS_FIXED_COUNT:
r.append(chr(INT_POS_FIXED_START+x))
r.append(chr(INT_POS_FIXED_START + x))
elif -INT_NEG_FIXED_COUNT <= x < 0:
r.append(chr(INT_NEG_FIXED_START-1-x))
r.append(chr(INT_NEG_FIXED_START - 1 - x))
elif -128 <= x < 128:
r.extend((CHR_INT1, struct.pack('!b', x)))
elif -32768 <= x < 32768:

@@ -309,27 +347,34 @@ def encode_int(x, r):
raise ValueError('overflow')
r.extend((CHR_INT, s, CHR_TERM))


def encode_float32(x, r):
r.extend((CHR_FLOAT32, struct.pack('!f', x)))


def encode_float64(x, r):
r.extend((CHR_FLOAT64, struct.pack('!d', x)))


def encode_bool(x, r):
r.extend({False: CHR_FALSE, True: CHR_TRUE}[bool(x)])


def encode_none(x, r):
r.extend(CHR_NONE)


def encode_string(x, r):
if len(x) < STR_FIXED_COUNT:
r.extend((chr(STR_FIXED_START + len(x)), x))
else:
r.extend((str(len(x)), ':', x))


def encode_unicode(x, r):
encode_string(x.encode("utf8"), r)


def encode_list(x, r):
if len(x) < LIST_FIXED_COUNT:
r.append(chr(LIST_FIXED_START + len(x)))

@@ -341,7 +386,8 @@ def encode_list(x, r):
encode_func[type(i)](i, r)
r.append(CHR_TERM)

def encode_dict(x,r):

def encode_dict(x, r):
if len(x) < DICT_FIXED_COUNT:
r.append(chr(DICT_FIXED_START + len(x)))
for k, v in x.items():

@@ -354,24 +400,28 @@ def encode_dict(x,r):
encode_func[type(v)](v, r)
r.append(CHR_TERM)

encode_func = {}
encode_func[IntType] = encode_int
encode_func[LongType] = encode_int
encode_func[StringType] = encode_string
encode_func[ListType] = encode_list
encode_func[TupleType] = encode_list
encode_func[DictType] = encode_dict
encode_func[NoneType] = encode_none
encode_func[UnicodeType] = encode_unicode

encode_func = {
IntType: encode_int,
LongType: encode_int,
StringType: encode_string,
ListType: encode_list,
TupleType: encode_list,
DictType: encode_dict,
NoneType: encode_none,
UnicodeType: encode_unicode,
}

lock = Lock()

try:
from types import BooleanType

encode_func[BooleanType] = encode_bool
except ImportError:
pass


def dumps(x, float_bits=DEFAULT_FLOAT_BITS):
"""
Dump data structure to str.

@@ -385,48 +435,53 @@ def dumps(x, float_bits=DEFAULT_FLOAT_BITS):
elif float_bits == 64:
encode_func[FloatType] = encode_float64
else:
raise ValueError('Float bits (%d) is not 32 or 64' % float_bits)
raise ValueError('Float bits ({0:d}) is not 32 or 64'.format(float_bits))
r = []
encode_func[type(x)](x, r)
finally:
lock.release()
return ''.join(r)


def test():
f1 = struct.unpack('!f', struct.pack('!f', 25.5))[0]
f2 = struct.unpack('!f', struct.pack('!f', 29.3))[0]
f3 = struct.unpack('!f', struct.pack('!f', -0.6))[0]
L = (({'a':15, 'bb':f1, 'ccc':f2, '':(f3,(),False,True,'')},('a',10**20),tuple(range(-100000,100000)),'b'*31,'b'*62,'b'*64,2**30,2**33,2**62,2**64,2**30,2**33,2**62,2**64,False,False, True, -1, 2, 0),)
L = (({'a': 15, 'bb': f1, 'ccc': f2, '': (f3, (), False, True, '')}, ('a', 10 ** 20), tuple(range(-100000, 100000)),
'b' * 31, 'b' * 62, 'b' * 64, 2 ** 30, 2 ** 33, 2 ** 62, 2 ** 64, 2 ** 30, 2 ** 33, 2 ** 62, 2 ** 64, False,
False, True, -1, 2, 0),)
assert loads(dumps(L)) == L
d = dict(zip(range(-100000,100000),range(-100000,100000)))
d.update({'a':20, 20:40, 40:41, f1:f2, f2:f3, f3:False, False:True, True:False})
L = (d, {}, {5:6}, {7:7,True:8}, {9:10, 22:39, 49:50, 44: ''})
d = dict(zip(range(-100000, 100000), range(-100000, 100000)))
d.update({'a': 20, 20: 40, 40: 41, f1: f2, f2: f3, f3: False, False: True, True: False})
L = (d, {}, {5: 6}, {7: 7, True: 8}, {9: 10, 22: 39, 49: 50, 44: ''})
assert loads(dumps(L)) == L
L = ('', 'a'*10, 'a'*100, 'a'*1000, 'a'*10000, 'a'*100000, 'a'*1000000, 'a'*10000000)
L = ('', 'a' * 10, 'a' * 100, 'a' * 1000, 'a' * 10000, 'a' * 100000, 'a' * 1000000, 'a' * 10000000)
assert loads(dumps(L)) == L
L = tuple([dict(zip(range(n),range(n))) for n in range(100)]) + ('b',)
L = tuple([dict(zip(range(n), range(n))) for n in range(100)]) + ('b',)
assert loads(dumps(L)) == L
L = tuple([dict(zip(range(n),range(-n,0))) for n in range(100)]) + ('b',)
L = tuple([dict(zip(range(n), range(-n, 0))) for n in range(100)]) + ('b',)
assert loads(dumps(L)) == L
L = tuple([tuple(range(n)) for n in range(100)]) + ('b',)
assert loads(dumps(L)) == L
L = tuple(['a'*n for n in range(1000)]) + ('b',)
L = tuple(['a' * n for n in range(1000)]) + ('b',)
assert loads(dumps(L)) == L
L = tuple(['a'*n for n in range(1000)]) + (None,True,None)
L = tuple(['a' * n for n in range(1000)]) + (None, True, None)
assert loads(dumps(L)) == L
assert loads(dumps(None)) == None
assert loads(dumps({None:None})) == {None:None}
assert 1e-10<abs(loads(dumps(1.1))-1.1)<1e-6
assert 1e-10<abs(loads(dumps(1.1,32))-1.1)<1e-6
assert abs(loads(dumps(1.1,64))-1.1)<1e-12
assert loads(dumps(None)) is None
assert loads(dumps({None: None})) == {None: None}
assert 1e-10 < abs(loads(dumps(1.1)) - 1.1) < 1e-6
assert 1e-10 < abs(loads(dumps(1.1, 32)) - 1.1) < 1e-6
assert abs(loads(dumps(1.1, 64)) - 1.1) < 1e-12
assert loads(dumps(u"Hello World!!"))


try:
import psyco

psyco.bind(dumps)
psyco.bind(loads)
except ImportError:
pass


if __name__ == '__main__':
test()
test()
@@ -1,3 +1,4 @@
# coding=utf-8
import zlib
import struct
import socket

@@ -5,9 +6,9 @@ import ssl

from core.synchronousdeluge import rencode


__all__ = ["DelugeTransfer"]


class DelugeTransfer(object):
def __init__(self):
self.sock = None

@@ -53,5 +54,3 @@ class DelugeTransfer(object):
buf = dobj.unused_data

yield message
@@ -1 +1,2 @@
# coding=utf-8
__author__ = 'Justin'
@@ -1,9 +1,10 @@
# coding=utf-8

from six import iteritems
import errno
import os
import platform
import subprocess
import urllib2
import traceback
import core
import json
import shutil

@@ -11,6 +12,7 @@ import re
from core import logger
from core.nzbToMediaUtil import makeDir


def isVideoGood(videofile, status):
fileNameExt = os.path.basename(videofile)
fileName, fileExt = os.path.splitext(fileNameExt)

@@ -19,7 +21,7 @@ def isVideoGood(videofile, status):
disable = True
else:
test_details, res = getVideoDetails(core.TEST_FILE)
if res !=0 or test_details.get("error"):
if res != 0 or test_details.get("error"):
disable = True
logger.info("DISABLED: ffprobe failed to analyse test file. Stopping corruption check.", 'TRANSCODER')
if test_details.get("streams"):

@@ -27,41 +29,46 @@ def isVideoGood(videofile, status):
audStreams = [item for item in test_details["streams"] if item["codec_type"] == "audio"]
if not (len(vidStreams) > 0 and len(audStreams) > 0):
disable = True
logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.", 'TRANSCODER')
logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.",
'TRANSCODER')
if disable:
if status: # if the download was "failed", assume bad. If it was successful, assume good.
return False
else:
return True

logger.info('Checking [%s] for corruption, please stand by ...' % (fileNameExt), 'TRANSCODER')
logger.info('Checking [{0}] for corruption, please stand by ...'.format(fileNameExt), 'TRANSCODER')
video_details, result = getVideoDetails(videofile)

if result != 0:
logger.error("FAILED: [%s] is corrupted!" % (fileNameExt), 'TRANSCODER')
logger.error("FAILED: [{0}] is corrupted!".format(fileNameExt), 'TRANSCODER')
return False
if video_details.get("error"):
logger.info("FAILED: [%s] returned error [%s]." % (fileNameExt, str(video_details.get("error"))), 'TRANSCODER')
logger.info("FAILED: [{0}] returned error [{1}].".format(fileNameExt, video_details.get("error")), 'TRANSCODER')
return False
if video_details.get("streams"):
videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
if len(videoStreams) > 0 and len(audioStreams) > 0:
logger.info("SUCCESS: [%s] has no corruption." % (fileNameExt), 'TRANSCODER')
logger.info("SUCCESS: [{0}] has no corruption.".format(fileNameExt), 'TRANSCODER')
return True
else:
logger.info("FAILED: [%s] has %s video streams and %s audio streams. Assume corruption." % (fileNameExt, str(len(videoStreams)), str(len(audioStreams))), 'TRANSCODER')
logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. "
"Assume corruption.".format
(fileNameExt, len(videoStreams), len(audioStreams)), 'TRANSCODER')
return False


def zip_out(file, img, bitbucket):
procin = None
cmd = [core.SEVENZIP, '-so', 'e', img, file]
try:
procin = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
except:
logger.error("Extracting [%s] has failed" % (file), 'TRANSCODER')
logger.error("Extracting [{0}] has failed".format(file), 'TRANSCODER')
return procin


def getVideoDetails(videofile, img=None, bitbucket=None):
video_details = {}
result = 1

@@ -75,7 +82,8 @@ def getVideoDetails(videofile, img=None, bitbucket=None):
try:
if img:
videofile = '-'
command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', '-show_error', videofile]
command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', '-show_error',
videofile]
print_cmd(command)
if img:
procin = zip_out(file, img, bitbucket)

@@ -86,7 +94,8 @@ def getVideoDetails(videofile, img=None, bitbucket=None):
out, err = proc.communicate()
result = proc.returncode
video_details = json.loads(out)
except: pass
except:
pass
if not video_details:
try:
command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', videofile]

@@ -100,9 +109,10 @@ def getVideoDetails(videofile, img=None, bitbucket=None):
result = proc.returncode
video_details = json.loads(out)
except:
logger.error("Checking [%s] has failed" % (file), 'TRANSCODER')
logger.error("Checking [{0}] has failed".format(file), 'TRANSCODER')
return video_details, result


def buildCommands(file, newDir, movieName, bitbucket):
if isinstance(file, str):
inputFile = file

@@ -115,13 +125,13 @@ def buildCommands(file, newDir, movieName, bitbucket):
if check and core.CONCAT:
name = movieName
elif check:
name = ('%s.cd%s' % (movieName, check.groups()[0]))
name = ('{0}.cd{1}'.format(movieName, check.groups()[0]))
elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name):
name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name)
if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself.
core.VEXTENSION = '-transcoded' + core.VEXTENSION # adds '-transcoded.ext'
if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself.
core.VEXTENSION = '-transcoded' + core.VEXTENSION # adds '-transcoded.ext'
else:
img, data = file.iteritems().next()
img, data = iteritems(file).next()
name = data['name']
video_details, result = getVideoDetails(data['files'][0], img, bitbucket)
inputFile = '-'

@@ -133,12 +143,12 @@ def buildCommands(file, newDir, movieName, bitbucket):
video_cmd = []
audio_cmd = []
audio_cmd2 = []
audio_cmd3 = []
sub_cmd = []
meta_cmd = []
other_cmd = []

if not video_details or not video_details.get("streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
if not video_details or not video_details.get(
"streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
videoStreams = []
audioStreams = []
subStreams = []
@@ -165,12 +175,13 @@ def buildCommands(file, newDir, movieName, bitbucket):

if core.ACODEC:
audio_cmd.extend(['-c:a', core.ACODEC])
if core.ACODEC in ['aac', 'dts']: # Allow users to use the experimental AAC codec that's built into recent versions of ffmpeg
if core.ACODEC in ['aac',
'dts']: # Allow users to use the experimental AAC codec that's built into recent versions of ffmpeg
audio_cmd.extend(['-strict', '-2'])
else:
audio_cmd.extend(['-c:a', 'copy'])
if core.ACHANNELS:
audio_cmd.extend(['-ac', str(core.ACHANNELS)])
audio_cmd.extend(['-ac', str(core.ACHANNELS)])
if core.ABITRATE:
audio_cmd.extend(['-b:a', str(core.ABITRATE)])
if core.OUTPUTQUALITYPERCENT:

@@ -182,7 +193,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
sub_cmd.extend(['-c:s', 'copy'])
else: # http://en.wikibooks.org/wiki/FFMPEG_An_Intermediate_Guide/subtitle_options
sub_cmd.extend(['-sn']) # Don't copy the subtitles over


if core.OUTPUTFASTSTART:
other_cmd.extend(['-movflags', '+faststart'])

@@ -191,23 +202,16 @@ def buildCommands(file, newDir, movieName, bitbucket):
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"]
if core.VEXTENSION not in ['.mkv', '.mpegts']:
subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"]
subStreams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"]

for video in videoStreams:
codec = video["codec_name"]
try:
fr = video["avg_frame_rate"]
except: fr = 0
try:
width = video["width"]
except: width = 0
try:
height = video["height"]
except: height = 0
fr = video.get("avg_frame_rate", 0)
width = video.get("width", 0)
height = video.get("height", 0)
scale = core.VRESOLUTION
try:
framerate = float(fr.split('/')[0])/float(fr.split('/')[1])
except: framerate = 0
if codec in core.VCODEC_ALLOW or not core.VCODEC:
video_cmd.extend(['-c:v', 'copy'])
else:

@@ -215,16 +219,16 @@ def buildCommands(file, newDir, movieName, bitbucket):
if core.VFRAMERATE and not (core.VFRAMERATE * 0.999 <= fr <= core.VFRAMERATE * 1.001):
video_cmd.extend(['-r', str(core.VFRAMERATE)])
if scale:
w_scale = width/float(scale.split(':')[0])
h_scale = height/float(scale.split(':')[1])
if w_scale > h_scale: # widescreen, Scale by width only.
scale = scale.split(':')[0] + ":" + str(int((height/w_scale)/2)*2)
if w_scale > 1:
video_cmd.extend(['-vf', 'scale=' + scale])
w_scale = width / float(scale.split(':')[0])
h_scale = height / float(scale.split(':')[1])
if w_scale > h_scale: # widescreen, Scale by width only.
scale = scale.split(':')[0] + ":" + str(int((height / w_scale) / 2) * 2)
if w_scale > 1:
video_cmd.extend(['-vf', 'scale=' + scale])
else: # lower or matching ratio, scale by height only.
scale = str(int((width/h_scale)/2)*2) + ":" + scale.split(':')[1]
if h_scale > 1:
video_cmd.extend(['-vf', 'scale=' + scale])
scale = str(int((width / h_scale) / 2) * 2) + ":" + scale.split(':')[1]
if h_scale > 1:
video_cmd.extend(['-vf', 'scale=' + scale])
if core.VBITRATE:
video_cmd.extend(['-b:v', str(core.VBITRATE)])
if core.VPRESET:
@@ -237,7 +241,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
if video_cmd[1] == 'copy' and any(i in video_cmd for i in no_copy):
video_cmd[1] = core.VCODEC
if core.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding.
video_cmd = ['-c:v', 'copy']
video_cmd = ['-c:v', 'copy']
map_cmd.extend(['-map', '0:' + str(video["index"])])
break # Only one video needed

@@ -245,34 +249,26 @@ def buildCommands(file, newDir, movieName, bitbucket):
a_mapped = []
if audioStreams:
try:
audio1 = [ item for item in audioStreams if item["tags"]["language"] == core.ALANGUAGE ]
audio1 = [item for item in audioStreams if item["tags"]["language"] == core.ALANGUAGE]
except: # no language tags. Assume only 1 language.
audio1 = audioStreams
audio2 = [ item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW ]
audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW]
try:
audio3 = [ item for item in audioStreams if item["tags"]["language"] != core.ALANGUAGE ]
audio3 = [item for item in audioStreams if item["tags"]["language"] != core.ALANGUAGE]
except:
audio3 = []

if audio2: # right language and codec...
map_cmd.extend(['-map', '0:' + str(audio2[0]["index"])])
a_mapped.extend([audio2[0]["index"]])
try:
bitrate = int(audio2[0]["bit_rate"])/1000
except: bitrate = 0
try:
channels = int(audio2[0]["channels"])
except: channels = 0
bitrate = int(audio2[0].get("bit_rate", 0)) / 1000
channels = int(audio2[0].get("channels", 0))
audio_cmd.extend(['-c:a:' + str(used_audio), 'copy'])
elif audio1: # right language wrong codec.
map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])])
a_mapped.extend([audio1[0]["index"]])
try:
bitrate = int(audio1[0]["bit_rate"])/1000
except: bitrate = 0
try:
channels = int(audio1[0]["channels"])
except: channels = 0
bitrate = int(audio1[0].get("bit_rate", 0)) / 1000
channels = int(audio1[0].get("channels", 0))
if core.ACODEC:
audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC])
else:

@@ -280,12 +276,8 @@ def buildCommands(file, newDir, movieName, bitbucket):
elif audio3: # just pick the default audio track
map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])])
a_mapped.extend([audio3[0]["index"]])
try:
bitrate = int(audio3[0]["bit_rate"])/1000
except: bitrate = 0
try:
channels = int(audio3[0]["channels"])
except: channels = 0
bitrate = int(audio3[0].get("bit_rate", 0)) / 1000
channels = int(audio3[0].get("channels", 0))
if core.ACODEC:
audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC])
else:

@@ -308,26 +300,18 @@ def buildCommands(file, newDir, movieName, bitbucket):

if core.ACODEC2_ALLOW:
used_audio += 1
audio4 = [ item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW ]
audio4 = [item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW]
if audio4: # right language and codec.
map_cmd.extend(['-map', '0:' + str(audio4[0]["index"])])
a_mapped.extend([audio4[0]["index"]])
try:
bitrate = int(audio4[0]["bit_rate"])/1000
except: bitrate = 0
try:
channels = int(audio4[0]["channels"])
except: channels = 0
bitrate = int(audio4[0].get("bit_rate", 0)) / 1000
channels = int(audio4[0].get("channels", 0))
audio_cmd2.extend(['-c:a:' + str(used_audio), 'copy'])
elif audio1: # right language wrong codec.
map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])])
a_mapped.extend([audio1[0]["index"]])
try:
bitrate = int(audio1[0]["bit_rate"])/1000
except: bitrate = 0
try:
channels = int(audio1[0]["channels"])
except: channels = 0
bitrate = int(audio1[0].get("bit_rate", 0)) / 1000
channels = int(audio1[0].get("channels", 0))
if core.ACODEC2:
audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2])
else:

@@ -335,12 +319,8 @@ def buildCommands(file, newDir, movieName, bitbucket):
elif audio3: # just pick the default audio track
map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])])
a_mapped.extend([audio3[0]["index"]])
try:
bitrate = int(audio3[0]["bit_rate"])/1000
except: bitrate = 0
try:
channels = int(audio3[0]["channels"])
except: channels = 0
bitrate = int(audio3[0].get("bit_rate", 0)) / 1000
channels = int(audio3[0].get("channels", 0))
if core.ACODEC2:
audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2])
else:

@@ -369,12 +349,8 @@ def buildCommands(file, newDir, movieName, bitbucket):
used_audio += 1
map_cmd.extend(['-map', '0:' + str(audio["index"])])
audio_cmd3 = []
try:
bitrate = int(audio["bit_rate"])/1000
except: bitrate = 0
try:
channels = int(audio["channels"])
except: channels = 0
bitrate = int(audio.get("bit_rate", 0)) / 1000
channels = int(audio.get("channels", 0))
if audio["codec_name"] in core.ACODEC3_ALLOW:
audio_cmd3.extend(['-c:a:' + str(used_audio), 'copy'])
else:

@@ -400,13 +376,13 @@ def buildCommands(file, newDir, movieName, bitbucket):
audio_cmd.extend(audio_cmd3)

s_mapped = []
subs1 = []
burnt = 0
n = 0
for lan in core.SLANGUAGES:
try:
subs1 = [ item for item in subStreams if item["tags"]["language"] == lan ]
except: subs1 = []
subs1 = [item for item in subStreams if item["tags"]["language"] == lan]
except:
subs1 = []
if core.BURN and not subs1 and not burnt and os.path.isfile(file):
for subfile in get_subs(file):
if lan in os.path.split(subfile)[1]:

@@ -425,7 +401,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
break
map_cmd.extend(['-map', '0:' + str(sub["index"])])
s_mapped.extend([sub["index"]])


if core.SINCLUDE:
for sub in subStreams:
if not core.ALLOWSUBS:

@@ -433,7 +409,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
if sub["index"] in s_mapped:
continue
map_cmd.extend(['-map', '0:' + str(sub["index"])])
s_mapped.extend([sub["index"]])
s_mapped.extend([sub["index"]])

if core.OUTPUTFASTSTART:
other_cmd.extend(['-movflags', '+faststart'])

@@ -445,7 +421,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
if core.GENERALOPTS:
command.extend(core.GENERALOPTS)

command.extend([ '-i', inputFile])
command.extend(['-i', inputFile])

if core.SEMBED and os.path.isfile(file):
for subfile in get_subs(file):

@@ -460,7 +436,7 @@ def buildCommands(file, newDir, movieName, bitbucket):

if not core.ALLOWSUBS or (not s_mapped and not n):
sub_cmd.extend(['-sn'])
else:
else:
if core.SCODEC:
sub_cmd.extend(['-c:s', core.SCODEC])
else:

@@ -477,6 +453,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
command = core.NICENESS + command
return command


def get_subs(file):
filepaths = []
subExt = ['.srt', '.sub', '.idx']

@@ -485,9 +462,10 @@ def get_subs(file):
for dirname, dirs, filenames in os.walk(dir):
for filename in filenames:
filepaths.extend([os.path.join(dirname, filename)])
subfiles = [ item for item in filepaths if os.path.splitext(item)[1] in subExt and name in item ]
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in subExt and name in item]
return subfiles


def extract_subs(file, newfilePath, bitbucket):
video_details, result = getVideoDetails(file)
if not video_details:
@@ -500,34 +478,36 @@ def extract_subs(file, newfilePath, bitbucket):
name = os.path.splitext(os.path.split(newfilePath)[1])[0]

try:
subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item["codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"]
subStreams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[
"codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"]
except:
subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"]
subStreams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"]
num = len(subStreams)
for n in range(num):
sub = subStreams[n]
idx = sub["index"]
try:
lan = sub["tags"]["language"]
except:
lan = "unk"
lan = sub.get("tags", {}).get("language", "unk")

if num == 1:
outputFile = os.path.join(subdir, "%s.srt" %(name))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "%s.%s.srt" %(name, n))
outputFile = os.path.join(subdir, "{0}.srt".format(name))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, n))
else:
outputFile = os.path.join(subdir, "%s.%s.srt" %(name, lan))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "%s.%s.%s.srt" %(name, lan, n))
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, lan))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n))

command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', '-codec:' + str(idx), 'srt', outputFile]
command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', '-codec:' + str(idx), 'srt',
outputFile]
if platform.system() != 'Windows':
command = core.NICENESS + command

logger.info("Extracting %s subtitle from: %s" % (lan, file))
logger.info("Extracting {0} subtitle from: {1}".format(lan, file))
print_cmd(command)
result = 1 # set result to failed in case call fails.
result = 1 # set result to failed in case call fails.
try:
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket)
proc.communicate()

@@ -538,30 +518,30 @@ def extract_subs(file, newfilePath, bitbucket):
if result == 0:
try:
shutil.copymode(file, outputFile)
except: pass
logger.info("Extracting %s subtitle from %s has succeeded" % (lan, file))
except:
pass
logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file))
else:
logger.error("Extracting subtitles has failed")

def processList(List, newDir, bitbucket):
|
||||
remList = []
|
||||
newList = []
|
||||
delList = []
|
||||
combine = []
|
||||
vtsPath = None
|
||||
success = True
|
||||
for item in List:
|
||||
newfile = None
|
||||
ext = os.path.splitext(item)[1].lower()
|
||||
if ext in ['.iso', '.bin', '.img'] and not ext in core.IGNOREEXTENSIONS:
|
||||
logger.debug("Attempting to rip disk image: %s" % (item), "TRANSCODER")
|
||||
if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS:
|
||||
logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER")
|
||||
newList.extend(ripISO(item, newDir, bitbucket))
|
||||
remList.append(item)
|
||||
elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and not '.vob' in core.IGNOREEXTENSIONS:
|
||||
logger.debug("Found VIDEO_TS image file: %s" % (item), "TRANSCODER")
|
||||
elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS:
|
||||
logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER")
|
||||
if not vtsPath:
|
||||
try:
|
||||
vtsPath = re.match("(.+VIDEO_TS)",item).groups()[0]
|
||||
vtsPath = re.match("(.+VIDEO_TS)", item).groups()[0]
|
||||
except:
|
||||
vtsPath = os.path.split(item)[0]
|
||||
remList.append(item)
|
||||
|
@ -570,48 +550,50 @@ def processList(List, newDir, bitbucket):
|
|||
elif core.CONCAT and re.match(".+[cC][dD][0-9].", item):
|
||||
remList.append(item)
|
||||
combine.append(item)
|
||||
else: continue
|
||||
else:
|
||||
continue
|
||||
if vtsPath:
|
||||
newList.extend(combineVTS(vtsPath))
|
||||
if combine:
|
||||
newList.extend(combineCD(combine))
|
||||
for file in newList:
|
||||
if isinstance(file, str) and not 'concat:' in file and not os.path.isfile(file):
|
||||
if isinstance(file, str) and 'concat:' not in file and not os.path.isfile(file):
|
||||
success = False
|
||||
break
|
||||
if success and newList:
|
||||
List.extend(newList)
|
||||
for item in remList:
|
||||
List.remove(item)
|
||||
logger.debug("Successfully extracted .vob file %s from disk image" % (newList[0]), "TRANSCODER")
|
||||
logger.debug("Successfully extracted .vob file {0} from disk image".format(newList[0]), "TRANSCODER")
|
||||
elif newList and not success:
|
||||
newList = []
|
||||
remList = []
|
||||
logger.error("Failed extracting .vob files from disk image. Stopping transcoding.", "TRANSCODER")
|
||||
return List, remList, newList, success
|
||||
return List, remList, newList, success
|
||||
|
||||
|
||||
def ripISO(item, newDir, bitbucket):
|
||||
newFiles = []
|
||||
failure_dir = 'failure'
|
||||
# Mount the ISO in your OS and call combineVTS.
|
||||
if not core.SEVENZIP:
|
||||
logger.error("No 7zip installed. Can't extract image file %s" % (item), "TRANSCODER")
|
||||
logger.error("No 7zip installed. Can't extract image file {0}".format(item), "TRANSCODER")
|
||||
newFiles = [failure_dir]
|
||||
return newFiles
|
||||
cmd = [core.SEVENZIP, 'l', item]
|
||||
try:
|
||||
logger.debug("Attempting to extract .vob from image file %s" % (item), "TRANSCODER")
|
||||
logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER")
|
||||
print_cmd(cmd)
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
|
||||
out, err = proc.communicate()
|
||||
result = proc.returncode
|
||||
fileList = [ re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line) ]
|
||||
fileList = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in
|
||||
out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)]
|
||||
combined = []
|
||||
for n in range(99):
|
||||
concat = []
|
||||
m = 1
|
||||
while True:
|
||||
vtsName = 'VIDEO_TS%sVTS_%02d_%d.VOB' % (os.sep, n+1, m)
|
||||
vtsName = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m)
|
||||
if vtsName in fileList:
|
||||
concat.append(vtsName)
|
||||
m += 1
|
||||
|
@ -622,19 +604,20 @@ def ripISO(item, newDir, bitbucket):
|
|||
if core.CONCAT:
|
||||
combined.extend(concat)
|
||||
continue
|
||||
name = '%s.cd%s' % (os.path.splitext(os.path.split(item)[1])[0] ,str(n+1))
|
||||
newFiles.append({item: {'name': name , 'files': concat}})
|
||||
name = '{0}.cd{1}'.format(os.path.splitext(os.path.split(item)[1])[0], str(n + 1))
|
||||
newFiles.append({item: {'name': name, 'files': concat}})
|
||||
if core.CONCAT:
|
||||
name = os.path.splitext(os.path.split(item)[1])[0]
|
||||
newFiles.append({item: {'name': name , 'files': combined}})
|
||||
newFiles.append({item: {'name': name, 'files': combined}})
|
||||
if not newFiles:
|
||||
logger.error("No VIDEO_TS folder found in image file %s" % (item), "TRANSCODER")
|
||||
logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER")
|
||||
newFiles = [failure_dir]
|
||||
except:
|
||||
logger.error("Failed to extract from image file %s" % (item), "TRANSCODER")
|
||||
logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER")
|
||||
newFiles = [failure_dir]
|
||||
return newFiles
|
||||
|
||||
|
||||
def combineVTS(vtsPath):
|
||||
newFiles = []
|
||||
combined = ''
|
||||
|
@ -642,7 +625,7 @@ def combineVTS(vtsPath):
|
|||
concat = ''
|
||||
m = 1
|
||||
while True:
|
||||
vtsName = 'VTS_%02d_%d.VOB' % (n+1, m)
|
||||
vtsName = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
|
||||
if os.path.isfile(os.path.join(vtsPath, vtsName)):
|
||||
concat = concat + os.path.join(vtsPath, vtsName) + '|'
|
||||
m += 1
|
||||
|
@ -653,36 +636,40 @@ def combineVTS(vtsPath):
|
|||
if core.CONCAT:
|
||||
combined = combined + concat + '|'
|
||||
continue
|
||||
newFiles.append('concat:%s' % concat[:-1])
|
||||
newFiles.append('concat:{0}'.format(concat[:-1]))
|
||||
if core.CONCAT:
|
||||
newFiles.append('concat:%s' % combined[:-1])
|
||||
newFiles.append('concat:{0}'.format(combined[:-1]))
|
||||
return newFiles
|
||||
|
||||
|
||||
def combineCD(combine):
|
||||
newFiles = []
|
||||
for item in set([ re.match("(.+)[cC][dD][0-9].",item).groups()[0] for item in combine ]):
|
||||
for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]):
|
||||
concat = ''
|
||||
for n in range(99):
|
||||
files = [ file for file in combine if n+1 == int(re.match(".+[cC][dD]([0-9]+).",file).groups()[0]) and item in file ]
|
||||
files = [file for file in combine if
|
||||
n + 1 == int(re.match(".+[cC][dD]([0-9]+).", file).groups()[0]) and item in file]
|
||||
if files:
|
||||
concat = concat + files[0] + '|'
|
||||
else:
|
||||
break
|
||||
if concat:
|
||||
newFiles.append('concat:%s' % concat[:-1])
|
||||
newFiles.append('concat:{0}'.format(concat[:-1]))
|
||||
return newFiles
|
||||
|
||||
|
||||
def print_cmd(command):
|
||||
cmd = ""
|
||||
for item in command:
|
||||
cmd = cmd + " " + str(item)
|
||||
logger.debug("calling command:%s" % (cmd))
|
||||
logger.debug("calling command:{0}".format(cmd))
|
||||
|
||||
|
||||
def Transcode_directory(dirName):
|
||||
if not core.FFMPEG:
|
||||
return 1, dirName
|
||||
logger.info("Checking for files to be transcoded")
|
||||
final_result = 0 # initialize as successful
|
||||
final_result = 0 # initialize as successful
|
||||
if core.OUTPUTVIDEOPATH:
|
||||
newDir = core.OUTPUTVIDEOPATH
|
||||
makeDir(newDir)
|
||||
|
@ -712,22 +699,22 @@ def Transcode_directory(dirName):
|
|||
if core.SEXTRACT and isinstance(file, str):
|
||||
extract_subs(file, newfilePath, bitbucket)
|
||||
|
||||
try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)
|
||||
try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)
|
||||
os.remove(newfilePath)
|
||||
except OSError, e:
|
||||
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
|
||||
logger.debug("Error when removing transcoding target: %s" % (e))
|
||||
except Exception, e:
|
||||
logger.debug("Error when removing transcoding target: %s" % (e))
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
|
||||
logger.debug("Error when removing transcoding target: {0}".format(e))
|
||||
except Exception as e:
|
||||
logger.debug("Error when removing transcoding target: {0}".format(e))
|
||||
|
||||
logger.info("Transcoding video: %s" % (newfilePath))
|
||||
logger.info("Transcoding video: {0}".format(newfilePath))
|
||||
print_cmd(command)
|
||||
result = 1 # set result to failed in case call fails.
|
||||
result = 1 # set result to failed in case call fails.
|
||||
try:
|
||||
if isinstance(file, str):
|
||||
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket)
|
||||
else:
|
||||
img, data = file.iteritems().next()
|
||||
img, data = iteritems(file).next()
|
||||
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket, stdin=subprocess.PIPE)
|
||||
for vob in data['files']:
|
||||
procin = zip_out(vob, img, bitbucket)
|
||||
|
@ -737,7 +724,7 @@ def Transcode_directory(dirName):
|
|||
proc.communicate()
|
||||
result = proc.returncode
|
||||
except:
|
||||
logger.error("Transcoding of video %s has failed" % (newfilePath))
|
||||
logger.error("Transcoding of video {0} has failed".format(newfilePath))
|
||||
|
||||
if core.SUBSDIR and result == 0 and isinstance(file, str):
|
||||
for sub in get_subs(file):
|
||||
|
@ -751,22 +738,25 @@ def Transcode_directory(dirName):
|
|||
if result == 0:
|
||||
try:
|
||||
shutil.copymode(file, newfilePath)
|
||||
except: pass
|
||||
logger.info("Transcoding of video to %s succeeded" % (newfilePath))
|
||||
except:
|
||||
pass
|
||||
logger.info("Transcoding of video to {0} succeeded".format(newfilePath))
|
||||
if os.path.isfile(newfilePath) and (file in newList or not core.DUPLICATE):
|
||||
try:
|
||||
os.unlink(file)
|
||||
except: pass
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
logger.error("Transcoding of video to %s failed with result %s" % (newfilePath, str(result)))
|
||||
logger.error("Transcoding of video to {0} failed with result {1}".format(newfilePath, result))
|
||||
# this will be 0 (successful) it all are successful, else will return a positive integer for failure.
|
||||
final_result = final_result + result
|
||||
if final_result == 0 and not core.DUPLICATE:
|
||||
for file in remList:
|
||||
try:
|
||||
os.unlink(file)
|
||||
except: pass
|
||||
if not os.listdir(newDir): #this is an empty directory and we didn't transcode into it.
|
||||
except:
|
||||
pass
|
||||
if not os.listdir(newDir): # this is an empty directory and we didn't transcode into it.
|
||||
os.rmdir(newDir)
|
||||
newDir = dirName
|
||||
if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB
|
||||
|
|
|
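Because every exit code is added into final_result, the sum is 0 only when all transcodes succeed; any non-zero ffmpeg result leaves a positive total. In miniature (the exit codes are made up):

final_result = 0
for result in (0, 0, 1, 0):  # pretend results from four transcodes
    final_result = final_result + result
print(final_result)  # 1, so at least one transcode failed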
@ -10,9 +10,9 @@ from core.transmissionrpc.session import Session
|
|||
from core.transmissionrpc.client import Client
|
||||
from core.transmissionrpc.utils import add_stdout_logger, add_file_logger
|
||||
|
||||
__author__ = 'Erik Svensson <erik.public@gmail.com>'
|
||||
__version_major__ = 0
|
||||
__version_minor__ = 11
|
||||
__version__ = '{0}.{1}'.format(__version_major__, __version_minor__)
|
||||
__copyright__ = 'Copyright (c) 2008-2013 Erik Svensson'
|
||||
__license__ = 'MIT'
|
||||
__author__ = 'Erik Svensson <erik.public@gmail.com>'
|
||||
__version_major__ = 0
|
||||
__version_minor__ = 11
|
||||
__version__ = '{0}.{1}'.format(__version_major__, __version_minor__)
|
||||
__copyright__ = 'Copyright (c) 2008-2013 Erik Svensson'
|
||||
__license__ = 'MIT'
|
||||
|
|
|
@ -18,13 +18,9 @@ from core.transmissionrpc.torrent import Torrent
|
|||
from core.transmissionrpc.session import Session
|
||||
from six import PY3, integer_types, string_types, iteritems
|
||||
|
||||
from six.moves.urllib_parse import urlparse
|
||||
from six.moves.urllib_request import urlopen
|
||||
|
||||
if PY3:
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
else:
|
||||
from urlparse import urlparse
|
||||
from urllib2 import urlopen
|
||||
|
||||
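The six.moves imports replace the hand-rolled PY3 branch: six resolves them to urllib.parse/urllib.request on Python 3 and to urlparse/urllib2 on Python 2, so the calling code stays identical. For example:

from six.moves.urllib_parse import urlparse
from six.moves.urllib_request import urlopen  # maps to urllib2.urlopen on Python 2

parts = urlparse('http://localhost:9091/transmission/rpc')
print(parts.hostname, parts.port)  # localhost 9091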
def debug_httperror(error):
|
||||
"""
|
||||
|
@ -49,6 +45,7 @@ def debug_httperror(error):
|
|||
)
|
||||
)
|
||||
|
||||
|
||||
def parse_torrent_id(arg):
|
||||
"""Parse an torrent id or torrent hashString."""
|
||||
torrent_id = None
|
||||
|
@ -62,7 +59,7 @@ def parse_torrent_id(arg):
|
|||
elif isinstance(arg, string_types):
|
||||
try:
|
||||
torrent_id = int(arg)
|
||||
if torrent_id >= 2**31:
|
||||
if torrent_id >= 2 ** 31:
|
||||
torrent_id = None
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
@ -75,6 +72,7 @@ def parse_torrent_id(arg):
|
|||
pass
|
||||
return torrent_id
|
||||
|
||||
|
||||
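As the hunk shows, a numeric argument (or numeric string) below 2 ** 31 becomes an integer id and larger values are rejected; the elided remainder of the function accepts hash strings. A simplified re-implementation, for illustration only (the real function is stricter):

from six import integer_types, string_types

def parse_torrent_id_sketch(arg):
    if isinstance(arg, integer_types):
        return arg
    if isinstance(arg, string_types):
        try:
            value = int(arg)
            return value if value < 2 ** 31 else None
        except (ValueError, TypeError):
            pass
        if len(arg) == 40:  # assumption: treat a 40-char string as a hashString
            return arg
    return None

print(parse_torrent_id_sketch('17'))      # 17
print(parse_torrent_id_sketch('f' * 40))  # the hash string itself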
def parse_torrent_ids(args):
|
||||
"""
|
||||
Take things and make them valid torrent identifiers
|
||||
|
@ -102,19 +100,20 @@ def parse_torrent_ids(args):
|
|||
except ValueError:
|
||||
pass
|
||||
if not addition:
|
||||
raise ValueError('Invalid torrent id, \"%s\"' % item)
|
||||
raise ValueError('Invalid torrent id, {item!r}'.format(item=item))
|
||||
ids.extend(addition)
|
||||
elif isinstance(args, (list, tuple)):
|
||||
for item in args:
|
||||
ids.extend(parse_torrent_ids(item))
|
||||
else:
|
||||
torrent_id = parse_torrent_id(args)
|
||||
if torrent_id == None:
|
||||
if torrent_id is None:
|
||||
raise ValueError('Invalid torrent id')
|
||||
else:
|
||||
ids = [torrent_id]
|
||||
return ids
|
||||
|
||||
|
||||
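Usage follows directly from the branches above (assuming the semantics visible in this hunk):

from core.transmissionrpc.client import parse_torrent_ids  # defined in this module

print(parse_torrent_ids('1,3,5'))   # [1, 3, 5]  (comma-separated string)
print(parse_torrent_ids([1, '2']))  # [1, 2]     (sequences recurse per item)
print(parse_torrent_ids(7))         # [7]        (a single id is wrapped in a list)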
"""
|
||||
Torrent ids
|
||||
|
||||
|
@ -129,12 +128,14 @@ possible to provide an argument called ``timeout``. Timeout is only effective
|
|||
when using Python 2.6 or later and the default timeout is 30 seconds.
|
||||
"""
|
||||
|
||||
|
||||
class Client(object):
|
||||
"""
|
||||
Client is the class handling the Transmission JSON-RPC client protocol.
|
||||
"""
|
||||
|
||||
def __init__(self, address='localhost', port=DEFAULT_PORT, user=None, password=None, http_handler=None, timeout=None):
|
||||
def __init__(self, address='localhost', port=DEFAULT_PORT, user=None, password=None, http_handler=None,
|
||||
timeout=None):
|
||||
if isinstance(timeout, (integer_types, float)):
|
||||
self._query_timeout = float(timeout)
|
||||
else:
|
||||
|
@ -204,7 +205,8 @@ class Client(object):
|
|||
if timeout is None:
|
||||
timeout = self._query_timeout
|
||||
while True:
|
||||
LOGGER.debug(json.dumps({'url': self.url, 'headers': headers, 'query': query, 'timeout': timeout}, indent=2))
|
||||
LOGGER.debug(
|
||||
json.dumps({'url': self.url, 'headers': headers, 'query': query, 'timeout': timeout}, indent=2))
|
||||
try:
|
||||
result = self.http_handler.request(self.url, query, headers, timeout)
|
||||
break
|
||||
|
@ -244,26 +246,25 @@ class Client(object):
|
|||
elif require_ids:
|
||||
raise ValueError('request require ids')
|
||||
|
||||
query = json.dumps({'tag': self._sequence, 'method': method
|
||||
, 'arguments': arguments})
|
||||
query = json.dumps({'tag': self._sequence, 'method': method, 'arguments': arguments})
|
||||
self._sequence += 1
|
||||
start = time.time()
|
||||
http_data = self._http_query(query, timeout)
|
||||
elapsed = time.time() - start
|
||||
LOGGER.info('http request took %.3f s' % (elapsed))
|
||||
LOGGER.info('http request took {time:.3f} s'.format(time=elapsed))
|
||||
|
||||
try:
|
||||
data = json.loads(http_data)
|
||||
except ValueError as error:
|
||||
LOGGER.error('Error: ' + str(error))
|
||||
LOGGER.error('Request: \"%s\"' % (query))
|
||||
LOGGER.error('HTTP data: \"%s\"' % (http_data))
|
||||
LOGGER.error('Request: {request!r}'.format(request=query))
|
||||
LOGGER.error('HTTP data: {data!r}'.format(data=http_data))
|
||||
raise
|
||||
|
||||
LOGGER.debug(json.dumps(data, indent=2))
|
||||
if 'result' in data:
|
||||
if data['result'] != 'success':
|
||||
raise TransmissionError('Query failed with result \"%s\".' % (data['result']))
|
||||
raise TransmissionError('Query failed with result {result!r}.'.format(result=data['result']))
|
||||
else:
|
||||
raise TransmissionError('Query failed without result.')
|
||||
|
||||
|
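The wire format is plain JSON over HTTP: each request carries a client-chosen tag, a method name and an arguments object, and a healthy response echoes 'success' in its result field. A round trip in miniature (the reply here is hand-written):

import json

# What _http_query would send for a torrent-get call:
query = json.dumps({'tag': 1, 'method': 'torrent-get',
                    'arguments': {'fields': ['id', 'name']}})
print(query)

# A well-formed reply echoes the tag and reports success:
http_data = '{"result": "success", "arguments": {"torrents": []}, "tag": 1}'
data = json.loads(http_data)
if data['result'] != 'success':
    raise RuntimeError('Query failed with result {0!r}.'.format(data['result']))
print(data['arguments'])  # {'torrents': []}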
@ -347,8 +348,9 @@ class Client(object):
|
|||
Add a warning to the log if the Transmission RPC version is lower than the provided version.
|
||||
"""
|
||||
if self.rpc_version < version:
|
||||
LOGGER.warning('Using feature not supported by server. RPC version for server %d, feature introduced in %d.'
|
||||
% (self.rpc_version, version))
|
||||
LOGGER.warning('Using feature not supported by server. '
|
||||
'RPC version for server {x}, feature introduced in {y}.'.format
|
||||
(x=self.rpc_version, y=version))
|
||||
|
||||
def add_torrent(self, torrent, timeout=None, **kwargs):
|
||||
"""
|
||||
|
@ -408,11 +410,8 @@ class Client(object):
|
|||
pass
|
||||
if might_be_base64:
|
||||
torrent_data = torrent
|
||||
args = {}
|
||||
if torrent_data:
|
||||
args = {'metainfo': torrent_data}
|
||||
else:
|
||||
args = {'filename': torrent}
|
||||
|
||||
args = {'metainfo': torrent_data} if torrent_data else {'filename': torrent}
|
||||
for key, value in iteritems(kwargs):
|
||||
argument = make_rpc_name(key)
|
||||
(arg, val) = argument_value_convert('torrent-add', argument, value, self.rpc_version)
|
||||
|
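The collapsed conditional sends inline torrent data as 'metainfo' and anything else, a path, URL or magnet link, as 'filename'. Preparing both forms (paths and URLs are illustrative):

import base64

with open('example.torrent', 'rb') as handle:  # illustrative path
    torrent_data = base64.b64encode(handle.read()).decode('utf-8')

args_from_data = {'metainfo': torrent_data}  # inline, base64-encoded torrent
args_from_name = {'filename': 'http://example.com/example.torrent'}  # fetched by Transmission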
@ -476,7 +475,7 @@ class Client(object):
|
|||
"""
|
||||
self._rpc_version_warning(3)
|
||||
self._request('torrent-remove',
|
||||
{'delete-local-data':rpc_bool(delete_data)}, ids, True, timeout=timeout)
|
||||
{'delete-local-data': rpc_bool(delete_data)}, ids, True, timeout=timeout)
|
||||
|
||||
def remove(self, ids, delete_data=False, timeout=None):
|
||||
"""
|
||||
|
@ -606,34 +605,34 @@ class Client(object):
|
|||
the new methods. list returns a dictionary indexed by torrent id.
|
||||
"""
|
||||
warnings.warn('list has been deprecated, please use get_torrent or get_torrents instead.', DeprecationWarning)
|
||||
fields = ['id', 'hashString', 'name', 'sizeWhenDone', 'leftUntilDone'
|
||||
, 'eta', 'status', 'rateUpload', 'rateDownload', 'uploadedEver'
|
||||
, 'downloadedEver', 'uploadRatio', 'queuePosition']
|
||||
fields = ['id', 'hashString', 'name', 'sizeWhenDone', 'leftUntilDone',
|
||||
'eta', 'status', 'rateUpload', 'rateDownload', 'uploadedEver',
|
||||
'downloadedEver', 'uploadRatio', 'queuePosition']
|
||||
return self._request('torrent-get', {'fields': fields}, timeout=timeout)
|
||||
|
||||
def get_files(self, ids=None, timeout=None):
|
||||
"""
|
||||
Get list of files for provided torrent id(s). If ids is empty,
|
||||
information for all torrents is fetched. This function returns a dictionary
|
||||
for each requested torrent id holding the information about the files.
|
||||
Get list of files for provided torrent id(s). If ids is empty,
|
||||
information for all torrents is fetched. This function returns a dictionary
|
||||
for each requested torrent id holding the information about the files.
|
||||
|
||||
::
|
||||
::
|
||||
|
||||
{
|
||||
<torrent id>: {
|
||||
<file id>: {
|
||||
'name': <file name>,
|
||||
'size': <file size in bytes>,
|
||||
'completed': <bytes completed>,
|
||||
'priority': <priority ('high'|'normal'|'low')>,
|
||||
'selected': <selected for download (True|False)>
|
||||
}
|
||||
{
|
||||
<torrent id>: {
|
||||
<file id>: {
|
||||
'name': <file name>,
|
||||
'size': <file size in bytes>,
|
||||
'completed': <bytes completed>,
|
||||
'priority': <priority ('high'|'normal'|'low')>,
|
||||
'selected': <selected for download (True|False)>
|
||||
}
|
||||
|
||||
...
|
||||
}
|
||||
...
|
||||
}
|
||||
|
||||
...
|
||||
}
|
||||
...
|
||||
}
|
||||
"""
|
||||
fields = ['id', 'name', 'hashString', 'files', 'priorities', 'wanted']
|
||||
request_result = self._request('torrent-get', {'fields': fields}, ids, timeout=timeout)
|
||||
|
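Walking the documented result shape is two nested loops, torrent id then file id (the values here are illustrative):

files = {
    1: {
        0: {'name': 'video.mkv', 'size': 1000, 'completed': 500,
            'priority': 'normal', 'selected': True},
    },
}

for torrent_id, torrent_files in files.items():
    for file_id, info in torrent_files.items():
        percent = 100.0 * info['completed'] / info['size']
        print('{0}/{1}: {2} ({3:.0f}% done)'.format(torrent_id, file_id, info['name'], percent))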
@ -645,22 +644,22 @@ class Client(object):
|
|||
def set_files(self, items, timeout=None):
|
||||
"""
|
||||
Set file properties. Takes a dictionary with contents similar to the result
|
||||
of `get_files`.
|
||||
of `get_files`.
|
||||
|
||||
::
|
||||
::
|
||||
|
||||
{
|
||||
<torrent id>: {
|
||||
<file id>: {
|
||||
'priority': <priority ('high'|'normal'|'low')>,
|
||||
'selected': <selected for download (True|False)>
|
||||
}
|
||||
{
|
||||
<torrent id>: {
|
||||
<file id>: {
|
||||
'priority': <priority ('high'|'normal'|'low')>,
|
||||
'selected': <selected for download (True|False)>
|
||||
}
|
||||
|
||||
...
|
||||
}
|
||||
...
|
||||
}
|
||||
|
||||
...
|
||||
}
|
||||
...
|
||||
}
|
||||
"""
|
||||
if not isinstance(items, dict):
|
||||
raise ValueError('Invalid file description')
|
||||
|
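The argument mirrors get_files output trimmed to the two writable keys, priority and selected. For example, to fetch one file last and skip another entirely (ids are illustrative, and a connected Client instance is assumed):

items = {
    1: {                                              # torrent id
        0: {'priority': 'low', 'selected': True},     # download, but last
        1: {'priority': 'normal', 'selected': False}, # do not download at all
    },
}
# client.set_files(items)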
@ -703,8 +702,8 @@ class Client(object):
|
|||
|
||||
def change_torrent(self, ids, timeout=None, **kwargs):
|
||||
"""
|
||||
Change torrent parameters for the torrent(s) with the supplied id's. The
|
||||
parameters are:
|
||||
Change torrent parameters for the torrent(s) with the supplied id's. The
|
||||
parameters are:
|
||||
|
||||
============================ ===== =============== =======================================================================================
|
||||
Argument RPC Replaced by Description
|
||||
|
@ -736,13 +735,13 @@ class Client(object):
|
|||
``uploadLimited`` 5 - Enable upload speed limiter.
|
||||
============================ ===== =============== =======================================================================================
|
||||
|
||||
.. NOTE::
|
||||
transmissionrpc will try to automatically fix argument errors.
|
||||
.. NOTE::
|
||||
transmissionrpc will try to automatically fix argument errors.
|
||||
"""
|
||||
args = {}
|
||||
for key, value in iteritems(kwargs):
|
||||
argument = make_rpc_name(key)
|
||||
(arg, val) = argument_value_convert('torrent-set' , argument, value, self.rpc_version)
|
||||
(arg, val) = argument_value_convert('torrent-set', argument, value, self.rpc_version)
|
||||
args[arg] = val
|
||||
|
||||
if len(args) > 0:
|
||||
|
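Each keyword is normalised by make_rpc_name and checked against the torrent-set argument map via argument_value_convert before being sent. A hedged sketch of the normalisation step, assuming make_rpc_name simply swaps Python-friendly underscores for the dashes RPC argument names use:

def make_rpc_name_sketch(key):
    return key.replace('_', '-')  # assumption about make_rpc_name's behaviour

kwargs = {'peer_limit': 30, 'uploadLimited': True}
args = dict((make_rpc_name_sketch(key), value) for key, value in kwargs.items())
print(args)  # {'peer-limit': 30, 'uploadLimited': True}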
@ -803,7 +802,7 @@ class Client(object):
|
|||
raise ValueError("Target name cannot contain a path delimiter")
|
||||
args = {'path': location, 'name': name}
|
||||
result = self._request('torrent-rename-path', args, torrent_id, True, timeout=timeout)
|
||||
return (result['path'], result['name'])
|
||||
return result['path'], result['name']
|
||||
|
||||
def queue_top(self, ids, timeout=None):
|
||||
"""Move transfer to the top of the queue."""
|
||||
|
@ -814,7 +813,7 @@ class Client(object):
|
|||
"""Move transfer to the bottom of the queue."""
|
||||
self._rpc_version_warning(14)
|
||||
self._request('queue-move-bottom', ids=ids, require_ids=True, timeout=timeout)
|
||||
|
||||
|
||||
def queue_up(self, ids, timeout=None):
|
||||
"""Move transfer up in the queue."""
|
||||
self._rpc_version_warning(14)
|
||||
|
@ -888,14 +887,14 @@ class Client(object):
|
|||
================================ ===== ================= ==========================================================================================================================
|
||||
|
||||
.. NOTE::
|
||||
transmissionrpc will try to automatically fix argument errors.
|
||||
transmissionrpc will try to automatically fix argument errors.
|
||||
"""
|
||||
args = {}
|
||||
for key, value in iteritems(kwargs):
|
||||
if key == 'encryption' and value not in ['required', 'preferred', 'tolerated']:
|
||||
raise ValueError('Invalid encryption value')
|
||||
argument = make_rpc_name(key)
|
||||
(arg, val) = argument_value_convert('session-set' , argument, value, self.rpc_version)
|
||||
(arg, val) = argument_value_convert('session-set', argument, value, self.rpc_version)
|
||||
args[arg] = val
|
||||
if len(args) > 0:
|
||||
self._request('session-set', args, timeout=timeout)
|
||||
|
|
|
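set_session runs the same make_rpc_name/argument_value_convert pipeline, with one extra guard: encryption accepts only 'required', 'preferred' or 'tolerated'. That guard on its own:

def check_encryption(value):
    if value not in ['required', 'preferred', 'tolerated']:
        raise ValueError('Invalid encryption value')
    return value

print(check_encryption('preferred'))  # passes
# check_encryption('always') would raise ValueError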
@ -6,10 +6,10 @@ import logging
|
|||
|
||||
from core.transmissionrpc.six import iteritems
|
||||
|
||||
|
||||
LOGGER = logging.getLogger('transmissionrpc')
|
||||
LOGGER.setLevel(logging.ERROR)
|
||||
|
||||
|
||||
def mirror_dict(source):
|
||||
"""
|
||||
Creates a dictionary with all values as keys and all keys as values.
|
||||
|
@ -17,38 +17,39 @@ def mirror_dict(source):
|
|||
source.update(dict((value, key) for key, value in iteritems(source)))
|
||||
return source
|
||||
|
||||
|
||||
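mirror_dict is what lets the constant tables below be read in both directions, name to value and value back to name; with the PRIORITY values, for instance:

lookup = {'low': -1, 'normal': 0, 'high': 1}
lookup.update(dict((value, key) for key, value in lookup.items()))
print(lookup['high'])  # 1
print(lookup[1])       # 'high'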
DEFAULT_PORT = 9091
|
||||
|
||||
DEFAULT_TIMEOUT = 30.0
|
||||
|
||||
TR_PRI_LOW = -1
|
||||
TR_PRI_NORMAL = 0
|
||||
TR_PRI_HIGH = 1
|
||||
TR_PRI_LOW = -1
|
||||
TR_PRI_NORMAL = 0
|
||||
TR_PRI_HIGH = 1
|
||||
|
||||
PRIORITY = mirror_dict({
|
||||
'low' : TR_PRI_LOW,
|
||||
'normal' : TR_PRI_NORMAL,
|
||||
'high' : TR_PRI_HIGH
|
||||
'low': TR_PRI_LOW,
|
||||
'normal': TR_PRI_NORMAL,
|
||||
'high': TR_PRI_HIGH
|
||||
})
|
||||
|
||||
TR_RATIOLIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_RATIOLIMIT_SINGLE = 1 # override the global settings, seeding until a certain ratio
|
||||
TR_RATIOLIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of ratio
|
||||
TR_RATIOLIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_RATIOLIMIT_SINGLE = 1 # override the global settings, seeding until a certain ratio
|
||||
TR_RATIOLIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of ratio
|
||||
|
||||
RATIO_LIMIT = mirror_dict({
|
||||
'global' : TR_RATIOLIMIT_GLOBAL,
|
||||
'single' : TR_RATIOLIMIT_SINGLE,
|
||||
'unlimited' : TR_RATIOLIMIT_UNLIMITED
|
||||
'global': TR_RATIOLIMIT_GLOBAL,
|
||||
'single': TR_RATIOLIMIT_SINGLE,
|
||||
'unlimited': TR_RATIOLIMIT_UNLIMITED
|
||||
})
|
||||
|
||||
TR_IDLELIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_IDLELIMIT_SINGLE = 1 # override the global settings, seeding until a certain idle time
|
||||
TR_IDLELIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of activity
|
||||
TR_IDLELIMIT_GLOBAL = 0 # follow the global settings
|
||||
TR_IDLELIMIT_SINGLE = 1 # override the global settings, seeding until a certain idle time
|
||||
TR_IDLELIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of activity
|
||||
|
||||
IDLE_LIMIT = mirror_dict({
|
||||
'global' : TR_IDLELIMIT_GLOBAL,
|
||||
'single' : TR_IDLELIMIT_SINGLE,
|
||||
'unlimited' : TR_IDLELIMIT_UNLIMITED
|
||||
'global': TR_IDLELIMIT_GLOBAL,
|
||||
'single': TR_IDLELIMIT_SINGLE,
|
||||
'unlimited': TR_IDLELIMIT_UNLIMITED
|
||||
})
|
||||
|
||||
# A note on argument maps
|
||||
|
@ -62,236 +63,266 @@ IDLE_LIMIT = mirror_dict({
|
|||
|
||||
# Arguments for torrent methods
|
||||
TORRENT_ARGS = {
|
||||
'get' : {
|
||||
'activityDate': ('number', 1, None, None, None, 'Last time of upload or download activity.'),
|
||||
'addedDate': ('number', 1, None, None, None, 'The date when this torrent was first added.'),
|
||||
'announceResponse': ('string', 1, 7, None, None, 'The announce message from the tracker.'),
|
||||
'announceURL': ('string', 1, 7, None, None, 'Current announce URL.'),
|
||||
'bandwidthPriority': ('number', 5, None, None, None, 'Bandwidth priority. Low (-1), Normal (0) or High (1).'),
|
||||
'comment': ('string', 1, None, None, None, 'Torrent comment.'),
|
||||
'corruptEver': ('number', 1, None, None, None, 'Number of bytes of corrupt data downloaded.'),
|
||||
'creator': ('string', 1, None, None, None, 'Torrent creator.'),
|
||||
'dateCreated': ('number', 1, None, None, None, 'Torrent creation date.'),
|
||||
'desiredAvailable': ('number', 1, None, None, None, 'Number of bytes available and left to be downloaded.'),
|
||||
'doneDate': ('number', 1, None, None, None, 'The date when the torrent finished downloading.'),
|
||||
'downloadDir': ('string', 4, None, None, None, 'The directory path where the torrent is downloaded to.'),
|
||||
'downloadedEver': ('number', 1, None, None, None, 'Number of bytes of good data downloaded.'),
|
||||
'downloaders': ('number', 4, 7, None, None, 'Number of downloaders.'),
|
||||
'downloadLimit': ('number', 1, None, None, None, 'Download limit in Kbps.'),
|
||||
'downloadLimited': ('boolean', 5, None, None, None, 'Download limit is enabled'),
|
||||
'downloadLimitMode': ('number', 1, 5, None, None, 'Download limit mode. 0 means global, 1 means single, 2 unlimited.'),
|
||||
'error': ('number', 1, None, None, None, 'Kind of error. 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.'),
|
||||
'errorString': ('number', 1, None, None, None, 'Error message.'),
|
||||
'eta': ('number', 1, None, None, None, 'Estimated number of seconds left when downloading or seeding. -1 means not available and -2 means unknown.'),
|
||||
'etaIdle': ('number', 15, None, None, None, 'Estimated number of seconds left until the idle time limit is reached. -1 means not available and -2 means unknown.'),
|
||||
'files': ('array', 1, None, None, None, 'Array of file object containing key, bytesCompleted, length and name.'),
|
||||
'fileStats': ('array', 5, None, None, None, 'Array of file statistics containing bytesCompleted, wanted and priority.'),
|
||||
'hashString': ('string', 1, None, None, None, 'Hashstring unique for the torrent even between sessions.'),
|
||||
'haveUnchecked': ('number', 1, None, None, None, 'Number of bytes of partial pieces.'),
|
||||
'haveValid': ('number', 1, None, None, None, 'Number of bytes of checksum verified data.'),
|
||||
'honorsSessionLimits': ('boolean', 5, None, None, None, 'True if session upload limits are honored'),
|
||||
'id': ('number', 1, None, None, None, 'Session unique torrent id.'),
|
||||
'isFinished': ('boolean', 9, None, None, None, 'True if the torrent is finished. Downloaded and seeded.'),
|
||||
'isPrivate': ('boolean', 1, None, None, None, 'True if the torrent is private.'),
|
||||
'isStalled': ('boolean', 14, None, None, None, 'True if the torrent has stalled (been idle for a long time).'),
|
||||
'lastAnnounceTime': ('number', 1, 7, None, None, 'The time of the last announcement.'),
|
||||
'lastScrapeTime': ('number', 1, 7, None, None, 'The time of the last successful scrape.'),
|
||||
'leechers': ('number', 1, 7, None, None, 'Number of leechers.'),
|
||||
'leftUntilDone': ('number', 1, None, None, None, 'Number of bytes left until the download is done.'),
|
||||
'magnetLink': ('string', 7, None, None, None, 'The magnet link for this torrent.'),
|
||||
'manualAnnounceTime': ('number', 1, None, None, None, 'The time until you manually ask for more peers.'),
|
||||
'maxConnectedPeers': ('number', 1, None, None, None, 'Maximum of connected peers.'),
|
||||
'metadataPercentComplete': ('number', 7, None, None, None, 'Download progress of metadata. 0.0 to 1.0.'),
|
||||
'name': ('string', 1, None, None, None, 'Torrent name.'),
|
||||
'nextAnnounceTime': ('number', 1, 7, None, None, 'Next announce time.'),
|
||||
'nextScrapeTime': ('number', 1, 7, None, None, 'Next scrape time.'),
|
||||
'peer-limit': ('number', 5, None, None, None, 'Maximum number of peers.'),
|
||||
'peers': ('array', 2, None, None, None, 'Array of peer objects.'),
|
||||
'peersConnected': ('number', 1, None, None, None, 'Number of peers we are connected to.'),
|
||||
'peersFrom': ('object', 1, None, None, None, 'Object containing download peers counts for different peer types.'),
|
||||
'peersGettingFromUs': ('number', 1, None, None, None, 'Number of peers we are sending data to.'),
|
||||
'peersKnown': ('number', 1, 13, None, None, 'Number of peers that the tracker knows.'),
|
||||
'peersSendingToUs': ('number', 1, None, None, None, 'Number of peers sending to us'),
|
||||
'percentDone': ('double', 5, None, None, None, 'Download progress of selected files. 0.0 to 1.0.'),
|
||||
'pieces': ('string', 5, None, None, None, 'String with base64 encoded bitfield indicating finished pieces.'),
|
||||
'pieceCount': ('number', 1, None, None, None, 'Number of pieces.'),
|
||||
'pieceSize': ('number', 1, None, None, None, 'Number of bytes in a piece.'),
|
||||
'priorities': ('array', 1, None, None, None, 'Array of file priorities.'),
|
||||
'queuePosition': ('number', 14, None, None, None, 'The queue position.'),
|
||||
'rateDownload': ('number', 1, None, None, None, 'Download rate in bps.'),
|
||||
'rateUpload': ('number', 1, None, None, None, 'Upload rate in bps.'),
|
||||
'recheckProgress': ('double', 1, None, None, None, 'Progress of recheck. 0.0 to 1.0.'),
|
||||
'secondsDownloading': ('number', 15, None, None, None, ''),
|
||||
'secondsSeeding': ('number', 15, None, None, None, ''),
|
||||
'scrapeResponse': ('string', 1, 7, None, None, 'Scrape response message.'),
|
||||
'scrapeURL': ('string', 1, 7, None, None, 'Current scrape URL'),
|
||||
'seeders': ('number', 1, 7, None, None, 'Number of seeders reported by the tracker.'),
|
||||
'seedIdleLimit': ('number', 10, None, None, None, 'Idle limit in minutes.'),
|
||||
'seedIdleMode': ('number', 10, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
|
||||
'seedRatioLimit': ('double', 5, None, None, None, 'Seed ratio limit.'),
|
||||
'seedRatioMode': ('number', 5, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
|
||||
'sizeWhenDone': ('number', 1, None, None, None, 'Size of the torrent download in bytes.'),
|
||||
'startDate': ('number', 1, None, None, None, 'The date when the torrent was last started.'),
|
||||
'status': ('number', 1, None, None, None, 'Current status, see source'),
|
||||
'swarmSpeed': ('number', 1, 7, None, None, 'Estimated speed in Kbps in the swarm.'),
|
||||
'timesCompleted': ('number', 1, 7, None, None, 'Number of successful downloads reported by the tracker.'),
|
||||
'trackers': ('array', 1, None, None, None, 'Array of tracker objects.'),
|
||||
'trackerStats': ('object', 7, None, None, None, 'Array of object containing tracker statistics.'),
|
||||
'totalSize': ('number', 1, None, None, None, 'Total size of the torrent in bytes'),
|
||||
'torrentFile': ('string', 5, None, None, None, 'Path to .torrent file.'),
|
||||
'uploadedEver': ('number', 1, None, None, None, 'Number of bytes uploaded, ever.'),
|
||||
'uploadLimit': ('number', 1, None, None, None, 'Upload limit in Kbps'),
|
||||
'uploadLimitMode': ('number', 1, 5, None, None, 'Upload limit mode. 0 means global, 1 means single, 2 unlimited.'),
|
||||
'uploadLimited': ('boolean', 5, None, None, None, 'Upload limit enabled.'),
|
||||
'uploadRatio': ('double', 1, None, None, None, 'Seed ratio.'),
|
||||
'wanted': ('array', 1, None, None, None, 'Array of booleans indicating wanted files.'),
|
||||
'webseeds': ('array', 1, None, None, None, 'Array of webseeds objects'),
|
||||
'webseedsSendingToUs': ('number', 1, None, None, None, 'Number of webseeds seeding to us.'),
|
||||
'get': {
|
||||
'activityDate': ('number', 1, None, None, None, 'Last time of upload or download activity.'),
|
||||
'addedDate': ('number', 1, None, None, None, 'The date when this torrent was first added.'),
|
||||
'announceResponse': ('string', 1, 7, None, None, 'The announce message from the tracker.'),
|
||||
'announceURL': ('string', 1, 7, None, None, 'Current announce URL.'),
|
||||
'bandwidthPriority': ('number', 5, None, None, None, 'Bandwidth priority. Low (-1), Normal (0) or High (1).'),
|
||||
'comment': ('string', 1, None, None, None, 'Torrent comment.'),
|
||||
'corruptEver': ('number', 1, None, None, None, 'Number of bytes of corrupt data downloaded.'),
|
||||
'creator': ('string', 1, None, None, None, 'Torrent creator.'),
|
||||
'dateCreated': ('number', 1, None, None, None, 'Torrent creation date.'),
|
||||
'desiredAvailable': ('number', 1, None, None, None, 'Number of bytes available and left to be downloaded.'),
|
||||
'doneDate': ('number', 1, None, None, None, 'The date when the torrent finished downloading.'),
|
||||
'downloadDir': ('string', 4, None, None, None, 'The directory path where the torrent is downloaded to.'),
|
||||
'downloadedEver': ('number', 1, None, None, None, 'Number of bytes of good data downloaded.'),
|
||||
'downloaders': ('number', 4, 7, None, None, 'Number of downloaders.'),
|
||||
'downloadLimit': ('number', 1, None, None, None, 'Download limit in Kbps.'),
|
||||
'downloadLimited': ('boolean', 5, None, None, None, 'Download limit is enabled'),
|
||||
'downloadLimitMode': (
|
||||
'number', 1, 5, None, None, 'Download limit mode. 0 means global, 1 means single, 2 unlimited.'),
|
||||
'error': ('number', 1, None, None, None,
|
||||
'Kind of error. 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.'),
|
||||
'errorString': ('number', 1, None, None, None, 'Error message.'),
|
||||
'eta': ('number', 1, None, None, None,
|
||||
'Estimated number of seconds left when downloading or seeding. -1 means not available and -2 means unknown.'),
|
||||
'etaIdle': ('number', 15, None, None, None,
|
||||
'Estimated number of seconds left until the idle time limit is reached. -1 means not available and -2 means unknown.'),
|
||||
'files': (
|
||||
'array', 1, None, None, None, 'Array of file object containing key, bytesCompleted, length and name.'),
|
||||
'fileStats': (
|
||||
'array', 5, None, None, None, 'Array of file statistics containing bytesCompleted, wanted and priority.'),
|
||||
'hashString': ('string', 1, None, None, None, 'Hashstring unique for the torrent even between sessions.'),
|
||||
'haveUnchecked': ('number', 1, None, None, None, 'Number of bytes of partial pieces.'),
|
||||
'haveValid': ('number', 1, None, None, None, 'Number of bytes of checksum verified data.'),
|
||||
'honorsSessionLimits': ('boolean', 5, None, None, None, 'True if session upload limits are honored'),
|
||||
'id': ('number', 1, None, None, None, 'Session unique torrent id.'),
|
||||
'isFinished': ('boolean', 9, None, None, None, 'True if the torrent is finished. Downloaded and seeded.'),
|
||||
'isPrivate': ('boolean', 1, None, None, None, 'True if the torrent is private.'),
|
||||
'isStalled': ('boolean', 14, None, None, None, 'True if the torrent has stalled (been idle for a long time).'),
|
||||
'lastAnnounceTime': ('number', 1, 7, None, None, 'The time of the last announcement.'),
|
||||
'lastScrapeTime': ('number', 1, 7, None, None, 'The time of the last successful scrape.'),
|
||||
'leechers': ('number', 1, 7, None, None, 'Number of leechers.'),
|
||||
'leftUntilDone': ('number', 1, None, None, None, 'Number of bytes left until the download is done.'),
|
||||
'magnetLink': ('string', 7, None, None, None, 'The magnet link for this torrent.'),
|
||||
'manualAnnounceTime': ('number', 1, None, None, None, 'The time until you manually ask for more peers.'),
|
||||
'maxConnectedPeers': ('number', 1, None, None, None, 'Maximum number of connected peers.'),
|
||||
'metadataPercentComplete': ('number', 7, None, None, None, 'Download progress of metadata. 0.0 to 1.0.'),
|
||||
'name': ('string', 1, None, None, None, 'Torrent name.'),
|
||||
'nextAnnounceTime': ('number', 1, 7, None, None, 'Next announce time.'),
|
||||
'nextScrapeTime': ('number', 1, 7, None, None, 'Next scrape time.'),
|
||||
'peer-limit': ('number', 5, None, None, None, 'Maximum number of peers.'),
|
||||
'peers': ('array', 2, None, None, None, 'Array of peer objects.'),
|
||||
'peersConnected': ('number', 1, None, None, None, 'Number of peers we are connected to.'),
|
||||
'peersFrom': (
|
||||
'object', 1, None, None, None, 'Object containing download peers counts for different peer types.'),
|
||||
'peersGettingFromUs': ('number', 1, None, None, None, 'Number of peers we are sending data to.'),
|
||||
'peersKnown': ('number', 1, 13, None, None, 'Number of peers that the tracker knows.'),
|
||||
'peersSendingToUs': ('number', 1, None, None, None, 'Number of peers sending to us'),
|
||||
'percentDone': ('double', 5, None, None, None, 'Download progress of selected files. 0.0 to 1.0.'),
|
||||
'pieces': ('string', 5, None, None, None, 'String with base64 encoded bitfield indicating finished pieces.'),
|
||||
'pieceCount': ('number', 1, None, None, None, 'Number of pieces.'),
|
||||
'pieceSize': ('number', 1, None, None, None, 'Number of bytes in a piece.'),
|
||||
'priorities': ('array', 1, None, None, None, 'Array of file priorities.'),
|
||||
'queuePosition': ('number', 14, None, None, None, 'The queue position.'),
|
||||
'rateDownload': ('number', 1, None, None, None, 'Download rate in bps.'),
|
||||
'rateUpload': ('number', 1, None, None, None, 'Upload rate in bps.'),
|
||||
'recheckProgress': ('double', 1, None, None, None, 'Progress of recheck. 0.0 to 1.0.'),
|
||||
'secondsDownloading': ('number', 15, None, None, None, ''),
|
||||
'secondsSeeding': ('number', 15, None, None, None, ''),
|
||||
'scrapeResponse': ('string', 1, 7, None, None, 'Scrape response message.'),
|
||||
'scrapeURL': ('string', 1, 7, None, None, 'Current scrape URL'),
|
||||
'seeders': ('number', 1, 7, None, None, 'Number of seeders reported by the tracker.'),
|
||||
'seedIdleLimit': ('number', 10, None, None, None, 'Idle limit in minutes.'),
|
||||
'seedIdleMode': ('number', 10, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
|
||||
'seedRatioLimit': ('double', 5, None, None, None, 'Seed ratio limit.'),
|
||||
'seedRatioMode': ('number', 5, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'),
|
||||
'sizeWhenDone': ('number', 1, None, None, None, 'Size of the torrent download in bytes.'),
|
||||
'startDate': ('number', 1, None, None, None, 'The date when the torrent was last started.'),
|
||||
'status': ('number', 1, None, None, None, 'Current status, see source'),
|
||||
'swarmSpeed': ('number', 1, 7, None, None, 'Estimated speed in Kbps in the swarm.'),
|
||||
'timesCompleted': ('number', 1, 7, None, None, 'Number of successful downloads reported by the tracker.'),
|
||||
'trackers': ('array', 1, None, None, None, 'Array of tracker objects.'),
|
||||
'trackerStats': ('object', 7, None, None, None, 'Array of object containing tracker statistics.'),
|
||||
'totalSize': ('number', 1, None, None, None, 'Total size of the torrent in bytes'),
|
||||
'torrentFile': ('string', 5, None, None, None, 'Path to .torrent file.'),
|
||||
'uploadedEver': ('number', 1, None, None, None, 'Number of bytes uploaded, ever.'),
|
||||
'uploadLimit': ('number', 1, None, None, None, 'Upload limit in Kbps'),
|
||||
'uploadLimitMode': (
|
||||
'number', 1, 5, None, None, 'Upload limit mode. 0 means global, 1 means single, 2 unlimited.'),
|
||||
'uploadLimited': ('boolean', 5, None, None, None, 'Upload limit enabled.'),
|
||||
'uploadRatio': ('double', 1, None, None, None, 'Seed ratio.'),
|
||||
'wanted': ('array', 1, None, None, None, 'Array of booleans indicating wanted files.'),
|
||||
'webseeds': ('array', 1, None, None, None, 'Array of webseeds objects'),
|
||||
'webseedsSendingToUs': ('number', 1, None, None, None, 'Number of webseeds seeding to us.'),
|
||||
},
|
||||
'set': {
|
||||
'bandwidthPriority': ('number', 5, None, None, None, 'Priority for this transfer.'),
|
||||
'downloadLimit': ('number', 5, None, 'speed-limit-down', None, 'Set the speed limit for download in Kib/s.'),
|
||||
'downloadLimited': ('boolean', 5, None, 'speed-limit-down-enabled', None, 'Enable download speed limiter.'),
|
||||
'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
|
||||
'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
|
||||
'honorsSessionLimits': ('boolean', 5, None, None, None, "Enables or disables the transfer to honour the upload limit set in the session."),
|
||||
'location': ('array', 1, None, None, None, 'Local download location.'),
|
||||
'peer-limit': ('number', 1, None, None, None, 'The peer limit for the torrents.'),
|
||||
'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
|
||||
'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
|
||||
'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
|
||||
'queuePosition': ('number', 14, None, None, None, 'Position of this transfer in its queue.'),
|
||||
'seedIdleLimit': ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
|
||||
'seedIdleMode': ('number', 10, None, None, None, 'Seed inactivity mode. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
|
||||
'seedRatioLimit': ('double', 5, None, None, None, 'Seeding ratio.'),
|
||||
'seedRatioMode': ('number', 5, None, None, None, 'Which ratio to use. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
|
||||
'speed-limit-down': ('number', 1, 5, None, 'downloadLimit', 'Set the speed limit for download in Kib/s.'),
|
||||
'speed-limit-down-enabled': ('boolean', 1, 5, None, 'downloadLimited', 'Enable download speed limiter.'),
|
||||
'speed-limit-up': ('number', 1, 5, None, 'uploadLimit', 'Set the speed limit for upload in Kib/s.'),
|
||||
'speed-limit-up-enabled': ('boolean', 1, 5, None, 'uploadLimited', 'Enable upload speed limiter.'),
|
||||
'trackerAdd': ('array', 10, None, None, None, 'Array of string with announce URLs to add.'),
|
||||
'trackerRemove': ('array', 10, None, None, None, 'Array of ids of trackers to remove.'),
|
||||
'trackerReplace': ('array', 10, None, None, None, 'Array of (id, url) tuples where the announce URL should be replaced.'),
|
||||
'uploadLimit': ('number', 5, None, 'speed-limit-up', None, 'Set the speed limit for upload in Kib/s.'),
|
||||
'uploadLimited': ('boolean', 5, None, 'speed-limit-up-enabled', None, 'Enable upload speed limiter.'),
|
||||
'bandwidthPriority': ('number', 5, None, None, None, 'Priority for this transfer.'),
|
||||
'downloadLimit': ('number', 5, None, 'speed-limit-down', None, 'Set the speed limit for download in Kib/s.'),
|
||||
'downloadLimited': ('boolean', 5, None, 'speed-limit-down-enabled', None, 'Enable download speed limiter.'),
|
||||
'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
|
||||
'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
|
||||
'honorsSessionLimits': ('boolean', 5, None, None, None,
|
||||
"Enables or disables the transfer to honour the upload limit set in the session."),
|
||||
'location': ('array', 1, None, None, None, 'Local download location.'),
|
||||
'peer-limit': ('number', 1, None, None, None, 'The peer limit for the torrents.'),
|
||||
'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
|
||||
'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
|
||||
'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
|
||||
'queuePosition': ('number', 14, None, None, None, 'Position of this transfer in its queue.'),
|
||||
'seedIdleLimit': ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
|
||||
'seedIdleMode': ('number', 10, None, None, None,
|
||||
'Seed inactivity mode. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
|
||||
'seedRatioLimit': ('double', 5, None, None, None, 'Seeding ratio.'),
|
||||
'seedRatioMode': ('number', 5, None, None, None,
|
||||
'Which ratio to use. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'),
|
||||
'speed-limit-down': ('number', 1, 5, None, 'downloadLimit', 'Set the speed limit for download in Kib/s.'),
|
||||
'speed-limit-down-enabled': ('boolean', 1, 5, None, 'downloadLimited', 'Enable download speed limiter.'),
|
||||
'speed-limit-up': ('number', 1, 5, None, 'uploadLimit', 'Set the speed limit for upload in Kib/s.'),
|
||||
'speed-limit-up-enabled': ('boolean', 1, 5, None, 'uploadLimited', 'Enable upload speed limiter.'),
|
||||
'trackerAdd': ('array', 10, None, None, None, 'Array of string with announce URLs to add.'),
|
||||
'trackerRemove': ('array', 10, None, None, None, 'Array of ids of trackers to remove.'),
|
||||
'trackerReplace': (
|
||||
'array', 10, None, None, None, 'Array of (id, url) tuples where the announce URL should be replaced.'),
|
||||
'uploadLimit': ('number', 5, None, 'speed-limit-up', None, 'Set the speed limit for upload in Kib/s.'),
|
||||
'uploadLimited': ('boolean', 5, None, 'speed-limit-up-enabled', None, 'Enable upload speed limiter.'),
|
||||
},
|
||||
'add': {
|
||||
'bandwidthPriority': ('number', 8, None, None, None, 'Priority for this transfer.'),
|
||||
'download-dir': ('string', 1, None, None, None, 'The directory where the downloaded contents will be saved.'),
|
||||
'cookies': ('string', 13, None, None, None, 'One or more HTTP cookie(s).'),
|
||||
'filename': ('string', 1, None, None, None, "A file path or URL to a torrent file or a magnet link."),
|
||||
'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
|
||||
'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
|
||||
'metainfo': ('string', 1, None, None, None, 'The content of a torrent file, base64 encoded.'),
|
||||
'paused': ('boolean', 1, None, None, None, 'If True, does not start the transfer when added.'),
|
||||
'peer-limit': ('number', 1, None, None, None, 'Maximum number of peers allowed.'),
|
||||
'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
|
||||
'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
|
||||
'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
|
||||
'bandwidthPriority': ('number', 8, None, None, None, 'Priority for this transfer.'),
|
||||
'download-dir': (
|
||||
'string', 1, None, None, None, 'The directory where the downloaded contents will be saved.'),
|
||||
'cookies': ('string', 13, None, None, None, 'One or more HTTP cookie(s).'),
|
||||
'filename': ('string', 1, None, None, None, "A file path or URL to a torrent file or a magnet link."),
|
||||
'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."),
|
||||
'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."),
|
||||
'metainfo': ('string', 1, None, None, None, 'The content of a torrent file, base64 encoded.'),
|
||||
'paused': ('boolean', 1, None, None, None, 'If True, does not start the transfer when added.'),
|
||||
'peer-limit': ('number', 1, None, None, None, 'Maximum number of peers allowed.'),
|
||||
'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."),
|
||||
'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."),
|
||||
'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."),
|
||||
}
|
||||
}
|
||||
|
||||
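Each entry above is a six-field tuple. Judging from pairs like 'peer-limit' and 'peer-limit-global', the fields read (type, RPC version added, RPC version removed, previous name, successor name, description); that reading is inferred from the data, not stated here. Unpacking one entry:

# Inferred field meanings; 'peer-limit' was replaced in RPC version 5.
arg_type, added, removed, previous, successor, description = (
    'number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.')
print('{0}: RPC {1} through {2}, then {3}'.format(arg_type, added, removed - 1, successor))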
# Arguments for session methods
|
||||
SESSION_ARGS = {
|
||||
'get': {
|
||||
"alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
|
||||
"alt-speed-enabled": ('boolean', 5, None, None, None, 'True if alternate global download speed limiter is ebabled.'),
|
||||
"alt-speed-time-begin": ('number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
|
||||
"alt-speed-time-enabled": ('boolean', 5, None, None, None, 'True if alternate speeds scheduling is enabled.'),
|
||||
"alt-speed-time-end": ('number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
|
||||
"alt-speed-time-day": ('number', 5, None, None, None, 'Days alternate speeds scheduling is enabled.'),
|
||||
"alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s)'),
|
||||
"blocklist-enabled": ('boolean', 5, None, None, None, 'True when blocklist is enabled.'),
|
||||
"blocklist-size": ('number', 5, None, None, None, 'Number of rules in the blocklist'),
|
||||
"blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
|
||||
"cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
|
||||
"config-dir": ('string', 8, None, None, None, 'location of transmissions configuration directory'),
|
||||
"dht-enabled": ('boolean', 6, None, None, None, 'True if DHT enabled.'),
|
||||
"download-dir": ('string', 1, None, None, None, 'The download directory.'),
|
||||
"download-dir-free-space": ('number', 12, None, None, None, 'Free space in the download directory, in bytes'),
|
||||
"download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
|
||||
"download-queue-enabled": ('boolean', 14, None, None, None, 'True if the download queue is enabled.'),
|
||||
"encryption": ('string', 1, None, None, None, 'Encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
|
||||
"idle-seeding-limit": ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
|
||||
"idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'True if the seed activity limit is enabled.'),
|
||||
"incomplete-dir": ('string', 7, None, None, None, 'The path to the directory for incomplete torrent transfer data.'),
|
||||
"incomplete-dir-enabled": ('boolean', 7, None, None, None, 'True if the incomplete dir is enabled.'),
|
||||
"lpd-enabled": ('boolean', 9, None, None, None, 'True if local peer discovery is enabled.'),
|
||||
"peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
|
||||
"peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
|
||||
"peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
|
||||
"pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'True if PEX is allowed.'),
|
||||
"pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'True if PEX is enabled.'),
|
||||
"port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
|
||||
"peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
|
||||
"peer-port-random-on-start": ('boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
|
||||
"port-forwarding-enabled": ('boolean', 1, None, None, None, 'True if port forwarding is enabled.'),
|
||||
"queue-stalled-minutes": ('number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
|
||||
"queue-stalled-enabled": ('boolean', 14, None, None, None, 'True if stalled tracking of transfers is enabled.'),
|
||||
"rename-partial-files": ('boolean', 8, None, None, None, 'True if ".part" is appended to incomplete files'),
|
||||
"rpc-version": ('number', 4, None, None, None, 'Transmission RPC API Version.'),
|
||||
"rpc-version-minimum": ('number', 4, None, None, None, 'Minimum accepted RPC API Version.'),
|
||||
"script-torrent-done-enabled": ('boolean', 9, None, None, None, 'True if the done script is enabled.'),
|
||||
"script-torrent-done-filename": ('string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
|
||||
"seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
|
||||
"seedRatioLimited": ('boolean', 5, None, None, None, 'True if seed ration limit is enabled.'),
|
||||
"seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
|
||||
"seed-queue-enabled": ('boolean', 14, None, None, None, 'True if upload queue is enabled.'),
|
||||
"speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
|
||||
"speed-limit-down-enabled": ('boolean', 1, None, None, None, 'True if the download speed is limited.'),
|
||||
"speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
|
||||
"speed-limit-up-enabled": ('boolean', 1, None, None, None, 'True if the upload speed is limited.'),
|
||||
"start-added-torrents": ('boolean', 9, None, None, None, 'When true uploaded torrents will start right away.'),
|
||||
"trash-original-torrent-files": ('boolean', 9, None, None, None, 'When true added .torrent files will be deleted.'),
|
||||
'units': ('object', 10, None, None, None, 'An object containing units for size and speed.'),
|
||||
'utp-enabled': ('boolean', 13, None, None, None, 'True if Micro Transport Protocol (UTP) is enabled.'),
|
||||
"version": ('string', 3, None, None, None, 'Transmission version.'),
|
||||
"alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
|
||||
"alt-speed-enabled": (
|
||||
'boolean', 5, None, None, None, 'True if alternate global download speed limiter is enabled.'),
|
||||
"alt-speed-time-begin": (
|
||||
'number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
|
||||
"alt-speed-time-enabled": ('boolean', 5, None, None, None, 'True if alternate speeds scheduling is enabled.'),
|
||||
"alt-speed-time-end": (
|
||||
'number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
|
||||
"alt-speed-time-day": ('number', 5, None, None, None, 'Days alternate speeds scheduling is enabled.'),
|
||||
"alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s)'),
|
||||
"blocklist-enabled": ('boolean', 5, None, None, None, 'True when blocklist is enabled.'),
|
||||
"blocklist-size": ('number', 5, None, None, None, 'Number of rules in the blocklist'),
|
||||
"blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
|
||||
"cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
|
||||
"config-dir": ('string', 8, None, None, None, 'location of transmissions configuration directory'),
|
||||
"dht-enabled": ('boolean', 6, None, None, None, 'True if DHT enabled.'),
|
||||
"download-dir": ('string', 1, None, None, None, 'The download directory.'),
|
||||
"download-dir-free-space": ('number', 12, None, None, None, 'Free space in the download directory, in bytes'),
|
||||
"download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
|
||||
"download-queue-enabled": ('boolean', 14, None, None, None, 'True if the download queue is enabled.'),
|
||||
"encryption": (
|
||||
'string', 1, None, None, None, 'Encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
|
||||
"idle-seeding-limit": ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'),
|
||||
"idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'True if the seed activity limit is enabled.'),
|
||||
"incomplete-dir": (
|
||||
'string', 7, None, None, None, 'The path to the directory for incomplete torrent transfer data.'),
|
||||
"incomplete-dir-enabled": ('boolean', 7, None, None, None, 'True if the incomplete dir is enabled.'),
|
||||
"lpd-enabled": ('boolean', 9, None, None, None, 'True if local peer discovery is enabled.'),
|
||||
"peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
|
||||
"peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
|
||||
"peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
|
||||
"pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'True if PEX is allowed.'),
|
||||
"pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'True if PEX is enabled.'),
|
||||
"port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
|
||||
"peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
|
||||
"peer-port-random-on-start": (
|
||||
'boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
|
||||
"port-forwarding-enabled": ('boolean', 1, None, None, None, 'True if port forwarding is enabled.'),
|
||||
"queue-stalled-minutes": (
|
||||
'number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
|
||||
"queue-stalled-enabled": ('boolean', 14, None, None, None, 'True if stalled tracking of transfers is enabled.'),
|
||||
"rename-partial-files": ('boolean', 8, None, None, None, 'True if ".part" is appended to incomplete files'),
|
||||
"rpc-version": ('number', 4, None, None, None, 'Transmission RPC API Version.'),
|
||||
"rpc-version-minimum": ('number', 4, None, None, None, 'Minimum accepted RPC API Version.'),
|
||||
"script-torrent-done-enabled": ('boolean', 9, None, None, None, 'True if the done script is enabled.'),
|
||||
"script-torrent-done-filename": (
|
||||
'string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
|
||||
"seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
|
||||
"seedRatioLimited": ('boolean', 5, None, None, None, 'True if seed ration limit is enabled.'),
|
||||
"seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
|
||||
"seed-queue-enabled": ('boolean', 14, None, None, None, 'True if upload queue is enabled.'),
|
||||
"speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
|
||||
"speed-limit-down-enabled": ('boolean', 1, None, None, None, 'True if the download speed is limited.'),
|
||||
"speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
|
||||
"speed-limit-up-enabled": ('boolean', 1, None, None, None, 'True if the upload speed is limited.'),
|
||||
"start-added-torrents": ('boolean', 9, None, None, None, 'When true uploaded torrents will start right away.'),
|
||||
"trash-original-torrent-files": (
|
||||
'boolean', 9, None, None, None, 'When true added .torrent files will be deleted.'),
|
||||
'units': ('object', 10, None, None, None, 'An object containing units for size and speed.'),
|
||||
'utp-enabled': ('boolean', 13, None, None, None, 'True if Micro Transport Protocol (UTP) is enabled.'),
|
||||
"version": ('string', 3, None, None, None, 'Transmission version.'),
|
||||
},
|
||||
'set': {
|
||||
"alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
|
||||
"alt-speed-enabled": ('boolean', 5, None, None, None, 'Enables alternate global download speed limiter.'),
|
||||
"alt-speed-time-begin": ('number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
|
||||
"alt-speed-time-enabled": ('boolean', 5, None, None, None, 'Enables alternate speeds scheduling.'),
|
||||
"alt-speed-time-end": ('number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
|
||||
"alt-speed-time-day": ('number', 5, None, None, None, 'Enables alternate speeds scheduling these days.'),
|
||||
"alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s).'),
|
||||
"blocklist-enabled": ('boolean', 5, None, None, None, 'Enables the block list'),
|
||||
"blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
|
||||
"cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
|
||||
"dht-enabled": ('boolean', 6, None, None, None, 'Enables DHT.'),
|
||||
"download-dir": ('string', 1, None, None, None, 'Set the session download directory.'),
|
||||
"download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
|
||||
"download-queue-enabled": ('boolean', 14, None, None, None, 'Enables download queue.'),
|
||||
"encryption": ('string', 1, None, None, None, 'Set the session encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
|
||||
"idle-seeding-limit": ('number', 10, None, None, None, 'The default seed inactivity limit in minutes.'),
|
||||
"idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'Enables the default seed inactivity limit'),
|
||||
"incomplete-dir": ('string', 7, None, None, None, 'The path to the directory of incomplete transfer data.'),
|
||||
"incomplete-dir-enabled": ('boolean', 7, None, None, None, 'Enables the incomplete transfer data directory. Otherwise data for incomplete transfers are stored in the download target.'),
|
||||
"lpd-enabled": ('boolean', 9, None, None, None, 'Enables local peer discovery for public torrents.'),
|
||||
"peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
|
||||
"peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
|
||||
"peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
|
||||
"pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'Allowing PEX in public torrents.'),
|
||||
"pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'Allowing PEX in public torrents.'),
|
||||
"port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
|
||||
"peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
|
||||
"peer-port-random-on-start": ('boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
|
||||
"port-forwarding-enabled": ('boolean', 1, None, None, None, 'Enables port forwarding.'),
|
||||
"rename-partial-files": ('boolean', 8, None, None, None, 'Appends ".part" to incomplete files'),
|
||||
"queue-stalled-minutes": ('number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
|
||||
"queue-stalled-enabled": ('boolean', 14, None, None, None, 'Enable tracking of stalled transfers.'),
|
||||
"script-torrent-done-enabled": ('boolean', 9, None, None, None, 'Whether or not to call the "done" script.'),
|
||||
"script-torrent-done-filename": ('string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
|
||||
"seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
|
||||
"seed-queue-enabled": ('boolean', 14, None, None, None, 'Enables upload queue.'),
|
||||
"seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
|
||||
"seedRatioLimited": ('boolean', 5, None, None, None, 'Enables seed ration limit.'),
|
||||
"speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
|
||||
"speed-limit-down-enabled": ('boolean', 1, None, None, None, 'Enables download speed limiting.'),
|
||||
"speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
|
||||
"speed-limit-up-enabled": ('boolean', 1, None, None, None, 'Enables upload speed limiting.'),
|
||||
"start-added-torrents": ('boolean', 9, None, None, None, 'Added torrents will be started right away.'),
|
||||
"trash-original-torrent-files": ('boolean', 9, None, None, None, 'The .torrent file of added torrents will be deleted.'),
|
||||
'utp-enabled': ('boolean', 13, None, None, None, 'Enables Micro Transport Protocol (UTP).'),
|
||||
"alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'),
|
||||
"alt-speed-enabled": ('boolean', 5, None, None, None, 'Enables alternate global download speed limiter.'),
|
||||
"alt-speed-time-begin": (
|
||||
'number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'),
|
||||
"alt-speed-time-enabled": ('boolean', 5, None, None, None, 'Enables alternate speeds scheduling.'),
|
||||
"alt-speed-time-end": (
|
||||
'number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'),
|
||||
"alt-speed-time-day": ('number', 5, None, None, None, 'Enables alternate speeds scheduling these days.'),
|
||||
"alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s).'),
|
||||
"blocklist-enabled": ('boolean', 5, None, None, None, 'Enables the block list'),
|
||||
"blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'),
|
||||
"cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'),
|
||||
"dht-enabled": ('boolean', 6, None, None, None, 'Enables DHT.'),
|
||||
"download-dir": ('string', 1, None, None, None, 'Set the session download directory.'),
|
||||
"download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'),
|
||||
"download-queue-enabled": ('boolean', 14, None, None, None, 'Enables download queue.'),
|
||||
"encryption": ('string', 1, None, None, None,
|
||||
'Set the session encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'),
|
||||
"idle-seeding-limit": ('number', 10, None, None, None, 'The default seed inactivity limit in minutes.'),
|
||||
"idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'Enables the default seed inactivity limit'),
|
||||
"incomplete-dir": ('string', 7, None, None, None, 'The path to the directory of incomplete transfer data.'),
|
||||
"incomplete-dir-enabled": ('boolean', 7, None, None, None,
|
||||
'Enables the incomplete transfer data directory. Otherwise data for incomplete transfers are stored in the download target.'),
|
||||
"lpd-enabled": ('boolean', 9, None, None, None, 'Enables local peer discovery for public torrents.'),
|
||||
"peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'),
|
||||
"peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'),
|
||||
"peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'),
|
||||
"pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'Allowing PEX in public torrents.'),
|
||||
"pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'Allowing PEX in public torrents.'),
|
||||
"port": ('number', 1, 5, None, 'peer-port', 'Peer port.'),
|
||||
"peer-port": ('number', 5, None, 'port', None, 'Peer port.'),
|
||||
"peer-port-random-on-start": (
|
||||
'boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'),
|
||||
"port-forwarding-enabled": ('boolean', 1, None, None, None, 'Enables port forwarding.'),
|
||||
"rename-partial-files": ('boolean', 8, None, None, None, 'Appends ".part" to incomplete files'),
|
||||
"queue-stalled-minutes": (
|
||||
'number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'),
|
||||
"queue-stalled-enabled": ('boolean', 14, None, None, None, 'Enable tracking of stalled transfers.'),
|
||||
"script-torrent-done-enabled": ('boolean', 9, None, None, None, 'Whether or not to call the "done" script.'),
|
||||
"script-torrent-done-filename": (
|
||||
'string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'),
|
||||
"seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'),
|
||||
"seed-queue-enabled": ('boolean', 14, None, None, None, 'Enables upload queue.'),
|
||||
"seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 1.0 means 1:1 download and upload ratio.'),
|
||||
"seedRatioLimited": ('boolean', 5, None, None, None, 'Enables seed ration limit.'),
|
||||
"speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'),
|
||||
"speed-limit-down-enabled": ('boolean', 1, None, None, None, 'Enables download speed limiting.'),
|
||||
"speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'),
|
||||
"speed-limit-up-enabled": ('boolean', 1, None, None, None, 'Enables upload speed limiting.'),
|
||||
"start-added-torrents": ('boolean', 9, None, None, None, 'Added torrents will be started right away.'),
|
||||
"trash-original-torrent-files": (
|
||||
'boolean', 9, None, None, None, 'The .torrent file of added torrents will be deleted.'),
|
||||
'utp-enabled': ('boolean', 13, None, None, None, 'Enables Micro Transport Protocol (UTP).'),
|
||||
},
|
||||
}
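Each entry above follows the layout (type, rpc version added, rpc version removed, previous argument name, next argument name, description); argument_value_convert in utils.py, later in this diff, reads the two version columns to decide whether an argument is valid for a given RPC version. A minimal sketch of that gating, assuming a dict shaped like the entries above; the helper name is illustrative and not part of the module:

def is_available(args, name, rpc_version):
    """Illustrative helper, not part of transmissionrpc."""
    kind, added, removed = args[name][:3]
    if rpc_version < added:
        return False  # argument not introduced yet in this RPC version
    if removed is not None and rpc_version >= removed:
        return False  # argument dropped in a later RPC version
    return True

args = {"peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.')}
print(is_available(args, "peer-limit", 4))  # True
print(is_available(args, "peer-limit", 5))  # False, superseded by peer-limit-global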

@@ -4,11 +4,13 @@
from core.transmissionrpc.six import string_types, integer_types
class TransmissionError(Exception):
"""
This exception is raised when there has occurred an error related to
communication with Transmission. It is a subclass of Exception.
"""
def __init__(self, message='', original=None):
Exception.__init__(self)
self.message = message

@@ -17,15 +19,17 @@ class TransmissionError(Exception):
def __str__(self):
if self.original:
original_name = type(self.original).__name__
return '%s Original exception: %s, "%s"' % (self.message, original_name, str(self.original))
return '{0} Original exception: {1}, "{2}"'.format(self.message, original_name, str(self.original))
else:
return self.message
class HTTPHandlerError(Exception):
"""
This exception is raised when there has occurred an error related to
the HTTP handler. It is a subclass of Exception.
"""
def __init__(self, httpurl=None, httpcode=None, httpmsg=None, httpheaders=None, httpdata=None):
Exception.__init__(self)
self.url = ''

@@ -45,10 +49,10 @@ class HTTPHandlerError(Exception):
self.data = httpdata
def __repr__(self):
return '<HTTPHandlerError %d, %s>' % (self.code, self.message)
return '<HTTPHandlerError {0:d}, {1}>'.format(self.code, self.message)
def __str__(self):
return 'HTTPHandlerError %d: %s' % (self.code, self.message)
return 'HTTPHandlerError {0:d}: {1}'.format(self.code, self.message)
def __unicode__(self):
return 'HTTPHandlerError %d: %s' % (self.code, self.message)
return 'HTTPHandlerError {0:d}: {1}'.format(self.code, self.message)
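The reworked messages are easiest to see with the class in hand. A minimal usage sketch, assuming TransmissionError stores the original exception passed to __init__ as shown above:

from core.transmissionrpc.error import TransmissionError

try:
    raise KeyError('torrent-id')  # stand-in for a lower-level failure
except KeyError as err:
    wrapped = TransmissionError('Query failed.', original=err)
    print(wrapped)  # Query failed. Original exception: KeyError, "'torrent-id'"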

@@ -4,24 +4,22 @@
import sys
from core.transmissionrpc.error import HTTPHandlerError
from six import PY3
from six.moves.urllib_request import (
build_opener, install_opener,
HTTPBasicAuthHandler, HTTPDigestAuthHandler, HTTPPasswordMgrWithDefaultRealm,
Request,
)
from six.moves.urllib_error import HTTPError, URLError
from six.moves.http_client import BadStatusLine
from core.transmissionrpc.error import HTTPHandlerError
if PY3:
from urllib.request import Request, build_opener, \
HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler
from urllib.error import HTTPError, URLError
from http.client import BadStatusLine
else:
from urllib2 import Request, build_opener, \
HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler
from urllib2 import HTTPError, URLError
from httplib import BadStatusLine
class HTTPHandler(object):
"""
Prototype for HTTP handling.
"""
def set_authentication(self, uri, login, password):
"""
Transmission use basic authentication in earlier versions and digest

@@ -44,10 +42,12 @@ class HTTPHandler(object):
"""
raise NotImplementedError("Bad HTTPHandler, failed to implement request.")
class DefaultHTTPHandler(HTTPHandler):
"""
The default HTTP handler provided with transmissionrpc.
"""
def __init__(self):
HTTPHandler.__init__(self)
self.http_opener = build_opener()

@@ -75,7 +75,7 @@ class DefaultHTTPHandler(HTTPHandler):
if hasattr(error.reason, 'args') and isinstance(error.reason.args, tuple) and len(error.reason.args) == 2:
raise HTTPHandlerError(httpcode=error.reason.args[0], httpmsg=error.reason.args[1])
else:
raise HTTPHandlerError(httpmsg='urllib2.URLError: %s' % (error.reason))
raise HTTPHandlerError(httpmsg='urllib2.URLError: {error.reason}'.format(error=error))
except BadStatusLine as error:
raise HTTPHandlerError(httpmsg='httplib.BadStatusLine: %s' % (error.line))
raise HTTPHandlerError(httpmsg='httplib.BadStatusLine: {error.line}'.format(error=error))
return response.read().decode('utf-8')
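The hunk above swaps the hand-rolled PY3 branch for six.moves, which resolves to urllib2/httplib on Python 2 and urllib.request/http.client on Python 3. A small sketch of the idiom; the URL is a placeholder:

# six.moves picks the right stdlib module for the running interpreter.
from six.moves.urllib_request import build_opener, Request

opener = build_opener()
request = Request('http://localhost:9091/transmission/rpc')  # illustrative URL
# opener.open(request) would perform the HTTP call; the matching error
# classes (HTTPError, URLError) come from six.moves.urllib_error as above.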

@@ -6,6 +6,7 @@ from core.transmissionrpc.utils import Field
from core.transmissionrpc.six import iteritems, integer_types
class Session(object):
"""
Session is a class holding the session data for a Transmission daemon.

@@ -26,12 +27,12 @@ class Session(object):
try:
return self._fields[name].value
except KeyError:
raise AttributeError('No attribute %s' % name)
raise AttributeError('No attribute {0}'.format(name))
def __str__(self):
text = ''
for key in sorted(self._fields.keys()):
text += "% 32s: %s\n" % (key[-32:], self._fields[key].value)
text += "{0:32}: {1}\n".format(key[-32:], self._fields[key].value)
return text
def _update_fields(self, other):
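One behavioral note on the Session.__str__ change above: the two templates are not byte-for-byte equivalent. '% 32s' right-aligns the key in a 32-character column, while '{0:32}' left-aligns strings by default, so the rendered key column shifts sides. A two-line check:

key = 'alt-speed-down'
print('% 32s:' % key)         # right-aligned in a 32-character column
print('{0:32}:'.format(key))  # left-aligned, padded on the right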

@@ -1,6 +1,6 @@
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2013 Benjamin Peterson
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@@ -20,17 +20,22 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.4.1"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,

@@ -53,6 +58,7 @@ else:
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:

@@ -84,9 +90,13 @@ class _LazyDescr(object):
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
setattr(obj, self.name, result)  # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result

@@ -104,6 +114,27 @@ class MovedModule(_LazyDescr):
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):

@@ -130,9 +161,75 @@ class MovedAttribute(_LazyDescr):
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname)  # eventually raises ImportError
return None
get_source = get_code  # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects"""
__path__ = []  # mark as package
_moved_attributes = [

@@ -140,25 +237,33 @@ _moved_attributes = [
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("config", "config"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),

@@ -168,12 +273,14 @@ _moved_attributes = [
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",

@@ -189,22 +296,35 @@ _moved_attributes = [
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
class Module_six_moves_urllib_parse(types.ModuleType):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),

@@ -218,16 +338,27 @@ _urllib_parse_moved_attributes = [
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib.parse")
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(types.ModuleType):
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""

@@ -240,11 +371,14 @@ for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib_error")
sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(types.ModuleType):
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""

@@ -281,16 +415,20 @@ _urllib_request_moved_attributes = [
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib_request")
sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(types.ModuleType):
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""

@@ -304,11 +442,14 @@ for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib_response")
sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(types.ModuleType):
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""

@@ -319,20 +460,27 @@ for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib_robotparser")
sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
parse = sys.modules[__name__ + ".moves.urllib_parse"]
error = sys.modules[__name__ + ".moves.urllib_error"]
request = sys.modules[__name__ + ".moves.urllib_request"]
response = sys.modules[__name__ + ".moves.urllib_response"]
robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
__path__ = []  # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):

@@ -359,11 +507,6 @@ if PY3:
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
_iterlists = "lists"
else:
_meth_func = "im_func"
_meth_self = "im_self"

@@ -373,11 +516,6 @@ else:
_func_defaults = "func_defaults"
_func_globals = "func_globals"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
_iterlists = "iterlists"
try:
advance_iterator = next

@@ -400,6 +538,9 @@ if PY3:
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):

@@ -408,6 +549,9 @@ else:
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):

@@ -426,74 +570,121 @@ get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
def iterkeys(d, **kw):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)(**kw))
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
"""Return an iterator over the (key, [values]) pairs of a dictionary."""
return iter(getattr(d, _iterlists)(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")

if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s, "unicode_escape")
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
def iterbytes(buf):
return (ord(byte) for byte in buf)
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")

if PY3:
import builtins
exec_ = getattr(builtins, "exec")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
print_ = getattr(builtins, "print")
del builtins
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""

@@ -507,20 +698,45 @@ else:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function."""
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)

@@ -557,21 +773,96 @@ else:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
return meta("NewBase", bases, {})
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
for slots_var in orig_vars.get('__slots__', ()):
orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
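add_metaclass above sidesteps the incompatible Python 2 (__metaclass__ attribute) and Python 3 (metaclass= keyword) syntaxes. A minimal sketch with an illustrative metaclass:

from six import add_metaclass

class Meta(type):
    def __new__(mcs, name, bases, attrs):
        attrs.setdefault('tagged', True)  # illustrative behaviour only
        return super(Meta, mcs).__new__(mcs, name, bases, attrs)

@add_metaclass(Meta)
class Transfer(object):
    pass

print(Transfer.tagged)  # True on both Python 2 and Python 3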

def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
@@ -13,14 +13,15 @@ from six import integer_types, string_types, text_type, iteritems
def get_status_old(code):
"""Get the torrent status using old status codes"""
mapping = {
(1<<0): 'check pending',
(1<<1): 'checking',
(1<<2): 'downloading',
(1<<3): 'seeding',
(1<<4): 'stopped',
(1 << 0): 'check pending',
(1 << 1): 'checking',
(1 << 2): 'downloading',
(1 << 3): 'seeding',
(1 << 4): 'stopped',
}
return mapping[code]
def get_status_new(code):
"""Get the torrent status using new status codes"""
mapping = {

@@ -34,6 +35,7 @@ def get_status_new(code):
}
return mapping[code]
class Torrent(object):
"""
Torrent is a class holding the data received from Transmission regarding a bittorrent transfer.

@@ -71,14 +73,14 @@ class Torrent(object):
tid = self._fields['id'].value
name = self._get_name_string()
if isinstance(name, str):
return '<Torrent %d \"%s\">' % (tid, name)
return '<Torrent {0:d} \"{1}\">'.format(tid, name)
else:
return '<Torrent %d>' % (tid)
return '<Torrent {0:d}>'.format(tid)
def __str__(self):
name = self._get_name_string()
if isinstance(name, str):
return 'Torrent \"%s\"' % (name)
return 'Torrent \"{0}\"'.format(name)
else:
return 'Torrent'

@@ -89,7 +91,7 @@ class Torrent(object):
try:
return self._fields[name].value
except KeyError:
raise AttributeError('No attribute %s' % name)
raise AttributeError('No attribute {0}'.format(name))
def _rpc_version(self):
"""Get the Transmission RPC API version."""

@@ -99,8 +101,9 @@ class Torrent(object):
def _dirty_fields(self):
"""Enumerate changed fields"""
outgoing_keys = ['bandwidthPriority', 'downloadLimit', 'downloadLimited', 'peer_limit', 'queuePosition'
, 'seedIdleLimit', 'seedIdleMode', 'seedRatioLimit', 'seedRatioMode', 'uploadLimit', 'uploadLimited']
outgoing_keys = ['bandwidthPriority', 'downloadLimit', 'downloadLimited', 'peer_limit', 'queuePosition',
'seedIdleLimit', 'seedIdleMode', 'seedRatioLimit', 'seedRatioMode', 'uploadLimit',
'uploadLimited']
fields = []
for key in outgoing_keys:
if key in self._fields and self._fields[key].dirty:

@@ -121,7 +124,6 @@ class Torrent(object):
"""
Update the torrent data from a Transmission JSON-RPC arguments dictionary
"""
fields = None
if isinstance(other, dict):
for key, value in iteritems(other):
self._fields[key.replace('-', '_')] = Field(value, False)

@@ -131,7 +133,7 @@ class Torrent(object):
else:
raise ValueError('Cannot update with supplied data')
self._incoming_pending = False
def _status(self):
"""Get the torrent status"""
code = self._fields['status'].value

@@ -264,13 +266,14 @@ class Torrent(object):
self._fields['downloadLimited'] = Field(True, True)
self._fields['downloadLimit'] = Field(limit, True)
self._push()
elif limit == None:
elif limit is None:
self._fields['downloadLimited'] = Field(False, True)
self._push()
else:
raise ValueError("Not a valid limit")
download_limit = property(_get_download_limit, _set_download_limit, None, "Download limit in Kbps or None. This is a mutator.")
download_limit = property(_get_download_limit, _set_download_limit, None,
"Download limit in Kbps or None. This is a mutator.")
def _get_peer_limit(self):
"""

@@ -307,7 +310,7 @@ class Torrent(object):
self._push()
priority = property(_get_priority, _set_priority, None
, "Bandwidth priority as string. Can be one of 'low', 'normal', 'high'. This is a mutator.")
def _get_seed_idle_limit(self):
"""

@@ -326,7 +329,7 @@ class Torrent(object):
raise ValueError("Not a valid limit")
seed_idle_limit = property(_get_seed_idle_limit, _set_seed_idle_limit, None
, "Torrent seed idle limit in minutes. Also see seed_idle_mode. This is a mutator.")
def _get_seed_idle_mode(self):
"""

@@ -345,7 +348,7 @@ class Torrent(object):
raise ValueError("Not a valid limit")
seed_idle_mode = property(_get_seed_idle_mode, _set_seed_idle_mode, None,
"""
Seed idle mode as string. Can be one of 'global', 'single' or 'unlimited'.
* global, use session seed idle limit.

@@ -354,7 +357,7 @@ class Torrent(object):
This is a mutator.
"""
)
def _get_seed_ratio_limit(self):
"""

@@ -373,7 +376,7 @@ class Torrent(object):
raise ValueError("Not a valid limit")
seed_ratio_limit = property(_get_seed_ratio_limit, _set_seed_ratio_limit, None
, "Torrent seed ratio limit as float. Also see seed_ratio_mode. This is a mutator.")
def _get_seed_ratio_mode(self):
"""

@@ -392,7 +395,7 @@ class Torrent(object):
raise ValueError("Not a valid limit")
seed_ratio_mode = property(_get_seed_ratio_mode, _set_seed_ratio_mode, None,
"""
Seed ratio mode as string. Can be one of 'global', 'single' or 'unlimited'.
* global, use session seed ratio limit.

@@ -401,7 +404,7 @@ class Torrent(object):
This is a mutator.
"""
)
def _get_upload_limit(self):
"""

@@ -422,13 +425,14 @@ class Torrent(object):
self._fields['uploadLimited'] = Field(True, True)
self._fields['uploadLimit'] = Field(limit, True)
self._push()
elif limit == None:
elif limit is None:
self._fields['uploadLimited'] = Field(False, True)
self._push()
else:
raise ValueError("Not a valid limit")
upload_limit = property(_get_upload_limit, _set_upload_limit, None, "Upload limit in Kbps or None. This is a mutator.")
upload_limit = property(_get_upload_limit, _set_upload_limit, None,
"Upload limit in Kbps or None. This is a mutator.")
def _get_queue_position(self):
"""Get the queue position for this torrent."""

@@ -2,14 +2,18 @@
# Copyright (c) 2008-2013 Erik Svensson <erik.public@gmail.com>
# Licensed under the MIT license.
import socket, datetime, logging, constants
import constants
import datetime
import logging
import socket
from collections import namedtuple
from constants import LOGGER
from six import string_types, iteritems
UNITS = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB']
def format_size(size):
"""
Format byte size into IEC prefixes, B, KiB, MiB ...

@@ -19,14 +23,16 @@ def format_size(size):
while size >= 1024.0 and i < len(UNITS):
i += 1
size /= 1024.0
return (size, UNITS[i])
return size, UNITS[i]
def format_speed(size):
"""
Format bytes per second speed into IEC prefixes, B/s, KiB/s, MiB/s ...
"""
(size, unit) = format_size(size)
return (size, unit + '/s')
return size, unit + '/s'
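A quick usage sketch for the two formatters above, with the helpers in scope; the bare tuple returns unpack directly at the call site:

size, unit = format_size(123456789)
print('{0:.2f} {1}'.format(size, unit))    # 117.74 MiB

speed, unit = format_speed(1048576)
print('{0:.2f} {1}'.format(speed, unit))   # 1.00 MiB/s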

def format_timedelta(delta):
"""

@@ -34,7 +40,8 @@ def format_timedelta(delta):
"""
minutes, seconds = divmod(delta.seconds, 60)
hours, minutes = divmod(minutes, 60)
return '%d %02d:%02d:%02d' % (delta.days, hours, minutes, seconds)
return '{0:d} {1:02d}:{2:02d}:{3:02d}'.format(delta.days, hours, minutes, seconds)
def format_timestamp(timestamp, utc=False):
"""

@@ -49,12 +56,14 @@ def format_timestamp(timestamp, utc=False):
else:
return '-'
class INetAddressError(Exception):
"""
Error parsing / generating a internet address.
"""
pass
def inet_address(address, default_port, default_address='localhost'):
"""
Parse internet address.

@@ -71,18 +80,19 @@ def inet_address(address, default_port, default_address='localhost'):
try:
port = int(addr[1])
except ValueError:
raise INetAddressError('Invalid address "%s".' % address)
raise INetAddressError('Invalid address "{0}".'.format(address))
if len(addr[0]) == 0:
addr = default_address
else:
addr = addr[0]
else:
raise INetAddressError('Invalid address "%s".' % address)
raise INetAddressError('Invalid address "{0}".'.format(address))
try:
socket.getaddrinfo(addr, port, socket.AF_INET, socket.SOCK_STREAM)
except socket.gaierror:
raise INetAddressError('Cannot look up address "%s".' % address)
return (addr, port)
raise INetAddressError('Cannot look up address "{0}".'.format(address))
return addr, port
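A usage sketch for inet_address, assuming the elided head of the function splits the string on ':' and falls back to the defaults; only the tail of the body is visible in this hunk:

print(inet_address('localhost:9091', 9091))  # -> ('localhost', 9091)
print(inet_address('127.0.0.1', 8080))       # -> ('127.0.0.1', 8080), default port used
# A malformed port such as 'host:notaport' raises INetAddressError('Invalid address ...').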

def rpc_bool(arg):
"""

@@ -95,27 +105,31 @@ def rpc_bool(arg):
arg = arg.lower() in ['true', 'yes']
return 1 if bool(arg) else 0
TR_TYPE_MAP = {
'number' : int,
'string' : str,
'number': int,
'string': str,
'double': float,
'boolean' : rpc_bool,
'boolean': rpc_bool,
'array': list,
'object': dict
}
def make_python_name(name):
"""
Convert Transmission RPC name to python compatible name.
"""
return name.replace('-', '_')
def make_rpc_name(name):
"""
Convert python compatible name to Transmission RPC name.
"""
return name.replace('_', '-')
def argument_value_convert(method, argument, value, rpc_version):
"""
Check and fix Transmission RPC issues with regards to methods, arguments and values.

@@ -125,7 +139,7 @@ def argument_value_convert(method, argument, value, rpc_version):
elif method in ('session-get', 'session-set'):
args = constants.SESSION_ARGS[method[-3:]]
else:
return ValueError('Method "%s" not supported' % (method))
return ValueError('Method "{0}" not supported'.format(method))
if argument in args:
info = args[argument]
invalid_version = True

@@ -141,19 +155,18 @@ def argument_value_convert(method, argument, value, rpc_version):
if invalid_version:
if replacement:
LOGGER.warning(
'Replacing requested argument "%s" with "%s".'
% (argument, replacement))
'Replacing requested argument "{0}" with "{1}".'.format(argument, replacement))
argument = replacement
info = args[argument]
else:
raise ValueError(
'Method "%s" Argument "%s" does not exist in version %d.'
% (method, argument, rpc_version))
return (argument, TR_TYPE_MAP[info[0]](value))
'Method "{0}" Argument "{1}" does not exist in version {2:d}.'.format(method, argument, rpc_version))
return argument, TR_TYPE_MAP[info[0]](value)
else:
raise ValueError('Argument "%s" does not exists for method "%s".',
(argument, method))
def get_arguments(method, rpc_version):
"""
Get arguments for method in specified Transmission RPC version.

@@ -163,7 +176,7 @@ def get_arguments(method, rpc_version):
elif method in ('session-get', 'session-set'):
args = constants.SESSION_ARGS[method[-3:]]
else:
return ValueError('Method "%s" not supported' % (method))
return ValueError('Method "{0}" not supported'.format(method))
accessible = []
for argument, info in iteritems(args):
valid_version = True

@@ -175,6 +188,7 @@ def get_arguments(method, rpc_version):
accessible.append(argument)
return accessible
def add_stdout_logger(level='debug'):
"""
Add a stdout target for the transmissionrpc logging.

@@ -189,6 +203,7 @@ def add_stdout_logger(level='debug'):
loghandler.setLevel(loglevel)
trpc_logger.addHandler(loghandler)
def add_file_logger(filepath, level='debug'):
"""
Add a stdout target for the transmissionrpc logging.

@@ -203,4 +218,5 @@ def add_file_logger(filepath, level='debug'):
loghandler.setLevel(loglevel)
trpc_logger.addHandler(loghandler)
Field = namedtuple('Field', ['value', 'dirty'])

@@ -0,0 +1 @@
# coding=utf-8

@@ -1,115 +1,117 @@
#coding=utf8
import urllib
import urllib2
import urlparse
import cookielib
# coding=utf8
import json
import re
import StringIO
try:
import json
except ImportError:
import simplejson as json
from six import StringIO
from six.moves.http_cookiejar import CookieJar
from six.moves.urllib_error import HTTPError
from six.moves.urllib_parse import urljoin, urlencode
from six.moves.urllib_request import (
build_opener, install_opener,
HTTPBasicAuthHandler, HTTPCookieProcessor,
Request,
)
from core.utorrent.upload import MultiPartForm
class UTorrentClient(object):
def __init__(self, base_url, username, password):
self.base_url = base_url
self.username = username
self.password = password
self.opener = self._make_opener('uTorrent', base_url, username, password)
self.token = self._get_token()
#TODO refresh token, when necessary
# TODO refresh token, when necessary
def _make_opener(self, realm, base_url, username, password):
'''uTorrent API need HTTP Basic Auth and cookie support for token verify.'''
"""uTorrent API need HTTP Basic Auth and cookie support for token verify."""
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler = HTTPBasicAuthHandler()
auth_handler.add_password(realm=realm,
uri=base_url,
user=username,
passwd=password)
opener = urllib2.build_opener(auth_handler)
urllib2.install_opener(opener)
opener = build_opener(auth_handler)
install_opener(opener)
cookie_jar = cookielib.CookieJar()
cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar)
cookie_jar = CookieJar()
cookie_handler = HTTPCookieProcessor(cookie_jar)
handlers = [auth_handler, cookie_handler]
opener = urllib2.build_opener(*handlers)
opener = build_opener(*handlers)
return opener
def _get_token(self):
url = urlparse.urljoin(self.base_url, 'token.html')
url = urljoin(self.base_url, 'token.html')
response = self.opener.open(url)
token_re = "<div id='token' style='display:none;'>([^<>]+)</div>"
match = re.search(token_re, response.read())
return match.group(1)
def list(self, **kwargs):
params = [('list', '1')]
params += kwargs.items()
return self._action(params)
def start(self, *hashes):
params = [('action', 'start'),]
params = [('action', 'start'), ]
for hash in hashes:
params.append(('hash', hash))
return self._action(params)
def stop(self, *hashes):
params = [('action', 'stop'),]
params = [('action', 'stop'), ]
for hash in hashes:
params.append(('hash', hash))
return self._action(params)
def pause(self, *hashes):
params = [('action', 'pause'),]
params = [('action', 'pause'), ]
for hash in hashes:
params.append(('hash', hash))
return self._action(params)
def forcestart(self, *hashes):
params = [('action', 'forcestart'),]
params = [('action', 'forcestart'), ]
for hash in hashes:
params.append(('hash', hash))
return self._action(params)
def remove(self, *hashes):
params = [('action', 'remove'),]
params = [('action', 'remove'), ]
for hash in hashes:
params.append(('hash', hash))
return self._action(params)
def removedata(self, *hashes):
params = [('action', 'removedata'),]
params = [('action', 'removedata'), ]
for hash in hashes:
params.append(('hash', hash))
return self._action(params)
def recheck(self, *hashes):
params = [('action', 'recheck'),]
params = [('action', 'recheck'), ]
for hash in hashes:
params.append(('hash', hash))
return self._action(params)
def getfiles(self, hash):
params = [('action', 'getfiles'), ('hash', hash)]
return self._action(params)
def getprops(self, hash):
params = [('action', 'getprops'), ('hash', hash)]
return self._action(params)
def setprio(self, hash, priority, *files):
params = [('action', 'setprio'), ('hash', hash), ('p', str(priority))]
for file_index in files:
params.append(('f', str(file_index)))
return self._action(params)
def addfile(self, filename, filepath=None, bytes=None):
params = [('action', 'add-file')]

@@ -118,15 +120,15 @@ class UTorrentClient(object):
file_handler = open(filepath)
else:
file_handler = StringIO.StringIO(bytes)
form.add_file('torrent_file', filename.encode('utf-8'), file_handler)
return self._action(params, str(form), form.get_content_type())
def _action(self, params, body=None, content_type=None):
#about token, see https://github.com/bittorrent/webui/wiki/TokenSystem
url = self.base_url + '?token=' + self.token + '&' + urllib.urlencode(params)
request = urllib2.Request(url)
# about token, see https://github.com/bittorrent/webui/wiki/TokenSystem
url = self.base_url + '?token=' + self.token + '&' + urlencode(params)
request = Request(url)
if body:
request.add_data(body)

@@ -137,6 +139,5 @@ class UTorrentClient(object):
try:
response = self.opener.open(request)
return response.code, json.loads(response.read())
except urllib2.HTTPError,e:
raise
except HTTPError:
raise
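A usage sketch for the ported client; the URL, credentials, and info-hash below are placeholders that depend on the local uTorrent Web UI configuration. Each action goes through _action(), which returns the HTTP status code and the decoded JSON payload:

client = UTorrentClient('http://localhost:8080/gui/', 'admin', 'secret')
status, payload = client.list()  # (HTTP code, decoded JSON listing)
client.stop('0123456789abcdef0123456789abcdef01234567')  # placeholder info-hash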
@@ -1,7 +1,8 @@
#code copied from http://www.doughellmann.com/PyMOTW/urllib2/
# coding=utf-8
# code copied from http://www.doughellmann.com/PyMOTW/urllib2/

from email.generator import _make_boundary as make_boundary
import itertools
import mimetools
import mimetypes

@@ -11,11 +12,11 @@ class MultiPartForm(object):
def __init__(self):
self.form_fields = []
self.files = []
self.boundary = mimetools.choose_boundary()
self.boundary = make_boundary()
return

def get_content_type(self):
return 'multipart/form-data; boundary=%s' % self.boundary
return 'multipart/form-data; boundary={0}'.format(self.boundary)

def add_field(self, name, value):
"""Add a simple field to the form data."""

@@ -29,7 +30,7 @@ class MultiPartForm(object):
mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
self.files.append((fieldname, filename, mimetype, body))
return

def __str__(self):
"""Return a string representing the form data, including attached files."""
# Build a list of lists, each containing "lines" of the

@@ -38,29 +39,28 @@ class MultiPartForm(object):
# line is separated by '\r\n'.
parts = []
part_boundary = '--' + self.boundary

# Add the form fields
parts.extend(
[ part_boundary,
'Content-Disposition: form-data; name="%s"' % name,
'',
value,
]
[part_boundary,
'Content-Disposition: form-data; name="{0}"'.format(name),
'',
value,
]
for name, value in self.form_fields
)
)

# Add the files to upload
parts.extend(
[ part_boundary,
'Content-Disposition: file; name="%s"; filename="%s"' % \
(field_name, filename),
'Content-Type: %s' % content_type,
'',
body,
]
[part_boundary,
'Content-Disposition: file; name="{0}"; filename="{1}"'.format(field_name, filename),
'Content-Type: {0}'.format(content_type),
'',
body,
]
for field_name, filename, content_type, body in self.files
)
)

# Flatten the list and add closing boundary marker,
# then return CR+LF separated data
flattened = list(itertools.chain(*parts))
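A short usage sketch of the form builder as refactored above: add fields and files, then send `str(form)` with the matching Content-Type; the file name and field values here are hypothetical:

    form = MultiPartForm()
    form.add_field('torrent_label', 'tv')
    with open('example.torrent', 'rb') as fh:   # hypothetical file
        form.add_file('torrent_file', 'example.torrent', fh)
    body = str(form)
    headers = {'Content-Type': form.get_content_type(),
               'Content-Length': str(len(body))}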
@@ -1,3 +1,4 @@
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# Modified by: echel0n

@@ -15,7 +16,8 @@ import gh_api as github
import core
from core import logger

class CheckVersion():

class CheckVersion(object):
"""
Version check class meant to run as a thread object with the SB scheduler.
"""

@@ -79,7 +81,8 @@ class CheckVersion():
if self.updater.need_update():
return self.updater.update()

class UpdateManager():

class UpdateManager(object):
def get_github_repo_user(self):
return core.GIT_USER
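The `(object)` bases added here are not cosmetic: on Python 2 a bare `class C():` is an old-style class, which changes `type()` results and breaks descriptors and `super()`. A quick illustration:

    class Legacy():            # old-style on Python 2
        pass

    class Modern(object):      # new-style on both Python 2 and 3
        pass

    print type(Legacy())       # <type 'instance'>
    print type(Modern())       # <class '__main__.Modern'>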
@@ -89,6 +92,7 @@ class UpdateManager():
def get_github_branch(self):
return core.GIT_BRANCH

class GitUpdateManager(UpdateManager):
def __init__(self):
self._git_path = self._find_working_git()

@@ -102,7 +106,8 @@ class GitUpdateManager(UpdateManager):
self._num_commits_ahead = 0

def _git_error(self):
logger.debug('Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')
logger.debug(
'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')

def _find_working_git(self):
test_cmd = 'version'

@@ -147,18 +152,20 @@ class GitUpdateManager(UpdateManager):
logger.log(u"Not using: " + cur_git, logger.DEBUG)

# Still haven't found a working git
logger.debug('Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')
logger.debug(
'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')

return None

def _run_git(self, git_path, args):

output = err = exit_status = None
output = None
err = None

if not git_path:
logger.log(u"No git specified, can't use git commands", logger.DEBUG)
exit_status = 1
return (output, err, exit_status)
return output, err, exit_status

cmd = git_path + ' ' + args

@@ -197,7 +204,7 @@ class GitUpdateManager(UpdateManager):
logger.log(cmd + u" returned : " + output + u", treat as error for now", logger.DEBUG)
exit_status = 1

return (output, err, exit_status)
return output, err, exit_status

def _find_installed_version(self):
"""

@@ -278,9 +285,10 @@ class GitUpdateManager(UpdateManager):
logger.log(u"git didn't return numbers for behind and ahead, not using it", logger.DEBUG)
return

logger.log(u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash)
+ u", num_commits_behind = " + str(self._num_commits_behind) + u", num_commits_ahead = " + str(
self._num_commits_ahead), logger.DEBUG)
logger.log(
u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) +
u", num_commits_behind = " + str(self._num_commits_behind) + u", num_commits_ahead = " +
str(self._num_commits_ahead), logger.DEBUG)

def set_newest_text(self):
if self._num_commits_ahead:

@@ -305,7 +313,7 @@ class GitUpdateManager(UpdateManager):
else:
try:
self._check_github_for_update()
except Exception, e:
except Exception as e:
logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR)
return False
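The recurring `except X, e:` to `except X as e:` edits in this file follow one rule: the comma form is Python 2 only, while the `as` form parses on Python 2.6+ and Python 3 alike. A sketch; the callable is a placeholder:

    try:
        run_update()                  # hypothetical callable
    except EnvironmentError as e:     # was: except EnvironmentError, e:
        logger.log(u"Update failed: " + str(e), logger.ERROR)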
@@ -349,7 +357,7 @@ class SourceUpdateManager(UpdateManager):
try:
with open(version_file, 'r') as fp:
self._cur_commit_hash = fp.read().strip(' \n\r')
except EnvironmentError, e:
except EnvironmentError as e:
logger.log(u"Unable to open 'version.txt': " + str(e), logger.DEBUG)

if not self._cur_commit_hash:

@@ -363,7 +371,7 @@ class SourceUpdateManager(UpdateManager):

try:
self._check_github_for_update()
except Exception, e:
except Exception as e:
logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR)
return False

@@ -410,8 +418,9 @@ class SourceUpdateManager(UpdateManager):
# when _cur_commit_hash doesn't match anything _num_commits_behind == 100
self._num_commits_behind += 1

logger.log(u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash)
+ u", num_commits_behind = " + str(self._num_commits_behind), logger.DEBUG)
logger.log(
u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) +
u", num_commits_behind = " + str(self._num_commits_behind), logger.DEBUG)

def set_newest_text(self):

@@ -488,15 +497,15 @@ class SourceUpdateManager(UpdateManager):
old_path = os.path.join(content_dir, dirname, curfile)
new_path = os.path.join(core.PROGRAM_DIR, dirname, curfile)

#Avoid DLL access problem on WIN32/64
#These files needing to be updated manually
#or find a way to kill the access from memory
# Avoid DLL access problem on WIN32/64
# These files needing to be updated manually
# or find a way to kill the access from memory
if curfile in ('unrar.dll', 'unrar64.dll'):
try:
os.chmod(new_path, stat.S_IWRITE)
os.remove(new_path)
os.renames(old_path, new_path)
except Exception, e:
except Exception as e:
logger.log(u"Unable to update " + new_path + ': ' + str(e), logger.DEBUG)
os.remove(old_path)  # Trash the updated file without moving in new path
continue

@@ -509,13 +518,13 @@ class SourceUpdateManager(UpdateManager):
try:
with open(version_path, 'w') as ver_file:
ver_file.write(self._newest_commit_hash)
except EnvironmentError, e:
except EnvironmentError as e:
logger.log(u"Unable to write version file, update not complete: " + str(e), logger.ERROR)
return False

except Exception, e:
except Exception as e:
logger.log(u"Error while trying to update: " + str(e), logger.ERROR)
logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG)
return False

return True
return True
372 libs/six.py

@@ -1,6 +1,6 @@
"""Utilities for writing code that runs on Python 2 and 3"""

# Copyright (c) 2010-2014 Benjamin Peterson
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@@ -20,17 +20,22 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import absolute_import

import functools
import itertools
import operator
import sys
import types

__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.5.2"
__version__ = "1.10.0"

# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)

if PY3:
string_types = str,
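The new `PY34` flag joins `PY2`/`PY3` for coarse version gating; it is used further down to pick `importlib` over `imp` for `reload_module`. Typical gating looks like:

    import six

    if six.PY2:
        text_type = unicode   # Python 2 builtin
    else:
        text_type = str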
@@ -53,6 +58,7 @@ else:
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):

def __len__(self):
return 1 << 31
try:

@@ -84,9 +90,13 @@ class _LazyDescr(object):

def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)  # Invokes __set__.
# This is a bit ugly, but it avoids running this again.
delattr(obj.__class__, self.name)
setattr(obj, self.name, result)  # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result

@@ -105,14 +115,6 @@ class MovedModule(_LazyDescr):
return _import_module(self.mod)

def __getattr__(self, attr):
# Hack around the Django autoreloader. The reloader tries to get
# __file__ or __name__ of every module in sys.modules. This doesn't work
# well if this MovedModule is for an module that is unavailable on this
# machine (like winreg on Unix systems). Thus, we pretend __file__ and
# __name__ don't exist if the module hasn't been loaded yet. See issues
# #51 and #53.
if attr in ("__file__", "__name__") and self.mod not in sys.modules:
raise AttributeError
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)

@@ -159,9 +161,75 @@ class MovedAttribute(_LazyDescr):
return getattr(module, self.attr)

class _SixMetaPathImporter(object):

"""
A meta path importer to import six.moves and its submodules.

This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""

def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}

def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod

def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]

def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None

def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)

def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod

def is_package(self, fullname):
"""
Return true, if the named module is a package.

We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")

def get_code(self, fullname):
"""Return None

Required, if is_package is implemented"""
self.__get_module(fullname)  # eventually raises ImportError
return None
get_source = get_code  # same as get_code

_importer = _SixMetaPathImporter(__name__)

class _MovedItems(_LazyModule):

"""Lazy loading of moved objects"""
__path__ = []  # mark as package

_moved_attributes = [
@@ -169,26 +237,33 @@ _moved_attributes = [
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),

MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),

@@ -222,25 +297,34 @@ _moved_attributes = [
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("winreg", "_winreg"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]

for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
sys.modules[__name__ + ".moves." + attr.name] = attr
_importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")

class Module_six_moves_urllib_parse(_LazyModule):

"""Lazy loading of moved objects in six.moves.urllib_parse"""

_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),

@@ -254,6 +338,14 @@ _urllib_parse_moved_attributes = [
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
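Once `_importer` is on `sys.meta_path`, the registered names import like ordinary modules on either Python version; this is the same mechanism the client code earlier in this commit leans on for `urlencode` and `Request`. For example:

    from six.moves.urllib.parse import urlencode
    from six.moves.urllib.request import Request

    url = 'http://localhost:8080/gui/?' + urlencode({'action': 'pause'})  # illustrative URL
    request = Request(url)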
@@ -261,10 +353,12 @@ del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")

class Module_six_moves_urllib_error(_LazyModule):

"""Lazy loading of moved objects in six.moves.urllib_error"""

@@ -279,10 +373,12 @@ del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")

class Module_six_moves_urllib_request(_LazyModule):

"""Lazy loading of moved objects in six.moves.urllib_request"""

@@ -327,10 +423,12 @@ del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")

class Module_six_moves_urllib_response(_LazyModule):

"""Lazy loading of moved objects in six.moves.urllib_response"""

@@ -346,10 +444,12 @@ del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")

class Module_six_moves_urllib_robotparser(_LazyModule):

"""Lazy loading of moved objects in six.moves.urllib_robotparser"""

@@ -362,22 +462,25 @@ del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")

class Module_six_moves_urllib(types.ModuleType):

"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
parse = sys.modules[__name__ + ".moves.urllib_parse"]
error = sys.modules[__name__ + ".moves.urllib_error"]
request = sys.modules[__name__ + ".moves.urllib_request"]
response = sys.modules[__name__ + ".moves.urllib_response"]
robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
__path__ = []  # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")

def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']

sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")

def add_move(move):
@@ -404,11 +507,6 @@ if PY3:
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"

_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
_iterlists = "lists"
else:
_meth_func = "im_func"
_meth_self = "im_self"

@@ -418,11 +516,6 @@ else:
_func_defaults = "func_defaults"
_func_globals = "func_globals"

_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
_iterlists = "iterlists"

try:
advance_iterator = next

@@ -445,6 +538,9 @@ if PY3:

create_bound_method = types.MethodType

def create_unbound_method(func, cls):
return func

Iterator = object
else:
def get_unbound_function(unbound):

@@ -453,6 +549,9 @@ else:
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)

def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)

class Iterator(object):

def next(self):

@@ -471,66 +570,117 @@ get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)

def iterkeys(d, **kw):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)(**kw))
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))

def itervalues(d, **kw):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))

def iteritems(d, **kw):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))

def iterlists(d, **kw):
"""Return an iterator over the (key, [values]) pairs of a dictionary."""
return iter(getattr(d, _iterlists)(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))

viewkeys = operator.methodcaller("keys")

viewvalues = operator.methodcaller("values")

viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)

def itervalues(d, **kw):
return d.itervalues(**kw)

def iteritems(d, **kw):
return d.iteritems(**kw)

def iterlists(d, **kw):
return d.iterlists(**kw)

viewkeys = operator.methodcaller("viewkeys")

viewvalues = operator.methodcaller("viewvalues")

viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")

def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash

def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr

def byte2int(bs):
return ord(bs[0])

def indexbytes(buf, i):
return ord(buf[i])
def iterbytes(buf):
return (ord(byte) for byte in buf)
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")

def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)

def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)

def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)

if PY3:
exec_ = getattr(moves.builtins, "exec")

def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
@@ -548,12 +698,26 @@ else:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")

exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")

if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
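`reraise` (defined per-version above) re-raises with an existing traceback, and the new `raise_from` emulates PEP 3134 exception chaining where the `raise ... from ...` syntax is unavailable. A small sketch:

    import sys
    import six

    try:
        {}['missing']
    except KeyError:
        tp, value, tb = sys.exc_info()
        six.reraise(tp, value, tb)   # propagates with the original traceback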
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):

@@ -561,13 +725,14 @@ if print_ is None:
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return

def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"

@@ -608,25 +773,96 @@ if print_ is None:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_

def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()

_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
return meta("NewBase", bases, {})
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):

def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})

def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
return wrapper
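`add_metaclass` complements `with_metaclass`: instead of a temporary dummy class, the decorator rebuilds the decorated class with the real metaclass. A usage sketch:

    import six

    class Meta(type):
        def __new__(mcs, name, bases, attrs):
            attrs['tagged'] = True
            return super(Meta, mcs).__new__(mcs, name, bases, attrs)

    @six.add_metaclass(Meta)
    class Tagged(object):
        pass

    assert Tagged.tagged is True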
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.

To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
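With this decorator a class defines `__str__` once, returning text; on Python 2 the decorator aliases it to `__unicode__` and installs a UTF-8-encoding `__str__`. For example, with a hypothetical class and field:

    import six

    @six.python_2_unicode_compatible
    class Release(object):
        def __init__(self, name):
            self.name = name

        def __str__(self):
            return u'Release: {0}'.format(self.name)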
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
@@ -514,7 +514,7 @@ from core import logger, nzbToMediaDB
def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):
if core.SAFE_MODE and inputDirectory == core.NZB_DEFAULTDIR:
logger.error(
'The input directory:[%s] is the Default Download Directory. Please configure category directories to prevent processing of other media.' % (
'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format(
inputDirectory))
return [-1, ""]
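The remaining hunks in this file apply one mechanical rule: positional `{0}` placeholders via `str.format` replace `%`-interpolation, which behaves identically here and works unchanged on Python 2.6+ and 3 (the explicit index form is required on 2.6). The sample value is illustrative:

    name = 'example.nzb'
    old = 'Adding NZB download info for directory %s to database' % (name)
    new = 'Adding NZB download info for directory {0} to database'.format(name)
    assert old == new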
@@ -522,7 +522,7 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):
download_id = get_nzoid(inputName)

if clientAgent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding NZB download info for directory %s to database' % (inputDirectory))
logger.debug('Adding NZB download info for directory {0} to database'.format(inputDirectory))

myDB = nzbToMediaDB.DBConnection()

@@ -555,7 +555,7 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):
section = core.CFG.findsection("ALL").isenabled()
if section is None:
logger.error(
'Category:[%s] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.' % (
'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format(
inputCategory))
return [-1, ""]
else:

@@ -563,15 +563,15 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):

if len(section) > 1:
logger.error(
'Category:[%s] is not unique, %s are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.' % (
'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format(
inputCategory, section.keys()))
return [-1, ""]

if section:
sectionName = section.keys()[0]
logger.info('Auto-detected SECTION:%s' % (sectionName))
logger.info('Auto-detected SECTION:{0}'.format(sectionName))
else:
logger.error("Unable to locate a section with subsection:%s enabled in your autoProcessMedia.cfg, exiting!" % (
logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format(
inputCategory))
return [-1, ""]
@@ -582,20 +582,20 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):

try:
if int(section[usercat]['remote_path']) and not core.REMOTEPATHS:
logger.error('Remote Path is enabled for %s:%s but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!' % (
logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format(
sectionName, inputCategory))
return [-1, ""]
except:
logger.error('Remote Path %s is not valid for %s:%s Please set this to either 0 to disable or 1 to enable!' % (
logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format(
section[usercat]['remote_path'], sectionName, inputCategory))

inputName, inputDirectory = convert_to_ascii(inputName, inputDirectory)

if extract == 1:
logger.debug('Checking for archives to extract in directory: %s' % (inputDirectory))
logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
extractFiles(inputDirectory)

logger.info("Calling %s:%s to post-process:%s" % (sectionName, inputCategory, inputName))
logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, inputCategory, inputName))

if sectionName == "CouchPotato":
result = autoProcessMovie().process(sectionName, inputDirectory, inputName, status, clientAgent, download_id,
@@ -636,11 +636,11 @@ def main(args, section=None):
clientAgent = core.NZB_CLIENTAGENT

logger.info("#########################################################")
logger.info("## ..::[%s]::.. ##" % os.path.basename(__file__))
logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__)))
logger.info("#########################################################")

# debug command line options
logger.debug("Options passed into nzbToMedia: %s" % args)
logger.debug("Options passed into nzbToMedia: {0}".format(args))

# Post-Processing Result
result = [0, ""]
@@ -650,15 +650,15 @@ def main(args, section=None):
if os.environ.has_key('NZBOP_SCRIPTDIR'):
# Check if the script is called from nzbget 11.0 or later
if os.environ['NZBOP_VERSION'][0:5] < '11.0':
logger.error("NZBGet Version %s is not supported. Please update NZBGet." %(str(os.environ['NZBOP_VERSION'])))
logger.error("NZBGet Version {0} is not supported. Please update NZBGet.".format(os.environ['NZBOP_VERSION']))
sys.exit(core.NZBGET_POSTPROCESS_ERROR)

logger.info("Script triggered from NZBGet Version %s." %(str(os.environ['NZBOP_VERSION'])))
logger.info("Script triggered from NZBGet Version {0}.".format(os.environ['NZBOP_VERSION']))

# Check if the script is called from nzbget 13.0 or later
if os.environ.has_key('NZBPP_TOTALSTATUS'):
if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS':
logger.info("Download failed with status %s." %(os.environ['NZBPP_STATUS']))
logger.info("Download failed with status {0}.".format(os.environ['NZBPP_STATUS']))
status = 1

else:
@@ -745,16 +745,16 @@ def main(args, section=None):
if not core.CFG[section][subsection].isenabled():
continue
for dirName in getDirs(section, subsection, link = 'move'):
logger.info("Starting manual run for %s:%s - Folder:%s" % (section, subsection, dirName))
logger.info("Starting manual run for {0}:{1} - Folder:{2}".format(section, subsection, dirName))

logger.info("Checking database for download info for %s ..." % (os.path.basename(dirName)))
logger.info("Checking database for download info for {0} ...".format(os.path.basename(dirName)))
core.DOWNLOADINFO = get_downloadInfo(os.path.basename(dirName), 0)
if core.DOWNLOADINFO:
logger.info(
"Found download info for %s, setting variables now ..." % (os.path.basename(dirName)))
"Found download info for {0}, setting variables now ...".format(os.path.basename(dirName)))
else:
logger.info(
'Unable to locate download info for %s, continuing to try and process this release ...' % (
'Unable to locate download info for {0}, continuing to try and process this release ...'.format(
os.path.basename(dirName))
)
@@ -781,19 +781,19 @@ def main(args, section=None):
results = process(dirName, inputName, 0, clientAgent=clientAgent,
download_id=download_id, inputCategory=subsection)
if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for %s:%s." % (
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format(
section, subsection))
result = results

if result[0] == 0:
logger.info("The %s script completed successfully." % args[0])
logger.info("The {0} script completed successfully.".format(args[0]))
if result[1]:
print result[1] + "!"  # For SABnzbd Status display.
if os.environ.has_key('NZBOP_SCRIPTDIR'):  # return code for nzbget v11
del core.MYAPP
return (core.NZBGET_POSTPROCESS_SUCCESS)
else:
logger.error("A problem was reported in the %s script." % args[0])
logger.error("A problem was reported in the {0} script.".format(args[0]))
if result[1]:
print result[1] + "!"  # For SABnzbd Status display.
if os.environ.has_key('NZBOP_SCRIPTDIR'):  # return code for nzbget v11