From 81ffe0456d46b6b2160e3f6aacdb9e6238a25bb7 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 21:36:03 -0400 Subject: [PATCH 01/21] Add encoding declaration --- core/__init__.py | 1 + core/autoProcess/__init__.py | 1 + core/databases/__init__.py | 1 + core/databases/mainDB.py | 2 ++ core/extractor/__init__.py | 1 + core/extractor/extractor.py | 1 + core/gh_api.py | 1 + core/linktastic/__init__.py | 1 + core/linktastic/linktastic.py | 1 + core/logger.py | 1 + core/nzbToMediaAutoFork.py | 1 + core/nzbToMediaConfig.py | 1 + core/nzbToMediaDB.py | 1 + core/nzbToMediaSceneExceptions.py | 1 + core/nzbToMediaUserScript.py | 1 + core/nzbToMediaUtil.py | 1 + core/synchronousdeluge/__init__.py | 1 + core/synchronousdeluge/client.py | 1 + core/synchronousdeluge/exceptions.py | 1 + core/synchronousdeluge/protocol.py | 1 + core/synchronousdeluge/rencode.py | 2 +- core/synchronousdeluge/transfer.py | 1 + core/transcoder/__init__.py | 1 + core/transcoder/transcoder.py | 1 + core/transmissionrpc/six.py | 1 + core/utorrent/__init__.py | 1 + core/utorrent/upload.py | 1 + core/versionCheck.py | 1 + 28 files changed, 29 insertions(+), 1 deletion(-) diff --git a/core/__init__.py b/core/__init__.py index a9d2a1e9..50ffc928 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -1,3 +1,4 @@ +# coding=utf-8 import locale import os import re diff --git a/core/autoProcess/__init__.py b/core/autoProcess/__init__.py index e69de29b..bf893c06 100644 --- a/core/autoProcess/__init__.py +++ b/core/autoProcess/__init__.py @@ -0,0 +1 @@ +# coding=utf-8 \ No newline at end of file diff --git a/core/databases/__init__.py b/core/databases/__init__.py index 96661806..737828fb 100644 --- a/core/databases/__init__.py +++ b/core/databases/__init__.py @@ -1 +1,2 @@ +# coding=utf-8 __all__ = ["mainDB"] \ No newline at end of file diff --git a/core/databases/mainDB.py b/core/databases/mainDB.py index 89b89529..71c1e3b2 100644 --- a/core/databases/mainDB.py +++ b/core/databases/mainDB.py @@ -1,3 
+1,4 @@ +# coding=utf-8 import core from core import logger, nzbToMediaDB from core.nzbToMediaUtil import backupVersionedFile @@ -5,6 +6,7 @@ from core.nzbToMediaUtil import backupVersionedFile MIN_DB_VERSION = 1 # oldest db version we support migrating from MAX_DB_VERSION = 2 + def backupDatabase(version): logger.info("Backing up database before upgrade") if not backupVersionedFile(nzbToMediaDB.dbFilename(), version): diff --git a/core/extractor/__init__.py b/core/extractor/__init__.py index e69de29b..bf893c06 100644 --- a/core/extractor/__init__.py +++ b/core/extractor/__init__.py @@ -0,0 +1 @@ +# coding=utf-8 \ No newline at end of file diff --git a/core/extractor/extractor.py b/core/extractor/extractor.py index 92faf7be..68d5e6df 100644 --- a/core/extractor/extractor.py +++ b/core/extractor/extractor.py @@ -1,3 +1,4 @@ +# coding=utf-8 import os import platform import shutil diff --git a/core/gh_api.py b/core/gh_api.py index abdf8b1d..8da2a794 100644 --- a/core/gh_api.py +++ b/core/gh_api.py @@ -1,3 +1,4 @@ +# coding=utf-8 import json import requests diff --git a/core/linktastic/__init__.py b/core/linktastic/__init__.py index e69de29b..bf893c06 100644 --- a/core/linktastic/__init__.py +++ b/core/linktastic/__init__.py @@ -0,0 +1 @@ +# coding=utf-8 \ No newline at end of file diff --git a/core/linktastic/linktastic.py b/core/linktastic/linktastic.py index 408bbc2e..9d981b57 100644 --- a/core/linktastic/linktastic.py +++ b/core/linktastic/linktastic.py @@ -1,3 +1,4 @@ +# coding=utf-8 # Linktastic Module # - A python2/3 compatible module that can create hardlinks/symlinks on windows-based systems # diff --git a/core/logger.py b/core/logger.py index 671bfc04..0b8a5446 100644 --- a/core/logger.py +++ b/core/logger.py @@ -1,3 +1,4 @@ +# coding=utf-8 from __future__ import with_statement import os diff --git a/core/nzbToMediaAutoFork.py b/core/nzbToMediaAutoFork.py index 966b453f..f5140228 100644 --- a/core/nzbToMediaAutoFork.py +++ b/core/nzbToMediaAutoFork.py @@ -1,3 
+1,4 @@ +# coding=utf-8 import urllib import core import requests diff --git a/core/nzbToMediaConfig.py b/core/nzbToMediaConfig.py index c5c14512..40a48b2b 100644 --- a/core/nzbToMediaConfig.py +++ b/core/nzbToMediaConfig.py @@ -1,3 +1,4 @@ +# coding=utf-8 import os import shutil import copy diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index 2c3335e7..9ca856d5 100644 --- a/core/nzbToMediaDB.py +++ b/core/nzbToMediaDB.py @@ -1,3 +1,4 @@ +# coding=utf-8 from __future__ import with_statement import re diff --git a/core/nzbToMediaSceneExceptions.py b/core/nzbToMediaSceneExceptions.py index 91796582..7eedd7a0 100644 --- a/core/nzbToMediaSceneExceptions.py +++ b/core/nzbToMediaSceneExceptions.py @@ -1,3 +1,4 @@ +# coding=utf-8 import os import re import core diff --git a/core/nzbToMediaUserScript.py b/core/nzbToMediaUserScript.py index 5d07ab0b..6acc0169 100644 --- a/core/nzbToMediaUserScript.py +++ b/core/nzbToMediaUserScript.py @@ -1,3 +1,4 @@ +# coding=utf-8 import os import core from subprocess import Popen diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 3dad6577..53bdf27a 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -1,3 +1,4 @@ +# coding=utf-8 from __future__ import unicode_literals import os import re diff --git a/core/synchronousdeluge/__init__.py b/core/synchronousdeluge/__init__.py index d7edfe22..6155881f 100644 --- a/core/synchronousdeluge/__init__.py +++ b/core/synchronousdeluge/__init__.py @@ -1,3 +1,4 @@ +# coding=utf-8 """A synchronous implementation of the Deluge RPC protocol based on gevent-deluge by Christopher Rosell. 
diff --git a/core/synchronousdeluge/client.py b/core/synchronousdeluge/client.py index f35663e4..b4228d83 100644 --- a/core/synchronousdeluge/client.py +++ b/core/synchronousdeluge/client.py @@ -1,3 +1,4 @@ +# coding=utf-8 import os import platform diff --git a/core/synchronousdeluge/exceptions.py b/core/synchronousdeluge/exceptions.py index da6cf022..ff622cb1 100644 --- a/core/synchronousdeluge/exceptions.py +++ b/core/synchronousdeluge/exceptions.py @@ -1,3 +1,4 @@ +# coding=utf-8 __all__ = ["DelugeRPCError"] class DelugeRPCError(Exception): diff --git a/core/synchronousdeluge/protocol.py b/core/synchronousdeluge/protocol.py index 756d4dfc..9af38b4d 100644 --- a/core/synchronousdeluge/protocol.py +++ b/core/synchronousdeluge/protocol.py @@ -1,3 +1,4 @@ +# coding=utf-8 __all__ = ["DelugeRPCRequest", "DelugeRPCResponse"] class DelugeRPCRequest(object): diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py index a0a6eec3..0f6ca1ec 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -1,4 +1,4 @@ - +# coding=utf-8 """ rencode -- Web safe object pickling/unpickling. 
diff --git a/core/synchronousdeluge/transfer.py b/core/synchronousdeluge/transfer.py index 27982fab..0f39bcab 100644 --- a/core/synchronousdeluge/transfer.py +++ b/core/synchronousdeluge/transfer.py @@ -1,3 +1,4 @@ +# coding=utf-8 import zlib import struct import socket diff --git a/core/transcoder/__init__.py b/core/transcoder/__init__.py index 1f47cffe..b1629751 100644 --- a/core/transcoder/__init__.py +++ b/core/transcoder/__init__.py @@ -1 +1,2 @@ +# coding=utf-8 __author__ = 'Justin' diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index a3e536f5..622b5f08 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -1,3 +1,4 @@ +# coding=utf-8 import errno import os import platform diff --git a/core/transmissionrpc/six.py b/core/transmissionrpc/six.py index 836d516c..b73b777a 100644 --- a/core/transmissionrpc/six.py +++ b/core/transmissionrpc/six.py @@ -1,3 +1,4 @@ +# coding=utf-8 """Utilities for writing code that runs on Python 2 and 3""" # Copyright (c) 2010-2013 Benjamin Peterson diff --git a/core/utorrent/__init__.py b/core/utorrent/__init__.py index e69de29b..bf893c06 100644 --- a/core/utorrent/__init__.py +++ b/core/utorrent/__init__.py @@ -0,0 +1 @@ +# coding=utf-8 \ No newline at end of file diff --git a/core/utorrent/upload.py b/core/utorrent/upload.py index 9886c3ec..8a72306a 100644 --- a/core/utorrent/upload.py +++ b/core/utorrent/upload.py @@ -1,3 +1,4 @@ +# coding=utf-8 #code copied from http://www.doughellmann.com/PyMOTW/urllib2/ import itertools diff --git a/core/versionCheck.py b/core/versionCheck.py index 5066a3e9..773a7f25 100644 --- a/core/versionCheck.py +++ b/core/versionCheck.py @@ -1,3 +1,4 @@ +# coding=utf-8 # Author: Nic Wolfe # Modified by: echel0n From 8cd0e76ef82c8abe44559b4e936dccbd01387e4e Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 22:07:03 -0400 Subject: [PATCH 02/21] PEP8: Fix formatting * Remove redundant backslash between brackets * Fix multiple statements on 
one line * Fix missing/excess whitespace * Fix comments not starting with a single # and a space * Convert tabs to spaces * Use triple-quoted docstring --- core/__init__.py | 469 +++++++++++++++---------- core/autoProcess/__init__.py | 2 +- core/databases/__init__.py | 2 +- core/databases/mainDB.py | 9 +- core/extractor/__init__.py | 2 +- core/extractor/extractor.py | 29 +- core/gh_api.py | 1 + core/linktastic/__init__.py | 2 +- core/linktastic/linktastic.py | 32 +- core/logger.py | 18 +- core/nzbToMediaAutoFork.py | 17 +- core/nzbToMediaConfig.py | 84 +++-- core/nzbToMediaDB.py | 5 +- core/nzbToMediaSceneExceptions.py | 51 +-- core/nzbToMediaUserScript.py | 10 +- core/nzbToMediaUtil.py | 219 +++++++----- core/synchronousdeluge/client.py | 27 +- core/synchronousdeluge/exceptions.py | 2 +- core/synchronousdeluge/protocol.py | 3 +- core/synchronousdeluge/rencode.py | 191 ++++++---- core/synchronousdeluge/transfer.py | 4 +- core/transcoder/transcoder.py | 233 +++++++----- core/transmissionrpc/__init__.py | 12 +- core/transmissionrpc/client.py | 101 +++--- core/transmissionrpc/constants.py | 507 ++++++++++++++------------- core/transmissionrpc/error.py | 12 +- core/transmissionrpc/httphandler.py | 4 + core/transmissionrpc/session.py | 1 + core/transmissionrpc/six.py | 52 ++- core/transmissionrpc/torrent.py | 39 ++- core/transmissionrpc/utils.py | 21 +- core/utorrent/__init__.py | 2 +- core/utorrent/client.py | 58 +-- core/utorrent/upload.py | 39 +-- core/versionCheck.py | 29 +- 35 files changed, 1342 insertions(+), 947 deletions(-) diff --git a/core/__init__.py b/core/__init__.py index 50ffc928..21864078 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -34,14 +34,14 @@ from core.autoProcess.autoProcessTV import autoProcessTV from core import logger, versionCheck, nzbToMediaDB from core.nzbToMediaConfig import config from core.nzbToMediaUtil import category_search, sanitizeName, copy_link, parse_args, flatten, getDirs, \ - rmReadOnly,rmDir, pause_torrent, 
resume_torrent, remove_torrent, listMediaFiles, \ + rmReadOnly, rmDir, pause_torrent, resume_torrent, remove_torrent, listMediaFiles, \ extractFiles, cleanDir, update_downloadInfoStatus, get_downloadInfo, WakeUp, makeDir, cleanDir, \ create_torrent_class, listMediaFiles, RunningProcess from core.transcoder import transcoder from core.databases import mainDB # Client Agents -NZB_CLIENTS = ['sabnzbd','nzbget'] +NZB_CLIENTS = ['sabnzbd', 'nzbget'] TORRENT_CLIENTS = ['transmission', 'deluge', 'utorrent', 'rtorrent', 'other'] # sabnzbd constants @@ -62,7 +62,8 @@ FORKS[FORK_FAILED_TORRENT] = {"dir": None, "failed": None, "process_method": Non FORKS[FORK_SICKRAGETV] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None} FORKS[FORK_SICKRAGE] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None} FORKS[FORK_SICKGEAR] = {"dir": None, "failed": None, "process_method": None, "force": None} -ALL_FORKS = {"dir": None, "dirName": None, "proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None} +ALL_FORKS = {"dir": None, "dirName": None, "proc_dir": None, "failed": None, "process_method": None, "force": None, + "delete_on": None} # NZBGet Exit Codes NZBGET_POSTPROCESS_PARCHECK = 92 @@ -202,6 +203,7 @@ USER_SCRIPT_RUNONCE = None __INITIALIZED__ = False + def initialize(section=None): global NZBGET_POSTPROCESS_ERROR, NZBGET_POSTPROCESS_NONE, NZBGET_POSTPROCESS_PARCHECK, NZBGET_POSTPROCESS_SUCCESS, \ NZBTOMEDIA_TIMEOUT, FORKS, FORK_DEFAULT, FORK_FAILED_TORRENT, FORK_FAILED, \ @@ -224,7 +226,7 @@ def initialize(section=None): if __INITIALIZED__: return False - + if os.environ.has_key('NTM_LOGFILE'): LOG_FILE = os.environ['NTM_LOGFILE'] LOG_DIR = os.path.split(LOG_FILE)[0] @@ -316,7 +318,8 @@ def initialize(section=None): # restart nzbToMedia try: del MYAPP - except: pass + except: + pass restart() else: logger.error("Update wasn't successful, not restarting. 
Check your log for more information.") @@ -334,8 +337,10 @@ def initialize(section=None): SABNZBDAPIKEY = CFG["Nzb"]["sabnzbd_apikey"] NZB_DEFAULTDIR = CFG["Nzb"]["default_downloadDirectory"] GROUPS = CFG["Custom"]["remove_group"] - if isinstance(GROUPS, str): GROUPS = GROUPS.split(',') - if GROUPS == ['']: GROUPS = None + if isinstance(GROUPS, str): + GROUPS = GROUPS.split(',') + if GROUPS == ['']: + GROUPS = None TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"] # utorrent | deluge | transmission | rtorrent | vuze |other USELINK = CFG["Torrent"]["useLink"] # no | hard | sym @@ -343,8 +348,10 @@ def initialize(section=None): TORRENT_DEFAULTDIR = CFG["Torrent"]["default_downloadDirectory"] CATEGORIES = (CFG["Torrent"]["categories"]) # music,music_videos,pictures,software NOFLATTEN = (CFG["Torrent"]["noFlatten"]) - if isinstance(NOFLATTEN, str): NOFLATTEN = NOFLATTEN.split(',') - if isinstance(CATEGORIES, str): CATEGORIES = CATEGORIES.split(',') + if isinstance(NOFLATTEN, str): + NOFLATTEN = NOFLATTEN.split(',') + if isinstance(CATEGORIES, str): + CATEGORIES = CATEGORIES.split(',') DELETE_ORIGINAL = int(CFG["Torrent"]["deleteOriginal"]) TORRENT_CHMOD_DIRECTORY = int(str(CFG["Torrent"]["chmodDirectory"]), 8) TORRENT_RESUME_ON_FAILURE = int(CFG["Torrent"]["resumeOnFailure"]) @@ -365,9 +372,12 @@ def initialize(section=None): REMOTEPATHS = CFG["Network"]["mount_points"] or [] if REMOTEPATHS: - if isinstance(REMOTEPATHS, list): REMOTEPATHS = ','.join(REMOTEPATHS) # fix in case this imported as list. - REMOTEPATHS = [ tuple(item.split(',')) for item in REMOTEPATHS.split('|') ] # /volume1/Public/,E:\|/volume2/share/,\\NAS\ - REMOTEPATHS = [ (local.strip(), remote.strip()) for local, remote in REMOTEPATHS ] # strip trailing and leading whitespaces + if isinstance(REMOTEPATHS, list): + REMOTEPATHS = ','.join(REMOTEPATHS) # fix in case this imported as list. 
+ REMOTEPATHS = [tuple(item.split(',')) for item in + REMOTEPATHS.split('|')] # /volume1/Public/,E:\|/volume2/share/,\\NAS\ + REMOTEPATHS = [(local.strip(), remote.strip()) for local, remote in + REMOTEPATHS] # strip trailing and leading whitespaces PLEXSSL = int(CFG["Plex"]["plex_ssl"]) PLEXHOST = CFG["Plex"]["plex_host"] @@ -375,62 +385,79 @@ def initialize(section=None): PLEXTOKEN = CFG["Plex"]["plex_token"] PLEXSEC = CFG["Plex"]["plex_sections"] or [] if PLEXSEC: - if isinstance(PLEXSEC, list): PLEXSEC = ','.join(PLEXSEC) # fix in case this imported as list. - PLEXSEC = [ tuple(item.split(',')) for item in PLEXSEC.split('|') ] + if isinstance(PLEXSEC, list): + PLEXSEC = ','.join(PLEXSEC) # fix in case this imported as list. + PLEXSEC = [tuple(item.split(',')) for item in PLEXSEC.split('|')] devnull = open(os.devnull, 'w') try: subprocess.Popen(["nice"], stdout=devnull, stderr=devnull).communicate() NICENESS.extend(['nice', '-n%s' % (int(CFG["Posix"]["niceness"]))]) - except: pass + except: + pass try: subprocess.Popen(["ionice"], stdout=devnull, stderr=devnull).communicate() try: NICENESS.extend(['ionice', '-c%s' % (int(CFG["Posix"]["ionice_class"]))]) - except: pass + except: + pass try: if 'ionice' in NICENESS: NICENESS.extend(['-n%s' % (int(CFG["Posix"]["ionice_classdata"]))]) else: NICENESS.extend(['ionice', '-n%s' % (int(CFG["Posix"]["ionice_classdata"]))]) - except: pass - except: pass + except: + pass + except: + pass devnull.close() COMPRESSEDCONTAINER = [re.compile('.r\d{2}$', re.I), - re.compile('.part\d+.rar$', re.I), - re.compile('.rar$', re.I)] + re.compile('.part\d+.rar$', re.I), + re.compile('.rar$', re.I)] COMPRESSEDCONTAINER += [re.compile('%s$' % ext, re.I) for ext in CFG["Extensions"]["compressedExtensions"]] MEDIACONTAINER = CFG["Extensions"]["mediaExtensions"] AUDIOCONTAINER = CFG["Extensions"]["audioExtensions"] METACONTAINER = CFG["Extensions"]["metaExtensions"] # .nfo,.sub,.srt - if isinstance(COMPRESSEDCONTAINER, str): 
COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',') - if isinstance(MEDIACONTAINER, str): MEDIACONTAINER = MEDIACONTAINER.split(',') - if isinstance(AUDIOCONTAINER, str): AUDIOCONTAINER = AUDIOCONTAINER.split(',') - if isinstance(METACONTAINER, str): METACONTAINER = METACONTAINER.split(',') + if isinstance(COMPRESSEDCONTAINER, str): + COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',') + if isinstance(MEDIACONTAINER, str): + MEDIACONTAINER = MEDIACONTAINER.split(',') + if isinstance(AUDIOCONTAINER, str): + AUDIOCONTAINER = AUDIOCONTAINER.split(',') + if isinstance(METACONTAINER, str): + METACONTAINER = METACONTAINER.split(',') GETSUBS = int(CFG["Transcoder"]["getSubs"]) TRANSCODE = int(CFG["Transcoder"]["transcode"]) DUPLICATE = int(CFG["Transcoder"]["duplicate"]) CONCAT = int(CFG["Transcoder"]["concat"]) IGNOREEXTENSIONS = (CFG["Transcoder"]["ignoreExtensions"]) - if isinstance(IGNOREEXTENSIONS, str): IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',') + if isinstance(IGNOREEXTENSIONS, str): + IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',') OUTPUTFASTSTART = int(CFG["Transcoder"]["outputFastStart"]) GENERALOPTS = (CFG["Transcoder"]["generalOptions"]) - if isinstance(GENERALOPTS, str): GENERALOPTS = GENERALOPTS.split(',') - if GENERALOPTS == ['']: GENERALOPTS = [] - if not '-fflags' in GENERALOPTS: GENERALOPTS.append('-fflags') - if not '+genpts' in GENERALOPTS: GENERALOPTS.append('+genpts') + if isinstance(GENERALOPTS, str): + GENERALOPTS = GENERALOPTS.split(',') + if GENERALOPTS == ['']: + GENERALOPTS = [] + if not '-fflags' in GENERALOPTS: + GENERALOPTS.append('-fflags') + if not '+genpts' in GENERALOPTS: + GENERALOPTS.append('+genpts') try: OUTPUTQUALITYPERCENT = int(CFG["Transcoder"]["outputQualityPercent"]) - except: pass + except: + pass OUTPUTVIDEOPATH = CFG["Transcoder"]["outputVideoPath"] PROCESSOUTPUT = int(CFG["Transcoder"]["processOutput"]) ALANGUAGE = CFG["Transcoder"]["audioLanguage"] AINCLUDE = int(CFG["Transcoder"]["allAudioLanguages"]) SLANGUAGES = 
CFG["Transcoder"]["subLanguages"] - if isinstance(SLANGUAGES, str): SLANGUAGES = SLANGUAGES.split(',') - if SLANGUAGES == ['']: SLANGUAGES = [] + if isinstance(SLANGUAGES, str): + SLANGUAGES = SLANGUAGES.split(',') + if SLANGUAGES == ['']: + SLANGUAGES = [] SINCLUDE = int(CFG["Transcoder"]["allSubLanguages"]) SEXTRACT = int(CFG["Transcoder"]["extractSubs"]) SEMBED = int(CFG["Transcoder"]["embedSubs"]) @@ -438,169 +465,215 @@ def initialize(section=None): VEXTENSION = CFG["Transcoder"]["outputVideoExtension"].strip() VCODEC = CFG["Transcoder"]["outputVideoCodec"].strip() VCODEC_ALLOW = CFG["Transcoder"]["VideoCodecAllow"].strip() - if isinstance(VCODEC_ALLOW, str): VCODEC_ALLOW = VCODEC_ALLOW.split(',') - if VCODEC_ALLOW == ['']: VCODEC_ALLOW = [] + if isinstance(VCODEC_ALLOW, str): + VCODEC_ALLOW = VCODEC_ALLOW.split(',') + if VCODEC_ALLOW == ['']: + VCODEC_ALLOW = [] VPRESET = CFG["Transcoder"]["outputVideoPreset"].strip() try: VFRAMERATE = float(CFG["Transcoder"]["outputVideoFramerate"].strip()) - except: pass + except: + pass try: VCRF = int(CFG["Transcoder"]["outputVideoCRF"].strip()) - except: pass + except: + pass try: VLEVEL = CFG["Transcoder"]["outputVideoLevel"].strip() - except: pass + except: + pass try: - VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k','000')) - except: pass + VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k', '000')) + except: + pass VRESOLUTION = CFG["Transcoder"]["outputVideoResolution"] ACODEC = CFG["Transcoder"]["outputAudioCodec"].strip() ACODEC_ALLOW = CFG["Transcoder"]["AudioCodecAllow"].strip() - if isinstance(ACODEC_ALLOW, str): ACODEC_ALLOW = ACODEC_ALLOW.split(',') - if ACODEC_ALLOW == ['']: ACODEC_ALLOW = [] + if isinstance(ACODEC_ALLOW, str): + ACODEC_ALLOW = ACODEC_ALLOW.split(',') + if ACODEC_ALLOW == ['']: + ACODEC_ALLOW = [] try: ACHANNELS = int(CFG["Transcoder"]["outputAudioChannels"].strip()) - except: pass + except: + pass try: - ABITRATE = 
int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k','000')) - except: pass + ABITRATE = int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k', '000')) + except: + pass ACODEC2 = CFG["Transcoder"]["outputAudioTrack2Codec"].strip() ACODEC2_ALLOW = CFG["Transcoder"]["AudioCodec2Allow"].strip() - if isinstance(ACODEC2_ALLOW, str): ACODEC2_ALLOW = ACODEC2_ALLOW.split(',') - if ACODEC2_ALLOW == ['']: ACODEC2_ALLOW = [] + if isinstance(ACODEC2_ALLOW, str): + ACODEC2_ALLOW = ACODEC2_ALLOW.split(',') + if ACODEC2_ALLOW == ['']: + ACODEC2_ALLOW = [] try: ACHANNELS2 = int(CFG["Transcoder"]["outputAudioTrack2Channels"].strip()) - except: pass + except: + pass try: - ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k','000')) - except: pass + ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k', '000')) + except: + pass ACODEC3 = CFG["Transcoder"]["outputAudioOtherCodec"].strip() ACODEC3_ALLOW = CFG["Transcoder"]["AudioOtherCodecAllow"].strip() - if isinstance(ACODEC3_ALLOW, str): ACODEC3_ALLOW = ACODEC3_ALLOW.split(',') - if ACODEC3_ALLOW == ['']: ACODEC3_ALLOW = [] + if isinstance(ACODEC3_ALLOW, str): + ACODEC3_ALLOW = ACODEC3_ALLOW.split(',') + if ACODEC3_ALLOW == ['']: + ACODEC3_ALLOW = [] try: ACHANNELS3 = int(CFG["Transcoder"]["outputAudioOtherChannels"].strip()) - except: pass + except: + pass try: - ABITRATE3 = int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k','000')) - except: pass + ABITRATE3 = int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k', '000')) + except: + pass SCODEC = CFG["Transcoder"]["outputSubtitleCodec"].strip() BURN = int(CFG["Transcoder"]["burnInSubtitle"].strip()) DEFAULTS = CFG["Transcoder"]["outputDefault"].strip() HWACCEL = int(CFG["Transcoder"]["hwAccel"]) - allow_subs = ['.mkv','.mp4', '.m4v', 'asf', 'wma', 'wmv'] + allow_subs = ['.mkv', '.mp4', '.m4v', 'asf', 'wma', 'wmv'] codec_alias = { - 'libx264':['libx264', 
'h264', 'h.264', 'AVC', 'MPEG-4'], - 'libmp3lame':['libmp3lame', 'mp3'], - 'libfaac':['libfaac', 'aac', 'faac'] - } + 'libx264': ['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4'], + 'libmp3lame': ['libmp3lame', 'mp3'], + 'libfaac': ['libfaac', 'aac', 'faac'] + } transcode_defaults = { - 'iPad':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2, - 'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'iPad-1080p':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':'1920:1080','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2, - 'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'iPad-720p':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2, - 'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'Apple-TV':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 
'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6, - 'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'iPod':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2, - 'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'iPhone':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':'460:320','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2, - 'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'PS3':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6, - 'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'xbox':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 
'MPEG-4'], - 'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6, - 'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'Roku-480p':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2, - 'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'Roku-720p':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2, - 'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'Roku-1080p':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], - 'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':160000, 'ACHANNELS':2, - 'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6, - 'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None, - 'SCODEC':'mov_text' - }, - 'mkv':{ - 'VEXTENSION':'.mkv','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None, - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'], 
- 'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8, - 'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None, - 'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8, - 'SCODEC':'mov_text' - }, - 'mp4-scene-release':{ - 'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':19,'VLEVEL':'3.1', - 'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'], - 'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8, - 'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None, - 'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8, - 'SCODEC':'mov_text' - } + 'iPad': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2, + 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'iPad-1080p': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': '1920:1080', + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2, + 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'iPad-720p': { + 'VEXTENSION': 
'.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': '1280:720', + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2, + 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'Apple-TV': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': '1280:720', + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6, + 'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'iPod': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': '1280:720', + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, + 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'iPhone': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': '460:320', + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, + 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 
'ACHANNELS2': None, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'PS3': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6, + 'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'xbox': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6, + 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'Roku-480p': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, + 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'Roku-720p': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 
'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, + 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'Roku-1080p': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'], + 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 160000, 'ACHANNELS': 2, + 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, + 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, + 'SCODEC': 'mov_text' + }, + 'mkv': { + 'VEXTENSION': '.mkv', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': None, 'VLEVEL': None, + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'], + 'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8, + 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, + 'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, + 'ACHANNELS3': 8, + 'SCODEC': 'mov_text' + }, + 'mp4-scene-release': { + 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, + 'VCRF': 19, 'VLEVEL': '3.1', + 'VRESOLUTION': None, + 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'], + 'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8, + 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, + 'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, + 'ACHANNELS3': 8, + 'SCODEC': 
'mov_text' } + } if DEFAULTS and DEFAULTS in transcode_defaults: VEXTENSION = transcode_defaults[DEFAULTS]['VEXTENSION'] VCODEC = transcode_defaults[DEFAULTS]['VCODEC'] @@ -630,25 +703,29 @@ def initialize(section=None): if VEXTENSION in allow_subs: ALLOWSUBS = 1 - if not VCODEC_ALLOW and VCODEC: VCODEC_ALLOW.extend([VCODEC]) + if not VCODEC_ALLOW and VCODEC: + VCODEC_ALLOW.extend([VCODEC]) for codec in VCODEC_ALLOW: if codec in codec_alias: - extra = [ item for item in codec_alias[codec] if item not in VCODEC_ALLOW ] + extra = [item for item in codec_alias[codec] if item not in VCODEC_ALLOW] VCODEC_ALLOW.extend(extra) - if not ACODEC_ALLOW and ACODEC: ACODEC_ALLOW.extend([ACODEC]) + if not ACODEC_ALLOW and ACODEC: + ACODEC_ALLOW.extend([ACODEC]) for codec in ACODEC_ALLOW: if codec in codec_alias: - extra = [ item for item in codec_alias[codec] if item not in ACODEC_ALLOW ] + extra = [item for item in codec_alias[codec] if item not in ACODEC_ALLOW] ACODEC_ALLOW.extend(extra) - if not ACODEC2_ALLOW and ACODEC2: ACODEC2_ALLOW.extend([ACODEC2]) + if not ACODEC2_ALLOW and ACODEC2: + ACODEC2_ALLOW.extend([ACODEC2]) for codec in ACODEC2_ALLOW: if codec in codec_alias: - extra = [ item for item in codec_alias[codec] if item not in ACODEC2_ALLOW ] + extra = [item for item in codec_alias[codec] if item not in ACODEC2_ALLOW] ACODEC2_ALLOW.extend(extra) - if not ACODEC3_ALLOW and ACODEC3: ACODEC3_ALLOW.extend([ACODEC3]) + if not ACODEC3_ALLOW and ACODEC3: + ACODEC3_ALLOW.extend([ACODEC3]) for codec in ACODEC3_ALLOW: if codec in codec_alias: - extra = [ item for item in codec_alias[codec] if item not in ACODEC3_ALLOW ] + extra = [item for item in codec_alias[codec] if item not in ACODEC3_ALLOW] ACODEC3_ALLOW.extend(extra) codec_alias = {} # clear memory @@ -674,47 +751,59 @@ def initialize(section=None): else: try: SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip() - except: pass - if not SEVENZIP: + except: + pass + if not SEVENZIP: 
try: SEVENZIP = subprocess.Popen(['which', '7zr'], stdout=subprocess.PIPE).communicate()[0].strip() - except: pass - if not SEVENZIP: + except: + pass + if not SEVENZIP: try: SEVENZIP = subprocess.Popen(['which', '7za'], stdout=subprocess.PIPE).communicate()[0].strip() - except: pass + except: + pass if not SEVENZIP: SEVENZIP = None - logger.warning("Failed to locate 7zip. Transcosing of disk images and extraction of .7z files will not be possible!") - if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'), os.X_OK): + logger.warning( + "Failed to locate 7zip. Transcosing of disk images and extraction of .7z files will not be possible!") + if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'), + os.X_OK): FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg') - elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(os.path.join(FFMPEG_PATH, 'avconv'), os.X_OK): + elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(os.path.join(FFMPEG_PATH, 'avconv'), + os.X_OK): FFMPEG = os.path.join(FFMPEG_PATH, 'avconv') else: try: FFMPEG = subprocess.Popen(['which', 'ffmpeg'], stdout=subprocess.PIPE).communicate()[0].strip() - except: pass - if not FFMPEG: + except: + pass + if not FFMPEG: try: FFMPEG = subprocess.Popen(['which', 'avconv'], stdout=subprocess.PIPE).communicate()[0].strip() - except: pass + except: + pass if not FFMPEG: FFMPEG = None logger.warning("Failed to locate ffmpeg. 
Transcoding disabled!") logger.warning("Install ffmpeg with x264 support to enable this feature ...") - if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'), os.X_OK): + if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'), + os.X_OK): FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe') - elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(os.path.join(FFMPEG_PATH, 'avprobe'), os.X_OK): + elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(os.path.join(FFMPEG_PATH, 'avprobe'), + os.X_OK): FFPROBE = os.path.join(FFMPEG_PATH, 'avprobe') else: try: FFPROBE = subprocess.Popen(['which', 'ffprobe'], stdout=subprocess.PIPE).communicate()[0].strip() - except: pass - if not FFPROBE: + except: + pass + if not FFPROBE: try: FFPROBE = subprocess.Popen(['which', 'avprobe'], stdout=subprocess.PIPE).communicate()[0].strip() - except: pass + except: + pass if not FFPROBE: FFPROBE = None if CHECK_MEDIA: @@ -723,7 +812,7 @@ def initialize(section=None): # check for script-defied section and if None set to allow sections SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)] - for section,subsections in SECTIONS.items(): + for section, subsections in SECTIONS.items(): CATEGORIES.extend([subsection for subsection in subsections if CFG[section][subsection].isenabled()]) CATEGORIES = list(set(CATEGORIES)) @@ -733,6 +822,7 @@ def initialize(section=None): # finished initalizing return True + def restart(): install_type = versionCheck.CheckVersion().install_type @@ -752,11 +842,12 @@ def restart(): os._exit(status) + def rchmod(path, mod): logger.log("Changing file mode of %s to %s" % (path, oct(mod))) os.chmod(path, mod) if not os.path.isdir(path): - return # Skip files + return # Skip files for root, dirs, files in os.walk(path): for d in dirs: diff --git a/core/autoProcess/__init__.py 
b/core/autoProcess/__init__.py index bf893c06..9bad5790 100644 --- a/core/autoProcess/__init__.py +++ b/core/autoProcess/__init__.py @@ -1 +1 @@ -# coding=utf-8 \ No newline at end of file +# coding=utf-8 diff --git a/core/databases/__init__.py b/core/databases/__init__.py index 737828fb..14f97982 100644 --- a/core/databases/__init__.py +++ b/core/databases/__init__.py @@ -1,2 +1,2 @@ # coding=utf-8 -__all__ = ["mainDB"] \ No newline at end of file +__all__ = ["mainDB"] diff --git a/core/databases/mainDB.py b/core/databases/mainDB.py index 71c1e3b2..0d27b526 100644 --- a/core/databases/mainDB.py +++ b/core/databases/mainDB.py @@ -14,6 +14,7 @@ def backupDatabase(version): else: logger.info("Proceeding with upgrade") + # ====================== # = Main DB Migrations = # ====================== @@ -45,21 +46,21 @@ class InitialSchema(nzbToMediaDB.SchemaUpgrade): cur_db_version) + ") is too old to migrate from what this version of nzbToMedia supports (" + \ str(MIN_DB_VERSION) + ").\n" + \ "Please remove nzbtomedia.db file to begin fresh." - ) + ) if cur_db_version > MAX_DB_VERSION: logger.log_error_and_exit("Your database version (" + str( cur_db_version) + ") has been incremented past what this version of nzbToMedia supports (" + \ str(MAX_DB_VERSION) + ").\n" + \ "If you have used other forks of nzbToMedia, your database may be unusable due to their modifications." - ) + ) if cur_db_version < MAX_DB_VERSION: # We need to upgrade. 
queries = [ "CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));", "INSERT INTO downloads2 SELECT * FROM downloads;", "DROP TABLE IF EXISTS downloads;", - "ALTER TABLE downloads2 RENAME TO downloads;", + "ALTER TABLE downloads2 RENAME TO downloads;", "INSERT INTO db_version (db_version) VALUES (2);" ] for query in queries: - self.connection.action(query) \ No newline at end of file + self.connection.action(query) diff --git a/core/extractor/__init__.py b/core/extractor/__init__.py index bf893c06..9bad5790 100644 --- a/core/extractor/__init__.py +++ b/core/extractor/__init__.py @@ -1 +1 @@ -# coding=utf-8 \ No newline at end of file +# coding=utf-8 diff --git a/core/extractor/extractor.py b/core/extractor/extractor.py index 68d5e6df..865802ba 100644 --- a/core/extractor/extractor.py +++ b/core/extractor/extractor.py @@ -8,6 +8,7 @@ import core from subprocess import call, Popen import subprocess + def extract(filePath, outputDestination): success = 0 # Using Windows @@ -22,9 +23,9 @@ def extract(filePath, outputDestination): # Using unix else: required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"] - ## Possible future suport: + # ## Possible future suport: # gunzip: gz (cmd will delete original archive) - ## the following do not extract to dest dir + # ## the following do not extract to dest dir # ".xz": ["xz", "-d --keep"], # ".lzma": ["xz", "-d --format=lzma --keep"], # ".bz2": ["bzip2", "-d --keep"], @@ -43,12 +44,13 @@ def extract(filePath, outputDestination): if not os.getenv('TR_TORRENT_DIR'): devnull = open(os.devnull, 'w') for cmd in required_cmds: - if call(['which', cmd], stdout=devnull, stderr=devnull): #note, returns 0 if exists, or 1 if doesn't exist. + if call(['which', cmd], stdout=devnull, + stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist. 
if cmd == "7zr" and not call(["which", "7z"]): # we do have "7z" command EXTRACT_COMMANDS[".7z"] = ["7z", "x"] elif cmd == "7zr" and not call(["which", "7za"]): # we do have "7za" command EXTRACT_COMMANDS[".7z"] = ["7za", "x"] - else: + else: for k, v in EXTRACT_COMMANDS.items(): if cmd in v[0]: core.logger.error("EXTRACTOR: %s not found, disabling support for %s" % (cmd, k)) @@ -77,7 +79,7 @@ def extract(filePath, outputDestination): core.logger.debug("EXTRACTOR: Unknown file type: %s" % ext[1]) return False - # Create outputDestination folder + # Create outputDestination folder core.makeDir(outputDestination) if core.PASSWORDSFILE != "" and os.path.isfile(os.path.normpath(core.PASSWORDSFILE)): @@ -99,7 +101,7 @@ def extract(filePath, outputDestination): pwd = os.getcwd() # Get our Present Working Directory os.chdir(outputDestination) # Not all unpack commands accept full paths, so just extract into this directory devnull = open(os.devnull, 'w') - + try: # now works same for nt and *nix info = None cmd.append(filePath) # add filePath to final cmd arg. @@ -112,7 +114,8 @@ def extract(filePath, outputDestination): cmd2.append("-p-") # don't prompt for password. p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. res = p.wait() - if (res >= 0 and os.name == 'nt') or res == 0: # for windows chp returns process id if successful or -1*Error code. Linux returns 0 for successful. + if ( + res >= 0 and os.name == 'nt') or res == 0: # for windows chp returns process id if successful or -1*Error code. Linux returns 0 for successful. core.logger.info("EXTRACTOR: Extraction was successful for %s to %s" % (filePath, outputDestination)) success = 1 elif len(passwords) > 0: @@ -121,14 +124,14 @@ def extract(filePath, outputDestination): if password == "": # if edited in windows or otherwise if blank lines. continue cmd2 = cmd - #append password here. + # append password here. 
passcmd = "-p" + password cmd2.append(passcmd) p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. res = p.wait() if (res >= 0 and platform == 'Windows') or res == 0: core.logger.info("EXTRACTOR: Extraction was successful for %s to %s using password: %s" % ( - filePath, outputDestination, password)) + filePath, outputDestination, password)) success = 1 break else: @@ -142,19 +145,21 @@ def extract(filePath, outputDestination): os.chdir(pwd) # Go back to our Original Working Directory if success: # sleep to let files finish writing to disk - sleep (3) + sleep(3) perms = stat.S_IMODE(os.lstat(os.path.split(filePath)[0]).st_mode) for dir, subdirs, files in os.walk(outputDestination): for subdir in subdirs: if not os.path.join(dir, subdir) in origFiles: try: os.chmod(os.path.join(dir, subdir), perms) - except: pass + except: + pass for file in files: if not os.path.join(dir, file) in origFiles: try: shutil.copymode(filePath, os.path.join(dir, file)) - except: pass + except: + pass return True else: core.logger.error("EXTRACTOR: Extraction failed for %s. Result was %s" % (filePath, res)) diff --git a/core/gh_api.py b/core/gh_api.py index 8da2a794..95faf10e 100644 --- a/core/gh_api.py +++ b/core/gh_api.py @@ -2,6 +2,7 @@ import json import requests + class GitHub(object): """ Simple api wrapper for the Github API v3. diff --git a/core/linktastic/__init__.py b/core/linktastic/__init__.py index bf893c06..9bad5790 100644 --- a/core/linktastic/__init__.py +++ b/core/linktastic/__init__.py @@ -1 +1 @@ -# coding=utf-8 \ No newline at end of file +# coding=utf-8 diff --git a/core/linktastic/linktastic.py b/core/linktastic/linktastic.py index 9d981b57..af690158 100644 --- a/core/linktastic/linktastic.py +++ b/core/linktastic/linktastic.py @@ -30,6 +30,7 @@ if os.name == 'nt': info = subprocess.STARTUPINFO() info.dwFlags |= subprocess.STARTF_USESHOWWINDOW + # Prevent spaces from messing with us! 
def _escape_param(param): return '"%s"' % param @@ -45,9 +46,9 @@ def _link_windows(src, dest): raise IOError(err.output.decode('utf-8')) - # TODO, find out what kind of messages Windows sends us from mklink - # print(stdout) - # assume if they ret-coded 0 we're good + # TODO, find out what kind of messages Windows sends us from mklink + # print(stdout) + # assume if they ret-coded 0 we're good def _symlink_windows(src, dest): @@ -58,9 +59,10 @@ def _symlink_windows(src, dest): except CalledProcessError as err: raise IOError(err.output.decode('utf-8')) - # TODO, find out what kind of messages Windows sends us from mklink - # print(stdout) - # assume if they ret-coded 0 we're good + # TODO, find out what kind of messages Windows sends us from mklink + # print(stdout) + # assume if they ret-coded 0 we're good + def _dirlink_windows(src, dest): try: @@ -70,9 +72,10 @@ def _dirlink_windows(src, dest): except CalledProcessError as err: raise IOError(err.output.decode('utf-8')) - # TODO, find out what kind of messages Windows sends us from mklink - # print(stdout) - # assume if they ret-coded 0 we're good + # TODO, find out what kind of messages Windows sends us from mklink + # print(stdout) + # assume if they ret-coded 0 we're good + def _junctionlink_windows(src, dest): try: @@ -82,9 +85,10 @@ def _junctionlink_windows(src, dest): except CalledProcessError as err: raise IOError(err.output.decode('utf-8')) - # TODO, find out what kind of messages Windows sends us from mklink - # print(stdout) - # assume if they ret-coded 0 we're good + # TODO, find out what kind of messages Windows sends us from mklink + # print(stdout) + # assume if they ret-coded 0 we're good + # Create a hard link to src named as dest # This version of link, unlike os.link, supports nt systems as well @@ -102,6 +106,7 @@ def symlink(src, dest): else: os.symlink(src, dest) + # Create a symlink to src named as dest, but don't fail if you're on nt def dirlink(src, dest): if os.name == 'nt': @@ -109,9 
+114,10 @@ def dirlink(src, dest): else: os.symlink(src, dest) + # Create a symlink to src named as dest, but don't fail if you're on nt def junctionlink(src, dest): if os.name == 'nt': _junctionlink_windows(src, dest) else: - os.symlink(src, dest) \ No newline at end of file + os.symlink(src, dest) diff --git a/core/logger.py b/core/logger.py index 0b8a5446..324248d4 100644 --- a/core/logger.py +++ b/core/logger.py @@ -27,6 +27,7 @@ reverseNames = {u'ERROR': ERROR, u'POSTPROCESS': POSTPROCESS, u'DB': DB} + class NTMRotatingLogHandler(object): def __init__(self, log_file, num_files, num_bytes): self.num_files = num_files @@ -68,7 +69,7 @@ class NTMRotatingLogHandler(object): if self.cur_handler: old_handler = self.cur_handler else: - #Add a new logging levels + # Add a new logging levels logging.addLevelName(21, 'POSTPROCESS') logging.addLevelName(5, 'DB') @@ -85,7 +86,7 @@ class NTMRotatingLogHandler(object): {'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'), 'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'), 'db': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S') - }, + }, logging.Formatter('%(message)s'), )) # add the handler to the root logger @@ -122,7 +123,7 @@ class NTMRotatingLogHandler(object): {'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'), 'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'), 'db': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S') - }, + }, logging.Formatter('%(message)s'), )) return file_handler @@ -234,6 +235,7 @@ class NTMRotatingLogHandler(object): else: sys.exit(1) + class DispatchingFormatter: def __init__(self, formatters, default_formatter): self._formatters = formatters @@ -243,31 +245,41 @@ class DispatchingFormatter: formatter = self._formatters.get(record.name, self._default_formatter) return 
formatter.format(record) + ntm_log_instance = NTMRotatingLogHandler(core.LOG_FILE, NUM_LOGS, LOG_SIZE) + def log(toLog, logLevel=MESSAGE, section='MAIN'): ntm_log_instance.log(toLog, logLevel, section) + def info(toLog, section='MAIN'): log(toLog, MESSAGE, section) + def error(toLog, section='MAIN'): log(toLog, ERROR, section) + def warning(toLog, section='MAIN'): log(toLog, WARNING, section) + def debug(toLog, section='MAIN'): log(toLog, DEBUG, section) + def postprocess(toLog, section='POSTPROCESS'): log(toLog, POSTPROCESS, section) + def db(toLog, section='DB'): log(toLog, DB, section) + def log_error_and_exit(error_msg): ntm_log_instance.log_error_and_exit(error_msg) + def close(): ntm_log_instance.close_log() diff --git a/core/nzbToMediaAutoFork.py b/core/nzbToMediaAutoFork.py index f5140228..d1c122c1 100644 --- a/core/nzbToMediaAutoFork.py +++ b/core/nzbToMediaAutoFork.py @@ -4,6 +4,7 @@ import core import requests from core import logger + def autoFork(section, inputCategory): # auto-detect correct section # config settings @@ -49,13 +50,13 @@ def autoFork(section, inputCategory): detected = False if section == "NzbDrone": logger.info("Attempting to verify %s fork" % inputCategory) - url = "%s%s:%s%s/api/rootfolder" % (protocol,host,port,web_root) - headers={"X-Api-Key": apikey} + url = "%s%s:%s%s/api/rootfolder" % (protocol, host, port, web_root) + headers = {"X-Api-Key": apikey} try: r = requests.get(url, headers=headers, stream=True, verify=False) except requests.ConnectionError: logger.warning("Could not connect to %s:%s to verify fork!" % (section, inputCategory)) - + if not r.ok: logger.warning("Connection to %s:%s failed! Check your configuration" % (section, inputCategory)) @@ -67,12 +68,12 @@ def autoFork(section, inputCategory): logger.info("Attempting to auto-detect %s fork" % inputCategory) # define the order to test. Default must be first since the default fork doesn't reject parameters. # then in order of most unique parameters. 
- url = "%s%s:%s%s/home/postprocess/" % (protocol,host,port,web_root) + url = "%s%s:%s%s/home/postprocess/" % (protocol, host, port, web_root) # attempting to auto-detect fork try: if username and password: s = requests.Session() - login = "%s%s:%s%s/login" % (protocol,host,port,web_root) + login = "%s%s:%s%s/login" % (protocol, host, port, web_root) login_params = {'username': username, 'password': password} s.post(login, data=login_params, stream=True, verify=False) r = s.get(url, auth=(username, password), verify=False) @@ -83,10 +84,10 @@ def autoFork(section, inputCategory): r = [] if r and r.ok: for param in params: - if not 'name="%s"' %(param) in r.text: + if not 'name="%s"' % (param) in r.text: rem_params.append(param) for param in rem_params: - params.pop(param) + params.pop(param) for fork in sorted(core.FORKS.iteritems(), reverse=False): if params == fork[1]: detected = True @@ -101,4 +102,4 @@ def autoFork(section, inputCategory): fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)] logger.info("%s:%s fork set to %s" % (section, inputCategory, fork[0])) - return fork[0], fork[1] \ No newline at end of file + return fork[0], fork[1] diff --git a/core/nzbToMediaConfig.py b/core/nzbToMediaConfig.py index 40a48b2b..e0bb8172 100644 --- a/core/nzbToMediaConfig.py +++ b/core/nzbToMediaConfig.py @@ -8,13 +8,15 @@ from core import logger from itertools import chain + class Section(configobj.Section): def isenabled(section): # checks if subsection enabled, returns true/false if subsection specified otherwise returns true/false in {} if not section.sections: try: value = list(ConfigObj.find_key(section, 'enabled'))[0] - except:value = 0 + except: + value = 0 if int(value) == 1: return section else: @@ -23,7 +25,8 @@ class Section(configobj.Section): for subsection in subsections: try: value = list(ConfigObj.find_key(subsections, 'enabled'))[0] - except:value = 0 + except: + value = 0 if int(value) != 1: del to_return[section_name][subsection] @@ 
-39,7 +42,8 @@ class Section(configobj.Section): for subsection in to_return: try: value = list(ConfigObj.find_key(to_return[subsection], key))[0] - except:value = None + except: + value = None if not value: del to_return[subsection] @@ -80,6 +84,7 @@ class Section(configobj.Section): return to_return + class ConfigObj(configobj.ConfigObj, Section): def __init__(self, *args, **kw): if len(args) == 0: @@ -190,7 +195,8 @@ class ConfigObj(configobj.ConfigObj, Section): if not list(ConfigObj.find_key(CFG_NEW, option)): try: values.pop(option) - except: pass + except: + pass return values @@ -221,7 +227,7 @@ class ConfigObj(configobj.ConfigObj, Section): subsection = None if section in list(chain.from_iterable(subsections.values())): subsection = section - section = ''.join([k for k,v in subsections.iteritems() if subsection in v]) + section = ''.join([k for k, v in subsections.iteritems() if subsection in v]) process_section(section, subsection) elif section in subsections.keys(): subsection = subsections[section] @@ -247,7 +253,8 @@ class ConfigObj(configobj.ConfigObj, Section): try: if os.environ.has_key('NZBPO_NDCATEGORY') and os.environ.has_key('NZBPO_SBCATEGORY'): if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']: - logger.warning("%s category is set for SickBeard and NzbDrone. Please check your config in NZBGet" % (os.environ['NZBPO_NDCATEGORY'])) + logger.warning("%s category is set for SickBeard and NzbDrone. 
" + "Please check your config in NZBGet" % (os.environ['NZBPO_NDCATEGORY'])) section = "Nzb" key = 'NZBOP_DESTDIR' @@ -274,12 +281,14 @@ class ConfigObj(configobj.ConfigObj, Section): if os.environ.has_key(key): option = cfgKeys[index] value = os.environ[key] - CFG_NEW[section][option] = value + CFG_NEW[section][option] = value section = "CouchPotato" envCatKey = 'NZBPO_CPSCATEGORY' - envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', 'WAIT_FOR', 'WATCH_DIR'] - cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path', 'wait_for', 'watch_dir'] + envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', + 'WAIT_FOR', 'WATCH_DIR'] + cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path', + 'wait_for', 'watch_dir'] if os.environ.has_key(envCatKey): for index in range(len(envKeys)): key = 'NZBPO_CPS' + envKeys[index] @@ -293,8 +302,10 @@ class ConfigObj(configobj.ConfigObj, Section): section = "SickBeard" envCatKey = 'NZBPO_SBCATEGORY' - envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD'] - cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method'] + envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', + 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD'] + cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', + 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method'] if os.environ.has_key(envCatKey): for index in range(len(envKeys)): key = 
'NZBPO_SB' + envKeys[index] @@ -325,8 +336,10 @@ class ConfigObj(configobj.ConfigObj, Section): section = "Mylar" envCatKey = 'NZBPO_MYCATEGORY' - envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH'] - cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir', 'remote_path'] + envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR', + 'REMOTE_PATH'] + cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir', + 'remote_path'] if os.environ.has_key(envCatKey): for index in range(len(envKeys)): key = 'NZBPO_MY' + envKeys[index] @@ -355,8 +368,10 @@ class ConfigObj(configobj.ConfigObj, Section): section = "NzbDrone" envCatKey = 'NZBPO_NDCATEGORY' - envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH'] - cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path'] + envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', + 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH'] + cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', + 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path'] if os.environ.has_key(envCatKey): for index in range(len(envKeys)): key = 'NZBPO_ND' + envKeys[index] @@ -391,16 +406,26 @@ class ConfigObj(configobj.ConfigObj, Section): CFG_NEW[section][option] = value section = "Transcoder" - envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH', 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 
'SUBLANGUAGES', - 'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR', 'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW', - 'OUTPUTVIDEOPRESET', 'OUTPUTVIDEOFRAMERATE', 'OUTPUTVIDEOBITRATE', 'OUTPUTAUDIOCODEC', 'AUDIOCODECALLOW', 'OUTPUTAUDIOBITRATE', 'OUTPUTQUALITYPERCENT', 'GETSUBS', - 'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE', 'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE', - 'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS', 'OUTPUTAUDIOOTHERCHANNELS'] - cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath', 'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages', - 'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir', 'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow', - 'outputVideoPreset', 'outputVideoFramerate', 'outputVideoBitrate', 'outputAudioCodec', 'AudioCodecAllow', 'outputAudioBitrate', 'outputQualityPercent', 'getSubs', - 'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate', 'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate', - 'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels', 'outputAudioOtherChannels'] + envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH', + 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES', + 'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR', + 'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW', + 'OUTPUTVIDEOPRESET', 'OUTPUTVIDEOFRAMERATE', 'OUTPUTVIDEOBITRATE', 'OUTPUTAUDIOCODEC', + 'AUDIOCODECALLOW', 'OUTPUTAUDIOBITRATE', 'OUTPUTQUALITYPERCENT', 'GETSUBS', + 'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE', + 'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 
'OUTPUTAUDIOOTHERBITRATE', + 'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS', + 'OUTPUTAUDIOOTHERCHANNELS'] + cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath', + 'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages', + 'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir', + 'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow', + 'outputVideoPreset', 'outputVideoFramerate', 'outputVideoBitrate', 'outputAudioCodec', + 'AudioCodecAllow', 'outputAudioBitrate', 'outputQualityPercent', 'getSubs', + 'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate', + 'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate', + 'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels', + 'outputAudioOtherChannels'] for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] if os.environ.has_key(key): @@ -420,8 +445,10 @@ class ConfigObj(configobj.ConfigObj, Section): section = "UserScript" envCatKey = 'NZBPO_USCATEGORY' - envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE', 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH'] - cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce', 'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path'] + envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE', + 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH'] + cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce', + 'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path'] if os.environ.has_key(envCatKey): for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] @@ -441,10 +468,11 @@ class 
ConfigObj(configobj.ConfigObj, Section): CFG_NEW.filename = core.CONFIG_FILE CFG_NEW.write() except Exception, e: - logger.debug("Error %s when writing changes to .cfg" % (e)) + logger.debug("Error %s when writing changes to .cfg" % (e)) return CFG_NEW + configobj.Section = Section configobj.ConfigObj = ConfigObj config = ConfigObj diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index 9ca856d5..a34b8662 100644 --- a/core/nzbToMediaDB.py +++ b/core/nzbToMediaDB.py @@ -8,6 +8,7 @@ import time import core from core import logger + def dbFilename(filename="nzbtomedia.db", suffix=None): """ @param filename: The sqlite database filename to use. If not specified, @@ -153,7 +154,6 @@ class DBConnection: return sqlResult - def select(self, query, args=None): sqlResults = self.action(query, args).fetchall() @@ -244,7 +244,7 @@ class SchemaUpgrade(object): self.connection = connection def hasTable(self, tableName): - return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, )).fetchall()) > 0 + return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName,)).fetchall()) > 0 def hasColumn(self, tableName, column): return column in self.connection.tableInfo(tableName) @@ -264,4 +264,3 @@ class SchemaUpgrade(object): new_version = self.checkDBVersion() + 1 self.connection.action("UPDATE db_version SET db_version = ?", [new_version]) return new_version - diff --git a/core/nzbToMediaSceneExceptions.py b/core/nzbToMediaSceneExceptions.py index 7eedd7a0..b37ec268 100644 --- a/core/nzbToMediaSceneExceptions.py +++ b/core/nzbToMediaSceneExceptions.py @@ -2,23 +2,28 @@ import os import re import core -import shlex +import shlex from core import logger from core.nzbToMediaUtil import listMediaFiles -reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.", r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.", - r"\.ld[.-]?bew\.", r"\.pir.?(dov|dvd|bew|db|rb)\.", 
r"\brdvd\.", r"\.vts\.", r"\.reneercs\.", r"\.dcv\.", r"\b(pir|mac)dh\b", r"\.reporp\.", r"\.kcaper\.", +reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.", + r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.", + r"\.ld[.-]?bew\.", r"\.pir.?(dov|dvd|bew|db|rb)\.", r"\brdvd\.", r"\.vts\.", r"\.reneercs\.", + r"\.dcv\.", r"\b(pir|mac)dh\b", r"\.reporp\.", r"\.kcaper\.", r"\.lanretni\.", r"\b3ca\b", r"\.cstn\."] reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE) season_pattern = re.compile(r"(.*\.\d{2}e\d{2}s\.)(.*)", flags=re.IGNORECASE) word_pattern = re.compile(r"([^A-Z0-9]*[A-Z0-9]+)") -media_list = [r"\.s\d{2}e\d{2}\.", r"\.1080[pi]\.", r"\.720p\.", r"\.576[pi]", r"\.480[pi]\.", r"\.360p\.", r"\.[xh]26[45]\b", r"\.bluray\.", r"\.[hp]dtv\.", - r"\.web[.-]?dl\.", r"\.(vod|dvd|web|bd|br).?rip\.", r"\.dvdr\b", r"\.stv\.", r"\.screener\.", r"\.vcd\.", r"\bhd(cam|rip)\b", r"\.proper\.", r"\.repack\.", +media_list = [r"\.s\d{2}e\d{2}\.", r"\.1080[pi]\.", r"\.720p\.", r"\.576[pi]", r"\.480[pi]\.", r"\.360p\.", + r"\.[xh]26[45]\b", r"\.bluray\.", r"\.[hp]dtv\.", + r"\.web[.-]?dl\.", r"\.(vod|dvd|web|bd|br).?rip\.", r"\.dvdr\b", r"\.stv\.", r"\.screener\.", r"\.vcd\.", + r"\bhd(cam|rip)\b", r"\.proper\.", r"\.repack\.", r"\.internal\.", r"\bac3\b", r"\.ntsc\.", r"\.pal\.", r"\.secam\.", r"\bdivx\b", r"\bxvid\b"] media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE) garbage_name = re.compile(r"^[a-zA-Z0-9]*$") -char_replace = [[r"(\w)1\.(\w)",r"\1i\2"] -] +char_replace = [[r"(\w)1\.(\w)", r"\1i\2"] + ] + def process_all_exceptions(name, dirname): rename_script(dirname) @@ -27,7 +32,7 @@ def process_all_exceptions(name, dirname): parentDir = os.path.dirname(filename) head, fileExtension = os.path.splitext(os.path.basename(filename)) if reverse_pattern.search(head) is not None: - exception = reverse_filename + exception = reverse_filename elif garbage_name.search(head) is not 
None: exception = replace_filename else: @@ -38,7 +43,8 @@ def process_all_exceptions(name, dirname): if core.GROUPS: newfilename = strip_groups(newfilename) if newfilename != filename: - rename_file(filename, newfilename) + rename_file(filename, newfilename) + def strip_groups(filename): if not core.GROUPS: @@ -48,33 +54,36 @@ def strip_groups(filename): newname = head.replace(' ', '.') for group in core.GROUPS: newname = newname.replace(group, '') - newname = newname.replace('[]', '') + newname = newname.replace('[]', '') newfile = newname + fileExtension newfilePath = os.path.join(dirname, newfile) return newfilePath + def rename_file(filename, newfilePath): logger.debug("Replacing file name %s with download name %s" % (filename, newfilePath), "EXCEPTION") try: os.rename(filename, newfilePath) - except Exception,e: + except Exception, e: logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION") + def replace_filename(filename, dirname, name): head, fileExtension = os.path.splitext(os.path.basename(filename)) - if media_pattern.search(os.path.basename(dirname).replace(' ','.')) is not None: + if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None: newname = os.path.basename(dirname).replace(' ', '.') logger.debug("Replacing file name %s with directory name %s" % (head, newname), "EXCEPTION") - elif media_pattern.search(name.replace(' ','.').lower()) is not None: + elif media_pattern.search(name.replace(' ', '.').lower()) is not None: newname = name.replace(' ', '.') logger.debug("Replacing file name %s with download name %s" % (head, newname), "EXCEPTION") else: logger.warning("No name replacement determined for %s" % (head), "EXCEPTION") - newname = name + newname = name newfile = newname + fileExtension newfilePath = os.path.join(dirname, newfile) return newfilePath + def reverse_filename(filename, dirname, name): head, fileExtension = os.path.splitext(os.path.basename(filename)) na_parts = season_pattern.search(head) @@ 
-85,11 +94,11 @@ def reverse_filename(filename, dirname, name): for wp in word_p: if wp[0] == ".": new_words += "." - new_words += re.sub(r"\W","",wp) + new_words += re.sub(r"\W", "", wp) else: new_words = na_parts.group(2) for cr in char_replace: - new_words = re.sub(cr[0],cr[1],new_words) + new_words = re.sub(cr[0], cr[1], new_words) newname = new_words[::-1] + na_parts.group(1)[::-1] else: newname = head[::-1].title() @@ -99,15 +108,16 @@ def reverse_filename(filename, dirname, name): newfilePath = os.path.join(dirname, newfile) return newfilePath + def rename_script(dirname): rename_file = "" for dir, dirs, files in os.walk(dirname): for file in files: - if re.search('(rename\S*\.(sh|bat)$)',file,re.IGNORECASE): + if re.search('(rename\S*\.(sh|bat)$)', file, re.IGNORECASE): rename_file = os.path.join(dir, file) dirname = dir break - if rename_file: + if rename_file: rename_lines = [line.strip() for line in open(rename_file)] for line in rename_lines: if re.search('^(mv|Move)', line, re.IGNORECASE): @@ -122,10 +132,9 @@ def rename_script(dirname): logger.debug("Renaming file %s to %s" % (orig, dest), "EXCEPTION") try: os.rename(orig, dest) - except Exception,e: + except Exception, e: logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION") # dict for custom groups # we can add more to this list -#__customgroups__ = {'Q o Q': process_qoq, '-ECI': process_eci} - +# _customgroups = {'Q o Q': process_qoq, '-ECI': process_eci} diff --git a/core/nzbToMediaUserScript.py b/core/nzbToMediaUserScript.py index 6acc0169..23c3c5de 100644 --- a/core/nzbToMediaUserScript.py +++ b/core/nzbToMediaUserScript.py @@ -6,12 +6,14 @@ from core.transcoder import transcoder from core.nzbToMediaUtil import import_subs, listMediaFiles, rmDir from core import logger + def external_script(outputDestination, torrentName, torrentLabel, settings): final_result = 0 # start at 0. 
num_files = 0 try: core.USER_SCRIPT_MEDIAEXTENSIONS = settings["user_script_mediaExtensions"] - if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str): core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',') + if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str): + core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',') except: core.USER_SCRIPT_MEDIAEXTENSIONS = [] try: @@ -22,12 +24,14 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): return [0, ""] try: core.USER_SCRIPT_PARAM = settings["user_script_param"] - if isinstance(core.USER_SCRIPT_PARAM, str): core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',') + if isinstance(core.USER_SCRIPT_PARAM, str): + core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',') except: core.USER_SCRIPT_PARAM = [] try: core.USER_SCRIPT_SUCCESSCODES = settings["user_script_successCodes"] - if isinstance(core.USER_SCRIPT_SUCCESSCODES, str): core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',') + if isinstance(core.USER_SCRIPT_SUCCESSCODES, str): + core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',') except: core.USER_SCRIPT_SUCCESSCODES = 0 try: diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 53bdf27a..3899766d 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -14,7 +14,7 @@ import beets import requests import core from babelfish import Language -import subliminal +import subliminal from core.extractor import extractor from core.linktastic import linktastic @@ -25,13 +25,14 @@ from core import logger, nzbToMediaDB requests.packages.urllib3.disable_warnings() + def reportNzb(failure_link, clientAgent): # Contact indexer site logger.info("Sending failure notification to indexer site") if clientAgent == 'nzbget': - headers = {'User-Agent' : 'NZBGet / nzbToMedia.py'} + headers = {'User-Agent': 'NZBGet / nzbToMedia.py'} elif clientAgent == 'sabnzbd': - headers = {'User-Agent' : 'SABnzbd 
/ nzbToMedia.py'} + headers = {'User-Agent': 'SABnzbd / nzbToMedia.py'} else: return try: @@ -40,8 +41,9 @@ def reportNzb(failure_link, clientAgent): logger.error("Unable to open URL %s due to %s" % (failure_link, e)) return + def sanitizeName(name): - ''' + """ >>> sanitizeName('a/b/c') 'a-b-c' >>> sanitizeName('abc') @@ -50,7 +52,7 @@ def sanitizeName(name): 'ab' >>> sanitizeName('.a.b..') 'a.b' - ''' + """ # remove bad chars from the filename name = re.sub(r'[\\\/*]', '-', name) @@ -60,10 +62,12 @@ def sanitizeName(name): name = name.strip(' .') try: name = name.encode(core.SYS_ENCODING) - except: pass + except: + pass return name - + + def makeDir(path): if not os.path.isdir(path): try: @@ -72,12 +76,13 @@ def makeDir(path): return False return True + def remoteDir(path): if not core.REMOTEPATHS: return path - for local,remote in core.REMOTEPATHS: + for local, remote in core.REMOTEPATHS: if local in path: - base_dirs = path.replace(local,"").split(os.sep) + base_dirs = path.replace(local, "").split(os.sep) if '/' in remote: remote_sep = '/' else: @@ -89,22 +94,25 @@ def remoteDir(path): return new_path return path + def category_search(inputDirectory, inputName, inputCategory, root, categories): tordir = False try: inputName = inputName.encode(core.SYS_ENCODING) - except: pass + except: + pass try: inputDirectory = inputDirectory.encode(core.SYS_ENCODING) - except: pass + except: + pass if inputDirectory is None: # =Nothing to process here. 
return inputDirectory, inputName, inputCategory, root pathlist = os.path.normpath(inputDirectory).split(os.sep) - if inputCategory and inputCategory in pathlist: + if inputCategory and inputCategory in pathlist: logger.debug("SEARCH: Found the Category: %s in directory structure" % (inputCategory)) elif inputCategory: logger.debug("SEARCH: Could not find the category: %s in the directory structure" % (inputCategory)) @@ -116,7 +124,8 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): inputCategory = "" logger.debug("SEARCH: Could not find a category in the directory structure") if not os.path.isdir(inputDirectory) and os.path.isfile(inputDirectory): # If the input directory is a file - if not inputName: inputName = os.path.split(os.path.normpath(inputDirectory))[1] + if not inputName: + inputName = os.path.split(os.path.normpath(inputDirectory))[1] return inputDirectory, inputName, inputCategory, root if inputCategory and os.path.isdir(os.path.join(inputDirectory, inputCategory)): @@ -158,7 +167,8 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): if index + 1 < len(pathlist): tordir = True logger.info("SEARCH: Found a unique directory %s in the category directory" % (pathlist[index + 1])) - if not inputName: inputName = pathlist[index + 1] + if not inputName: + inputName = pathlist[index + 1] except ValueError: pass @@ -177,15 +187,17 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): return inputDirectory, inputName, inputCategory, root + def getDirSize(inputPath): - from functools import partial - prepend = partial(os.path.join, inputPath) - return sum([(os.path.getsize(f) if os.path.isfile(f) else getDirSize(f)) for f in map(prepend, os.listdir(inputPath))]) + from functools import partial + prepend = partial(os.path.join, inputPath) + return sum( + [(os.path.getsize(f) if os.path.isfile(f) else getDirSize(f)) for f in map(prepend, os.listdir(inputPath))]) + def 
is_minSize(inputName, minSize): fileName, fileExt = os.path.splitext(os.path.basename(inputName)) - # audio files we need to check directory size not file size inputSize = os.path.getsize(inputName) if fileExt in (core.AUDIOCONTAINER): @@ -199,11 +211,13 @@ def is_minSize(inputName, minSize): if inputSize > minSize * 1048576: return True + def is_sample(inputName): # Ignore 'sample' in files if re.search('(^|[\W_])sample\d*[\W_]', inputName.lower()): return True + def copy_link(src, targetLink, useLink): logger.info("MEDIAFILE: [%s]" % (os.path.basename(targetLink)), 'COPYLINK') logger.info("SOURCE FOLDER: [%s]" % (os.path.dirname(src)), 'COPYLINK') @@ -254,6 +268,7 @@ def copy_link(src, targetLink, useLink): return True + def replace_links(link): n = 0 target = link @@ -277,6 +292,7 @@ def replace_links(link): os.unlink(link) linktastic.symlink(target, link) + def flatten(outputDestination): logger.info("FLATTEN: Flattening directory: %s" % (outputDestination)) for outputFile in listMediaFiles(outputDestination): @@ -295,29 +311,31 @@ def flatten(outputDestination): removeEmptyFolders(outputDestination) # Cleanup empty directories + def removeEmptyFolders(path, removeRoot=True): - 'Function to remove empty folders' - if not os.path.isdir(path): - return + """Function to remove empty folders""" + if not os.path.isdir(path): + return - # remove empty subfolders - logger.debug("Checking for empty folders in:%s" % (path)) - files = os.listdir(path) - if len(files): - for f in files: - fullpath = os.path.join(path, f) - if os.path.isdir(fullpath): - removeEmptyFolders(fullpath) + # remove empty subfolders + logger.debug("Checking for empty folders in:%s" % (path)) + files = os.listdir(path) + if len(files): + for f in files: + fullpath = os.path.join(path, f) + if os.path.isdir(fullpath): + removeEmptyFolders(fullpath) + + # if folder empty, delete it + files = os.listdir(path) + if len(files) == 0 and removeRoot: + logger.debug("Removing empty folder:%s" % (path)) + 
os.rmdir(path) - # if folder empty, delete it - files = os.listdir(path) - if len(files) == 0 and removeRoot: - logger.debug("Removing empty folder:%s" % (path)) - os.rmdir(path) def rmReadOnly(filename): if os.path.isfile(filename): - #check first the read-only attribute + # check first the read-only attribute file_attribute = os.stat(filename)[0] if (not file_attribute & stat.S_IWRITE): # File is read-only, so make it writeable @@ -327,7 +345,8 @@ def rmReadOnly(filename): except: logger.warning('Cannot change permissions of ' + filename, logger.WARNING) -#Wake function + +# Wake function def WakeOnLan(ethernet_address): addr_byte = ethernet_address.split(':') hw_addr = struct.pack(b'BBBBBB', int(addr_byte[0], 16), @@ -349,7 +368,7 @@ def WakeOnLan(ethernet_address): ss.close() -#Test Connection function +# Test Connection function def TestCon(host, port): try: socket.create_connection((host, port)) @@ -372,7 +391,7 @@ def WakeUp(): if TestCon(host, port) == "Down": # final check. logger.warning("System with mac: %s has not woken after 3 attempts. Continuing with the rest of the script." % ( - mac)) + mac)) else: logger.info("System with mac: %s has been woken. Continuing with the rest of the script." % (mac)) @@ -392,7 +411,8 @@ def CharReplace(Name): # /!\ detection is done 2char by 2char for UTF-8 special character if (len(Name) != 1) & (Idx < (len(Name) - 1)): # Detect UTF-8 - if ((Name[Idx] == '\xC2') | (Name[Idx] == '\xC3')) & ((Name[Idx+1] >= '\xA0') & (Name[Idx+1] <= '\xFF')): + if ((Name[Idx] == '\xC2') | (Name[Idx] == '\xC3')) & ( + (Name[Idx + 1] >= '\xA0') & (Name[Idx + 1] <= '\xFF')): encoding = 'utf-8' break # Detect CP850 @@ -433,7 +453,7 @@ def convert_to_ascii(inputName, dirName): if encoded: dirName = os.path.join(dir, base2) logger.info("Renaming directory to: %s." 
% (base2), 'ENCODER') - os.rename(os.path.join(dir,base), dirName) + os.rename(os.path.join(dir, base), dirName) if os.environ.has_key('NZBOP_SCRIPTDIR'): print "[NZB] DIRECTORY=%s" % (dirName) # Return the new directory to NZBGet. @@ -576,23 +596,23 @@ def parse_args(clientAgent, args): return None, None, None, None, None -def getDirs(section, subsection, link = 'hard'): +def getDirs(section, subsection, link='hard'): to_return = [] def processDir(path): folders = [] logger.info("Searching %s for mediafiles to post-process ..." % (path)) - sync = [ o for o in os.listdir(path) if os.path.splitext(o)[1] in ['.!sync','.bts'] ] + sync = [o for o in os.listdir(path) if os.path.splitext(o)[1] in ['.!sync', '.bts']] # search for single files and move them into their own folder for post-processing - for mediafile in [ os.path.join(path, o) for o in os.listdir(path) if - os.path.isfile(os.path.join(path, o)) ]: + for mediafile in [os.path.join(path, o) for o in os.listdir(path) if + os.path.isfile(os.path.join(path, o))]: if len(sync) > 0: break if os.path.split(mediafile)[1] in ['Thumbs.db', 'thumbs.db']: continue try: - logger.debug("Found file %s in root directory %s." % (os.path.split(mediafile)[1], path)) + logger.debug("Found file %s in root directory %s." % (os.path.split(mediafile)[1], path)) newPath = None fileExt = os.path.splitext(mediafile)[1] try: @@ -627,8 +647,9 @@ def getDirs(section, subsection, link = 'hard'): newPath = os.path.join(path, sanitizeName(title)) try: - newPath = newPath.encode(core.SYS_ENCODING) - except: pass + newPath = newPath.encode(core.SYS_ENCODING) + except: + pass # Just fail-safe incase we already have afile with this clean-name (was actually a bug from earlier code, but let's be safe). 
if os.path.isfile(newPath): @@ -642,19 +663,20 @@ def getDirs(section, subsection, link = 'hard'): newfile = os.path.join(newPath, sanitizeName(os.path.split(mediafile)[1])) try: newfile = newfile.encode(core.SYS_ENCODING) - except: pass + except: + pass # link file to its new path copy_link(mediafile, newfile, link) except Exception as e: logger.error("Failed to move %s to its own directory: %s" % (os.path.split(mediafile)[1], e)) - #removeEmptyFolders(path, removeRoot=False) + # removeEmptyFolders(path, removeRoot=False) if os.listdir(path): for dir in [os.path.join(path, o) for o in os.listdir(path) if - os.path.isdir(os.path.join(path, o))]: - sync = [ o for o in os.listdir(dir) if os.path.splitext(o)[1] in ['.!sync','.bts'] ] + os.path.isdir(os.path.join(path, o))]: + sync = [o for o in os.listdir(dir) if os.path.splitext(o)[1] in ['.!sync', '.bts']] if len(sync) > 0 or len(os.listdir(dir)) == 0: continue folders.extend([dir]) @@ -667,7 +689,8 @@ def getDirs(section, subsection, link = 'hard'): elif os.path.exists(core.CFG[section][subsection]["watch_dir"]): to_return.extend(processDir(core.CFG[section][subsection]["watch_dir"])) except Exception as e: - logger.error("Failed to add directories from %s for post-processing: %s" % (core.CFG[section][subsection]["watch_dir"], e)) + logger.error("Failed to add directories from %s for post-processing: %s" % ( + core.CFG[section][subsection]["watch_dir"], e)) if core.USELINK == 'move': try: @@ -678,10 +701,11 @@ def getDirs(section, subsection, link = 'hard'): logger.error("Failed to add directories from %s for post-processing: %s" % (core.OUTPUTDIRECTORY, e)) if not to_return: - logger.debug("No directories identified in %s:%s for post-processing" % (section,subsection)) + logger.debug("No directories identified in %s:%s for post-processing" % (section, subsection)) return list(set(to_return)) + def onerror(func, path, exc_info): """ Error handler for ``shutil.rmtree``. 
@@ -700,6 +724,7 @@ def onerror(func, path, exc_info): else: raise + def rmDir(dirName): logger.info("Deleting %s" % (dirName)) try: @@ -707,6 +732,7 @@ def rmDir(dirName): except: logger.error("Unable to delete folder %s" % (dirName)) + def cleanDir(path, section, subsection): if not os.path.exists(path): logger.info('Directory %s has been processed and removed ...' % (path), 'CLEANDIR') @@ -717,10 +743,12 @@ def cleanDir(path, section, subsection): return try: minSize = int(core.CFG[section][subsection]['minSize']) - except:minSize = 0 + except: + minSize = 0 try: delete_ignored = int(core.CFG[section][subsection]['delete_ignored']) - except:delete_ignored = 0 + except: + delete_ignored = 0 try: num_files = len(listMediaFiles(path, minSize=minSize, delete_ignored=delete_ignored)) except: @@ -737,6 +765,7 @@ def cleanDir(path, section, subsection): except: logger.error("Unable to delete directory %s" % (path)) + def create_torrent_class(clientAgent): # Hardlink solution for Torrents tc = None @@ -753,8 +782,8 @@ def create_torrent_class(clientAgent): logger.debug("Connecting to %s: http://%s:%s" % ( clientAgent, core.TRANSMISSIONHOST, core.TRANSMISSIONPORT)) tc = TransmissionClient(core.TRANSMISSIONHOST, core.TRANSMISSIONPORT, - core.TRANSMISSIONUSR, - core.TRANSMISSIONPWD) + core.TRANSMISSIONUSR, + core.TRANSMISSIONPWD) except: logger.error("Failed to connect to Transmission") @@ -763,12 +792,13 @@ def create_torrent_class(clientAgent): logger.debug("Connecting to %s: http://%s:%s" % (clientAgent, core.DELUGEHOST, core.DELUGEPORT)) tc = DelugeClient() tc.connect(host=core.DELUGEHOST, port=core.DELUGEPORT, username=core.DELUGEUSR, - password=core.DELUGEPWD) + password=core.DELUGEPWD) except: logger.error("Failed to connect to Deluge") return tc + def pause_torrent(clientAgent, inputHash, inputID, inputName): logger.debug("Stopping torrent %s in %s while processing" % (inputName, clientAgent)) try: @@ -782,6 +812,7 @@ def pause_torrent(clientAgent, inputHash, 
inputID, inputName): except: logger.warning("Failed to stop torrent %s in %s" % (inputName, clientAgent)) + def resume_torrent(clientAgent, inputHash, inputID, inputName): if not core.TORRENT_RESUME == 1: return @@ -797,6 +828,7 @@ def resume_torrent(clientAgent, inputHash, inputID, inputName): except: logger.warning("Failed to start torrent %s in %s" % (inputName, clientAgent)) + def remove_torrent(clientAgent, inputHash, inputID, inputName): if core.DELETE_ORIGINAL == 1 or core.USELINK == 'move': logger.debug("Deleting torrent %s from %s" % (inputName, clientAgent)) @@ -811,9 +843,10 @@ def remove_torrent(clientAgent, inputHash, inputID, inputName): time.sleep(5) except: logger.warning("Failed to delete torrent %s in %s" % (inputName, clientAgent)) - else: + else: resume_torrent(clientAgent, inputHash, inputID, inputName) + def find_download(clientAgent, download_id): logger.debug("Searching for Download on %s ..." % (clientAgent)) if clientAgent == 'utorrent': @@ -851,6 +884,7 @@ def find_download(clientAgent, download_id): return True return False + def get_nzoid(inputName): nzoid = None slots = [] @@ -923,6 +957,7 @@ def is_archive_file(filename): return regext.split(filename)[0] return False + def isMediaFile(mediafile, media=True, audio=True, meta=True, archives=True): fileName, fileExt = os.path.splitext(mediafile) @@ -933,17 +968,18 @@ def isMediaFile(mediafile, media=True, audio=True, meta=True, archives=True): except: pass - if (media and fileExt.lower() in core.MEDIACONTAINER)\ - or (audio and fileExt.lower() in core.AUDIOCONTAINER)\ - or (meta and fileExt.lower() in core.METACONTAINER)\ - or (archives and is_archive_file(mediafile)): + if (media and fileExt.lower() in core.MEDIACONTAINER) \ + or (audio and fileExt.lower() in core.AUDIOCONTAINER) \ + or (meta and fileExt.lower() in core.METACONTAINER) \ + or (archives and is_archive_file(mediafile)): return True else: return False + def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, 
audio=True, meta=True, archives=True): files = [] - if not os.path.isdir(path): + if not os.path.isdir(path): if os.path.isfile(path): # Single file downloads. curFile = os.path.split(path)[1] if isMediaFile(curFile, media, audio, meta, archives): @@ -953,7 +989,8 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me try: os.unlink(path) logger.debug('Ignored file %s has been removed ...' % (curFile)) - except:pass + except: + pass else: files.append(path) @@ -973,12 +1010,14 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me try: os.unlink(fullCurFile) logger.debug('Ignored file %s has been removed ...' % (curFile)) - except:pass + except: + pass continue files.append(fullCurFile) - return sorted(files,key=len) + return sorted(files, key=len) + def find_imdbid(dirName, inputName): imdbid = None @@ -987,7 +1026,7 @@ def find_imdbid(dirName, inputName): # find imdbid in dirName logger.info('Searching folder and file names for imdbID ...') - m = re.search('(tt\d{7})', dirName+inputName) + m = re.search('(tt\d{7})', dirName + inputName) if m: imdbid = m.group(1) logger.info("Found imdbID [%s]" % imdbid) @@ -1000,14 +1039,14 @@ def find_imdbid(dirName, inputName): logger.info("Found imdbID [%s] via file name" % imdbid) return imdbid if os.environ.has_key('NZBPR__DNZB_MOREINFO'): - dnzb_more_info=os.environ.get('NZBPR__DNZB_MOREINFO', '') + dnzb_more_info = os.environ.get('NZBPR__DNZB_MOREINFO', '') if dnzb_more_info != '': regex = re.compile(r'^http://www.imdb.com/title/(tt[0-9]+)/$', re.IGNORECASE) m = regex.match(dnzb_more_info) if m: imdbid = m.group(1) logger.info("Found imdbID [%s] from DNZB-MoreInfo" % imdbid) - return imdbid + return imdbid logger.info('Searching IMDB for imdbID ...') guess = guessit.guess_movie_info(inputName) if guess: @@ -1045,7 +1084,8 @@ def find_imdbid(dirName, inputName): logger.warning('Unable to find a imdbID for %s' % (inputName)) return imdbid -def extractFiles(src, 
dst=None, keep_archive = None): + +def extractFiles(src, dst=None, keep_archive=None): extracted_folder = [] extracted_archive = [] @@ -1081,13 +1121,14 @@ def extractFiles(src, dst=None, keep_archive = None): except Exception as e: logger.error("Unable to remove file %s due to: %s" % (inputFile, e)) + def import_subs(filename): if not core.GETSUBS: return try: subliminal.cache_region.configure('dogpile.cache.memory') except: - pass + pass languages = set() for item in core.SLANGUAGES: @@ -1098,13 +1139,14 @@ def import_subs(filename): if not languages: return - logger.debug("Attempting to download subtitles for %s" %(filename), 'SUBTITLES') + logger.debug("Attempting to download subtitles for %s" % (filename), 'SUBTITLES') try: video = subliminal.scan_video(filename, subtitles=True, embedded_subtitles=True) subtitles = subliminal.download_best_subtitles([video], languages, hearing_impaired=False) subliminal.save_subtitles(subtitles) except Exception as e: - logger.error("Failed to download subtitles for %s due to: %s" %(filename, e), 'SUBTITLES') + logger.error("Failed to download subtitles for %s due to: %s" % (filename, e), 'SUBTITLES') + def server_responding(baseURL): try: @@ -1113,6 +1155,7 @@ def server_responding(baseURL): except (requests.ConnectionError, requests.exceptions.Timeout): return False + def plex_update(category): if core.FAILED: return @@ -1124,7 +1167,7 @@ def plex_update(category): section = None if not core.PLEXSEC: return - logger.debug("Attempting to update Plex Library for category %s." %(category), 'PLEX') + logger.debug("Attempting to update Plex Library for category %s." 
% (category), 'PLEX') for item in core.PLEXSEC: if item[0] == category: section = item[1] @@ -1136,6 +1179,7 @@ def plex_update(category): else: logger.debug("Could not identify section for plex update", 'PLEX') + def backupVersionedFile(old_file, version): numTries = 0 @@ -1152,7 +1196,8 @@ def backupVersionedFile(old_file, version): logger.log(u"Backup done", logger.DEBUG) break except Exception, e: - logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + str(e), logger.WARNING) + logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + str(e), + logger.WARNING) numTries += 1 time.sleep(1) logger.log(u"Trying again.", logger.DEBUG) @@ -1181,6 +1226,7 @@ def get_downloadInfo(inputName, status): return sqlResults + class RunningProcess(): """ Limits application to single instance """ @@ -1193,13 +1239,13 @@ class RunningProcess(): def alreadyrunning(self): return self.process.alreadyrunning() - #def __del__(self): - # self.process.__del__() + # def __del__(self): + # self.process.__del__() + class WindowsProcess(): - def __init__(self): - self.mutexname = "nzbtomedia_" + core.PID_FILE.replace('\\','/') # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}" + self.mutexname = "nzbtomedia_" + core.PID_FILE.replace('\\', '/') # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}" if platform.system() == 'Windows': from win32event import CreateMutex from win32api import CloseHandle, GetLastError @@ -1208,7 +1254,7 @@ class WindowsProcess(): self.CloseHandle = CloseHandle self.GetLastError = GetLastError self.ERROR_ALREADY_EXISTS = ERROR_ALREADY_EXISTS - + def alreadyrunning(self): self.mutex = self.CreateMutex(None, 0, self.mutexname) self.lasterror = self.GetLastError() @@ -1217,14 +1263,13 @@ class WindowsProcess(): return True else: return False - def __del__(self): if self.mutex: self.CloseHandle(self.mutex) -class PosixProcess(): +class PosixProcess(): def __init__(self): self.pidpath = core.PID_FILE self.lock_socket = None @@ 
-1239,7 +1284,8 @@ class PosixProcess(): if "Address already in use" in e: self.lasterror = True return self.lasterror - except AttributeError: pass + except AttributeError: + pass if os.path.exists(self.pidpath): # Make sure it is not a "stale" pidFile try: @@ -1256,7 +1302,7 @@ class PosixProcess(): else: self.lasterror = False else: - self.lasterror = False + self.lasterror = False if not self.lasterror: # Write my pid into pidFile to keep multiple copies of program from running @@ -1264,7 +1310,8 @@ class PosixProcess(): fp = open(self.pidpath, 'w') fp.write(str(os.getpid())) fp.close() - except: pass + except: + pass return self.lasterror diff --git a/core/synchronousdeluge/client.py b/core/synchronousdeluge/client.py index b4228d83..af2c740c 100644 --- a/core/synchronousdeluge/client.py +++ b/core/synchronousdeluge/client.py @@ -8,10 +8,8 @@ from exceptions import DelugeRPCError from protocol import DelugeRPCRequest, DelugeRPCResponse from transfer import DelugeTransfer - __all__ = ["DelugeClient"] - RPC_RESPONSE = 1 RPC_ERROR = 2 RPC_EVENT = 3 @@ -31,7 +29,8 @@ class DelugeClient(object): appDataPath = os.environ.get("APPDATA") if not appDataPath: import _winreg - hkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders") + hkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, + "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders") appDataReg = _winreg.QueryValueEx(hkey, "AppData") appDataPath = appDataReg[0] _winreg.CloseKey(hkey) @@ -44,7 +43,6 @@ class DelugeClient(object): except OSError, e: return username, password - if os.path.exists(auth_file): for line in open(auth_file): if line.startswith("#"): @@ -108,20 +106,20 @@ class DelugeClient(object): message_type = message[0] -# if message_type == RPC_EVENT: -# event = message[1] -# values = message[2] -# -# if event in self._event_handlers: -# for handler in self._event_handlers[event]: -# gevent.spawn(handler, *values) -# -# 
elif message_type in (RPC_RESPONSE, RPC_ERROR): + # if message_type == RPC_EVENT: + # event = message[1] + # values = message[2] + # + # if event in self._event_handlers: + # for handler in self._event_handlers[event]: + # gevent.spawn(handler, *values) + # + # elif message_type in (RPC_RESPONSE, RPC_ERROR): if message_type in (RPC_RESPONSE, RPC_ERROR): request_id = message[1] value = message[2] - if request_id == self._request_counter : + if request_id == self._request_counter: if message_type == RPC_RESPONSE: response.set(value) elif message_type == RPC_ERROR: @@ -160,4 +158,3 @@ class DelugeClient(object): def disconnect(self): """Disconnects from the daemon.""" self.transfer.disconnect() - diff --git a/core/synchronousdeluge/exceptions.py b/core/synchronousdeluge/exceptions.py index ff622cb1..95bf7f04 100644 --- a/core/synchronousdeluge/exceptions.py +++ b/core/synchronousdeluge/exceptions.py @@ -1,6 +1,7 @@ # coding=utf-8 __all__ = ["DelugeRPCError"] + class DelugeRPCError(Exception): def __init__(self, name, msg, traceback): self.name = name @@ -9,4 +10,3 @@ class DelugeRPCError(Exception): def __str__(self): return "{0}: {1}: {2}".format(self.__class__.__name__, self.name, self.msg) - diff --git a/core/synchronousdeluge/protocol.py b/core/synchronousdeluge/protocol.py index 9af38b4d..2cb1a73e 100644 --- a/core/synchronousdeluge/protocol.py +++ b/core/synchronousdeluge/protocol.py @@ -1,6 +1,7 @@ # coding=utf-8 __all__ = ["DelugeRPCRequest", "DelugeRPCResponse"] + class DelugeRPCRequest(object): def __init__(self, request_id, method, *args, **kwargs): self.request_id = request_id @@ -11,6 +12,7 @@ class DelugeRPCRequest(object): def format(self): return (self.request_id, self.method, self.args, self.kwargs) + class DelugeRPCResponse(object): def __init__(self): self.value = None @@ -36,4 +38,3 @@ class DelugeRPCResponse(object): return self.value else: raise self._exception - diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py 
index 0f6ca1ec..655f903b 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -9,9 +9,9 @@ BitTorrent project. For complex, heterogeneous data structures with many small elements, r-encodings take up significantly less space than b-encodings: - >>> len(rencode.dumps({'a':0, 'b':[1,2], 'c':99})) + >>> len(rencode.dumps({'a': 0, 'b': [1, 2], 'c': 99})) 13 - >>> len(bencode.bencode({'a':0, 'b':[1,2], 'c':99})) + >>> len(bencode.bencode({'a': 0, 'b': [1, 2], 'c': 99})) 26 The rencode format is not standardized, and may change with different @@ -73,19 +73,19 @@ MAX_INT_LENGTH = 64 # The bencode 'typecodes' such as i, d, etc have been extended and # relocated on the base-256 character set. -CHR_LIST = chr(59) -CHR_DICT = chr(60) -CHR_INT = chr(61) -CHR_INT1 = chr(62) -CHR_INT2 = chr(63) -CHR_INT4 = chr(64) -CHR_INT8 = chr(65) +CHR_LIST = chr(59) +CHR_DICT = chr(60) +CHR_INT = chr(61) +CHR_INT1 = chr(62) +CHR_INT2 = chr(63) +CHR_INT4 = chr(64) +CHR_INT8 = chr(65) CHR_FLOAT32 = chr(66) CHR_FLOAT64 = chr(44) -CHR_TRUE = chr(67) -CHR_FALSE = chr(68) -CHR_NONE = chr(69) -CHR_TERM = chr(127) +CHR_TRUE = chr(67) +CHR_FALSE = chr(68) +CHR_NONE = chr(69) +CHR_TERM = chr(127) # Positive integers with value embedded in typecode. INT_POS_FIXED_START = 0 @@ -104,9 +104,10 @@ STR_FIXED_START = 128 STR_FIXED_COUNT = 64 # Lists with length embedded in typecode. 
-LIST_FIXED_START = STR_FIXED_START+STR_FIXED_COUNT +LIST_FIXED_START = STR_FIXED_START + STR_FIXED_COUNT LIST_FIXED_COUNT = 64 + def decode_int(x, f): f += 1 newf = x.index(CHR_TERM, f) @@ -119,35 +120,42 @@ def decode_int(x, f): if x[f] == '-': if x[f + 1] == '0': raise ValueError - elif x[f] == '0' and newf != f+1: + elif x[f] == '0' and newf != f + 1: raise ValueError - return (n, newf+1) + return (n, newf + 1) + def decode_intb(x, f): f += 1 - return (struct.unpack('!b', x[f:f+1])[0], f+1) + return (struct.unpack('!b', x[f:f + 1])[0], f + 1) + def decode_inth(x, f): f += 1 - return (struct.unpack('!h', x[f:f+2])[0], f+2) + return (struct.unpack('!h', x[f:f + 2])[0], f + 2) + def decode_intl(x, f): f += 1 - return (struct.unpack('!l', x[f:f+4])[0], f+4) + return (struct.unpack('!l', x[f:f + 4])[0], f + 4) + def decode_intq(x, f): f += 1 - return (struct.unpack('!q', x[f:f+8])[0], f+8) + return (struct.unpack('!q', x[f:f + 8])[0], f + 8) + def decode_float32(x, f): f += 1 - n = struct.unpack('!f', x[f:f+4])[0] - return (n, f+4) + n = struct.unpack('!f', x[f:f + 4])[0] + return (n, f + 4) + def decode_float64(x, f): f += 1 - n = struct.unpack('!d', x[f:f+8])[0] - return (n, f+8) + n = struct.unpack('!d', x[f:f + 8])[0] + return (n, f + 8) + def decode_string(x, f): colon = x.index(':', f) @@ -155,40 +163,46 @@ def decode_string(x, f): n = int(x[f:colon]) except (OverflowError, ValueError): n = long(x[f:colon]) - if x[f] == '0' and colon != f+1: + if x[f] == '0' and colon != f + 1: raise ValueError colon += 1 - s = x[colon:colon+n] + s = x[colon:colon + n] try: t = s.decode("utf8") if len(t) != len(s): s = t except UnicodeDecodeError: pass - return (s, colon+n) + return (s, colon + n) + def decode_list(x, f): - r, f = [], f+1 + r, f = [], f + 1 while x[f] != CHR_TERM: v, f = decode_func[x[f]](x, f) r.append(v) return (tuple(r), f + 1) + def decode_dict(x, f): - r, f = {}, f+1 + r, f = {}, f + 1 while x[f] != CHR_TERM: k, f = decode_func[x[f]](x, f) r[k], f = 
decode_func[x[f]](x, f) return (r, f + 1) + def decode_true(x, f): - return (True, f+1) + return (True, f + 1) + def decode_false(x, f): - return (False, f+1) + return (False, f + 1) + def decode_none(x, f): - return (None, f+1) + return (None, f + 1) + decode_func = {} decode_func['0'] = decode_string @@ -201,77 +215,94 @@ decode_func['6'] = decode_string decode_func['7'] = decode_string decode_func['8'] = decode_string decode_func['9'] = decode_string -decode_func[CHR_LIST ] = decode_list -decode_func[CHR_DICT ] = decode_dict -decode_func[CHR_INT ] = decode_int -decode_func[CHR_INT1 ] = decode_intb -decode_func[CHR_INT2 ] = decode_inth -decode_func[CHR_INT4 ] = decode_intl -decode_func[CHR_INT8 ] = decode_intq +decode_func[CHR_LIST] = decode_list +decode_func[CHR_DICT] = decode_dict +decode_func[CHR_INT] = decode_int +decode_func[CHR_INT1] = decode_intb +decode_func[CHR_INT2] = decode_inth +decode_func[CHR_INT4] = decode_intl +decode_func[CHR_INT8] = decode_intq decode_func[CHR_FLOAT32] = decode_float32 decode_func[CHR_FLOAT64] = decode_float64 -decode_func[CHR_TRUE ] = decode_true -decode_func[CHR_FALSE ] = decode_false -decode_func[CHR_NONE ] = decode_none +decode_func[CHR_TRUE] = decode_true +decode_func[CHR_FALSE] = decode_false +decode_func[CHR_NONE] = decode_none + def make_fixed_length_string_decoders(): def make_decoder(slen): def f(x, f): - s = x[f+1:f+1+slen] + s = x[f + 1:f + 1 + slen] try: t = s.decode("utf8") if len(t) != len(s): s = t except UnicodeDecodeError: pass - return (s, f+1+slen) + return (s, f + 1 + slen) + return f + for i in range(STR_FIXED_COUNT): - decode_func[chr(STR_FIXED_START+i)] = make_decoder(i) + decode_func[chr(STR_FIXED_START + i)] = make_decoder(i) + make_fixed_length_string_decoders() + def make_fixed_length_list_decoders(): def make_decoder(slen): def f(x, f): - r, f = [], f+1 + r, f = [], f + 1 for i in range(slen): v, f = decode_func[x[f]](x, f) r.append(v) return (tuple(r), f) + return f + for i in 
range(LIST_FIXED_COUNT): - decode_func[chr(LIST_FIXED_START+i)] = make_decoder(i) + decode_func[chr(LIST_FIXED_START + i)] = make_decoder(i) + make_fixed_length_list_decoders() + def make_fixed_length_int_decoders(): def make_decoder(j): def f(x, f): - return (j, f+1) + return (j, f + 1) + return f + for i in range(INT_POS_FIXED_COUNT): - decode_func[chr(INT_POS_FIXED_START+i)] = make_decoder(i) + decode_func[chr(INT_POS_FIXED_START + i)] = make_decoder(i) for i in range(INT_NEG_FIXED_COUNT): - decode_func[chr(INT_NEG_FIXED_START+i)] = make_decoder(-1-i) + decode_func[chr(INT_NEG_FIXED_START + i)] = make_decoder(-1 - i) + make_fixed_length_int_decoders() + def make_fixed_length_dict_decoders(): def make_decoder(slen): def f(x, f): - r, f = {}, f+1 + r, f = {}, f + 1 for j in range(slen): k, f = decode_func[x[f]](x, f) r[k], f = decode_func[x[f]](x, f) return (r, f) + return f + for i in range(DICT_FIXED_COUNT): - decode_func[chr(DICT_FIXED_START+i)] = make_decoder(i) + decode_func[chr(DICT_FIXED_START + i)] = make_decoder(i) + make_fixed_length_dict_decoders() -def encode_dict(x,r): + +def encode_dict(x, r): r.append(CHR_DICT) for k, v in x.items(): encode_func[type(k)](k, r) @@ -288,13 +319,15 @@ def loads(x): raise ValueError return r + from types import StringType, IntType, LongType, DictType, ListType, TupleType, FloatType, NoneType, UnicodeType + def encode_int(x, r): if 0 <= x < INT_POS_FIXED_COUNT: - r.append(chr(INT_POS_FIXED_START+x)) + r.append(chr(INT_POS_FIXED_START + x)) elif -INT_NEG_FIXED_COUNT <= x < 0: - r.append(chr(INT_NEG_FIXED_START-1-x)) + r.append(chr(INT_NEG_FIXED_START - 1 - x)) elif -128 <= x < 128: r.extend((CHR_INT1, struct.pack('!b', x))) elif -32768 <= x < 32768: @@ -309,27 +342,34 @@ def encode_int(x, r): raise ValueError('overflow') r.extend((CHR_INT, s, CHR_TERM)) + def encode_float32(x, r): r.extend((CHR_FLOAT32, struct.pack('!f', x))) + def encode_float64(x, r): r.extend((CHR_FLOAT64, struct.pack('!d', x))) + def encode_bool(x, 
r): r.extend({False: CHR_FALSE, True: CHR_TRUE}[bool(x)]) + def encode_none(x, r): r.extend(CHR_NONE) + def encode_string(x, r): if len(x) < STR_FIXED_COUNT: r.extend((chr(STR_FIXED_START + len(x)), x)) else: r.extend((str(len(x)), ':', x)) + def encode_unicode(x, r): encode_string(x.encode("utf8"), r) + def encode_list(x, r): if len(x) < LIST_FIXED_COUNT: r.append(chr(LIST_FIXED_START + len(x))) @@ -341,7 +381,8 @@ def encode_list(x, r): encode_func[type(i)](i, r) r.append(CHR_TERM) -def encode_dict(x,r): + +def encode_dict(x, r): if len(x) < DICT_FIXED_COUNT: r.append(chr(DICT_FIXED_START + len(x))) for k, v in x.items(): @@ -354,6 +395,7 @@ def encode_dict(x,r): encode_func[type(v)](v, r) r.append(CHR_TERM) + encode_func = {} encode_func[IntType] = encode_int encode_func[LongType] = encode_int @@ -368,10 +410,12 @@ lock = Lock() try: from types import BooleanType + encode_func[BooleanType] = encode_bool except ImportError: pass + def dumps(x, float_bits=DEFAULT_FLOAT_BITS): """ Dump data structure to str. 
@@ -392,41 +436,46 @@ def dumps(x, float_bits=DEFAULT_FLOAT_BITS): lock.release() return ''.join(r) + def test(): f1 = struct.unpack('!f', struct.pack('!f', 25.5))[0] f2 = struct.unpack('!f', struct.pack('!f', 29.3))[0] f3 = struct.unpack('!f', struct.pack('!f', -0.6))[0] - L = (({'a':15, 'bb':f1, 'ccc':f2, '':(f3,(),False,True,'')},('a',10**20),tuple(range(-100000,100000)),'b'*31,'b'*62,'b'*64,2**30,2**33,2**62,2**64,2**30,2**33,2**62,2**64,False,False, True, -1, 2, 0),) + L = (({'a': 15, 'bb': f1, 'ccc': f2, '': (f3, (), False, True, '')}, ('a', 10 ** 20), tuple(range(-100000, 100000)), + 'b' * 31, 'b' * 62, 'b' * 64, 2 ** 30, 2 ** 33, 2 ** 62, 2 ** 64, 2 ** 30, 2 ** 33, 2 ** 62, 2 ** 64, False, + False, True, -1, 2, 0),) assert loads(dumps(L)) == L - d = dict(zip(range(-100000,100000),range(-100000,100000))) - d.update({'a':20, 20:40, 40:41, f1:f2, f2:f3, f3:False, False:True, True:False}) - L = (d, {}, {5:6}, {7:7,True:8}, {9:10, 22:39, 49:50, 44: ''}) + d = dict(zip(range(-100000, 100000), range(-100000, 100000))) + d.update({'a': 20, 20: 40, 40: 41, f1: f2, f2: f3, f3: False, False: True, True: False}) + L = (d, {}, {5: 6}, {7: 7, True: 8}, {9: 10, 22: 39, 49: 50, 44: ''}) assert loads(dumps(L)) == L - L = ('', 'a'*10, 'a'*100, 'a'*1000, 'a'*10000, 'a'*100000, 'a'*1000000, 'a'*10000000) + L = ('', 'a' * 10, 'a' * 100, 'a' * 1000, 'a' * 10000, 'a' * 100000, 'a' * 1000000, 'a' * 10000000) assert loads(dumps(L)) == L - L = tuple([dict(zip(range(n),range(n))) for n in range(100)]) + ('b',) + L = tuple([dict(zip(range(n), range(n))) for n in range(100)]) + ('b',) assert loads(dumps(L)) == L - L = tuple([dict(zip(range(n),range(-n,0))) for n in range(100)]) + ('b',) + L = tuple([dict(zip(range(n), range(-n, 0))) for n in range(100)]) + ('b',) assert loads(dumps(L)) == L L = tuple([tuple(range(n)) for n in range(100)]) + ('b',) assert loads(dumps(L)) == L - L = tuple(['a'*n for n in range(1000)]) + ('b',) + L = tuple(['a' * n for n in range(1000)]) + ('b',) assert 
loads(dumps(L)) == L - L = tuple(['a'*n for n in range(1000)]) + (None,True,None) + L = tuple(['a' * n for n in range(1000)]) + (None, True, None) assert loads(dumps(L)) == L assert loads(dumps(None)) == None - assert loads(dumps({None:None})) == {None:None} - assert 1e-10 0 and len(audStreams) > 0): disable = True - logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.", 'TRANSCODER') + logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.", + 'TRANSCODER') if disable: if status: # if the download was "failed", assume bad. If it was successful, assume good. return False @@ -51,9 +53,11 @@ def isVideoGood(videofile, status): logger.info("SUCCESS: [%s] has no corruption." % (fileNameExt), 'TRANSCODER') return True else: - logger.info("FAILED: [%s] has %s video streams and %s audio streams. Assume corruption." % (fileNameExt, str(len(videoStreams)), str(len(audioStreams))), 'TRANSCODER') + logger.info("FAILED: [%s] has %s video streams and %s audio streams. Assume corruption." 
% ( + fileNameExt, str(len(videoStreams)), str(len(audioStreams))), 'TRANSCODER') return False + def zip_out(file, img, bitbucket): procin = None cmd = [core.SEVENZIP, '-so', 'e', img, file] @@ -63,6 +67,7 @@ def zip_out(file, img, bitbucket): logger.error("Extracting [%s] has failed" % (file), 'TRANSCODER') return procin + def getVideoDetails(videofile, img=None, bitbucket=None): video_details = {} result = 1 @@ -76,7 +81,8 @@ def getVideoDetails(videofile, img=None, bitbucket=None): try: if img: videofile = '-' - command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', '-show_error', videofile] + command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', '-show_error', + videofile] print_cmd(command) if img: procin = zip_out(file, img, bitbucket) @@ -87,7 +93,8 @@ def getVideoDetails(videofile, img=None, bitbucket=None): out, err = proc.communicate() result = proc.returncode video_details = json.loads(out) - except: pass + except: + pass if not video_details: try: command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', videofile] @@ -104,6 +111,7 @@ def getVideoDetails(videofile, img=None, bitbucket=None): logger.error("Checking [%s] has failed" % (file), 'TRANSCODER') return video_details, result + def buildCommands(file, newDir, movieName, bitbucket): if isinstance(file, str): inputFile = file @@ -119,8 +127,8 @@ def buildCommands(file, newDir, movieName, bitbucket): name = ('%s.cd%s' % (movieName, check.groups()[0])) elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name): name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name) - if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself. - core.VEXTENSION = '-transcoded' + core.VEXTENSION # adds '-transcoded.ext' + if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself. 
+ core.VEXTENSION = '-transcoded' + core.VEXTENSION # adds '-transcoded.ext' else: img, data = file.iteritems().next() name = data['name'] @@ -139,7 +147,8 @@ def buildCommands(file, newDir, movieName, bitbucket): meta_cmd = [] other_cmd = [] - if not video_details or not video_details.get("streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding. + if not video_details or not video_details.get( + "streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding. videoStreams = [] audioStreams = [] subStreams = [] @@ -166,12 +175,13 @@ def buildCommands(file, newDir, movieName, bitbucket): if core.ACODEC: audio_cmd.extend(['-c:a', core.ACODEC]) - if core.ACODEC in ['aac', 'dts']: # Allow users to use the experimental AAC codec that's built into recent versions of ffmpeg + if core.ACODEC in ['aac', + 'dts']: # Allow users to use the experimental AAC codec that's built into recent versions of ffmpeg audio_cmd.extend(['-strict', '-2']) else: audio_cmd.extend(['-c:a', 'copy']) if core.ACHANNELS: - audio_cmd.extend(['-ac', str(core.ACHANNELS)]) + audio_cmd.extend(['-ac', str(core.ACHANNELS)]) if core.ABITRATE: audio_cmd.extend(['-b:a', str(core.ABITRATE)]) if core.OUTPUTQUALITYPERCENT: @@ -183,7 +193,7 @@ def buildCommands(file, newDir, movieName, bitbucket): sub_cmd.extend(['-c:s', 'copy']) else: # http://en.wikibooks.org/wiki/FFMPEG_An_Intermediate_Guide/subtitle_options sub_cmd.extend(['-sn']) # Don't copy the subtitles over - + if core.OUTPUTFASTSTART: other_cmd.extend(['-movflags', '+faststart']) @@ -192,23 +202,29 @@ def buildCommands(file, newDir, movieName, bitbucket): audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"] if core.VEXTENSION not in ['.mkv', '.mpegts']: - subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle" and item["codec_name"] != 
"hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"] + subStreams = [item for item in video_details["streams"] if + item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ + "codec_name"] != "pgssub"] for video in videoStreams: codec = video["codec_name"] try: fr = video["avg_frame_rate"] - except: fr = 0 + except: + fr = 0 try: width = video["width"] - except: width = 0 + except: + width = 0 try: height = video["height"] - except: height = 0 + except: + height = 0 scale = core.VRESOLUTION try: - framerate = float(fr.split('/')[0])/float(fr.split('/')[1]) - except: framerate = 0 + framerate = float(fr.split('/')[0]) / float(fr.split('/')[1]) + except: + framerate = 0 if codec in core.VCODEC_ALLOW or not core.VCODEC: video_cmd.extend(['-c:v', 'copy']) else: @@ -216,16 +232,16 @@ def buildCommands(file, newDir, movieName, bitbucket): if core.VFRAMERATE and not (core.VFRAMERATE * 0.999 <= fr <= core.VFRAMERATE * 1.001): video_cmd.extend(['-r', str(core.VFRAMERATE)]) if scale: - w_scale = width/float(scale.split(':')[0]) - h_scale = height/float(scale.split(':')[1]) - if w_scale > h_scale: # widescreen, Scale by width only. - scale = scale.split(':')[0] + ":" + str(int((height/w_scale)/2)*2) - if w_scale > 1: - video_cmd.extend(['-vf', 'scale=' + scale]) + w_scale = width / float(scale.split(':')[0]) + h_scale = height / float(scale.split(':')[1]) + if w_scale > h_scale: # widescreen, Scale by width only. + scale = scale.split(':')[0] + ":" + str(int((height / w_scale) / 2) * 2) + if w_scale > 1: + video_cmd.extend(['-vf', 'scale=' + scale]) else: # lower or mathcing ratio, scale by height only. 
- scale = str(int((width/h_scale)/2)*2) + ":" + scale.split(':')[1] - if h_scale > 1: - video_cmd.extend(['-vf', 'scale=' + scale]) + scale = str(int((width / h_scale) / 2) * 2) + ":" + scale.split(':')[1] + if h_scale > 1: + video_cmd.extend(['-vf', 'scale=' + scale]) if core.VBITRATE: video_cmd.extend(['-b:v', str(core.VBITRATE)]) if core.VPRESET: @@ -238,7 +254,7 @@ def buildCommands(file, newDir, movieName, bitbucket): if video_cmd[1] == 'copy' and any(i in video_cmd for i in no_copy): video_cmd[1] = core.VCODEC if core.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding. - video_cmd = ['-c:v', 'copy'] + video_cmd = ['-c:v', 'copy'] map_cmd.extend(['-map', '0:' + str(video["index"])]) break # Only one video needed @@ -246,12 +262,12 @@ def buildCommands(file, newDir, movieName, bitbucket): a_mapped = [] if audioStreams: try: - audio1 = [ item for item in audioStreams if item["tags"]["language"] == core.ALANGUAGE ] + audio1 = [item for item in audioStreams if item["tags"]["language"] == core.ALANGUAGE] except: # no language tags. Assume only 1 language. audio1 = audioStreams - audio2 = [ item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW ] + audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW] try: - audio3 = [ item for item in audioStreams if item["tags"]["language"] != core.ALANGUAGE ] + audio3 = [item for item in audioStreams if item["tags"]["language"] != core.ALANGUAGE] except: audio3 = [] @@ -259,21 +275,25 @@ def buildCommands(file, newDir, movieName, bitbucket): map_cmd.extend(['-map', '0:' + str(audio2[0]["index"])]) a_mapped.extend([audio2[0]["index"]]) try: - bitrate = int(audio2[0]["bit_rate"])/1000 - except: bitrate = 0 + bitrate = int(audio2[0]["bit_rate"]) / 1000 + except: + bitrate = 0 try: channels = int(audio2[0]["channels"]) - except: channels = 0 + except: + channels = 0 audio_cmd.extend(['-c:a:' + str(used_audio), 'copy']) elif audio1: # right language wrong codec. 
map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])]) a_mapped.extend([audio1[0]["index"]]) try: - bitrate = int(audio1[0]["bit_rate"])/1000 - except: bitrate = 0 + bitrate = int(audio1[0]["bit_rate"]) / 1000 + except: + bitrate = 0 try: channels = int(audio1[0]["channels"]) - except: channels = 0 + except: + channels = 0 if core.ACODEC: audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC]) else: @@ -282,11 +302,13 @@ def buildCommands(file, newDir, movieName, bitbucket): map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])]) a_mapped.extend([audio3[0]["index"]]) try: - bitrate = int(audio3[0]["bit_rate"])/1000 - except: bitrate = 0 + bitrate = int(audio3[0]["bit_rate"]) / 1000 + except: + bitrate = 0 try: channels = int(audio3[0]["channels"]) - except: channels = 0 + except: + channels = 0 if core.ACODEC: audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC]) else: @@ -309,26 +331,30 @@ def buildCommands(file, newDir, movieName, bitbucket): if core.ACODEC2_ALLOW: used_audio += 1 - audio4 = [ item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW ] + audio4 = [item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW] if audio4: # right language and codec. map_cmd.extend(['-map', '0:' + str(audio4[0]["index"])]) a_mapped.extend([audio4[0]["index"]]) try: - bitrate = int(audio4[0]["bit_rate"])/1000 - except: bitrate = 0 + bitrate = int(audio4[0]["bit_rate"]) / 1000 + except: + bitrate = 0 try: channels = int(audio4[0]["channels"]) - except: channels = 0 + except: + channels = 0 audio_cmd2.extend(['-c:a:' + str(used_audio), 'copy']) elif audio1: # right language wrong codec. 
map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])]) a_mapped.extend([audio1[0]["index"]]) try: - bitrate = int(audio1[0]["bit_rate"])/1000 - except: bitrate = 0 + bitrate = int(audio1[0]["bit_rate"]) / 1000 + except: + bitrate = 0 try: channels = int(audio1[0]["channels"]) - except: channels = 0 + except: + channels = 0 if core.ACODEC2: audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2]) else: @@ -337,11 +363,13 @@ def buildCommands(file, newDir, movieName, bitbucket): map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])]) a_mapped.extend([audio3[0]["index"]]) try: - bitrate = int(audio3[0]["bit_rate"])/1000 - except: bitrate = 0 + bitrate = int(audio3[0]["bit_rate"]) / 1000 + except: + bitrate = 0 try: channels = int(audio3[0]["channels"]) - except: channels = 0 + except: + channels = 0 if core.ACODEC2: audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2]) else: @@ -371,11 +399,13 @@ def buildCommands(file, newDir, movieName, bitbucket): map_cmd.extend(['-map', '0:' + str(audio["index"])]) audio_cmd3 = [] try: - bitrate = int(audio["bit_rate"])/1000 - except: bitrate = 0 + bitrate = int(audio["bit_rate"]) / 1000 + except: + bitrate = 0 try: channels = int(audio["channels"]) - except: channels = 0 + except: + channels = 0 if audio["codec_name"] in core.ACODEC3_ALLOW: audio_cmd3.extend(['-c:a:' + str(used_audio), 'copy']) else: @@ -406,8 +436,9 @@ def buildCommands(file, newDir, movieName, bitbucket): n = 0 for lan in core.SLANGUAGES: try: - subs1 = [ item for item in subStreams if item["tags"]["language"] == lan ] - except: subs1 = [] + subs1 = [item for item in subStreams if item["tags"]["language"] == lan] + except: + subs1 = [] if core.BURN and not subs1 and not burnt and os.path.isfile(file): for subfile in get_subs(file): if lan in os.path.split(subfile)[1]: @@ -426,7 +457,7 @@ def buildCommands(file, newDir, movieName, bitbucket): break map_cmd.extend(['-map', '0:' + str(sub["index"])]) s_mapped.extend([sub["index"]]) - + if 
core.SINCLUDE: for sub in subStreams: if not core.ALLOWSUBS: @@ -434,7 +465,7 @@ def buildCommands(file, newDir, movieName, bitbucket): if sub["index"] in s_mapped: continue map_cmd.extend(['-map', '0:' + str(sub["index"])]) - s_mapped.extend([sub["index"]]) + s_mapped.extend([sub["index"]]) if core.OUTPUTFASTSTART: other_cmd.extend(['-movflags', '+faststart']) @@ -446,7 +477,7 @@ def buildCommands(file, newDir, movieName, bitbucket): if core.GENERALOPTS: command.extend(core.GENERALOPTS) - command.extend([ '-i', inputFile]) + command.extend(['-i', inputFile]) if core.SEMBED and os.path.isfile(file): for subfile in get_subs(file): @@ -461,7 +492,7 @@ def buildCommands(file, newDir, movieName, bitbucket): if not core.ALLOWSUBS or (not s_mapped and not n): sub_cmd.extend(['-sn']) - else: + else: if core.SCODEC: sub_cmd.extend(['-c:s', core.SCODEC]) else: @@ -478,6 +509,7 @@ def buildCommands(file, newDir, movieName, bitbucket): command = core.NICENESS + command return command + def get_subs(file): filepaths = [] subExt = ['.srt', '.sub', '.idx'] @@ -486,9 +518,10 @@ def get_subs(file): for dirname, dirs, filenames in os.walk(dir): for filename in filenames: filepaths.extend([os.path.join(dirname, filename)]) - subfiles = [ item for item in filepaths if os.path.splitext(item)[1] in subExt and name in item ] + subfiles = [item for item in filepaths if os.path.splitext(item)[1] in subExt and name in item] return subfiles + def extract_subs(file, newfilePath, bitbucket): video_details, result = getVideoDetails(file) if not video_details: @@ -501,34 +534,39 @@ def extract_subs(file, newfilePath, bitbucket): name = os.path.splitext(os.path.split(newfilePath)[1])[0] try: - subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item["codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"] + subStreams = [item for item in video_details["streams"] if + item["codec_type"] == 
"subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[ + "codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"] except: - subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"] + subStreams = [item for item in video_details["streams"] if + item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ + "codec_name"] != "pgssub"] num = len(subStreams) for n in range(num): sub = subStreams[n] idx = sub["index"] try: - lan = sub["tags"]["language"] + lan = sub["tags"]["language"] except: - lan = "unk" + lan = "unk" if num == 1: - outputFile = os.path.join(subdir, "%s.srt" %(name)) - if os.path.isfile(outputFile): - outputFile = os.path.join(subdir, "%s.%s.srt" %(name, n)) + outputFile = os.path.join(subdir, "%s.srt" % (name)) + if os.path.isfile(outputFile): + outputFile = os.path.join(subdir, "%s.%s.srt" % (name, n)) else: - outputFile = os.path.join(subdir, "%s.%s.srt" %(name, lan)) - if os.path.isfile(outputFile): - outputFile = os.path.join(subdir, "%s.%s.%s.srt" %(name, lan, n)) + outputFile = os.path.join(subdir, "%s.%s.srt" % (name, lan)) + if os.path.isfile(outputFile): + outputFile = os.path.join(subdir, "%s.%s.%s.srt" % (name, lan, n)) - command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', '-codec:' + str(idx), 'srt', outputFile] + command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', '-codec:' + str(idx), 'srt', + outputFile] if platform.system() != 'Windows': command = core.NICENESS + command logger.info("Extracting %s subtitle from: %s" % (lan, file)) print_cmd(command) - result = 1 # set result to failed in case call fails. + result = 1 # set result to failed in case call fails. 
try: proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) proc.communicate() @@ -539,11 +577,13 @@ def extract_subs(file, newfilePath, bitbucket): if result == 0: try: shutil.copymode(file, outputFile) - except: pass + except: + pass logger.info("Extracting %s subtitle from %s has succeeded" % (lan, file)) else: logger.error("Extracting subtitles has failed") + def processList(List, newDir, bitbucket): remList = [] newList = [] @@ -562,7 +602,7 @@ def processList(List, newDir, bitbucket): logger.debug("Found VIDEO_TS image file: %s" % (item), "TRANSCODER") if not vtsPath: try: - vtsPath = re.match("(.+VIDEO_TS)",item).groups()[0] + vtsPath = re.match("(.+VIDEO_TS)", item).groups()[0] except: vtsPath = os.path.split(item)[0] remList.append(item) @@ -571,7 +611,8 @@ def processList(List, newDir, bitbucket): elif core.CONCAT and re.match(".+[cC][dD][0-9].", item): remList.append(item) combine.append(item) - else: continue + else: + continue if vtsPath: newList.extend(combineVTS(vtsPath)) if combine: @@ -589,7 +630,8 @@ def processList(List, newDir, bitbucket): newList = [] remList = [] logger.error("Failed extracting .vob files from disk image. 
Stopping transcoding.", "TRANSCODER") - return List, remList, newList, success + return List, remList, newList, success + def ripISO(item, newDir, bitbucket): newFiles = [] @@ -606,13 +648,14 @@ def ripISO(item, newDir, bitbucket): proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) out, err = proc.communicate() result = proc.returncode - fileList = [ re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line) ] + fileList = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in + out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)] combined = [] for n in range(99): concat = [] m = 1 while True: - vtsName = 'VIDEO_TS%sVTS_%02d_%d.VOB' % (os.sep, n+1, m) + vtsName = 'VIDEO_TS%sVTS_%02d_%d.VOB' % (os.sep, n + 1, m) if vtsName in fileList: concat.append(vtsName) m += 1 @@ -623,11 +666,11 @@ def ripISO(item, newDir, bitbucket): if core.CONCAT: combined.extend(concat) continue - name = '%s.cd%s' % (os.path.splitext(os.path.split(item)[1])[0] ,str(n+1)) - newFiles.append({item: {'name': name , 'files': concat}}) + name = '%s.cd%s' % (os.path.splitext(os.path.split(item)[1])[0], str(n + 1)) + newFiles.append({item: {'name': name, 'files': concat}}) if core.CONCAT: name = os.path.splitext(os.path.split(item)[1])[0] - newFiles.append({item: {'name': name , 'files': combined}}) + newFiles.append({item: {'name': name, 'files': combined}}) if not newFiles: logger.error("No VIDEO_TS folder found in image file %s" % (item), "TRANSCODER") newFiles = [failure_dir] @@ -636,6 +679,7 @@ def ripISO(item, newDir, bitbucket): newFiles = [failure_dir] return newFiles + def combineVTS(vtsPath): newFiles = [] combined = '' @@ -643,7 +687,7 @@ def combineVTS(vtsPath): concat = '' m = 1 while True: - vtsName = 'VTS_%02d_%d.VOB' % (n+1, m) + vtsName = 'VTS_%02d_%d.VOB' % (n + 1, 
m) if os.path.isfile(os.path.join(vtsPath, vtsName)): concat = concat + os.path.join(vtsPath, vtsName) + '|' m += 1 @@ -659,12 +703,14 @@ def combineVTS(vtsPath): newFiles.append('concat:%s' % combined[:-1]) return newFiles + def combineCD(combine): newFiles = [] - for item in set([ re.match("(.+)[cC][dD][0-9].",item).groups()[0] for item in combine ]): + for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]): concat = '' for n in range(99): - files = [ file for file in combine if n+1 == int(re.match(".+[cC][dD]([0-9]+).",file).groups()[0]) and item in file ] + files = [file for file in combine if + n + 1 == int(re.match(".+[cC][dD]([0-9]+).", file).groups()[0]) and item in file] if files: concat = concat + files[0] + '|' else: @@ -673,17 +719,19 @@ def combineCD(combine): newFiles.append('concat:%s' % concat[:-1]) return newFiles + def print_cmd(command): cmd = "" for item in command: cmd = cmd + " " + str(item) logger.debug("calling command:%s" % (cmd)) + def Transcode_directory(dirName): if not core.FFMPEG: return 1, dirName logger.info("Checking for files to be transcoded") - final_result = 0 # initialize as successful + final_result = 0 # initialize as successful if core.OUTPUTVIDEOPATH: newDir = core.OUTPUTVIDEOPATH makeDir(newDir) @@ -713,17 +761,17 @@ def Transcode_directory(dirName): if core.SEXTRACT and isinstance(file, str): extract_subs(file, newfilePath, bitbucket) - try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason) + try: # Try to remove the file that we're transcoding to just in case. 
(ffmpeg will return an error if it already exists for some reason) os.remove(newfilePath) except OSError, e: - if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist + if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist logger.debug("Error when removing transcoding target: %s" % (e)) except Exception, e: logger.debug("Error when removing transcoding target: %s" % (e)) logger.info("Transcoding video: %s" % (newfilePath)) print_cmd(command) - result = 1 # set result to failed in case call fails. + result = 1 # set result to failed in case call fails. try: if isinstance(file, str): proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) @@ -752,12 +800,14 @@ def Transcode_directory(dirName): if result == 0: try: shutil.copymode(file, newfilePath) - except: pass + except: + pass logger.info("Transcoding of video to %s succeeded" % (newfilePath)) if os.path.isfile(newfilePath) and (file in newList or not core.DUPLICATE): try: os.unlink(file) - except: pass + except: + pass else: logger.error("Transcoding of video to %s failed with result %s" % (newfilePath, str(result))) # this will be 0 (successful) it all are successful, else will return a positive integer for failure. @@ -766,8 +816,9 @@ def Transcode_directory(dirName): for file in remList: try: os.unlink(file) - except: pass - if not os.listdir(newDir): #this is an empty directory and we didn't transcode into it. + except: + pass + if not os.listdir(newDir): # this is an empty directory and we didn't transcode into it. 
os.rmdir(newDir) newDir = dirName if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB diff --git a/core/transmissionrpc/__init__.py b/core/transmissionrpc/__init__.py index 7cc02cfd..c0ced381 100644 --- a/core/transmissionrpc/__init__.py +++ b/core/transmissionrpc/__init__.py @@ -10,9 +10,9 @@ from core.transmissionrpc.session import Session from core.transmissionrpc.client import Client from core.transmissionrpc.utils import add_stdout_logger, add_file_logger -__author__ = 'Erik Svensson ' -__version_major__ = 0 -__version_minor__ = 11 -__version__ = '{0}.{1}'.format(__version_major__, __version_minor__) -__copyright__ = 'Copyright (c) 2008-2013 Erik Svensson' -__license__ = 'MIT' +__author__ = 'Erik Svensson ' +__version_major__ = 0 +__version_minor__ = 11 +__version__ = '{0}.{1}'.format(__version_major__, __version_minor__) +__copyright__ = 'Copyright (c) 2008-2013 Erik Svensson' +__license__ = 'MIT' diff --git a/core/transmissionrpc/client.py b/core/transmissionrpc/client.py index 803b59ae..461be0ad 100644 --- a/core/transmissionrpc/client.py +++ b/core/transmissionrpc/client.py @@ -18,7 +18,6 @@ from core.transmissionrpc.torrent import Torrent from core.transmissionrpc.session import Session from six import PY3, integer_types, string_types, iteritems - if PY3: from urllib.parse import urlparse from urllib.request import urlopen @@ -26,6 +25,7 @@ else: from urlparse import urlparse from urllib2 import urlopen + def debug_httperror(error): """ Log the Transmission RPC HTTP error. 
@@ -49,6 +49,7 @@ def debug_httperror(error): ) ) + def parse_torrent_id(arg): """Parse an torrent id or torrent hashString.""" torrent_id = None @@ -62,7 +63,7 @@ def parse_torrent_id(arg): elif isinstance(arg, string_types): try: torrent_id = int(arg) - if torrent_id >= 2**31: + if torrent_id >= 2 ** 31: torrent_id = None except (ValueError, TypeError): pass @@ -75,6 +76,7 @@ def parse_torrent_id(arg): pass return torrent_id + def parse_torrent_ids(args): """ Take things and make them valid torrent identifiers @@ -115,6 +117,7 @@ def parse_torrent_ids(args): ids = [torrent_id] return ids + """ Torrent ids @@ -129,12 +132,14 @@ possible to provide a argument called ``timeout``. Timeout is only effective when using Python 2.6 or later and the default timeout is 30 seconds. """ + class Client(object): """ Client is the class handling the Transmission JSON-RPC client protocol. """ - def __init__(self, address='localhost', port=DEFAULT_PORT, user=None, password=None, http_handler=None, timeout=None): + def __init__(self, address='localhost', port=DEFAULT_PORT, user=None, password=None, http_handler=None, + timeout=None): if isinstance(timeout, (integer_types, float)): self._query_timeout = float(timeout) else: @@ -204,7 +209,8 @@ class Client(object): if timeout is None: timeout = self._query_timeout while True: - LOGGER.debug(json.dumps({'url': self.url, 'headers': headers, 'query': query, 'timeout': timeout}, indent=2)) + LOGGER.debug( + json.dumps({'url': self.url, 'headers': headers, 'query': query, 'timeout': timeout}, indent=2)) try: result = self.http_handler.request(self.url, query, headers, timeout) break @@ -244,8 +250,7 @@ class Client(object): elif require_ids: raise ValueError('request require ids') - query = json.dumps({'tag': self._sequence, 'method': method - , 'arguments': arguments}) + query = json.dumps({'tag': self._sequence, 'method': method, 'arguments': arguments}) self._sequence += 1 start = time.time() http_data = self._http_query(query, 
timeout) @@ -348,7 +353,7 @@ class Client(object): """ if self.rpc_version < version: LOGGER.warning('Using feature not supported by server. RPC version for server %d, feature introduced in %d.' - % (self.rpc_version, version)) + % (self.rpc_version, version)) def add_torrent(self, torrent, timeout=None, **kwargs): """ @@ -476,7 +481,7 @@ class Client(object): """ self._rpc_version_warning(3) self._request('torrent-remove', - {'delete-local-data':rpc_bool(delete_data)}, ids, True, timeout=timeout) + {'delete-local-data': rpc_bool(delete_data)}, ids, True, timeout=timeout) def remove(self, ids, delete_data=False, timeout=None): """ @@ -606,34 +611,34 @@ class Client(object): the new methods. list returns a dictionary indexed by torrent id. """ warnings.warn('list has been deprecated, please use get_torrent or get_torrents instead.', DeprecationWarning) - fields = ['id', 'hashString', 'name', 'sizeWhenDone', 'leftUntilDone' - , 'eta', 'status', 'rateUpload', 'rateDownload', 'uploadedEver' - , 'downloadedEver', 'uploadRatio', 'queuePosition'] + fields = ['id', 'hashString', 'name', 'sizeWhenDone', 'leftUntilDone', + 'eta', 'status', 'rateUpload', 'rateDownload', 'uploadedEver', + 'downloadedEver', 'uploadRatio', 'queuePosition'] return self._request('torrent-get', {'fields': fields}, timeout=timeout) def get_files(self, ids=None, timeout=None): """ - Get list of files for provided torrent id(s). If ids is empty, - information for all torrents are fetched. This function returns a dictionary - for each requested torrent id holding the information about the files. + Get list of files for provided torrent id(s). If ids is empty, + information for all torrents are fetched. This function returns a dictionary + for each requested torrent id holding the information about the files. - :: + :: - { - : { - : { - 'name': , - 'size': , - 'completed': , - 'priority': , - 'selected': - } + { + : { + : { + 'name': , + 'size': , + 'completed': , + 'priority': , + 'selected': + } - ... 
- } + ... + } - ... - } + ... + } """ fields = ['id', 'name', 'hashString', 'files', 'priorities', 'wanted'] request_result = self._request('torrent-get', {'fields': fields}, ids, timeout=timeout) @@ -645,22 +650,22 @@ class Client(object): def set_files(self, items, timeout=None): """ Set file properties. Takes a dictionary with similar contents as the result - of `get_files`. + of `get_files`. - :: + :: - { - : { - : { - 'priority': , - 'selected': - } + { + : { + : { + 'priority': , + 'selected': + } - ... - } + ... + } - ... - } + ... + } """ if not isinstance(items, dict): raise ValueError('Invalid file description') @@ -703,8 +708,8 @@ class Client(object): def change_torrent(self, ids, timeout=None, **kwargs): """ - Change torrent parameters for the torrent(s) with the supplied id's. The - parameters are: + Change torrent parameters for the torrent(s) with the supplied id's. The + parameters are: ============================ ===== =============== ======================================================================================= Argument RPC Replaced by Description @@ -736,13 +741,13 @@ class Client(object): ``uploadLimited`` 5 - Enable upload speed limiter. ============================ ===== =============== ======================================================================================= - .. NOTE:: - transmissionrpc will try to automatically fix argument errors. + .. NOTE:: + transmissionrpc will try to automatically fix argument errors. 
""" args = {} for key, value in iteritems(kwargs): argument = make_rpc_name(key) - (arg, val) = argument_value_convert('torrent-set' , argument, value, self.rpc_version) + (arg, val) = argument_value_convert('torrent-set', argument, value, self.rpc_version) args[arg] = val if len(args) > 0: @@ -814,7 +819,7 @@ class Client(object): """Move transfer to the bottom of the queue.""" self._rpc_version_warning(14) self._request('queue-move-bottom', ids=ids, require_ids=True, timeout=timeout) - + def queue_up(self, ids, timeout=None): """Move transfer up in the queue.""" self._rpc_version_warning(14) @@ -888,14 +893,14 @@ class Client(object): ================================ ===== ================= ========================================================================================================================== .. NOTE:: - transmissionrpc will try to automatically fix argument errors. + transmissionrpc will try to automatically fix argument errors. """ args = {} for key, value in iteritems(kwargs): if key == 'encryption' and value not in ['required', 'preferred', 'tolerated']: raise ValueError('Invalid encryption value') argument = make_rpc_name(key) - (arg, val) = argument_value_convert('session-set' , argument, value, self.rpc_version) + (arg, val) = argument_value_convert('session-set', argument, value, self.rpc_version) args[arg] = val if len(args) > 0: self._request('session-set', args, timeout=timeout) diff --git a/core/transmissionrpc/constants.py b/core/transmissionrpc/constants.py index 5237fac0..78e61dd5 100644 --- a/core/transmissionrpc/constants.py +++ b/core/transmissionrpc/constants.py @@ -6,10 +6,10 @@ import logging from core.transmissionrpc.six import iteritems - LOGGER = logging.getLogger('transmissionrpc') LOGGER.setLevel(logging.ERROR) + def mirror_dict(source): """ Creates a dictionary with all values as keys and all keys as values. 
@@ -17,38 +17,39 @@ def mirror_dict(source): source.update(dict((value, key) for key, value in iteritems(source))) return source + DEFAULT_PORT = 9091 DEFAULT_TIMEOUT = 30.0 -TR_PRI_LOW = -1 -TR_PRI_NORMAL = 0 -TR_PRI_HIGH = 1 +TR_PRI_LOW = -1 +TR_PRI_NORMAL = 0 +TR_PRI_HIGH = 1 PRIORITY = mirror_dict({ - 'low' : TR_PRI_LOW, - 'normal' : TR_PRI_NORMAL, - 'high' : TR_PRI_HIGH + 'low': TR_PRI_LOW, + 'normal': TR_PRI_NORMAL, + 'high': TR_PRI_HIGH }) -TR_RATIOLIMIT_GLOBAL = 0 # follow the global settings -TR_RATIOLIMIT_SINGLE = 1 # override the global settings, seeding until a certain ratio -TR_RATIOLIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of ratio +TR_RATIOLIMIT_GLOBAL = 0 # follow the global settings +TR_RATIOLIMIT_SINGLE = 1 # override the global settings, seeding until a certain ratio +TR_RATIOLIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of ratio RATIO_LIMIT = mirror_dict({ - 'global' : TR_RATIOLIMIT_GLOBAL, - 'single' : TR_RATIOLIMIT_SINGLE, - 'unlimited' : TR_RATIOLIMIT_UNLIMITED + 'global': TR_RATIOLIMIT_GLOBAL, + 'single': TR_RATIOLIMIT_SINGLE, + 'unlimited': TR_RATIOLIMIT_UNLIMITED }) -TR_IDLELIMIT_GLOBAL = 0 # follow the global settings -TR_IDLELIMIT_SINGLE = 1 # override the global settings, seeding until a certain idle time -TR_IDLELIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of activity +TR_IDLELIMIT_GLOBAL = 0 # follow the global settings +TR_IDLELIMIT_SINGLE = 1 # override the global settings, seeding until a certain idle time +TR_IDLELIMIT_UNLIMITED = 2 # override the global settings, seeding regardless of activity IDLE_LIMIT = mirror_dict({ - 'global' : TR_RATIOLIMIT_GLOBAL, - 'single' : TR_RATIOLIMIT_SINGLE, - 'unlimited' : TR_RATIOLIMIT_UNLIMITED + 'global': TR_RATIOLIMIT_GLOBAL, + 'single': TR_RATIOLIMIT_SINGLE, + 'unlimited': TR_RATIOLIMIT_UNLIMITED }) # A note on argument maps @@ -62,236 +63,266 @@ IDLE_LIMIT = mirror_dict({ # Arguments for torrent methods 
TORRENT_ARGS = { - 'get' : { - 'activityDate': ('number', 1, None, None, None, 'Last time of upload or download activity.'), - 'addedDate': ('number', 1, None, None, None, 'The date when this torrent was first added.'), - 'announceResponse': ('string', 1, 7, None, None, 'The announce message from the tracker.'), - 'announceURL': ('string', 1, 7, None, None, 'Current announce URL.'), - 'bandwidthPriority': ('number', 5, None, None, None, 'Bandwidth priority. Low (-1), Normal (0) or High (1).'), - 'comment': ('string', 1, None, None, None, 'Torrent comment.'), - 'corruptEver': ('number', 1, None, None, None, 'Number of bytes of corrupt data downloaded.'), - 'creator': ('string', 1, None, None, None, 'Torrent creator.'), - 'dateCreated': ('number', 1, None, None, None, 'Torrent creation date.'), - 'desiredAvailable': ('number', 1, None, None, None, 'Number of bytes avalable and left to be downloaded.'), - 'doneDate': ('number', 1, None, None, None, 'The date when the torrent finished downloading.'), - 'downloadDir': ('string', 4, None, None, None, 'The directory path where the torrent is downloaded to.'), - 'downloadedEver': ('number', 1, None, None, None, 'Number of bytes of good data downloaded.'), - 'downloaders': ('number', 4, 7, None, None, 'Number of downloaders.'), - 'downloadLimit': ('number', 1, None, None, None, 'Download limit in Kbps.'), - 'downloadLimited': ('boolean', 5, None, None, None, 'Download limit is enabled'), - 'downloadLimitMode': ('number', 1, 5, None, None, 'Download limit mode. 0 means global, 1 means signle, 2 unlimited.'), - 'error': ('number', 1, None, None, None, 'Kind of error. 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.'), - 'errorString': ('number', 1, None, None, None, 'Error message.'), - 'eta': ('number', 1, None, None, None, 'Estimated number of seconds left when downloading or seeding. 
-1 means not available and -2 means unknown.'), - 'etaIdle': ('number', 15, None, None, None, 'Estimated number of seconds left until the idle time limit is reached. -1 means not available and -2 means unknown.'), - 'files': ('array', 1, None, None, None, 'Array of file object containing key, bytesCompleted, length and name.'), - 'fileStats': ('array', 5, None, None, None, 'Aray of file statistics containing bytesCompleted, wanted and priority.'), - 'hashString': ('string', 1, None, None, None, 'Hashstring unique for the torrent even between sessions.'), - 'haveUnchecked': ('number', 1, None, None, None, 'Number of bytes of partial pieces.'), - 'haveValid': ('number', 1, None, None, None, 'Number of bytes of checksum verified data.'), - 'honorsSessionLimits': ('boolean', 5, None, None, None, 'True if session upload limits are honored'), - 'id': ('number', 1, None, None, None, 'Session unique torrent id.'), - 'isFinished': ('boolean', 9, None, None, None, 'True if the torrent is finished. Downloaded and seeded.'), - 'isPrivate': ('boolean', 1, None, None, None, 'True if the torrent is private.'), - 'isStalled': ('boolean', 14, None, None, None, 'True if the torrent has stalled (been idle for a long time).'), - 'lastAnnounceTime': ('number', 1, 7, None, None, 'The time of the last announcement.'), - 'lastScrapeTime': ('number', 1, 7, None, None, 'The time af the last successful scrape.'), - 'leechers': ('number', 1, 7, None, None, 'Number of leechers.'), - 'leftUntilDone': ('number', 1, None, None, None, 'Number of bytes left until the download is done.'), - 'magnetLink': ('string', 7, None, None, None, 'The magnet link for this torrent.'), - 'manualAnnounceTime': ('number', 1, None, None, None, 'The time until you manually ask for more peers.'), - 'maxConnectedPeers': ('number', 1, None, None, None, 'Maximum of connected peers.'), - 'metadataPercentComplete': ('number', 7, None, None, None, 'Download progress of metadata. 
0.0 to 1.0.'), - 'name': ('string', 1, None, None, None, 'Torrent name.'), - 'nextAnnounceTime': ('number', 1, 7, None, None, 'Next announce time.'), - 'nextScrapeTime': ('number', 1, 7, None, None, 'Next scrape time.'), - 'peer-limit': ('number', 5, None, None, None, 'Maximum number of peers.'), - 'peers': ('array', 2, None, None, None, 'Array of peer objects.'), - 'peersConnected': ('number', 1, None, None, None, 'Number of peers we are connected to.'), - 'peersFrom': ('object', 1, None, None, None, 'Object containing download peers counts for different peer types.'), - 'peersGettingFromUs': ('number', 1, None, None, None, 'Number of peers we are sending data to.'), - 'peersKnown': ('number', 1, 13, None, None, 'Number of peers that the tracker knows.'), - 'peersSendingToUs': ('number', 1, None, None, None, 'Number of peers sending to us'), - 'percentDone': ('double', 5, None, None, None, 'Download progress of selected files. 0.0 to 1.0.'), - 'pieces': ('string', 5, None, None, None, 'String with base64 encoded bitfield indicating finished pieces.'), - 'pieceCount': ('number', 1, None, None, None, 'Number of pieces.'), - 'pieceSize': ('number', 1, None, None, None, 'Number of bytes in a piece.'), - 'priorities': ('array', 1, None, None, None, 'Array of file priorities.'), - 'queuePosition': ('number', 14, None, None, None, 'The queue position.'), - 'rateDownload': ('number', 1, None, None, None, 'Download rate in bps.'), - 'rateUpload': ('number', 1, None, None, None, 'Upload rate in bps.'), - 'recheckProgress': ('double', 1, None, None, None, 'Progress of recheck. 
0.0 to 1.0.'), - 'secondsDownloading': ('number', 15, None, None, None, ''), - 'secondsSeeding': ('number', 15, None, None, None, ''), - 'scrapeResponse': ('string', 1, 7, None, None, 'Scrape response message.'), - 'scrapeURL': ('string', 1, 7, None, None, 'Current scrape URL'), - 'seeders': ('number', 1, 7, None, None, 'Number of seeders reported by the tracker.'), - 'seedIdleLimit': ('number', 10, None, None, None, 'Idle limit in minutes.'), - 'seedIdleMode': ('number', 10, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'), - 'seedRatioLimit': ('double', 5, None, None, None, 'Seed ratio limit.'), - 'seedRatioMode': ('number', 5, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'), - 'sizeWhenDone': ('number', 1, None, None, None, 'Size of the torrent download in bytes.'), - 'startDate': ('number', 1, None, None, None, 'The date when the torrent was last started.'), - 'status': ('number', 1, None, None, None, 'Current status, see source'), - 'swarmSpeed': ('number', 1, 7, None, None, 'Estimated speed in Kbps in the swarm.'), - 'timesCompleted': ('number', 1, 7, None, None, 'Number of successful downloads reported by the tracker.'), - 'trackers': ('array', 1, None, None, None, 'Array of tracker objects.'), - 'trackerStats': ('object', 7, None, None, None, 'Array of object containing tracker statistics.'), - 'totalSize': ('number', 1, None, None, None, 'Total size of the torrent in bytes'), - 'torrentFile': ('string', 5, None, None, None, 'Path to .torrent file.'), - 'uploadedEver': ('number', 1, None, None, None, 'Number of bytes uploaded, ever.'), - 'uploadLimit': ('number', 1, None, None, None, 'Upload limit in Kbps'), - 'uploadLimitMode': ('number', 1, 5, None, None, 'Upload limit mode. 
0 means global, 1 means signle, 2 unlimited.'), - 'uploadLimited': ('boolean', 5, None, None, None, 'Upload limit enabled.'), - 'uploadRatio': ('double', 1, None, None, None, 'Seed ratio.'), - 'wanted': ('array', 1, None, None, None, 'Array of booleans indicated wanted files.'), - 'webseeds': ('array', 1, None, None, None, 'Array of webseeds objects'), - 'webseedsSendingToUs': ('number', 1, None, None, None, 'Number of webseeds seeding to us.'), + 'get': { + 'activityDate': ('number', 1, None, None, None, 'Last time of upload or download activity.'), + 'addedDate': ('number', 1, None, None, None, 'The date when this torrent was first added.'), + 'announceResponse': ('string', 1, 7, None, None, 'The announce message from the tracker.'), + 'announceURL': ('string', 1, 7, None, None, 'Current announce URL.'), + 'bandwidthPriority': ('number', 5, None, None, None, 'Bandwidth priority. Low (-1), Normal (0) or High (1).'), + 'comment': ('string', 1, None, None, None, 'Torrent comment.'), + 'corruptEver': ('number', 1, None, None, None, 'Number of bytes of corrupt data downloaded.'), + 'creator': ('string', 1, None, None, None, 'Torrent creator.'), + 'dateCreated': ('number', 1, None, None, None, 'Torrent creation date.'), + 'desiredAvailable': ('number', 1, None, None, None, 'Number of bytes avalable and left to be downloaded.'), + 'doneDate': ('number', 1, None, None, None, 'The date when the torrent finished downloading.'), + 'downloadDir': ('string', 4, None, None, None, 'The directory path where the torrent is downloaded to.'), + 'downloadedEver': ('number', 1, None, None, None, 'Number of bytes of good data downloaded.'), + 'downloaders': ('number', 4, 7, None, None, 'Number of downloaders.'), + 'downloadLimit': ('number', 1, None, None, None, 'Download limit in Kbps.'), + 'downloadLimited': ('boolean', 5, None, None, None, 'Download limit is enabled'), + 'downloadLimitMode': ( + 'number', 1, 5, None, None, 'Download limit mode. 
0 means global, 1 means signle, 2 unlimited.'), + 'error': ('number', 1, None, None, None, + 'Kind of error. 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.'), + 'errorString': ('number', 1, None, None, None, 'Error message.'), + 'eta': ('number', 1, None, None, None, + 'Estimated number of seconds left when downloading or seeding. -1 means not available and -2 means unknown.'), + 'etaIdle': ('number', 15, None, None, None, + 'Estimated number of seconds left until the idle time limit is reached. -1 means not available and -2 means unknown.'), + 'files': ( + 'array', 1, None, None, None, 'Array of file object containing key, bytesCompleted, length and name.'), + 'fileStats': ( + 'array', 5, None, None, None, 'Aray of file statistics containing bytesCompleted, wanted and priority.'), + 'hashString': ('string', 1, None, None, None, 'Hashstring unique for the torrent even between sessions.'), + 'haveUnchecked': ('number', 1, None, None, None, 'Number of bytes of partial pieces.'), + 'haveValid': ('number', 1, None, None, None, 'Number of bytes of checksum verified data.'), + 'honorsSessionLimits': ('boolean', 5, None, None, None, 'True if session upload limits are honored'), + 'id': ('number', 1, None, None, None, 'Session unique torrent id.'), + 'isFinished': ('boolean', 9, None, None, None, 'True if the torrent is finished. 
Downloaded and seeded.'), + 'isPrivate': ('boolean', 1, None, None, None, 'True if the torrent is private.'), + 'isStalled': ('boolean', 14, None, None, None, 'True if the torrent has stalled (been idle for a long time).'), + 'lastAnnounceTime': ('number', 1, 7, None, None, 'The time of the last announcement.'), + 'lastScrapeTime': ('number', 1, 7, None, None, 'The time af the last successful scrape.'), + 'leechers': ('number', 1, 7, None, None, 'Number of leechers.'), + 'leftUntilDone': ('number', 1, None, None, None, 'Number of bytes left until the download is done.'), + 'magnetLink': ('string', 7, None, None, None, 'The magnet link for this torrent.'), + 'manualAnnounceTime': ('number', 1, None, None, None, 'The time until you manually ask for more peers.'), + 'maxConnectedPeers': ('number', 1, None, None, None, 'Maximum of connected peers.'), + 'metadataPercentComplete': ('number', 7, None, None, None, 'Download progress of metadata. 0.0 to 1.0.'), + 'name': ('string', 1, None, None, None, 'Torrent name.'), + 'nextAnnounceTime': ('number', 1, 7, None, None, 'Next announce time.'), + 'nextScrapeTime': ('number', 1, 7, None, None, 'Next scrape time.'), + 'peer-limit': ('number', 5, None, None, None, 'Maximum number of peers.'), + 'peers': ('array', 2, None, None, None, 'Array of peer objects.'), + 'peersConnected': ('number', 1, None, None, None, 'Number of peers we are connected to.'), + 'peersFrom': ( + 'object', 1, None, None, None, 'Object containing download peers counts for different peer types.'), + 'peersGettingFromUs': ('number', 1, None, None, None, 'Number of peers we are sending data to.'), + 'peersKnown': ('number', 1, 13, None, None, 'Number of peers that the tracker knows.'), + 'peersSendingToUs': ('number', 1, None, None, None, 'Number of peers sending to us'), + 'percentDone': ('double', 5, None, None, None, 'Download progress of selected files. 
0.0 to 1.0.'), + 'pieces': ('string', 5, None, None, None, 'String with base64 encoded bitfield indicating finished pieces.'), + 'pieceCount': ('number', 1, None, None, None, 'Number of pieces.'), + 'pieceSize': ('number', 1, None, None, None, 'Number of bytes in a piece.'), + 'priorities': ('array', 1, None, None, None, 'Array of file priorities.'), + 'queuePosition': ('number', 14, None, None, None, 'The queue position.'), + 'rateDownload': ('number', 1, None, None, None, 'Download rate in bps.'), + 'rateUpload': ('number', 1, None, None, None, 'Upload rate in bps.'), + 'recheckProgress': ('double', 1, None, None, None, 'Progress of recheck. 0.0 to 1.0.'), + 'secondsDownloading': ('number', 15, None, None, None, ''), + 'secondsSeeding': ('number', 15, None, None, None, ''), + 'scrapeResponse': ('string', 1, 7, None, None, 'Scrape response message.'), + 'scrapeURL': ('string', 1, 7, None, None, 'Current scrape URL'), + 'seeders': ('number', 1, 7, None, None, 'Number of seeders reported by the tracker.'), + 'seedIdleLimit': ('number', 10, None, None, None, 'Idle limit in minutes.'), + 'seedIdleMode': ('number', 10, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'), + 'seedRatioLimit': ('double', 5, None, None, None, 'Seed ratio limit.'), + 'seedRatioMode': ('number', 5, None, None, None, 'Use global (0), torrent (1), or unlimited (2) limit.'), + 'sizeWhenDone': ('number', 1, None, None, None, 'Size of the torrent download in bytes.'), + 'startDate': ('number', 1, None, None, None, 'The date when the torrent was last started.'), + 'status': ('number', 1, None, None, None, 'Current status, see source'), + 'swarmSpeed': ('number', 1, 7, None, None, 'Estimated speed in Kbps in the swarm.'), + 'timesCompleted': ('number', 1, 7, None, None, 'Number of successful downloads reported by the tracker.'), + 'trackers': ('array', 1, None, None, None, 'Array of tracker objects.'), + 'trackerStats': ('object', 7, None, None, None, 'Array of object 
containing tracker statistics.'), + 'totalSize': ('number', 1, None, None, None, 'Total size of the torrent in bytes'), + 'torrentFile': ('string', 5, None, None, None, 'Path to .torrent file.'), + 'uploadedEver': ('number', 1, None, None, None, 'Number of bytes uploaded, ever.'), + 'uploadLimit': ('number', 1, None, None, None, 'Upload limit in Kbps'), + 'uploadLimitMode': ( + 'number', 1, 5, None, None, 'Upload limit mode. 0 means global, 1 means signle, 2 unlimited.'), + 'uploadLimited': ('boolean', 5, None, None, None, 'Upload limit enabled.'), + 'uploadRatio': ('double', 1, None, None, None, 'Seed ratio.'), + 'wanted': ('array', 1, None, None, None, 'Array of booleans indicated wanted files.'), + 'webseeds': ('array', 1, None, None, None, 'Array of webseeds objects'), + 'webseedsSendingToUs': ('number', 1, None, None, None, 'Number of webseeds seeding to us.'), }, 'set': { - 'bandwidthPriority': ('number', 5, None, None, None, 'Priority for this transfer.'), - 'downloadLimit': ('number', 5, None, 'speed-limit-down', None, 'Set the speed limit for download in Kib/s.'), - 'downloadLimited': ('boolean', 5, None, 'speed-limit-down-enabled', None, 'Enable download speed limiter.'), - 'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."), - 'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."), - 'honorsSessionLimits': ('boolean', 5, None, None, None, "Enables or disables the transfer to honour the upload limit set in the session."), - 'location': ('array', 1, None, None, None, 'Local download location.'), - 'peer-limit': ('number', 1, None, None, None, 'The peer limit for the torrents.'), - 'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."), - 'priority-low': ('array', 1, None, None, None, "A list of file id's that should have normal priority."), - 'priority-normal': ('array', 1, None, None, None, "A list of file id's that 
should have low priority."), - 'queuePosition': ('number', 14, None, None, None, 'Position of this transfer in its queue.'), - 'seedIdleLimit': ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'), - 'seedIdleMode': ('number', 10, None, None, None, 'Seed inactivity mode. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'), - 'seedRatioLimit': ('double', 5, None, None, None, 'Seeding ratio.'), - 'seedRatioMode': ('number', 5, None, None, None, 'Which ratio to use. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'), - 'speed-limit-down': ('number', 1, 5, None, 'downloadLimit', 'Set the speed limit for download in Kib/s.'), - 'speed-limit-down-enabled': ('boolean', 1, 5, None, 'downloadLimited', 'Enable download speed limiter.'), - 'speed-limit-up': ('number', 1, 5, None, 'uploadLimit', 'Set the speed limit for upload in Kib/s.'), - 'speed-limit-up-enabled': ('boolean', 1, 5, None, 'uploadLimited', 'Enable upload speed limiter.'), - 'trackerAdd': ('array', 10, None, None, None, 'Array of string with announce URLs to add.'), - 'trackerRemove': ('array', 10, None, None, None, 'Array of ids of trackers to remove.'), - 'trackerReplace': ('array', 10, None, None, None, 'Array of (id, url) tuples where the announce URL should be replaced.'), - 'uploadLimit': ('number', 5, None, 'speed-limit-up', None, 'Set the speed limit for upload in Kib/s.'), - 'uploadLimited': ('boolean', 5, None, 'speed-limit-up-enabled', None, 'Enable upload speed limiter.'), + 'bandwidthPriority': ('number', 5, None, None, None, 'Priority for this transfer.'), + 'downloadLimit': ('number', 5, None, 'speed-limit-down', None, 'Set the speed limit for download in Kib/s.'), + 'downloadLimited': ('boolean', 5, None, 'speed-limit-down-enabled', None, 'Enable download speed limiter.'), + 'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."), + 'files-unwanted': ('array', 1, None, None, None, "A list of file id's 
that shouldn't be downloaded."), + 'honorsSessionLimits': ('boolean', 5, None, None, None, + "Enables or disables the transfer to honour the upload limit set in the session."), + 'location': ('array', 1, None, None, None, 'Local download location.'), + 'peer-limit': ('number', 1, None, None, None, 'The peer limit for the torrents.'), + 'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."), + 'priority-low': ('array', 1, None, None, None, "A list of file id's that should have normal priority."), + 'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have low priority."), + 'queuePosition': ('number', 14, None, None, None, 'Position of this transfer in its queue.'), + 'seedIdleLimit': ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'), + 'seedIdleMode': ('number', 10, None, None, None, + 'Seed inactivity mode. 0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'), + 'seedRatioLimit': ('double', 5, None, None, None, 'Seeding ratio.'), + 'seedRatioMode': ('number', 5, None, None, None, + 'Which ratio to use. 
0 = Use session limit, 1 = Use transfer limit, 2 = Disable limit.'), + 'speed-limit-down': ('number', 1, 5, None, 'downloadLimit', 'Set the speed limit for download in Kib/s.'), + 'speed-limit-down-enabled': ('boolean', 1, 5, None, 'downloadLimited', 'Enable download speed limiter.'), + 'speed-limit-up': ('number', 1, 5, None, 'uploadLimit', 'Set the speed limit for upload in Kib/s.'), + 'speed-limit-up-enabled': ('boolean', 1, 5, None, 'uploadLimited', 'Enable upload speed limiter.'), + 'trackerAdd': ('array', 10, None, None, None, 'Array of string with announce URLs to add.'), + 'trackerRemove': ('array', 10, None, None, None, 'Array of ids of trackers to remove.'), + 'trackerReplace': ( + 'array', 10, None, None, None, 'Array of (id, url) tuples where the announce URL should be replaced.'), + 'uploadLimit': ('number', 5, None, 'speed-limit-up', None, 'Set the speed limit for upload in Kib/s.'), + 'uploadLimited': ('boolean', 5, None, 'speed-limit-up-enabled', None, 'Enable upload speed limiter.'), }, 'add': { - 'bandwidthPriority': ('number', 8, None, None, None, 'Priority for this transfer.'), - 'download-dir': ('string', 1, None, None, None, 'The directory where the downloaded contents will be saved in.'), - 'cookies': ('string', 13, None, None, None, 'One or more HTTP cookie(s).'), - 'filename': ('string', 1, None, None, None, "A file path or URL to a torrent file or a magnet link."), - 'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."), - 'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."), - 'metainfo': ('string', 1, None, None, None, 'The content of a torrent file, base64 encoded.'), - 'paused': ('boolean', 1, None, None, None, 'If True, does not start the transfer when added.'), - 'peer-limit': ('number', 1, None, None, None, 'Maximum number of peers allowed.'), - 'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high 
priority."), - 'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."), - 'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."), + 'bandwidthPriority': ('number', 8, None, None, None, 'Priority for this transfer.'), + 'download-dir': ( + 'string', 1, None, None, None, 'The directory where the downloaded contents will be saved in.'), + 'cookies': ('string', 13, None, None, None, 'One or more HTTP cookie(s).'), + 'filename': ('string', 1, None, None, None, "A file path or URL to a torrent file or a magnet link."), + 'files-wanted': ('array', 1, None, None, None, "A list of file id's that should be downloaded."), + 'files-unwanted': ('array', 1, None, None, None, "A list of file id's that shouldn't be downloaded."), + 'metainfo': ('string', 1, None, None, None, 'The content of a torrent file, base64 encoded.'), + 'paused': ('boolean', 1, None, None, None, 'If True, does not start the transfer when added.'), + 'peer-limit': ('number', 1, None, None, None, 'Maximum number of peers allowed.'), + 'priority-high': ('array', 1, None, None, None, "A list of file id's that should have high priority."), + 'priority-low': ('array', 1, None, None, None, "A list of file id's that should have low priority."), + 'priority-normal': ('array', 1, None, None, None, "A list of file id's that should have normal priority."), } } # Arguments for session methods SESSION_ARGS = { 'get': { - "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'), - "alt-speed-enabled": ('boolean', 5, None, None, None, 'True if alternate global download speed limiter is ebabled.'), - "alt-speed-time-begin": ('number', 5, None, None, None, 'Time when alternate speeds should be enabled. 
Minutes after midnight.'), - "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'True if alternate speeds scheduling is enabled.'), - "alt-speed-time-end": ('number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'), - "alt-speed-time-day": ('number', 5, None, None, None, 'Days alternate speeds scheduling is enabled.'), - "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s)'), - "blocklist-enabled": ('boolean', 5, None, None, None, 'True when blocklist is enabled.'), - "blocklist-size": ('number', 5, None, None, None, 'Number of rules in the blocklist'), - "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'), - "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'), - "config-dir": ('string', 8, None, None, None, 'location of transmissions configuration directory'), - "dht-enabled": ('boolean', 6, None, None, None, 'True if DHT enabled.'), - "download-dir": ('string', 1, None, None, None, 'The download directory.'), - "download-dir-free-space": ('number', 12, None, None, None, 'Free space in the download directory, in bytes'), - "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'), - "download-queue-enabled": ('boolean', 14, None, None, None, 'True if the download queue is enabled.'), - "encryption": ('string', 1, None, None, None, 'Encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'), - "idle-seeding-limit": ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'), - "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'True if the seed activity limit is enabled.'), - "incomplete-dir": ('string', 7, None, None, None, 'The path to the directory for incomplete torrent transfer data.'), - "incomplete-dir-enabled": ('boolean', 7, None, None, None, 'True if the incomplete dir is enabled.'), 
- "lpd-enabled": ('boolean', 9, None, None, None, 'True if local peer discovery is enabled.'), - "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'), - "peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'), - "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'), - "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'True if PEX is allowed.'), - "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'True if PEX is enabled.'), - "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'), - "peer-port": ('number', 5, None, 'port', None, 'Peer port.'), - "peer-port-random-on-start": ('boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'), - "port-forwarding-enabled": ('boolean', 1, None, None, None, 'True if port forwarding is enabled.'), - "queue-stalled-minutes": ('number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'), - "queue-stalled-enabled": ('boolean', 14, None, None, None, 'True if stalled tracking of transfers is enabled.'), - "rename-partial-files": ('boolean', 8, None, None, None, 'True if ".part" is appended to incomplete files'), - "rpc-version": ('number', 4, None, None, None, 'Transmission RPC API Version.'), - "rpc-version-minimum": ('number', 4, None, None, None, 'Minimum accepted RPC API Version.'), - "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'True if the done script is enabled.'), - "script-torrent-done-filename": ('string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'), - "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 
1.0 means 1:1 download and upload ratio.'), - "seedRatioLimited": ('boolean', 5, None, None, None, 'True if seed ration limit is enabled.'), - "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'), - "seed-queue-enabled": ('boolean', 14, None, None, None, 'True if upload queue is enabled.'), - "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'), - "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'True if the download speed is limited.'), - "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'), - "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'True if the upload speed is limited.'), - "start-added-torrents": ('boolean', 9, None, None, None, 'When true uploaded torrents will start right away.'), - "trash-original-torrent-files": ('boolean', 9, None, None, None, 'When true added .torrent files will be deleted.'), - 'units': ('object', 10, None, None, None, 'An object containing units for size and speed.'), - 'utp-enabled': ('boolean', 13, None, None, None, 'True if Micro Transport Protocol (UTP) is enabled.'), - "version": ('string', 3, None, None, None, 'Transmission version.'), + "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'), + "alt-speed-enabled": ( + 'boolean', 5, None, None, None, 'True if alternate global download speed limiter is ebabled.'), + "alt-speed-time-begin": ( + 'number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'), + "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'True if alternate speeds scheduling is enabled.'), + "alt-speed-time-end": ( + 'number', 5, None, None, None, 'Time when alternate speeds should be disabled. 
Minutes after midnight.'), + "alt-speed-time-day": ('number', 5, None, None, None, 'Days alternate speeds scheduling is enabled.'), + "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s)'), + "blocklist-enabled": ('boolean', 5, None, None, None, 'True when blocklist is enabled.'), + "blocklist-size": ('number', 5, None, None, None, 'Number of rules in the blocklist'), + "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'), + "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'), + "config-dir": ('string', 8, None, None, None, 'location of transmissions configuration directory'), + "dht-enabled": ('boolean', 6, None, None, None, 'True if DHT enabled.'), + "download-dir": ('string', 1, None, None, None, 'The download directory.'), + "download-dir-free-space": ('number', 12, None, None, None, 'Free space in the download directory, in bytes'), + "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'), + "download-queue-enabled": ('boolean', 14, None, None, None, 'True if the download queue is enabled.'), + "encryption": ( + 'string', 1, None, None, None, 'Encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'), + "idle-seeding-limit": ('number', 10, None, None, None, 'Seed inactivity limit in minutes.'), + "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'True if the seed activity limit is enabled.'), + "incomplete-dir": ( + 'string', 7, None, None, None, 'The path to the directory for incomplete torrent transfer data.'), + "incomplete-dir-enabled": ('boolean', 7, None, None, None, 'True if the incomplete dir is enabled.'), + "lpd-enabled": ('boolean', 9, None, None, None, 'True if local peer discovery is enabled.'), + "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'), + "peer-limit-global": ('number', 5, None, 
'peer-limit', None, 'Maximum number of peers.'), + "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'), + "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'True if PEX is allowed.'), + "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'True if PEX is enabled.'), + "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'), + "peer-port": ('number', 5, None, 'port', None, 'Peer port.'), + "peer-port-random-on-start": ( + 'boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'), + "port-forwarding-enabled": ('boolean', 1, None, None, None, 'True if port forwarding is enabled.'), + "queue-stalled-minutes": ( + 'number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'), + "queue-stalled-enabled": ('boolean', 14, None, None, None, 'True if stalled tracking of transfers is enabled.'), + "rename-partial-files": ('boolean', 8, None, None, None, 'True if ".part" is appended to incomplete files'), + "rpc-version": ('number', 4, None, None, None, 'Transmission RPC API Version.'), + "rpc-version-minimum": ('number', 4, None, None, None, 'Minimum accepted RPC API Version.'), + "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'True if the done script is enabled.'), + "script-torrent-done-filename": ( + 'string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'), + "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 
1.0 means 1:1 download and upload ratio.'), + "seedRatioLimited": ('boolean', 5, None, None, None, 'True if seed ration limit is enabled.'), + "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'), + "seed-queue-enabled": ('boolean', 14, None, None, None, 'True if upload queue is enabled.'), + "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'), + "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'True if the download speed is limited.'), + "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'), + "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'True if the upload speed is limited.'), + "start-added-torrents": ('boolean', 9, None, None, None, 'When true uploaded torrents will start right away.'), + "trash-original-torrent-files": ( + 'boolean', 9, None, None, None, 'When true added .torrent files will be deleted.'), + 'units': ('object', 10, None, None, None, 'An object containing units for size and speed.'), + 'utp-enabled': ('boolean', 13, None, None, None, 'True if Micro Transport Protocol (UTP) is enabled.'), + "version": ('string', 3, None, None, None, 'Transmission version.'), }, 'set': { - "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'), - "alt-speed-enabled": ('boolean', 5, None, None, None, 'Enables alternate global download speed limiter.'), - "alt-speed-time-begin": ('number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'), - "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'Enables alternate speeds scheduling.'), - "alt-speed-time-end": ('number', 5, None, None, None, 'Time when alternate speeds should be disabled. 
Minutes after midnight.'), - "alt-speed-time-day": ('number', 5, None, None, None, 'Enables alternate speeds scheduling these days.'), - "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s).'), - "blocklist-enabled": ('boolean', 5, None, None, None, 'Enables the block list'), - "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. Updated with blocklist-update.'), - "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'), - "dht-enabled": ('boolean', 6, None, None, None, 'Enables DHT.'), - "download-dir": ('string', 1, None, None, None, 'Set the session download directory.'), - "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'), - "download-queue-enabled": ('boolean', 14, None, None, None, 'Enables download queue.'), - "encryption": ('string', 1, None, None, None, 'Set the session encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'), - "idle-seeding-limit": ('number', 10, None, None, None, 'The default seed inactivity limit in minutes.'), - "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'Enables the default seed inactivity limit'), - "incomplete-dir": ('string', 7, None, None, None, 'The path to the directory of incomplete transfer data.'), - "incomplete-dir-enabled": ('boolean', 7, None, None, None, 'Enables the incomplete transfer data directory. 
Otherwise data for incomplete transfers are stored in the download target.'), - "lpd-enabled": ('boolean', 9, None, None, None, 'Enables local peer discovery for public torrents.'), - "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'), - "peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'), - "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'), - "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'Allowing PEX in public torrents.'), - "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'Allowing PEX in public torrents.'), - "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'), - "peer-port": ('number', 5, None, 'port', None, 'Peer port.'), - "peer-port-random-on-start": ('boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'), - "port-forwarding-enabled": ('boolean', 1, None, None, None, 'Enables port forwarding.'), - "rename-partial-files": ('boolean', 8, None, None, None, 'Appends ".part" to incomplete files'), - "queue-stalled-minutes": ('number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'), - "queue-stalled-enabled": ('boolean', 14, None, None, None, 'Enable tracking of stalled transfers.'), - "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'Whether or not to call the "done" script.'), - "script-torrent-done-filename": ('string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'), - "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'), - "seed-queue-enabled": ('boolean', 14, None, None, None, 'Enables upload queue.'), - "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 
1.0 means 1:1 download and upload ratio.'), - "seedRatioLimited": ('boolean', 5, None, None, None, 'Enables seed ration limit.'), - "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'), - "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'Enables download speed limiting.'), - "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'), - "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'Enables upload speed limiting.'), - "start-added-torrents": ('boolean', 9, None, None, None, 'Added torrents will be started right away.'), - "trash-original-torrent-files": ('boolean', 9, None, None, None, 'The .torrent file of added torrents will be deleted.'), - 'utp-enabled': ('boolean', 13, None, None, None, 'Enables Micro Transport Protocol (UTP).'), + "alt-speed-down": ('number', 5, None, None, None, 'Alternate session download speed limit (in Kib/s).'), + "alt-speed-enabled": ('boolean', 5, None, None, None, 'Enables alternate global download speed limiter.'), + "alt-speed-time-begin": ( + 'number', 5, None, None, None, 'Time when alternate speeds should be enabled. Minutes after midnight.'), + "alt-speed-time-enabled": ('boolean', 5, None, None, None, 'Enables alternate speeds scheduling.'), + "alt-speed-time-end": ( + 'number', 5, None, None, None, 'Time when alternate speeds should be disabled. Minutes after midnight.'), + "alt-speed-time-day": ('number', 5, None, None, None, 'Enables alternate speeds scheduling these days.'), + "alt-speed-up": ('number', 5, None, None, None, 'Alternate session upload speed limit (in Kib/s).'), + "blocklist-enabled": ('boolean', 5, None, None, None, 'Enables the block list'), + "blocklist-url": ('string', 11, None, None, None, 'Location of the block list. 
Updated with blocklist-update.'), + "cache-size-mb": ('number', 10, None, None, None, 'The maximum size of the disk cache in MB'), + "dht-enabled": ('boolean', 6, None, None, None, 'Enables DHT.'), + "download-dir": ('string', 1, None, None, None, 'Set the session download directory.'), + "download-queue-size": ('number', 14, None, None, None, 'Number of slots in the download queue.'), + "download-queue-enabled": ('boolean', 14, None, None, None, 'Enables download queue.'), + "encryption": ('string', 1, None, None, None, + 'Set the session encryption mode, one of ``required``, ``preferred`` or ``tolerated``.'), + "idle-seeding-limit": ('number', 10, None, None, None, 'The default seed inactivity limit in minutes.'), + "idle-seeding-limit-enabled": ('boolean', 10, None, None, None, 'Enables the default seed inactivity limit'), + "incomplete-dir": ('string', 7, None, None, None, 'The path to the directory of incomplete transfer data.'), + "incomplete-dir-enabled": ('boolean', 7, None, None, None, + 'Enables the incomplete transfer data directory. 
Otherwise data for incomplete transfers are stored in the download target.'), + "lpd-enabled": ('boolean', 9, None, None, None, 'Enables local peer discovery for public torrents.'), + "peer-limit": ('number', 1, 5, None, 'peer-limit-global', 'Maximum number of peers.'), + "peer-limit-global": ('number', 5, None, 'peer-limit', None, 'Maximum number of peers.'), + "peer-limit-per-torrent": ('number', 5, None, None, None, 'Maximum number of peers per transfer.'), + "pex-allowed": ('boolean', 1, 5, None, 'pex-enabled', 'Allowing PEX in public torrents.'), + "pex-enabled": ('boolean', 5, None, 'pex-allowed', None, 'Allowing PEX in public torrents.'), + "port": ('number', 1, 5, None, 'peer-port', 'Peer port.'), + "peer-port": ('number', 5, None, 'port', None, 'Peer port.'), + "peer-port-random-on-start": ( + 'boolean', 5, None, None, None, 'Enables randomized peer port on start of Transmission.'), + "port-forwarding-enabled": ('boolean', 1, None, None, None, 'Enables port forwarding.'), + "rename-partial-files": ('boolean', 8, None, None, None, 'Appends ".part" to incomplete files'), + "queue-stalled-minutes": ( + 'number', 14, None, None, None, 'Number of minutes of idle that marks a transfer as stalled.'), + "queue-stalled-enabled": ('boolean', 14, None, None, None, 'Enable tracking of stalled transfers.'), + "script-torrent-done-enabled": ('boolean', 9, None, None, None, 'Whether or not to call the "done" script.'), + "script-torrent-done-filename": ( + 'string', 9, None, None, None, 'Filename of the script to run when the transfer is done.'), + "seed-queue-size": ('number', 14, None, None, None, 'Number of slots in the upload queue.'), + "seed-queue-enabled": ('boolean', 14, None, None, None, 'Enables upload queue.'), + "seedRatioLimit": ('double', 5, None, None, None, 'Seed ratio limit. 
1.0 means 1:1 download and upload ratio.'), + "seedRatioLimited": ('boolean', 5, None, None, None, 'Enables seed ration limit.'), + "speed-limit-down": ('number', 1, None, None, None, 'Download speed limit (in Kib/s).'), + "speed-limit-down-enabled": ('boolean', 1, None, None, None, 'Enables download speed limiting.'), + "speed-limit-up": ('number', 1, None, None, None, 'Upload speed limit (in Kib/s).'), + "speed-limit-up-enabled": ('boolean', 1, None, None, None, 'Enables upload speed limiting.'), + "start-added-torrents": ('boolean', 9, None, None, None, 'Added torrents will be started right away.'), + "trash-original-torrent-files": ( + 'boolean', 9, None, None, None, 'The .torrent file of added torrents will be deleted.'), + 'utp-enabled': ('boolean', 13, None, None, None, 'Enables Micro Transport Protocol (UTP).'), }, } diff --git a/core/transmissionrpc/error.py b/core/transmissionrpc/error.py index fed65a43..6b44bf32 100644 --- a/core/transmissionrpc/error.py +++ b/core/transmissionrpc/error.py @@ -4,11 +4,13 @@ from core.transmissionrpc.six import string_types, integer_types + class TransmissionError(Exception): """ - This exception is raised when there has occurred an error related to - communication with Transmission. It is a subclass of Exception. + This exception is raised when there has occurred an error related to + communication with Transmission. It is a subclass of Exception. """ + def __init__(self, message='', original=None): Exception.__init__(self) self.message = message @@ -21,11 +23,13 @@ class TransmissionError(Exception): else: return self.message + class HTTPHandlerError(Exception): """ - This exception is raised when there has occurred an error related to - the HTTP handler. It is a subclass of Exception. + This exception is raised when there has occurred an error related to + the HTTP handler. It is a subclass of Exception. 
""" + def __init__(self, httpurl=None, httpcode=None, httpmsg=None, httpheaders=None, httpdata=None): Exception.__init__(self) self.url = '' diff --git a/core/transmissionrpc/httphandler.py b/core/transmissionrpc/httphandler.py index 0904206f..1e884399 100644 --- a/core/transmissionrpc/httphandler.py +++ b/core/transmissionrpc/httphandler.py @@ -18,10 +18,12 @@ else: from urllib2 import HTTPError, URLError from httplib import BadStatusLine + class HTTPHandler(object): """ Prototype for HTTP handling. """ + def set_authentication(self, uri, login, password): """ Transmission use basic authentication in earlier versions and digest @@ -44,10 +46,12 @@ class HTTPHandler(object): """ raise NotImplementedError("Bad HTTPHandler, failed to implement request.") + class DefaultHTTPHandler(HTTPHandler): """ The default HTTP handler provided with transmissionrpc. """ + def __init__(self): HTTPHandler.__init__(self) self.http_opener = build_opener() diff --git a/core/transmissionrpc/session.py b/core/transmissionrpc/session.py index bd2c4e2e..6b620373 100644 --- a/core/transmissionrpc/session.py +++ b/core/transmissionrpc/session.py @@ -6,6 +6,7 @@ from core.transmissionrpc.utils import Field from core.transmissionrpc.six import iteritems, integer_types + class Session(object): """ Session is a class holding the session data for a Transmission daemon. diff --git a/core/transmissionrpc/six.py b/core/transmissionrpc/six.py index b73b777a..0554cddc 100644 --- a/core/transmissionrpc/six.py +++ b/core/transmissionrpc/six.py @@ -28,7 +28,6 @@ import types __author__ = "Benjamin Peterson " __version__ = "1.4.1" - # Useful for very coarse version differentiation. 
PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 @@ -56,6 +55,8 @@ else: class X(object): def __len__(self): return 1 << 31 + + try: len(X()) except OverflowError: @@ -79,7 +80,6 @@ def _import_module(name): class _LazyDescr(object): - def __init__(self, name): self.name = name @@ -92,7 +92,6 @@ class _LazyDescr(object): class MovedModule(_LazyDescr): - def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: @@ -107,7 +106,6 @@ class MovedModule(_LazyDescr): class MovedAttribute(_LazyDescr): - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: @@ -131,7 +129,6 @@ class MovedAttribute(_LazyDescr): return getattr(module, self.attr) - class _MovedItems(types.ModuleType): """Lazy loading of moved objects""" @@ -199,7 +196,6 @@ del attr moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves") - class Module_six_moves_urllib_parse(types.ModuleType): """Lazy loading of moved objects in six.moves.urllib_parse""" @@ -320,8 +316,10 @@ for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr -sys.modules[__name__ + ".moves.urllib_robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib_robotparser") -sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser") +sys.modules[__name__ + ".moves.urllib_robotparser"] = Module_six_moves_urllib_robotparser( + __name__ + ".moves.urllib_robotparser") +sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser( + __name__ + ".moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): @@ -379,7 +377,6 @@ else: _iteritems = "iteritems" _iterlists = "iterlists" - try: advance_iterator = next except NameError: @@ -387,18 +384,17 @@ except NameError: return it.next() next = advance_iterator - try: callable 
= callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - if PY3: def get_unbound_function(unbound): return unbound + create_bound_method = types.MethodType Iterator = object @@ -406,19 +402,21 @@ else: def get_unbound_function(unbound): return unbound.im_func + def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) + class Iterator(object): def next(self): return type(self).__next__(self) + callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") - get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) @@ -431,14 +429,17 @@ def iterkeys(d, **kw): """Return an iterator over the keys of a dictionary.""" return iter(getattr(d, _iterkeys)(**kw)) + def itervalues(d, **kw): """Return an iterator over the values of a dictionary.""" return iter(getattr(d, _itervalues)(**kw)) + def iteritems(d, **kw): """Return an iterator over the (key, value) pairs of a dictionary.""" return iter(getattr(d, _iteritems)(**kw)) + def iterlists(d, **kw): """Return an iterator over the (key, [values]) pairs of a dictionary.""" return iter(getattr(d, _iterlists)(**kw)) @@ -447,8 +448,12 @@ def iterlists(d, **kw): if PY3: def b(s): return s.encode("latin-1") + + def u(s): return s + + unichr = chr if sys.version_info[1] <= 1: def int2byte(i): @@ -460,29 +465,43 @@ if PY3: indexbytes = operator.getitem iterbytes = iter import io + StringIO = io.StringIO BytesIO = io.BytesIO else: def b(s): return s + + def u(s): return unicode(s, "unicode_escape") + + unichr = unichr int2byte = chr + + def byte2int(bs): return ord(bs[0]) + + def indexbytes(buf, i): return ord(buf[i]) + + def iterbytes(buf): return (ord(byte) for byte in buf) + + import StringIO + StringIO = BytesIO = StringIO.StringIO _add_doc(b, """Byte literal""") _add_doc(u, """Text 
literal""") - if PY3: import builtins + exec_ = getattr(builtins, "exec") @@ -506,7 +525,7 @@ else: del frame elif _locs_ is None: _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") + exec ("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): @@ -519,10 +538,12 @@ else: fp = kwargs.pop("file", sys.stdout) if fp is None: return + def write(data): if not isinstance(data, basestring): data = str(data) fp.write(data) + want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: @@ -566,8 +587,10 @@ def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" return meta("NewBase", bases, {}) + def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): orig_vars = cls.__dict__.copy() orig_vars.pop('__dict__', None) @@ -575,4 +598,5 @@ def add_metaclass(metaclass): for slots_var in orig_vars.get('__slots__', ()): orig_vars.pop(slots_var) return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper diff --git a/core/transmissionrpc/torrent.py b/core/transmissionrpc/torrent.py index eaf7a52d..a0813464 100644 --- a/core/transmissionrpc/torrent.py +++ b/core/transmissionrpc/torrent.py @@ -13,14 +13,15 @@ from six import integer_types, string_types, text_type, iteritems def get_status_old(code): """Get the torrent status using old status codes""" mapping = { - (1<<0): 'check pending', - (1<<1): 'checking', - (1<<2): 'downloading', - (1<<3): 'seeding', - (1<<4): 'stopped', + (1 << 0): 'check pending', + (1 << 1): 'checking', + (1 << 2): 'downloading', + (1 << 3): 'seeding', + (1 << 4): 'stopped', } return mapping[code] + def get_status_new(code): """Get the torrent status using new status codes""" mapping = { @@ -34,6 +35,7 @@ def get_status_new(code): } return mapping[code] + class Torrent(object): """ Torrent is a class holding the data received from Transmission regarding a bittorrent transfer. 
@@ -99,8 +101,9 @@ class Torrent(object): def _dirty_fields(self): """Enumerate changed fields""" - outgoing_keys = ['bandwidthPriority', 'downloadLimit', 'downloadLimited', 'peer_limit', 'queuePosition' - , 'seedIdleLimit', 'seedIdleMode', 'seedRatioLimit', 'seedRatioMode', 'uploadLimit', 'uploadLimited'] + outgoing_keys = ['bandwidthPriority', 'downloadLimit', 'downloadLimited', 'peer_limit', 'queuePosition', + 'seedIdleLimit', 'seedIdleMode', 'seedRatioLimit', 'seedRatioMode', 'uploadLimit', + 'uploadLimited'] fields = [] for key in outgoing_keys: if key in self._fields and self._fields[key].dirty: @@ -131,7 +134,7 @@ class Torrent(object): else: raise ValueError('Cannot update with supplied data') self._incoming_pending = False - + def _status(self): """Get the torrent status""" code = self._fields['status'].value @@ -270,7 +273,8 @@ class Torrent(object): else: raise ValueError("Not a valid limit") - download_limit = property(_get_download_limit, _set_download_limit, None, "Download limit in Kbps or None. This is a mutator.") + download_limit = property(_get_download_limit, _set_download_limit, None, + "Download limit in Kbps or None. This is a mutator.") def _get_peer_limit(self): """ @@ -307,7 +311,7 @@ class Torrent(object): self._push() priority = property(_get_priority, _set_priority, None - , "Bandwidth priority as string. Can be one of 'low', 'normal', 'high'. This is a mutator.") + , "Bandwidth priority as string. Can be one of 'low', 'normal', 'high'. This is a mutator.") def _get_seed_idle_limit(self): """ @@ -326,7 +330,7 @@ class Torrent(object): raise ValueError("Not a valid limit") seed_idle_limit = property(_get_seed_idle_limit, _set_seed_idle_limit, None - , "Torrent seed idle limit in minutes. Also see seed_idle_mode. This is a mutator.") + , "Torrent seed idle limit in minutes. Also see seed_idle_mode. 
This is a mutator.") def _get_seed_idle_mode(self): """ @@ -345,7 +349,7 @@ class Torrent(object): raise ValueError("Not a valid limit") seed_idle_mode = property(_get_seed_idle_mode, _set_seed_idle_mode, None, - """ + """ Seed idle mode as string. Can be one of 'global', 'single' or 'unlimited'. * global, use session seed idle limit. @@ -354,7 +358,7 @@ class Torrent(object): This is a mutator. """ - ) + ) def _get_seed_ratio_limit(self): """ @@ -373,7 +377,7 @@ class Torrent(object): raise ValueError("Not a valid limit") seed_ratio_limit = property(_get_seed_ratio_limit, _set_seed_ratio_limit, None - , "Torrent seed ratio limit as float. Also see seed_ratio_mode. This is a mutator.") + , "Torrent seed ratio limit as float. Also see seed_ratio_mode. This is a mutator.") def _get_seed_ratio_mode(self): """ @@ -392,7 +396,7 @@ class Torrent(object): raise ValueError("Not a valid limit") seed_ratio_mode = property(_get_seed_ratio_mode, _set_seed_ratio_mode, None, - """ + """ Seed ratio mode as string. Can be one of 'global', 'single' or 'unlimited'. * global, use session seed ratio limit. @@ -401,7 +405,7 @@ class Torrent(object): This is a mutator. """ - ) + ) def _get_upload_limit(self): """ @@ -428,7 +432,8 @@ class Torrent(object): else: raise ValueError("Not a valid limit") - upload_limit = property(_get_upload_limit, _set_upload_limit, None, "Upload limit in Kbps or None. This is a mutator.") + upload_limit = property(_get_upload_limit, _set_upload_limit, None, + "Upload limit in Kbps or None. This is a mutator.") def _get_queue_position(self): """Get the queue position for this torrent.""" diff --git a/core/transmissionrpc/utils.py b/core/transmissionrpc/utils.py index d67a3d06..223921d6 100644 --- a/core/transmissionrpc/utils.py +++ b/core/transmissionrpc/utils.py @@ -10,6 +10,7 @@ from six import string_types, iteritems UNITS = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB'] + def format_size(size): """ Format byte size into IEC prefixes, B, KiB, MiB ... 
@@ -21,6 +22,7 @@ def format_size(size): size /= 1024.0 return (size, UNITS[i]) + def format_speed(size): """ Format bytes per second speed into IEC prefixes, B/s, KiB/s, MiB/s ... @@ -28,6 +30,7 @@ def format_speed(size): (size, unit) = format_size(size) return (size, unit + '/s') + def format_timedelta(delta): """ Format datetime.timedelta into ::. @@ -36,6 +39,7 @@ def format_timedelta(delta): hours, minutes = divmod(minutes, 60) return '%d %02d:%02d:%02d' % (delta.days, hours, minutes, seconds) + def format_timestamp(timestamp, utc=False): """ Format unix timestamp into ISO date format. @@ -49,12 +53,14 @@ def format_timestamp(timestamp, utc=False): else: return '-' + class INetAddressError(Exception): """ Error parsing / generating a internet address. """ pass + def inet_address(address, default_port, default_address='localhost'): """ Parse internet address. @@ -84,6 +90,7 @@ def inet_address(address, default_port, default_address='localhost'): raise INetAddressError('Cannot look up address "%s".' % address) return (addr, port) + def rpc_bool(arg): """ Convert between Python boolean and Transmission RPC boolean. @@ -95,27 +102,31 @@ def rpc_bool(arg): arg = arg.lower() in ['true', 'yes'] return 1 if bool(arg) else 0 + TR_TYPE_MAP = { - 'number' : int, - 'string' : str, + 'number': int, + 'string': str, 'double': float, - 'boolean' : rpc_bool, + 'boolean': rpc_bool, 'array': list, 'object': dict } + def make_python_name(name): """ Convert Transmission RPC name to python compatible name. """ return name.replace('-', '_') + def make_rpc_name(name): """ Convert python compatible name to Transmission RPC name. """ return name.replace('_', '-') + def argument_value_convert(method, argument, value, rpc_version): """ Check and fix Transmission RPC issues with regards to methods, arguments and values. 
@@ -154,6 +165,7 @@ def argument_value_convert(method, argument, value, rpc_version): raise ValueError('Argument "%s" does not exists for method "%s".', (argument, method)) + def get_arguments(method, rpc_version): """ Get arguments for method in specified Transmission RPC version. @@ -175,6 +187,7 @@ def get_arguments(method, rpc_version): accessible.append(argument) return accessible + def add_stdout_logger(level='debug'): """ Add a stdout target for the transmissionrpc logging. @@ -189,6 +202,7 @@ def add_stdout_logger(level='debug'): loghandler.setLevel(loglevel) trpc_logger.addHandler(loghandler) + def add_file_logger(filepath, level='debug'): """ Add a stdout target for the transmissionrpc logging. @@ -203,4 +217,5 @@ def add_file_logger(filepath, level='debug'): loghandler.setLevel(loglevel) trpc_logger.addHandler(loghandler) + Field = namedtuple('Field', ['value', 'dirty']) diff --git a/core/utorrent/__init__.py b/core/utorrent/__init__.py index bf893c06..9bad5790 100644 --- a/core/utorrent/__init__.py +++ b/core/utorrent/__init__.py @@ -1 +1 @@ -# coding=utf-8 \ No newline at end of file +# coding=utf-8 diff --git a/core/utorrent/client.py b/core/utorrent/client.py index 2f8e385e..ae5d4634 100644 --- a/core/utorrent/client.py +++ b/core/utorrent/client.py @@ -1,29 +1,31 @@ -#coding=utf8 +# coding=utf8 + import urllib import urllib2 import urlparse import cookielib import re import StringIO + try: - import json + import json except ImportError: import simplejson as json from core.utorrent.upload import MultiPartForm -class UTorrentClient(object): +class UTorrentClient(object): def __init__(self, base_url, username, password): self.base_url = base_url self.username = username self.password = password self.opener = self._make_opener('uTorrent', base_url, username, password) self.token = self._get_token() - #TODO refresh token, when necessary + # TODO refresh token, when necessary def _make_opener(self, realm, base_url, username, password): - '''uTorrent API 
need HTTP Basic Auth and cookie support for token verify.''' + """uTorrent API need HTTP Basic Auth and cookie support for token verify.""" auth_handler = urllib2.HTTPBasicAuthHandler() auth_handler.add_password(realm=realm, @@ -31,7 +33,7 @@ class UTorrentClient(object): user=username, passwd=password) opener = urllib2.build_opener(auth_handler) - urllib2.install_opener(opener) + urllib2.install_opener(opener) cookie_jar = cookielib.CookieJar() cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar) @@ -47,69 +49,68 @@ class UTorrentClient(object): match = re.search(token_re, response.read()) return match.group(1) - def list(self, **kwargs): params = [('list', '1')] params += kwargs.items() return self._action(params) def start(self, *hashes): - params = [('action', 'start'),] + params = [('action', 'start'), ] for hash in hashes: params.append(('hash', hash)) return self._action(params) - + def stop(self, *hashes): - params = [('action', 'stop'),] + params = [('action', 'stop'), ] for hash in hashes: params.append(('hash', hash)) return self._action(params) - + def pause(self, *hashes): - params = [('action', 'pause'),] + params = [('action', 'pause'), ] for hash in hashes: params.append(('hash', hash)) return self._action(params) - + def forcestart(self, *hashes): - params = [('action', 'forcestart'),] + params = [('action', 'forcestart'), ] for hash in hashes: params.append(('hash', hash)) return self._action(params) - + def remove(self, *hashes): - params = [('action', 'remove'),] + params = [('action', 'remove'), ] for hash in hashes: params.append(('hash', hash)) return self._action(params) - + def removedata(self, *hashes): - params = [('action', 'removedata'),] + params = [('action', 'removedata'), ] for hash in hashes: params.append(('hash', hash)) return self._action(params) - + def recheck(self, *hashes): - params = [('action', 'recheck'),] + params = [('action', 'recheck'), ] for hash in hashes: params.append(('hash', hash)) return 
self._action(params) - + def getfiles(self, hash): params = [('action', 'getfiles'), ('hash', hash)] return self._action(params) - + def getprops(self, hash): params = [('action', 'getprops'), ('hash', hash)] return self._action(params) - + def setprio(self, hash, priority, *files): params = [('action', 'setprio'), ('hash', hash), ('p', str(priority))] for file_index in files: params.append(('f', str(file_index))) return self._action(params) - + def addfile(self, filename, filepath=None, bytes=None): params = [('action', 'add-file')] @@ -118,13 +119,13 @@ class UTorrentClient(object): file_handler = open(filepath) else: file_handler = StringIO.StringIO(bytes) - + form.add_file('torrent_file', filename.encode('utf-8'), file_handler) return self._action(params, str(form), form.get_content_type()) def _action(self, params, body=None, content_type=None): - #about token, see https://github.com/bittorrent/webui/wiki/TokenSystem + # about token, see https://github.com/bittorrent/webui/wiki/TokenSystem url = self.base_url + '?token=' + self.token + '&' + urllib.urlencode(params) request = urllib2.Request(url) @@ -137,6 +138,5 @@ class UTorrentClient(object): try: response = self.opener.open(request) return response.code, json.loads(response.read()) - except urllib2.HTTPError,e: - raise - + except urllib2.HTTPError, e: + raise diff --git a/core/utorrent/upload.py b/core/utorrent/upload.py index 8a72306a..de149efc 100644 --- a/core/utorrent/upload.py +++ b/core/utorrent/upload.py @@ -1,5 +1,5 @@ # coding=utf-8 -#code copied from http://www.doughellmann.com/PyMOTW/urllib2/ +# code copied from http://www.doughellmann.com/PyMOTW/urllib2/ import itertools import mimetools @@ -14,7 +14,7 @@ class MultiPartForm(object): self.files = [] self.boundary = mimetools.choose_boundary() return - + def get_content_type(self): return 'multipart/form-data; boundary=%s' % self.boundary @@ -30,7 +30,7 @@ class MultiPartForm(object): mimetype = mimetypes.guess_type(filename)[0] or 
'application/octet-stream' self.files.append((fieldname, filename, mimetype, body)) return - + def __str__(self): """Return a string representing the form data, including attached files.""" # Build a list of lists, each containing "lines" of the @@ -39,29 +39,28 @@ class MultiPartForm(object): # line is separated by '\r\n'. parts = [] part_boundary = '--' + self.boundary - + # Add the form fields parts.extend( - [ part_boundary, - 'Content-Disposition: form-data; name="%s"' % name, - '', - value, - ] + [part_boundary, + 'Content-Disposition: form-data; name="%s"' % name, + '', + value, + ] for name, value in self.form_fields - ) - + ) + # Add the files to upload parts.extend( - [ part_boundary, - 'Content-Disposition: file; name="%s"; filename="%s"' % \ - (field_name, filename), - 'Content-Type: %s' % content_type, - '', - body, - ] + [part_boundary, + 'Content-Disposition: file; name="%s"; filename="%s"' % (field_name, filename), + 'Content-Type: %s' % content_type, + '', + body, + ] for field_name, filename, content_type, body in self.files - ) - + ) + # Flatten the list and add closing boundary marker, # then return CR+LF separated data flattened = list(itertools.chain(*parts)) diff --git a/core/versionCheck.py b/core/versionCheck.py index 773a7f25..b71f903c 100644 --- a/core/versionCheck.py +++ b/core/versionCheck.py @@ -16,6 +16,7 @@ import gh_api as github import core from core import logger + class CheckVersion(): """ Version check class meant to run as a thread object with the SB scheduler. 
@@ -80,6 +81,7 @@ class CheckVersion(): if self.updater.need_update(): return self.updater.update() + class UpdateManager(): def get_github_repo_user(self): return core.GIT_USER @@ -90,6 +92,7 @@ class UpdateManager(): def get_github_branch(self): return core.GIT_BRANCH + class GitUpdateManager(UpdateManager): def __init__(self): self._git_path = self._find_working_git() @@ -103,7 +106,8 @@ class GitUpdateManager(UpdateManager): self._num_commits_ahead = 0 def _git_error(self): - logger.debug('Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.') + logger.debug( + 'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.') def _find_working_git(self): test_cmd = 'version' @@ -148,7 +152,8 @@ class GitUpdateManager(UpdateManager): logger.log(u"Not using: " + cur_git, logger.DEBUG) # Still haven't found a working git - logger.debug('Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.') + logger.debug( + 'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.') return None @@ -279,9 +284,10 @@ class GitUpdateManager(UpdateManager): logger.log(u"git didn't return numbers for behind and ahead, not using it", logger.DEBUG) return - logger.log(u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) - + u", num_commits_behind = " + str(self._num_commits_behind) + u", num_commits_ahead = " + str( - self._num_commits_ahead), logger.DEBUG) + logger.log( + u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) + + u", num_commits_behind = " + str(self._num_commits_behind) + u", num_commits_ahead = " + + 
str(self._num_commits_ahead), logger.DEBUG) def set_newest_text(self): if self._num_commits_ahead: @@ -411,8 +417,9 @@ class SourceUpdateManager(UpdateManager): # when _cur_commit_hash doesn't match anything _num_commits_behind == 100 self._num_commits_behind += 1 - logger.log(u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) - + u", num_commits_behind = " + str(self._num_commits_behind), logger.DEBUG) + logger.log( + u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) + + u", num_commits_behind = " + str(self._num_commits_behind), logger.DEBUG) def set_newest_text(self): @@ -489,9 +496,9 @@ class SourceUpdateManager(UpdateManager): old_path = os.path.join(content_dir, dirname, curfile) new_path = os.path.join(core.PROGRAM_DIR, dirname, curfile) - #Avoid DLL access problem on WIN32/64 - #These files needing to be updated manually - #or find a way to kill the access from memory + # Avoid DLL access problem on WIN32/64 + # These files needing to be updated manually + # or find a way to kill the access from memory if curfile in ('unrar.dll', 'unrar64.dll'): try: os.chmod(new_path, stat.S_IWRITE) @@ -519,4 +526,4 @@ class SourceUpdateManager(UpdateManager): logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG) return False - return True \ No newline at end of file + return True From 92ae85251331db432a1dafdc0bd45b5cb26de1e1 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 22:16:52 -0400 Subject: [PATCH 03/21] PEP8: comparison to `None`, `True`, or `False` should use `is`/`is not` --- core/nzbToMediaDB.py | 12 ++++++------ core/nzbToMediaUtil.py | 2 +- core/synchronousdeluge/rencode.py | 2 +- core/transmissionrpc/client.py | 2 +- core/transmissionrpc/torrent.py | 4 ++-- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index a34b8662..e2ff20f6 100644 --- a/core/nzbToMediaDB.py +++ 
b/core/nzbToMediaDB.py @@ -46,7 +46,7 @@ class DBConnection: return 0 def fetch(self, query, args=None): - if query == None: + if query is None: return sqlResult = None @@ -54,7 +54,7 @@ class DBConnection: while attempt < 5: try: - if args == None: + if args is None: logger.log(self.filename + ": " + query, logger.DB) cursor = self.connection.cursor() cursor.execute(query) @@ -82,7 +82,7 @@ class DBConnection: return sqlResult def mass_action(self, querylist, logTransaction=False): - if querylist == None: + if querylist is None: return sqlResult = [] @@ -123,7 +123,7 @@ class DBConnection: return sqlResult def action(self, query, args=None): - if query == None: + if query is None: return sqlResult = None @@ -131,7 +131,7 @@ class DBConnection: while attempt < 5: try: - if args == None: + if args is None: logger.log(self.filename + ": " + query, logger.DB) sqlResult = self.connection.execute(query) else: @@ -158,7 +158,7 @@ class DBConnection: sqlResults = self.action(query, args).fetchall() - if sqlResults == None: + if sqlResults is None: return [] return sqlResults diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 3899766d..c359ecda 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -1110,7 +1110,7 @@ def extractFiles(src, dst=None, keep_archive=None): fullFileName = os.path.basename(inputFile) archiveName = os.path.splitext(fullFileName)[0] archiveName = re.sub(r"part[0-9]+", "", archiveName) - if not archiveName in extracted_archive or keep_archive == True: + if not archiveName in extracted_archive or keep_archive is True: continue # don't remove if we haven't extracted this archive, or if we want to preserve them. logger.info("Removing extracted archive %s from folder %s ..." 
% (fullFileName, folder)) try: diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py index 655f903b..62e22b08 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -461,7 +461,7 @@ def test(): assert loads(dumps(L)) == L L = tuple(['a' * n for n in range(1000)]) + (None, True, None) assert loads(dumps(L)) == L - assert loads(dumps(None)) == None + assert loads(dumps(None)) is None assert loads(dumps({None: None})) == {None: None} assert 1e-10 < abs(loads(dumps(1.1)) - 1.1) < 1e-6 assert 1e-10 < abs(loads(dumps(1.1, 32)) - 1.1) < 1e-6 diff --git a/core/transmissionrpc/client.py b/core/transmissionrpc/client.py index 461be0ad..c451e685 100644 --- a/core/transmissionrpc/client.py +++ b/core/transmissionrpc/client.py @@ -111,7 +111,7 @@ def parse_torrent_ids(args): ids.extend(parse_torrent_ids(item)) else: torrent_id = parse_torrent_id(args) - if torrent_id == None: + if torrent_id is None: raise ValueError('Invalid torrent id') else: ids = [torrent_id] diff --git a/core/transmissionrpc/torrent.py b/core/transmissionrpc/torrent.py index a0813464..5fd033db 100644 --- a/core/transmissionrpc/torrent.py +++ b/core/transmissionrpc/torrent.py @@ -267,7 +267,7 @@ class Torrent(object): self._fields['downloadLimited'] = Field(True, True) self._fields['downloadLimit'] = Field(limit, True) self._push() - elif limit == None: + elif limit is None: self._fields['downloadLimited'] = Field(False, True) self._push() else: @@ -426,7 +426,7 @@ class Torrent(object): self._fields['uploadLimited'] = Field(True, True) self._fields['uploadLimit'] = Field(limit, True) self._push() - elif limit == None: + elif limit is None: self._fields['uploadLimited'] = Field(False, True) self._push() else: From 1fd904eb5bb738a237c5552af5dba83c006f6c01 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 22:20:45 -0400 Subject: [PATCH 04/21] PEP8: Tests for membership should use `in`/`not in` * .has_key() is deprecated, use `in` --- 
core/__init__.py | 12 ++++----- core/logger.py | 2 +- core/nzbToMediaConfig.py | 46 +++++++++++++++++------------------ core/nzbToMediaUtil.py | 8 +++--- core/transcoder/transcoder.py | 6 ++--- 5 files changed, 37 insertions(+), 37 deletions(-) diff --git a/core/__init__.py b/core/__init__.py index 21864078..12c413f1 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -227,7 +227,7 @@ def initialize(section=None): if __INITIALIZED__: return False - if os.environ.has_key('NTM_LOGFILE'): + if 'NTM_LOGFILE' in os.environ: LOG_FILE = os.environ['NTM_LOGFILE'] LOG_DIR = os.path.split(LOG_FILE)[0] @@ -259,7 +259,7 @@ def initialize(section=None): except: print 'Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable' print 'or find another way to force Python to use ' + SYS_ENCODING + ' for string encoding.' - if os.environ.has_key('NZBOP_SCRIPTDIR'): + if 'NZBOP_SCRIPTDIR' in os.environ: sys.exit(NZBGET_POSTPROCESS_ERROR) else: sys.exit(1) @@ -270,13 +270,13 @@ def initialize(section=None): # run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options. if not config.migrate(): logger.error("Unable to migrate config file %s, exiting ..." % (CONFIG_FILE)) - if os.environ.has_key('NZBOP_SCRIPTDIR'): + if 'NZBOP_SCRIPTDIR' in os.environ: pass # We will try and read config from Environment. 
else: sys.exit(-1) # run migrate to convert NzbGet data from old cfg style to new cfg style - if os.environ.has_key('NZBOP_SCRIPTDIR'): + if 'NZBOP_SCRIPTDIR' in os.environ: CFG = config.addnzbget() else: # load newly migrated config @@ -441,9 +441,9 @@ def initialize(section=None): GENERALOPTS = GENERALOPTS.split(',') if GENERALOPTS == ['']: GENERALOPTS = [] - if not '-fflags' in GENERALOPTS: + if '-fflags' not in GENERALOPTS: GENERALOPTS.append('-fflags') - if not '+genpts' in GENERALOPTS: + if '+genpts' not in GENERALOPTS: GENERALOPTS.append('+genpts') try: OUTPUTQUALITYPERCENT = int(CFG["Transcoder"]["outputQualityPercent"]) diff --git a/core/logger.py b/core/logger.py index 324248d4..b3800a98 100644 --- a/core/logger.py +++ b/core/logger.py @@ -228,7 +228,7 @@ class NTMRotatingLogHandler(object): def log_error_and_exit(self, error_msg): log(error_msg, ERROR) - if os.environ.has_key('NZBOP_SCRIPTDIR'): + if 'NZBOP_SCRIPTDIR' in os.environ: sys.exit(core.NZBGET_POSTPROCESS_ERROR) elif not self.console_logging: sys.exit(error_msg.encode(core.SYS_ENCODING, 'xmlcharrefreplace')) diff --git a/core/nzbToMediaConfig.py b/core/nzbToMediaConfig.py index e0bb8172..75d8ad19 100644 --- a/core/nzbToMediaConfig.py +++ b/core/nzbToMediaConfig.py @@ -186,7 +186,7 @@ class ConfigObj(configobj.ConfigObj, Section): CFG_NEW['Posix'][option] = value values.pop(option) if option == "remote_path": - if value and not value in ['0', '1', 0, 1]: + if value and value not in ['0', '1', 0, 1]: value = 1 elif not value: value = 0 @@ -251,14 +251,14 @@ class ConfigObj(configobj.ConfigObj, Section): CFG_NEW = config() try: - if os.environ.has_key('NZBPO_NDCATEGORY') and os.environ.has_key('NZBPO_SBCATEGORY'): + if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ: if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']: logger.warning("%s category is set for SickBeard and NzbDrone. 
" "Please check your config in NZBGet" % (os.environ['NZBPO_NDCATEGORY'])) section = "Nzb" key = 'NZBOP_DESTDIR' - if os.environ.has_key(key): + if key in os.environ: option = 'default_downloadDirectory' value = os.environ[key] CFG_NEW[section][option] = value @@ -268,7 +268,7 @@ class ConfigObj(configobj.ConfigObj, Section): cfgKeys = ['auto_update', 'check_media', 'safe_mode'] for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] CFG_NEW[section][option] = value @@ -278,7 +278,7 @@ class ConfigObj(configobj.ConfigObj, Section): cfgKeys = ['mount_points'] for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] CFG_NEW[section][option] = value @@ -289,10 +289,10 @@ class ConfigObj(configobj.ConfigObj, Section): 'WAIT_FOR', 'WATCH_DIR'] cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path', 'wait_for', 'watch_dir'] - if os.environ.has_key(envCatKey): + if envCatKey in os.environ: for index in range(len(envKeys)): key = 'NZBPO_CPS' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] if os.environ[envCatKey] not in CFG_NEW[section].sections: @@ -306,10 +306,10 @@ class ConfigObj(configobj.ConfigObj, Section): 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD'] cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method'] - if os.environ.has_key(envCatKey): + if envCatKey in os.environ: for index in range(len(envKeys)): key = 'NZBPO_SB' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] if 
os.environ[envCatKey] not in CFG_NEW[section].sections: @@ -323,10 +323,10 @@ class ConfigObj(configobj.ConfigObj, Section): envCatKey = 'NZBPO_HPCATEGORY' envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH'] cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path'] - if os.environ.has_key(envCatKey): + if envCatKey in os.environ: for index in range(len(envKeys)): key = 'NZBPO_HP' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] if os.environ[envCatKey] not in CFG_NEW[section].sections: @@ -340,10 +340,10 @@ class ConfigObj(configobj.ConfigObj, Section): 'REMOTE_PATH'] cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir', 'remote_path'] - if os.environ.has_key(envCatKey): + if envCatKey in os.environ: for index in range(len(envKeys)): key = 'NZBPO_MY' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] if os.environ[envCatKey] not in CFG_NEW[section].sections: @@ -355,10 +355,10 @@ class ConfigObj(configobj.ConfigObj, Section): envCatKey = 'NZBPO_GZCATEGORY' envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH'] cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path'] - if os.environ.has_key(envCatKey): + if envCatKey in os.environ: for index in range(len(envKeys)): key = 'NZBPO_GZ' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] if os.environ[envCatKey] not in CFG_NEW[section].sections: @@ -372,10 +372,10 @@ class ConfigObj(configobj.ConfigObj, Section): 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH'] cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 
'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path'] - if os.environ.has_key(envCatKey): + if envCatKey in os.environ: for index in range(len(envKeys)): key = 'NZBPO_ND' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] if os.environ[envCatKey] not in CFG_NEW[section].sections: @@ -390,7 +390,7 @@ class ConfigObj(configobj.ConfigObj, Section): cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions'] for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] CFG_NEW[section][option] = value @@ -400,7 +400,7 @@ class ConfigObj(configobj.ConfigObj, Section): cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata'] for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] CFG_NEW[section][option] = value @@ -428,7 +428,7 @@ class ConfigObj(configobj.ConfigObj, Section): 'outputAudioOtherChannels'] for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] CFG_NEW[section][option] = value @@ -438,7 +438,7 @@ class ConfigObj(configobj.ConfigObj, Section): cfgKeys = ['wake', 'host', 'port', 'mac'] for index in range(len(envKeys)): key = 'NZBPO_WOL' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] CFG_NEW[section][option] = value @@ -449,10 +449,10 @@ class ConfigObj(configobj.ConfigObj, Section): 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH'] cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce', 'user_script_successCodes', 'user_script_clean', 'delay', 
'remote_path'] - if os.environ.has_key(envCatKey): + if envCatKey in os.environ: for index in range(len(envKeys)): key = 'NZBPO_' + envKeys[index] - if os.environ.has_key(key): + if key in os.environ: option = cfgKeys[index] value = os.environ[key] if os.environ[envCatKey] not in CFG_NEW[section].sections: diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index c359ecda..9e5eeaa5 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -157,7 +157,7 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): tordir = True imdbid = [item for item in pathlist if '.cp(tt' in item] # This looks for the .cp(tt imdb id in the path. - if imdbid and not '.cp(tt' in inputName: + if imdbid and '.cp(tt' not in inputName: inputName = imdbid[0] # This ensures the imdb id is preserved and passed to CP tordir = True @@ -454,7 +454,7 @@ def convert_to_ascii(inputName, dirName): dirName = os.path.join(dir, base2) logger.info("Renaming directory to: %s." % (base2), 'ENCODER') os.rename(os.path.join(dir, base), dirName) - if os.environ.has_key('NZBOP_SCRIPTDIR'): + if 'NZBOP_SCRIPTDIR' in os.environ: print "[NZB] DIRECTORY=%s" % (dirName) # Return the new directory to NZBGet. 
for dirname, dirnames, filenames in os.walk(dirName, topdown=False): @@ -1038,7 +1038,7 @@ def find_imdbid(dirName, inputName): imdbid = m.group(1) logger.info("Found imdbID [%s] via file name" % imdbid) return imdbid - if os.environ.has_key('NZBPR__DNZB_MOREINFO'): + if 'NZBPR__DNZB_MOREINFO' in os.environ: dnzb_more_info = os.environ.get('NZBPR__DNZB_MOREINFO', '') if dnzb_more_info != '': regex = re.compile(r'^http://www.imdb.com/title/(tt[0-9]+)/$', re.IGNORECASE) @@ -1110,7 +1110,7 @@ def extractFiles(src, dst=None, keep_archive=None): fullFileName = os.path.basename(inputFile) archiveName = os.path.splitext(fullFileName)[0] archiveName = re.sub(r"part[0-9]+", "", archiveName) - if not archiveName in extracted_archive or keep_archive is True: + if archiveName not in extracted_archive or keep_archive is True: continue # don't remove if we haven't extracted this archive, or if we want to preserve them. logger.info("Removing extracted archive %s from folder %s ..." % (fullFileName, folder)) try: diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index 1eed54b6..d31ef25e 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -594,11 +594,11 @@ def processList(List, newDir, bitbucket): for item in List: newfile = None ext = os.path.splitext(item)[1].lower() - if ext in ['.iso', '.bin', '.img'] and not ext in core.IGNOREEXTENSIONS: + if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS: logger.debug("Attempting to rip disk image: %s" % (item), "TRANSCODER") newList.extend(ripISO(item, newDir, bitbucket)) remList.append(item) - elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and not '.vob' in core.IGNOREEXTENSIONS: + elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS: logger.debug("Found VIDEO_TS image file: %s" % (item), "TRANSCODER") if not vtsPath: try: @@ -618,7 +618,7 @@ def processList(List, newDir, bitbucket): if combine: 
newList.extend(combineCD(combine)) for file in newList: - if isinstance(file, str) and not 'concat:' in file and not os.path.isfile(file): + if isinstance(file, str) and 'concat:' not in file and not os.path.isfile(file): success = False break if success and newList: From 3acaf29f1e2024e6e9d23c3f6ee22d9651dc756e Mon Sep 17 00:00:00 2001 From: Labrys Date: Sun, 5 Jun 2016 00:36:12 -0400 Subject: [PATCH 05/21] Update six to 1.10.0 --- core/transmissionrpc/six.py | 468 ++++++++++++++++++++++++++++-------- libs/six.py | 372 ++++++++++++++++++++++------ 2 files changed, 671 insertions(+), 169 deletions(-) diff --git a/core/transmissionrpc/six.py b/core/transmissionrpc/six.py index 0554cddc..190c0239 100644 --- a/core/transmissionrpc/six.py +++ b/core/transmissionrpc/six.py @@ -1,7 +1,6 @@ -# coding=utf-8 """Utilities for writing code that runs on Python 2 and 3""" -# Copyright (c) 2010-2013 Benjamin Peterson +# Copyright (c) 2010-2015 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -21,16 +20,22 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +from __future__ import absolute_import + +import functools +import itertools import operator import sys import types __author__ = "Benjamin Peterson " -__version__ = "1.4.1" +__version__ = "1.10.0" + # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, @@ -53,10 +58,9 @@ else: else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
class X(object): + def __len__(self): return 1 << 31 - - try: len(X()) except OverflowError: @@ -80,18 +84,24 @@ def _import_module(name): class _LazyDescr(object): + def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() - setattr(obj, self.name, result) - # This is a bit ugly, but it avoids running this again. - delattr(tp, self.name) + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass return result class MovedModule(_LazyDescr): + def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: @@ -104,8 +114,30 @@ class MovedModule(_LazyDescr): def _resolve(self): return _import_module(self.mod) + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + class MovedAttribute(_LazyDescr): + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: @@ -129,8 +161,75 @@ class MovedAttribute(_LazyDescr): return getattr(module, self.attr) -class _MovedItems(types.ModuleType): +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. 
It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + """Lazy loading of moved objects""" + __path__ = [] # mark as package _moved_attributes = [ @@ -138,25 +237,33 @@ _moved_attributes = [ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("config", "config"), + MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), 
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), @@ -166,12 +273,14 @@ _moved_attributes = [ MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", "tkColorChooser", @@ -187,21 +296,35 @@ _moved_attributes = [ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("winreg", "_winreg"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", 
"SimpleXMLRPCServer", "xmlrpc.server"), ] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) del attr -moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves") +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") -class Module_six_moves_urllib_parse(types.ModuleType): +class Module_six_moves_urllib_parse(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), @@ -215,16 +338,27 @@ _urllib_parse_moved_attributes = [ MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr -sys.modules[__name__ + ".moves.urllib_parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse") -sys.modules[__name__ + 
".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib.parse") +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") -class Module_six_moves_urllib_error(types.ModuleType): +class Module_six_moves_urllib_error(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_error""" @@ -237,11 +371,14 @@ for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr -sys.modules[__name__ + ".moves.urllib_error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib_error") -sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error") +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") -class Module_six_moves_urllib_request(types.ModuleType): +class Module_six_moves_urllib_request(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_request""" @@ -278,16 +415,20 @@ _urllib_request_moved_attributes = [ MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr -sys.modules[__name__ + ".moves.urllib_request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib_request") -sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request") +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + 
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") -class Module_six_moves_urllib_response(types.ModuleType): +class Module_six_moves_urllib_response(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_response""" @@ -301,11 +442,14 @@ for attr in _urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, attr.name, attr) del attr -sys.modules[__name__ + ".moves.urllib_response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib_response") -sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response") +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") -class Module_six_moves_urllib_robotparser(types.ModuleType): +class Module_six_moves_urllib_robotparser(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_robotparser""" @@ -316,22 +460,27 @@ for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr -sys.modules[__name__ + ".moves.urllib_robotparser"] = Module_six_moves_urllib_robotparser( - __name__ + ".moves.urllib_robotparser") -sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser( - __name__ + ".moves.urllib.robotparser") +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - parse = sys.modules[__name__ + ".moves.urllib_parse"] - error = 
sys.modules[__name__ + ".moves.urllib_error"] - request = sys.modules[__name__ + ".moves.urllib_request"] - response = sys.modules[__name__ + ".moves.urllib_response"] - robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"] + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] -sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib") +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") def add_move(move): @@ -358,11 +507,6 @@ if PY3: _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" - - _iterkeys = "keys" - _itervalues = "values" - _iteritems = "items" - _iterlists = "lists" else: _meth_func = "im_func" _meth_self = "im_self" @@ -372,10 +516,6 @@ else: _func_defaults = "func_defaults" _func_globals = "func_globals" - _iterkeys = "iterkeys" - _itervalues = "itervalues" - _iteritems = "iteritems" - _iterlists = "iterlists" try: advance_iterator = next @@ -384,39 +524,44 @@ except NameError: return it.next() next = advance_iterator + try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + if PY3: def get_unbound_function(unbound): return unbound - create_bound_method = types.MethodType + def create_unbound_method(func, cls): + return func + Iterator = object else: def get_unbound_function(unbound): return unbound.im_func - def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) class Iterator(object): 
def next(self): return type(self).__next__(self) - callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") + get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) @@ -425,95 +570,121 @@ get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) -def iterkeys(d, **kw): - """Return an iterator over the keys of a dictionary.""" - return iter(getattr(d, _iterkeys)(**kw)) +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + def itervalues(d, **kw): + return iter(d.values(**kw)) -def itervalues(d, **kw): - """Return an iterator over the values of a dictionary.""" - return iter(getattr(d, _itervalues)(**kw)) + def iteritems(d, **kw): + return iter(d.items(**kw)) + def iterlists(d, **kw): + return iter(d.lists(**kw)) -def iteritems(d, **kw): - """Return an iterator over the (key, value) pairs of a dictionary.""" - return iter(getattr(d, _iteritems)(**kw)) + viewkeys = operator.methodcaller("keys") + viewvalues = operator.methodcaller("values") -def iterlists(d, **kw): - """Return an iterator over the (key, [values]) pairs of a dictionary.""" - return iter(getattr(d, _iterlists)(**kw)) + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") 
+_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") - def u(s): return s - - unichr = chr - if sys.version_info[1] <= 1: - def int2byte(i): - return bytes((i,)) - else: - # This is about 2x faster than the implementation above on 3.2+ - int2byte = operator.methodcaller("to_bytes", 1, "big") + import struct + int2byte = struct.Struct(">B").pack + del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io - StringIO = io.StringIO BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" else: def b(s): return s - + # Workaround for standalone backslash def u(s): - return unicode(s, "unicode_escape") - - + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr - def byte2int(bs): return ord(bs[0]) - def indexbytes(buf, i): return ord(buf[i]) - - - def iterbytes(buf): - return (ord(byte) for byte in buf) - - + iterbytes = functools.partial(itertools.imap, ord) import StringIO - StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + if PY3: - import builtins - - exec_ = getattr(builtins, "exec") - + exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): + if value is None: + value = tp() if value.__traceback__ 
is not tb: raise value.with_traceback(tb) raise value - - print_ = getattr(builtins, "print") - del builtins - else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" @@ -525,16 +696,32 @@ else: del frame elif _locs_ is None: _locs_ = _globs_ - exec ("""exec _code_ in _globs_, _locs_""") - + exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: def print_(*args, **kwargs): - """The new-style print function.""" + """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return @@ -542,8 +729,15 @@ else: def write(data): if not isinstance(data, basestring): data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) fp.write(data) - want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: @@ -579,24 +773,96 @@ else: write(sep) write(arg) write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() _add_doc(reraise, """Reraise an exception.""") +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" - return meta("NewBase", bases, {}) + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) - for slots_var in orig_vars.get('__slots__', ()): - orig_vars.pop(slots_var) return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/libs/six.py b/libs/six.py index ab2c7e85..190c0239 100644 --- a/libs/six.py +++ b/libs/six.py @@ -1,6 +1,6 @@ """Utilities for writing code that runs on Python 2 and 3""" -# Copyright (c) 2010-2014 Benjamin Peterson +# Copyright (c) 2010-2015 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -20,17 +20,22 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +from __future__ import absolute_import + +import functools +import itertools import operator import sys import types __author__ = "Benjamin Peterson " -__version__ = "1.5.2" +__version__ = "1.10.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, @@ -53,6 +58,7 @@ else: else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): + def __len__(self): return 1 << 31 try: @@ -84,9 +90,13 @@ class _LazyDescr(object): def __get__(self, obj, tp): result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - # This is a bit ugly, but it avoids running this again. - delattr(obj.__class__, self.name) + setattr(obj, self.name, result) # Invokes __set__. 
+ try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass return result @@ -105,14 +115,6 @@ class MovedModule(_LazyDescr): return _import_module(self.mod) def __getattr__(self, attr): - # Hack around the Django autoreloader. The reloader tries to get - # __file__ or __name__ of every module in sys.modules. This doesn't work - # well if this MovedModule is for an module that is unavailable on this - # machine (like winreg on Unix systems). Thus, we pretend __file__ and - # __name__ don't exist if the module hasn't been loaded yet. See issues - # #51 and #53. - if attr in ("__file__", "__name__") and self.mod not in sys.modules: - raise AttributeError _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) @@ -159,9 +161,75 @@ class MovedAttribute(_LazyDescr): return getattr(module, self.attr) +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + class _MovedItems(_LazyModule): + """Lazy loading of moved objects""" + __path__ = [] # mark as package _moved_attributes = [ @@ -169,26 +237,33 @@ _moved_attributes = [ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", 
"__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), @@ -222,25 +297,34 @@ _moved_attributes = [ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("winreg", "_winreg"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] +# Add windows specific modules. 
+if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): - sys.modules[__name__ + ".moves." + attr.name] = attr + _importer._add_module(attr, "moves." + attr.name) del attr _MovedItems._moved_attributes = _moved_attributes -moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves") +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), @@ -254,6 +338,14 @@ _urllib_parse_moved_attributes = [ MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) @@ -261,10 +353,12 @@ del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes -sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = 
Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse") +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_error""" @@ -279,10 +373,12 @@ del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes -sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error") +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_request""" @@ -327,10 +423,12 @@ del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes -sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request") +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_response""" @@ -346,10 +444,12 @@ del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes -sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response") +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_robotparser""" @@ -362,22 +462,25 @@ del attr 
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes -sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser") +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - parse = sys.modules[__name__ + ".moves.urllib_parse"] - error = sys.modules[__name__ + ".moves.urllib_error"] - request = sys.modules[__name__ + ".moves.urllib_request"] - response = sys.modules[__name__ + ".moves.urllib_response"] - robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"] + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] - -sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib") +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") def add_move(move): @@ -404,11 +507,6 @@ if PY3: _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" - - _iterkeys = "keys" - _itervalues = "values" - _iteritems = "items" - _iterlists = "lists" else: _meth_func = "im_func" _meth_self = "im_self" @@ -418,11 +516,6 @@ else: _func_defaults = "func_defaults" _func_globals = "func_globals" - _iterkeys = "iterkeys" - _itervalues = "itervalues" - _iteritems = "iteritems" - _iterlists = "iterlists" - try: advance_iterator = next @@ -445,6 +538,9 @@ 
if PY3: create_bound_method = types.MethodType + def create_unbound_method(func, cls): + return func + Iterator = object else: def get_unbound_function(unbound): @@ -453,6 +549,9 @@ else: def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + class Iterator(object): def next(self): @@ -471,66 +570,117 @@ get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) -def iterkeys(d, **kw): - """Return an iterator over the keys of a dictionary.""" - return iter(getattr(d, _iterkeys)(**kw)) +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) -def itervalues(d, **kw): - """Return an iterator over the values of a dictionary.""" - return iter(getattr(d, _itervalues)(**kw)) + def itervalues(d, **kw): + return iter(d.values(**kw)) -def iteritems(d, **kw): - """Return an iterator over the (key, value) pairs of a dictionary.""" - return iter(getattr(d, _iteritems)(**kw)) + def iteritems(d, **kw): + return iter(d.items(**kw)) -def iterlists(d, **kw): - """Return an iterator over the (key, [values]) pairs of a dictionary.""" - return iter(getattr(d, _iterlists)(**kw)) + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") 
+_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") + def u(s): return s unichr = chr - if sys.version_info[1] <= 1: - def int2byte(i): - return bytes((i,)) - else: - # This is about 2x faster than the implementation above on 3.2+ - int2byte = operator.methodcaller("to_bytes", 1, "big") + import struct + int2byte = struct.Struct(">B").pack + del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash + def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr + def byte2int(bs): return ord(bs[0]) + def indexbytes(buf, i): return ord(buf[i]) - def iterbytes(buf): - return (ord(byte) for byte in buf) + iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + if PY3: exec_ = getattr(moves.builtins, "exec") - def reraise(tp, value, tb=None): + if value is None: + value = tp() if value.__traceback__ is not tb: raise 
value.with_traceback(tb) raise value @@ -548,12 +698,26 @@ else: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") - exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): @@ -561,13 +725,14 @@ if print_ is None: fp = kwargs.pop("file", sys.stdout) if fp is None: return + def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): + isinstance(data, unicode) and + fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" @@ -608,25 +773,96 @@ if print_ is None: write(sep) write(arg) write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() _add_doc(reraise, """Reraise an exception.""") +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" - return meta("NewBase", bases, {}) + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the 
actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper \ No newline at end of file + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) From 382d108db238f8ac8c8b63295f88b2f696295e84 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 22:10:26 -0400 Subject: [PATCH 06/21] Optimize imports * PEP8: Fix module level import not at top of file * Remove unused imports * Remove simplejson * Replace mimetools with email --- core/__init__.py | 10 ++++++---- core/databases/mainDB.py | 2 +- core/gh_api.py | 2 +- core/nzbToMediaAutoFork.py | 5 +++-- core/synchronousdeluge/__init__.py | 5 ++--- core/synchronousdeluge/rencode.py | 7 ++++--- core/transcoder/transcoder.py | 2 -- core/transmissionrpc/utils.py | 7 +++++-- core/utorrent/client.py | 6 +----- core/utorrent/upload.py | 4 ++-- 10 files changed, 25 insertions(+), 25 deletions(-) diff --git a/core/__init__.py b/core/__init__.py index 12c413f1..9f9fe812 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -33,10 +33,12 @@ from core.autoProcess.autoProcessMusic import autoProcessMusic from core.autoProcess.autoProcessTV import autoProcessTV from core import logger, versionCheck, nzbToMediaDB from core.nzbToMediaConfig import config -from core.nzbToMediaUtil import category_search, sanitizeName, copy_link, parse_args, flatten, getDirs, \ - rmReadOnly, rmDir, pause_torrent, resume_torrent, remove_torrent, listMediaFiles, \ - extractFiles, cleanDir, update_downloadInfoStatus, get_downloadInfo, WakeUp, makeDir, cleanDir, \ - create_torrent_class, listMediaFiles, RunningProcess +from 
core.nzbToMediaUtil import ( + category_search, sanitizeName, copy_link, parse_args, flatten, getDirs, + rmReadOnly, rmDir, pause_torrent, resume_torrent, remove_torrent, listMediaFiles, + extractFiles, cleanDir, update_downloadInfoStatus, get_downloadInfo, WakeUp, makeDir, cleanDir, + create_torrent_class, listMediaFiles, RunningProcess, + ) from core.transcoder import transcoder from core.databases import mainDB diff --git a/core/databases/mainDB.py b/core/databases/mainDB.py index 0d27b526..e32e6dae 100644 --- a/core/databases/mainDB.py +++ b/core/databases/mainDB.py @@ -1,5 +1,5 @@ # coding=utf-8 -import core + from core import logger, nzbToMediaDB from core.nzbToMediaUtil import backupVersionedFile diff --git a/core/gh_api.py b/core/gh_api.py index 95faf10e..1db7faf7 100644 --- a/core/gh_api.py +++ b/core/gh_api.py @@ -1,5 +1,5 @@ # coding=utf-8 -import json + import requests diff --git a/core/nzbToMediaAutoFork.py b/core/nzbToMediaAutoFork.py index d1c122c1..cfb4e60c 100644 --- a/core/nzbToMediaAutoFork.py +++ b/core/nzbToMediaAutoFork.py @@ -1,7 +1,8 @@ # coding=utf-8 -import urllib -import core + import requests + +import core from core import logger diff --git a/core/synchronousdeluge/__init__.py b/core/synchronousdeluge/__init__.py index 6155881f..9d4d8c77 100644 --- a/core/synchronousdeluge/__init__.py +++ b/core/synchronousdeluge/__init__.py @@ -15,10 +15,9 @@ Example usage: download_location = client.core.get_config_value("download_location").get() """ +from core.synchronousdeluge.exceptions import DelugeRPCError + __title__ = "synchronous-deluge" __version__ = "0.1" __author__ = "Christian Dale" - -from core.synchronousdeluge.exceptions import DelugeRPCError - diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py index 62e22b08..0d960255 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -19,6 +19,10 @@ rencode module versions, so you should check that you are using the same rencode 
version throughout your project. """ +import struct +from threading import Lock + + __version__ = '1.0.1' __all__ = ['dumps', 'loads'] @@ -62,9 +66,6 @@ __all__ = ['dumps', 'loads'] # (The rencode module is licensed under the above license as well). # -import struct -from threading import Lock - # Default number of bits for serialized floats, either 32 or 64 (also a parameter for dumps()). DEFAULT_FLOAT_BITS = 32 diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index d31ef25e..9065c78a 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -3,8 +3,6 @@ import errno import os import platform import subprocess -import urllib2 -import traceback import core import json import shutil diff --git a/core/transmissionrpc/utils.py b/core/transmissionrpc/utils.py index 223921d6..c2bca855 100644 --- a/core/transmissionrpc/utils.py +++ b/core/transmissionrpc/utils.py @@ -2,10 +2,13 @@ # Copyright (c) 2008-2013 Erik Svensson # Licensed under the MIT license. 
-import socket, datetime, logging, constants +import constants +import datetime +import logging +import socket from collections import namedtuple -from constants import LOGGER +from constants import LOGGER from six import string_types, iteritems UNITS = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB'] diff --git a/core/utorrent/client.py b/core/utorrent/client.py index ae5d4634..f8989acf 100644 --- a/core/utorrent/client.py +++ b/core/utorrent/client.py @@ -4,14 +4,10 @@ import urllib import urllib2 import urlparse import cookielib +import json import re import StringIO -try: - import json -except ImportError: - import simplejson as json - from core.utorrent.upload import MultiPartForm diff --git a/core/utorrent/upload.py b/core/utorrent/upload.py index de149efc..ddf228cc 100644 --- a/core/utorrent/upload.py +++ b/core/utorrent/upload.py @@ -1,8 +1,8 @@ # coding=utf-8 # code copied from http://www.doughellmann.com/PyMOTW/urllib2/ +from email.generator import _make_boundary as make_boundary import itertools -import mimetools import mimetypes @@ -12,7 +12,7 @@ class MultiPartForm(object): def __init__(self): self.form_fields = [] self.files = [] - self.boundary = mimetools.choose_boundary() + self.boundary = make_boundary() return def get_content_type(self): From cf1ae938fccbc85228aa72f60737e0b40733a815 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 23:53:26 -0400 Subject: [PATCH 07/21] Use six to standardize imports between Python 2 and Python 3 --- core/__init__.py | 4 +++- core/transmissionrpc/client.py | 8 ++----- core/transmissionrpc/httphandler.py | 20 +++++++---------- core/utorrent/client.py | 35 ++++++++++++++++------------- 4 files changed, 33 insertions(+), 34 deletions(-) diff --git a/core/__init__.py b/core/__init__.py index 9f9fe812..46eef2c3 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -1,4 +1,6 @@ # coding=utf-8 + +from six.moves import reload_module import locale import os import re @@ -252,7 +254,7 @@ def 
initialize(section=None): SYS_ENCODING = 'UTF-8' if not hasattr(sys, "setdefaultencoding"): - reload(sys) + reload_module(sys) try: # pylint: disable=E1101 diff --git a/core/transmissionrpc/client.py b/core/transmissionrpc/client.py index c451e685..b64b709d 100644 --- a/core/transmissionrpc/client.py +++ b/core/transmissionrpc/client.py @@ -18,12 +18,8 @@ from core.transmissionrpc.torrent import Torrent from core.transmissionrpc.session import Session from six import PY3, integer_types, string_types, iteritems -if PY3: - from urllib.parse import urlparse - from urllib.request import urlopen -else: - from urlparse import urlparse - from urllib2 import urlopen +from six.moves.urllib_parse import urlparse +from six.moves.urllib_request import urlopen def debug_httperror(error): diff --git a/core/transmissionrpc/httphandler.py b/core/transmissionrpc/httphandler.py index 1e884399..2968762e 100644 --- a/core/transmissionrpc/httphandler.py +++ b/core/transmissionrpc/httphandler.py @@ -4,19 +4,15 @@ import sys -from core.transmissionrpc.error import HTTPHandlerError -from six import PY3 +from six.moves.urllib_request import ( + build_opener, install_opener, + HTTPBasicAuthHandler, HTTPDigestAuthHandler, HTTPPasswordMgrWithDefaultRealm, + Request, +) +from six.moves.urllib_error import HTTPError, URLError +from six.moves.http_client import BadStatusLine -if PY3: - from urllib.request import Request, build_opener, \ - HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler - from urllib.error import HTTPError, URLError - from http.client import BadStatusLine -else: - from urllib2 import Request, build_opener, \ - HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler - from urllib2 import HTTPError, URLError - from httplib import BadStatusLine +from core.transmissionrpc.error import HTTPHandlerError class HTTPHandler(object): diff --git a/core/utorrent/client.py b/core/utorrent/client.py index f8989acf..f8ddc80d 100644 --- 
a/core/utorrent/client.py +++ b/core/utorrent/client.py @@ -1,12 +1,17 @@ # coding=utf8 -import urllib -import urllib2 -import urlparse -import cookielib import json import re -import StringIO + +from six import StringIO +from six.moves.http_cookiejar import CookieJar +from six.moves.urllib_error import HTTPError +from six.moves.urllib_parse import urljoin, urlencode +from six.moves.urllib_request import ( + build_opener, install_opener, + HTTPBasicAuthHandler, HTTPCookieProcessor, + Request, +) from core.utorrent.upload import MultiPartForm @@ -23,23 +28,23 @@ class UTorrentClient(object): def _make_opener(self, realm, base_url, username, password): """uTorrent API need HTTP Basic Auth and cookie support for token verify.""" - auth_handler = urllib2.HTTPBasicAuthHandler() + auth_handler = HTTPBasicAuthHandler() auth_handler.add_password(realm=realm, uri=base_url, user=username, passwd=password) - opener = urllib2.build_opener(auth_handler) - urllib2.install_opener(opener) + opener = build_opener(auth_handler) + install_opener(opener) - cookie_jar = cookielib.CookieJar() - cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar) + cookie_jar = CookieJar() + cookie_handler = HTTPCookieProcessor(cookie_jar) handlers = [auth_handler, cookie_handler] - opener = urllib2.build_opener(*handlers) + opener = build_opener(*handlers) return opener def _get_token(self): - url = urlparse.urljoin(self.base_url, 'token.html') + url = urljoin(self.base_url, 'token.html') response = self.opener.open(url) token_re = "" match = re.search(token_re, response.read()) @@ -122,8 +127,8 @@ class UTorrentClient(object): def _action(self, params, body=None, content_type=None): # about token, see https://github.com/bittorrent/webui/wiki/TokenSystem - url = self.base_url + '?token=' + self.token + '&' + urllib.urlencode(params) - request = urllib2.Request(url) + url = self.base_url + '?token=' + self.token + '&' + urlencode(params) + request = Request(url) if body: request.add_data(body) @@ 
-134,5 +139,5 @@ class UTorrentClient(object): try: response = self.opener.open(request) return response.code, json.loads(response.read()) - except urllib2.HTTPError, e: + except HTTPError: raise From d4e5809a290168448c1030bb4ccf38551bb4372d Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 23:24:54 -0400 Subject: [PATCH 08/21] Use print_function to standardize printing between Python 2 and Python 3 --- core/__init__.py | 9 ++++++--- core/nzbToMediaDB.py | 5 +++-- core/nzbToMediaUtil.py | 6 ++++-- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/core/__init__.py b/core/__init__.py index 46eef2c3..6b3ca265 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -1,6 +1,7 @@ # coding=utf-8 -from six.moves import reload_module +from __future__ import print_function + import locale import os import re @@ -9,6 +10,8 @@ import sys import platform import time +from six.moves import reload_module + # init libs PROGRAM_DIR = os.path.dirname(os.path.normpath(os.path.abspath(os.path.join(__file__, os.pardir)))) LIBS_DIR = os.path.join(PROGRAM_DIR, 'libs') @@ -261,8 +264,8 @@ def initialize(section=None): # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError sys.setdefaultencoding(SYS_ENCODING) except: - print 'Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable' - print 'or find another way to force Python to use ' + SYS_ENCODING + ' for string encoding.' 
+ print('Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable') + print('or find another way to force Python to use ' + SYS_ENCODING + ' for string encoding.') if 'NZBOP_SCRIPTDIR' in os.environ: sys.exit(NZBGET_POSTPROCESS_ERROR) else: diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index e2ff20f6..0e1af3a9 100644 --- a/core/nzbToMediaDB.py +++ b/core/nzbToMediaDB.py @@ -1,5 +1,6 @@ # coding=utf-8 -from __future__ import with_statement + +from __future__ import print_function, with_statement import re import sqlite3 @@ -228,7 +229,7 @@ def _processUpgrade(connection, upgradeClass): try: instance.execute() except sqlite3.DatabaseError, e: - print "Error in " + str(upgradeClass.__name__) + ": " + str(e) + print("Error in " + str(upgradeClass.__name__) + ": " + str(e)) raise logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG) else: diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 9e5eeaa5..6883b926 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -1,5 +1,7 @@ # coding=utf-8 -from __future__ import unicode_literals + +from __future__ import print_function, unicode_literals + import os import re import socket @@ -455,7 +457,7 @@ def convert_to_ascii(inputName, dirName): logger.info("Renaming directory to: %s." % (base2), 'ENCODER') os.rename(os.path.join(dir, base), dirName) if 'NZBOP_SCRIPTDIR' in os.environ: - print "[NZB] DIRECTORY=%s" % (dirName) # Return the new directory to NZBGet. 
+ print("[NZB] DIRECTORY=%s" % (dirName)) for dirname, dirnames, filenames in os.walk(dirName, topdown=False): for subdirname in dirnames: From abf63d6bbea8712d07bf1af97b5090c488635f2d Mon Sep 17 00:00:00 2001 From: Labrys Date: Sun, 5 Jun 2016 08:58:17 -0400 Subject: [PATCH 09/21] Use six.iteritems helper * Standardizes dict.iteritems between Python 2 and Python 3 --- core/nzbToMediaAutoFork.py | 4 +++- core/nzbToMediaConfig.py | 6 ++++-- core/transcoder/transcoder.py | 6 ++++-- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/core/nzbToMediaAutoFork.py b/core/nzbToMediaAutoFork.py index cfb4e60c..ce1949c5 100644 --- a/core/nzbToMediaAutoFork.py +++ b/core/nzbToMediaAutoFork.py @@ -2,6 +2,8 @@ import requests +from six import iteritems + import core from core import logger @@ -89,7 +91,7 @@ def autoFork(section, inputCategory): rem_params.append(param) for param in rem_params: params.pop(param) - for fork in sorted(core.FORKS.iteritems(), reverse=False): + for fork in sorted(iteritems(core.FORKS), reverse=False): if params == fork[1]: detected = True break diff --git a/core/nzbToMediaConfig.py b/core/nzbToMediaConfig.py index 75d8ad19..bb1202ce 100644 --- a/core/nzbToMediaConfig.py +++ b/core/nzbToMediaConfig.py @@ -1,4 +1,6 @@ # coding=utf-8 + +from six import iteritems import os import shutil import copy @@ -150,7 +152,7 @@ class ConfigObj(configobj.ConfigObj, Section): continue def cleanup_values(values, section): - for option, value in values.iteritems(): + for option, value in iteritems(values): if section in ['CouchPotato']: if option == ['outputDirectory']: CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0] @@ -227,7 +229,7 @@ class ConfigObj(configobj.ConfigObj, Section): subsection = None if section in list(chain.from_iterable(subsections.values())): subsection = section - section = ''.join([k for k, v in subsections.iteritems() if subsection in v]) + section = ''.join([k for k, v in iteritems(subsections) if subsection 
in v]) process_section(section, subsection) elif section in subsections.keys(): subsection = subsections[section] diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index 9065c78a..730ce77e 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -1,4 +1,6 @@ # coding=utf-8 + +from six import iteritems import errno import os import platform @@ -128,7 +130,7 @@ def buildCommands(file, newDir, movieName, bitbucket): if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself. core.VEXTENSION = '-transcoded' + core.VEXTENSION # adds '-transcoded.ext' else: - img, data = file.iteritems().next() + img, data = iteritems(file).next() name = data['name'] video_details, result = getVideoDetails(data['files'][0], img, bitbucket) inputFile = '-' @@ -774,7 +776,7 @@ def Transcode_directory(dirName): if isinstance(file, str): proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) else: - img, data = file.iteritems().next() + img, data = iteritems(file).next() proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket, stdin=subprocess.PIPE) for vob in data['files']: procin = zip_out(vob, img, bitbucket) From ec2fc8d5380f81366e9499b92afb6f5b666ca3c2 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 23:32:32 -0400 Subject: [PATCH 10/21] Use six.text_type * Standarizes unicode function between Python 2 and Python 3 --- core/nzbToMediaUtil.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 6883b926..897ac36a 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -1,7 +1,7 @@ # coding=utf-8 from __future__ import print_function, unicode_literals - +from six import text_type import os import re import socket @@ -1216,7 +1216,7 @@ def update_downloadInfoStatus(inputName, status): myDB = nzbToMediaDB.DBConnection() myDB.action("UPDATE downloads SET status=?, last_update=? 
WHERE input_name=?", - [status, datetime.date.today().toordinal(), unicode(inputName)]) + [status, datetime.date.today().toordinal(), text_type(inputName)]) def get_downloadInfo(inputName, status): @@ -1224,7 +1224,7 @@ def get_downloadInfo(inputName, status): myDB = nzbToMediaDB.DBConnection() sqlResults = myDB.select("SELECT * FROM downloads WHERE input_name=? AND status=?", - [unicode(inputName), status]) + [text_type(inputName), status]) return sqlResults From 5903538ae50019e30fa01e145decaebe68cb6062 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 23:34:56 -0400 Subject: [PATCH 11/21] Python 3: Make long = int --- core/synchronousdeluge/rencode.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py index 0d960255..843be62d 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -21,7 +21,10 @@ same rencode version throughout your project. import struct from threading import Lock +from six import PY3 +if PY3: + long = int __version__ = '1.0.1' __all__ = ['dumps', 'loads'] From ec71e7806d39cdd940f352e48db599851453bfdc Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 23:17:50 -0400 Subject: [PATCH 12/21] Python 3: Convert except ExceptClass, Target: to except ExceptClass as Target: --- core/nzbToMediaConfig.py | 8 ++++---- core/nzbToMediaDB.py | 16 ++++++++-------- core/nzbToMediaSceneExceptions.py | 4 ++-- core/nzbToMediaUtil.py | 8 ++++---- core/synchronousdeluge/client.py | 4 ++-- core/transcoder/transcoder.py | 4 ++-- core/versionCheck.py | 12 ++++++------ 7 files changed, 28 insertions(+), 28 deletions(-) diff --git a/core/nzbToMediaConfig.py b/core/nzbToMediaConfig.py index bb1202ce..6303a698 100644 --- a/core/nzbToMediaConfig.py +++ b/core/nzbToMediaConfig.py @@ -118,7 +118,7 @@ class ConfigObj(configobj.ConfigObj, Section): if not os.path.isfile(core.CONFIG_FILE): shutil.copyfile(core.CONFIG_SPEC_FILE, core.CONFIG_FILE) CFG_OLD = 
config(core.CONFIG_FILE) - except Exception, e: + except Exception as e: logger.debug("Error %s when copying to .cfg" % (e)) try: @@ -126,7 +126,7 @@ class ConfigObj(configobj.ConfigObj, Section): if not os.path.isfile(core.CONFIG_SPEC_FILE): shutil.copyfile(core.CONFIG_FILE, core.CONFIG_SPEC_FILE) CFG_NEW = config(core.CONFIG_SPEC_FILE) - except Exception, e: + except Exception as e: logger.debug("Error %s when copying to .spec" % (e)) # check for autoProcessMedia.cfg and autoProcessMedia.cfg.spec and if they don't exist return and fail @@ -462,14 +462,14 @@ class ConfigObj(configobj.ConfigObj, Section): CFG_NEW[section][os.environ[envCatKey]][option] = value CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 - except Exception, e: + except Exception as e: logger.debug("Error %s when applying NZBGet config" % (e)) try: # write our new config to autoProcessMedia.cfg CFG_NEW.filename = core.CONFIG_FILE CFG_NEW.write() - except Exception, e: + except Exception as e: logger.debug("Error %s when writing changes to .cfg" % (e)) return CFG_NEW diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index 0e1af3a9..e11b7dbb 100644 --- a/core/nzbToMediaDB.py +++ b/core/nzbToMediaDB.py @@ -37,7 +37,7 @@ class DBConnection: result = None try: result = self.select("SELECT db_version FROM db_version") - except sqlite3.OperationalError, e: + except sqlite3.OperationalError as e: if "no such table: db_version" in e.args[0]: return 0 @@ -68,7 +68,7 @@ class DBConnection: # get out of the connection attempt loop since we were successful break - except sqlite3.OperationalError, e: + except sqlite3.OperationalError as e: if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]: logger.log(u"DB error: " + str(e), logger.WARNING) attempt += 1 @@ -76,7 +76,7 @@ class DBConnection: else: logger.log(u"DB error: " + str(e), logger.ERROR) raise - except sqlite3.DatabaseError, e: + except sqlite3.DatabaseError as e: logger.log(u"Fatal error executing query: " 
+ str(e), logger.ERROR) raise @@ -103,7 +103,7 @@ class DBConnection: self.connection.commit() logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG) return sqlResult - except sqlite3.OperationalError, e: + except sqlite3.OperationalError as e: sqlResult = [] if self.connection: self.connection.rollback() @@ -114,7 +114,7 @@ class DBConnection: else: logger.log(u"DB error: " + str(e), logger.ERROR) raise - except sqlite3.DatabaseError, e: + except sqlite3.DatabaseError as e: sqlResult = [] if self.connection: self.connection.rollback() @@ -141,7 +141,7 @@ class DBConnection: self.connection.commit() # get out of the connection attempt loop since we were successful break - except sqlite3.OperationalError, e: + except sqlite3.OperationalError as e: if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]: logger.log(u"DB error: " + str(e), logger.WARNING) attempt += 1 @@ -149,7 +149,7 @@ class DBConnection: else: logger.log(u"DB error: " + str(e), logger.ERROR) raise - except sqlite3.DatabaseError, e: + except sqlite3.DatabaseError as e: logger.log(u"Fatal error executing query: " + str(e), logger.ERROR) raise @@ -228,7 +228,7 @@ def _processUpgrade(connection, upgradeClass): logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.MESSAGE) try: instance.execute() - except sqlite3.DatabaseError, e: + except sqlite3.DatabaseError as e: print("Error in " + str(upgradeClass.__name__) + ": " + str(e)) raise logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG) diff --git a/core/nzbToMediaSceneExceptions.py b/core/nzbToMediaSceneExceptions.py index b37ec268..40700961 100644 --- a/core/nzbToMediaSceneExceptions.py +++ b/core/nzbToMediaSceneExceptions.py @@ -64,7 +64,7 @@ def rename_file(filename, newfilePath): logger.debug("Replacing file name %s with download name %s" % (filename, newfilePath), "EXCEPTION") try: os.rename(filename, newfilePath) - except Exception, 
e: + except Exception as e: logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION") @@ -132,7 +132,7 @@ def rename_script(dirname): logger.debug("Renaming file %s to %s" % (orig, dest), "EXCEPTION") try: os.rename(orig, dest) - except Exception, e: + except Exception as e: logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION") # dict for custom groups diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 897ac36a..0c81cbe8 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -74,7 +74,7 @@ def makeDir(path): if not os.path.isdir(path): try: os.makedirs(path) - except Exception, e: + except Exception: return False return True @@ -262,7 +262,7 @@ def copy_link(src, targetLink, useLink): logger.info("Moving SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') shutil.move(src, targetLink) return True - except Exception, e: + except Exception as e: logger.warning("Error: %s, copying instead ... " % (e), 'COPYLINK') logger.info("Copying SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') @@ -1104,7 +1104,7 @@ def extractFiles(src, dst=None, keep_archive=None): if extractor.extract(inputFile, dst or dirPath): extracted_folder.append(dst or dirPath) extracted_archive.append(archiveName) - except Exception, e: + except Exception: logger.error("Extraction failed for: %s" % (fullFileName)) for folder in extracted_folder: @@ -1197,7 +1197,7 @@ def backupVersionedFile(old_file, version): shutil.copy(old_file, new_file) logger.log(u"Backup done", logger.DEBUG) break - except Exception, e: + except Exception as e: logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + str(e), logger.WARNING) numTries += 1 diff --git a/core/synchronousdeluge/client.py b/core/synchronousdeluge/client.py index af2c740c..fa80fb46 100644 --- a/core/synchronousdeluge/client.py +++ b/core/synchronousdeluge/client.py @@ -40,7 +40,7 @@ class DelugeClient(object): from xdg.BaseDirectory import save_config_path try: 
auth_file = os.path.join(save_config_path("deluge"), "auth") - except OSError, e: + except OSError: return username, password if os.path.exists(auth_file): @@ -51,7 +51,7 @@ class DelugeClient(object): line = line.strip() try: lsplit = line.split(":") - except Exception, e: + except Exception: continue if len(lsplit) == 2: diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index 730ce77e..46e95896 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -763,10 +763,10 @@ def Transcode_directory(dirName): try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason) os.remove(newfilePath) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist logger.debug("Error when removing transcoding target: %s" % (e)) - except Exception, e: + except Exception as e: logger.debug("Error when removing transcoding target: %s" % (e)) logger.info("Transcoding video: %s" % (newfilePath)) diff --git a/core/versionCheck.py b/core/versionCheck.py index b71f903c..f3cd3c1e 100644 --- a/core/versionCheck.py +++ b/core/versionCheck.py @@ -312,7 +312,7 @@ class GitUpdateManager(UpdateManager): else: try: self._check_github_for_update() - except Exception, e: + except Exception as e: logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR) return False @@ -356,7 +356,7 @@ class SourceUpdateManager(UpdateManager): try: with open(version_file, 'r') as fp: self._cur_commit_hash = fp.read().strip(' \n\r') - except EnvironmentError, e: + except EnvironmentError as e: logger.log(u"Unable to open 'version.txt': " + str(e), logger.DEBUG) if not self._cur_commit_hash: @@ -370,7 +370,7 @@ class SourceUpdateManager(UpdateManager): try: self._check_github_for_update() - except Exception, e: + except Exception as e: logger.log(u"Unable to contact github, can't 
check for update: " + repr(e), logger.ERROR) return False @@ -504,7 +504,7 @@ class SourceUpdateManager(UpdateManager): os.chmod(new_path, stat.S_IWRITE) os.remove(new_path) os.renames(old_path, new_path) - except Exception, e: + except Exception as e: logger.log(u"Unable to update " + new_path + ': ' + str(e), logger.DEBUG) os.remove(old_path) # Trash the updated file without moving in new path continue @@ -517,11 +517,11 @@ class SourceUpdateManager(UpdateManager): try: with open(version_path, 'w') as ver_file: ver_file.write(self._newest_commit_hash) - except EnvironmentError, e: + except EnvironmentError as e: logger.log(u"Unable to write version file, update not complete: " + str(e), logger.ERROR) return False - except Exception, e: + except Exception as e: logger.log(u"Error while trying to update: " + str(e), logger.ERROR) logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG) return False From 8434fd54193c8dadca31e70d87f3042468e0ea5e Mon Sep 17 00:00:00 2001 From: Labrys Date: Sun, 5 Jun 2016 09:35:33 -0400 Subject: [PATCH 13/21] Python 3: Fix relative imports --- core/synchronousdeluge/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/synchronousdeluge/client.py b/core/synchronousdeluge/client.py index fa80fb46..050d414e 100644 --- a/core/synchronousdeluge/client.py +++ b/core/synchronousdeluge/client.py @@ -4,9 +4,9 @@ import platform from collections import defaultdict from itertools import imap -from exceptions import DelugeRPCError -from protocol import DelugeRPCRequest, DelugeRPCResponse -from transfer import DelugeTransfer +from .exceptions import DelugeRPCError +from .protocol import DelugeRPCRequest, DelugeRPCResponse +from .transfer import DelugeTransfer __all__ = ["DelugeClient"] From 76a00b249316bc577d52cb385534cfff257a103b Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 23:01:15 -0400 Subject: [PATCH 14/21] Convert to new-style class by inheriting from object --- core/logger.py | 2 +- 
core/nzbToMediaConfig.py | 2 +- core/nzbToMediaDB.py | 2 +- core/nzbToMediaUtil.py | 6 +++--- core/versionCheck.py | 4 ++-- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/core/logger.py b/core/logger.py index b3800a98..86772c15 100644 --- a/core/logger.py +++ b/core/logger.py @@ -236,7 +236,7 @@ class NTMRotatingLogHandler(object): sys.exit(1) -class DispatchingFormatter: +class DispatchingFormatter(object): def __init__(self, formatters, default_formatter): self._formatters = formatters self._default_formatter = default_formatter diff --git a/core/nzbToMediaConfig.py b/core/nzbToMediaConfig.py index 6303a698..badc597f 100644 --- a/core/nzbToMediaConfig.py +++ b/core/nzbToMediaConfig.py @@ -11,7 +11,7 @@ from core import logger from itertools import chain -class Section(configobj.Section): +class Section(configobj.Section, object): def isenabled(section): # checks if subsection enabled, returns true/false if subsection specified otherwise returns true/false in {} if not section.sections: diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index e11b7dbb..c8a22318 100644 --- a/core/nzbToMediaDB.py +++ b/core/nzbToMediaDB.py @@ -23,7 +23,7 @@ def dbFilename(filename="nzbtomedia.db", suffix=None): return core.os.path.join(core.PROGRAM_DIR, filename) -class DBConnection: +class DBConnection(object): def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None): self.filename = filename diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 0c81cbe8..3f8f0f7e 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -1229,7 +1229,7 @@ def get_downloadInfo(inputName, status): return sqlResults -class RunningProcess(): +class RunningProcess(object): """ Limits application to single instance """ def __init__(self): @@ -1245,7 +1245,7 @@ class RunningProcess(): # self.process.__del__() -class WindowsProcess(): +class WindowsProcess(object): def __init__(self): self.mutexname = "nzbtomedia_" + core.PID_FILE.replace('\\', 
'/') # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}" if platform.system() == 'Windows': @@ -1271,7 +1271,7 @@ class WindowsProcess(): self.CloseHandle(self.mutex) -class PosixProcess(): +class PosixProcess(object): def __init__(self): self.pidpath = core.PID_FILE self.lock_socket = None diff --git a/core/versionCheck.py b/core/versionCheck.py index f3cd3c1e..fd58af45 100644 --- a/core/versionCheck.py +++ b/core/versionCheck.py @@ -17,7 +17,7 @@ import core from core import logger -class CheckVersion(): +class CheckVersion(object): """ Version check class meant to run as a thread object with the SB scheduler. """ @@ -82,7 +82,7 @@ class CheckVersion(): return self.updater.update() -class UpdateManager(): +class UpdateManager(object): def get_github_repo_user(self): return core.GIT_USER From 22daf021670e56d6dd7857e8489320d9042b281a Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 4 Jun 2016 23:35:11 -0400 Subject: [PATCH 15/21] Python 3: Raise with no arguments can only be used in an except block --- core/nzbToMediaUtil.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 3f8f0f7e..178dc784 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -724,7 +724,7 @@ def onerror(func, path, exc_info): os.chmod(path, stat.S_IWUSR) func(path) else: - raise + raise Exception def rmDir(dirName): From eb1ee8b5f57f85ede15ba22724fb49be947cb42d Mon Sep 17 00:00:00 2001 From: Labrys Date: Sun, 5 Jun 2016 09:28:47 -0400 Subject: [PATCH 16/21] Set expected, but passing list --- core/nzbToMediaUtil.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 178dc784..30bbf794 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -1144,7 +1144,7 @@ def import_subs(filename): logger.debug("Attempting to download subtitles for %s" % (filename), 'SUBTITLES') try: video = subliminal.scan_video(filename, subtitles=True, 
embedded_subtitles=True) - subtitles = subliminal.download_best_subtitles([video], languages, hearing_impaired=False) + subtitles = subliminal.download_best_subtitles({video}, languages, hearing_impaired=False) subliminal.save_subtitles(subtitles) except Exception as e: logger.error("Failed to download subtitles for %s due to: %s" % (filename, e), 'SUBTITLES') From a983c6c7bead63cb9cbcf5126764d6971364ca64 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sun, 5 Jun 2016 09:49:59 -0400 Subject: [PATCH 17/21] Streamline conditional statements * Non-zero ints evaluate True. --- core/autoProcess/autoProcessMovie.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/autoProcess/autoProcessMovie.py b/core/autoProcess/autoProcessMovie.py index a5846e14..a4e4b0ba 100644 --- a/core/autoProcess/autoProcessMovie.py +++ b/core/autoProcess/autoProcessMovie.py @@ -169,11 +169,11 @@ class autoProcessMovie(object): if transcoder.isVideoGood(video, status): import_subs(video) good_files += 1 - if num_files > 0 and good_files == num_files: + if num_files and good_files == num_files: if status: logger.info("Status shown as failed from Downloader, but {0} valid video files found. Setting as success.".format(good_files), section) status = 0 - elif num_files > 0 and good_files < num_files: + elif num_files and good_files < num_files: logger.info("Status shown as success from Downloader, but corrupt video files found. 
Setting as failed.", section) if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': print('[NZB] MARK=BAD') From 47c585d81c81ee456b21e719f1eb0adea3ac249b Mon Sep 17 00:00:00 2001 From: Labrys Date: Sun, 5 Jun 2016 09:55:56 -0400 Subject: [PATCH 18/21] Rewrite dictionary creation as a dictionary literal --- core/nzbToMediaUtil.py | 20 +++++----- core/synchronousdeluge/rencode.py | 66 ++++++++++++++++--------------- 2 files changed, 45 insertions(+), 41 deletions(-) diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 30bbf794..f1098330 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -870,11 +870,12 @@ def find_download(clientAgent, download_id): else: baseURL = "http://%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT) url = baseURL - params = {} - params['apikey'] = core.SABNZBDAPIKEY - params['mode'] = "get_files" - params['output'] = 'json' - params['value'] = download_id + params = { + 'apikey': core.SABNZBDAPIKEY, + 'mode': "get_files", + 'output': 'json', + 'value': download_id, + } try: r = requests.get(url, params=params, verify=False, timeout=(30, 120)) except requests.ConnectionError: @@ -896,10 +897,11 @@ def get_nzoid(inputName): else: baseURL = "http://%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT) url = baseURL - params = {} - params['apikey'] = core.SABNZBDAPIKEY - params['mode'] = "queue" - params['output'] = 'json' + params = { + 'apikey': core.SABNZBDAPIKEY, + 'mode': "queue", + 'output': 'json', + } try: r = requests.get(url, params=params, verify=False, timeout=(30, 120)) except requests.ConnectionError: diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py index 843be62d..f0fcc69e 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -208,29 +208,30 @@ def decode_none(x, f): return (None, f + 1) -decode_func = {} -decode_func['0'] = decode_string -decode_func['1'] = decode_string -decode_func['2'] = decode_string 
-decode_func['3'] = decode_string -decode_func['4'] = decode_string -decode_func['5'] = decode_string -decode_func['6'] = decode_string -decode_func['7'] = decode_string -decode_func['8'] = decode_string -decode_func['9'] = decode_string -decode_func[CHR_LIST] = decode_list -decode_func[CHR_DICT] = decode_dict -decode_func[CHR_INT] = decode_int -decode_func[CHR_INT1] = decode_intb -decode_func[CHR_INT2] = decode_inth -decode_func[CHR_INT4] = decode_intl -decode_func[CHR_INT8] = decode_intq -decode_func[CHR_FLOAT32] = decode_float32 -decode_func[CHR_FLOAT64] = decode_float64 -decode_func[CHR_TRUE] = decode_true -decode_func[CHR_FALSE] = decode_false -decode_func[CHR_NONE] = decode_none +decode_func = { + '0': decode_string, + '1': decode_string, + '2': decode_string, + '3': decode_string, + '4': decode_string, + '5': decode_string, + '6': decode_string, + '7': decode_string, + '8': decode_string, + '9': decode_string, + CHR_LIST: decode_list, + CHR_DICT: decode_dict, + CHR_INT: decode_int, + CHR_INT1: decode_intb, + CHR_INT2: decode_inth, + CHR_INT4: decode_intl, + CHR_INT8: decode_intq, + CHR_FLOAT32: decode_float32, + CHR_FLOAT64: decode_float64, + CHR_TRUE: decode_true, + CHR_FALSE: decode_false, + CHR_NONE: decode_none, +} def make_fixed_length_string_decoders(): @@ -400,15 +401,16 @@ def encode_dict(x, r): r.append(CHR_TERM) -encode_func = {} -encode_func[IntType] = encode_int -encode_func[LongType] = encode_int -encode_func[StringType] = encode_string -encode_func[ListType] = encode_list -encode_func[TupleType] = encode_list -encode_func[DictType] = encode_dict -encode_func[NoneType] = encode_none -encode_func[UnicodeType] = encode_unicode +encode_func = { + IntType: encode_int, + LongType: encode_int, + StringType: encode_string, + ListType: encode_list, + TupleType: encode_list, + DictType: encode_dict, + NoneType: encode_none, + UnicodeType: encode_unicode, +} lock = Lock() From 94e8a45c6249e0e5736d2fe95d132b0120730336 Mon Sep 17 00:00:00 2001 From: Labrys 
Date: Sun, 5 Jun 2016 02:31:04 -0400 Subject: [PATCH 19/21] Code cleanup * Streamline variable assignment * Replace assignment with augmented assignment * Remove unused variables and redundant parentheses --- core/nzbToMediaDB.py | 1 - core/nzbToMediaUserScript.py | 7 +++--- core/nzbToMediaUtil.py | 14 +++++------- core/synchronousdeluge/client.py | 5 ++--- core/synchronousdeluge/protocol.py | 2 +- core/synchronousdeluge/rencode.py | 34 +++++++++++++++--------------- core/transcoder/transcoder.py | 9 -------- core/transmissionrpc/client.py | 9 +++----- core/transmissionrpc/torrent.py | 1 - core/transmissionrpc/utils.py | 8 +++---- core/versionCheck.py | 7 +++--- 11 files changed, 39 insertions(+), 58 deletions(-) diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index c8a22318..4445b6e1 100644 --- a/core/nzbToMediaDB.py +++ b/core/nzbToMediaDB.py @@ -115,7 +115,6 @@ class DBConnection(object): logger.log(u"DB error: " + str(e), logger.ERROR) raise except sqlite3.DatabaseError as e: - sqlResult = [] if self.connection: self.connection.rollback() logger.log(u"Fatal error executing query: " + str(e), logger.ERROR) diff --git a/core/nzbToMediaUserScript.py b/core/nzbToMediaUserScript.py index 23c3c5de..fe6a453e 100644 --- a/core/nzbToMediaUserScript.py +++ b/core/nzbToMediaUserScript.py @@ -58,7 +58,7 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): fileName, fileExtension = os.path.splitext(file) if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or "ALL" in core.USER_SCRIPT_MEDIAEXTENSIONS: - num_files = num_files + 1 + num_files += 1 if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files. 
continue command = [core.USER_SCRIPT] @@ -103,16 +103,15 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): except: logger.error("UserScript %s has failed" % (command[0]), "USERSCRIPT") result = int(1) - final_result = final_result + result + final_result += result num_files_new = 0 for dirpath, dirnames, filenames in os.walk(outputDestination): for file in filenames: - filePath = core.os.path.join(dirpath, file) fileName, fileExtension = os.path.splitext(file) if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL": - num_files_new = num_files_new + 1 + num_files_new += 1 if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0: logger.info("All files have been processed. Cleaning outputDirectory %s" % (outputDestination)) diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index f1098330..bbb58346 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -38,7 +38,7 @@ def reportNzb(failure_link, clientAgent): else: return try: - r = requests.post(failure_link, headers=headers, timeout=(30, 300)) + requests.post(failure_link, headers=headers, timeout=(30, 300)) except Exception as e: logger.error("Unable to open URL %s due to %s" % (failure_link, e)) return @@ -202,7 +202,7 @@ def is_minSize(inputName, minSize): # audio files we need to check directory size not file size inputSize = os.path.getsize(inputName) - if fileExt in (core.AUDIOCONTAINER): + if fileExt in core.AUDIOCONTAINER: try: inputSize = getDirSize(os.path.dirname(inputName)) except: @@ -339,7 +339,7 @@ def rmReadOnly(filename): if os.path.isfile(filename): # check first the read-only attribute file_attribute = os.stat(filename)[0] - if (not file_attribute & stat.S_IWRITE): + if not file_attribute & stat.S_IWRITE: # File is read-only, so make it writeable logger.debug('Read only mode on file ' + filename + ' Will try to make it writeable') try: @@ -631,11 +631,7 @@ def getDirs(section, 
subsection, link='hard'): f = guessit.guess_video_info(mediafile) # get title - title = None - try: - title = f['series'] - except: - title = f['title'] + title = f.get('series') or f.get('title') if not title: title = os.path.splitext(os.path.basename(mediafile))[0] @@ -1262,7 +1258,7 @@ class WindowsProcess(object): def alreadyrunning(self): self.mutex = self.CreateMutex(None, 0, self.mutexname) self.lasterror = self.GetLastError() - if (self.lasterror == self.ERROR_ALREADY_EXISTS): + if self.lasterror == self.ERROR_ALREADY_EXISTS: self.CloseHandle(self.mutex) return True else: diff --git a/core/synchronousdeluge/client.py b/core/synchronousdeluge/client.py index 050d414e..cecb2a88 100644 --- a/core/synchronousdeluge/client.py +++ b/core/synchronousdeluge/client.py @@ -23,7 +23,6 @@ class DelugeClient(object): self._request_counter = 0 def _get_local_auth(self): - auth_file = "" username = password = "" if platform.system() in ('Windows', 'Microsoft'): appDataPath = os.environ.get("APPDATA") @@ -62,9 +61,9 @@ class DelugeClient(object): continue if username == "localclient": - return (username, password) + return username, password - return ("", "") + return "", "" def _create_module_method(self, module, method): fullname = "{0}.{1}".format(module, method) diff --git a/core/synchronousdeluge/protocol.py b/core/synchronousdeluge/protocol.py index 2cb1a73e..98084d4f 100644 --- a/core/synchronousdeluge/protocol.py +++ b/core/synchronousdeluge/protocol.py @@ -10,7 +10,7 @@ class DelugeRPCRequest(object): self.kwargs = kwargs def format(self): - return (self.request_id, self.method, self.args, self.kwargs) + return self.request_id, self.method, self.args, self.kwargs class DelugeRPCResponse(object): diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py index f0fcc69e..f27c3304 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -126,39 +126,39 @@ def decode_int(x, f): raise ValueError elif x[f] == '0' 
and newf != f + 1: raise ValueError - return (n, newf + 1) + return n, newf + 1 def decode_intb(x, f): f += 1 - return (struct.unpack('!b', x[f:f + 1])[0], f + 1) + return struct.unpack('!b', x[f:f + 1])[0], f + 1 def decode_inth(x, f): f += 1 - return (struct.unpack('!h', x[f:f + 2])[0], f + 2) + return struct.unpack('!h', x[f:f + 2])[0], f + 2 def decode_intl(x, f): f += 1 - return (struct.unpack('!l', x[f:f + 4])[0], f + 4) + return struct.unpack('!l', x[f:f + 4])[0], f + 4 def decode_intq(x, f): f += 1 - return (struct.unpack('!q', x[f:f + 8])[0], f + 8) + return struct.unpack('!q', x[f:f + 8])[0], f + 8 def decode_float32(x, f): f += 1 n = struct.unpack('!f', x[f:f + 4])[0] - return (n, f + 4) + return n, f + 4 def decode_float64(x, f): f += 1 n = struct.unpack('!d', x[f:f + 8])[0] - return (n, f + 8) + return n, f + 8 def decode_string(x, f): @@ -177,7 +177,7 @@ def decode_string(x, f): s = t except UnicodeDecodeError: pass - return (s, colon + n) + return s, colon + n def decode_list(x, f): @@ -185,7 +185,7 @@ def decode_list(x, f): while x[f] != CHR_TERM: v, f = decode_func[x[f]](x, f) r.append(v) - return (tuple(r), f + 1) + return tuple(r), f + 1 def decode_dict(x, f): @@ -193,19 +193,19 @@ def decode_dict(x, f): while x[f] != CHR_TERM: k, f = decode_func[x[f]](x, f) r[k], f = decode_func[x[f]](x, f) - return (r, f + 1) + return r, f + 1 def decode_true(x, f): - return (True, f + 1) + return True, f + 1 def decode_false(x, f): - return (False, f + 1) + return False, f + 1 def decode_none(x, f): - return (None, f + 1) + return None, f + 1 decode_func = { @@ -244,7 +244,7 @@ def make_fixed_length_string_decoders(): s = t except UnicodeDecodeError: pass - return (s, f + 1 + slen) + return s, f + 1 + slen return f @@ -262,7 +262,7 @@ def make_fixed_length_list_decoders(): for i in range(slen): v, f = decode_func[x[f]](x, f) r.append(v) - return (tuple(r), f) + return tuple(r), f return f @@ -276,7 +276,7 @@ make_fixed_length_list_decoders() def 
make_fixed_length_int_decoders(): def make_decoder(j): def f(x, f): - return (j, f + 1) + return j, f + 1 return f @@ -296,7 +296,7 @@ def make_fixed_length_dict_decoders(): for j in range(slen): k, f = decode_func[x[f]](x, f) r[k], f = decode_func[x[f]](x, f) - return (r, f) + return r, f return f diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index 46e95896..9731f689 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -142,7 +142,6 @@ def buildCommands(file, newDir, movieName, bitbucket): video_cmd = [] audio_cmd = [] audio_cmd2 = [] - audio_cmd3 = [] sub_cmd = [] meta_cmd = [] other_cmd = [] @@ -221,10 +220,6 @@ def buildCommands(file, newDir, movieName, bitbucket): except: height = 0 scale = core.VRESOLUTION - try: - framerate = float(fr.split('/')[0]) / float(fr.split('/')[1]) - except: - framerate = 0 if codec in core.VCODEC_ALLOW or not core.VCODEC: video_cmd.extend(['-c:v', 'copy']) else: @@ -431,7 +426,6 @@ def buildCommands(file, newDir, movieName, bitbucket): audio_cmd.extend(audio_cmd3) s_mapped = [] - subs1 = [] burnt = 0 n = 0 for lan in core.SLANGUAGES: @@ -587,12 +581,10 @@ def extract_subs(file, newfilePath, bitbucket): def processList(List, newDir, bitbucket): remList = [] newList = [] - delList = [] combine = [] vtsPath = None success = True for item in List: - newfile = None ext = os.path.splitext(item)[1].lower() if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS: logger.debug("Attempting to rip disk image: %s" % (item), "TRANSCODER") @@ -647,7 +639,6 @@ def ripISO(item, newDir, bitbucket): print_cmd(cmd) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) out, err = proc.communicate() - result = proc.returncode fileList = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)] combined = [] diff --git 
a/core/transmissionrpc/client.py b/core/transmissionrpc/client.py index b64b709d..6379e595 100644 --- a/core/transmissionrpc/client.py +++ b/core/transmissionrpc/client.py @@ -409,11 +409,8 @@ class Client(object): pass if might_be_base64: torrent_data = torrent - args = {} - if torrent_data: - args = {'metainfo': torrent_data} - else: - args = {'filename': torrent} + + args = {'metainfo': torrent_data} if torrent_data else {'filename': torrent} for key, value in iteritems(kwargs): argument = make_rpc_name(key) (arg, val) = argument_value_convert('torrent-add', argument, value, self.rpc_version) @@ -804,7 +801,7 @@ class Client(object): raise ValueError("Target name cannot contain a path delimiter") args = {'path': location, 'name': name} result = self._request('torrent-rename-path', args, torrent_id, True, timeout=timeout) - return (result['path'], result['name']) + return result['path'], result['name'] def queue_top(self, ids, timeout=None): """Move transfer to the top of the queue.""" diff --git a/core/transmissionrpc/torrent.py b/core/transmissionrpc/torrent.py index 5fd033db..54ee2a2d 100644 --- a/core/transmissionrpc/torrent.py +++ b/core/transmissionrpc/torrent.py @@ -124,7 +124,6 @@ class Torrent(object): """ Update the torrent data from a Transmission JSON-RPC arguments dictionary """ - fields = None if isinstance(other, dict): for key, value in iteritems(other): self._fields[key.replace('-', '_')] = Field(value, False) diff --git a/core/transmissionrpc/utils.py b/core/transmissionrpc/utils.py index c2bca855..0ac2a32a 100644 --- a/core/transmissionrpc/utils.py +++ b/core/transmissionrpc/utils.py @@ -23,7 +23,7 @@ def format_size(size): while size >= 1024.0 and i < len(UNITS): i += 1 size /= 1024.0 - return (size, UNITS[i]) + return size, UNITS[i] def format_speed(size): @@ -31,7 +31,7 @@ def format_speed(size): Format bytes per second speed into IEC prefixes, B/s, KiB/s, MiB/s ... 
""" (size, unit) = format_size(size) - return (size, unit + '/s') + return size, unit + '/s' def format_timedelta(delta): @@ -91,7 +91,7 @@ def inet_address(address, default_port, default_address='localhost'): socket.getaddrinfo(addr, port, socket.AF_INET, socket.SOCK_STREAM) except socket.gaierror: raise INetAddressError('Cannot look up address "%s".' % address) - return (addr, port) + return addr, port def rpc_bool(arg): @@ -163,7 +163,7 @@ def argument_value_convert(method, argument, value, rpc_version): raise ValueError( 'Method "%s" Argument "%s" does not exist in version %d.' % (method, argument, rpc_version)) - return (argument, TR_TYPE_MAP[info[0]](value)) + return argument, TR_TYPE_MAP[info[0]](value) else: raise ValueError('Argument "%s" does not exists for method "%s".', (argument, method)) diff --git a/core/versionCheck.py b/core/versionCheck.py index fd58af45..84e14947 100644 --- a/core/versionCheck.py +++ b/core/versionCheck.py @@ -159,12 +159,13 @@ class GitUpdateManager(UpdateManager): def _run_git(self, git_path, args): - output = err = exit_status = None + output = None + err = None if not git_path: logger.log(u"No git specified, can't use git commands", logger.DEBUG) exit_status = 1 - return (output, err, exit_status) + return output, err, exit_status cmd = git_path + ' ' + args @@ -203,7 +204,7 @@ class GitUpdateManager(UpdateManager): logger.log(cmd + u" returned : " + output + u", treat as error for now", logger.DEBUG) exit_status = 1 - return (output, err, exit_status) + return output, err, exit_status def _find_installed_version(self): """ From 1cd073cd52bd2622c51c95fc0451f37a37b89173 Mon Sep 17 00:00:00 2001 From: Labrys Date: Tue, 31 May 2016 08:20:06 -0400 Subject: [PATCH 20/21] Use `format()` instead of `%` for string formatting --- TorrentToMedia.py | 64 +++++----- core/__init__.py | 18 +-- core/extractor/extractor.py | 35 ++++-- core/linktastic/linktastic.py | 10 +- core/logger.py | 4 +- core/nzbToMediaAutoFork.py | 37 +++--- 
core/nzbToMediaConfig.py | 21 ++-- core/nzbToMediaDB.py | 8 +- core/nzbToMediaSceneExceptions.py | 24 ++-- core/nzbToMediaUserScript.py | 28 ++--- core/nzbToMediaUtil.py | 173 ++++++++++++++-------------- core/synchronousdeluge/rencode.py | 2 +- core/transcoder/transcoder.py | 71 ++++++------ core/transmissionrpc/client.py | 15 +-- core/transmissionrpc/error.py | 8 +- core/transmissionrpc/httphandler.py | 4 +- core/transmissionrpc/session.py | 4 +- core/transmissionrpc/torrent.py | 8 +- core/transmissionrpc/utils.py | 18 ++- core/utorrent/upload.py | 8 +- nzbToMedia.py | 44 +++---- 21 files changed, 317 insertions(+), 287 deletions(-) diff --git a/TorrentToMedia.py b/TorrentToMedia.py index f82b9b10..b1a317ae 100755 --- a/TorrentToMedia.py +++ b/TorrentToMedia.py @@ -18,7 +18,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, uniquePath = 1 if clientAgent != 'manual' and not core.DOWNLOADINFO: - logger.debug('Adding TORRENT download info for directory %s to database' % (inputDirectory)) + logger.debug('Adding TORRENT download info for directory {0} to database'.format(inputDirectory)) myDB = nzbToMediaDB.DBConnection() @@ -42,7 +42,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, } myDB.upsert("downloads", newValueDict, controlValueDict) - logger.debug("Received Directory: %s | Name: %s | Category: %s" % (inputDirectory, inputName, inputCategory)) + logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory)) inputDirectory, inputName, inputCategory, root = core.category_search(inputDirectory, inputName, inputCategory, root, @@ -58,7 +58,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, inputDirectory = inputDirectory.encode(core.SYS_ENCODING) except: pass - logger.debug("Determined Directory: %s | Name: %s | Category: %s" % (inputDirectory, inputName, inputCategory)) + logger.debug("Determined 
Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory)) # auto-detect section section = core.CFG.findsection(inputCategory).isenabled() @@ -66,7 +66,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, section = core.CFG.findsection("ALL").isenabled() if section is None: logger.error( - 'Category:[%s] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.' % ( + 'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format( inputCategory)) return [-1, ""] else: @@ -74,15 +74,15 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, if len(section) > 1: logger.error( - 'Category:[%s] is not unique, %s are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.' % ( + 'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format( usercat, section.keys())) return [-1, ""] if section: sectionName = section.keys()[0] - logger.info('Auto-detected SECTION:%s' % (sectionName)) + logger.info('Auto-detected SECTION:{0}'.format(sectionName)) else: - logger.error("Unable to locate a section with subsection:%s enabled in your autoProcessMedia.cfg, exiting!" 
% ( + logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format( inputCategory)) return [-1, ""] @@ -129,15 +129,15 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, if outputDestination in inputDirectory: outputDestination = inputDirectory - logger.info("Output directory set to: %s" % (outputDestination)) + logger.info("Output directory set to: {0}".format(outputDestination)) if core.SAFE_MODE and outputDestination == core.TORRENT_DEFAULTDIR: logger.error( - 'The output directory:[%s] is the Download Directory. Edit outputDirectory in autoProcessMedia.cfg. Exiting' % ( + 'The output directory:[{0}] is the Download Directory. Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format( inputDirectory)) return [-1, ""] - logger.debug("Scanning files in directory: %s" % (inputDirectory)) + logger.debug("Scanning files in directory: {0}".format(inputDirectory)) if sectionName == 'HeadPhones': core.NOFLATTEN.extend( @@ -149,7 +149,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, inputFiles = core.listMediaFiles(inputDirectory, archives=False) else: inputFiles = core.listMediaFiles(inputDirectory) - logger.debug("Found %s files in %s" % (str(len(inputFiles)), inputDirectory)) + logger.debug("Found {0} files in {1}".format(len(inputFiles), inputDirectory)) for inputFile in inputFiles: filePath = os.path.dirname(inputFile) fileName, fileExt = os.path.splitext(os.path.basename(inputFile)) @@ -161,17 +161,17 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, targetFile = core.os.path.join( core.os.path.join(outputDestination, os.path.basename(filePath)), fullFileName) logger.debug( - "Setting outputDestination to %s to preserve folder structure" % (os.path.dirname(targetFile))) + "Setting outputDestination to {0} to preserve folder structure".format(os.path.dirname(targetFile))) try: targetFile = 
targetFile.encode(core.SYS_ENCODING) except: pass if root == 1: if not foundFile: - logger.debug("Looking for %s in: %s" % (inputName, inputFile)) + logger.debug("Looking for {0} in: {1}".format(inputName, inputFile)) if (core.sanitizeName(inputName) in core.sanitizeName(inputFile)) or ( core.sanitizeName(fileName) in core.sanitizeName(inputName)): foundFile = True - logger.debug("Found file %s that matches Torrent Name %s" % (fullFileName, inputName)) + logger.debug("Found file {0} that matches Torrent Name {1}".format(fullFileName, inputName)) else: continue @@ -183,7 +183,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, logger.debug("Looking for files with modified/created dates less than 5 minutes old.") if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)): foundFile = True - logger.debug("Found file %s with date modifed/created less than 5 minutes ago." % (fullFileName)) + logger.debug("Found file {0} with date modifed/created less than 5 minutes ago.".format(fullFileName)) else: continue # This file has not been recently moved or created, skip it @@ -192,12 +192,12 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, core.copy_link(inputFile, targetFile, core.USELINK) core.rmReadOnly(targetFile) except: - logger.error("Failed to link: %s to %s" % (inputFile, targetFile)) + logger.error("Failed to link: {0} to {1}".format(inputFile, targetFile)) inputName, outputDestination = convert_to_ascii(inputName, outputDestination) if extract == 1: - logger.debug('Checking for archives to extract in directory: %s' % (inputDirectory)) + logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory)) core.extractFiles(inputDirectory, outputDestination, keep_archive) if not inputCategory in core.NOFLATTEN: #don't flatten hp in case multi cd albums, and we need to copy this back later. 
@@ -208,19 +208,19 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, numVideos = len( core.listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False)) if numVideos > 0: - logger.info("Found %s media files in %s" % (numVideos, outputDestination)) + logger.info("Found {0} media files in {1}".format(numVideos, outputDestination)) status = 0 elif extract != 1: - logger.info("Found no media files in %s. Sending to %s to process" % (outputDestination, sectionName)) + logger.info("Found no media files in {0}. Sending to {1} to process".format(outputDestination, sectionName)) status = 0 else: - logger.warning("Found no media files in %s" % outputDestination) + logger.warning("Found no media files in {0}".format(outputDestination)) # Only these sections can handling failed downloads so make sure everything else gets through without the check for failed if not sectionName in ['CouchPotato', 'SickBeard', 'NzbDrone']: status = 0 - logger.info("Calling %s:%s to post-process:%s" % (sectionName, usercat, inputName)) + logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, usercat, inputName)) if core.TORRENT_CHMOD_DIRECTORY: core.rchmod(outputDestination, core.TORRENT_CHMOD_DIRECTORY) @@ -262,10 +262,10 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, # remove torrent if core.USELINK == 'move-sym' and not core.DELETE_ORIGINAL == 1: - logger.debug('Checking for sym-links to re-direct in: %s' % (inputDirectory)) + logger.debug('Checking for sym-links to re-direct in: {0}'.format(inputDirectory)) for dirpath, dirs, files in os.walk(inputDirectory): for file in files: - logger.debug('Checking symlink: %s' % (os.path.join(dirpath,file))) + logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath,file))) core.replace_links(os.path.join(dirpath,file)) core.remove_torrent(clientAgent, inputHash, inputID, inputName) @@ -284,11 +284,11 @@ def main(args): clientAgent = 
core.TORRENT_CLIENTAGENT logger.info("#########################################################") - logger.info("## ..::[%s]::.. ##" % os.path.basename(__file__)) + logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__))) logger.info("#########################################################") # debug command line options - logger.debug("Options passed into TorrentToMedia: %s" % (args)) + logger.debug("Options passed into TorrentToMedia: {0}".format(args)) # Post-Processing Result result = [ 0, "" ] @@ -310,16 +310,16 @@ def main(args): if not core.CFG[section][subsection].isenabled(): continue for dirName in core.getDirs(section, subsection, link='hard'): - logger.info("Starting manual run for %s:%s - Folder:%s" % (section, subsection, dirName)) + logger.info("Starting manual run for {0}:{1} - Folder:{2}".format(section, subsection, dirName)) - logger.info("Checking database for download info for %s ..." % (os.path.basename(dirName))) + logger.info("Checking database for download info for {0} ...".format(os.path.basename(dirName))) core.DOWNLOADINFO = core.get_downloadInfo(os.path.basename(dirName), 0) if core.DOWNLOADINFO: logger.info( - "Found download info for %s, setting variables now ..." % (os.path.basename(dirName))) + "Found download info for {0}, setting variables now ...".format(os.path.basename(dirName))) else: logger.info( - 'Unable to locate download info for %s, continuing to try and process this release ...' % ( + 'Unable to locate download info for {0}, continuing to try and process this release ...'.format( os.path.basename(dirName)) ) @@ -350,14 +350,14 @@ def main(args): results = processTorrent(dirName, inputName, subsection, inputHash, inputID, clientAgent) if results[0] != 0: - logger.error("A problem was reported when trying to perform a manual run for %s:%s." 
% ( + logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format( section, subsection)) result = results if result[0] == 0: - logger.info("The %s script completed successfully." % (args[0])) + logger.info("The {0} script completed successfully.".format(args[0])) else: - logger.error("A problem was reported in the %s script." % (args[0])) + logger.error("A problem was reported in the {0} script.".format(args[0])) del core.MYAPP return result[0] diff --git a/core/__init__.py b/core/__init__.py index 6b3ca265..b71a3b53 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -276,7 +276,7 @@ def initialize(section=None): # run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options. if not config.migrate(): - logger.error("Unable to migrate config file %s, exiting ..." % (CONFIG_FILE)) + logger.error("Unable to migrate config file {0}, exiting ...".format(CONFIG_FILE)) if 'NZBOP_SCRIPTDIR' in os.environ: pass # We will try and read config from Environment. 
else: @@ -287,7 +287,7 @@ def initialize(section=None): CFG = config.addnzbget() else: # load newly migrated config - logger.info("Loading config from [%s]" % (CONFIG_FILE)) + logger.info("Loading config from [{0}]".format(CONFIG_FILE)) CFG = config() # Enable/Disable DEBUG Logging @@ -298,7 +298,7 @@ def initialize(section=None): if LOG_ENV: for item in os.environ: - logger.info("%s: %s" % (item, os.environ[item]), "ENVIRONMENT") + logger.info("{0}: {1}".format(item, os.environ[item]), "ENVIRONMENT") # initialize the main SB database nzbToMediaDB.upgradeDatabase(nzbToMediaDB.DBConnection(), mainDB.InitialSchema) @@ -399,20 +399,20 @@ def initialize(section=None): devnull = open(os.devnull, 'w') try: subprocess.Popen(["nice"], stdout=devnull, stderr=devnull).communicate() - NICENESS.extend(['nice', '-n%s' % (int(CFG["Posix"]["niceness"]))]) + NICENESS.extend(['nice', '-n{0}'.format(int(CFG["Posix"]["niceness"]))]) except: pass try: subprocess.Popen(["ionice"], stdout=devnull, stderr=devnull).communicate() try: - NICENESS.extend(['ionice', '-c%s' % (int(CFG["Posix"]["ionice_class"]))]) + NICENESS.extend(['ionice', '-c{0}'.format(int(CFG["Posix"]["ionice_class"]))]) except: pass try: if 'ionice' in NICENESS: - NICENESS.extend(['-n%s' % (int(CFG["Posix"]["ionice_classdata"]))]) + NICENESS.extend(['-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))]) else: - NICENESS.extend(['ionice', '-n%s' % (int(CFG["Posix"]["ionice_classdata"]))]) + NICENESS.extend(['ionice', '-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))]) except: pass except: @@ -422,7 +422,7 @@ def initialize(section=None): COMPRESSEDCONTAINER = [re.compile('.r\d{2}$', re.I), re.compile('.part\d+.rar$', re.I), re.compile('.rar$', re.I)] - COMPRESSEDCONTAINER += [re.compile('%s$' % ext, re.I) for ext in CFG["Extensions"]["compressedExtensions"]] + COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG["Extensions"]["compressedExtensions"]] MEDIACONTAINER = 
CFG["Extensions"]["mediaExtensions"] AUDIOCONTAINER = CFG["Extensions"]["audioExtensions"] METACONTAINER = CFG["Extensions"]["metaExtensions"] # .nfo,.sub,.srt @@ -851,7 +851,7 @@ def restart(): def rchmod(path, mod): - logger.log("Changing file mode of %s to %s" % (path, oct(mod))) + logger.log("Changing file mode of {0} to {1}".format(path, oct(mod))) os.chmod(path, mod) if not os.path.isdir(path): return # Skip files diff --git a/core/extractor/extractor.py b/core/extractor/extractor.py index 865802ba..08011706 100644 --- a/core/extractor/extractor.py +++ b/core/extractor/extractor.py @@ -1,4 +1,5 @@ # coding=utf-8 + import os import platform import shutil @@ -53,7 +54,9 @@ def extract(filePath, outputDestination): else: for k, v in EXTRACT_COMMANDS.items(): if cmd in v[0]: - core.logger.error("EXTRACTOR: %s not found, disabling support for %s" % (cmd, k)) + core.logger.error("EXTRACTOR: {cmd} not found, " + "disabling support for {feature}".format + (cmd=cmd, feature=k)) del EXTRACT_COMMANDS[k] devnull.close() else: @@ -76,10 +79,11 @@ def extract(filePath, outputDestination): if ext[1] in EXTRACT_COMMANDS: cmd = EXTRACT_COMMANDS[ext[1]] else: - core.logger.debug("EXTRACTOR: Unknown file type: %s" % ext[1]) + core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format + (ext=ext[1])) return False - # Create outputDestination folder + # Create outputDestination folder core.makeDir(outputDestination) if core.PASSWORDSFILE != "" and os.path.isfile(os.path.normpath(core.PASSWORDSFILE)): @@ -87,8 +91,10 @@ def extract(filePath, outputDestination): else: passwords = [] - core.logger.info("Extracting %s to %s" % (filePath, outputDestination)) - core.logger.debug("Extracting %s %s %s" % (cmd, filePath, outputDestination)) + core.logger.info("Extracting {file} to {destination}".format + (file=filePath, destination=outputDestination)) + core.logger.debug("Extracting {cmd} {file} {destination}".format + (cmd=cmd, file=filePath, destination=outputDestination)) origFiles 
= [] origDirs = [] @@ -114,9 +120,9 @@ def extract(filePath, outputDestination): cmd2.append("-p-") # don't prompt for password. p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. res = p.wait() - if ( - res >= 0 and os.name == 'nt') or res == 0: # for windows chp returns process id if successful or -1*Error code. Linux returns 0 for successful. - core.logger.info("EXTRACTOR: Extraction was successful for %s to %s" % (filePath, outputDestination)) + if (res >= 0 and os.name == 'nt') or res == 0: # for windows chp returns process id if successful or -1*Error code. Linux returns 0 for successful. + core.logger.info("EXTRACTOR: Extraction was successful for {file} to {destination}".format + (file=filePath, destination=outputDestination)) success = 1 elif len(passwords) > 0: core.logger.info("EXTRACTOR: Attempting to extract with passwords") @@ -130,14 +136,17 @@ def extract(filePath, outputDestination): p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. res = p.wait() if (res >= 0 and platform == 'Windows') or res == 0: - core.logger.info("EXTRACTOR: Extraction was successful for %s to %s using password: %s" % ( - filePath, outputDestination, password)) + core.logger.info("EXTRACTOR: Extraction was successful " + "for {file} to {destination} using password: {pwd}".format + (file=filePath, destination=outputDestination, pwd=password)) success = 1 break else: continue except: - core.logger.error("EXTRACTOR: Extraction failed for %s. Could not call command %s" % (filePath, cmd)) + core.logger.error("EXTRACTOR: Extraction failed for {file}. " + "Could not call command {cmd}".format + (file=filePath, cmd=cmd)) os.chdir(pwd) return False @@ -162,5 +171,7 @@ def extract(filePath, outputDestination): pass return True else: - core.logger.error("EXTRACTOR: Extraction failed for %s. Result was %s" % (filePath, res)) + core.logger.error("EXTRACTOR: Extraction failed for {file}. 
" + "Result was {result}".format + (file=filePath, result=res)) return False diff --git a/core/linktastic/linktastic.py b/core/linktastic/linktastic.py index af690158..95d2f8c6 100644 --- a/core/linktastic/linktastic.py +++ b/core/linktastic/linktastic.py @@ -33,14 +33,14 @@ if os.name == 'nt': # Prevent spaces from messing with us! def _escape_param(param): - return '"%s"' % param + return '"{0}"'.format(param) # Private function to create link on nt-based systems def _link_windows(src, dest): try: subprocess.check_output( - 'cmd /C mklink /H %s %s' % (_escape_param(dest), _escape_param(src)), + 'cmd /C mklink /H {0} {1}'.format(_escape_param(dest), _escape_param(src)), stderr=subprocess.STDOUT, startupinfo=info) except CalledProcessError as err: @@ -54,7 +54,7 @@ def _link_windows(src, dest): def _symlink_windows(src, dest): try: subprocess.check_output( - 'cmd /C mklink %s %s' % (_escape_param(dest), _escape_param(src)), + 'cmd /C mklink {0} {1}'.format(_escape_param(dest), _escape_param(src)), stderr=subprocess.STDOUT, startupinfo=info) except CalledProcessError as err: raise IOError(err.output.decode('utf-8')) @@ -67,7 +67,7 @@ def _symlink_windows(src, dest): def _dirlink_windows(src, dest): try: subprocess.check_output( - 'cmd /C mklink /J %s %s' % (_escape_param(dest), _escape_param(src)), + 'cmd /C mklink /J {0} {1}'.format(_escape_param(dest), _escape_param(src)), stderr=subprocess.STDOUT, startupinfo=info) except CalledProcessError as err: raise IOError(err.output.decode('utf-8')) @@ -80,7 +80,7 @@ def _dirlink_windows(src, dest): def _junctionlink_windows(src, dest): try: subprocess.check_output( - 'cmd /C mklink /D %s %s' % (_escape_param(dest), _escape_param(src)), + 'cmd /C mklink /D {0} {1}'.format(_escape_param(dest), _escape_param(src)), stderr=subprocess.STDOUT, startupinfo=info) except CalledProcessError as err: raise IOError(err.output.decode('utf-8')) diff --git a/core/logger.py b/core/logger.py index 86772c15..94d1764f 100644 --- 
a/core/logger.py +++ b/core/logger.py @@ -193,9 +193,9 @@ class NTMRotatingLogHandler(object): self.writes_since_check += 1 try: - message = u"%s: %s" % (str(section).upper(), toLog) + message = u"{0}: {1}".format(str(section).upper(), toLog) except: - message = u"%s: Message contains non-utf-8 string" % (str(section).upper()) + message = u"{0}: Message contains non-utf-8 string".format(str(section).upper()) out_line = message diff --git a/core/nzbToMediaAutoFork.py b/core/nzbToMediaAutoFork.py index ce1949c5..76a11204 100644 --- a/core/nzbToMediaAutoFork.py +++ b/core/nzbToMediaAutoFork.py @@ -52,42 +52,49 @@ def autoFork(section, inputCategory): detected = False if section == "NzbDrone": - logger.info("Attempting to verify %s fork" % inputCategory) - url = "%s%s:%s%s/api/rootfolder" % (protocol, host, port, web_root) + logger.info("Attempting to verify {category} fork".format + (category=inputCategory)) + url = "{protocol}{host}:{port}{root}/api/rootfolder".format( + protocol=protocol, host=host, port=port, root=web_root) headers = {"X-Api-Key": apikey} try: r = requests.get(url, headers=headers, stream=True, verify=False) except requests.ConnectionError: - logger.warning("Could not connect to %s:%s to verify fork!" % (section, inputCategory)) + logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, inputCategory)) if not r.ok: - logger.warning("Connection to %s:%s failed! Check your configuration" % (section, inputCategory)) + logger.warning("Connection to {section}:{category} failed! " + "Check your configuration".format + (section=section, category=inputCategory)) fork = ['default', {}] elif fork == "auto": params = core.ALL_FORKS rem_params = [] - logger.info("Attempting to auto-detect %s fork" % inputCategory) + logger.info("Attempting to auto-detect {category} fork".format(category=inputCategory)) # define the order to test. Default must be first since the default fork doesn't reject parameters. 
# then in order of most unique parameters. - url = "%s%s:%s%s/home/postprocess/" % (protocol, host, port, web_root) + url = "{protocol}{host}:{port}{root}/home/postprocess/".format( + protocol=protocol, host=host, port=port, root=web_root) # attempting to auto-detect fork try: if username and password: s = requests.Session() - login = "%s%s:%s%s/login" % (protocol, host, port, web_root) + login = "{protocol}{host}:{port}{root}/login".format( + protocol=protocol, host=host, port=port, root=web_root) login_params = {'username': username, 'password': password} s.post(login, data=login_params, stream=True, verify=False) r = s.get(url, auth=(username, password), verify=False) else: r = requests.get(url, verify=False) except requests.ConnectionError: - logger.info("Could not connect to %s:%s to perform auto-fork detection!" % (section, inputCategory)) + logger.info("Could not connect to {section}:{category} to perform auto-fork detection!".format + (section=section, category=inputCategory)) r = [] if r and r.ok: for param in params: - if not 'name="%s"' % (param) in r.text: + if not 'name={param!r}'.format(param=param) in r.text: rem_params.append(param) for param in rem_params: params.pop(param) @@ -96,13 +103,17 @@ def autoFork(section, inputCategory): detected = True break if detected: - logger.info("%s:%s fork auto-detection successful ..." 
% (section, inputCategory)) + logger.info("{section}:{category} fork auto-detection successful ...".format + (section=section, category=inputCategory)) elif rem_params: - logger.info("%s:%s fork auto-detection found custom params %s" % (section, inputCategory, params)) + logger.info("{section}:{category} fork auto-detection found custom params {params}".format + (section=section, category=inputCategory, params=params)) fork = ['custom', params] else: - logger.info("%s:%s fork auto-detection failed" % (section, inputCategory)) + logger.info("{section}:{category} fork auto-detection failed".format + (section=section, category=inputCategory)) fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)] - logger.info("%s:%s fork set to %s" % (section, inputCategory, fork[0])) + logger.info("{section}:{category} fork set to {fork}".format + (section=section, category=inputCategory, fork=fork[0])) return fork[0], fork[1] diff --git a/core/nzbToMediaConfig.py b/core/nzbToMediaConfig.py index badc597f..00aed989 100644 --- a/core/nzbToMediaConfig.py +++ b/core/nzbToMediaConfig.py @@ -118,16 +118,16 @@ class ConfigObj(configobj.ConfigObj, Section): if not os.path.isfile(core.CONFIG_FILE): shutil.copyfile(core.CONFIG_SPEC_FILE, core.CONFIG_FILE) CFG_OLD = config(core.CONFIG_FILE) - except Exception as e: - logger.debug("Error %s when copying to .cfg" % (e)) + except Exception as error: + logger.debug("Error {msg} when copying to .cfg".format(msg=error)) try: # check for autoProcessMedia.cfg.spec and create if it does not exist if not os.path.isfile(core.CONFIG_SPEC_FILE): shutil.copyfile(core.CONFIG_FILE, core.CONFIG_SPEC_FILE) CFG_NEW = config(core.CONFIG_SPEC_FILE) - except Exception as e: - logger.debug("Error %s when copying to .spec" % (e)) + except Exception as error: + logger.debug("Error {msg} when copying to .spec".format(msg=error)) # check for autoProcessMedia.cfg and autoProcessMedia.cfg.spec and if they don't exist return and fail if CFG_NEW is None or 
CFG_OLD is None: @@ -255,8 +255,9 @@ class ConfigObj(configobj.ConfigObj, Section): try: if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ: if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']: - logger.warning("%s category is set for SickBeard and NzbDrone. " - "Please check your config in NZBGet" % (os.environ['NZBPO_NDCATEGORY'])) + logger.warning("{x} category is set for SickBeard and NzbDrone. " + "Please check your config in NZBGet".format + (x=os.environ['NZBPO_NDCATEGORY'])) section = "Nzb" key = 'NZBOP_DESTDIR' @@ -462,15 +463,15 @@ class ConfigObj(configobj.ConfigObj, Section): CFG_NEW[section][os.environ[envCatKey]][option] = value CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 - except Exception as e: - logger.debug("Error %s when applying NZBGet config" % (e)) + except Exception as error: + logger.debug("Error {msg} when applying NZBGet config".format(msg=error)) try: # write our new config to autoProcessMedia.cfg CFG_NEW.filename = core.CONFIG_FILE CFG_NEW.write() - except Exception as e: - logger.debug("Error %s when writing changes to .cfg" % (e)) + except Exception as error: + logger.debug("Error {msg} when writing changes to .cfg".format(msg=error)) return CFG_NEW diff --git a/core/nzbToMediaDB.py b/core/nzbToMediaDB.py index 4445b6e1..de7fd825 100644 --- a/core/nzbToMediaDB.py +++ b/core/nzbToMediaDB.py @@ -19,7 +19,7 @@ def dbFilename(filename="nzbtomedia.db", suffix=None): @return: the correct location of the database file. """ if suffix: - filename = "%s.%s" % (filename, suffix) + filename = "{0}.{1}".format(filename, suffix) return core.os.path.join(core.PROGRAM_DIR, filename) @@ -181,7 +181,7 @@ class DBConnection(object): def tableInfo(self, tableName): # FIXME ? 
binding is not supported here, but I cannot find a way to escape a string manually - cursor = self.connection.execute("PRAGMA table_info(%s)" % tableName) + cursor = self.connection.execute("PRAGMA table_info({0})".format(tableName)) columns = {} for column in cursor: columns[column['name']] = {'type': column['type']} @@ -250,8 +250,8 @@ class SchemaUpgrade(object): return column in self.connection.tableInfo(tableName) def addColumn(self, table, column, type="NUMERIC", default=0): - self.connection.action("ALTER TABLE %s ADD %s %s" % (table, column, type)) - self.connection.action("UPDATE %s SET %s = ?" % (table, column), (default,)) + self.connection.action("ALTER TABLE {0} ADD {1} {2}".format(table, column, type)) + self.connection.action("UPDATE {0} SET {1} = ?".format(table, column), (default,)) def checkDBVersion(self): result = self.connection.select("SELECT db_version FROM db_version") diff --git a/core/nzbToMediaSceneExceptions.py b/core/nzbToMediaSceneExceptions.py index 40700961..71ac28b2 100644 --- a/core/nzbToMediaSceneExceptions.py +++ b/core/nzbToMediaSceneExceptions.py @@ -61,23 +61,25 @@ def strip_groups(filename): def rename_file(filename, newfilePath): - logger.debug("Replacing file name %s with download name %s" % (filename, newfilePath), "EXCEPTION") + logger.debug("Replacing file name {old} with download name {new}".format + (old=filename, new=newfilePath), "EXCEPTION") try: os.rename(filename, newfilePath) - except Exception as e: - logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION") + except Exception as error: + logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION") def replace_filename(filename, dirname, name): head, fileExtension = os.path.splitext(os.path.basename(filename)) if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None: newname = os.path.basename(dirname).replace(' ', '.') - logger.debug("Replacing file name %s with directory name %s" % (head, 
newname), "EXCEPTION") + logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION") elif media_pattern.search(name.replace(' ', '.').lower()) is not None: newname = name.replace(' ', '.') - logger.debug("Replacing file name %s with download name %s" % (head, newname), "EXCEPTION") + logger.debug("Replacing file name {old} with download name {new}".format + (old=head, new=newname), "EXCEPTION") else: - logger.warning("No name replacement determined for %s" % (head), "EXCEPTION") + logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION") newname = name newfile = newname + fileExtension newfilePath = os.path.join(dirname, newfile) @@ -103,7 +105,8 @@ def reverse_filename(filename, dirname, name): else: newname = head[::-1].title() newname = newname.replace(' ', '.') - logger.debug("Reversing filename %s to %s" % (head, newname), "EXCEPTION") + logger.debug("Reversing filename {old} to {new}".format + (old=head, new=newname), "EXCEPTION") newfile = newname + fileExtension newfilePath = os.path.join(dirname, newfile) return newfilePath @@ -129,11 +132,12 @@ def rename_script(dirname): dest = os.path.join(dirname, cmd[1].split('\\')[-1].split('/')[-1]) if os.path.isfile(dest): continue - logger.debug("Renaming file %s to %s" % (orig, dest), "EXCEPTION") + logger.debug("Renaming file {source} to {destination}".format + (source=orig, destination=dest), "EXCEPTION") try: os.rename(orig, dest) - except Exception as e: - logger.error("Unable to rename file due to: %s" % (str(e)), "EXCEPTION") + except Exception as error: + logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION") # dict for custom groups # we can add more to this list diff --git a/core/nzbToMediaUserScript.py b/core/nzbToMediaUserScript.py index fe6a453e..d05273be 100644 --- a/core/nzbToMediaUserScript.py +++ b/core/nzbToMediaUserScript.py @@ -48,7 +48,7 @@ def external_script(outputDestination, 
torrentName, torrentLabel, settings): if transcoder.isVideoGood(video, 0): import_subs(video) else: - logger.info("Corrupt video file found %s. Deleting." % (video), "USERSCRIPT") + logger.info("Corrupt video file found {0}. Deleting.".format(video), "USERSCRIPT") os.unlink(video) for dirpath, dirnames, filenames in os.walk(outputDestination): @@ -64,22 +64,22 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): command = [core.USER_SCRIPT] for param in core.USER_SCRIPT_PARAM: if param == "FN": - command.append('%s' % file) + command.append('{0}'.format(file)) continue elif param == "FP": - command.append('%s' % filePath) + command.append('{0}'.format(filePath)) continue elif param == "TN": - command.append('%s' % torrentName) + command.append('{0}'.format(torrentName)) continue elif param == "TL": - command.append('%s' % torrentLabel) + command.append('{0}'.format(torrentLabel)) continue elif param == "DN": if core.USER_SCRIPT_RUNONCE == 1: - command.append('%s' % outputDestination) + command.append('{0}'.format(outputDestination)) else: - command.append('%s' % dirpath) + command.append('{0}'.format(dirpath)) continue else: command.append(param) @@ -87,21 +87,21 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): cmd = "" for item in command: cmd = cmd + " " + item - logger.info("Running script %s on file %s." % (cmd, filePath), "USERSCRIPT") + logger.info("Running script {0} on file {1}.".format(cmd, filePath), "USERSCRIPT") try: p = Popen(command) res = p.wait() if str(res) in core.USER_SCRIPT_SUCCESSCODES: # Linux returns 0 for successful. 
- logger.info("UserScript %s was successfull" % (command[0])) + logger.info("UserScript {0} was successfull".format(command[0])) result = 0 else: - logger.error("UserScript %s has failed with return code: %s" % (command[0], res), "USERSCRIPT") + logger.error("UserScript {0} has failed with return code: {1}".format(command[0], res), "USERSCRIPT") logger.info( - "If the UserScript completed successfully you should add %s to the user_script_successCodes" % ( + "If the UserScript completed successfully you should add {0} to the user_script_successCodes".format( res), "USERSCRIPT") result = int(1) except: - logger.error("UserScript %s has failed" % (command[0]), "USERSCRIPT") + logger.error("UserScript {0} has failed".format(command[0]), "USERSCRIPT") result = int(1) final_result += result @@ -114,9 +114,9 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): num_files_new += 1 if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0: - logger.info("All files have been processed. Cleaning outputDirectory %s" % (outputDestination)) + logger.info("All files have been processed. Cleaning outputDirectory {0}".format(outputDestination)) rmDir(outputDestination) elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0: - logger.info("%s files were processed, but %s still remain. outputDirectory will not be cleaned." % ( + logger.info("{0} files were processed, but {1} still remain. 
outputDirectory will not be cleaned.".format( num_files, num_files_new)) return [final_result, ''] diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index bbb58346..75c3571a 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -40,7 +40,7 @@ def reportNzb(failure_link, clientAgent): try: requests.post(failure_link, headers=headers, timeout=(30, 300)) except Exception as e: - logger.error("Unable to open URL %s due to %s" % (failure_link, e)) + logger.error("Unable to open URL {0} due to {1}".format(failure_link, e)) return @@ -115,13 +115,13 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): pathlist = os.path.normpath(inputDirectory).split(os.sep) if inputCategory and inputCategory in pathlist: - logger.debug("SEARCH: Found the Category: %s in directory structure" % (inputCategory)) + logger.debug("SEARCH: Found the Category: {0} in directory structure".format(inputCategory)) elif inputCategory: - logger.debug("SEARCH: Could not find the category: %s in the directory structure" % (inputCategory)) + logger.debug("SEARCH: Could not find the category: {0} in the directory structure".format(inputCategory)) else: try: inputCategory = list(set(pathlist) & set(categories))[-1] # assume last match is most relevant category. 
- logger.debug("SEARCH: Found Category: %s in directory structure" % (inputCategory)) + logger.debug("SEARCH: Found Category: {0} in directory structure".format(inputCategory)) except IndexError: inputCategory = "" logger.debug("SEARCH: Could not find a category in the directory structure") @@ -132,30 +132,30 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): if inputCategory and os.path.isdir(os.path.join(inputDirectory, inputCategory)): logger.info( - "SEARCH: Found category directory %s in input directory directory %s" % (inputCategory, inputDirectory)) + "SEARCH: Found category directory {0} in input directory directory {1}".format(inputCategory, inputDirectory)) inputDirectory = os.path.join(inputDirectory, inputCategory) - logger.info("SEARCH: Setting inputDirectory to %s" % (inputDirectory)) + logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory)) if inputName and os.path.isdir(os.path.join(inputDirectory, inputName)): - logger.info("SEARCH: Found torrent directory %s in input directory directory %s" % (inputName, inputDirectory)) + logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format(inputName, inputDirectory)) inputDirectory = os.path.join(inputDirectory, inputName) - logger.info("SEARCH: Setting inputDirectory to %s" % (inputDirectory)) + logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory)) tordir = True elif inputName and os.path.isdir(os.path.join(inputDirectory, sanitizeName(inputName))): - logger.info("SEARCH: Found torrent directory %s in input directory directory %s" % ( + logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format( sanitizeName(inputName), inputDirectory)) inputDirectory = os.path.join(inputDirectory, sanitizeName(inputName)) - logger.info("SEARCH: Setting inputDirectory to %s" % (inputDirectory)) + logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory)) tordir = True 
elif inputName and os.path.isfile(os.path.join(inputDirectory, inputName)): - logger.info("SEARCH: Found torrent file %s in input directory directory %s" % (inputName, inputDirectory)) + logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format(inputName, inputDirectory)) inputDirectory = os.path.join(inputDirectory, inputName) - logger.info("SEARCH: Setting inputDirectory to %s" % (inputDirectory)) + logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory)) tordir = True elif inputName and os.path.isfile(os.path.join(inputDirectory, sanitizeName(inputName))): - logger.info("SEARCH: Found torrent file %s in input directory directory %s" % ( + logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format( sanitizeName(inputName), inputDirectory)) inputDirectory = os.path.join(inputDirectory, sanitizeName(inputName)) - logger.info("SEARCH: Setting inputDirectory to %s" % (inputDirectory)) + logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory)) tordir = True imdbid = [item for item in pathlist if '.cp(tt' in item] # This looks for the .cp(tt imdb id in the path. 
@@ -168,7 +168,8 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): index = pathlist.index(inputCategory) if index + 1 < len(pathlist): tordir = True - logger.info("SEARCH: Found a unique directory %s in the category directory" % (pathlist[index + 1])) + logger.info("SEARCH: Found a unique directory {0} in the category directory".format + (pathlist[index + 1])) if not inputName: inputName = pathlist[index + 1] except ValueError: @@ -176,7 +177,7 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories): if inputName and not tordir: if inputName in pathlist or sanitizeName(inputName) in pathlist: - logger.info("SEARCH: Found torrent directory %s in the directory structure" % (inputName)) + logger.info("SEARCH: Found torrent directory {0} in the directory structure".format(inputName)) tordir = True else: root = 1 @@ -206,7 +207,7 @@ def is_minSize(inputName, minSize): try: inputSize = getDirSize(os.path.dirname(inputName)) except: - logger.error("Failed to get file size for %s" % (inputName), 'MINSIZE') + logger.error("Failed to get file size for {0}".format(inputName), 'MINSIZE') return True # Ignore files under a certain size @@ -221,9 +222,9 @@ def is_sample(inputName): def copy_link(src, targetLink, useLink): - logger.info("MEDIAFILE: [%s]" % (os.path.basename(targetLink)), 'COPYLINK') - logger.info("SOURCE FOLDER: [%s]" % (os.path.dirname(src)), 'COPYLINK') - logger.info("TARGET FOLDER: [%s]" % (os.path.dirname(targetLink)), 'COPYLINK') + logger.info("MEDIAFILE: [{0}]".format(os.path.basename(targetLink)), 'COPYLINK') + logger.info("SOURCE FOLDER: [{0}]".format(os.path.dirname(src)), 'COPYLINK') + logger.info("TARGET FOLDER: [{0}]".format(os.path.dirname(targetLink)), 'COPYLINK') if src != targetLink and os.path.exists(targetLink): logger.info("MEDIAFILE already exists in the TARGET folder, skipping ...", 'COPYLINK') @@ -263,7 +264,7 @@ def copy_link(src, targetLink, useLink): shutil.move(src, targetLink) 
return True except Exception as e: - logger.warning("Error: %s, copying instead ... " % (e), 'COPYLINK') + logger.warning("Error: {0}, copying instead ... ".format(e), 'COPYLINK') logger.info("Copying SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') shutil.copy(src, targetLink) @@ -277,26 +278,26 @@ def replace_links(link): if os.name == 'nt': import jaraco if not jaraco.windows.filesystem.islink(link): - logger.debug('%s is not a link' % (link)) + logger.debug('{0} is not a link'.format(link)) return while jaraco.windows.filesystem.islink(target): target = jaraco.windows.filesystem.readlink(target) n = n + 1 else: if not os.path.islink(link): - logger.debug('%s is not a link' % (link)) + logger.debug('{0} is not a link'.format(link)) return while os.path.islink(target): target = os.readlink(target) n = n + 1 if n > 1: - logger.info("Changing sym-link: %s to point directly to file: %s" % (link, target), 'COPYLINK') + logger.info("Changing sym-link: {0} to point directly to file: {1}".format(link, target), 'COPYLINK') os.unlink(link) linktastic.symlink(target, link) def flatten(outputDestination): - logger.info("FLATTEN: Flattening directory: %s" % (outputDestination)) + logger.info("FLATTEN: Flattening directory: {0}".format(outputDestination)) for outputFile in listMediaFiles(outputDestination): dirPath = os.path.dirname(outputFile) fileName = os.path.basename(outputFile) @@ -309,7 +310,7 @@ def flatten(outputDestination): try: shutil.move(outputFile, target) except: - logger.error("Could not flatten %s" % (outputFile), 'FLATTEN') + logger.error("Could not flatten {0}".format(outputFile), 'FLATTEN') removeEmptyFolders(outputDestination) # Cleanup empty directories @@ -320,7 +321,7 @@ def removeEmptyFolders(path, removeRoot=True): return # remove empty subfolders - logger.debug("Checking for empty folders in:%s" % (path)) + logger.debug("Checking for empty folders in:{0}".format(path)) files = os.listdir(path) if len(files): for f in files: @@ -331,7 +332,7 @@ def 
removeEmptyFolders(path, removeRoot=True): # if folder empty, delete it files = os.listdir(path) if len(files) == 0 and removeRoot: - logger.debug("Removing empty folder:%s" % (path)) + logger.debug("Removing empty folder:{}".format(path)) os.rmdir(path) @@ -386,16 +387,16 @@ def WakeUp(): i = 1 while TestCon(host, port) == "Down" and i < 4: - logger.info(("Sending WakeOnLan Magic Packet for mac: %s" % (mac))) + logger.info(("Sending WakeOnLan Magic Packet for mac: {0}".format(mac))) WakeOnLan(mac) time.sleep(20) i = i + 1 if TestCon(host, port) == "Down": # final check. - logger.warning("System with mac: %s has not woken after 3 attempts. Continuing with the rest of the script." % ( - mac)) + logger.warning("System with mac: {0} has not woken after 3 attempts. " + "Continuing with the rest of the script.".format(mac)) else: - logger.info("System with mac: %s has been woken. Continuing with the rest of the script." % (mac)) + logger.info("System with mac: {0} has been woken. Continuing with the rest of the script.".format(mac)) def CharReplace(Name): @@ -454,23 +455,23 @@ def convert_to_ascii(inputName, dirName): encoded, base2 = CharReplace(base) if encoded: dirName = os.path.join(dir, base2) - logger.info("Renaming directory to: %s." % (base2), 'ENCODER') + logger.info("Renaming directory to: {0}.".format(base2), 'ENCODER') os.rename(os.path.join(dir, base), dirName) if 'NZBOP_SCRIPTDIR' in os.environ: - print("[NZB] DIRECTORY=%s" % (dirName)) + print("[NZB] DIRECTORY={0}".format(dirName)) for dirname, dirnames, filenames in os.walk(dirName, topdown=False): for subdirname in dirnames: encoded, subdirname2 = CharReplace(subdirname) if encoded: - logger.info("Renaming directory to: %s." 
% (subdirname2), 'ENCODER') + logger.info("Renaming directory to: {0}.".format(subdirname2), 'ENCODER') os.rename(os.path.join(dirname, subdirname), os.path.join(dirname, subdirname2)) for dirname, dirnames, filenames in os.walk(dirName): for filename in filenames: encoded, filename2 = CharReplace(filename) if encoded: - logger.info("Renaming file to: %s." % (filename2), 'ENCODER') + logger.info("Renaming file to: {0}.".format(filename2), 'ENCODER') os.rename(os.path.join(dirname, filename), os.path.join(dirname, filename2)) return inputName, dirName @@ -604,7 +605,7 @@ def getDirs(section, subsection, link='hard'): def processDir(path): folders = [] - logger.info("Searching %s for mediafiles to post-process ..." % (path)) + logger.info("Searching {0} for mediafiles to post-process ...".format(path)) sync = [o for o in os.listdir(path) if os.path.splitext(o)[1] in ['.!sync', '.bts']] # search for single files and move them into their own folder for post-processing for mediafile in [os.path.join(path, o) for o in os.listdir(path) if @@ -614,7 +615,7 @@ def getDirs(section, subsection, link='hard'): if os.path.split(mediafile)[1] in ['Thumbs.db', 'thumbs.db']: continue try: - logger.debug("Found file %s in root directory %s." 
% (os.path.split(mediafile)[1], path)) + logger.debug("Found file {0} in root directory {1}.".format(os.path.split(mediafile)[1], path)) newPath = None fileExt = os.path.splitext(mediafile)[1] try: @@ -626,7 +627,7 @@ def getDirs(section, subsection, link='hard'): album = f.album # create new path - newPath = os.path.join(path, "%s - %s" % (sanitizeName(artist), sanitizeName(album))) + newPath = os.path.join(path, "{0} - {1}".format(sanitizeName(artist), sanitizeName(album))) elif fileExt in core.MEDIACONTAINER: f = guessit.guess_video_info(mediafile) @@ -638,7 +639,7 @@ def getDirs(section, subsection, link='hard'): newPath = os.path.join(path, sanitizeName(title)) except Exception as e: - logger.error("Exception parsing name for media file: %s: %s" % (os.path.split(mediafile)[1], e)) + logger.error("Exception parsing name for media file: {0}: {1}".format(os.path.split(mediafile)[1], e)) if not newPath: title = os.path.splitext(os.path.basename(mediafile))[0] @@ -667,7 +668,7 @@ def getDirs(section, subsection, link='hard'): # link file to its new path copy_link(mediafile, newfile, link) except Exception as e: - logger.error("Failed to move %s to its own directory: %s" % (os.path.split(mediafile)[1], e)) + logger.error("Failed to move {0} to its own directory: {1}".format(os.path.split(mediafile)[1], e)) # removeEmptyFolders(path, removeRoot=False) @@ -687,8 +688,8 @@ def getDirs(section, subsection, link='hard'): elif os.path.exists(core.CFG[section][subsection]["watch_dir"]): to_return.extend(processDir(core.CFG[section][subsection]["watch_dir"])) except Exception as e: - logger.error("Failed to add directories from %s for post-processing: %s" % ( - core.CFG[section][subsection]["watch_dir"], e)) + logger.error("Failed to add directories from {0} for post-processing: {1}".format + (core.CFG[section][subsection]["watch_dir"], e)) if core.USELINK == 'move': try: @@ -696,10 +697,10 @@ def getDirs(section, subsection, link='hard'): if 
os.path.exists(outputDirectory): to_return.extend(processDir(outputDirectory)) except Exception as e: - logger.error("Failed to add directories from %s for post-processing: %s" % (core.OUTPUTDIRECTORY, e)) + logger.error("Failed to add directories from {0} for post-processing: {1}".format(core.OUTPUTDIRECTORY, e)) if not to_return: - logger.debug("No directories identified in %s:%s for post-processing" % (section, subsection)) + logger.debug("No directories identified in {0}:{1} for post-processing".format(section, subsection)) return list(set(to_return)) @@ -724,19 +725,19 @@ def onerror(func, path, exc_info): def rmDir(dirName): - logger.info("Deleting %s" % (dirName)) + logger.info("Deleting {0}".format(dirName)) try: shutil.rmtree(dirName, onerror=onerror) except: - logger.error("Unable to delete folder %s" % (dirName)) + logger.error("Unable to delete folder {0}".format(dirName)) def cleanDir(path, section, subsection): if not os.path.exists(path): - logger.info('Directory %s has been processed and removed ...' % (path), 'CLEANDIR') + logger.info('Directory {0} has been processed and removed ...'.format(path), 'CLEANDIR') return if core.FORCE_CLEAN and not core.FAILED: - logger.info('Doing Forceful Clean of %s' % (path), 'CLEANDIR') + logger.info('Doing Forceful Clean of {0}'.format(path), 'CLEANDIR') rmDir(path) return try: @@ -753,15 +754,15 @@ def cleanDir(path, section, subsection): num_files = 'unknown' if num_files > 0: logger.info( - "Directory %s still contains %s unprocessed file(s), skipping ..." % (path, num_files), + "Directory {0} still contains {1} unprocessed file(s), skipping ...".format(path, num_files), 'CLEANDIRS') return - logger.info("Directory %s has been processed, removing ..." 
% (path), 'CLEANDIRS') + logger.info("Directory {0} has been processed, removing ...".format(path), 'CLEANDIRS') try: shutil.rmtree(path, onerror=onerror) except: - logger.error("Unable to delete directory %s" % (path)) + logger.error("Unable to delete directory {0}".format(path)) def create_torrent_class(clientAgent): @@ -770,14 +771,14 @@ def create_torrent_class(clientAgent): if clientAgent == 'utorrent': try: - logger.debug("Connecting to %s: %s" % (clientAgent, core.UTORRENTWEBUI)) + logger.debug("Connecting to {0}: {1}".format(clientAgent, core.UTORRENTWEBUI)) tc = UTorrentClient(core.UTORRENTWEBUI, core.UTORRENTUSR, core.UTORRENTPWD) except: logger.error("Failed to connect to uTorrent") if clientAgent == 'transmission': try: - logger.debug("Connecting to %s: http://%s:%s" % ( + logger.debug("Connecting to {0}: http://{1}:{2}".format( clientAgent, core.TRANSMISSIONHOST, core.TRANSMISSIONPORT)) tc = TransmissionClient(core.TRANSMISSIONHOST, core.TRANSMISSIONPORT, core.TRANSMISSIONUSR, @@ -787,7 +788,7 @@ def create_torrent_class(clientAgent): if clientAgent == 'deluge': try: - logger.debug("Connecting to %s: http://%s:%s" % (clientAgent, core.DELUGEHOST, core.DELUGEPORT)) + logger.debug("Connecting to {0}: http://{1}:{2}".format(clientAgent, core.DELUGEHOST, core.DELUGEPORT)) tc = DelugeClient() tc.connect(host=core.DELUGEHOST, port=core.DELUGEPORT, username=core.DELUGEUSR, password=core.DELUGEPWD) @@ -798,7 +799,7 @@ def create_torrent_class(clientAgent): def pause_torrent(clientAgent, inputHash, inputID, inputName): - logger.debug("Stopping torrent %s in %s while processing" % (inputName, clientAgent)) + logger.debug("Stopping torrent {0} in {1} while processing".format(inputName, clientAgent)) try: if clientAgent == 'utorrent' and core.TORRENT_CLASS != "": core.TORRENT_CLASS.stop(inputHash) @@ -808,13 +809,13 @@ def pause_torrent(clientAgent, inputHash, inputID, inputName): core.TORRENT_CLASS.core.pause_torrent([inputID]) time.sleep(5) except: - 
logger.warning("Failed to stop torrent %s in %s" % (inputName, clientAgent)) + logger.warning("Failed to stop torrent {0} in {1}".format(inputName, clientAgent)) def resume_torrent(clientAgent, inputHash, inputID, inputName): if not core.TORRENT_RESUME == 1: return - logger.debug("Starting torrent %s in %s" % (inputName, clientAgent)) + logger.debug("Starting torrent {0} in {1}".format(inputName, clientAgent)) try: if clientAgent == 'utorrent' and core.TORRENT_CLASS != "": core.TORRENT_CLASS.start(inputHash) @@ -824,12 +825,12 @@ def resume_torrent(clientAgent, inputHash, inputID, inputName): core.TORRENT_CLASS.core.resume_torrent([inputID]) time.sleep(5) except: - logger.warning("Failed to start torrent %s in %s" % (inputName, clientAgent)) + logger.warning("Failed to start torrent {0} in {1}".format(inputName, clientAgent)) def remove_torrent(clientAgent, inputHash, inputID, inputName): if core.DELETE_ORIGINAL == 1 or core.USELINK == 'move': - logger.debug("Deleting torrent %s from %s" % (inputName, clientAgent)) + logger.debug("Deleting torrent {0} from {1}".format(inputName, clientAgent)) try: if clientAgent == 'utorrent' and core.TORRENT_CLASS != "": core.TORRENT_CLASS.removedata(inputHash) @@ -840,13 +841,13 @@ def remove_torrent(clientAgent, inputHash, inputID, inputName): core.TORRENT_CLASS.core.remove_torrent(inputID, True) time.sleep(5) except: - logger.warning("Failed to delete torrent %s in %s" % (inputName, clientAgent)) + logger.warning("Failed to delete torrent {0} in {1}".format(inputName, clientAgent)) else: resume_torrent(clientAgent, inputHash, inputID, inputName) def find_download(clientAgent, download_id): - logger.debug("Searching for Download on %s ..." 
% (clientAgent)) + logger.debug("Searching for Download on {0} ...".format(clientAgent)) if clientAgent == 'utorrent': torrents = core.TORRENT_CLASS.list()[1]['torrents'] for torrent in torrents: @@ -862,9 +863,9 @@ def find_download(clientAgent, download_id): return False if clientAgent == 'sabnzbd': if "http" in core.SABNZBDHOST: - baseURL = "%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT) + baseURL = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) else: - baseURL = "http://%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT) + baseURL = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) url = baseURL params = { 'apikey': core.SABNZBDAPIKEY, @@ -889,9 +890,9 @@ def get_nzoid(inputName): slots = [] logger.debug("Searching for nzoid from SAbnzbd ...") if "http" in core.SABNZBDHOST: - baseURL = "%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT) + baseURL = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) else: - baseURL = "http://%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT) + baseURL = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) url = baseURL params = { 'apikey': core.SABNZBDAPIKEY, @@ -925,7 +926,7 @@ def get_nzoid(inputName): for nzo_id, name in slots: if name in [inputName, cleanName]: nzoid = nzo_id - logger.debug("Found nzoid: %s" % nzoid) + logger.debug("Found nzoid: {0}".format(nzoid)) break except: logger.warning("Data from SABnzbd could not be parsed") @@ -988,7 +989,8 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me if delete_ignored == 1: try: os.unlink(path) - logger.debug('Ignored file %s has been removed ...' % (curFile)) + logger.debug('Ignored file {0} has been removed ...'.format + (curFile)) except: pass else: @@ -1009,7 +1011,8 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me if delete_ignored == 1: try: os.unlink(fullCurFile) - logger.debug('Ignored file %s has been removed ...' 
% (curFile)) + logger.debug('Ignored file {0} has been removed ...'.format + (curFile)) except: pass continue @@ -1022,21 +1025,21 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me def find_imdbid(dirName, inputName): imdbid = None - logger.info('Attemping imdbID lookup for %s' % (inputName)) + logger.info('Attemping imdbID lookup for {0}'.format(inputName)) # find imdbid in dirName logger.info('Searching folder and file names for imdbID ...') m = re.search('(tt\d{7})', dirName + inputName) if m: imdbid = m.group(1) - logger.info("Found imdbID [%s]" % imdbid) + logger.info("Found imdbID [{0}]".format(imdbid)) return imdbid if os.path.isdir(dirName): for file in os.listdir(dirName): m = re.search('(tt\d{7})', file) if m: imdbid = m.group(1) - logger.info("Found imdbID [%s] via file name" % imdbid) + logger.info("Found imdbID [{0}] via file name".format(imdbid)) return imdbid if 'NZBPR__DNZB_MOREINFO' in os.environ: dnzb_more_info = os.environ.get('NZBPR__DNZB_MOREINFO', '') @@ -1045,7 +1048,7 @@ def find_imdbid(dirName, inputName): m = regex.match(dnzb_more_info) if m: imdbid = m.group(1) - logger.info("Found imdbID [%s] from DNZB-MoreInfo" % imdbid) + logger.info("Found imdbID [{0}] from DNZB-MoreInfo".format(imdbid)) return imdbid logger.info('Searching IMDB for imdbID ...') guess = guessit.guess_movie_info(inputName) @@ -1062,12 +1065,12 @@ def find_imdbid(dirName, inputName): url = "http://www.omdbapi.com" - logger.debug("Opening URL: %s" % url) + logger.debug("Opening URL: {0}".format(url)) try: r = requests.get(url, params={'y': year, 't': title}, verify=False, timeout=(60, 300)) except requests.ConnectionError: - logger.error("Unable to open URL %s" % url) + logger.error("Unable to open URL {0}".format(url)) return results = r.json() @@ -1078,10 +1081,10 @@ def find_imdbid(dirName, inputName): pass if imdbid: - logger.info("Found imdbID [%s]" % imdbid) + logger.info("Found imdbID [{0}]".format(imdbid)) return imdbid - 
logger.warning('Unable to find a imdbID for %s' % (inputName)) + logger.warning('Unable to find a imdbID for {0}'.format(inputName)) return imdbid @@ -1103,7 +1106,7 @@ def extractFiles(src, dst=None, keep_archive=None): extracted_folder.append(dst or dirPath) extracted_archive.append(archiveName) except Exception: - logger.error("Extraction failed for: %s" % (fullFileName)) + logger.error("Extraction failed for: {0}".format(fullFileName)) for folder in extracted_folder: for inputFile in listMediaFiles(folder, media=False, audio=False, meta=False, archives=True): @@ -1112,14 +1115,14 @@ def extractFiles(src, dst=None, keep_archive=None): archiveName = re.sub(r"part[0-9]+", "", archiveName) if archiveName not in extracted_archive or keep_archive is True: continue # don't remove if we haven't extracted this archive, or if we want to preserve them. - logger.info("Removing extracted archive %s from folder %s ..." % (fullFileName, folder)) + logger.info("Removing extracted archive {0} from folder {1} ...".format(fullFileName, folder)) try: if not os.access(inputFile, os.W_OK): os.chmod(inputFile, stat.S_IWUSR) os.remove(inputFile) time.sleep(1) except Exception as e: - logger.error("Unable to remove file %s due to: %s" % (inputFile, e)) + logger.error("Unable to remove file {0} due to: {1}".format(inputFile, e)) def import_subs(filename): @@ -1139,13 +1142,13 @@ def import_subs(filename): if not languages: return - logger.debug("Attempting to download subtitles for %s" % (filename), 'SUBTITLES') + logger.debug("Attempting to download subtitles for {0}".format(filename), 'SUBTITLES') try: video = subliminal.scan_video(filename, subtitles=True, embedded_subtitles=True) subtitles = subliminal.download_best_subtitles({video}, languages, hearing_impaired=False) subliminal.save_subtitles(subtitles) except Exception as e: - logger.error("Failed to download subtitles for %s due to: %s" % (filename, e), 'SUBTITLES') + logger.error("Failed to download subtitles for {0} due to: 
{1}".format(filename, e), 'SUBTITLES') def server_responding(baseURL): @@ -1167,7 +1170,7 @@ def plex_update(category): section = None if not core.PLEXSEC: return - logger.debug("Attempting to update Plex Library for category %s." % (category), 'PLEX') + logger.debug("Attempting to update Plex Library for category {0}.".format(category), 'PLEX') for item in core.PLEXSEC: if item[0] == category: section = item[1] @@ -1210,7 +1213,7 @@ def backupVersionedFile(old_file, version): def update_downloadInfoStatus(inputName, status): - logger.db("Updating status of our download %s in the DB to %s" % (inputName, status)) + logger.db("Updating status of our download {0} in the DB to {1}".format(inputName, status)) myDB = nzbToMediaDB.DBConnection() myDB.action("UPDATE downloads SET status=?, last_update=? WHERE input_name=?", @@ -1218,7 +1221,7 @@ def update_downloadInfoStatus(inputName, status): def get_downloadInfo(inputName, status): - logger.db("Getting download info for %s from the DB" % (inputName)) + logger.db("Getting download info for {0} from the DB".format(inputName)) myDB = nzbToMediaDB.DBConnection() sqlResults = myDB.select("SELECT * FROM downloads WHERE input_name=? 
AND status=?", diff --git a/core/synchronousdeluge/rencode.py b/core/synchronousdeluge/rencode.py index f27c3304..8ab01375 100644 --- a/core/synchronousdeluge/rencode.py +++ b/core/synchronousdeluge/rencode.py @@ -435,7 +435,7 @@ def dumps(x, float_bits=DEFAULT_FLOAT_BITS): elif float_bits == 64: encode_func[FloatType] = encode_float64 else: - raise ValueError('Float bits (%d) is not 32 or 64' % float_bits) + raise ValueError('Float bits ({0:d}) is not 32 or 64'.format(float_bits)) r = [] encode_func[type(x)](x, r) finally: diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index 9731f689..a557c808 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -37,24 +37,25 @@ def isVideoGood(videofile, status): else: return True - logger.info('Checking [%s] for corruption, please stand by ...' % (fileNameExt), 'TRANSCODER') + logger.info('Checking [{0}] for corruption, please stand by ...'.format(fileNameExt), 'TRANSCODER') video_details, result = getVideoDetails(videofile) if result != 0: - logger.error("FAILED: [%s] is corrupted!" % (fileNameExt), 'TRANSCODER') + logger.error("FAILED: [{0}] is corrupted!".format(fileNameExt), 'TRANSCODER') return False if video_details.get("error"): - logger.info("FAILED: [%s] returned error [%s]." % (fileNameExt, str(video_details.get("error"))), 'TRANSCODER') + logger.info("FAILED: [{0}] returned error [{1}].".format(fileNameExt, video_details.get("error")), 'TRANSCODER') return False if video_details.get("streams"): videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"] audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] if len(videoStreams) > 0 and len(audioStreams) > 0: - logger.info("SUCCESS: [%s] has no corruption." 
% (fileNameExt), 'TRANSCODER') + logger.info("SUCCESS: [{0}] has no corruption.".format(fileNameExt), 'TRANSCODER') return True else: - logger.info("FAILED: [%s] has %s video streams and %s audio streams. Assume corruption." % ( - fileNameExt, str(len(videoStreams)), str(len(audioStreams))), 'TRANSCODER') + logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. " + "Assume corruption.".format + (fileNameExt, len(videoStreams), len(audioStreams)), 'TRANSCODER') return False @@ -64,7 +65,7 @@ def zip_out(file, img, bitbucket): try: procin = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) except: - logger.error("Extracting [%s] has failed" % (file), 'TRANSCODER') + logger.error("Extracting [{0}] has failed".format(file), 'TRANSCODER') return procin @@ -108,7 +109,7 @@ def getVideoDetails(videofile, img=None, bitbucket=None): result = proc.returncode video_details = json.loads(out) except: - logger.error("Checking [%s] has failed" % (file), 'TRANSCODER') + logger.error("Checking [{0}] has failed".format(file), 'TRANSCODER') return video_details, result @@ -124,7 +125,7 @@ def buildCommands(file, newDir, movieName, bitbucket): if check and core.CONCAT: name = movieName elif check: - name = ('%s.cd%s' % (movieName, check.groups()[0])) + name = ('{0}.cd{1}'.format(movieName, check.groups()[0])) elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name): name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name) if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself. 
@@ -545,20 +546,20 @@ def extract_subs(file, newfilePath, bitbucket): lan = "unk" if num == 1: - outputFile = os.path.join(subdir, "%s.srt" % (name)) + outputFile = os.path.join(subdir, "{0}.srt".format(name)) if os.path.isfile(outputFile): - outputFile = os.path.join(subdir, "%s.%s.srt" % (name, n)) + outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, n)) else: - outputFile = os.path.join(subdir, "%s.%s.srt" % (name, lan)) + outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, lan)) if os.path.isfile(outputFile): - outputFile = os.path.join(subdir, "%s.%s.%s.srt" % (name, lan, n)) + outputFile = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n)) command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', '-codec:' + str(idx), 'srt', outputFile] if platform.system() != 'Windows': command = core.NICENESS + command - logger.info("Extracting %s subtitle from: %s" % (lan, file)) + logger.info("Extracting {0} subtitle from: {1}".format(lan, file)) print_cmd(command) result = 1 # set result to failed in case call fails. 
try: @@ -573,7 +574,7 @@ def extract_subs(file, newfilePath, bitbucket): shutil.copymode(file, outputFile) except: pass - logger.info("Extracting %s subtitle from %s has succeeded" % (lan, file)) + logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file)) else: logger.error("Extracting subtitles has failed") @@ -587,11 +588,11 @@ def processList(List, newDir, bitbucket): for item in List: ext = os.path.splitext(item)[1].lower() if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS: - logger.debug("Attempting to rip disk image: %s" % (item), "TRANSCODER") + logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER") newList.extend(ripISO(item, newDir, bitbucket)) remList.append(item) elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS: - logger.debug("Found VIDEO_TS image file: %s" % (item), "TRANSCODER") + logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER") if not vtsPath: try: vtsPath = re.match("(.+VIDEO_TS)", item).groups()[0] @@ -617,7 +618,7 @@ def processList(List, newDir, bitbucket): List.extend(newList) for item in remList: List.remove(item) - logger.debug("Successfully extracted .vob file %s from disk image" % (newList[0]), "TRANSCODER") + logger.debug("Successfully extracted .vob file {0} from disk image".format(newList[0]), "TRANSCODER") elif newList and not success: newList = [] remList = [] @@ -630,12 +631,12 @@ def ripISO(item, newDir, bitbucket): failure_dir = 'failure' # Mount the ISO in your OS and call combineVTS. if not core.SEVENZIP: - logger.error("No 7zip installed. Can't extract image file %s" % (item), "TRANSCODER") + logger.error("No 7zip installed. 
Can't extract image file {0}".format(item), "TRANSCODER") newFiles = [failure_dir] return newFiles cmd = [core.SEVENZIP, 'l', item] try: - logger.debug("Attempting to extract .vob from image file %s" % (item), "TRANSCODER") + logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER") print_cmd(cmd) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) out, err = proc.communicate() @@ -646,7 +647,7 @@ def ripISO(item, newDir, bitbucket): concat = [] m = 1 while True: - vtsName = 'VIDEO_TS%sVTS_%02d_%d.VOB' % (os.sep, n + 1, m) + vtsName = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m) if vtsName in fileList: concat.append(vtsName) m += 1 @@ -657,16 +658,16 @@ def ripISO(item, newDir, bitbucket): if core.CONCAT: combined.extend(concat) continue - name = '%s.cd%s' % (os.path.splitext(os.path.split(item)[1])[0], str(n + 1)) + name = '{0}.cd{1}'.format(os.path.splitext(os.path.split(item)[1])[0], str(n + 1)) newFiles.append({item: {'name': name, 'files': concat}}) if core.CONCAT: name = os.path.splitext(os.path.split(item)[1])[0] newFiles.append({item: {'name': name, 'files': combined}}) if not newFiles: - logger.error("No VIDEO_TS folder found in image file %s" % (item), "TRANSCODER") + logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER") newFiles = [failure_dir] except: - logger.error("Failed to extract from image file %s" % (item), "TRANSCODER") + logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER") newFiles = [failure_dir] return newFiles @@ -678,7 +679,7 @@ def combineVTS(vtsPath): concat = '' m = 1 while True: - vtsName = 'VTS_%02d_%d.VOB' % (n + 1, m) + vtsName = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m) if os.path.isfile(os.path.join(vtsPath, vtsName)): concat = concat + os.path.join(vtsPath, vtsName) + '|' m += 1 @@ -689,9 +690,9 @@ def combineVTS(vtsPath): if core.CONCAT: combined = combined + concat + '|' continue - 
newFiles.append('concat:%s' % concat[:-1]) + newFiles.append('concat:{0}'.format(concat[:-1])) if core.CONCAT: - newFiles.append('concat:%s' % combined[:-1]) + newFiles.append('concat:{0}'.format(combined[:-1])) return newFiles @@ -707,7 +708,7 @@ def combineCD(combine): else: break if concat: - newFiles.append('concat:%s' % concat[:-1]) + newFiles.append('concat:{0}'.format(concat[:-1])) return newFiles @@ -715,7 +716,7 @@ def print_cmd(command): cmd = "" for item in command: cmd = cmd + " " + str(item) - logger.debug("calling command:%s" % (cmd)) + logger.debug("calling command:{0}".format(cmd)) def Transcode_directory(dirName): @@ -756,11 +757,11 @@ def Transcode_directory(dirName): os.remove(newfilePath) except OSError as e: if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist - logger.debug("Error when removing transcoding target: %s" % (e)) + logger.debug("Error when removing transcoding target: {0}".format(e)) except Exception as e: - logger.debug("Error when removing transcoding target: %s" % (e)) + logger.debug("Error when removing transcoding target: {0}".format(e)) - logger.info("Transcoding video: %s" % (newfilePath)) + logger.info("Transcoding video: {0}".format(newfilePath)) print_cmd(command) result = 1 # set result to failed in case call fails. 
try: @@ -777,7 +778,7 @@ def Transcode_directory(dirName): proc.communicate() result = proc.returncode except: - logger.error("Transcoding of video %s has failed" % (newfilePath)) + logger.error("Transcoding of video {0} has failed".format(newfilePath)) if core.SUBSDIR and result == 0 and isinstance(file, str): for sub in get_subs(file): @@ -793,14 +794,14 @@ def Transcode_directory(dirName): shutil.copymode(file, newfilePath) except: pass - logger.info("Transcoding of video to %s succeeded" % (newfilePath)) + logger.info("Transcoding of video to {0} succeeded".format(newfilePath)) if os.path.isfile(newfilePath) and (file in newList or not core.DUPLICATE): try: os.unlink(file) except: pass else: - logger.error("Transcoding of video to %s failed with result %s" % (newfilePath, str(result))) + logger.error("Transcoding of video to {0} failed with result {1}".format(newfilePath, result)) # this will be 0 (successful) it all are successful, else will return a positive integer for failure. final_result = final_result + result if final_result == 0 and not core.DUPLICATE: diff --git a/core/transmissionrpc/client.py b/core/transmissionrpc/client.py index 6379e595..04c85ac0 100644 --- a/core/transmissionrpc/client.py +++ b/core/transmissionrpc/client.py @@ -100,7 +100,7 @@ def parse_torrent_ids(args): except ValueError: pass if not addition: - raise ValueError('Invalid torrent id, \"%s\"' % item) + raise ValueError('Invalid torrent id, {item!r}'.format(item=item)) ids.extend(addition) elif isinstance(args, (list, tuple)): for item in args: @@ -251,20 +251,20 @@ class Client(object): start = time.time() http_data = self._http_query(query, timeout) elapsed = time.time() - start - LOGGER.info('http request took %.3f s' % (elapsed)) + LOGGER.info('http request took {time:.3f} s'.format(time=elapsed)) try: data = json.loads(http_data) except ValueError as error: LOGGER.error('Error: ' + str(error)) - LOGGER.error('Request: \"%s\"' % (query)) - LOGGER.error('HTTP data: \"%s\"' % 
(http_data)) + LOGGER.error('Request: {request!r}'.format(request=query)) + LOGGER.error('HTTP data: {data!r}'.format(data=http_data)) raise LOGGER.debug(json.dumps(data, indent=2)) if 'result' in data: if data['result'] != 'success': - raise TransmissionError('Query failed with result \"%s\".' % (data['result'])) + raise TransmissionError('Query failed with result {result!r}.'.format(result=data['result'])) else: raise TransmissionError('Query failed without result.') @@ -348,8 +348,9 @@ class Client(object): Add a warning to the log if the Transmission RPC version is lower then the provided version. """ if self.rpc_version < version: - LOGGER.warning('Using feature not supported by server. RPC version for server %d, feature introduced in %d.' - % (self.rpc_version, version)) + LOGGER.warning('Using feature not supported by server. ' + 'RPC version for server {x}, feature introduced in {y}.'.format + (x=self.rpc_version, y=version)) def add_torrent(self, torrent, timeout=None, **kwargs): """ diff --git a/core/transmissionrpc/error.py b/core/transmissionrpc/error.py index 6b44bf32..ecd6bf11 100644 --- a/core/transmissionrpc/error.py +++ b/core/transmissionrpc/error.py @@ -19,7 +19,7 @@ class TransmissionError(Exception): def __str__(self): if self.original: original_name = type(self.original).__name__ - return '%s Original exception: %s, "%s"' % (self.message, original_name, str(self.original)) + return '{0} Original exception: {1}, "{2}"'.format(self.message, original_name, str(self.original)) else: return self.message @@ -49,10 +49,10 @@ class HTTPHandlerError(Exception): self.data = httpdata def __repr__(self): - return '' % (self.code, self.message) + return ''.format(self.code, self.message) def __str__(self): - return 'HTTPHandlerError %d: %s' % (self.code, self.message) + return 'HTTPHandlerError {0:d}: {1}'.format(self.code, self.message) def __unicode__(self): - return 'HTTPHandlerError %d: %s' % (self.code, self.message) + return 'HTTPHandlerError {0:d}: 
{1}'.format(self.code, self.message) diff --git a/core/transmissionrpc/httphandler.py b/core/transmissionrpc/httphandler.py index 2968762e..02d65fa7 100644 --- a/core/transmissionrpc/httphandler.py +++ b/core/transmissionrpc/httphandler.py @@ -75,7 +75,7 @@ class DefaultHTTPHandler(HTTPHandler): if hasattr(error.reason, 'args') and isinstance(error.reason.args, tuple) and len(error.reason.args) == 2: raise HTTPHandlerError(httpcode=error.reason.args[0], httpmsg=error.reason.args[1]) else: - raise HTTPHandlerError(httpmsg='urllib2.URLError: %s' % (error.reason)) + raise HTTPHandlerError(httpmsg='urllib2.URLError: {error.reason}'.format(error=error)) except BadStatusLine as error: - raise HTTPHandlerError(httpmsg='httplib.BadStatusLine: %s' % (error.line)) + raise HTTPHandlerError(httpmsg='httplib.BadStatusLine: {error.line}'.format(error=error)) return response.read().decode('utf-8') diff --git a/core/transmissionrpc/session.py b/core/transmissionrpc/session.py index 6b620373..bb2c1560 100644 --- a/core/transmissionrpc/session.py +++ b/core/transmissionrpc/session.py @@ -27,12 +27,12 @@ class Session(object): try: return self._fields[name].value except KeyError: - raise AttributeError('No attribute %s' % name) + raise AttributeError('No attribute {0}'.format(name)) def __str__(self): text = '' for key in sorted(self._fields.keys()): - text += "% 32s: %s\n" % (key[-32:], self._fields[key].value) + text += "{0:32}: {1}\n".format(key[-32:], self._fields[key].value) return text def _update_fields(self, other): diff --git a/core/transmissionrpc/torrent.py b/core/transmissionrpc/torrent.py index 54ee2a2d..21d4f367 100644 --- a/core/transmissionrpc/torrent.py +++ b/core/transmissionrpc/torrent.py @@ -73,14 +73,14 @@ class Torrent(object): tid = self._fields['id'].value name = self._get_name_string() if isinstance(name, str): - return '<Torrent %d \"%s\">' % (tid, name) + return '<Torrent {0} \"{1}\">'.format(tid, name) else: - return '<Torrent %d>' % (tid) + return '<Torrent {0}>'.format(tid) def __str__(self): name =
self._get_name_string() if isinstance(name, str): - return 'Torrent \"%s\"' % (name) + return 'Torrent \"{0}\"'.format(name) else: return 'Torrent' @@ -91,7 +91,7 @@ class Torrent(object): try: return self._fields[name].value except KeyError: - raise AttributeError('No attribute %s' % name) + raise AttributeError('No attribute {0}'.format(name)) def _rpc_version(self): """Get the Transmission RPC API version.""" diff --git a/core/transmissionrpc/utils.py b/core/transmissionrpc/utils.py index 0ac2a32a..9381edac 100644 --- a/core/transmissionrpc/utils.py +++ b/core/transmissionrpc/utils.py @@ -40,7 +40,7 @@ def format_timedelta(delta): """ minutes, seconds = divmod(delta.seconds, 60) hours, minutes = divmod(minutes, 60) - return '%d %02d:%02d:%02d' % (delta.days, hours, minutes, seconds) + return '{0:d} {1:02d}:{2:02d}:{3:02d}'.format(delta.days, hours, minutes, seconds) def format_timestamp(timestamp, utc=False): @@ -80,17 +80,17 @@ def inet_address(address, default_port, default_address='localhost'): try: port = int(addr[1]) except ValueError: - raise INetAddressError('Invalid address "%s".' % address) + raise INetAddressError('Invalid address "{0}".'.format(address)) if len(addr[0]) == 0: addr = default_address else: addr = addr[0] else: - raise INetAddressError('Invalid address "%s".' % address) + raise INetAddressError('Invalid address "{0}".'.format(address)) try: socket.getaddrinfo(addr, port, socket.AF_INET, socket.SOCK_STREAM) except socket.gaierror: - raise INetAddressError('Cannot look up address "%s".' 
% address) + raise INetAddressError('Cannot look up address "{0}".'.format(address)) return addr, port @@ -139,7 +139,7 @@ def argument_value_convert(method, argument, value, rpc_version): elif method in ('session-get', 'session-set'): args = constants.SESSION_ARGS[method[-3:]] else: - return ValueError('Method "%s" not supported' % (method)) + return ValueError('Method "{0}" not supported'.format(method)) if argument in args: info = args[argument] invalid_version = True @@ -155,14 +155,12 @@ def argument_value_convert(method, argument, value, rpc_version): if invalid_version: if replacement: LOGGER.warning( - 'Replacing requested argument "%s" with "%s".' - % (argument, replacement)) + 'Replacing requested argument "{0}" with "{1}".'.format(argument, replacement)) argument = replacement info = args[argument] else: raise ValueError( - 'Method "%s" Argument "%s" does not exist in version %d.' - % (method, argument, rpc_version)) + 'Method "{0}" Argument "{1}" does not exist in version {2:d}.'.format(method, argument, rpc_version)) return argument, TR_TYPE_MAP[info[0]](value) else: raise ValueError('Argument "%s" does not exists for method "%s".', @@ -178,7 +176,7 @@ def get_arguments(method, rpc_version): elif method in ('session-get', 'session-set'): args = constants.SESSION_ARGS[method[-3:]] else: - return ValueError('Method "%s" not supported' % (method)) + return ValueError('Method "{0}" not supported'.format(method)) accessible = [] for argument, info in iteritems(args): valid_version = True diff --git a/core/utorrent/upload.py b/core/utorrent/upload.py index ddf228cc..f8db659c 100644 --- a/core/utorrent/upload.py +++ b/core/utorrent/upload.py @@ -16,7 +16,7 @@ class MultiPartForm(object): return def get_content_type(self): - return 'multipart/form-data; boundary=%s' % self.boundary + return 'multipart/form-data; boundary={0}'.format(self.boundary) def add_field(self, name, value): """Add a simple field to the form data.""" @@ -43,7 +43,7 @@ class 
MultiPartForm(object): # Add the form fields parts.extend( [part_boundary, - 'Content-Disposition: form-data; name="%s"' % name, + 'Content-Disposition: form-data; name="{0}"'.format(name), '', value, ] @@ -53,8 +53,8 @@ class MultiPartForm(object): # Add the files to upload parts.extend( [part_boundary, - 'Content-Disposition: file; name="%s"; filename="%s"' % (field_name, filename), - 'Content-Type: %s' % content_type, + 'Content-Disposition: file; name="{0}"; filename="{1}"'.format(field_name, filename), + 'Content-Type: {0}'.format(content_type), '', body, ] diff --git a/nzbToMedia.py b/nzbToMedia.py index 7c11c35a..c599e217 100755 --- a/nzbToMedia.py +++ b/nzbToMedia.py @@ -514,7 +514,7 @@ from core import logger, nzbToMediaDB def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None): if core.SAFE_MODE and inputDirectory == core.NZB_DEFAULTDIR: logger.error( - 'The input directory:[%s] is the Default Download Directory. Please configure category directories to prevent processing of other media.' % ( + 'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format( inputDirectory)) return [-1, ""] @@ -522,7 +522,7 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down download_id = get_nzoid(inputName) if clientAgent != 'manual' and not core.DOWNLOADINFO: - logger.debug('Adding NZB download info for directory %s to database' % (inputDirectory)) + logger.debug('Adding NZB download info for directory {0} to database'.format(inputDirectory)) myDB = nzbToMediaDB.DBConnection() @@ -555,7 +555,7 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down section = core.CFG.findsection("ALL").isenabled() if section is None: logger.error( - 'Category:[%s] is not defined or is not enabled. 
Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.' % ( + 'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format( inputCategory)) return [-1, ""] else: @@ -563,15 +563,15 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down if len(section) > 1: logger.error( - 'Category:[%s] is not unique, %s are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.' % ( + 'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format( inputCategory, section.keys())) return [-1, ""] if section: sectionName = section.keys()[0] - logger.info('Auto-detected SECTION:%s' % (sectionName)) + logger.info('Auto-detected SECTION:{0}'.format(sectionName)) else: - logger.error("Unable to locate a section with subsection:%s enabled in your autoProcessMedia.cfg, exiting!" % ( + logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format( inputCategory)) return [-1, ""] @@ -582,20 +582,20 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down try: if int(section[usercat]['remote_path']) and not core.REMOTEPATHS: - logger.error('Remote Path is enabled for %s:%s but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!' % ( + logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format( sectionName, inputCategory)) return [-1, ""] except: - logger.error('Remote Path %s is not valid for %s:%s Please set this to either 0 to disable or 1 to enable!' 
% ( + logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format( section[usercat]['remote_path'], sectionName, inputCategory)) inputName, inputDirectory = convert_to_ascii(inputName, inputDirectory) if extract == 1: - logger.debug('Checking for archives to extract in directory: %s' % (inputDirectory)) + logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory)) extractFiles(inputDirectory) - logger.info("Calling %s:%s to post-process:%s" % (sectionName, inputCategory, inputName)) + logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, inputCategory, inputName)) if sectionName == "CouchPotato": result = autoProcessMovie().process(sectionName, inputDirectory, inputName, status, clientAgent, download_id, @@ -636,11 +636,11 @@ def main(args, section=None): clientAgent = core.NZB_CLIENTAGENT logger.info("#########################################################") - logger.info("## ..::[%s]::.. ##" % os.path.basename(__file__)) + logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__))) logger.info("#########################################################") # debug command line options - logger.debug("Options passed into nzbToMedia: %s" % args) + logger.debug("Options passed into nzbToMedia: {0}".format(args)) # Post-Processing Result result = [0, ""] @@ -650,15 +650,15 @@ def main(args, section=None): if os.environ.has_key('NZBOP_SCRIPTDIR'): # Check if the script is called from nzbget 11.0 or later if os.environ['NZBOP_VERSION'][0:5] < '11.0': - logger.error("NZBGet Version %s is not supported. Please update NZBGet." %(str(os.environ['NZBOP_VERSION']))) + logger.error("NZBGet Version {0} is not supported. Please update NZBGet.".format(os.environ['NZBOP_VERSION'])) sys.exit(core.NZBGET_POSTPROCESS_ERROR) - logger.info("Script triggered from NZBGet Version %s." 
%(str(os.environ['NZBOP_VERSION']))) + logger.info("Script triggered from NZBGet Version {0}.".format(os.environ['NZBOP_VERSION'])) # Check if the script is called from nzbget 13.0 or later if os.environ.has_key('NZBPP_TOTALSTATUS'): if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS': - logger.info("Download failed with status %s." %(os.environ['NZBPP_STATUS'])) + logger.info("Download failed with status {0}.".format(os.environ['NZBPP_STATUS'])) status = 1 else: @@ -745,16 +745,16 @@ def main(args, section=None): if not core.CFG[section][subsection].isenabled(): continue for dirName in getDirs(section, subsection, link = 'move'): - logger.info("Starting manual run for %s:%s - Folder:%s" % (section, subsection, dirName)) + logger.info("Starting manual run for {0}:{1} - Folder:{2}".format(section, subsection, dirName)) - logger.info("Checking database for download info for %s ..." % (os.path.basename(dirName))) + logger.info("Checking database for download info for {0} ...".format(os.path.basename(dirName))) core.DOWNLOADINFO = get_downloadInfo(os.path.basename(dirName), 0) if core.DOWNLOADINFO: logger.info( - "Found download info for %s, setting variables now ..." % (os.path.basename(dirName))) + "Found download info for {0}, setting variables now ...".format(os.path.basename(dirName))) else: logger.info( - 'Unable to locate download info for %s, continuing to try and process this release ...' % ( + 'Unable to locate download info for {0}, continuing to try and process this release ...'.format( os.path.basename(dirName)) ) @@ -781,19 +781,19 @@ def main(args, section=None): results = process(dirName, inputName, 0, clientAgent=clientAgent, download_id=download_id, inputCategory=subsection) if results[0] != 0: - logger.error("A problem was reported when trying to perform a manual run for %s:%s." 
% ( + logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format( section, subsection)) result = results if result[0] == 0: - logger.info("The %s script completed successfully." % args[0]) + logger.info("The {0} script completed successfully.".format(args[0])) if result[1]: print result[1] + "!" # For SABnzbd Status display. if os.environ.has_key('NZBOP_SCRIPTDIR'): # return code for nzbget v11 del core.MYAPP return (core.NZBGET_POSTPROCESS_SUCCESS) else: - logger.error("A problem was reported in the %s script." % args[0]) + logger.error("A problem was reported in the {0} script.".format(args[0])) if result[1]: print result[1] + "!" # For SABnzbd Status display. if os.environ.has_key('NZBOP_SCRIPTDIR'): # return code for nzbget v11 From df8c6bc20f34f5b0ee390f1cef57dc9ee179ed3b Mon Sep 17 00:00:00 2001 From: Labrys Date: Sun, 5 Jun 2016 13:35:46 -0400 Subject: [PATCH 21/21] Too broad exceptions. * Use .get() with default value instead. --- core/nzbToMediaAutoFork.py | 46 ++++-------------- core/nzbToMediaUserScript.py | 20 +++----- core/nzbToMediaUtil.py | 10 +--- core/transcoder/transcoder.py | 90 +++++++---------------------------- 4 files changed, 37 insertions(+), 129 deletions(-) diff --git a/core/nzbToMediaAutoFork.py b/core/nzbToMediaAutoFork.py index 76a11204..3e213fd8 100644 --- a/core/nzbToMediaAutoFork.py +++ b/core/nzbToMediaAutoFork.py @@ -11,44 +11,18 @@ from core import logger def autoFork(section, inputCategory): # auto-detect correct section # config settings - try: - host = core.CFG[section][inputCategory]["host"] - port = core.CFG[section][inputCategory]["port"] - except: - host = None - port = None - try: - username = core.CFG[section][inputCategory]["username"] - password = core.CFG[section][inputCategory]["password"] - except: - username = None - password = None + cfg = core.CFG[section][inputCategory] - try: - apikey = core.CFG[section][inputCategory]["apikey"] - except: - apikey = None - - try: - ssl = 
int(core.CFG[section][inputCategory]["ssl"]) - except: - ssl = 0 - - try: - web_root = core.CFG[section][inputCategory]["web_root"] - except: - web_root = "" - - try: - fork = core.FORKS.items()[core.FORKS.keys().index(core.CFG[section][inputCategory]["fork"])] - except: - fork = "auto" - - if ssl: - protocol = "https://" - else: - protocol = "http://" + host = cfg.get("host") + port = cfg.get("port") + username = cfg.get("username") + password = cfg.get("password") + apikey = cfg.get("apikey") + ssl = int(cfg.get("ssl", 0)) + web_root = cfg.get("web_root", "") + fork = core.FORKS.items()[core.FORKS.keys().index(cfg.get("fork", "auto"))] + protocol = "https://" if ssl else "http://" detected = False if section == "NzbDrone": diff --git a/core/nzbToMediaUserScript.py b/core/nzbToMediaUserScript.py index d05273be..a3a75dac 100644 --- a/core/nzbToMediaUserScript.py +++ b/core/nzbToMediaUserScript.py @@ -16,11 +16,10 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',') except: core.USER_SCRIPT_MEDIAEXTENSIONS = [] - try: - core.USER_SCRIPT = settings["user_script_path"] - except: - core.USER_SCRIPT = None - if core.USER_SCRIPT is None or core.USER_SCRIPT == "None": # do nothing and return success. + + core.USER_SCRIPT = settings.get("user_script_path") + + if not core.USER_SCRIPT or core.USER_SCRIPT == "None": # do nothing and return success. 
return [0, ""] try: core.USER_SCRIPT_PARAM = settings["user_script_param"] @@ -34,14 +33,9 @@ def external_script(outputDestination, torrentName, torrentLabel, settings): core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',') except: core.USER_SCRIPT_SUCCESSCODES = 0 - try: - core.USER_SCRIPT_CLEAN = int(settings["user_script_clean"]) - except: - core.USER_SCRIPT_CLEAN = 1 - try: - core.USER_SCRIPT_RUNONCE = int(settings["user_script_runOnce"]) - except: - core.USER_SCRIPT_RUNONCE = 1 + + core.USER_SCRIPT_CLEAN = int(settings.get("user_script_clean", 1)) + core.USER_SCRIPT_RUNONCE = int(settings.get("user_script_runOnce", 1)) if core.CHECK_MEDIA: for video in listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False): diff --git a/core/nzbToMediaUtil.py b/core/nzbToMediaUtil.py index 75c3571a..d600eceb 100644 --- a/core/nzbToMediaUtil.py +++ b/core/nzbToMediaUtil.py @@ -740,14 +740,8 @@ def cleanDir(path, section, subsection): logger.info('Doing Forceful Clean of {0}'.format(path), 'CLEANDIR') rmDir(path) return - try: - minSize = int(core.CFG[section][subsection]['minSize']) - except: - minSize = 0 - try: - delete_ignored = int(core.CFG[section][subsection]['delete_ignored']) - except: - delete_ignored = 0 + minSize = int(core.CFG[section][subsection].get('minSize', 0)) + delete_ignored = int(core.CFG[section][subsection].get('delete_ignored', 0)) try: num_files = len(listMediaFiles(path, minSize=minSize, delete_ignored=delete_ignored)) except: diff --git a/core/transcoder/transcoder.py b/core/transcoder/transcoder.py index a557c808..c5cb43a6 100644 --- a/core/transcoder/transcoder.py +++ b/core/transcoder/transcoder.py @@ -208,18 +208,9 @@ def buildCommands(file, newDir, movieName, bitbucket): for video in videoStreams: codec = video["codec_name"] - try: - fr = video["avg_frame_rate"] - except: - fr = 0 - try: - width = video["width"] - except: - width = 0 - try: - height = video["height"] - except: - height = 0 + fr 
= video.get("avg_frame_rate", 0) + width = video.get("width", 0) + height = video.get("height", 0) scale = core.VRESOLUTION if codec in core.VCODEC_ALLOW or not core.VCODEC: video_cmd.extend(['-c:v', 'copy']) @@ -270,26 +261,14 @@ def buildCommands(file, newDir, movieName, bitbucket): if audio2: # right language and codec... map_cmd.extend(['-map', '0:' + str(audio2[0]["index"])]) a_mapped.extend([audio2[0]["index"]]) - try: - bitrate = int(audio2[0]["bit_rate"]) / 1000 - except: - bitrate = 0 - try: - channels = int(audio2[0]["channels"]) - except: - channels = 0 + bitrate = int(audio2[0].get("bit_rate", 0)) / 1000 + channels = int(audio2[0].get("channels", 0)) audio_cmd.extend(['-c:a:' + str(used_audio), 'copy']) elif audio1: # right language wrong codec. map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])]) a_mapped.extend([audio1[0]["index"]]) - try: - bitrate = int(audio1[0]["bit_rate"]) / 1000 - except: - bitrate = 0 - try: - channels = int(audio1[0]["channels"]) - except: - channels = 0 + bitrate = int(audio1[0].get("bit_rate", 0)) / 1000 + channels = int(audio1[0].get("channels", 0)) if core.ACODEC: audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC]) else: @@ -297,14 +276,8 @@ def buildCommands(file, newDir, movieName, bitbucket): elif audio3: # just pick the default audio track map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])]) a_mapped.extend([audio3[0]["index"]]) - try: - bitrate = int(audio3[0]["bit_rate"]) / 1000 - except: - bitrate = 0 - try: - channels = int(audio3[0]["channels"]) - except: - channels = 0 + bitrate = int(audio3[0].get("bit_rate", 0)) / 1000 + channels = int(audio3[0].get("channels", 0)) if core.ACODEC: audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC]) else: @@ -331,26 +304,14 @@ def buildCommands(file, newDir, movieName, bitbucket): if audio4: # right language and codec. 
map_cmd.extend(['-map', '0:' + str(audio4[0]["index"])]) a_mapped.extend([audio4[0]["index"]]) - try: - bitrate = int(audio4[0]["bit_rate"]) / 1000 - except: - bitrate = 0 - try: - channels = int(audio4[0]["channels"]) - except: - channels = 0 + bitrate = int(audio4[0].get("bit_rate", 0)) / 1000 + channels = int(audio4[0].get("channels", 0)) audio_cmd2.extend(['-c:a:' + str(used_audio), 'copy']) elif audio1: # right language wrong codec. map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])]) a_mapped.extend([audio1[0]["index"]]) - try: - bitrate = int(audio1[0]["bit_rate"]) / 1000 - except: - bitrate = 0 - try: - channels = int(audio1[0]["channels"]) - except: - channels = 0 + bitrate = int(audio1[0].get("bit_rate", 0)) / 1000 + channels = int(audio1[0].get("channels", 0)) if core.ACODEC2: audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2]) else: @@ -358,14 +319,8 @@ def buildCommands(file, newDir, movieName, bitbucket): elif audio3: # just pick the default audio track map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])]) a_mapped.extend([audio3[0]["index"]]) - try: - bitrate = int(audio3[0]["bit_rate"]) / 1000 - except: - bitrate = 0 - try: - channels = int(audio3[0]["channels"]) - except: - channels = 0 + bitrate = int(audio3[0].get("bit_rate", 0)) / 1000 + channels = int(audio3[0].get("channels", 0)) if core.ACODEC2: audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2]) else: @@ -394,14 +349,8 @@ def buildCommands(file, newDir, movieName, bitbucket): used_audio += 1 map_cmd.extend(['-map', '0:' + str(audio["index"])]) audio_cmd3 = [] - try: - bitrate = int(audio["bit_rate"]) / 1000 - except: - bitrate = 0 - try: - channels = int(audio["channels"]) - except: - channels = 0 + bitrate = int(audio.get("bit_rate", 0)) / 1000 + channels = int(audio.get("channels", 0)) if audio["codec_name"] in core.ACODEC3_ALLOW: audio_cmd3.extend(['-c:a:' + str(used_audio), 'copy']) else: @@ -540,10 +489,7 @@ def extract_subs(file, newfilePath, bitbucket): for n 
in range(num): sub = subStreams[n] idx = sub["index"] - try: - lan = sub["tags"]["language"] - except: - lan = "unk" + lan = sub.get("tags", {}).get("language", "unk") if num == 1: outputFile = os.path.join(subdir, "{0}.srt".format(name))