Merge pull request #1050 from clinton-hall/code-cleanup-3

Code cleanup 3
Commit 0ce7582da5 by Labrys of Knossos, 2016-06-12 03:42:18 -04:00 (committed by GitHub)
24 changed files with 824 additions and 816 deletions


@ -1,28 +1,26 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# coding=utf-8
import datetime import datetime
import os import os
import time
import shutil
import sys import sys
import core import core
from subprocess import Popen from libs.six import text_type
from core import logger, nzbToMediaDB from core import logger, nzbToMediaDB
from core.nzbToMediaUtil import convert_to_ascii, CharReplace, plex_update from core.nzbToMediaUtil import convert_to_ascii, CharReplace, plex_update, replace_links
from core.nzbToMediaUserScript import external_script from core.nzbToMediaUserScript import external_script
def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent): def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent):
status = 1 # 1 = failed | 0 = success status = 1 # 1 = failed | 0 = success
root = 0 root = 0
foundFile = 0 foundFile = 0
uniquePath = 1
if clientAgent != 'manual' and not core.DOWNLOADINFO: if clientAgent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding TORRENT download info for directory {0} to database'.format(inputDirectory)) logger.debug('Adding TORRENT download info for directory {0} to database'.format(inputDirectory))
myDB = nzbToMediaDB.DBConnection() myDB = nzbToMediaDB.DBConnection()
encoded = False
inputDirectory1 = inputDirectory inputDirectory1 = inputDirectory
inputName1 = inputName inputName1 = inputName
@ -32,11 +30,11 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
except: except:
pass pass
controlValueDict = {"input_directory": unicode(inputDirectory1)} controlValueDict = {"input_directory": text_type(inputDirectory1)}
newValueDict = {"input_name": unicode(inputName1), newValueDict = {"input_name": text_type(inputName1),
"input_hash": unicode(inputHash), "input_hash": text_type(inputHash),
"input_id": unicode(inputID), "input_id": text_type(inputID),
"client_agent": unicode(clientAgent), "client_agent": text_type(clientAgent),
"status": 0, "status": 0,
"last_update": datetime.date.today().toordinal() "last_update": datetime.date.today().toordinal()
} }
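
For reference, six's text_type is unicode on Python 2 and str on Python 3, which is why it can replace the bare unicode() calls above. A minimal standalone sketch of the substitution (the dictionary values are illustrative, and plain six is imported here where the project vendors it as libs.six):

    from six import text_type

    # text_type coerces to unicode on Python 2 and str on Python 3,
    # so the same control/value dictionaries work under either interpreter.
    controlValueDict = {"input_directory": text_type("/downloads/Example.Torrent")}
    newValueDict = {"input_name": text_type("Example.Torrent"),
                    "input_hash": text_type("abc123"),
                    "status": 0}
    print(controlValueDict, newValueDict)
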
@ -53,10 +51,12 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
usercat = inputCategory usercat = inputCategory
try: try:
inputName = inputName.encode(core.SYS_ENCODING) inputName = inputName.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
try: try:
inputDirectory = inputDirectory.encode(core.SYS_ENCODING) inputDirectory = inputDirectory.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory)) logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory))
@ -86,25 +86,10 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
inputCategory)) inputCategory))
return [-1, ""] return [-1, ""]
try: Torrent_NoLink = int(section[usercat].get("Torrent_NoLink", 0))
Torrent_NoLink = int(section[usercat]["Torrent_NoLink"]) keep_archive = int(section[usercat].get("keep_archive", 0))
except: extract = int(section[usercat].get('extract', 0))
Torrent_NoLink = 0 uniquePath = int(section[usercat].get("unique_path", 1))
try:
keep_archive = int(section[usercat]["keep_archive"])
except:
keep_archive = 0
try:
extract = int(section[usercat]['extract'])
except:
extract = 0
try:
uniquePath = int(section[usercat]["unique_path"])
except:
uniquePath = 1
if clientAgent != 'manual': if clientAgent != 'manual':
core.pause_torrent(clientAgent, inputHash, inputID, inputName) core.pause_torrent(clientAgent, inputHash, inputID, inputName)
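
The four try/except blocks that previously supplied defaults are collapsed into .get() lookups. A minimal sketch of the pattern, using a plain dict in place of the real config section:

    # Plain dict standing in for the ConfigObj section used by the script.
    section = {"movies": {"Torrent_NoLink": "1"}}
    usercat = "movies"

    # One .get() per option replaces a four-line try/except per option.
    Torrent_NoLink = int(section[usercat].get("Torrent_NoLink", 0))
    keep_archive = int(section[usercat].get("keep_archive", 0))
    extract = int(section[usercat].get("extract", 0))
    uniquePath = int(section[usercat].get("unique_path", 1))
    print(Torrent_NoLink, keep_archive, extract, uniquePath)  # 1 0 0 1

Note one behavioural difference: the old bare except also swallowed a ValueError from int() on a malformed value, while .get() only covers a missing key.
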
@ -124,7 +109,8 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
core.os.path.join(core.OUTPUTDIRECTORY, inputCategory)) core.os.path.join(core.OUTPUTDIRECTORY, inputCategory))
try: try:
outputDestination = outputDestination.encode(core.SYS_ENCODING) outputDestination = outputDestination.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
if outputDestination in inputDirectory: if outputDestination in inputDirectory:
outputDestination = inputDirectory outputDestination = inputDirectory
@ -164,12 +150,13 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
"Setting outputDestination to {0} to preserve folder structure".format(os.path.dirname(targetFile))) "Setting outputDestination to {0} to preserve folder structure".format(os.path.dirname(targetFile)))
try: try:
targetFile = targetFile.encode(core.SYS_ENCODING) targetFile = targetFile.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
if root == 1: if root == 1:
if not foundFile: if not foundFile:
logger.debug("Looking for {0} in: {1}".format(inputName, inputFile)) logger.debug("Looking for {0} in: {1}".format(inputName, inputFile))
if (core.sanitizeName(inputName) in core.sanitizeName(inputFile)) or ( if any([core.sanitizeName(inputName) in core.sanitizeName(inputFile),
core.sanitizeName(fileName) in core.sanitizeName(inputName)): core.sanitizeName(fileName) in core.sanitizeName(inputName)]):
foundFile = True foundFile = True
logger.debug("Found file {0} that matches Torrent Name {1}".format(fullFileName, inputName)) logger.debug("Found file {0} that matches Torrent Name {1}".format(fullFileName, inputName))
else: else:
@ -183,7 +170,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
logger.debug("Looking for files with modified/created dates less than 5 minutes old.") logger.debug("Looking for files with modified/created dates less than 5 minutes old.")
if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)): if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)):
foundFile = True foundFile = True
logger.debug("Found file {0} with date modifed/created less than 5 minutes ago.".format(fullFileName)) logger.debug("Found file {0} with date modified/created less than 5 minutes ago.".format(fullFileName))
else: else:
continue # This file has not been recently moved or created, skip it continue # This file has not been recently moved or created, skip it
@ -200,7 +187,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory)) logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
core.extractFiles(inputDirectory, outputDestination, keep_archive) core.extractFiles(inputDirectory, outputDestination, keep_archive)
if not inputCategory in core.NOFLATTEN: #don't flatten hp in case multi cd albums, and we need to copy this back later. if inputCategory not in core.NOFLATTEN: # don't flatten hp in case multi cd albums, and we need to copy this back later.
core.flatten(outputDestination) core.flatten(outputDestination)
# Now check if video files exist in destination: # Now check if video files exist in destination:
@ -217,7 +204,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
logger.warning("Found no media files in {0}".format(outputDestination)) logger.warning("Found no media files in {0}".format(outputDestination))
# Only these sections can handle failed downloads so make sure everything else gets through without the check for failed # Only these sections can handle failed downloads so make sure everything else gets through without the check for failed
if not sectionName in ['CouchPotato', 'SickBeard', 'NzbDrone']: if sectionName not in ['CouchPotato', 'SickBeard', 'NzbDrone']:
status = 0 status = 0
logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, usercat, inputName)) logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, usercat, inputName))
@ -266,7 +253,7 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
for dirpath, dirs, files in os.walk(inputDirectory): for dirpath, dirs, files in os.walk(inputDirectory):
for file in files: for file in files:
logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath, file))) logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath, file)))
core.replace_links(os.path.join(dirpath,file)) replace_links(os.path.join(dirpath, file))
core.remove_torrent(clientAgent, inputHash, inputID, inputName) core.remove_torrent(clientAgent, inputHash, inputID, inputName)
if not sectionName == 'UserScript': # for user script, we assume this is cleaned by the script or option USER_SCRIPT_CLEAN if not sectionName == 'UserScript': # for user script, we assume this is cleaned by the script or option USER_SCRIPT_CLEAN
@ -323,32 +310,25 @@ def main(args):
os.path.basename(dirName)) os.path.basename(dirName))
) )
try: clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', ''))
clientAgent = str(core.DOWNLOADINFO[0]['client_agent']) inputHash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
except: inputID = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
clientAgent = 'manual'
try:
inputHash = str(core.DOWNLOADINFO[0]['input_hash'])
except:
inputHash = None
try:
inputID = str(core.DOWNLOADINFO[0]['input_id'])
except:
inputID = None
if clientAgent.lower() not in core.TORRENT_CLIENTS and clientAgent != 'manual': if clientAgent and clientAgent.lower() not in core.TORRENT_CLIENTS:
continue continue
try: try:
dirName = dirName.encode(core.SYS_ENCODING) dirName = dirName.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
inputName = os.path.basename(dirName) inputName = os.path.basename(dirName)
try: try:
inputName = inputName.encode(core.SYS_ENCODING) inputName = inputName.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
results = processTorrent(dirName, inputName, subsection, inputHash, inputID, results = processTorrent(dirName, inputName, subsection, inputHash or None, inputID or None,
clientAgent) clientAgent or 'manual')
if results[0] != 0: if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format( logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format(
section, subsection)) section, subsection))
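
The manual-run branch now reads the stored download info with .get(..., '') and restores the old defaults with or at the call site. A standalone sketch, with an invented record in place of the database row:

    from six import text_type

    # Illustrative row; the real one comes from core.DOWNLOADINFO.
    download_info = [{"client_agent": "", "input_hash": "abc123", "input_id": ""}]

    clientAgent = text_type(download_info[0].get("client_agent", ""))
    inputHash = text_type(download_info[0].get("input_hash", ""))
    inputID = text_type(download_info[0].get("input_id", ""))

    # Empty strings fall back to the previous defaults when the values are used.
    print(inputHash or None, inputID or None, clientAgent or "manual")
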


@ -265,8 +265,9 @@ def initialize(section=None):
# On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
sys.setdefaultencoding(SYS_ENCODING) sys.setdefaultencoding(SYS_ENCODING)
except: except:
print('Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable') print('Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable'
print('or find another way to force Python to use ' + SYS_ENCODING + ' for string encoding.') '\nor find another way to force Python to use {codec} for string encoding.'.format
(codec=SYS_ENCODING))
if 'NZBOP_SCRIPTDIR' in os.environ: if 'NZBOP_SCRIPTDIR' in os.environ:
sys.exit(NZBGET_POSTPROCESS_ERROR) sys.exit(NZBGET_POSTPROCESS_ERROR)
else: else:
@ -333,8 +334,9 @@ def initialize(section=None):
logger.error("Update wasn't successful, not restarting. Check your log for more information.") logger.error("Update wasn't successful, not restarting. Check your log for more information.")
# Set Current Version # Set Current Version
logger.info( logger.info('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format
'nzbToMedia Version:' + NZBTOMEDIA_VERSION + ' Branch:' + GIT_BRANCH + ' (' + platform.system() + ' ' + platform.release() + ')') (version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH,
system=platform.system(), release=platform.release()))
if int(CFG["WakeOnLan"]["wake"]) == 1: if int(CFG["WakeOnLan"]["wake"]) == 1:
WakeUp() WakeUp()
@ -842,7 +844,7 @@ def restart():
if popen_list: if popen_list:
popen_list += SYS_ARGV popen_list += SYS_ARGV
logger.log(u"Restarting nzbToMedia with " + str(popen_list)) logger.log(u"Restarting nzbToMedia with {args}".format(args=popen_list))
logger.close() logger.close()
p = subprocess.Popen(popen_list, cwd=os.getcwd()) p = subprocess.Popen(popen_list, cwd=os.getcwd())
p.wait() p.wait()
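
Most of the remaining hunks follow the same pattern: string concatenation replaced by str.format with named placeholders. A small sketch of the version banner, with placeholder values standing in for the module globals this commit uses:

    import platform

    NZBTOMEDIA_VERSION, GIT_BRANCH = "10.x", "master"  # illustrative values
    print('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format(
        version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH,
        system=platform.system(), release=platform.release()))
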


@ -46,7 +46,7 @@ class autoProcessMovie(object):
if not result['success']: if not result['success']:
if 'error' in result: if 'error' in result:
logger.error(str(result['error'])) logger.error('{0}'.format(result['error']))
else: else:
logger.error("no media found for id {0}".format(params['id'])) logger.error("no media found for id {0}".format(params['id']))
return results return results
@ -262,7 +262,7 @@ class autoProcessMovie(object):
if release_id: if release_id:
logger.postprocess("Setting failed release {0} to ignored ...".format(inputName), section) logger.postprocess("Setting failed release {0} to ignored ...".format(inputName), section)
url = baseURL + "/release.ignore" url = "{url}/release.ignore".format(url=baseURL)
params = {'id': release_id} params = {'id': release_id}
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)


@ -42,18 +42,17 @@ class InitialSchema(nzbToMediaDB.SchemaUpgrade):
cur_db_version = self.checkDBVersion() cur_db_version = self.checkDBVersion()
if cur_db_version < MIN_DB_VERSION: if cur_db_version < MIN_DB_VERSION:
logger.log_error_and_exit("Your database version (" + str( logger.log_error_and_exit(u"Your database version ({current}) is too old to migrate "
cur_db_version) + ") is too old to migrate from what this version of nzbToMedia supports (" + \ u"from what this version of nzbToMedia supports ({min})."
str(MIN_DB_VERSION) + ").\n" + \ u"\nPlease remove nzbtomedia.db file to begin fresh.".format
"Please remove nzbtomedia.db file to begin fresh." (current=cur_db_version, min=MIN_DB_VERSION))
)
if cur_db_version > MAX_DB_VERSION: if cur_db_version > MAX_DB_VERSION:
logger.log_error_and_exit("Your database version (" + str( logger.log_error_and_exit(u"Your database version ({current}) has been incremented "
cur_db_version) + ") has been incremented past what this version of nzbToMedia supports (" + \ u"past what this version of nzbToMedia supports ({max})."
str(MAX_DB_VERSION) + ").\n" + \ u"\nIf you have used other forks of nzbToMedia, your database "
"If you have used other forks of nzbToMedia, your database may be unusable due to their modifications." u"may be unusable due to their modifications.".format
) (current=cur_db_version, max=MAX_DB_VERSION))
if cur_db_version < MAX_DB_VERSION: # We need to upgrade. if cur_db_version < MAX_DB_VERSION: # We need to upgrade.
queries = [ queries = [
"CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));", "CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));",


@ -70,7 +70,7 @@ def extract(filePath, outputDestination):
if ext[1] in (".gz", ".bz2", ".lzma"): if ext[1] in (".gz", ".bz2", ".lzma"):
# Check if this is a tar # Check if this is a tar
if os.path.splitext(ext[0])[1] == ".tar": if os.path.splitext(ext[0])[1] == ".tar":
cmd = EXTRACT_COMMANDS[".tar" + ext[1]] cmd = EXTRACT_COMMANDS[".tar{ext}".format(ext=ext[1])]
elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"): elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"):
cmd = EXTRACT_COMMANDS[os.path.splitext(ext[0])[1]] cmd = EXTRACT_COMMANDS[os.path.splitext(ext[0])[1]]
elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives. elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives.
@ -131,7 +131,7 @@ def extract(filePath, outputDestination):
continue continue
cmd2 = cmd cmd2 = cmd
# append password here. # append password here.
passcmd = "-p" + password passcmd = "-p{pwd}".format(pwd=password)
cmd2.append(passcmd) cmd2.append(passcmd)
p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine.
res = p.wait() res = p.wait()
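
The extractor keys its command table off the combined ".tar" plus compression extension. A sketch of that lookup with a toy command table (the real EXTRACT_COMMANDS covers many more archive types):

    import os

    EXTRACT_COMMANDS = {".tar.gz": ["tar", "xzf"], ".tar.bz2": ["tar", "xjf"]}  # illustrative subset

    filePath = "example.tar.gz"
    ext = os.path.splitext(filePath)
    if ext[1] in (".gz", ".bz2", ".lzma") and os.path.splitext(ext[0])[1] == ".tar":
        cmd = EXTRACT_COMMANDS[".tar{ext}".format(ext=ext[1])]
        print(cmd)  # ['tar', 'xzf']
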


@ -1,6 +1,7 @@
# coding=utf-8 # coding=utf-8
import requests import requests
from six import iteritems
class GitHub(object): class GitHub(object):
@ -19,10 +20,11 @@ class GitHub(object):
Access the API at the path given and with the optional params given. Access the API at the path given and with the optional params given.
""" """
url = 'https://api.github.com/' + '/'.join(path) url = 'https://api.github.com/{path}'.format(path='/'.join(path))
if params and type(params) is dict: if params and type(params) is dict:
url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params.keys()]) url += '?{params}'.format(params='&'.join(['{key}={value}'.format(key=k, value=v)
for k, v in iteritems(params)]))
data = requests.get(url, verify=False) data = requests.get(url, verify=False)
@ -59,6 +61,6 @@ class GitHub(object):
Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/ Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/
""" """
access_API = self._access_API( access_API = self._access_API(
['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head], ['repos', self.github_repo_user, self.github_repo, 'compare', '{base}...{head}'.format(base=base, head=head)],
params={'per_page': per_page}) params={'per_page': per_page})
return access_API return access_API
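
six.iteritems keeps the query-string loop working on both Python 2 and 3. A self-contained sketch of the URL construction (the repository names are examples only):

    from six import iteritems

    path = ['repos', 'clinton-hall', 'nzbToMedia', 'commits']
    params = {'per_page': 1}

    url = 'https://api.github.com/{path}'.format(path='/'.join(path))
    if params and isinstance(params, dict):
        url += '?{params}'.format(params='&'.join('{key}={value}'.format(key=k, value=v)
                                                  for k, v in iteritems(params)))
    print(url)  # https://api.github.com/repos/clinton-hall/nzbToMedia/commits?per_page=1
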


@ -136,7 +136,7 @@ class NTMRotatingLogHandler(object):
i: Log number to use i: Log number to use
""" """
return self.log_file_path + ('.' + str(i) if i else '') return self.log_file_path + ('.{0}'.format(i) if i else '')
def _num_logs(self): def _num_logs(self):
""" """
@ -193,9 +193,9 @@ class NTMRotatingLogHandler(object):
self.writes_since_check += 1 self.writes_since_check += 1
try: try:
message = u"{0}: {1}".format(str(section).upper(), toLog) message = u"{0}: {1}".format(section.upper(), toLog)
except: except UnicodeError:
message = u"{0}: Message contains non-utf-8 string".format(str(section).upper()) message = u"{0}: Message contains non-utf-8 string".format(section.upper())
out_line = message out_line = message


@ -238,7 +238,7 @@ class ConfigObj(configobj.ConfigObj, Section):
process_section(section, subsection) process_section(section, subsection)
# create a backup of our old config # create a backup of our old config
CFG_OLD.filename = core.CONFIG_FILE + ".old" CFG_OLD.filename = "{config}.old".format(config=core.CONFIG_FILE)
CFG_OLD.write() CFG_OLD.write()
# write our new config to autoProcessMedia.cfg # write our new config to autoProcessMedia.cfg
@ -270,7 +270,7 @@ class ConfigObj(configobj.ConfigObj, Section):
envKeys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE'] envKeys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE']
cfgKeys = ['auto_update', 'check_media', 'safe_mode'] cfgKeys = ['auto_update', 'check_media', 'safe_mode']
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index] key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -280,7 +280,7 @@ class ConfigObj(configobj.ConfigObj, Section):
envKeys = ['MOUNTPOINTS'] envKeys = ['MOUNTPOINTS']
cfgKeys = ['mount_points'] cfgKeys = ['mount_points']
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index] key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -294,7 +294,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'wait_for', 'watch_dir'] 'wait_for', 'watch_dir']
if envCatKey in os.environ: if envCatKey in os.environ:
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_CPS' + envKeys[index] key = 'NZBPO_CPS{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -311,7 +311,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method'] 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
if envCatKey in os.environ: if envCatKey in os.environ:
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_SB' + envKeys[index] key = 'NZBPO_SB{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -328,7 +328,7 @@ class ConfigObj(configobj.ConfigObj, Section):
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path'] cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path']
if envCatKey in os.environ: if envCatKey in os.environ:
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_HP' + envKeys[index] key = 'NZBPO_HP{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -345,7 +345,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'remote_path'] 'remote_path']
if envCatKey in os.environ: if envCatKey in os.environ:
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_MY' + envKeys[index] key = 'NZBPO_MY{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -360,7 +360,7 @@ class ConfigObj(configobj.ConfigObj, Section):
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path'] cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if envCatKey in os.environ: if envCatKey in os.environ:
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_GZ' + envKeys[index] key = 'NZBPO_GZ{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -377,7 +377,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path'] 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if envCatKey in os.environ: if envCatKey in os.environ:
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_ND' + envKeys[index] key = 'NZBPO_ND{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -392,7 +392,7 @@ class ConfigObj(configobj.ConfigObj, Section):
envKeys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS'] envKeys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions'] cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index] key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -402,7 +402,7 @@ class ConfigObj(configobj.ConfigObj, Section):
envKeys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA'] envKeys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata'] cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index] key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -430,7 +430,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels', 'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
'outputAudioOtherChannels'] 'outputAudioOtherChannels']
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index] key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -440,7 +440,7 @@ class ConfigObj(configobj.ConfigObj, Section):
envKeys = ['WAKE', 'HOST', 'PORT', 'MAC'] envKeys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfgKeys = ['wake', 'host', 'port', 'mac'] cfgKeys = ['wake', 'host', 'port', 'mac']
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_WOL' + envKeys[index] key = 'NZBPO_WOL{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
@ -454,7 +454,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path'] 'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
if envCatKey in os.environ: if envCatKey in os.environ:
for index in range(len(envKeys)): for index in range(len(envKeys)):
key = 'NZBPO_' + envKeys[index] key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfgKeys[index]
value = os.environ[key] value = os.environ[key]
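
Every NZBPO_* block above uses the same index-parallel mapping of environment keys to config keys. A minimal sketch of that loop, with a made-up override:

    import os

    envKeys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE']
    cfgKeys = ['auto_update', 'check_media', 'safe_mode']
    cfg = {}

    os.environ['NZBPO_AUTO_UPDATE'] = '1'  # made-up override for the example
    for index in range(len(envKeys)):
        key = 'NZBPO_{index}'.format(index=envKeys[index])
        if key in os.environ:
            cfg[cfgKeys[index]] = os.environ[key]
    print(cfg)  # {'auto_update': '1'}
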


@ -56,28 +56,29 @@ class DBConnection(object):
while attempt < 5: while attempt < 5:
try: try:
if args is None: if args is None:
logger.log(self.filename + ": " + query, logger.DB) logger.log("{name}: {query}".format(name=self.filename, query=query), logger.DB)
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute(query) cursor.execute(query)
sqlResult = cursor.fetchone()[0] sqlResult = cursor.fetchone()[0]
else: else:
logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB) logger.log("{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB)
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute(query, args) cursor.execute(query, args)
sqlResult = cursor.fetchone()[0] sqlResult = cursor.fetchone()[0]
# get out of the connection attempt loop since we were successful # get out of the connection attempt loop since we were successful
break break
except sqlite3.OperationalError as e: except sqlite3.OperationalError as error:
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]: if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]:
logger.log(u"DB error: " + str(e), logger.WARNING) logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING)
attempt += 1 attempt += 1
time.sleep(1) time.sleep(1)
else: else:
logger.log(u"DB error: " + str(e), logger.ERROR) logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR)
raise raise
except sqlite3.DatabaseError as e: except sqlite3.DatabaseError as error:
logger.log(u"Fatal error executing query: " + str(e), logger.ERROR) logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise raise
return sqlResult return sqlResult
@ -98,26 +99,26 @@ class DBConnection(object):
sqlResult.append(self.connection.execute(qu[0])) sqlResult.append(self.connection.execute(qu[0]))
elif len(qu) > 1: elif len(qu) > 1:
if logTransaction: if logTransaction:
logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG) logger.log(u"{query} with args {args}".format(query=qu[0], args=qu[1]), logger.DEBUG)
sqlResult.append(self.connection.execute(qu[0], qu[1])) sqlResult.append(self.connection.execute(qu[0], qu[1]))
self.connection.commit() self.connection.commit()
logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG) logger.log(u"Transaction with {x} query's executed".format(x=len(querylist)), logger.DEBUG)
return sqlResult return sqlResult
except sqlite3.OperationalError as e: except sqlite3.OperationalError as error:
sqlResult = [] sqlResult = []
if self.connection: if self.connection:
self.connection.rollback() self.connection.rollback()
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]: if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]:
logger.log(u"DB error: " + str(e), logger.WARNING) logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING)
attempt += 1 attempt += 1
time.sleep(1) time.sleep(1)
else: else:
logger.log(u"DB error: " + str(e), logger.ERROR) logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR)
raise raise
except sqlite3.DatabaseError as e: except sqlite3.DatabaseError as error:
if self.connection: if self.connection:
self.connection.rollback() self.connection.rollback()
logger.log(u"Fatal error executing query: " + str(e), logger.ERROR) logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise raise
return sqlResult return sqlResult
@ -132,24 +133,25 @@ class DBConnection(object):
while attempt < 5: while attempt < 5:
try: try:
if args is None: if args is None:
logger.log(self.filename + ": " + query, logger.DB) logger.log(u"{name}: {query}".format(name=self.filename, query=query), logger.DB)
sqlResult = self.connection.execute(query) sqlResult = self.connection.execute(query)
else: else:
logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB) logger.log(u"{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB)
sqlResult = self.connection.execute(query, args) sqlResult = self.connection.execute(query, args)
self.connection.commit() self.connection.commit()
# get out of the connection attempt loop since we were successful # get out of the connection attempt loop since we were successful
break break
except sqlite3.OperationalError as e: except sqlite3.OperationalError as error:
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]: if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]:
logger.log(u"DB error: " + str(e), logger.WARNING) logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING)
attempt += 1 attempt += 1
time.sleep(1) time.sleep(1)
else: else:
logger.log(u"DB error: " + str(e), logger.ERROR) logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR)
raise raise
except sqlite3.DatabaseError as e: except sqlite3.DatabaseError as error:
logger.log(u"Fatal error executing query: " + str(e), logger.ERROR) logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise raise
return sqlResult return sqlResult
@ -167,17 +169,28 @@ class DBConnection(object):
changesBefore = self.connection.total_changes changesBefore = self.connection.total_changes
genParams = lambda myDict: [x + " = ?" for x in myDict.keys()] genParams = lambda myDict: ["{key} = ?".format(key=k) for k in myDict.keys()]
query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join( self.action(
genParams(keyDict)) "UPDATE {table} "
"SET {params} "
self.action(query, valueDict.values() + keyDict.values()) "WHERE {conditions}".format(
table=tableName,
params=", ".join(genParams(valueDict)),
conditions=" AND ".join(genParams(keyDict))),
valueDict.values() + keyDict.values()
)
if self.connection.total_changes == changesBefore: if self.connection.total_changes == changesBefore:
query = "INSERT OR IGNORE INTO " + tableName + " (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" + \ self.action(
" VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")" "INSERT OR IGNORE INTO {table} ({columns}) "
self.action(query, valueDict.values() + keyDict.values()) "VALUES ({values})".format(
table=tableName,
columns=", ".join(valueDict.keys() + keyDict.keys()),
values=", ".join(["?"] * len(valueDict.keys() + keyDict.keys()))
)
, valueDict.values() + keyDict.values()
)
def tableInfo(self, tableName): def tableInfo(self, tableName):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
@ -222,17 +235,22 @@ def prettyName(class_name):
def _processUpgrade(connection, upgradeClass): def _processUpgrade(connection, upgradeClass):
instance = upgradeClass(connection) instance = upgradeClass(connection)
logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG) logger.log(u"Checking {name} database upgrade".format
(name=prettyName(upgradeClass.__name__)), logger.DEBUG)
if not instance.test(): if not instance.test():
logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.MESSAGE) logger.log(u"Database upgrade required: {name}".format
(name=prettyName(upgradeClass.__name__)), logger.MESSAGE)
try: try:
instance.execute() instance.execute()
except sqlite3.DatabaseError as e: except sqlite3.DatabaseError as error:
print("Error in " + str(upgradeClass.__name__) + ": " + str(e)) print(u"Error in {name}: {msg}".format
(name=upgradeClass.__name__, msg=error))
raise raise
logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG) logger.log(u"{name} upgrade completed".format
(name=upgradeClass.__name__), logger.DEBUG)
else: else:
logger.log(upgradeClass.__name__ + " upgrade not required", logger.DEBUG) logger.log(u"{name} upgrade not required".format
(name=upgradeClass.__name__), logger.DEBUG)
for upgradeSubClass in upgradeClass.__subclasses__(): for upgradeSubClass in upgradeClass.__subclasses__():
_processUpgrade(connection, upgradeSubClass) _processUpgrade(connection, upgradeSubClass)
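
The reformatted upsert above keeps the original two-step logic: attempt an UPDATE first, and INSERT OR IGNORE only if nothing changed. A runnable sketch against an in-memory SQLite database, with a simplified table layout:

    import sqlite3

    connection = sqlite3.connect(":memory:")
    connection.execute("CREATE TABLE downloads (input_directory TEXT PRIMARY KEY, status INTEGER)")

    def upsert(tableName, valueDict, keyDict):
        genParams = lambda myDict: ["{key} = ?".format(key=k) for k in myDict]
        changesBefore = connection.total_changes
        connection.execute(
            "UPDATE {table} SET {params} WHERE {conditions}".format(
                table=tableName,
                params=", ".join(genParams(valueDict)),
                conditions=" AND ".join(genParams(keyDict))),
            list(valueDict.values()) + list(keyDict.values()))
        if connection.total_changes == changesBefore:  # no row matched, so insert
            columns = list(valueDict) + list(keyDict)
            connection.execute(
                "INSERT OR IGNORE INTO {table} ({columns}) VALUES ({values})".format(
                    table=tableName,
                    columns=", ".join(columns),
                    values=", ".join(["?"] * len(columns))),
                list(valueDict.values()) + list(keyDict.values()))

    upsert("downloads", {"status": 0}, {"input_directory": "/downloads/Example"})
    upsert("downloads", {"status": 1}, {"input_directory": "/downloads/Example"})
    print(connection.execute("SELECT * FROM downloads").fetchall())  # one row, status 1
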


@ -80,8 +80,8 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
continue continue
cmd = "" cmd = ""
for item in command: for item in command:
cmd = cmd + " " + item cmd = "{cmd} {item}".format(cmd=cmd, item=item)
logger.info("Running script {0} on file {1}.".format(cmd, filePath), "USERSCRIPT") logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=filePath), "USERSCRIPT")
try: try:
p = Popen(command) p = Popen(command)
res = p.wait() res = p.wait()


@ -342,11 +342,12 @@ def rmReadOnly(filename):
file_attribute = os.stat(filename)[0] file_attribute = os.stat(filename)[0]
if not file_attribute & stat.S_IWRITE: if not file_attribute & stat.S_IWRITE:
# File is read-only, so make it writeable # File is read-only, so make it writeable
logger.debug('Read only mode on file ' + filename + ' Will try to make it writeable') logger.debug('Read only mode on file {name}. Attempting to make it writeable'.format
(name=filename))
try: try:
os.chmod(filename, stat.S_IWRITE) os.chmod(filename, stat.S_IWRITE)
except: except:
logger.warning('Cannot change permissions of ' + filename, logger.WARNING) logger.warning('Cannot change permissions of {file}'.format(file=filename), logger.WARNING)
# Wake function # Wake function
@ -1156,11 +1157,11 @@ def server_responding(baseURL):
def plex_update(category): def plex_update(category):
if core.FAILED: if core.FAILED:
return return
if core.PLEXSSL: url = '{scheme}://{host}:{port}/library/sections/'.format(
url = "https://" scheme='https' if core.PLEXSSL else 'http',
else: host=core.PLEXHOST,
url = "http://" port=core.PLEXPORT,
url = url + core.PLEXHOST + ':' + core.PLEXPORT + '/library/sections/' )
section = None section = None
if not core.PLEXSEC: if not core.PLEXSEC:
return return
@ -1170,7 +1171,7 @@ def plex_update(category):
section = item[1] section = item[1]
if section: if section:
url = url + section + '/refresh?X-Plex-Token=' + core.PLEXTOKEN url = '{url}{section}/refresh?X-Plex-Token={token}'.format(url=url, section=section, token=core.PLEXTOKEN)
requests.get(url, timeout=(60, 120), verify=False) requests.get(url, timeout=(60, 120), verify=False)
logger.debug("Plex Library has been refreshed.", 'PLEX') logger.debug("Plex Library has been refreshed.", 'PLEX')
else: else:
@ -1180,27 +1181,27 @@ def plex_update(category):
def backupVersionedFile(old_file, version): def backupVersionedFile(old_file, version):
numTries = 0 numTries = 0
new_file = old_file + '.' + 'v' + str(version) new_file = '{old}.v{version}'.format(old=old_file, version=version)
while not os.path.isfile(new_file): while not os.path.isfile(new_file):
if not os.path.isfile(old_file): if not os.path.isfile(old_file):
logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG) logger.log(u"Not creating backup, {file} doesn't exist".format(file=old_file), logger.DEBUG)
break break
try: try:
logger.log(u"Trying to back up " + old_file + " to " + new_file, logger.DEBUG) logger.log(u"Trying to back up {old} to {new]".format(old=old_file, new=new_file), logger.DEBUG)
shutil.copy(old_file, new_file) shutil.copy(old_file, new_file)
logger.log(u"Backup done", logger.DEBUG) logger.log(u"Backup done", logger.DEBUG)
break break
except Exception as e: except Exception as error:
logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + str(e), logger.log(u"Error while trying to back up {old} to {new} : {msg}".format
logger.WARNING) (old=old_file, new=new_file, msg=error), logger.WARNING)
numTries += 1 numTries += 1
time.sleep(1) time.sleep(1)
logger.log(u"Trying again.", logger.DEBUG) logger.log(u"Trying again.", logger.DEBUG)
if numTries >= 10: if numTries >= 10:
logger.log(u"Unable to back up " + old_file + " to " + new_file + " please do it manually.", logger.ERROR) logger.log(u"Unable to back up {old} to {new} please do it manually.".format(old=old_file, new=new_file), logger.ERROR)
return False return False
return True return True
@ -1242,7 +1243,7 @@ class RunningProcess(object):
class WindowsProcess(object): class WindowsProcess(object):
def __init__(self): def __init__(self):
self.mutexname = "nzbtomedia_" + core.PID_FILE.replace('\\', '/') # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}" self.mutexname = "nzbtomedia_{pid}".format(pid=core.PID_FILE.replace('\\', '/')) # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}"
if platform.system() == 'Windows': if platform.system() == 'Windows':
from win32event import CreateMutex from win32event import CreateMutex
from win32api import CloseHandle, GetLastError from win32api import CloseHandle, GetLastError
@ -1274,7 +1275,7 @@ class PosixProcess(object):
def alreadyrunning(self): def alreadyrunning(self):
try: try:
self.lock_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) self.lock_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
self.lock_socket.bind('\0' + self.pidpath) self.lock_socket.bind('\0{path}'.format(path=self.pidpath))
self.lasterror = False self.lasterror = False
return self.lasterror return self.lasterror
except socket.error as e: except socket.error as e:
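
The rewritten plex_update builds its refresh URL in two format calls, picking the scheme from the SSL flag. A standalone sketch with made-up host, section, and token values:

    plexssl, plexhost, plexport = True, "localhost", "32400"   # made-up settings
    section, plextoken = "2", "abc123"

    url = '{scheme}://{host}:{port}/library/sections/'.format(
        scheme='https' if plexssl else 'http', host=plexhost, port=plexport)
    url = '{url}{section}/refresh?X-Plex-Token={token}'.format(url=url, section=section, token=plextoken)
    print(url)  # https://localhost:32400/library/sections/2/refresh?X-Plex-Token=abc123
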


@ -129,7 +129,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name): elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name):
name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name) name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name)
if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself. if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself.
core.VEXTENSION = '-transcoded' + core.VEXTENSION # adds '-transcoded.ext' core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext'
else: else:
img, data = iteritems(file).next() img, data = iteritems(file).next()
name = data['name'] name = data['name']
@ -165,7 +165,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
if core.VBITRATE: if core.VBITRATE:
video_cmd.extend(['-b:v', str(core.VBITRATE)]) video_cmd.extend(['-b:v', str(core.VBITRATE)])
if core.VRESOLUTION: if core.VRESOLUTION:
video_cmd.extend(['-vf', 'scale=' + core.VRESOLUTION]) video_cmd.extend(['-vf', 'scale={vres}'.format(vres=core.VRESOLUTION)])
if core.VPRESET: if core.VPRESET:
video_cmd.extend(['-preset', core.VPRESET]) video_cmd.extend(['-preset', core.VPRESET])
if core.VCRF: if core.VCRF:
@ -222,13 +222,19 @@ def buildCommands(file, newDir, movieName, bitbucket):
w_scale = width / float(scale.split(':')[0]) w_scale = width / float(scale.split(':')[0])
h_scale = height / float(scale.split(':')[1]) h_scale = height / float(scale.split(':')[1])
if w_scale > h_scale: # widescreen, Scale by width only. if w_scale > h_scale: # widescreen, Scale by width only.
scale = scale.split(':')[0] + ":" + str(int((height / w_scale) / 2) * 2) scale = "{width}:{height}".format(
width=scale.split(':')[0],
height=int((height / w_scale) / 2) * 2,
)
if w_scale > 1: if w_scale > 1:
video_cmd.extend(['-vf', 'scale=' + scale]) video_cmd.extend(['-vf', 'scale={width}'.format(width=scale)])
else: # lower or mathcing ratio, scale by height only. else: # lower or matching ratio, scale by height only.
scale = str(int((width / h_scale) / 2) * 2) + ":" + scale.split(':')[1] scale = "{width}:{height}".format(
width=int((width / h_scale) / 2) * 2,
height=scale.split(':')[1],
)
if h_scale > 1: if h_scale > 1:
video_cmd.extend(['-vf', 'scale=' + scale]) video_cmd.extend(['-vf', 'scale={height}'.format(height=scale)])
if core.VBITRATE: if core.VBITRATE:
video_cmd.extend(['-b:v', str(core.VBITRATE)]) video_cmd.extend(['-b:v', str(core.VBITRATE)])
if core.VPRESET: if core.VPRESET:
@ -242,7 +248,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
video_cmd[1] = core.VCODEC video_cmd[1] = core.VCODEC
if core.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding. if core.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding.
video_cmd = ['-c:v', 'copy'] video_cmd = ['-c:v', 'copy']
map_cmd.extend(['-map', '0:' + str(video["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=video["index"])])
break # Only one video needed break # Only one video needed
used_audio = 0 used_audio = 0
@ -259,40 +265,34 @@ def buildCommands(file, newDir, movieName, bitbucket):
audio3 = [] audio3 = []
if audio2: # right language and codec... if audio2: # right language and codec...
map_cmd.extend(['-map', '0:' + str(audio2[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio2[0]["index"])])
a_mapped.extend([audio2[0]["index"]]) a_mapped.extend([audio2[0]["index"]])
bitrate = int(audio2[0].get("bit_rate", 0)) / 1000 bitrate = int(audio2[0].get("bit_rate", 0)) / 1000
channels = int(audio2[0].get("channels", 0)) channels = int(audio2[0].get("channels", 0))
audio_cmd.extend(['-c:a:' + str(used_audio), 'copy']) audio_cmd.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio1: # right language wrong codec. elif audio1: # right language wrong codec.
map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]["index"])])
a_mapped.extend([audio1[0]["index"]]) a_mapped.extend([audio1[0]["index"]])
bitrate = int(audio1[0].get("bit_rate", 0)) / 1000 bitrate = int(audio1[0].get("bit_rate", 0)) / 1000
channels = int(audio1[0].get("channels", 0)) channels = int(audio1[0].get("channels", 0))
if core.ACODEC: audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy'])
audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC])
else:
audio_cmd.extend(['-c:a:' + str(used_audio), 'copy'])
elif audio3: # just pick the default audio track elif audio3: # just pick the default audio track
map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]["index"])])
a_mapped.extend([audio3[0]["index"]]) a_mapped.extend([audio3[0]["index"]])
bitrate = int(audio3[0].get("bit_rate", 0)) / 1000 bitrate = int(audio3[0].get("bit_rate", 0)) / 1000
channels = int(audio3[0].get("channels", 0)) channels = int(audio3[0].get("channels", 0))
if core.ACODEC: audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy'])
audio_cmd.extend(['-c:a:' + str(used_audio), core.ACODEC])
else:
audio_cmd.extend(['-c:a:' + str(used_audio), 'copy'])
if core.ACHANNELS and channels and channels > core.ACHANNELS: if core.ACHANNELS and channels and channels > core.ACHANNELS:
audio_cmd.extend(['-ac:a:' + str(used_audio), str(core.ACHANNELS)]) audio_cmd.extend(['-ac:a:{0}'.format(used_audio), str(core.ACHANNELS)])
if audio_cmd[1] == 'copy': if audio_cmd[1] == 'copy':
audio_cmd[1] = core.ACODEC audio_cmd[1] = core.ACODEC
if core.ABITRATE and not (core.ABITRATE * 0.9 < bitrate < core.ABITRATE * 1.1): if core.ABITRATE and not (core.ABITRATE * 0.9 < bitrate < core.ABITRATE * 1.1):
audio_cmd.extend(['-b:a:' + str(used_audio), str(core.ABITRATE)]) audio_cmd.extend(['-b:a:{0}'.format(used_audio), str(core.ABITRATE)])
if audio_cmd[1] == 'copy': if audio_cmd[1] == 'copy':
audio_cmd[1] = core.ACODEC audio_cmd[1] = core.ACODEC
if core.OUTPUTQUALITYPERCENT: if core.OUTPUTQUALITYPERCENT:
audio_cmd.extend(['-q:a:' + str(used_audio), str(core.OUTPUTQUALITYPERCENT)]) audio_cmd.extend(['-q:a:{0}'.format(used_audio), str(core.OUTPUTQUALITYPERCENT)])
if audio_cmd[1] == 'copy': if audio_cmd[1] == 'copy':
audio_cmd[1] = core.ACODEC audio_cmd[1] = core.ACODEC
if audio_cmd[1] in ['aac', 'dts']: if audio_cmd[1] in ['aac', 'dts']:
@ -302,40 +302,40 @@ def buildCommands(file, newDir, movieName, bitbucket):
used_audio += 1 used_audio += 1
audio4 = [item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW] audio4 = [item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW]
if audio4: # right language and codec. if audio4: # right language and codec.
map_cmd.extend(['-map', '0:' + str(audio4[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio4[0]["index"])])
a_mapped.extend([audio4[0]["index"]]) a_mapped.extend([audio4[0]["index"]])
bitrate = int(audio4[0].get("bit_rate", 0)) / 1000 bitrate = int(audio4[0].get("bit_rate", 0)) / 1000
channels = int(audio4[0].get("channels", 0)) channels = int(audio4[0].get("channels", 0))
audio_cmd2.extend(['-c:a:' + str(used_audio), 'copy']) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio1: # right language wrong codec. elif audio1: # right language wrong codec.
map_cmd.extend(['-map', '0:' + str(audio1[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]["index"])])
a_mapped.extend([audio1[0]["index"]]) a_mapped.extend([audio1[0]["index"]])
bitrate = int(audio1[0].get("bit_rate", 0)) / 1000 bitrate = int(audio1[0].get("bit_rate", 0)) / 1000
channels = int(audio1[0].get("channels", 0)) channels = int(audio1[0].get("channels", 0))
if core.ACODEC2: if core.ACODEC2:
audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2]) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2])
else: else:
audio_cmd2.extend(['-c:a:' + str(used_audio), 'copy']) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio3: # just pick the default audio track elif audio3: # just pick the default audio track
map_cmd.extend(['-map', '0:' + str(audio3[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]["index"])])
a_mapped.extend([audio3[0]["index"]]) a_mapped.extend([audio3[0]["index"]])
bitrate = int(audio3[0].get("bit_rate", 0)) / 1000 bitrate = int(audio3[0].get("bit_rate", 0)) / 1000
channels = int(audio3[0].get("channels", 0)) channels = int(audio3[0].get("channels", 0))
if core.ACODEC2: if core.ACODEC2:
audio_cmd2.extend(['-c:a:' + str(used_audio), core.ACODEC2]) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2])
else: else:
audio_cmd2.extend(['-c:a:' + str(used_audio), 'copy']) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy'])
if core.ACHANNELS2 and channels and channels > core.ACHANNELS2: if core.ACHANNELS2 and channels and channels > core.ACHANNELS2:
audio_cmd2.extend(['-ac:a:' + str(used_audio), str(core.ACHANNELS2)]) audio_cmd2.extend(['-ac:a:{0}'.format(used_audio), str(core.ACHANNELS2)])
if audio_cmd2[1] == 'copy': if audio_cmd2[1] == 'copy':
audio_cmd2[1] = core.ACODEC2 audio_cmd2[1] = core.ACODEC2
if core.ABITRATE2 and not (core.ABITRATE2 * 0.9 < bitrate < core.ABITRATE2 * 1.1): if core.ABITRATE2 and not (core.ABITRATE2 * 0.9 < bitrate < core.ABITRATE2 * 1.1):
audio_cmd2.extend(['-b:a:' + str(used_audio), str(core.ABITRATE2)]) audio_cmd2.extend(['-b:a:{0}'.format(used_audio), str(core.ABITRATE2)])
if audio_cmd2[1] == 'copy': if audio_cmd2[1] == 'copy':
audio_cmd2[1] = core.ACODEC2 audio_cmd2[1] = core.ACODEC2
if core.OUTPUTQUALITYPERCENT: if core.OUTPUTQUALITYPERCENT:
audio_cmd2.extend(['-q:a:' + str(used_audio), str(core.OUTPUTQUALITYPERCENT)]) audio_cmd2.extend(['-q:a:{0}'.format(used_audio), str(core.OUTPUTQUALITYPERCENT)])
if audio_cmd2[1] == 'copy': if audio_cmd2[1] == 'copy':
audio_cmd2[1] = core.ACODEC2 audio_cmd2[1] = core.ACODEC2
if audio_cmd2[1] in ['aac', 'dts']: if audio_cmd2[1] in ['aac', 'dts']:
@ -347,28 +347,28 @@ def buildCommands(file, newDir, movieName, bitbucket):
if audio["index"] in a_mapped: if audio["index"] in a_mapped:
continue continue
used_audio += 1 used_audio += 1
map_cmd.extend(['-map', '0:' + str(audio["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio["index"])])
audio_cmd3 = [] audio_cmd3 = []
bitrate = int(audio.get("bit_rate", 0)) / 1000 bitrate = int(audio.get("bit_rate", 0)) / 1000
channels = int(audio.get("channels", 0)) channels = int(audio.get("channels", 0))
if audio["codec_name"] in core.ACODEC3_ALLOW: if audio["codec_name"] in core.ACODEC3_ALLOW:
audio_cmd3.extend(['-c:a:' + str(used_audio), 'copy']) audio_cmd3.extend(['-c:a:{0}'.format(used_audio), 'copy'])
else: else:
if core.ACODEC3: if core.ACODEC3:
audio_cmd3.extend(['-c:a:' + str(used_audio), core.ACODEC3]) audio_cmd3.extend(['-c:a:{0}'.format(used_audio), core.ACODEC3])
else: else:
audio_cmd3.extend(['-c:a:' + str(used_audio), 'copy']) audio_cmd3.extend(['-c:a:{0}'.format(used_audio), 'copy'])
if core.ACHANNELS3 and channels and channels > core.ACHANNELS3: if core.ACHANNELS3 and channels and channels > core.ACHANNELS3:
audio_cmd3.extend(['-ac:a:' + str(used_audio), str(core.ACHANNELS3)]) audio_cmd3.extend(['-ac:a:{0}'.format(used_audio), str(core.ACHANNELS3)])
if audio_cmd3[1] == 'copy': if audio_cmd3[1] == 'copy':
audio_cmd3[1] = core.ACODEC3 audio_cmd3[1] = core.ACODEC3
if core.ABITRATE3 and not (core.ABITRATE3 * 0.9 < bitrate < core.ABITRATE3 * 1.1): if core.ABITRATE3 and not (core.ABITRATE3 * 0.9 < bitrate < core.ABITRATE3 * 1.1):
audio_cmd3.extend(['-b:a:' + str(used_audio), str(core.ABITRATE3)]) audio_cmd3.extend(['-b:a:{0}'.format(used_audio), str(core.ABITRATE3)])
if audio_cmd3[1] == 'copy': if audio_cmd3[1] == 'copy':
audio_cmd3[1] = core.ACODEC3 audio_cmd3[1] = core.ACODEC3
if core.OUTPUTQUALITYPERCENT > 0: if core.OUTPUTQUALITYPERCENT > 0:
audio_cmd3.extend(['-q:a:' + str(used_audio), str(core.OUTPUTQUALITYPERCENT)]) audio_cmd3.extend(['-q:a:{0}'.format(used_audio), str(core.OUTPUTQUALITYPERCENT)])
if audio_cmd3[1] == 'copy': if audio_cmd3[1] == 'copy':
audio_cmd3[1] = core.ACODEC3 audio_cmd3[1] = core.ACODEC3
if audio_cmd3[1] in ['aac', 'dts']: if audio_cmd3[1] in ['aac', 'dts']:
@ -386,7 +386,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
if core.BURN and not subs1 and not burnt and os.path.isfile(file): if core.BURN and not subs1 and not burnt and os.path.isfile(file):
for subfile in get_subs(file): for subfile in get_subs(file):
if lan in os.path.split(subfile)[1]: if lan in os.path.split(subfile)[1]:
video_cmd.extend(['-vf', 'subtitles=' + subfile]) video_cmd.extend(['-vf', 'subtitles={subs}'.format(subs=subfile)])
burnt = 1 burnt = 1
for sub in subs1: for sub in subs1:
if core.BURN and not burnt and os.path.isfile(inputFile): if core.BURN and not burnt and os.path.isfile(inputFile):
@ -395,11 +395,11 @@ def buildCommands(file, newDir, movieName, bitbucket):
if subStreams[index]["index"] == sub["index"]: if subStreams[index]["index"] == sub["index"]:
subloc = index subloc = index
break break
video_cmd.extend(['-vf', 'subtitles=' + inputFile + ':si=' + str(subloc)]) video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=inputFile, loc=subloc)])
burnt = 1 burnt = 1
if not core.ALLOWSUBS: if not core.ALLOWSUBS:
break break
map_cmd.extend(['-map', '0:' + str(sub["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=sub["index"])])
s_mapped.extend([sub["index"]]) s_mapped.extend([sub["index"]])
if core.SINCLUDE: if core.SINCLUDE:
@ -408,7 +408,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
break break
if sub["index"] in s_mapped: if sub["index"] in s_mapped:
continue continue
map_cmd.extend(['-map', '0:' + str(sub["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=sub["index"])])
s_mapped.extend([sub["index"]]) s_mapped.extend([sub["index"]])
if core.OUTPUTFASTSTART: if core.OUTPUTFASTSTART:
@ -430,9 +430,10 @@ def buildCommands(file, newDir, movieName, bitbucket):
continue continue
lan = os.path.splitext(os.path.splitext(subfile)[0])[1] lan = os.path.splitext(os.path.splitext(subfile)[0])[1]
command.extend(['-i', subfile]) command.extend(['-i', subfile])
meta_cmd.extend(['-metadata:s:s:' + str(len(s_mapped) + n), 'language=' + lan[1:]]) meta_cmd.extend(['-metadata:s:s:{x}'.format(x=len(s_mapped) + n),
'language={lang}'.format(lang=lan[1:])])
n += 1 n += 1
map_cmd.extend(['-map', str(n) + ':0']) map_cmd.extend(['-map', '{x}:0'.format(x=n)])
if not core.ALLOWSUBS or (not s_mapped and not n): if not core.ALLOWSUBS or (not s_mapped and not n):
sub_cmd.extend(['-sn']) sub_cmd.extend(['-sn'])
@ -500,8 +501,8 @@ def extract_subs(file, newfilePath, bitbucket):
if os.path.isfile(outputFile): if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n)) outputFile = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n))
command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', '-codec:' + str(idx), 'srt', command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an',
outputFile] '-codec:{index}'.format(index=idx), 'srt', outputFile]
if platform.system() != 'Windows': if platform.system() != 'Windows':
command = core.NICENESS + command command = core.NICENESS + command
@ -604,7 +605,9 @@ def ripISO(item, newDir, bitbucket):
if core.CONCAT: if core.CONCAT:
combined.extend(concat) combined.extend(concat)
continue continue
name = '{0}.cd{1}'.format(os.path.splitext(os.path.split(item)[1])[0], str(n + 1)) name = '{name}.cd{x}'.format(
name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1
)
newFiles.append({item: {'name': name, 'files': concat}}) newFiles.append({item: {'name': name, 'files': concat}})
if core.CONCAT: if core.CONCAT:
name = os.path.splitext(os.path.split(item)[1])[0] name = os.path.splitext(os.path.split(item)[1])[0]
@ -627,14 +630,14 @@ def combineVTS(vtsPath):
while True: while True:
vtsName = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m) vtsName = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
if os.path.isfile(os.path.join(vtsPath, vtsName)): if os.path.isfile(os.path.join(vtsPath, vtsName)):
concat = concat + os.path.join(vtsPath, vtsName) + '|' concat += '{file}|'.format(file=os.path.join(vtsPath, vtsName))
m += 1 m += 1
else: else:
break break
if not concat: if not concat:
break break
if core.CONCAT: if core.CONCAT:
combined = combined + concat + '|' combined += '{files}|'.format(files=concat)
continue continue
newFiles.append('concat:{0}'.format(concat[:-1])) newFiles.append('concat:{0}'.format(concat[:-1]))
if core.CONCAT: if core.CONCAT:
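The pipe-separated string assembled above feeds ffmpeg's concat protocol; a minimal sketch with assumed VOB names:

    parts = ['VTS_01_1.VOB', 'VTS_01_2.VOB', 'VTS_01_3.VOB']   # assumed parts of one title
    concat = ''
    for part in parts:
        concat += '{file}|'.format(file=part)
    new_file = 'concat:{0}'.format(concat[:-1])                # drop the trailing '|'
    # -> 'concat:VTS_01_1.VOB|VTS_01_2.VOB|VTS_01_3.VOB'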
@ -650,7 +653,7 @@ def combineCD(combine):
files = [file for file in combine if files = [file for file in combine if
n + 1 == int(re.match(".+[cC][dD]([0-9]+).", file).groups()[0]) and item in file] n + 1 == int(re.match(".+[cC][dD]([0-9]+).", file).groups()[0]) and item in file]
if files: if files:
concat = concat + files[0] + '|' concat += '{file}|'.format(file=files[0])
else: else:
break break
if concat: if concat:
@ -661,7 +664,7 @@ def combineCD(combine):
def print_cmd(command): def print_cmd(command):
cmd = "" cmd = ""
for item in command: for item in command:
cmd = cmd + " " + str(item) cmd = "{cmd} {item}".format(cmd=cmd, item=item)
logger.debug("calling command:{0}".format(cmd)) logger.debug("calling command:{0}".format(cmd))
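The formatted accumulation above could equally be written with a join; a sketch of that alternative (equivalent apart from the leading space, and not what the patch does):

    def print_cmd(command):
        # Space-separate every argument for logging in one step.
        # `logger` is the module's existing core.logger.
        cmd = ' '.join(str(item) for item in command)
        logger.debug("calling command:{0}".format(cmd))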
@ -141,15 +141,14 @@ class Client(object):
else: else:
self._query_timeout = DEFAULT_TIMEOUT self._query_timeout = DEFAULT_TIMEOUT
urlo = urlparse(address) urlo = urlparse(address)
if urlo.scheme == '': if not urlo.scheme:
base_url = 'http://' + address + ':' + str(port) self.url = 'http://{host}:{port}/transmission/rpc/'.format(host=address, port=port)
self.url = base_url + '/transmission/rpc/'
else: else:
if urlo.port: if urlo.port:
self.url = urlo.scheme + '://' + urlo.hostname + ':' + str(urlo.port) + urlo.path self.url = '{url.scheme}://{url.hostname}:{url.port}{url.path}'.format(url=urlo)
else: else:
self.url = urlo.scheme + '://' + urlo.hostname + urlo.path self.url = '{url.scheme}://{url.hostname}{url.path}'.format(url=urlo)
LOGGER.info('Using custom URL "' + self.url + '".') LOGGER.info('Using custom URL {url!r}.'.format(url=self.url))
if urlo.username and urlo.password: if urlo.username and urlo.password:
user = urlo.username user = urlo.username
password = urlo.password password = urlo.password
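For reference, how the new format strings expand with urlparse; the address and port below are assumed examples:

    from urlparse import urlparse   # urllib.parse on Python 3

    urlo = urlparse('https://seedbox.example:9091/transmission/rpc')
    url = '{url.scheme}://{url.hostname}:{url.port}{url.path}'.format(url=urlo)
    # -> 'https://seedbox.example:9091/transmission/rpc'

    # Without a scheme the client falls back to the default RPC location:
    url = 'http://{host}:{port}/transmission/rpc/'.format(host='localhost', port=9091)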
@ -256,7 +255,7 @@ class Client(object):
try: try:
data = json.loads(http_data) data = json.loads(http_data)
except ValueError as error: except ValueError as error:
LOGGER.error('Error: ' + str(error)) LOGGER.error('Error: {msg}'.format(msg=error))
LOGGER.error('Request: {request!r}'.format(request=query)) LOGGER.error('Request: {request!r}'.format(request=query))
LOGGER.error('HTTP data: {data!r}'.format(data=http_data)) LOGGER.error('HTTP data: {data!r}'.format(data=http_data))
raise raise
@ -31,7 +31,7 @@ def format_speed(size):
Format bytes per second speed into IEC prefixes, B/s, KiB/s, MiB/s ... Format bytes per second speed into IEC prefixes, B/s, KiB/s, MiB/s ...
""" """
(size, unit) = format_size(size) (size, unit) = format_size(size)
return size, unit + '/s' return size, '{unit}/s'.format(unit=unit)
def format_timedelta(delta): def format_timedelta(delta):
@ -127,7 +127,7 @@ class UTorrentClient(object):
def _action(self, params, body=None, content_type=None): def _action(self, params, body=None, content_type=None):
# about token, see https://github.com/bittorrent/webui/wiki/TokenSystem # about token, see https://github.com/bittorrent/webui/wiki/TokenSystem
url = self.base_url + '?token=' + self.token + '&' + urlencode(params) url = '{url}?token={token}&{params}'.format(url=self.url, token=self.token, params=urlencode(params))
request = Request(url) request = Request(url)
if body: if body:
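A small sketch of the token URL this builds; the WebUI address, token and parameters are assumed values:

    from urllib import urlencode    # urllib.parse.urlencode on Python 3

    base_url = 'http://localhost:8080/gui/'              # assumed uTorrent WebUI endpoint
    token = 'abc123'                                      # assumed token fetched beforehand
    params = [('action', 'start'), ('hash', 'DEADBEEF')]
    url = '{url}?token={token}&{params}'.format(url=base_url, token=token,
                                                params=urlencode(params))
    # -> 'http://localhost:8080/gui/?token=abc123&action=start&hash=DEADBEEF'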
@ -38,7 +38,7 @@ class MultiPartForm(object):
# Once the list is built, return a string where each # Once the list is built, return a string where each
# line is separated by '\r\n'. # line is separated by '\r\n'.
parts = [] parts = []
part_boundary = '--' + self.boundary part_boundary = '--{boundary}'.format(boundary=self.boundary)
# Add the form fields # Add the form fields
parts.extend( parts.extend(
@ -64,6 +64,6 @@ class MultiPartForm(object):
# Flatten the list and add closing boundary marker, # Flatten the list and add closing boundary marker,
# then return CR+LF separated data # then return CR+LF separated data
flattened = list(itertools.chain(*parts)) flattened = list(itertools.chain(*parts))
flattened.append('--' + self.boundary + '--') flattened.append('--{boundary}--'.format(boundary=self.boundary))
flattened.append('') flattened.append('')
return '\r\n'.join(flattened) return '\r\n'.join(flattened)
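Roughly, the boundary handling above flattens to a CR+LF separated body like this (boundary and field contents are assumed):

    import itertools

    boundary = 'FORM-BOUNDARY'                            # assumed example boundary
    part_boundary = '--{boundary}'.format(boundary=boundary)
    parts = [[part_boundary,
              'Content-Disposition: form-data; name="torrent_file"; filename="x.torrent"',
              'Content-Type: application/x-bittorrent',
              '',
              '<torrent bytes>']]
    flattened = list(itertools.chain(*parts))
    flattened.append('--{boundary}--'.format(boundary=boundary))
    flattened.append('')
    body = '\r\n'.join(flattened)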
@ -68,7 +68,7 @@ class CheckVersion(object):
logger.log(u"Version checking is disabled, not checking for the newest version") logger.log(u"Version checking is disabled, not checking for the newest version")
return False return False
logger.log(u"Checking if " + self.install_type + " needs an update") logger.log(u"Checking if {install} needs an update".format(install=self.install_type))
if not self.updater.need_update(): if not self.updater.need_update():
core.NEWEST_VERSION_STRING = None core.NEWEST_VERSION_STRING = None
logger.log(u"No update needed") logger.log(u"No update needed")
@ -113,18 +113,19 @@ class GitUpdateManager(UpdateManager):
test_cmd = 'version' test_cmd = 'version'
if core.GIT_PATH: if core.GIT_PATH:
main_git = '"' + core.GIT_PATH + '"' main_git = '"{git}"'.format(git=core.GIT_PATH)
else: else:
main_git = 'git' main_git = 'git'
logger.log(u"Checking if we can use git commands: " + main_git + ' ' + test_cmd, logger.DEBUG) logger.log(u"Checking if we can use git commands: {git} {cmd}".format
(git=main_git, cmd=test_cmd), logger.DEBUG)
output, err, exit_status = self._run_git(main_git, test_cmd) output, err, exit_status = self._run_git(main_git, test_cmd)
if exit_status == 0: if exit_status == 0:
logger.log(u"Using: " + main_git, logger.DEBUG) logger.log(u"Using: {git}".format(git=main_git), logger.DEBUG)
return main_git return main_git
else: else:
logger.log(u"Not using: " + main_git, logger.DEBUG) logger.log(u"Not using: {git}".format(git=main_git), logger.DEBUG)
# trying alternatives # trying alternatives
@ -142,18 +143,20 @@ class GitUpdateManager(UpdateManager):
logger.log(u"Trying known alternative git locations", logger.DEBUG) logger.log(u"Trying known alternative git locations", logger.DEBUG)
for cur_git in alternative_git: for cur_git in alternative_git:
logger.log(u"Checking if we can use git commands: " + cur_git + ' ' + test_cmd, logger.DEBUG) logger.log(u"Checking if we can use git commands: {git} {cmd}".format
(git=cur_git, cmd=test_cmd), logger.DEBUG)
output, err, exit_status = self._run_git(cur_git, test_cmd) output, err, exit_status = self._run_git(cur_git, test_cmd)
if exit_status == 0: if exit_status == 0:
logger.log(u"Using: " + cur_git, logger.DEBUG) logger.log(u"Using: {git}".format(git=cur_git), logger.DEBUG)
return cur_git return cur_git
else: else:
logger.log(u"Not using: " + cur_git, logger.DEBUG) logger.log(u"Not using: {git}".format(git=cur_git), logger.DEBUG)
# Still haven't found a working git # Still haven't found a working git
logger.debug( logger.debug('Unable to find your git executable - '
'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.') 'Set git_path in your autoProcessMedia.cfg OR '
'delete your .git folder and run from source to enable updates.')
return None return None
@ -167,10 +170,11 @@ class GitUpdateManager(UpdateManager):
exit_status = 1 exit_status = 1
return output, err, exit_status return output, err, exit_status
cmd = git_path + ' ' + args cmd = '{git} {args}'.format(git=git_path, args=args)
try: try:
logger.log(u"Executing " + cmd + " with your shell in " + core.PROGRAM_DIR, logger.DEBUG) logger.log(u"Executing {cmd} with your shell in {directory}".format
(cmd=cmd, directory=core.PROGRAM_DIR), logger.DEBUG)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
shell=True, cwd=core.PROGRAM_DIR) shell=True, cwd=core.PROGRAM_DIR)
output, err = p.communicate() output, err = p.communicate()
@ -179,29 +183,23 @@ class GitUpdateManager(UpdateManager):
if output: if output:
output = output.strip() output = output.strip()
if core.LOG_GIT: if core.LOG_GIT:
logger.log(u"git output: " + output, logger.DEBUG) logger.log(u"git output: {output}".format(output=output), logger.DEBUG)
except OSError: except OSError:
logger.log(u"Command " + cmd + " didn't work") logger.log(u"Command {cmd} didn't work".format(cmd=cmd))
exit_status = 1 exit_status = 1
exit_status = 128 if ('fatal:' in output) or err else exit_status
if exit_status == 0: if exit_status == 0:
logger.log(cmd + u" : returned successful", logger.DEBUG) logger.log(u"{cmd} : returned successful".format(cmd=cmd), logger.DEBUG)
exit_status = 0 exit_status = 0
elif core.LOG_GIT and exit_status in (1, 128):
elif exit_status == 1: logger.log(u"{cmd} returned : {output}".format
if core.LOG_GIT: (cmd=cmd, output=output), logger.DEBUG)
logger.log(cmd + u" returned : " + output, logger.DEBUG)
exit_status = 1
elif exit_status == 128 or 'fatal:' in output or err:
if core.LOG_GIT:
logger.log(cmd + u" returned : " + output, logger.DEBUG)
exit_status = 128
else: else:
if core.LOG_GIT: if core.LOG_GIT:
logger.log(cmd + u" returned : " + output + u", treat as error for now", logger.DEBUG) logger.log(u"{cmd} returned : {output}, treat as error for now".format
(cmd=cmd, output=output), logger.DEBUG)
exit_status = 1 exit_status = 1
return output, err, exit_status return output, err, exit_status
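A minimal sketch of the shell-out pattern used by _run_git (git path, arguments and working directory are assumed; the patch's extra status handling is omitted):

    import subprocess

    cmd = '{git} {args}'.format(git='git', args='version')
    p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT, shell=True, cwd='.')
    output, err = p.communicate()
    exit_status = p.returncode
    if output:
        output = output.strip()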
@ -285,21 +283,18 @@ class GitUpdateManager(UpdateManager):
logger.log(u"git didn't return numbers for behind and ahead, not using it", logger.DEBUG) logger.log(u"git didn't return numbers for behind and ahead, not using it", logger.DEBUG)
return return
logger.log( logger.log(u"cur_commit = {current} % (newest_commit)= {new}, "
u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) + u"num_commits_behind = {x}, num_commits_ahead = {y}".format
u", num_commits_behind = " + str(self._num_commits_behind) + u", num_commits_ahead = " + (current=self._cur_commit_hash, new=self._newest_commit_hash,
str(self._num_commits_ahead), logger.DEBUG) x=self._num_commits_behind, y=self._num_commits_ahead), logger.DEBUG)
def set_newest_text(self): def set_newest_text(self):
if self._num_commits_ahead: if self._num_commits_ahead:
logger.log(u"Local branch is ahead of " + self.branch + ". Automatic update not possible.", logger.ERROR) logger.log(u"Local branch is ahead of {branch}. Automatic update not possible.".format
elif self._num_commits_behind > 0: (branch=self.branch), logger.ERROR)
newest_text = 'There is a newer version available ' elif self._num_commits_behind:
newest_text += " (you're " + str(self._num_commits_behind) + " commit" logger.log(u"There is a newer version available (you're {x} commit{s} behind)".format
if self._num_commits_behind > 1: (x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE)
newest_text += 's'
newest_text += ' behind)'
logger.log(newest_text, logger.MESSAGE)
else: else:
return return
@ -313,8 +308,8 @@ class GitUpdateManager(UpdateManager):
else: else:
try: try:
self._check_github_for_update() self._check_github_for_update()
except Exception as e: except Exception as error:
logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR) logger.log(u"Unable to contact github, can't check for update: {msg!r}".format(msg=error), logger.ERROR)
return False return False
if self._num_commits_behind > 0: if self._num_commits_behind > 0:
@ -328,7 +323,7 @@ class GitUpdateManager(UpdateManager):
on the call's success. on the call's success.
""" """
output, err, exit_status = self._run_git(self._git_path, 'pull origin ' + self.branch) # @UnusedVariable output, err, exit_status = self._run_git(self._git_path, 'pull origin {branch}'.format(branch=self.branch)) # @UnusedVariable
if exit_status == 0: if exit_status == 0:
return True return True
@ -357,8 +352,8 @@ class SourceUpdateManager(UpdateManager):
try: try:
with open(version_file, 'r') as fp: with open(version_file, 'r') as fp:
self._cur_commit_hash = fp.read().strip(' \n\r') self._cur_commit_hash = fp.read().strip(' \n\r')
except EnvironmentError as e: except EnvironmentError as error:
logger.log(u"Unable to open 'version.txt': " + str(e), logger.DEBUG) logger.log(u"Unable to open 'version.txt': {msg}".format(msg=error), logger.DEBUG)
if not self._cur_commit_hash: if not self._cur_commit_hash:
self._cur_commit_hash = None self._cur_commit_hash = None
@ -371,8 +366,8 @@ class SourceUpdateManager(UpdateManager):
try: try:
self._check_github_for_update() self._check_github_for_update()
except Exception as e: except Exception as error:
logger.log(u"Unable to contact github, can't check for update: " + repr(e), logger.ERROR) logger.log(u"Unable to contact github, can't check for update: {msg!r}".format(msg=error), logger.ERROR)
return False return False
if not self._cur_commit_hash or self._num_commits_behind > 0: if not self._cur_commit_hash or self._num_commits_behind > 0:
@ -418,9 +413,8 @@ class SourceUpdateManager(UpdateManager):
# when _cur_commit_hash doesn't match anything _num_commits_behind == 100 # when _cur_commit_hash doesn't match anything _num_commits_behind == 100
self._num_commits_behind += 1 self._num_commits_behind += 1
logger.log( logger.log(u"cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}".format
u"cur_commit = " + str(self._cur_commit_hash) + u" % (newest_commit)= " + str(self._newest_commit_hash) + (current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG)
u", num_commits_behind = " + str(self._num_commits_behind), logger.DEBUG)
def set_newest_text(self): def set_newest_text(self):
@ -430,12 +424,8 @@ class SourceUpdateManager(UpdateManager):
if not self._cur_commit_hash: if not self._cur_commit_hash:
logger.log(u"Unknown current version number, don't know if we should update or not", logger.ERROR) logger.log(u"Unknown current version number, don't know if we should update or not", logger.ERROR)
elif self._num_commits_behind > 0: elif self._num_commits_behind > 0:
newest_text = 'There is a newer version available' logger.log(u"There is a newer version available (you're {x} commit{s} behind)".format
newest_text += " (you're " + str(self._num_commits_behind) + " commit" (x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE)
if self._num_commits_behind > 1:
newest_text += "s"
newest_text += " behind)"
logger.log(newest_text, logger.MESSAGE)
else: else:
return return
@ -443,8 +433,8 @@ class SourceUpdateManager(UpdateManager):
""" """
Downloads the latest source tarball from github and installs it over the existing version. Downloads the latest source tarball from github and installs it over the existing version.
""" """
base_url = 'https://github.com/' + self.github_repo_user + '/' + self.github_repo tar_download_url = 'https://github.com/{org}/{repo}/tarball/{branch}'.format(
tar_download_url = base_url + '/tarball/' + self.branch org=self.github_repo_user, repo=self.github_repo, branch=self.branch)
version_path = os.path.join(core.PROGRAM_DIR, u'version.txt') version_path = os.path.join(core.PROGRAM_DIR, u'version.txt')
try: try:
@ -452,45 +442,48 @@ class SourceUpdateManager(UpdateManager):
sb_update_dir = os.path.join(core.PROGRAM_DIR, u'sb-update') sb_update_dir = os.path.join(core.PROGRAM_DIR, u'sb-update')
if os.path.isdir(sb_update_dir): if os.path.isdir(sb_update_dir):
logger.log(u"Clearing out update folder " + sb_update_dir + " before extracting") logger.log(u"Clearing out update folder {dir} before extracting".format(dir=sb_update_dir))
shutil.rmtree(sb_update_dir) shutil.rmtree(sb_update_dir)
logger.log(u"Creating update folder " + sb_update_dir + " before extracting") logger.log(u"Creating update folder {dir} before extracting".format(dir=sb_update_dir))
os.makedirs(sb_update_dir) os.makedirs(sb_update_dir)
# retrieve file # retrieve file
logger.log(u"Downloading update from " + repr(tar_download_url)) logger.log(u"Downloading update from {url!r}".format(url=tar_download_url))
tar_download_path = os.path.join(sb_update_dir, u'nzbtomedia-update.tar') tar_download_path = os.path.join(sb_update_dir, u'nzbtomedia-update.tar')
urllib.urlretrieve(tar_download_url, tar_download_path) urllib.urlretrieve(tar_download_url, tar_download_path)
if not os.path.isfile(tar_download_path): if not os.path.isfile(tar_download_path):
logger.log(u"Unable to retrieve new version from " + tar_download_url + ", can't update", logger.ERROR) logger.log(u"Unable to retrieve new version from {url}, can't update".format
(url=tar_download_url), logger.ERROR)
return False return False
if not tarfile.is_tarfile(tar_download_path): if not tarfile.is_tarfile(tar_download_path):
logger.log(u"Retrieved version from " + tar_download_url + " is corrupt, can't update", logger.ERROR) logger.log(u"Retrieved version from {url} is corrupt, can't update".format
(url=tar_download_url), logger.ERROR)
return False return False
# extract to sb-update dir # extract to sb-update dir
logger.log(u"Extracting file " + tar_download_path) logger.log(u"Extracting file {path}".format(path=tar_download_path))
tar = tarfile.open(tar_download_path) tar = tarfile.open(tar_download_path)
tar.extractall(sb_update_dir) tar.extractall(sb_update_dir)
tar.close() tar.close()
# delete .tar.gz # delete .tar.gz
logger.log(u"Deleting file " + tar_download_path) logger.log(u"Deleting file {path}".format(path=tar_download_path))
os.remove(tar_download_path) os.remove(tar_download_path)
# find update dir name # find update dir name
update_dir_contents = [x for x in os.listdir(sb_update_dir) if update_dir_contents = [x for x in os.listdir(sb_update_dir) if
os.path.isdir(os.path.join(sb_update_dir, x))] os.path.isdir(os.path.join(sb_update_dir, x))]
if len(update_dir_contents) != 1: if len(update_dir_contents) != 1:
logger.log(u"Invalid update data, update failed: " + str(update_dir_contents), logger.ERROR) logger.log(u"Invalid update data, update failed: {0}".format(update_dir_contents), logger.ERROR)
return False return False
content_dir = os.path.join(sb_update_dir, update_dir_contents[0]) content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
# walk temp folder and move files to main folder # walk temp folder and move files to main folder
logger.log(u"Moving files from " + content_dir + " to " + core.PROGRAM_DIR) logger.log(u"Moving files from {source} to {destination}".format
(source=content_dir, destination=core.PROGRAM_DIR))
for dirname, dirnames, filenames in os.walk(content_dir): # @UnusedVariable for dirname, dirnames, filenames in os.walk(content_dir): # @UnusedVariable
dirname = dirname[len(content_dir) + 1:] dirname = dirname[len(content_dir) + 1:]
for curfile in filenames: for curfile in filenames:
@ -505,8 +498,9 @@ class SourceUpdateManager(UpdateManager):
os.chmod(new_path, stat.S_IWRITE) os.chmod(new_path, stat.S_IWRITE)
os.remove(new_path) os.remove(new_path)
os.renames(old_path, new_path) os.renames(old_path, new_path)
except Exception as e: except Exception as error:
logger.log(u"Unable to update " + new_path + ': ' + str(e), logger.DEBUG) logger.log(u"Unable to update {path}: {msg}".format
(path=new_path, msg=error), logger.DEBUG)
os.remove(old_path) # Trash the updated file without moving in new path os.remove(old_path) # Trash the updated file without moving in new path
continue continue
@ -518,13 +512,15 @@ class SourceUpdateManager(UpdateManager):
try: try:
with open(version_path, 'w') as ver_file: with open(version_path, 'w') as ver_file:
ver_file.write(self._newest_commit_hash) ver_file.write(self._newest_commit_hash)
except EnvironmentError as e: except EnvironmentError as error:
logger.log(u"Unable to write version file, update not complete: " + str(e), logger.ERROR) logger.log(u"Unable to write version file, update not complete: {msg}".format
(msg=error), logger.ERROR)
return False return False
except Exception as e: except Exception as error:
logger.log(u"Error while trying to update: " + str(e), logger.ERROR) logger.log(u"Error while trying to update: {msg}".format
logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG) (msg=error), logger.ERROR)
logger.log(u"Traceback: {error}".format(error=traceback.format_exc()), logger.DEBUG)
return False return False
return True return True
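Stripped of logging and error handling, the source-update path above amounts to roughly this (repository, branch and directories are assumed placeholders):

    import os
    import tarfile
    import urllib    # urllib.request.urlretrieve on Python 3

    tar_download_url = 'https://github.com/{org}/{repo}/tarball/{branch}'.format(
        org='clinton-hall', repo='nzbToMedia', branch='master')       # assumed values
    sb_update_dir = os.path.join('.', 'sb-update')
    tar_download_path = os.path.join(sb_update_dir, 'nzbtomedia-update.tar')

    if not os.path.isdir(sb_update_dir):
        os.makedirs(sb_update_dir)
    urllib.urlretrieve(tar_download_url, tar_download_path)
    if tarfile.is_tarfile(tar_download_path):
        tar = tarfile.open(tar_download_path)
        tar.extractall(sb_update_dir)
        tar.close()
        os.remove(tar_download_path)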
@ -1,7 +1,8 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# # coding=utf-8
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### # ##############################################################################
# ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones. # Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones.
# #
@ -9,10 +10,10 @@
# #
# NOTE: This script requires Python to be installed on your system. # NOTE: This script requires Python to be installed on your system.
############################################################################## # ##############################################################################
### OPTIONS ### # ### OPTIONS ###
## General # ## General
# Auto Update nzbToMedia (0, 1). # Auto Update nzbToMedia (0, 1).
# #
@ -29,7 +30,7 @@
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake. # Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
# safe_mode=1 # safe_mode=1
## CouchPotato # ## CouchPotato
# CouchPotato script category. # CouchPotato script category.
# #
@ -82,7 +83,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# cpsremote_path=0 # cpsremote_path=0
## Network # ## Network
# Network Mount Points (Needed for remote path above) # Network Mount Points (Needed for remote path above)
# #
@ -90,14 +91,14 @@
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\ # e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
# mountPoints= # mountPoints=
## Extensions # ## Extensions
# Media Extensions # Media Extensions
# #
# This is a list of media extensions that are used to verify that the download does contain valid media. # This is a list of media extensions that are used to verify that the download does contain valid media.
# mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts # mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts
## Posix # ## Posix
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
@ -114,7 +115,7 @@
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
# ionice_classdata=4 # ionice_classdata=4
## Transcoder # ## Transcoder
# getSubs (0, 1). # getSubs (0, 1).
# #
@ -133,7 +134,7 @@
# create a duplicate, or replace the original (0, 1). # create a duplicate, or replace the original (0, 1).
# #
# set to 1 to cretae a new file or 0 to replace the original # set to 1 to create a new file or 0 to replace the original
# duplicate=1 # duplicate=1
# ignore extensions. # ignore extensions.
@ -168,12 +169,12 @@
# allSubLanguages (0,1). # allSubLanguages (0,1).
# #
# allSubLanguages. 1 will keep all exisiting sub languages. 0 will discare those not in your list above. # allSubLanguages. 1 will keep all existing sub languages. 0 will discard those not in your list above.
# allSubLanguages=0 # allSubLanguages=0
# embedSubs (0,1). # embedSubs (0,1).
# #
# embedSubs. 1 will embded external sub/srt subs into your video if this is supported. # embedSubs. 1 will embed external sub/srt subs into your video if this is supported.
# embedSubs=1 # embedSubs=1
# burnInSubtitle (0,1). # burnInSubtitle (0,1).
@ -224,7 +225,7 @@
# outputAudioOtherBitrate=128k # outputAudioOtherBitrate=128k
# outputSubtitleCodec= # outputSubtitleCodec=
## WakeOnLan # ## WakeOnLan
# use WOL (0, 1). # use WOL (0, 1).
# #
@ -240,8 +241,9 @@
# wolhost=192.168.1.37 # wolhost=192.168.1.37
# wolport=80 # wolport=80
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## # ##############################################################################
import sys import sys
import nzbToMedia import nzbToMedia
@ -1,7 +1,8 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# coding=utf-8
# #
############################################################################## # ##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones. # Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones.
# #
@ -9,11 +10,11 @@
# #
# NOTE: This script requires Python to be installed on your system. # NOTE: This script requires Python to be installed on your system.
############################################################################## # ##############################################################################
# #
### OPTIONS ### # ### OPTIONS ###
## General # ## General
# Auto Update nzbToMedia (0, 1). # Auto Update nzbToMedia (0, 1).
# #
@ -25,7 +26,7 @@
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake. # Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
# safe_mode=1 # safe_mode=1
## Gamez # ## Gamez
# Gamez script category. # Gamez script category.
# #
@ -63,7 +64,7 @@
# set this to where your Gamez completed downloads are. # set this to where your Gamez completed downloads are.
# gzwatch_dir= # gzwatch_dir=
## Posix # ## Posix
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
@ -80,7 +81,7 @@
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
# ionice_classdata=4 # ionice_classdata=4
## WakeOnLan # ## WakeOnLan
# use WOL (0, 1). # use WOL (0, 1).
# #
@ -96,8 +97,9 @@
# wolhost=192.168.1.37 # wolhost=192.168.1.37
# wolport=80 # wolport=80
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## # ##############################################################################
import sys import sys
import nzbToMedia import nzbToMedia
@ -1,7 +1,8 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# # coding=utf-8
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### # ##############################################################################
# ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to HeadPhones. # Post-Process to HeadPhones.
# #
@ -9,10 +10,10 @@
# #
# NOTE: This script requires Python to be installed on your system. # NOTE: This script requires Python to be installed on your system.
############################################################################## # ##############################################################################
### OPTIONS # ### OPTIONS
## General # ## General
# Auto Update nzbToMedia (0, 1). # Auto Update nzbToMedia (0, 1).
# #
@ -24,7 +25,7 @@
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake. # Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
# safe_mode=1 # safe_mode=1
## HeadPhones # ## HeadPhones
# HeadPhones script category. # HeadPhones script category.
# #
@ -67,7 +68,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# hpremote_path=0 # hpremote_path=0
## Posix # ## Posix
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
@ -84,7 +85,7 @@
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
# ionice_classdata=4 # ionice_classdata=4
## Network # ## Network
# Network Mount Points (Needed for remote path above) # Network Mount Points (Needed for remote path above)
# #
@ -92,7 +93,7 @@
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\ # e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
# mountPoints= # mountPoints=
## WakeOnLan # ## WakeOnLan
# use WOL (0, 1). # use WOL (0, 1).
# #
@ -108,8 +109,9 @@
# wolhost=192.168.1.37 # wolhost=192.168.1.37
# wolport=80 # wolport=80
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## # ##############################################################################
import sys import sys
import nzbToMedia import nzbToMedia
@ -1,7 +1,8 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# coding=utf-8
# #
############################################################################## # ##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones. # Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones.
# #
@ -9,10 +10,10 @@
# #
# NOTE: This script requires Python to be installed on your system. # NOTE: This script requires Python to be installed on your system.
############################################################################## # ##############################################################################
### OPTIONS ### # ### OPTIONS ###
## General # ## General
# Auto Update nzbToMedia (0, 1). # Auto Update nzbToMedia (0, 1).
# #
@ -29,7 +30,7 @@
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake. # Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
# safe_mode=1 # safe_mode=1
## CouchPotato # ## CouchPotato
# CouchPotato script category. # CouchPotato script category.
# #
@ -77,7 +78,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# cpsremote_path=0 # cpsremote_path=0
## SickBeard # ## SickBeard
# SickBeard script category. # SickBeard script category.
# #
@ -133,7 +134,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# sbremote_path=0 # sbremote_path=0
## NzbDrone # ## NzbDrone
# NzbDrone script category. # NzbDrone script category.
# #
@ -176,7 +177,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# ndremote_path=0 # ndremote_path=0
## HeadPhones # ## HeadPhones
# HeadPhones script category. # HeadPhones script category.
# #
@ -209,7 +210,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# hpremote_path=0 # hpremote_path=0
## Mylar # ## Mylar
# Mylar script category. # Mylar script category.
# #
@ -250,7 +251,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# myremote_path=0 # myremote_path=0
## Gamez # ## Gamez
# Gamez script category. # Gamez script category.
# #
@ -288,7 +289,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# gzremote_path=0 # gzremote_path=0
## Network # ## Network
# Network Mount Points (Needed for remote path above) # Network Mount Points (Needed for remote path above)
# #
@ -296,14 +297,14 @@
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\ # e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
# mountPoints= # mountPoints=
## Extensions # ## Extensions
# Media Extensions # Media Extensions
# #
# This is a list of media extensions that are used to verify that the download does contain valid media. # This is a list of media extensions that are used to verify that the download does contain valid media.
# mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts # mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts
## Posix # ## Posix
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
@ -320,7 +321,7 @@
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
# ionice_classdata=4 # ionice_classdata=4
## Transcoder # ## Transcoder
# getSubs (0, 1). # getSubs (0, 1).
# #
@ -430,7 +431,7 @@
# outputAudioOtherBitrate=128k # outputAudioOtherBitrate=128k
# outputSubtitleCodec= # outputSubtitleCodec=
## WakeOnLan # ## WakeOnLan
# use WOL (0, 1). # use WOL (0, 1).
# #
@ -446,7 +447,7 @@
# wolhost=192.168.1.37 # wolhost=192.168.1.37
# wolport=80 # wolport=80
## UserScript # ## UserScript
# User Script category. # User Script category.
# #
@ -495,11 +496,16 @@
# Delay in seconds after processing. # Delay in seconds after processing.
# usdelay=120 # usdelay=120
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## # ##############################################################################
from __future__ import print_function
import os import os
import sys import sys
import datetime import datetime
from libs.six import text_type
import core import core
from core.autoProcess.autoProcessComics import autoProcessComics from core.autoProcess.autoProcessComics import autoProcessComics
from core.autoProcess.autoProcessGames import autoProcessGames from core.autoProcess.autoProcessGames import autoProcessGames
@ -510,6 +516,7 @@ from core.nzbToMediaUtil import getDirs, extractFiles, cleanDir, update_download
from core.nzbToMediaUserScript import external_script from core.nzbToMediaUserScript import external_script
from core import logger, nzbToMediaDB from core import logger, nzbToMediaDB
# post-processing # post-processing
def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None): def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):
if core.SAFE_MODE and inputDirectory == core.NZB_DEFAULTDIR: if core.SAFE_MODE and inputDirectory == core.NZB_DEFAULTDIR:
@ -526,7 +533,6 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down
myDB = nzbToMediaDB.DBConnection() myDB = nzbToMediaDB.DBConnection()
encoded = False
inputDirectory1 = inputDirectory inputDirectory1 = inputDirectory
inputName1 = inputName inputName1 = inputName
@ -536,11 +542,11 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down
except: except:
pass pass
controlValueDict = {"input_directory": unicode(inputDirectory1)} controlValueDict = {"input_directory": text_type(inputDirectory1)}
newValueDict = {"input_name": unicode(inputName1), newValueDict = {"input_name": text_type(inputName1),
"input_hash": unicode(download_id), "input_hash": text_type(download_id),
"input_id": unicode(download_id), "input_id": text_type(download_id),
"client_agent": unicode(clientAgent), "client_agent": text_type(clientAgent),
"status": 0, "status": 0,
"last_update": datetime.date.today().toordinal() "last_update": datetime.date.today().toordinal()
} }
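six.text_type is unicode on Python 2 and str on Python 3, so the values written to the database stay text on either interpreter; a tiny sketch:

    from libs.six import text_type   # bundled six, as imported at the top of the script

    download_id = 'SABnzbd_nzo_abc123'                    # assumed example id
    newValueDict = {"input_id": text_type(download_id)}
    # Equivalent to unicode(download_id) on Python 2 and str(download_id) on Python 3.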
@ -575,10 +581,7 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down
inputCategory)) inputCategory))
return [-1, ""] return [-1, ""]
try: extract = int(section[usercat].get('extract', 0))
extract = int(section[usercat]['extract'])
except:
extract = 0
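Swapping the try/except for .get keeps the same default without masking unrelated errors; a quick sketch with an assumed config section:

    # Assumed stand-in for the parsed autoProcessMedia.cfg section.
    section = {'movie': {'remote_path': '0'}}
    extract = int(section['movie'].get('extract', 0))     # missing key -> default 0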
try: try:
if int(section[usercat]['remote_path']) and not core.REMOTEPATHS: if int(section[usercat]['remote_path']) and not core.REMOTEPATHS:
@ -621,7 +624,7 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down
if clientAgent != 'manual': if clientAgent != 'manual':
# update download status in our DB # update download status in our DB
update_downloadInfoStatus(inputName, 1) update_downloadInfoStatus(inputName, 1)
if not sectionName in ['UserScript', 'NzbDrone']: if sectionName not in ['UserScript', 'NzbDrone']:
# cleanup our processing folders of any misc unwanted files and empty directories # cleanup our processing folders of any misc unwanted files and empty directories
cleanDir(inputDirectory, sectionName, inputCategory) cleanDir(inputDirectory, sectionName, inputCategory)
@ -632,9 +635,6 @@ def main(args, section=None):
# Initialize the config # Initialize the config
core.initialize(section) core.initialize(section)
# clientAgent for NZBs
clientAgent = core.NZB_CLIENTAGENT
logger.info("#########################################################") logger.info("#########################################################")
logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__))) logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__)))
logger.info("#########################################################") logger.info("#########################################################")
@ -647,7 +647,7 @@ def main(args, section=None):
status = 0 status = 0
# NZBGet # NZBGet
if os.environ.has_key('NZBOP_SCRIPTDIR'): if 'NZBOP_SCRIPTDIR' in os.environ:
# Check if the script is called from nzbget 11.0 or later # Check if the script is called from nzbget 11.0 or later
if os.environ['NZBOP_VERSION'][0:5] < '11.0': if os.environ['NZBOP_VERSION'][0:5] < '11.0':
logger.error("NZBGet Version {0} is not supported. Please update NZBGet.".format(os.environ['NZBOP_VERSION'])) logger.error("NZBGet Version {0} is not supported. Please update NZBGet.".format(os.environ['NZBOP_VERSION']))
@ -656,7 +656,7 @@ def main(args, section=None):
logger.info("Script triggered from NZBGet Version {0}.".format(os.environ['NZBOP_VERSION'])) logger.info("Script triggered from NZBGet Version {0}.".format(os.environ['NZBOP_VERSION']))
# Check if the script is called from nzbget 13.0 or later # Check if the script is called from nzbget 13.0 or later
if os.environ.has_key('NZBPP_TOTALSTATUS'): if 'NZBPP_TOTALSTATUS' in os.environ:
if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS': if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS':
logger.info("Download failed with status {0}.".format(os.environ['NZBPP_STATUS'])) logger.info("Download failed with status {0}.".format(os.environ['NZBPP_STATUS']))
status = 1 status = 1
@ -689,13 +689,13 @@ def main(args, section=None):
# Check for download_id to pass to CouchPotato # Check for download_id to pass to CouchPotato
download_id = "" download_id = ""
failureLink = None failureLink = None
if os.environ.has_key('NZBPR_COUCHPOTATO'): if 'NZBPR_COUCHPOTATO' in os.environ:
download_id = os.environ['NZBPR_COUCHPOTATO'] download_id = os.environ['NZBPR_COUCHPOTATO']
elif os.environ.has_key('NZBPR_DRONE'): elif 'NZBPR_DRONE' in os.environ:
download_id = os.environ['NZBPR_DRONE'] download_id = os.environ['NZBPR_DRONE']
elif os.environ.has_key('NZBPR_SONARR'): elif 'NZBPR_SONARR' in os.environ:
download_id = os.environ['NZBPR_SONARR'] download_id = os.environ['NZBPR_SONARR']
if os.environ.has_key('NZBPR__DNZB_FAILURE'): if 'NZBPR__DNZB_FAILURE' in os.environ:
failureLink = os.environ['NZBPR__DNZB_FAILURE'] failureLink = os.environ['NZBPR__DNZB_FAILURE']
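dict.has_key() no longer exists on Python 3, so the membership tests move to the in operator; a minimal sketch:

    import os

    # NZBGet exports nzb parameters such as NZBPR_COUCHPOTATO into the environment.
    download_id = os.environ['NZBPR_COUCHPOTATO'] if 'NZBPR_COUCHPOTATO' in os.environ else ""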
# All checks done, now launching the script. # All checks done, now launching the script.
@ -758,49 +758,45 @@ def main(args, section=None):
os.path.basename(dirName)) os.path.basename(dirName))
) )
try: clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', ''))
clientAgent = str(core.DOWNLOADINFO[0]['client_agent']) download_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
except:
clientAgent = 'manual'
try:
download_id = str(core.DOWNLOADINFO[0]['input_id'])
except:
download_id = None
if clientAgent.lower() not in core.NZB_CLIENTS and clientAgent != 'manual': if clientAgent and clientAgent.lower() not in core.NZB_CLIENTS:
continue continue
try: try:
dirName = dirName.encode(core.SYS_ENCODING) dirName = dirName.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
inputName = os.path.basename(dirName) inputName = os.path.basename(dirName)
try: try:
inputName = inputName.encode(core.SYS_ENCODING) inputName = inputName.encode(core.SYS_ENCODING)
except: pass except UnicodeError:
pass
results = process(dirName, inputName, 0, clientAgent=clientAgent, results = process(dirName, inputName, 0, clientAgent=clientAgent or 'manual',
download_id=download_id, inputCategory=subsection) download_id=download_id or None, inputCategory=subsection)
if results[0] != 0: if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format( logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format
section, subsection)) (section, subsection))
result = results result = results
if result[0] == 0: if result[0] == 0:
logger.info("The {0} script completed successfully.".format(args[0])) logger.info("The {0} script completed successfully.".format(args[0]))
if result[1]: if result[1]:
print result[1] + "!" # For SABnzbd Status display. print(result[1] + "!")
if os.environ.has_key('NZBOP_SCRIPTDIR'): # return code for nzbget v11 if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11
del core.MYAPP del core.MYAPP
return (core.NZBGET_POSTPROCESS_SUCCESS) return core.NZBGET_POSTPROCESS_SUCCESS
else: else:
logger.error("A problem was reported in the {0} script.".format(args[0])) logger.error("A problem was reported in the {0} script.".format(args[0]))
if result[1]: if result[1]:
print result[1] + "!" # For SABnzbd Status display. print(result[1] + "!")
if os.environ.has_key('NZBOP_SCRIPTDIR'): # return code for nzbget v11 if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11
del core.MYAPP del core.MYAPP
return (core.NZBGET_POSTPROCESS_ERROR) return core.NZBGET_POSTPROCESS_ERROR
del core.MYAPP del core.MYAPP
return (result[0]) return result[0]
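With from __future__ import print_function at the top of the module, print is a function on Python 2 as well, so the SABnzbd status line prints identically on both interpreters; a tiny sketch:

    from __future__ import print_function

    result = (0, "Download succeeded")    # assumed example result tuple
    if result[1]:
        print(result[1] + "!")            # picked up by SABnzbd for its status display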
if __name__ == '__main__': if __name__ == '__main__':
@ -1,7 +1,8 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# # coding=utf-8
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### # ##############################################################################
# ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to Mylar. # Post-Process to Mylar.
# #
@ -9,11 +10,11 @@
# #
# NOTE: This script requires Python to be installed on your system. # NOTE: This script requires Python to be installed on your system.
############################################################################## # ##############################################################################
# #
### OPTIONS # ### OPTIONS
## General # ## General
# Auto Update nzbToMedia (0, 1). # Auto Update nzbToMedia (0, 1).
# #
@ -25,7 +26,7 @@
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake. # Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
# safe_mode=1 # safe_mode=1
## Mylar # ## Mylar
# Mylar script category. # Mylar script category.
# #
@ -71,7 +72,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# myremote_path=0 # myremote_path=0
## Posix # ## Posix
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
@ -88,7 +89,7 @@
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
# ionice_classdata=4 # ionice_classdata=4
## Network # ## Network
# Network Mount Points (Needed for remote path above) # Network Mount Points (Needed for remote path above)
# #
@ -96,7 +97,7 @@
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\ # e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
# mountPoints= # mountPoints=
## WakeOnLan # ## WakeOnLan
# use WOL (0, 1). # use WOL (0, 1).
# #
@ -112,8 +113,9 @@
# wolhost=192.168.1.37 # wolhost=192.168.1.37
# wolport=80 # wolport=80
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## # ##############################################################################
import sys import sys
import nzbToMedia import nzbToMedia
@ -1,7 +1,8 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# # coding=utf-8
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### # ##############################################################################
# ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to NzbDrone. # Post-Process to NzbDrone.
# #
@ -9,10 +10,10 @@
# #
# NOTE: This script requires Python to be installed on your system. # NOTE: This script requires Python to be installed on your system.
############################################################################## # ##############################################################################
### OPTIONS ### # ### OPTIONS ###
## General # ## General
# Auto Update nzbToMedia (0, 1). # Auto Update nzbToMedia (0, 1).
# #
@ -29,7 +30,7 @@
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake. # Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
# safe_mode=1 # safe_mode=1
## NzbDrone # ## NzbDrone
# NzbDrone script category. # NzbDrone script category.
# #
@ -72,7 +73,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# ndremote_path=0 # ndremote_path=0
## Network # ## Network
# Network Mount Points (Needed for remote path above) # Network Mount Points (Needed for remote path above)
# #
@ -80,14 +81,14 @@
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\ # e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
# mountPoints= # mountPoints=
## Extensions # ## Extensions
# Media Extensions # Media Extensions
# #
# This is a list of media extensions that are used to verify that the download does contain valid media. # This is a list of media extensions that are used to verify that the download does contain valid media.
# mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts # mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts
## Posix # ## Posix
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
@ -104,7 +105,7 @@
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
# ionice_classdata=4 # ionice_classdata=4
## Transcoder # ## Transcoder
# getSubs (0, 1). # getSubs (0, 1).
# #
@ -123,7 +124,7 @@
# create a duplicate, or replace the original (0, 1). # create a duplicate, or replace the original (0, 1).
# #
# set to 1 to cretae a new file or 0 to replace the original # set to 1 to create a new file or 0 to replace the original
# duplicate=1 # duplicate=1
# ignore extensions. # ignore extensions.
@ -158,12 +159,12 @@
# allSubLanguages (0,1). # allSubLanguages (0,1).
# #
# allSubLanguages. 1 will keep all exisiting sub languages. 0 will discare those not in your list above. # allSubLanguages. 1 will keep all existing sub languages. 0 will discard those not in your list above.
# allSubLanguages = 0 # allSubLanguages = 0
# embedSubs (0,1). # embedSubs (0,1).
# #
# embedSubs. 1 will embded external sub/srt subs into your video if this is supported. # embedSubs. 1 will embed external sub/srt subs into your video if this is supported.
# embedSubs = 1 # embedSubs = 1
# burnInSubtitle (0,1). # burnInSubtitle (0,1).
@ -211,7 +212,7 @@
# outputAudioOtherBitrate = 128k # outputAudioOtherBitrate = 128k
# outputSubtitleCodec = # outputSubtitleCodec =
## WakeOnLan # ## WakeOnLan
# use WOL (0, 1). # use WOL (0, 1).
# #
@ -227,8 +228,9 @@
# wolhost=192.168.1.37 # wolhost=192.168.1.37
# wolport=80 # wolport=80
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## # ##############################################################################
import sys import sys
import nzbToMedia import nzbToMedia
@ -1,7 +1,8 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
# # coding=utf-8
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### # ##############################################################################
# ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to SickBeard. # Post-Process to SickBeard.
# #
@ -9,10 +10,10 @@
# #
# NOTE: This script requires Python to be installed on your system. # NOTE: This script requires Python to be installed on your system.
############################################################################## # ##############################################################################
### OPTIONS ### # ### OPTIONS ###
## General # ## General
# Auto Update nzbToMedia (0, 1). # Auto Update nzbToMedia (0, 1).
# #
@ -29,7 +30,7 @@
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake. # Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
# safe_mode=1 # safe_mode=1
## SickBeard # ## SickBeard
# SickBeard script category. # SickBeard script category.
# #
@ -85,7 +86,7 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
# sbremote_path=0 # sbremote_path=0
## Network # ## Network
# Network Mount Points (Needed for remote path above) # Network Mount Points (Needed for remote path above)
# #
@ -93,14 +94,14 @@
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\ # e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
# mountPoints= # mountPoints=
## Extensions # ## Extensions
# Media Extensions # Media Extensions
# #
# This is a list of media extensions that are used to verify that the download does contain valid media. # This is a list of media extensions that are used to verify that the download does contain valid media.
# mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts # mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.ts
## Posix # ## Posix
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
@ -117,7 +118,7 @@
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
# ionice_classdata=4 # ionice_classdata=4
## Transcoder # ## Transcoder
# getSubs (0, 1). # getSubs (0, 1).
# #
@ -136,7 +137,7 @@
# create a duplicate, or replace the original (0, 1). # create a duplicate, or replace the original (0, 1).
# #
# set to 1 to cretae a new file or 0 to replace the original # set to 1 to create a new file or 0 to replace the original
# duplicate=1 # duplicate=1
# ignore extensions. # ignore extensions.
@ -171,12 +172,12 @@
# allSubLanguages (0,1). # allSubLanguages (0,1).
# #
# allSubLanguages. 1 will keep all exisiting sub languages. 0 will discare those not in your list above. # allSubLanguages. 1 will keep all existing sub languages. 0 will discard those not in your list above.
# allSubLanguages=0 # allSubLanguages=0
# embedSubs (0,1). # embedSubs (0,1).
# #
# embedSubs. 1 will embded external sub/srt subs into your video if this is supported. # embedSubs. 1 will embed external sub/srt subs into your video if this is supported.
# embedSubs=1 # embedSubs=1
# burnInSubtitle (0,1). # burnInSubtitle (0,1).
@ -227,7 +228,7 @@
# outputAudioOtherBitrate=128k # outputAudioOtherBitrate=128k
# outputSubtitleCodec= # outputSubtitleCodec=
## WakeOnLan # ## WakeOnLan
# use WOL (0, 1). # use WOL (0, 1).
# #
@ -243,8 +244,9 @@
# wolhost=192.168.1.37 # wolhost=192.168.1.37
# wolport=80 # wolport=80
### NZBGET POST-PROCESSING SCRIPT ### # ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## # ##############################################################################
import sys import sys
import nzbToMedia import nzbToMedia