Fix quotes - standardize to single-quoted strings

This commit is contained in:
Labrys of Knossos 2018-12-29 14:05:37 -05:00
commit c5343889fb
30 changed files with 1257 additions and 1257 deletions

View file

@ -36,24 +36,24 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
except Exception: except Exception:
pass pass
control_value_dict = {"input_directory": text_type(input_directory1)} control_value_dict = {'input_directory': text_type(input_directory1)}
new_value_dict = { new_value_dict = {
"input_name": text_type(input_name1), 'input_name': text_type(input_name1),
"input_hash": text_type(input_hash), 'input_hash': text_type(input_hash),
"input_id": text_type(input_id), 'input_id': text_type(input_id),
"client_agent": text_type(client_agent), 'client_agent': text_type(client_agent),
"status": 0, 'status': 0,
"last_update": datetime.date.today().toordinal(), 'last_update': datetime.date.today().toordinal(),
} }
my_db.upsert("downloads", new_value_dict, control_value_dict) my_db.upsert('downloads', new_value_dict, control_value_dict)
logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(input_directory, input_name, input_category)) logger.debug('Received Directory: {0} | Name: {1} | Category: {2}'.format(input_directory, input_name, input_category))
# Confirm the category by parsing directory structure # Confirm the category by parsing directory structure
input_directory, input_name, input_category, root = core.category_search(input_directory, input_name, input_category, input_directory, input_name, input_category, root = core.category_search(input_directory, input_name, input_category,
root, core.CATEGORIES) root, core.CATEGORIES)
if input_category == "": if input_category == '':
input_category = "UNCAT" input_category = 'UNCAT'
usercat = input_category usercat = input_category
try: try:
@ -65,45 +65,45 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
except UnicodeError: except UnicodeError:
pass pass
logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format logger.debug('Determined Directory: {0} | Name: {1} | Category: {2}'.format
(input_directory, input_name, input_category)) (input_directory, input_name, input_category))
# auto-detect section # auto-detect section
section = core.CFG.findsection(input_category).isenabled() section = core.CFG.findsection(input_category).isenabled()
if section is None: if section is None:
section = core.CFG.findsection("ALL").isenabled() section = core.CFG.findsection('ALL').isenabled()
if section is None: if section is None:
logger.error('Category:[{0}] is not defined or is not enabled. ' logger.error('Category:[{0}] is not defined or is not enabled. '
'Please rename it or ensure it is enabled for the appropriate section ' 'Please rename it or ensure it is enabled for the appropriate section '
'in your autoProcessMedia.cfg and try again.'.format 'in your autoProcessMedia.cfg and try again.'.format
(input_category)) (input_category))
return [-1, ""] return [-1, '']
else: else:
usercat = "ALL" usercat = 'ALL'
if len(section) > 1: if len(section) > 1:
logger.error('Category:[{0}] is not unique, {1} are using it. ' logger.error('Category:[{0}] is not unique, {1} are using it. '
'Please rename it or disable all other sections using the same category name ' 'Please rename it or disable all other sections using the same category name '
'in your autoProcessMedia.cfg and try again.'.format 'in your autoProcessMedia.cfg and try again.'.format
(usercat, section.keys())) (usercat, section.keys()))
return [-1, ""] return [-1, '']
if section: if section:
section_name = section.keys()[0] section_name = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(section_name)) logger.info('Auto-detected SECTION:{0}'.format(section_name))
else: else:
logger.error("Unable to locate a section with subsection:{0} " logger.error('Unable to locate a section with subsection:{0} '
"enabled in your autoProcessMedia.cfg, exiting!".format 'enabled in your autoProcessMedia.cfg, exiting!'.format
(input_category)) (input_category))
return [-1, ""] return [-1, '']
section = dict(section[section_name][usercat]) # Type cast to dict() to allow effective usage of .get() section = dict(section[section_name][usercat]) # Type cast to dict() to allow effective usage of .get()
torrent_no_link = int(section.get("Torrent_NoLink", 0)) torrent_no_link = int(section.get('Torrent_NoLink', 0))
keep_archive = int(section.get("keep_archive", 0)) keep_archive = int(section.get('keep_archive', 0))
extract = int(section.get('extract', 0)) extract = int(section.get('extract', 0))
extensions = section.get('user_script_mediaExtensions', "").lower().split(',') extensions = section.get('user_script_mediaExtensions', '').lower().split(',')
unique_path = int(section.get("unique_path", 1)) unique_path = int(section.get('unique_path', 1))
if client_agent != 'manual': if client_agent != 'manual':
core.pause_torrent(client_agent, input_hash, input_id, input_name) core.pause_torrent(client_agent, input_hash, input_id, input_name)
@ -117,7 +117,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
output_destination = os.path.join(core.OUTPUTDIRECTORY, input_category, basename) output_destination = os.path.join(core.OUTPUTDIRECTORY, input_category, basename)
elif unique_path: elif unique_path:
output_destination = os.path.normpath( output_destination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitize_name(input_name).replace(" ", "."))) core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitize_name(input_name).replace(' ', '.')))
else: else:
output_destination = os.path.normpath( output_destination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, input_category)) core.os.path.join(core.OUTPUTDIRECTORY, input_category))
@ -129,15 +129,15 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
if output_destination in input_directory: if output_destination in input_directory:
output_destination = input_directory output_destination = input_directory
logger.info("Output directory set to: {0}".format(output_destination)) logger.info('Output directory set to: {0}'.format(output_destination))
if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULTDIR: if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULTDIR:
logger.error('The output directory:[{0}] is the Download Directory. ' logger.error('The output directory:[{0}] is the Download Directory. '
'Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format 'Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format
(input_directory)) (input_directory))
return [-1, ""] return [-1, '']
logger.debug("Scanning files in directory: {0}".format(input_directory)) logger.debug('Scanning files in directory: {0}'.format(input_directory))
if section_name in ['HeadPhones', 'Lidarr']: if section_name in ['HeadPhones', 'Lidarr']:
core.NOFLATTEN.extend( core.NOFLATTEN.extend(
@ -151,9 +151,9 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
input_files = core.list_media_files(input_directory, other=True, otherext=extensions) input_files = core.list_media_files(input_directory, other=True, otherext=extensions)
if len(input_files) == 0 and os.path.isfile(input_directory): if len(input_files) == 0 and os.path.isfile(input_directory):
input_files = [input_directory] input_files = [input_directory]
logger.debug("Found 1 file to process: {0}".format(input_directory)) logger.debug('Found 1 file to process: {0}'.format(input_directory))
else: else:
logger.debug("Found {0} files in {1}".format(len(input_files), input_directory)) logger.debug('Found {0} files in {1}'.format(len(input_files), input_directory))
for inputFile in input_files: for inputFile in input_files:
file_path = os.path.dirname(inputFile) file_path = os.path.dirname(inputFile)
file_name, file_ext = os.path.splitext(os.path.basename(inputFile)) file_name, file_ext = os.path.splitext(os.path.basename(inputFile))
@ -164,7 +164,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
if not os.path.basename(file_path) in output_destination: if not os.path.basename(file_path) in output_destination:
target_file = core.os.path.join( target_file = core.os.path.join(
core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name) core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name)
logger.debug("Setting outputDestination to {0} to preserve folder structure".format logger.debug('Setting outputDestination to {0} to preserve folder structure'.format
(os.path.dirname(target_file))) (os.path.dirname(target_file)))
try: try:
target_file = target_file.encode(core.SYS_ENCODING) target_file = target_file.encode(core.SYS_ENCODING)
@ -172,11 +172,11 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
pass pass
if root == 1: if root == 1:
if not found_file: if not found_file:
logger.debug("Looking for {0} in: {1}".format(input_name, inputFile)) logger.debug('Looking for {0} in: {1}'.format(input_name, inputFile))
if any([core.sanitize_name(input_name) in core.sanitize_name(inputFile), if any([core.sanitize_name(input_name) in core.sanitize_name(inputFile),
core.sanitize_name(file_name) in core.sanitize_name(input_name)]): core.sanitize_name(file_name) in core.sanitize_name(input_name)]):
found_file = True found_file = True
logger.debug("Found file {0} that matches Torrent Name {1}".format logger.debug('Found file {0} that matches Torrent Name {1}'.format
(full_file_name, input_name)) (full_file_name, input_name))
else: else:
continue continue
@ -186,10 +186,10 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(inputFile)) ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(inputFile))
if not found_file: if not found_file:
logger.debug("Looking for files with modified/created dates less than 5 minutes old.") logger.debug('Looking for files with modified/created dates less than 5 minutes old.')
if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)): if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)):
found_file = True found_file = True
logger.debug("Found file {0} with date modified/created less than 5 minutes ago.".format logger.debug('Found file {0} with date modified/created less than 5 minutes ago.'.format
(full_file_name)) (full_file_name))
else: else:
continue # This file has not been recently moved or created, skip it continue # This file has not been recently moved or created, skip it
@ -199,7 +199,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
core.copy_link(inputFile, target_file, core.USELINK) core.copy_link(inputFile, target_file, core.USELINK)
core.remove_read_only(target_file) core.remove_read_only(target_file)
except Exception: except Exception:
logger.error("Failed to link: {0} to {1}".format(inputFile, target_file)) logger.error('Failed to link: {0} to {1}'.format(inputFile, target_file))
input_name, output_destination = convert_to_ascii(input_name, output_destination) input_name, output_destination = convert_to_ascii(input_name, output_destination)
@ -212,30 +212,30 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
core.flatten(output_destination) core.flatten(output_destination)
# Now check if video files exist in destination: # Now check if video files exist in destination:
if section_name in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]: if section_name in ['SickBeard', 'NzbDrone', 'Sonarr', 'CouchPotato', 'Radarr']:
num_videos = len( num_videos = len(
core.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False)) core.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False))
if num_videos > 0: if num_videos > 0:
logger.info("Found {0} media files in {1}".format(num_videos, output_destination)) logger.info('Found {0} media files in {1}'.format(num_videos, output_destination))
status = 0 status = 0
elif extract != 1: elif extract != 1:
logger.info("Found no media files in {0}. Sending to {1} to process".format(output_destination, section_name)) logger.info('Found no media files in {0}. Sending to {1} to process'.format(output_destination, section_name))
status = 0 status = 0
else: else:
logger.warning("Found no media files in {0}".format(output_destination)) logger.warning('Found no media files in {0}'.format(output_destination))
# Only these sections can handling failed downloads # Only these sections can handling failed downloads
# so make sure everything else gets through without the check for failed # so make sure everything else gets through without the check for failed
if section_name not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']: if section_name not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']:
status = 0 status = 0
logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, usercat, input_name)) logger.info('Calling {0}:{1} to post-process:{2}'.format(section_name, usercat, input_name))
if core.TORRENT_CHMOD_DIRECTORY: if core.TORRENT_CHMOD_DIRECTORY:
core.rchmod(output_destination, core.TORRENT_CHMOD_DIRECTORY) core.rchmod(output_destination, core.TORRENT_CHMOD_DIRECTORY)
result = ProcessResult( result = ProcessResult(
message="", message='',
status_code=0, status_code=0,
) )
if section_name == 'UserScript': if section_name == 'UserScript':
@ -257,11 +257,11 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
if result.status_code != 0: if result.status_code != 0:
if not core.TORRENT_RESUME_ON_FAILURE: if not core.TORRENT_RESUME_ON_FAILURE:
logger.error("A problem was reported in the autoProcess* script. " logger.error('A problem was reported in the autoProcess* script. '
"Torrent won't resume seeding (settings)") 'Torrent won\'t resume seeding (settings)')
elif client_agent != 'manual': elif client_agent != 'manual':
logger.error("A problem was reported in the autoProcess* script. " logger.error('A problem was reported in the autoProcess* script. '
"If torrent was paused we will resume seeding") 'If torrent was paused we will resume seeding')
core.resume_torrent(client_agent, input_hash, input_id, input_name) core.resume_torrent(client_agent, input_hash, input_id, input_name)
else: else:
@ -293,48 +293,48 @@ def main(args):
# clientAgent for Torrents # clientAgent for Torrents
client_agent = core.TORRENT_CLIENTAGENT client_agent = core.TORRENT_CLIENTAGENT
logger.info("#########################################################") logger.info('#########################################################')
logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__))) logger.info('## ..::[{0}]::.. ##'.format(os.path.basename(__file__)))
logger.info("#########################################################") logger.info('#########################################################')
# debug command line options # debug command line options
logger.debug("Options passed into TorrentToMedia: {0}".format(args)) logger.debug('Options passed into TorrentToMedia: {0}'.format(args))
# Post-Processing Result # Post-Processing Result
result = ProcessResult( result = ProcessResult(
message="", message='',
status_code=0, status_code=0,
) )
try: try:
input_directory, input_name, input_category, input_hash, input_id = core.parse_args(client_agent, args) input_directory, input_name, input_category, input_hash, input_id = core.parse_args(client_agent, args)
except Exception: except Exception:
logger.error("There was a problem loading variables") logger.error('There was a problem loading variables')
return -1 return -1
if input_directory and input_name and input_hash and input_id: if input_directory and input_name and input_hash and input_id:
result = process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent) result = process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent)
else: else:
# Perform Manual Post-Processing # Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...") logger.warning('Invalid number of arguments received from client, Switching to manual run mode ...')
for section, subsections in core.SECTIONS.items(): for section, subsections in core.SECTIONS.items():
for subsection in subsections: for subsection in subsections:
if not core.CFG[section][subsection].isenabled(): if not core.CFG[section][subsection].isenabled():
continue continue
for dir_name in core.get_dirs(section, subsection, link='hard'): for dir_name in core.get_dirs(section, subsection, link='hard'):
logger.info("Starting manual run for {0}:{1} - Folder:{2}".format logger.info('Starting manual run for {0}:{1} - Folder:{2}'.format
(section, subsection, dir_name)) (section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format logger.info('Checking database for download info for {0} ...'.format
(os.path.basename(dir_name))) (os.path.basename(dir_name)))
core.DOWNLOADINFO = core.get_download_info(os.path.basename(dir_name), 0) core.DOWNLOADINFO = core.get_download_info(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO: if core.DOWNLOADINFO:
client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual')) client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
input_hash = text_type(core.DOWNLOADINFO[0].get('input_hash', '')) input_hash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
input_id = text_type(core.DOWNLOADINFO[0].get('input_id', '')) input_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
logger.info("Found download info for {0}, " logger.info('Found download info for {0}, '
"setting variables now ...".format(os.path.basename(dir_name))) 'setting variables now ...'.format(os.path.basename(dir_name)))
else: else:
logger.info('Unable to locate download info for {0}, ' logger.info('Unable to locate download info for {0}, '
'continuing to try and process this release ...'.format 'continuing to try and process this release ...'.format
@ -359,17 +359,17 @@ def main(args):
results = process_torrent(dir_name, input_name, subsection, input_hash or None, input_id or None, results = process_torrent(dir_name, input_name, subsection, input_hash or None, input_id or None,
client_agent) client_agent)
if results[0] != 0: if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format logger.error('A problem was reported when trying to perform a manual run for {0}:{1}.'.format
(section, subsection)) (section, subsection))
result = results result = results
if result.status_code == 0: if result.status_code == 0:
logger.info("The {0} script completed successfully.".format(args[0])) logger.info('The {0} script completed successfully.'.format(args[0]))
else: else:
logger.error("A problem was reported in the {0} script.".format(args[0])) logger.error('A problem was reported in the {0} script.'.format(args[0]))
del core.MYAPP del core.MYAPP
return result.status_code return result.status_code
if __name__ == "__main__": if __name__ == '__main__':
exit(main(sys.argv)) exit(main(sys.argv))

View file

@ -64,22 +64,22 @@ SABNZB_0717_NO_OF_ARGUMENTS = 9
# sickbeard fork/branch constants # sickbeard fork/branch constants
FORKS = {} FORKS = {}
FORK_DEFAULT = "default" FORK_DEFAULT = 'default'
FORK_FAILED = "failed" FORK_FAILED = 'failed'
FORK_FAILED_TORRENT = "failed-torrent" FORK_FAILED_TORRENT = 'failed-torrent'
FORK_SICKRAGE = "SickRage" FORK_SICKRAGE = 'SickRage'
FORK_SICKCHILL = "SickChill" FORK_SICKCHILL = 'SickChill'
FORK_SICKBEARD_API = "SickBeard-api" FORK_SICKBEARD_API = 'SickBeard-api'
FORK_MEDUSA = "Medusa" FORK_MEDUSA = 'Medusa'
FORK_SICKGEAR = "SickGear" FORK_SICKGEAR = 'SickGear'
FORKS[FORK_DEFAULT] = {"dir": None} FORKS[FORK_DEFAULT] = {'dir': None}
FORKS[FORK_FAILED] = {"dirName": None, "failed": None} FORKS[FORK_FAILED] = {'dirName': None, 'failed': None}
FORKS[FORK_FAILED_TORRENT] = {"dir": None, "failed": None, "process_method": None} FORKS[FORK_FAILED_TORRENT] = {'dir': None, 'failed': None, 'process_method': None}
FORKS[FORK_SICKRAGE] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None} FORKS[FORK_SICKRAGE] = {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None}
FORKS[FORK_SICKCHILL] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "force_next": None} FORKS[FORK_SICKCHILL] = {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'force_next': None}
FORKS[FORK_SICKBEARD_API] = {"path": None, "failed": None, "process_method": None, "force_replace": None, "return_data": None, "type": None, "delete": None, "force_next": None} FORKS[FORK_SICKBEARD_API] = {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete': None, 'force_next': None}
FORKS[FORK_MEDUSA] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "ignore_subs": None} FORKS[FORK_MEDUSA] = {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'ignore_subs': None}
FORKS[FORK_SICKGEAR] = {"dir": None, "failed": None, "process_method": None, "force": None} FORKS[FORK_SICKGEAR] = {'dir': None, 'failed': None, 'process_method': None, 'force': None}
ALL_FORKS = {k: None for k in set(list(itertools.chain.from_iterable([FORKS[x].keys() for x in FORKS.keys()])))} ALL_FORKS = {k: None for k in set(list(itertools.chain.from_iterable([FORKS[x].keys() for x in FORKS.keys()])))}
# NZBGet Exit Codes # NZBGet Exit Codes
@ -257,15 +257,15 @@ def initialize(section=None):
LOG_DIR = os.path.split(LOG_FILE)[0] LOG_DIR = os.path.split(LOG_FILE)[0]
if not make_dir(LOG_DIR): if not make_dir(LOG_DIR):
print("No log folder, logging to screen only") print('No log folder, logging to screen only')
MYAPP = RunningProcess() MYAPP = RunningProcess()
while MYAPP.alreadyrunning(): while MYAPP.alreadyrunning():
print("Waiting for existing session to end") print('Waiting for existing session to end')
time.sleep(30) time.sleep(30)
try: try:
locale.setlocale(locale.LC_ALL, "") locale.setlocale(locale.LC_ALL, '')
SYS_ENCODING = locale.getpreferredencoding() SYS_ENCODING = locale.getpreferredencoding()
except (locale.Error, IOError): except (locale.Error, IOError):
pass pass
@ -275,7 +275,7 @@ def initialize(section=None):
SYS_ENCODING = 'UTF-8' SYS_ENCODING = 'UTF-8'
if six.PY2: if six.PY2:
if not hasattr(sys, "setdefaultencoding"): if not hasattr(sys, 'setdefaultencoding'):
reload_module(sys) reload_module(sys)
try: try:
@ -296,7 +296,7 @@ def initialize(section=None):
# run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options. # run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options.
if not config.migrate(): if not config.migrate():
logger.error("Unable to migrate config file {0}, exiting ...".format(CONFIG_FILE)) logger.error('Unable to migrate config file {0}, exiting ...'.format(CONFIG_FILE))
if 'NZBOP_SCRIPTDIR' in os.environ: if 'NZBOP_SCRIPTDIR' in os.environ:
pass # We will try and read config from Environment. pass # We will try and read config from Environment.
else: else:
@ -307,7 +307,7 @@ def initialize(section=None):
CFG = config.addnzbget() CFG = config.addnzbget()
else: # load newly migrated config else: # load newly migrated config
logger.info("Loading config from [{0}]".format(CONFIG_FILE)) logger.info('Loading config from [{0}]'.format(CONFIG_FILE))
CFG = config() CFG = config()
# Enable/Disable DEBUG Logging # Enable/Disable DEBUG Logging
@ -318,7 +318,7 @@ def initialize(section=None):
if LOG_ENV: if LOG_ENV:
for item in os.environ: for item in os.environ:
logger.info("{0}: {1}".format(item, os.environ[item]), "ENVIRONMENT") logger.info('{0}: {1}'.format(item, os.environ[item]), 'ENVIRONMENT')
# initialize the main SB database # initialize the main SB database
main_db.upgrade_database(main_db.DBConnection(), databases.InitialSchema) main_db.upgrade_database(main_db.DBConnection(), databases.InitialSchema)
@ -331,16 +331,16 @@ def initialize(section=None):
GIT_PATH = CFG['General']['git_path'] GIT_PATH = CFG['General']['git_path']
GIT_USER = CFG['General']['git_user'] or 'clinton-hall' GIT_USER = CFG['General']['git_user'] or 'clinton-hall'
GIT_BRANCH = CFG['General']['git_branch'] or 'master' GIT_BRANCH = CFG['General']['git_branch'] or 'master'
FORCE_CLEAN = int(CFG["General"]["force_clean"]) FORCE_CLEAN = int(CFG['General']['force_clean'])
FFMPEG_PATH = CFG["General"]["ffmpeg_path"] FFMPEG_PATH = CFG['General']['ffmpeg_path']
CHECK_MEDIA = int(CFG["General"]["check_media"]) CHECK_MEDIA = int(CFG['General']['check_media'])
SAFE_MODE = int(CFG["General"]["safe_mode"]) SAFE_MODE = int(CFG['General']['safe_mode'])
NOEXTRACTFAILED = int(CFG["General"]["no_extract_failed"]) NOEXTRACTFAILED = int(CFG['General']['no_extract_failed'])
# Check for updates via GitHUB # Check for updates via GitHUB
if version_check.CheckVersion().check_for_new_version(): if version_check.CheckVersion().check_for_new_version():
if AUTO_UPDATE == 1: if AUTO_UPDATE == 1:
logger.info("Auto-Updating nzbToMedia, Please wait ...") logger.info('Auto-Updating nzbToMedia, Please wait ...')
updated = version_check.CheckVersion().update() updated = version_check.CheckVersion().update()
if updated: if updated:
# restart nzbToMedia # restart nzbToMedia
@ -350,61 +350,61 @@ def initialize(section=None):
pass pass
restart() restart()
else: else:
logger.error("Update wasn't successful, not restarting. Check your log for more information.") logger.error('Update wasn\'t successful, not restarting. Check your log for more information.')
# Set Current Version # Set Current Version
logger.info('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format logger.info('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format
(version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH, (version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH,
system=platform.system(), release=platform.release())) system=platform.system(), release=platform.release()))
if int(CFG["WakeOnLan"]["wake"]) == 1: if int(CFG['WakeOnLan']['wake']) == 1:
wake_up() wake_up()
NZB_CLIENTAGENT = CFG["Nzb"]["clientAgent"] # sabnzbd NZB_CLIENTAGENT = CFG['Nzb']['clientAgent'] # sabnzbd
SABNZBDHOST = CFG["Nzb"]["sabnzbd_host"] SABNZBDHOST = CFG['Nzb']['sabnzbd_host']
SABNZBDPORT = int(CFG["Nzb"]["sabnzbd_port"] or 8080) # defaults to accomodate NzbGet SABNZBDPORT = int(CFG['Nzb']['sabnzbd_port'] or 8080) # defaults to accomodate NzbGet
SABNZBDAPIKEY = CFG["Nzb"]["sabnzbd_apikey"] SABNZBDAPIKEY = CFG['Nzb']['sabnzbd_apikey']
NZB_DEFAULTDIR = CFG["Nzb"]["default_downloadDirectory"] NZB_DEFAULTDIR = CFG['Nzb']['default_downloadDirectory']
GROUPS = CFG["Custom"]["remove_group"] GROUPS = CFG['Custom']['remove_group']
if isinstance(GROUPS, str): if isinstance(GROUPS, str):
GROUPS = GROUPS.split(',') GROUPS = GROUPS.split(',')
if GROUPS == ['']: if GROUPS == ['']:
GROUPS = None GROUPS = None
TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"] # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent |other TORRENT_CLIENTAGENT = CFG['Torrent']['clientAgent'] # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent |other
USELINK = CFG["Torrent"]["useLink"] # no | hard | sym USELINK = CFG['Torrent']['useLink'] # no | hard | sym
OUTPUTDIRECTORY = CFG["Torrent"]["outputDirectory"] # /abs/path/to/complete/ OUTPUTDIRECTORY = CFG['Torrent']['outputDirectory'] # /abs/path/to/complete/
TORRENT_DEFAULTDIR = CFG["Torrent"]["default_downloadDirectory"] TORRENT_DEFAULTDIR = CFG['Torrent']['default_downloadDirectory']
CATEGORIES = (CFG["Torrent"]["categories"]) # music,music_videos,pictures,software CATEGORIES = (CFG['Torrent']['categories']) # music,music_videos,pictures,software
NOFLATTEN = (CFG["Torrent"]["noFlatten"]) NOFLATTEN = (CFG['Torrent']['noFlatten'])
if isinstance(NOFLATTEN, str): if isinstance(NOFLATTEN, str):
NOFLATTEN = NOFLATTEN.split(',') NOFLATTEN = NOFLATTEN.split(',')
if isinstance(CATEGORIES, str): if isinstance(CATEGORIES, str):
CATEGORIES = CATEGORIES.split(',') CATEGORIES = CATEGORIES.split(',')
DELETE_ORIGINAL = int(CFG["Torrent"]["deleteOriginal"]) DELETE_ORIGINAL = int(CFG['Torrent']['deleteOriginal'])
TORRENT_CHMOD_DIRECTORY = int(str(CFG["Torrent"]["chmodDirectory"]), 8) TORRENT_CHMOD_DIRECTORY = int(str(CFG['Torrent']['chmodDirectory']), 8)
TORRENT_RESUME_ON_FAILURE = int(CFG["Torrent"]["resumeOnFailure"]) TORRENT_RESUME_ON_FAILURE = int(CFG['Torrent']['resumeOnFailure'])
TORRENT_RESUME = int(CFG["Torrent"]["resume"]) TORRENT_RESUME = int(CFG['Torrent']['resume'])
UTORRENTWEBUI = CFG["Torrent"]["uTorrentWEBui"] # http://localhost:8090/gui/ UTORRENTWEBUI = CFG['Torrent']['uTorrentWEBui'] # http://localhost:8090/gui/
UTORRENTUSR = CFG["Torrent"]["uTorrentUSR"] # mysecretusr UTORRENTUSR = CFG['Torrent']['uTorrentUSR'] # mysecretusr
UTORRENTPWD = CFG["Torrent"]["uTorrentPWD"] # mysecretpwr UTORRENTPWD = CFG['Torrent']['uTorrentPWD'] # mysecretpwr
TRANSMISSIONHOST = CFG["Torrent"]["TransmissionHost"] # localhost TRANSMISSIONHOST = CFG['Torrent']['TransmissionHost'] # localhost
TRANSMISSIONPORT = int(CFG["Torrent"]["TransmissionPort"]) TRANSMISSIONPORT = int(CFG['Torrent']['TransmissionPort'])
TRANSMISSIONUSR = CFG["Torrent"]["TransmissionUSR"] # mysecretusr TRANSMISSIONUSR = CFG['Torrent']['TransmissionUSR'] # mysecretusr
TRANSMISSIONPWD = CFG["Torrent"]["TransmissionPWD"] # mysecretpwr TRANSMISSIONPWD = CFG['Torrent']['TransmissionPWD'] # mysecretpwr
DELUGEHOST = CFG["Torrent"]["DelugeHost"] # localhost DELUGEHOST = CFG['Torrent']['DelugeHost'] # localhost
DELUGEPORT = int(CFG["Torrent"]["DelugePort"]) # 8084 DELUGEPORT = int(CFG['Torrent']['DelugePort']) # 8084
DELUGEUSR = CFG["Torrent"]["DelugeUSR"] # mysecretusr DELUGEUSR = CFG['Torrent']['DelugeUSR'] # mysecretusr
DELUGEPWD = CFG["Torrent"]["DelugePWD"] # mysecretpwr DELUGEPWD = CFG['Torrent']['DelugePWD'] # mysecretpwr
QBITTORRENTHOST = CFG["Torrent"]["qBittorrenHost"] # localhost QBITTORRENTHOST = CFG['Torrent']['qBittorrenHost'] # localhost
QBITTORRENTPORT = int(CFG["Torrent"]["qBittorrentPort"]) # 8080 QBITTORRENTPORT = int(CFG['Torrent']['qBittorrentPort']) # 8080
QBITTORRENTUSR = CFG["Torrent"]["qBittorrentUSR"] # mysecretusr QBITTORRENTUSR = CFG['Torrent']['qBittorrentUSR'] # mysecretusr
QBITTORRENTPWD = CFG["Torrent"]["qBittorrentPWD"] # mysecretpwr QBITTORRENTPWD = CFG['Torrent']['qBittorrentPWD'] # mysecretpwr
REMOTEPATHS = CFG["Network"]["mount_points"] or [] REMOTEPATHS = CFG['Network']['mount_points'] or []
if REMOTEPATHS: if REMOTEPATHS:
if isinstance(REMOTEPATHS, list): if isinstance(REMOTEPATHS, list):
REMOTEPATHS = ','.join(REMOTEPATHS) # fix in case this imported as list. REMOTEPATHS = ','.join(REMOTEPATHS) # fix in case this imported as list.
@ -413,11 +413,11 @@ def initialize(section=None):
REMOTEPATHS = [(local.strip(), remote.strip()) for local, remote in REMOTEPATHS = [(local.strip(), remote.strip()) for local, remote in
REMOTEPATHS] # strip trailing and leading whitespaces REMOTEPATHS] # strip trailing and leading whitespaces
PLEXSSL = int(CFG["Plex"]["plex_ssl"]) PLEXSSL = int(CFG['Plex']['plex_ssl'])
PLEXHOST = CFG["Plex"]["plex_host"] PLEXHOST = CFG['Plex']['plex_host']
PLEXPORT = CFG["Plex"]["plex_port"] PLEXPORT = CFG['Plex']['plex_port']
PLEXTOKEN = CFG["Plex"]["plex_token"] PLEXTOKEN = CFG['Plex']['plex_token']
PLEXSEC = CFG["Plex"]["plex_sections"] or [] PLEXSEC = CFG['Plex']['plex_sections'] or []
if PLEXSEC: if PLEXSEC:
if isinstance(PLEXSEC, list): if isinstance(PLEXSEC, list):
PLEXSEC = ','.join(PLEXSEC) # fix in case this imported as list. PLEXSEC = ','.join(PLEXSEC) # fix in case this imported as list.
@ -425,21 +425,21 @@ def initialize(section=None):
devnull = open(os.devnull, 'w') devnull = open(os.devnull, 'w')
try: try:
subprocess.Popen(["nice"], stdout=devnull, stderr=devnull).communicate() subprocess.Popen(['nice'], stdout=devnull, stderr=devnull).communicate()
NICENESS.extend(['nice', '-n{0}'.format(int(CFG["Posix"]["niceness"]))]) NICENESS.extend(['nice', '-n{0}'.format(int(CFG['Posix']['niceness']))])
except Exception: except Exception:
pass pass
try: try:
subprocess.Popen(["ionice"], stdout=devnull, stderr=devnull).communicate() subprocess.Popen(['ionice'], stdout=devnull, stderr=devnull).communicate()
try: try:
NICENESS.extend(['ionice', '-c{0}'.format(int(CFG["Posix"]["ionice_class"]))]) NICENESS.extend(['ionice', '-c{0}'.format(int(CFG['Posix']['ionice_class']))])
except Exception: except Exception:
pass pass
try: try:
if 'ionice' in NICENESS: if 'ionice' in NICENESS:
NICENESS.extend(['-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))]) NICENESS.extend(['-n{0}'.format(int(CFG['Posix']['ionice_classdata']))])
else: else:
NICENESS.extend(['ionice', '-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))]) NICENESS.extend(['ionice', '-n{0}'.format(int(CFG['Posix']['ionice_classdata']))])
except Exception: except Exception:
pass pass
except Exception: except Exception:
@ -449,10 +449,10 @@ def initialize(section=None):
COMPRESSEDCONTAINER = [re.compile(r'.r\d{2}$', re.I), COMPRESSEDCONTAINER = [re.compile(r'.r\d{2}$', re.I),
re.compile(r'.part\d+.rar$', re.I), re.compile(r'.part\d+.rar$', re.I),
re.compile('.rar$', re.I)] re.compile('.rar$', re.I)]
COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG["Extensions"]["compressedExtensions"]] COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG['Extensions']['compressedExtensions']]
MEDIACONTAINER = CFG["Extensions"]["mediaExtensions"] MEDIACONTAINER = CFG['Extensions']['mediaExtensions']
AUDIOCONTAINER = CFG["Extensions"]["audioExtensions"] AUDIOCONTAINER = CFG['Extensions']['audioExtensions']
METACONTAINER = CFG["Extensions"]["metaExtensions"] # .nfo,.sub,.srt METACONTAINER = CFG['Extensions']['metaExtensions'] # .nfo,.sub,.srt
if isinstance(COMPRESSEDCONTAINER, str): if isinstance(COMPRESSEDCONTAINER, str):
COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',') COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',')
if isinstance(MEDIACONTAINER, str): if isinstance(MEDIACONTAINER, str):
@ -462,15 +462,15 @@ def initialize(section=None):
if isinstance(METACONTAINER, str): if isinstance(METACONTAINER, str):
METACONTAINER = METACONTAINER.split(',') METACONTAINER = METACONTAINER.split(',')
GETSUBS = int(CFG["Transcoder"]["getSubs"]) GETSUBS = int(CFG['Transcoder']['getSubs'])
TRANSCODE = int(CFG["Transcoder"]["transcode"]) TRANSCODE = int(CFG['Transcoder']['transcode'])
DUPLICATE = int(CFG["Transcoder"]["duplicate"]) DUPLICATE = int(CFG['Transcoder']['duplicate'])
CONCAT = int(CFG["Transcoder"]["concat"]) CONCAT = int(CFG['Transcoder']['concat'])
IGNOREEXTENSIONS = (CFG["Transcoder"]["ignoreExtensions"]) IGNOREEXTENSIONS = (CFG['Transcoder']['ignoreExtensions'])
if isinstance(IGNOREEXTENSIONS, str): if isinstance(IGNOREEXTENSIONS, str):
IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',') IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',')
OUTPUTFASTSTART = int(CFG["Transcoder"]["outputFastStart"]) OUTPUTFASTSTART = int(CFG['Transcoder']['outputFastStart'])
GENERALOPTS = (CFG["Transcoder"]["generalOptions"]) GENERALOPTS = (CFG['Transcoder']['generalOptions'])
if isinstance(GENERALOPTS, str): if isinstance(GENERALOPTS, str):
GENERALOPTS = GENERALOPTS.split(',') GENERALOPTS = GENERALOPTS.split(',')
if GENERALOPTS == ['']: if GENERALOPTS == ['']:
@ -480,93 +480,93 @@ def initialize(section=None):
if '+genpts' not in GENERALOPTS: if '+genpts' not in GENERALOPTS:
GENERALOPTS.append('+genpts') GENERALOPTS.append('+genpts')
try: try:
OUTPUTQUALITYPERCENT = int(CFG["Transcoder"]["outputQualityPercent"]) OUTPUTQUALITYPERCENT = int(CFG['Transcoder']['outputQualityPercent'])
except Exception: except Exception:
pass pass
OUTPUTVIDEOPATH = CFG["Transcoder"]["outputVideoPath"] OUTPUTVIDEOPATH = CFG['Transcoder']['outputVideoPath']
PROCESSOUTPUT = int(CFG["Transcoder"]["processOutput"]) PROCESSOUTPUT = int(CFG['Transcoder']['processOutput'])
ALANGUAGE = CFG["Transcoder"]["audioLanguage"] ALANGUAGE = CFG['Transcoder']['audioLanguage']
AINCLUDE = int(CFG["Transcoder"]["allAudioLanguages"]) AINCLUDE = int(CFG['Transcoder']['allAudioLanguages'])
SLANGUAGES = CFG["Transcoder"]["subLanguages"] SLANGUAGES = CFG['Transcoder']['subLanguages']
if isinstance(SLANGUAGES, str): if isinstance(SLANGUAGES, str):
SLANGUAGES = SLANGUAGES.split(',') SLANGUAGES = SLANGUAGES.split(',')
if SLANGUAGES == ['']: if SLANGUAGES == ['']:
SLANGUAGES = [] SLANGUAGES = []
SINCLUDE = int(CFG["Transcoder"]["allSubLanguages"]) SINCLUDE = int(CFG['Transcoder']['allSubLanguages'])
SEXTRACT = int(CFG["Transcoder"]["extractSubs"]) SEXTRACT = int(CFG['Transcoder']['extractSubs'])
SEMBED = int(CFG["Transcoder"]["embedSubs"]) SEMBED = int(CFG['Transcoder']['embedSubs'])
SUBSDIR = CFG["Transcoder"]["externalSubDir"] SUBSDIR = CFG['Transcoder']['externalSubDir']
VEXTENSION = CFG["Transcoder"]["outputVideoExtension"].strip() VEXTENSION = CFG['Transcoder']['outputVideoExtension'].strip()
VCODEC = CFG["Transcoder"]["outputVideoCodec"].strip() VCODEC = CFG['Transcoder']['outputVideoCodec'].strip()
VCODEC_ALLOW = CFG["Transcoder"]["VideoCodecAllow"].strip() VCODEC_ALLOW = CFG['Transcoder']['VideoCodecAllow'].strip()
if isinstance(VCODEC_ALLOW, str): if isinstance(VCODEC_ALLOW, str):
VCODEC_ALLOW = VCODEC_ALLOW.split(',') VCODEC_ALLOW = VCODEC_ALLOW.split(',')
if VCODEC_ALLOW == ['']: if VCODEC_ALLOW == ['']:
VCODEC_ALLOW = [] VCODEC_ALLOW = []
VPRESET = CFG["Transcoder"]["outputVideoPreset"].strip() VPRESET = CFG['Transcoder']['outputVideoPreset'].strip()
try: try:
VFRAMERATE = float(CFG["Transcoder"]["outputVideoFramerate"].strip()) VFRAMERATE = float(CFG['Transcoder']['outputVideoFramerate'].strip())
except Exception: except Exception:
pass pass
try: try:
VCRF = int(CFG["Transcoder"]["outputVideoCRF"].strip()) VCRF = int(CFG['Transcoder']['outputVideoCRF'].strip())
except Exception: except Exception:
pass pass
try: try:
VLEVEL = CFG["Transcoder"]["outputVideoLevel"].strip() VLEVEL = CFG['Transcoder']['outputVideoLevel'].strip()
except Exception: except Exception:
pass pass
try: try:
VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k', '000')) VBITRATE = int((CFG['Transcoder']['outputVideoBitrate'].strip()).replace('k', '000'))
except Exception: except Exception:
pass pass
VRESOLUTION = CFG["Transcoder"]["outputVideoResolution"] VRESOLUTION = CFG['Transcoder']['outputVideoResolution']
ACODEC = CFG["Transcoder"]["outputAudioCodec"].strip() ACODEC = CFG['Transcoder']['outputAudioCodec'].strip()
ACODEC_ALLOW = CFG["Transcoder"]["AudioCodecAllow"].strip() ACODEC_ALLOW = CFG['Transcoder']['AudioCodecAllow'].strip()
if isinstance(ACODEC_ALLOW, str): if isinstance(ACODEC_ALLOW, str):
ACODEC_ALLOW = ACODEC_ALLOW.split(',') ACODEC_ALLOW = ACODEC_ALLOW.split(',')
if ACODEC_ALLOW == ['']: if ACODEC_ALLOW == ['']:
ACODEC_ALLOW = [] ACODEC_ALLOW = []
try: try:
ACHANNELS = int(CFG["Transcoder"]["outputAudioChannels"].strip()) ACHANNELS = int(CFG['Transcoder']['outputAudioChannels'].strip())
except Exception: except Exception:
pass pass
try: try:
ABITRATE = int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k', '000')) ABITRATE = int((CFG['Transcoder']['outputAudioBitrate'].strip()).replace('k', '000'))
except Exception: except Exception:
pass pass
ACODEC2 = CFG["Transcoder"]["outputAudioTrack2Codec"].strip() ACODEC2 = CFG['Transcoder']['outputAudioTrack2Codec'].strip()
ACODEC2_ALLOW = CFG["Transcoder"]["AudioCodec2Allow"].strip() ACODEC2_ALLOW = CFG['Transcoder']['AudioCodec2Allow'].strip()
if isinstance(ACODEC2_ALLOW, str): if isinstance(ACODEC2_ALLOW, str):
ACODEC2_ALLOW = ACODEC2_ALLOW.split(',') ACODEC2_ALLOW = ACODEC2_ALLOW.split(',')
if ACODEC2_ALLOW == ['']: if ACODEC2_ALLOW == ['']:
ACODEC2_ALLOW = [] ACODEC2_ALLOW = []
try: try:
ACHANNELS2 = int(CFG["Transcoder"]["outputAudioTrack2Channels"].strip()) ACHANNELS2 = int(CFG['Transcoder']['outputAudioTrack2Channels'].strip())
except Exception: except Exception:
pass pass
try: try:
ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k', '000')) ABITRATE2 = int((CFG['Transcoder']['outputAudioTrack2Bitrate'].strip()).replace('k', '000'))
except Exception: except Exception:
pass pass
ACODEC3 = CFG["Transcoder"]["outputAudioOtherCodec"].strip() ACODEC3 = CFG['Transcoder']['outputAudioOtherCodec'].strip()
ACODEC3_ALLOW = CFG["Transcoder"]["AudioOtherCodecAllow"].strip() ACODEC3_ALLOW = CFG['Transcoder']['AudioOtherCodecAllow'].strip()
if isinstance(ACODEC3_ALLOW, str): if isinstance(ACODEC3_ALLOW, str):
ACODEC3_ALLOW = ACODEC3_ALLOW.split(',') ACODEC3_ALLOW = ACODEC3_ALLOW.split(',')
if ACODEC3_ALLOW == ['']: if ACODEC3_ALLOW == ['']:
ACODEC3_ALLOW = [] ACODEC3_ALLOW = []
try: try:
ACHANNELS3 = int(CFG["Transcoder"]["outputAudioOtherChannels"].strip()) ACHANNELS3 = int(CFG['Transcoder']['outputAudioOtherChannels'].strip())
except Exception: except Exception:
pass pass
try: try:
ABITRATE3 = int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k', '000')) ABITRATE3 = int((CFG['Transcoder']['outputAudioOtherBitrate'].strip()).replace('k', '000'))
except Exception: except Exception:
pass pass
SCODEC = CFG["Transcoder"]["outputSubtitleCodec"].strip() SCODEC = CFG['Transcoder']['outputSubtitleCodec'].strip()
BURN = int(CFG["Transcoder"]["burnInSubtitle"].strip()) BURN = int(CFG['Transcoder']['burnInSubtitle'].strip())
DEFAULTS = CFG["Transcoder"]["outputDefault"].strip() DEFAULTS = CFG['Transcoder']['outputDefault'].strip()
HWACCEL = int(CFG["Transcoder"]["hwAccel"]) HWACCEL = int(CFG['Transcoder']['hwAccel'])
allow_subs = ['.mkv', '.mp4', '.m4v', 'asf', 'wma', 'wmv'] allow_subs = ['.mkv', '.mp4', '.m4v', 'asf', 'wma', 'wmv']
codec_alias = { codec_alias = {
@ -743,25 +743,25 @@ def initialize(section=None):
ACODEC3_ALLOW.extend(extra) ACODEC3_ALLOW.extend(extra)
codec_alias = {} # clear memory codec_alias = {} # clear memory
PASSWORDSFILE = CFG["passwords"]["PassWordFile"] PASSWORDSFILE = CFG['passwords']['PassWordFile']
# Setup FFMPEG, FFPROBE and SEVENZIP locations # Setup FFMPEG, FFPROBE and SEVENZIP locations
if platform.system() == 'Windows': if platform.system() == 'Windows':
FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg.exe') FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg.exe')
FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe.exe') FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe.exe')
SEVENZIP = os.path.join(APP_ROOT, 'core', 'extractor', 'bin', platform.machine(), '7z.exe') SEVENZIP = os.path.join(APP_ROOT, 'core', 'extractor', 'bin', platform.machine(), '7z.exe')
SHOWEXTRACT = int(str(CFG["Windows"]["show_extraction"]), 0) SHOWEXTRACT = int(str(CFG['Windows']['show_extraction']), 0)
if not (os.path.isfile(FFMPEG)): # problem if not (os.path.isfile(FFMPEG)): # problem
FFMPEG = None FFMPEG = None
logger.warning("Failed to locate ffmpeg.exe. Transcoding disabled!") logger.warning('Failed to locate ffmpeg.exe. Transcoding disabled!')
logger.warning("Install ffmpeg with x264 support to enable this feature ...") logger.warning('Install ffmpeg with x264 support to enable this feature ...')
if not (os.path.isfile(FFPROBE)): if not (os.path.isfile(FFPROBE)):
FFPROBE = None FFPROBE = None
if CHECK_MEDIA: if CHECK_MEDIA:
logger.warning("Failed to locate ffprobe.exe. Video corruption detection disabled!") logger.warning('Failed to locate ffprobe.exe. Video corruption detection disabled!')
logger.warning("Install ffmpeg with x264 support to enable this feature ...") logger.warning('Install ffmpeg with x264 support to enable this feature ...')
else: else:
try: try:
@ -781,7 +781,7 @@ def initialize(section=None):
if not SEVENZIP: if not SEVENZIP:
SEVENZIP = None SEVENZIP = None
logger.warning( logger.warning(
"Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!") 'Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!')
try: try:
PAR2CMD = subprocess.Popen(['which', 'par2'], stdout=subprocess.PIPE).communicate()[0].strip() PAR2CMD = subprocess.Popen(['which', 'par2'], stdout=subprocess.PIPE).communicate()[0].strip()
except Exception: except Exception:
@ -789,7 +789,7 @@ def initialize(section=None):
if not PAR2CMD: if not PAR2CMD:
PAR2CMD = None PAR2CMD = None
logger.warning( logger.warning(
"Failed to locate par2. Repair and rename using par files will not be possible!") 'Failed to locate par2. Repair and rename using par files will not be possible!')
if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'), if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'),
os.X_OK): os.X_OK):
FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg') FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg')
@ -808,8 +808,8 @@ def initialize(section=None):
pass pass
if not FFMPEG: if not FFMPEG:
FFMPEG = None FFMPEG = None
logger.warning("Failed to locate ffmpeg. Transcoding disabled!") logger.warning('Failed to locate ffmpeg. Transcoding disabled!')
logger.warning("Install ffmpeg with x264 support to enable this feature ...") logger.warning('Install ffmpeg with x264 support to enable this feature ...')
if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'), if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'),
os.X_OK): os.X_OK):
@ -830,8 +830,8 @@ def initialize(section=None):
if not FFPROBE: if not FFPROBE:
FFPROBE = None FFPROBE = None
if CHECK_MEDIA: if CHECK_MEDIA:
logger.warning("Failed to locate ffprobe. Video corruption detection disabled!") logger.warning('Failed to locate ffprobe. Video corruption detection disabled!')
logger.warning("Install ffmpeg with x264 support to enable this feature ...") logger.warning('Install ffmpeg with x264 support to enable this feature ...')
# check for script-defied section and if None set to allow sections # check for script-defied section and if None set to allow sections
SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)] SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)]
@ -857,7 +857,7 @@ def restart():
if popen_list: if popen_list:
popen_list += SYS_ARGV popen_list += SYS_ARGV
logger.log(u"Restarting nzbToMedia with {args}".format(args=popen_list)) logger.log(u'Restarting nzbToMedia with {args}'.format(args=popen_list))
logger.close() logger.close()
p = subprocess.Popen(popen_list, cwd=os.getcwd()) p = subprocess.Popen(popen_list, cwd=os.getcwd())
p.wait() p.wait()
@ -867,7 +867,7 @@ def restart():
def rchmod(path, mod): def rchmod(path, mod):
logger.log("Changing file mode of {0} to {1}".format(path, oct(mod))) logger.log('Changing file mode of {0} to {1}'.format(path, oct(mod)))
os.chmod(path, mod) os.chmod(path, mod)
if not os.path.isdir(path): if not os.path.isdir(path):
return # Skip files return # Skip files

View file

@ -13,24 +13,24 @@ requests.packages.urllib3.disable_warnings()
def process(section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None): def process(section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None):
apc_version = "2.04" apc_version = '2.04'
comicrn_version = "1.01" comicrn_version = '1.01'
cfg = dict(core.CFG[section][input_category]) cfg = dict(core.CFG[section][input_category])
host = cfg["host"] host = cfg['host']
port = cfg["port"] port = cfg['port']
apikey = cfg["apikey"] apikey = cfg['apikey']
ssl = int(cfg.get("ssl", 0)) ssl = int(cfg.get('ssl', 0))
web_root = cfg.get("web_root", "") web_root = cfg.get('web_root', '')
remote_path = int(cfg.get("remote_path"), 0) remote_path = int(cfg.get('remote_path'), 0)
protocol = "https://" if ssl else "http://" protocol = 'https://' if ssl else 'http://'
url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/api'.format(protocol, host, port, web_root)
if not server_responding(url): if not server_responding(url):
logger.error("Server did not respond. Exiting", section) logger.error('Server did not respond. Exiting', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - {0} did not respond.".format(section), message='{0}: Failed to post-process - {0} did not respond.'.format(section),
status_code=1, status_code=1,
) )
@ -53,19 +53,19 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
success = False success = False
logger.debug("Opening URL: {0}".format(url), section) logger.debug('Opening URL: {0}'.format(url), section)
try: try:
r = requests.post(url, params=params, stream=True, verify=False, timeout=(30, 300)) r = requests.post(url, params=params, stream=True, verify=False, timeout=(30, 300))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL", section) logger.error('Unable to open URL', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {0}".format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1 status_code=1
) )
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
status_code=1, status_code=1,
) )
@ -74,19 +74,19 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
result = result.split('\n') result = result.split('\n')
for line in result: for line in result:
if line: if line:
logger.postprocess("{0}".format(line), section) logger.postprocess('{0}'.format(line), section)
if "Post Processing SUCCESSFUL" in line: if 'Post Processing SUCCESSFUL' in line:
success = True success = True
if success: if success:
logger.postprocess("SUCCESS: This issue has been processed successfully", section) logger.postprocess('SUCCESS: This issue has been processed successfully', section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
else: else:
logger.warning("The issue does not appear to have successfully processed. Please check your Logs", section) logger.warning('The issue does not appear to have successfully processed. Please check your Logs', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section),
status_code=1, status_code=1,
) )

View file

@ -31,32 +31,32 @@ def command_complete(url, params, headers, section):
try: try:
r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60)) r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL: {0}".format(url), section) logger.error('Unable to open URL: {0}'.format(url), section)
return None return None
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return None return None
else: else:
try: try:
return r.json()['state'] return r.json()['state']
except (ValueError, KeyError): except (ValueError, KeyError):
# ValueError catches simplejson's JSONDecodeError and json's ValueError # ValueError catches simplejson's JSONDecodeError and json's ValueError
logger.error("{0} did not return expected json data.".format(section), section) logger.error('{0} did not return expected json data.'.format(section), section)
return None return None
def completed_download_handling(url2, headers, section="MAIN"): def completed_download_handling(url2, headers, section='MAIN'):
try: try:
r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60)) r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL: {0}".format(url2), section) logger.error('Unable to open URL: {0}'.format(url2), section)
return False return False
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return False return False
else: else:
try: try:
return r.json().get("enableCompletedDownloadHandling", False) return r.json().get('enableCompletedDownloadHandling', False)
except ValueError: except ValueError:
# ValueError catches simplejson's JSONDecodeError and json's ValueError # ValueError catches simplejson's JSONDecodeError and json's ValueError
return False return False

View file

@ -18,27 +18,27 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
cfg = dict(core.CFG[section][input_category]) cfg = dict(core.CFG[section][input_category])
host = cfg["host"] host = cfg['host']
port = cfg["port"] port = cfg['port']
apikey = cfg["apikey"] apikey = cfg['apikey']
library = cfg.get("library") library = cfg.get('library')
ssl = int(cfg.get("ssl", 0)) ssl = int(cfg.get('ssl', 0))
web_root = cfg.get("web_root", "") web_root = cfg.get('web_root', '')
protocol = "https://" if ssl else "http://" protocol = 'https://' if ssl else 'http://'
url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/api'.format(protocol, host, port, web_root)
if not server_responding(url): if not server_responding(url):
logger.error("Server did not respond. Exiting", section) logger.error('Server did not respond. Exiting', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - {0} did not respond.".format(section), message='{0}: Failed to post-process - {0} did not respond.'.format(section),
status_code=1, status_code=1,
) )
input_name, dir_name = convert_to_ascii(input_name, dir_name) input_name, dir_name = convert_to_ascii(input_name, dir_name)
fields = input_name.split("-") fields = input_name.split('-')
gamez_id = fields[0].replace("[", "").replace("]", "").replace(" ", "") gamez_id = fields[0].replace('[', '').replace(']', '').replace(' ', '')
download_status = 'Downloaded' if status == 0 else 'Wanted' download_status = 'Downloaded' if status == 0 else 'Wanted'
@ -49,51 +49,51 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
'status': download_status 'status': download_status
} }
logger.debug("Opening URL: {0}".format(url), section) logger.debug('Opening URL: {0}'.format(url), section)
try: try:
r = requests.get(url, params=params, verify=False, timeout=(30, 300)) r = requests.get(url, params=params, verify=False, timeout=(30, 300))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL") logger.error('Unable to open URL')
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {1}".format(section, section), message='{0}: Failed to post-process - Unable to connect to {1}'.format(section, section),
status_code=1, status_code=1,
) )
result = r.json() result = r.json()
logger.postprocess("{0}".format(result), section) logger.postprocess('{0}'.format(result), section)
if library: if library:
logger.postprocess("moving files to library: {0}".format(library), section) logger.postprocess('moving files to library: {0}'.format(library), section)
try: try:
shutil.move(dir_name, os.path.join(library, input_name)) shutil.move(dir_name, os.path.join(library, input_name))
except Exception: except Exception:
logger.error("Unable to move {0} to {1}".format(dir_name, os.path.join(library, input_name)), section) logger.error('Unable to move {0} to {1}'.format(dir_name, os.path.join(library, input_name)), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to move files".format(section), message='{0}: Failed to post-process - Unable to move files'.format(section),
status_code=1, status_code=1,
) )
else: else:
logger.error("No library specified to move files to. Please edit your configuration.", section) logger.error('No library specified to move files to. Please edit your configuration.', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - No library defined in {0}".format(section), message='{0}: Failed to post-process - No library defined in {0}'.format(section),
status_code=1, status_code=1,
) )
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
status_code=1, status_code=1,
) )
elif result['success']: elif result['success']:
logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamez_id, download_status), section) logger.postprocess('SUCCESS: Status for {0} has been set to {1} in Gamez'.format(gamez_id, download_status), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
else: else:
logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamez_id), section) logger.error('FAILED: Status for {0} has NOT been updated in Gamez'.format(gamez_id), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section),
status_code=1, status_code=1,
) )

View file

@ -15,54 +15,54 @@ from core.utils import convert_to_ascii, find_download, find_imdbid, import_subs
requests.packages.urllib3.disable_warnings() requests.packages.urllib3.disable_warnings()
def process(section, dir_name, input_name=None, status=0, client_agent="manual", download_id="", input_category=None, failure_link=None): def process(section, dir_name, input_name=None, status=0, client_agent='manual', download_id='', input_category=None, failure_link=None):
cfg = dict(core.CFG[section][input_category]) cfg = dict(core.CFG[section][input_category])
host = cfg["host"] host = cfg['host']
port = cfg["port"] port = cfg['port']
apikey = cfg["apikey"] apikey = cfg['apikey']
if section == "CouchPotato": if section == 'CouchPotato':
method = cfg["method"] method = cfg['method']
else: else:
method = None method = None
# added importMode for Radarr config # added importMode for Radarr config
if section == "Radarr": if section == 'Radarr':
import_mode = cfg.get("importMode", "Move") import_mode = cfg.get('importMode', 'Move')
else: else:
import_mode = None import_mode = None
delete_failed = int(cfg["delete_failed"]) delete_failed = int(cfg['delete_failed'])
wait_for = int(cfg["wait_for"]) wait_for = int(cfg['wait_for'])
ssl = int(cfg.get("ssl", 0)) ssl = int(cfg.get('ssl', 0))
web_root = cfg.get("web_root", "") web_root = cfg.get('web_root', '')
remote_path = int(cfg.get("remote_path", 0)) remote_path = int(cfg.get('remote_path', 0))
protocol = "https://" if ssl else "http://" protocol = 'https://' if ssl else 'http://'
omdbapikey = cfg.get("omdbapikey", "") omdbapikey = cfg.get('omdbapikey', '')
status = int(status) status = int(status)
if status > 0 and core.NOEXTRACTFAILED: if status > 0 and core.NOEXTRACTFAILED:
extract = 0 extract = 0
else: else:
extract = int(cfg.get("extract", 0)) extract = int(cfg.get('extract', 0))
imdbid = find_imdbid(dir_name, input_name, omdbapikey) imdbid = find_imdbid(dir_name, input_name, omdbapikey)
if section == "CouchPotato": if section == 'CouchPotato':
base_url = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey) base_url = '{0}{1}:{2}{3}/api/{4}/'.format(protocol, host, port, web_root, apikey)
if section == "Radarr": if section == 'Radarr':
base_url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root) base_url = '{0}{1}:{2}{3}/api/command'.format(protocol, host, port, web_root)
url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root) url2 = '{0}{1}:{2}{3}/api/config/downloadClient'.format(protocol, host, port, web_root)
headers = {'X-Api-Key': apikey} headers = {'X-Api-Key': apikey}
if not apikey: if not apikey:
logger.info('No CouchPotato or Radarr apikey entered. Performing transcoder functions only') logger.info('No CouchPotato or Radarr apikey entered. Performing transcoder functions only')
release = None release = None
elif server_responding(base_url): elif server_responding(base_url):
if section == "CouchPotato": if section == 'CouchPotato':
release = get_release(base_url, imdbid, download_id) release = get_release(base_url, imdbid, download_id)
else: else:
release = None release = None
else: else:
logger.error("Server did not respond. Exiting", section) logger.error('Server did not respond. Exiting', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - {0} did not respond.".format(section), message='{0}: Failed to post-process - {0} did not respond.'.format(section),
status_code=1, status_code=1,
) )
@ -86,7 +86,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
specific_path = os.path.join(dir_name, str(input_name)) specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path) clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb": if clean_name[1] == '.nzb':
specific_path = clean_name[0] specific_path = clean_name[0]
if os.path.isdir(specific_path): if os.path.isdir(specific_path):
dir_name = specific_path dir_name = specific_path
@ -109,23 +109,23 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
good_files += 1 good_files += 1
if num_files and good_files == num_files: if num_files and good_files == num_files:
if status: if status:
logger.info("Status shown as failed from Downloader, but {0} valid video files found. Setting as success.".format(good_files), section) logger.info('Status shown as failed from Downloader, but {0} valid video files found. Setting as success.'.format(good_files), section)
status = 0 status = 0
elif num_files and good_files < num_files: elif num_files and good_files < num_files:
logger.info("Status shown as success from Downloader, but corrupt video files found. Setting as failed.", section) logger.info('Status shown as success from Downloader, but corrupt video files found. Setting as failed.', section)
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD') print('[NZB] MARK=BAD')
if failure_link: if failure_link:
failure_link += '&corrupt=true' failure_link += '&corrupt=true'
status = 1 status = 1
elif client_agent == "manual": elif client_agent == 'manual':
logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section) logger.warning('No media files found in directory {0} to manually process.'.format(dir_name), section)
return ProcessResult( return ProcessResult(
message="", message='',
status_code=0, # Success (as far as this script is concerned) status_code=0, # Success (as far as this script is concerned)
) )
else: else:
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section) logger.warning('No media files found in directory {0}. Processing this as a failed download'.format(dir_name), section)
status = 1 status = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD') print('[NZB] MARK=BAD')
@ -134,24 +134,24 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
if core.TRANSCODE == 1: if core.TRANSCODE == 1:
result, new_dir_name = transcoder.transcode_directory(dir_name) result, new_dir_name = transcoder.transcode_directory(dir_name)
if result == 0: if result == 0:
logger.debug("Transcoding succeeded for files in {0}".format(dir_name), section) logger.debug('Transcoding succeeded for files in {0}'.format(dir_name), section)
dir_name = new_dir_name dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8) chmod_directory = int(str(cfg.get('chmodDirectory', '0')), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section) logger.debug('Config setting \'chmodDirectory\' currently set to {0}'.format(oct(chmod_directory)), section)
if chmod_directory: if chmod_directory:
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section) logger.info('Attempting to set the octal permission of \'{0}\' on directory \'{1}\''.format(oct(chmod_directory), dir_name), section)
core.rchmod(dir_name, chmod_directory) core.rchmod(dir_name, chmod_directory)
else: else:
logger.error("Transcoding failed for files in {0}".format(dir_name), section) logger.error('Transcoding failed for files in {0}'.format(dir_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Transcoding failed".format(section), message='{0}: Failed to post-process - Transcoding failed'.format(section),
status_code=1, status_code=1,
) )
for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False): for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
if not release and ".cp(tt" not in video and imdbid: if not release and '.cp(tt' not in video and imdbid:
video_name, video_ext = os.path.splitext(video) video_name, video_ext = os.path.splitext(video)
video2 = "{0}.cp({1}){2}".format(video_name, imdbid, video_ext) video2 = '{0}.cp({1}){2}'.format(video_name, imdbid, video_ext)
if not (client_agent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'): if not (client_agent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'):
logger.debug('Renaming: {0} to: {1}'.format(video, video2)) logger.debug('Renaming: {0} to: {1}'.format(video, video2))
os.rename(video, video2) os.rename(video, video2)
@ -159,7 +159,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
if not apikey: # If only using Transcoder functions, exit here. if not apikey: # If only using Transcoder functions, exit here.
logger.info('No CouchPotato or Radarr apikey entered. Processing completed.') logger.info('No CouchPotato or Radarr apikey entered. Processing completed.')
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
@ -170,157 +170,157 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
params['media_folder'] = remote_dir(dir_name) if remote_path else dir_name params['media_folder'] = remote_dir(dir_name) if remote_path else dir_name
if section == "CouchPotato": if section == 'CouchPotato':
if method == "manage": if method == 'manage':
command = "manage.update" command = 'manage.update'
params = {} params = {}
else: else:
command = "renamer.scan" command = 'renamer.scan'
url = "{0}{1}".format(base_url, command) url = '{0}{1}'.format(base_url, command)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params), section)
logger.postprocess("Starting {0} scan for {1}".format(method, input_name), section) logger.postprocess('Starting {0} scan for {1}'.format(method, input_name), section)
if section == "Radarr": if section == 'Radarr':
payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id, 'importMode': import_mode} payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id, 'importMode': import_mode}
if not download_id: if not download_id:
payload.pop("downloadClientId") payload.pop('downloadClientId')
logger.debug("Opening URL: {0} with PARAMS: {1}".format(base_url, payload), section) logger.debug('Opening URL: {0} with PARAMS: {1}'.format(base_url, payload), section)
logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(input_name), section) logger.postprocess('Starting DownloadedMoviesScan scan for {0}'.format(input_name), section)
try: try:
if section == "CouchPotato": if section == 'CouchPotato':
r = requests.get(url, params=params, verify=False, timeout=(30, 1800)) r = requests.get(url, params=params, verify=False, timeout=(30, 1800))
else: else:
r = requests.post(base_url, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800)) r = requests.post(base_url, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL", section) logger.error('Unable to open URL', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {0}".format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1, status_code=1,
) )
result = r.json() result = r.json()
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
status_code=1, status_code=1,
) )
elif section == "CouchPotato" and result['success']: elif section == 'CouchPotato' and result['success']:
logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dir_name), section) logger.postprocess('SUCCESS: Finished {0} scan for folder {1}'.format(method, dir_name), section)
if method == "manage": if method == 'manage':
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
elif section == "Radarr": elif section == 'Radarr':
logger.postprocess("Radarr response: {0}".format(result['state'])) logger.postprocess('Radarr response: {0}'.format(result['state']))
try: try:
res = json.loads(r.content) res = json.loads(r.content)
scan_id = int(res['id']) scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section) logger.debug('Scan started with id: {0}'.format(scan_id), section)
started = True started = True
except Exception as e: except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section) logger.warning('No scan id was returned due to: {0}'.format(e), section)
scan_id = None scan_id = None
else: else:
logger.error("FAILED: {0} scan was unable to finish for folder {1}. exiting!".format(method, dir_name), logger.error('FAILED: {0} scan was unable to finish for folder {1}. exiting!'.format(method, dir_name),
section) section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Server did not return success".format(section), message='{0}: Failed to post-process - Server did not return success'.format(section),
status_code=1, status_code=1,
) )
else: else:
core.FAILED = True core.FAILED = True
logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(input_name), section) logger.postprocess('FAILED DOWNLOAD DETECTED FOR {0}'.format(input_name), section)
if failure_link: if failure_link:
report_nzb(failure_link, client_agent) report_nzb(failure_link, client_agent)
if section == "Radarr": if section == 'Radarr':
logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section) logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(section), section)
return ProcessResult( return ProcessResult(
message="{0}: Download Failed. Sending back to {0}".format(section), message='{0}: Download Failed. Sending back to {0}'.format(section),
status_code=1, # Return as failed to flag this in the downloader. status_code=1, # Return as failed to flag this in the downloader.
) )
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name: if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
remove_dir(dir_name) remove_dir(dir_name)
if not release_id and not media_id: if not release_id and not media_id:
logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(input_name), logger.error('Could not find a downloaded movie in the database matching {0}, exiting!'.format(input_name),
section) section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Failed download not found in {0}".format(section), message='{0}: Failed to post-process - Failed download not found in {0}'.format(section),
status_code=1, status_code=1,
) )
if release_id: if release_id:
logger.postprocess("Setting failed release {0} to ignored ...".format(input_name), section) logger.postprocess('Setting failed release {0} to ignored ...'.format(input_name), section)
url = "{url}release.ignore".format(url=base_url) url = '{url}release.ignore'.format(url=base_url)
params = {'id': release_id} params = {'id': release_id}
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params), section)
try: try:
r = requests.get(url, params=params, verify=False, timeout=(30, 120)) r = requests.get(url, params=params, verify=False, timeout=(30, 120))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL {0}".format(url), section) logger.error('Unable to open URL {0}'.format(url), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {1}".format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1, status_code=1,
) )
result = r.json() result = r.json()
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(
status_code=1, status_code=1,
message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
) )
elif result['success']: elif result['success']:
logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(input_name), section) logger.postprocess('SUCCESS: {0} has been set to ignored ...'.format(input_name), section)
else: else:
logger.warning("FAILED: Unable to set {0} to ignored!".format(input_name), section) logger.warning('FAILED: Unable to set {0} to ignored!'.format(input_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to set {1} to ignored".format(section, input_name), message='{0}: Failed to post-process - Unable to set {1} to ignored'.format(section, input_name),
status_code=1, status_code=1,
) )
logger.postprocess("Trying to snatch the next highest ranked release.", section) logger.postprocess('Trying to snatch the next highest ranked release.', section)
url = "{0}movie.searcher.try_next".format(base_url) url = '{0}movie.searcher.try_next'.format(base_url)
logger.debug("Opening URL: {0}".format(url), section) logger.debug('Opening URL: {0}'.format(url), section)
try: try:
r = requests.get(url, params={'media_id': media_id}, verify=False, timeout=(30, 600)) r = requests.get(url, params={'media_id': media_id}, verify=False, timeout=(30, 600))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL {0}".format(url), section) logger.error('Unable to open URL {0}'.format(url), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {0}".format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1, status_code=1,
) )
result = r.json() result = r.json()
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
status_code=1, status_code=1,
) )
elif result['success']: elif result['success']:
logger.postprocess("SUCCESS: Snatched the next highest release ...", section) logger.postprocess('SUCCESS: Snatched the next highest release ...', section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully snatched next highest release".format(section), message='{0}: Successfully snatched next highest release'.format(section),
status_code=0, status_code=0,
) )
else: else:
logger.postprocess("SUCCESS: Unable to find a new release to snatch now. CP will keep searching!", section) logger.postprocess('SUCCESS: Unable to find a new release to snatch now. CP will keep searching!', section)
return ProcessResult( return ProcessResult(
status_code=0, status_code=0,
message="{0}: No new release found now. {0} will keep searching".format(section), message='{0}: No new release found now. {0} will keep searching'.format(section),
) )
# Added a release that was not in the wanted list so confirm rename successful by finding this movie media.list. # Added a release that was not in the wanted list so confirm rename successful by finding this movie media.list.
@ -330,8 +330,8 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
# we will now check to see if CPS has finished renaming before returning to TorrentToMedia and unpausing. # we will now check to see if CPS has finished renaming before returning to TorrentToMedia and unpausing.
timeout = time.time() + 60 * wait_for timeout = time.time() + 60 * wait_for
while time.time() < timeout: # only wait 2 (default) minutes, then return. while time.time() < timeout: # only wait 2 (default) minutes, then return.
logger.postprocess("Checking for status change, please stand by ...", section) logger.postprocess('Checking for status change, please stand by ...', section)
if section == "CouchPotato": if section == 'CouchPotato':
release = get_release(base_url, imdbid, download_id, release_id) release = get_release(base_url, imdbid, download_id, release_id)
scan_id = None scan_id = None
else: else:
@ -342,50 +342,50 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
title = release[release_id]['title'] title = release[release_id]['title']
release_status_new = release[release_id]['status'] release_status_new = release[release_id]['status']
if release_status_old is None: # we didn't have a release before, but now we do. if release_status_old is None: # we didn't have a release before, but now we do.
logger.postprocess("SUCCESS: Movie {0} has now been added to CouchPotato with release status of [{1}]".format( logger.postprocess('SUCCESS: Movie {0} has now been added to CouchPotato with release status of [{1}]'.format(
title, str(release_status_new).upper()), section) title, str(release_status_new).upper()), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
if release_status_new != release_status_old: if release_status_new != release_status_old:
logger.postprocess("SUCCESS: Release for {0} has now been marked with a status of [{1}]".format( logger.postprocess('SUCCESS: Release for {0} has now been marked with a status of [{1}]'.format(
title, str(release_status_new).upper()), section) title, str(release_status_new).upper()), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
except Exception: except Exception:
pass pass
elif scan_id: elif scan_id:
url = "{0}/{1}".format(base_url, scan_id) url = '{0}/{1}'.format(base_url, scan_id)
command_status = command_complete(url, params, headers, section) command_status = command_complete(url, params, headers, section)
if command_status: if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section) logger.debug('The Scan command return status: {0}'.format(command_status), section)
if command_status in ['completed']: if command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section) logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)] return [0, '{0}: Successfully post-processed {1}'.format(section, input_name)]
elif command_status in ['failed']: elif command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section) logger.debug('The Scan command has failed. Renaming was not successful.', section)
# return ProcessResult( # return ProcessResult(
# message="{0}: Failed to post-process {1}".format(section, input_name), # message='{0}: Failed to post-process {1}'.format(section, input_name),
# status_code=1, # status_code=1,
# ) # )
if not os.path.isdir(dir_name): if not os.path.isdir(dir_name):
logger.postprocess("SUCCESS: Input Directory [{0}] has been processed and removed".format( logger.postprocess('SUCCESS: Input Directory [{0}] has been processed and removed'.format(
dir_name), section) dir_name), section)
return ProcessResult( return ProcessResult(
status_code=0, status_code=0,
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
) )
elif not list_media_files(dir_name, media=True, audio=False, meta=False, archives=True): elif not list_media_files(dir_name, media=True, audio=False, meta=False, archives=True):
logger.postprocess("SUCCESS: Input Directory [{0}] has no remaining media files. This has been fully processed.".format( logger.postprocess('SUCCESS: Input Directory [{0}] has no remaining media files. This has been fully processed.'.format(
dir_name), section) dir_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
@ -393,19 +393,19 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
time.sleep(10 * wait_for) time.sleep(10 * wait_for)
# The status hasn't changed. we have waited wait_for minutes which is more than enough. uTorrent can resume seeding now. # The status hasn't changed. we have waited wait_for minutes which is more than enough. uTorrent can resume seeding now.
if section == "Radarr" and completed_download_handling(url2, headers, section=section): if section == 'Radarr' and completed_download_handling(url2, headers, section=section):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section) logger.debug('The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.'.format(section), section)
return ProcessResult( return ProcessResult(
message="{0}: Complete DownLoad Handling is enabled. Passing back to {0}".format(section), message='{0}: Complete DownLoad Handling is enabled. Passing back to {0}'.format(section),
status_code=status, status_code=status,
) )
logger.warning( logger.warning(
"{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(input_name, wait_for), '{0} does not appear to have changed status after {1} minutes, Please check your logs.'.format(input_name, wait_for),
section, section,
) )
return ProcessResult( return ProcessResult(
status_code=1, status_code=1,
message="{0}: Failed to post-process - No change in status".format(section), message='{0}: Failed to post-process - No change in status'.format(section),
) )
@ -415,39 +415,39 @@ def get_release(base_url, imdb_id=None, download_id=None, release_id=None):
# determine cmd and params to send to CouchPotato to get our results # determine cmd and params to send to CouchPotato to get our results
section = 'movies' section = 'movies'
cmd = "media.list" cmd = 'media.list'
if release_id or imdb_id: if release_id or imdb_id:
section = 'media' section = 'media'
cmd = "media.get" cmd = 'media.get'
params['id'] = release_id or imdb_id params['id'] = release_id or imdb_id
if not (release_id or imdb_id or download_id): if not (release_id or imdb_id or download_id):
logger.debug("No information available to filter CP results") logger.debug('No information available to filter CP results')
return results return results
url = "{0}{1}".format(base_url, cmd) url = '{0}{1}'.format(base_url, cmd)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params)) logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params))
try: try:
r = requests.get(url, params=params, verify=False, timeout=(30, 60)) r = requests.get(url, params=params, verify=False, timeout=(30, 60))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL {0}".format(url)) logger.error('Unable to open URL {0}'.format(url))
return results return results
try: try:
result = r.json() result = r.json()
except ValueError: except ValueError:
# ValueError catches simplejson's JSONDecodeError and json's ValueError # ValueError catches simplejson's JSONDecodeError and json's ValueError
logger.error("CouchPotato returned the following non-json data") logger.error('CouchPotato returned the following non-json data')
for line in r.iter_lines(): for line in r.iter_lines():
logger.error("{0}".format(line)) logger.error('{0}'.format(line))
return results return results
if not result['success']: if not result['success']:
if 'error' in result: if 'error' in result:
logger.error('{0}'.format(result['error'])) logger.error('{0}'.format(result['error']))
else: else:
logger.error("no media found for id {0}".format(params['id'])) logger.error('no media found for id {0}'.format(params['id']))
return results return results
# Gather release info and return it back, no need to narrow results # Gather release info and return it back, no need to narrow results
@ -489,7 +489,7 @@ def get_release(base_url, imdb_id=None, download_id=None, release_id=None):
for id1, x1 in results.items(): for id1, x1 in results.items():
for id2, x2 in results.items(): for id2, x2 in results.items():
try: try:
if x2["last_edit"] > x1["last_edit"]: if x2['last_edit'] > x1['last_edit']:
results.pop(id1) results.pop(id1)
except Exception: except Exception:
continue continue

View file

@ -15,34 +15,34 @@ from core.utils import convert_to_ascii, list_media_files, remote_dir, remove_di
requests.packages.urllib3.disable_warnings() requests.packages.urllib3.disable_warnings()
def process(section, dir_name, input_name=None, status=0, client_agent="manual", input_category=None): def process(section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None):
status = int(status) status = int(status)
cfg = dict(core.CFG[section][input_category]) cfg = dict(core.CFG[section][input_category])
host = cfg["host"] host = cfg['host']
port = cfg["port"] port = cfg['port']
apikey = cfg["apikey"] apikey = cfg['apikey']
wait_for = int(cfg["wait_for"]) wait_for = int(cfg['wait_for'])
ssl = int(cfg.get("ssl", 0)) ssl = int(cfg.get('ssl', 0))
delete_failed = int(cfg["delete_failed"]) delete_failed = int(cfg['delete_failed'])
web_root = cfg.get("web_root", "") web_root = cfg.get('web_root', '')
remote_path = int(cfg.get("remote_path", 0)) remote_path = int(cfg.get('remote_path', 0))
protocol = "https://" if ssl else "http://" protocol = 'https://' if ssl else 'http://'
status = int(status) status = int(status)
if status > 0 and core.NOEXTRACTFAILED: if status > 0 and core.NOEXTRACTFAILED:
extract = 0 extract = 0
else: else:
extract = int(cfg.get("extract", 0)) extract = int(cfg.get('extract', 0))
if section == "Lidarr": if section == 'Lidarr':
url = "{0}{1}:{2}{3}/api/v1".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/api/v1'.format(protocol, host, port, web_root)
else: else:
url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/api'.format(protocol, host, port, web_root)
if not server_responding(url): if not server_responding(url):
logger.error("Server did not respond. Exiting", section) logger.error('Server did not respond. Exiting', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - {0} did not respond.".format(section), message='{0}: Failed to post-process - {0} did not respond.'.format(section),
status_code=1, status_code=1,
) )
@ -51,7 +51,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
specific_path = os.path.join(dir_name, str(input_name)) specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path) clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb": if clean_name[1] == '.nzb':
specific_path = clean_name[0] specific_path = clean_name[0]
if os.path.isdir(specific_path): if os.path.isdir(specific_path):
dir_name = specific_path dir_name = specific_path
@ -65,14 +65,14 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
input_name, dir_name = convert_to_ascii(input_name, dir_name) input_name, dir_name = convert_to_ascii(input_name, dir_name)
# if listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and status: # if listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and status:
# logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section) # logger.info('Status shown as failed from Downloader, but valid video files found. Setting as successful.', section)
# status = 0 # status = 0
if status == 0 and section == "HeadPhones": if status == 0 and section == 'HeadPhones':
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': "forceProcess", 'cmd': 'forceProcess',
'dir': remote_dir(dir_name) if remote_path else dir_name 'dir': remote_dir(dir_name) if remote_path else dir_name
} }
@ -82,7 +82,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': "forceProcess", 'cmd': 'forceProcess',
'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0] 'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0]
} }
@ -91,29 +91,29 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
return res return res
# The status hasn't changed. uTorrent can resume seeding now. # The status hasn't changed. uTorrent can resume seeding now.
logger.warning("The music album does not appear to have changed status after {0} minutes. Please check your Logs".format(wait_for), section) logger.warning('The music album does not appear to have changed status after {0} minutes. Please check your Logs'.format(wait_for), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - No change in wanted status".format(section), message='{0}: Failed to post-process - No change in wanted status'.format(section),
status_code=1, status_code=1,
) )
elif status == 0 and section == "Lidarr": elif status == 0 and section == 'Lidarr':
url = "{0}{1}:{2}{3}/api/v1/command".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/api/v1/command'.format(protocol, host, port, web_root)
headers = {"X-Api-Key": apikey} headers = {'X-Api-Key': apikey}
if remote_path: if remote_path:
logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section) logger.debug('remote_path: {0}'.format(remote_dir(dir_name)), section)
data = {"name": "Rename", "path": remote_dir(dir_name)} data = {'name': 'Rename', 'path': remote_dir(dir_name)}
else: else:
logger.debug("path: {0}".format(dir_name), section) logger.debug('path: {0}'.format(dir_name), section)
data = {"name": "Rename", "path": dir_name} data = {'name': 'Rename', 'path': dir_name}
data = json.dumps(data) data = json.dumps(data)
try: try:
logger.debug("Opening URL: {0} with data: {1}".format(url, data), section) logger.debug('Opening URL: {0} with data: {1}'.format(url, data), section)
r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800)) r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL: {0}".format(url), section) logger.error('Unable to open URL: {0}'.format(url), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {0}".format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1, status_code=1,
) )
@ -123,20 +123,20 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
try: try:
res = json.loads(r.content) res = json.loads(r.content)
scan_id = int(res['id']) scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section) logger.debug('Scan started with id: {0}'.format(scan_id), section)
started = True started = True
except Exception as e: except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section) logger.warning('No scan id was returned due to: {0}'.format(e), section)
scan_id = None scan_id = None
started = False started = False
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to start scan".format(section), message='{0}: Failed to post-process - Unable to start scan'.format(section),
status_code=1, status_code=1,
) )
n = 0 n = 0
params = {} params = {}
url = "{0}/{1}".format(url, scan_id) url = '{0}/{1}'.format(url, scan_id)
while n < 6: # set up wait_for minutes to see if command completes.. while n < 6: # set up wait_for minutes to see if command completes..
time.sleep(10 * wait_for) time.sleep(10 * wait_for)
command_status = command_complete(url, params, headers, section) command_status = command_complete(url, params, headers, section)
@ -144,64 +144,64 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual",
break break
n += 1 n += 1
if command_status: if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section) logger.debug('The Scan command return status: {0}'.format(command_status), section)
if not os.path.exists(dir_name): if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section) logger.debug('The directory {0} has been removed. Renaming was successful.'.format(dir_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
elif command_status and command_status in ['completed']: elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section) logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
elif command_status and command_status in ['failed']: elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section) logger.debug('The Scan command has failed. Renaming was not successful.', section)
# return ProcessResult( # return ProcessResult(
# message="{0}: Failed to post-process {1}".format(section, input_name), # message='{0}: Failed to post-process {1}'.format(section, input_name),
# status_code=1, # status_code=1,
# ) # )
else: else:
logger.debug("The Scan command did not return status completed. Passing back to {0} to attempt complete download handling.".format(section), section) logger.debug('The Scan command did not return status completed. Passing back to {0} to attempt complete download handling.'.format(section), section)
return ProcessResult( return ProcessResult(
message="{0}: Passing back to {0} to attempt Complete Download Handling".format(section), message='{0}: Passing back to {0} to attempt Complete Download Handling'.format(section),
status_code=status, status_code=status,
) )
else: else:
if section == "Lidarr": if section == 'Lidarr':
logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section) logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(section), section)
return ProcessResult( return ProcessResult(
message="{0}: Download Failed. Sending back to {0}".format(section), message='{0}: Download Failed. Sending back to {0}'.format(section),
status_code=1, # Return as failed to flag this in the downloader. status_code=1, # Return as failed to flag this in the downloader.
) )
else: else:
logger.warning("FAILED DOWNLOAD DETECTED", section) logger.warning('FAILED DOWNLOAD DETECTED', section)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name: if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
remove_dir(dir_name) remove_dir(dir_name)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process. {0} does not support failed downloads".format(section), message='{0}: Failed to post-process. {0} does not support failed downloads'.format(section),
status_code=1, # Return as failed to flag this in the downloader. status_code=1, # Return as failed to flag this in the downloader.
) )
def get_status(url, apikey, dir_name): def get_status(url, apikey, dir_name):
logger.debug("Attempting to get current status for release:{0}".format(os.path.basename(dir_name))) logger.debug('Attempting to get current status for release:{0}'.format(os.path.basename(dir_name)))
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': "getHistory" 'cmd': 'getHistory'
} }
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params)) logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params))
try: try:
r = requests.get(url, params=params, verify=False, timeout=(30, 120)) r = requests.get(url, params=params, verify=False, timeout=(30, 120))
except requests.RequestException: except requests.RequestException:
logger.error("Unable to open URL") logger.error('Unable to open URL')
return None return None
try: try:
@ -212,39 +212,39 @@ def get_status(url, apikey, dir_name):
for album in result: for album in result:
if os.path.basename(dir_name) == album['FolderName']: if os.path.basename(dir_name) == album['FolderName']:
return album["Status"].lower() return album['Status'].lower()
def force_process(params, url, apikey, input_name, dir_name, section, wait_for): def force_process(params, url, apikey, input_name, dir_name, section, wait_for):
release_status = get_status(url, apikey, dir_name) release_status = get_status(url, apikey, dir_name)
if not release_status: if not release_status:
logger.error("Could not find a status for {0}, is it in the wanted list ?".format(input_name), section) logger.error('Could not find a status for {0}, is it in the wanted list ?'.format(input_name), section)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params), section)
try: try:
r = requests.get(url, params=params, verify=False, timeout=(30, 300)) r = requests.get(url, params=params, verify=False, timeout=(30, 300))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL {0}".format(url), section) logger.error('Unable to open URL {0}'.format(url), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {0}".format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1, status_code=1,
) )
logger.debug("Result: {0}".format(r.text), section) logger.debug('Result: {0}'.format(r.text), section)
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
status_code=1, status_code=1,
) )
elif r.text == "OK": elif r.text == 'OK':
logger.postprocess("SUCCESS: Post-Processing started for {0} in folder {1} ...".format(input_name, dir_name), section) logger.postprocess('SUCCESS: Post-Processing started for {0} in folder {1} ...'.format(input_name, dir_name), section)
else: else:
logger.error("FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!".format(input_name, dir_name), section) logger.error('FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!'.format(input_name, dir_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section),
status_code=1, status_code=1,
) )
@ -253,20 +253,20 @@ def force_process(params, url, apikey, input_name, dir_name, section, wait_for):
while time.time() < timeout: while time.time() < timeout:
current_status = get_status(url, apikey, dir_name) current_status = get_status(url, apikey, dir_name)
if current_status is not None and current_status != release_status: # Something has changed. CPS must have processed this movie. if current_status is not None and current_status != release_status: # Something has changed. CPS must have processed this movie.
logger.postprocess("SUCCESS: This release is now marked as status [{0}]".format(current_status), section) logger.postprocess('SUCCESS: This release is now marked as status [{0}]'.format(current_status), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
if not os.path.isdir(dir_name): if not os.path.isdir(dir_name):
logger.postprocess("SUCCESS: The input directory {0} has been removed Processing must have finished.".format(dir_name), section) logger.postprocess('SUCCESS: The input directory {0} has been removed Processing must have finished.'.format(dir_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
time.sleep(10 * wait_for) time.sleep(10 * wait_for)
# The status hasn't changed. # The status hasn't changed.
return ProcessResult( return ProcessResult(
message="no change", message='no change',
status_code=2, status_code=2,
) )

View file

@ -18,56 +18,56 @@ from core.utils import convert_to_ascii, flatten, import_subs, list_media_files,
requests.packages.urllib3.disable_warnings() requests.packages.urllib3.disable_warnings()
def process(section, dir_name, input_name=None, failed=False, client_agent="manual", download_id=None, input_category=None, failure_link=None): def process(section, dir_name, input_name=None, failed=False, client_agent='manual', download_id=None, input_category=None, failure_link=None):
cfg = dict(core.CFG[section][input_category]) cfg = dict(core.CFG[section][input_category])
host = cfg["host"] host = cfg['host']
port = cfg["port"] port = cfg['port']
ssl = int(cfg.get("ssl", 0)) ssl = int(cfg.get('ssl', 0))
web_root = cfg.get("web_root", "") web_root = cfg.get('web_root', '')
protocol = "https://" if ssl else "http://" protocol = 'https://' if ssl else 'http://'
username = cfg.get("username", "") username = cfg.get('username', '')
password = cfg.get("password", "") password = cfg.get('password', '')
apikey = cfg.get("apikey", "") apikey = cfg.get('apikey', '')
if server_responding("{0}{1}:{2}{3}".format(protocol, host, port, web_root)): if server_responding('{0}{1}:{2}{3}'.format(protocol, host, port, web_root)):
# auto-detect correct fork # auto-detect correct fork
fork, fork_params = auto_fork(section, input_category) fork, fork_params = auto_fork(section, input_category)
elif not username and not apikey: elif not username and not apikey:
logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only') logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only')
fork, fork_params = "None", {} fork, fork_params = 'None', {}
else: else:
logger.error("Server did not respond. Exiting", section) logger.error('Server did not respond. Exiting', section)
return ProcessResult( return ProcessResult(
status_code=1, status_code=1,
message="{0}: Failed to post-process - {0} did not respond.".format(section), message='{0}: Failed to post-process - {0} did not respond.'.format(section),
) )
delete_failed = int(cfg.get("delete_failed", 0)) delete_failed = int(cfg.get('delete_failed', 0))
nzb_extraction_by = cfg.get("nzbExtractionBy", "Downloader") nzb_extraction_by = cfg.get('nzbExtractionBy', 'Downloader')
process_method = cfg.get("process_method") process_method = cfg.get('process_method')
if client_agent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym": if client_agent == core.TORRENT_CLIENTAGENT and core.USELINK == 'move-sym':
process_method = "symlink" process_method = 'symlink'
remote_path = int(cfg.get("remote_path", 0)) remote_path = int(cfg.get('remote_path', 0))
wait_for = int(cfg.get("wait_for", 2)) wait_for = int(cfg.get('wait_for', 2))
force = int(cfg.get("force", 0)) force = int(cfg.get('force', 0))
delete_on = int(cfg.get("delete_on", 0)) delete_on = int(cfg.get('delete_on', 0))
ignore_subs = int(cfg.get("ignore_subs", 0)) ignore_subs = int(cfg.get('ignore_subs', 0))
status = int(failed) status = int(failed)
if status > 0 and core.NOEXTRACTFAILED: if status > 0 and core.NOEXTRACTFAILED:
extract = 0 extract = 0
else: else:
extract = int(cfg.get("extract", 0)) extract = int(cfg.get('extract', 0))
# get importmode, default to "Move" for consistency with legacy # get importmode, default to 'Move' for consistency with legacy
import_mode = cfg.get("importMode", "Move") import_mode = cfg.get('importMode', 'Move')
if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name. if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dir_name = os.path.split(os.path.normpath(dir_name))[0] dir_name = os.path.split(os.path.normpath(dir_name))[0]
specific_path = os.path.join(dir_name, str(input_name)) specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path) clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb": if clean_name[1] == '.nzb':
specific_path = clean_name[0] specific_path = clean_name[0]
if os.path.isdir(specific_path): if os.path.isdir(specific_path):
dir_name = specific_path dir_name = specific_path
@ -82,7 +82,7 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu
if e.errno != errno.EEXIST: if e.errno != errno.EEXIST:
raise raise
if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != "Destination"): if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != 'Destination'):
if input_name: if input_name:
process_all_exceptions(input_name, dir_name) process_all_exceptions(input_name, dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name) input_name, dir_name = convert_to_ascii(input_name, dir_name)
@ -118,24 +118,24 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu
print('[NZB] MARK=BAD') print('[NZB] MARK=BAD')
if failure_link: if failure_link:
failure_link += '&corrupt=true' failure_link += '&corrupt=true'
elif client_agent == "manual": elif client_agent == 'manual':
logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section) logger.warning('No media files found in directory {0} to manually process.'.format(dir_name), section)
return ProcessResult( return ProcessResult(
message="", message='',
status_code=0, # Success (as far as this script is concerned) status_code=0, # Success (as far as this script is concerned)
) )
elif nzb_extraction_by == "Destination": elif nzb_extraction_by == 'Destination':
logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.") logger.info('Check for media files ignored because nzbExtractionBy is set to Destination.')
if int(failed) == 0: if int(failed) == 0:
logger.info("Setting Status Success.") logger.info('Setting Status Success.')
status = 0 status = 0
failed = 0 failed = 0
else: else:
logger.info("Downloader reported an error during download or verification. Processing this as a failed download.") logger.info('Downloader reported an error during download or verification. Processing this as a failed download.')
status = 1 status = 1
failed = 1 failed = 1
else: else:
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section) logger.warning('No media files found in directory {0}. Processing this as a failed download'.format(dir_name), section)
status = 1 status = 1
failed = 1 failed = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
@ -144,18 +144,18 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu
if status == 0 and core.TRANSCODE == 1: # only transcode successful downloads if status == 0 and core.TRANSCODE == 1: # only transcode successful downloads
result, new_dir_name = transcoder.transcode_directory(dir_name) result, new_dir_name = transcoder.transcode_directory(dir_name)
if result == 0: if result == 0:
logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dir_name), section) logger.debug('SUCCESS: Transcoding succeeded for files in {0}'.format(dir_name), section)
dir_name = new_dir_name dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8) chmod_directory = int(str(cfg.get('chmodDirectory', '0')), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section) logger.debug('Config setting \'chmodDirectory\' currently set to {0}'.format(oct(chmod_directory)), section)
if chmod_directory: if chmod_directory:
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section) logger.info('Attempting to set the octal permission of \'{0}\' on directory \'{1}\''.format(oct(chmod_directory), dir_name), section)
core.rchmod(dir_name, chmod_directory) core.rchmod(dir_name, chmod_directory)
else: else:
logger.error("FAILED: Transcoding failed for files in {0}".format(dir_name), section) logger.error('FAILED: Transcoding failed for files in {0}'.format(dir_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Transcoding failed".format(section), message='{0}: Failed to post-process - Transcoding failed'.format(section),
status_code=1, status_code=1,
) )
@ -166,140 +166,140 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu
fork_params['nzbName'] = input_name fork_params['nzbName'] = input_name
for param in copy.copy(fork_params): for param in copy.copy(fork_params):
if param == "failed": if param == 'failed':
fork_params[param] = failed fork_params[param] = failed
del fork_params['proc_type'] del fork_params['proc_type']
if "type" in fork_params: if 'type' in fork_params:
del fork_params['type'] del fork_params['type']
if param == "return_data": if param == 'return_data':
fork_params[param] = 0 fork_params[param] = 0
del fork_params['quiet'] del fork_params['quiet']
if param == "type": if param == 'type':
fork_params[param] = 'manual' fork_params[param] = 'manual'
if "proc_type" in fork_params: if 'proc_type' in fork_params:
del fork_params['proc_type'] del fork_params['proc_type']
if param in ["dir_name", "dir", "proc_dir", "process_directory", "path"]: if param in ['dir_name', 'dir', 'proc_dir', 'process_directory', 'path']:
fork_params[param] = dir_name fork_params[param] = dir_name
if remote_path: if remote_path:
fork_params[param] = remote_dir(dir_name) fork_params[param] = remote_dir(dir_name)
if param == "process_method": if param == 'process_method':
if process_method: if process_method:
fork_params[param] = process_method fork_params[param] = process_method
else: else:
del fork_params[param] del fork_params[param]
if param in ["force", "force_replace"]: if param in ['force', 'force_replace']:
if force: if force:
fork_params[param] = force fork_params[param] = force
else: else:
del fork_params[param] del fork_params[param]
if param in ["delete_on", "delete"]: if param in ['delete_on', 'delete']:
if delete_on: if delete_on:
fork_params[param] = delete_on fork_params[param] = delete_on
else: else:
del fork_params[param] del fork_params[param]
if param == "ignore_subs": if param == 'ignore_subs':
if ignore_subs: if ignore_subs:
fork_params[param] = ignore_subs fork_params[param] = ignore_subs
else: else:
del fork_params[param] del fork_params[param]
if param == "force_next": if param == 'force_next':
fork_params[param] = 1 fork_params[param] = 1
# delete any unused params so we don't pass them to SB by mistake # delete any unused params so we don't pass them to SB by mistake
[fork_params.pop(k) for k, v in fork_params.items() if v is None] [fork_params.pop(k) for k, v in fork_params.items() if v is None]
if status == 0: if status == 0:
if section == "NzbDrone" and not apikey: if section == 'NzbDrone' and not apikey:
logger.info('No Sonarr apikey entered. Processing completed.') logger.info('No Sonarr apikey entered. Processing completed.')
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section) logger.postprocess('SUCCESS: The download succeeded, sending a post-process request', section)
else: else:
core.FAILED = True core.FAILED = True
if failure_link: if failure_link:
report_nzb(failure_link, client_agent) report_nzb(failure_link, client_agent)
if 'failed' in fork_params: if 'failed' in fork_params:
logger.postprocess("FAILED: The download failed. Sending 'failed' process request to {0} branch".format(fork), section) logger.postprocess('FAILED: The download failed. Sending \'failed\' process request to {0} branch'.format(fork), section)
elif section == "NzbDrone": elif section == 'NzbDrone':
logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(fork), section) logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(fork), section)
return ProcessResult( return ProcessResult(
message="{0}: Download Failed. Sending back to {0}".format(section), message='{0}: Download Failed. Sending back to {0}'.format(section),
status_code=1, # Return as failed to flag this in the downloader. status_code=1, # Return as failed to flag this in the downloader.
) )
else: else:
logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section) logger.postprocess('FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process'.format(fork), section)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name: if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
remove_dir(dir_name) remove_dir(dir_name)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process. {0} does not support failed downloads".format(section), message='{0}: Failed to post-process. {0} does not support failed downloads'.format(section),
status_code=1, # Return as failed to flag this in the downloader. status_code=1, # Return as failed to flag this in the downloader.
) )
url = None url = None
if section == "SickBeard": if section == 'SickBeard':
if apikey: if apikey:
url = "{0}{1}:{2}{3}/api/{4}/?cmd=postprocess".format(protocol, host, port, web_root, apikey) url = '{0}{1}:{2}{3}/api/{4}/?cmd=postprocess'.format(protocol, host, port, web_root, apikey)
else: else:
url = "{0}{1}:{2}{3}/home/postprocess/processEpisode".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/home/postprocess/processEpisode'.format(protocol, host, port, web_root)
elif section == "NzbDrone": elif section == 'NzbDrone':
url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/api/command'.format(protocol, host, port, web_root)
url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root) url2 = '{0}{1}:{2}{3}/api/config/downloadClient'.format(protocol, host, port, web_root)
headers = {"X-Api-Key": apikey} headers = {'X-Api-Key': apikey}
# params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'} # params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
if remote_path: if remote_path:
logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section) logger.debug('remote_path: {0}'.format(remote_dir(dir_name)), section)
data = {"name": "DownloadedEpisodesScan", "path": remote_dir(dir_name), "downloadClientId": download_id, "importMode": import_mode} data = {'name': 'DownloadedEpisodesScan', 'path': remote_dir(dir_name), 'downloadClientId': download_id, 'importMode': import_mode}
else: else:
logger.debug("path: {0}".format(dir_name), section) logger.debug('path: {0}'.format(dir_name), section)
data = {"name": "DownloadedEpisodesScan", "path": dir_name, "downloadClientId": download_id, "importMode": import_mode} data = {'name': 'DownloadedEpisodesScan', 'path': dir_name, 'downloadClientId': download_id, 'importMode': import_mode}
if not download_id: if not download_id:
data.pop("downloadClientId") data.pop('downloadClientId')
data = json.dumps(data) data = json.dumps(data)
try: try:
if section == "SickBeard": if section == 'SickBeard':
logger.debug("Opening URL: {0} with params: {1}".format(url, fork_params), section) logger.debug('Opening URL: {0} with params: {1}'.format(url, fork_params), section)
s = requests.Session() s = requests.Session()
if not apikey and username and password: if not apikey and username and password:
login = "{0}{1}:{2}{3}/login".format(protocol, host, port, web_root) login = '{0}{1}:{2}{3}/login'.format(protocol, host, port, web_root)
login_params = {'username': username, 'password': password} login_params = {'username': username, 'password': password}
r = s.get(login, verify=False, timeout=(30, 60)) r = s.get(login, verify=False, timeout=(30, 60))
if r.status_code == 401 and r.cookies.get('_xsrf'): if r.status_code == 401 and r.cookies.get('_xsrf'):
login_params['_xsrf'] = r.cookies.get('_xsrf') login_params['_xsrf'] = r.cookies.get('_xsrf')
s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60)) s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800)) r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800))
elif section == "NzbDrone": elif section == 'NzbDrone':
logger.debug("Opening URL: {0} with data: {1}".format(url, data), section) logger.debug('Opening URL: {0} with data: {1}'.format(url, data), section)
r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800)) r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL: {0}".format(url), section) logger.error('Unable to open URL: {0}'.format(url), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Unable to connect to {0}".format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1, status_code=1,
) )
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error("Server returned status {0}".format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
status_code=1, status_code=1,
) )
success = False success = False
queued = False queued = False
started = False started = False
if section == "SickBeard": if section == 'SickBeard':
if apikey: if apikey:
if r.json()['result'] == 'success': if r.json()['result'] == 'success':
success = True success = True
@ -307,40 +307,40 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu
for line in r.iter_lines(): for line in r.iter_lines():
if line: if line:
line = line.decode('utf-8') line = line.decode('utf-8')
logger.postprocess("{0}".format(line), section) logger.postprocess('{0}'.format(line), section)
if "Moving file from" in line: if 'Moving file from' in line:
input_name = os.path.split(line)[1] input_name = os.path.split(line)[1]
if "added to the queue" in line: if 'added to the queue' in line:
queued = True queued = True
if "Processing succeeded" in line or "Successfully processed" in line: if 'Processing succeeded' in line or 'Successfully processed' in line:
success = True success = True
if queued: if queued:
time.sleep(60) time.sleep(60)
elif section == "NzbDrone": elif section == 'NzbDrone':
try: try:
res = json.loads(r.content) res = json.loads(r.content)
scan_id = int(res['id']) scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section) logger.debug('Scan started with id: {0}'.format(scan_id), section)
started = True started = True
except Exception as e: except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section) logger.warning('No scan id was returned due to: {0}'.format(e), section)
scan_id = None scan_id = None
started = False started = False
if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name: if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
remove_dir(dir_name) remove_dir(dir_name)
if success: if success:
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
elif section == "NzbDrone" and started: elif section == 'NzbDrone' and started:
n = 0 n = 0
params = {} params = {}
url = "{0}/{1}".format(url, scan_id) url = '{0}/{1}'.format(url, scan_id)
while n < 6: # set up wait_for minutes to see if command completes.. while n < 6: # set up wait_for minutes to see if command completes..
time.sleep(10 * wait_for) time.sleep(10 * wait_for)
command_status = command_complete(url, params, headers, section) command_status = command_complete(url, params, headers, section)
@ -348,39 +348,39 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu
break break
n += 1 n += 1
if command_status: if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section) logger.debug('The Scan command return status: {0}'.format(command_status), section)
if not os.path.exists(dir_name): if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section) logger.debug('The directory {0} has been removed. Renaming was successful.'.format(dir_name), section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
elif command_status and command_status in ['completed']: elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section) logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
return ProcessResult( return ProcessResult(
message="{0}: Successfully post-processed {1}".format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
elif command_status and command_status in ['failed']: elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section) logger.debug('The Scan command has failed. Renaming was not successful.', section)
# return ProcessResult( # return ProcessResult(
# message="{0}: Failed to post-process {1}".format(section, input_name), # message='{0}: Failed to post-process {1}'.format(section, input_name),
# status_code=1, # status_code=1,
# ) # )
if completed_download_handling(url2, headers, section=section): if completed_download_handling(url2, headers, section=section):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section) logger.debug('The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.'.format(section), section)
return ProcessResult( return ProcessResult(
message="{0}: Complete DownLoad Handling is enabled. Passing back to {0}".format(section), message='{0}: Complete DownLoad Handling is enabled. Passing back to {0}'.format(section),
status_code=status, status_code=status,
) )
else: else:
logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section) logger.warning('The Scan command did not return a valid status. Renaming was not successful.', section)
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process {1}".format(section, input_name), message='{0}: Failed to post-process {1}'.format(section, input_name),
status_code=1, status_code=1,
) )
else: else:
return ProcessResult( return ProcessResult(
message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section),
status_code=1, # We did not receive Success confirmation. status_code=1, # We did not receive Success confirmation.
) )

View file

@ -120,7 +120,7 @@ class ConfigObj(configobj.ConfigObj, Section):
shutil.copyfile(core.CONFIG_SPEC_FILE, core.CONFIG_FILE) shutil.copyfile(core.CONFIG_SPEC_FILE, core.CONFIG_FILE)
CFG_OLD = config(core.CONFIG_FILE) CFG_OLD = config(core.CONFIG_FILE)
except Exception as error: except Exception as error:
logger.debug("Error {msg} when copying to .cfg".format(msg=error)) logger.debug('Error {msg} when copying to .cfg'.format(msg=error))
try: try:
# check for autoProcessMedia.cfg.spec and create if it does not exist # check for autoProcessMedia.cfg.spec and create if it does not exist
@ -128,7 +128,7 @@ class ConfigObj(configobj.ConfigObj, Section):
shutil.copyfile(core.CONFIG_FILE, core.CONFIG_SPEC_FILE) shutil.copyfile(core.CONFIG_FILE, core.CONFIG_SPEC_FILE)
CFG_NEW = config(core.CONFIG_SPEC_FILE) CFG_NEW = config(core.CONFIG_SPEC_FILE)
except Exception as error: except Exception as error:
logger.debug("Error {msg} when copying to .spec".format(msg=error)) logger.debug('Error {msg} when copying to .spec'.format(msg=error))
# check for autoProcessMedia.cfg and autoProcessMedia.cfg.spec and if they don't exist return and fail # check for autoProcessMedia.cfg and autoProcessMedia.cfg.spec and if they don't exist return and fail
if CFG_NEW is None or CFG_OLD is None: if CFG_NEW is None or CFG_OLD is None:
@ -143,7 +143,7 @@ class ConfigObj(configobj.ConfigObj, Section):
if CFG_OLD[section].sections: if CFG_OLD[section].sections:
subsections.update({section: CFG_OLD[section].sections}) subsections.update({section: CFG_OLD[section].sections})
for option, value in CFG_OLD[section].items(): for option, value in CFG_OLD[section].items():
if option in ["category", "cpsCategory", "sbCategory", "hpCategory", "mlCategory", "gzCategory", "raCategory", "ndCategory"]: if option in ['category', 'cpsCategory', 'sbCategory', 'hpCategory', 'mlCategory', 'gzCategory', 'raCategory', 'ndCategory']:
if not isinstance(value, list): if not isinstance(value, list):
value = [value] value = [value]
@ -161,34 +161,34 @@ class ConfigObj(configobj.ConfigObj, Section):
if section in ['CouchPotato', 'HeadPhones', 'Gamez', 'Mylar']: if section in ['CouchPotato', 'HeadPhones', 'Gamez', 'Mylar']:
if option in ['username', 'password']: if option in ['username', 'password']:
values.pop(option) values.pop(option)
if section in ["SickBeard", "Mylar"]: if section in ['SickBeard', 'Mylar']:
if option == "wait_for": # remove old format if option == 'wait_for': # remove old format
values.pop(option) values.pop(option)
if section in ["SickBeard", "NzbDrone"]: if section in ['SickBeard', 'NzbDrone']:
if option == "failed_fork": # change this old format if option == 'failed_fork': # change this old format
values['failed'] = 'auto' values['failed'] = 'auto'
values.pop(option) values.pop(option)
if option == "outputDirectory": # move this to new location format if option == 'outputDirectory': # move this to new location format
CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0] CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]
values.pop(option) values.pop(option)
if section in ["Torrent"]: if section in ['Torrent']:
if option in ["compressedExtensions", "mediaExtensions", "metaExtensions", "minSampleSize"]: if option in ['compressedExtensions', 'mediaExtensions', 'metaExtensions', 'minSampleSize']:
CFG_NEW['Extensions'][option] = value CFG_NEW['Extensions'][option] = value
values.pop(option) values.pop(option)
if option == "useLink": # Sym links supported now as well. if option == 'useLink': # Sym links supported now as well.
if value in ['1', 1]: if value in ['1', 1]:
value = 'hard' value = 'hard'
elif value in ['0', 0]: elif value in ['0', 0]:
value = 'no' value = 'no'
values[option] = value values[option] = value
if option == "forceClean": if option == 'forceClean':
CFG_NEW['General']['force_clean'] = value CFG_NEW['General']['force_clean'] = value
values.pop(option) values.pop(option)
if section in ["Transcoder"]: if section in ['Transcoder']:
if option in ["niceness"]: if option in ['niceness']:
CFG_NEW['Posix'][option] = value CFG_NEW['Posix'][option] = value
values.pop(option) values.pop(option)
if option == "remote_path": if option == 'remote_path':
if value and value not in ['0', '1', 0, 1]: if value and value not in ['0', '1', 0, 1]:
value = 1 value = 1
elif not value: elif not value:
@ -239,7 +239,7 @@ class ConfigObj(configobj.ConfigObj, Section):
process_section(section, subsection) process_section(section, subsection)
# create a backup of our old config # create a backup of our old config
CFG_OLD.filename = "{config}.old".format(config=core.CONFIG_FILE) CFG_OLD.filename = '{config}.old'.format(config=core.CONFIG_FILE)
CFG_OLD.write() CFG_OLD.write()
# write our new config to autoProcessMedia.cfg # write our new config to autoProcessMedia.cfg
@ -256,27 +256,27 @@ class ConfigObj(configobj.ConfigObj, Section):
try: try:
if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ: if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']: if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']:
logger.warning("{x} category is set for SickBeard and Sonarr. " logger.warning('{x} category is set for SickBeard and Sonarr. '
"Please check your config in NZBGet".format 'Please check your config in NZBGet'.format
(x=os.environ['NZBPO_NDCATEGORY'])) (x=os.environ['NZBPO_NDCATEGORY']))
if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ: if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_CPSCATEGORY']: if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
logger.warning("{x} category is set for CouchPotato and Radarr. " logger.warning('{x} category is set for CouchPotato and Radarr. '
"Please check your config in NZBGet".format 'Please check your config in NZBGet'.format
(x=os.environ['NZBPO_RACATEGORY'])) (x=os.environ['NZBPO_RACATEGORY']))
if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ: if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ:
if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']: if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']:
logger.warning("{x} category is set for HeadPhones and Lidarr. " logger.warning('{x} category is set for HeadPhones and Lidarr. '
"Please check your config in NZBGet".format 'Please check your config in NZBGet'.format
(x=os.environ['NZBPO_LICATEGORY'])) (x=os.environ['NZBPO_LICATEGORY']))
section = "Nzb" section = 'Nzb'
key = 'NZBOP_DESTDIR' key = 'NZBOP_DESTDIR'
if key in os.environ: if key in os.environ:
option = 'default_downloadDirectory' option = 'default_downloadDirectory'
value = os.environ[key] value = os.environ[key]
cfg_new[section][option] = value cfg_new[section][option] = value
section = "General" section = 'General'
env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED'] env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfg_keys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed'] cfg_keys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed']
for index in range(len(env_keys)): for index in range(len(env_keys)):
@ -286,7 +286,7 @@ class ConfigObj(configobj.ConfigObj, Section):
value = os.environ[key] value = os.environ[key]
cfg_new[section][option] = value cfg_new[section][option] = value
section = "Network" section = 'Network'
env_keys = ['MOUNTPOINTS'] env_keys = ['MOUNTPOINTS']
cfg_keys = ['mount_points'] cfg_keys = ['mount_points']
for index in range(len(env_keys)): for index in range(len(env_keys)):
@ -296,7 +296,7 @@ class ConfigObj(configobj.ConfigObj, Section):
value = os.environ[key] value = os.environ[key]
cfg_new[section][option] = value cfg_new[section][option] = value
section = "CouchPotato" section = 'CouchPotato'
env_cat_key = 'NZBPO_CPSCATEGORY' env_cat_key = 'NZBPO_CPSCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY'] 'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
@ -315,7 +315,7 @@ class ConfigObj(configobj.ConfigObj, Section):
if os.environ[env_cat_key] in cfg_new['Radarr'].sections: if os.environ[env_cat_key] in cfg_new['Radarr'].sections:
cfg_new['Radarr'][env_cat_key]['enabled'] = 0 cfg_new['Radarr'][env_cat_key]['enabled'] = 0
section = "SickBeard" section = 'SickBeard'
env_cat_key = 'NZBPO_SBCATEGORY' env_cat_key = 'NZBPO_SBCATEGORY'
env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD'] 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
@ -334,7 +334,7 @@ class ConfigObj(configobj.ConfigObj, Section):
if os.environ[env_cat_key] in cfg_new['NzbDrone'].sections: if os.environ[env_cat_key] in cfg_new['NzbDrone'].sections:
cfg_new['NzbDrone'][env_cat_key]['enabled'] = 0 cfg_new['NzbDrone'][env_cat_key]['enabled'] = 0
section = "HeadPhones" section = 'HeadPhones'
env_cat_key = 'NZBPO_HPCATEGORY' env_cat_key = 'NZBPO_HPCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED'] env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed'] cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
@ -351,7 +351,7 @@ class ConfigObj(configobj.ConfigObj, Section):
if os.environ[env_cat_key] in cfg_new['Lidarr'].sections: if os.environ[env_cat_key] in cfg_new['Lidarr'].sections:
cfg_new['Lidarr'][env_cat_key]['enabled'] = 0 cfg_new['Lidarr'][env_cat_key]['enabled'] = 0
section = "Mylar" section = 'Mylar'
env_cat_key = 'NZBPO_MYCATEGORY' env_cat_key = 'NZBPO_MYCATEGORY'
env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR', env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
'REMOTE_PATH'] 'REMOTE_PATH']
@ -368,7 +368,7 @@ class ConfigObj(configobj.ConfigObj, Section):
cfg_new[section][os.environ[env_cat_key]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "Gamez" section = 'Gamez'
env_cat_key = 'NZBPO_GZCATEGORY' env_cat_key = 'NZBPO_GZCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH'] env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path'] cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
@ -383,7 +383,7 @@ class ConfigObj(configobj.ConfigObj, Section):
cfg_new[section][os.environ[env_cat_key]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "NzbDrone" section = 'NzbDrone'
env_cat_key = 'NZBPO_NDCATEGORY' env_cat_key = 'NZBPO_NDCATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE'] 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE']
@ -403,7 +403,7 @@ class ConfigObj(configobj.ConfigObj, Section):
if os.environ[env_cat_key] in cfg_new['SickBeard'].sections: if os.environ[env_cat_key] in cfg_new['SickBeard'].sections:
cfg_new['SickBeard'][env_cat_key]['enabled'] = 0 cfg_new['SickBeard'][env_cat_key]['enabled'] = 0
section = "Radarr" section = 'Radarr'
env_cat_key = 'NZBPO_RACATEGORY' env_cat_key = 'NZBPO_RACATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE'] 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE']
@ -423,7 +423,7 @@ class ConfigObj(configobj.ConfigObj, Section):
if os.environ[env_cat_key] in cfg_new['CouchPotato'].sections: if os.environ[env_cat_key] in cfg_new['CouchPotato'].sections:
cfg_new['CouchPotato'][env_cat_key]['enabled'] = 0 cfg_new['CouchPotato'][env_cat_key]['enabled'] = 0
section = "Lidarr" section = 'Lidarr'
env_cat_key = 'NZBPO_LICATEGORY' env_cat_key = 'NZBPO_LICATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH'] 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
@ -442,7 +442,7 @@ class ConfigObj(configobj.ConfigObj, Section):
if os.environ[env_cat_key] in cfg_new['HeadPhones'].sections: if os.environ[env_cat_key] in cfg_new['HeadPhones'].sections:
cfg_new['HeadPhones'][env_cat_key]['enabled'] = 0 cfg_new['HeadPhones'][env_cat_key]['enabled'] = 0
section = "Extensions" section = 'Extensions'
env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS'] env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions'] cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(env_keys)): for index in range(len(env_keys)):
@ -452,7 +452,7 @@ class ConfigObj(configobj.ConfigObj, Section):
value = os.environ[key] value = os.environ[key]
cfg_new[section][option] = value cfg_new[section][option] = value
section = "Posix" section = 'Posix'
env_keys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA'] env_keys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfg_keys = ['niceness', 'ionice_class', 'ionice_classdata'] cfg_keys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(env_keys)): for index in range(len(env_keys)):
@ -462,7 +462,7 @@ class ConfigObj(configobj.ConfigObj, Section):
value = os.environ[key] value = os.environ[key]
cfg_new[section][option] = value cfg_new[section][option] = value
section = "Transcoder" section = 'Transcoder'
env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH', env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES', 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR', 'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
@ -490,7 +490,7 @@ class ConfigObj(configobj.ConfigObj, Section):
value = os.environ[key] value = os.environ[key]
cfg_new[section][option] = value cfg_new[section][option] = value
section = "WakeOnLan" section = 'WakeOnLan'
env_keys = ['WAKE', 'HOST', 'PORT', 'MAC'] env_keys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfg_keys = ['wake', 'host', 'port', 'mac'] cfg_keys = ['wake', 'host', 'port', 'mac']
for index in range(len(env_keys)): for index in range(len(env_keys)):
@ -500,7 +500,7 @@ class ConfigObj(configobj.ConfigObj, Section):
value = os.environ[key] value = os.environ[key]
cfg_new[section][option] = value cfg_new[section][option] = value
section = "UserScript" section = 'UserScript'
env_cat_key = 'NZBPO_USCATEGORY' env_cat_key = 'NZBPO_USCATEGORY'
env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE', env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH'] 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
@ -518,14 +518,14 @@ class ConfigObj(configobj.ConfigObj, Section):
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
except Exception as error: except Exception as error:
logger.debug("Error {msg} when applying NZBGet config".format(msg=error)) logger.debug('Error {msg} when applying NZBGet config'.format(msg=error))
try: try:
# write our new config to autoProcessMedia.cfg # write our new config to autoProcessMedia.cfg
cfg_new.filename = core.CONFIG_FILE cfg_new.filename = core.CONFIG_FILE
cfg_new.write() cfg_new.write()
except Exception as error: except Exception as error:
logger.debug("Error {msg} when writing changes to .cfg".format(msg=error)) logger.debug('Error {msg} when writing changes to .cfg'.format(msg=error))
return cfg_new return cfg_new

View file

@ -8,11 +8,11 @@ MAX_DB_VERSION = 2
def backup_database(version): def backup_database(version):
logger.info("Backing up database before upgrade") logger.info('Backing up database before upgrade')
if not backup_versioned_file(main_db.db_filename(), version): if not backup_versioned_file(main_db.db_filename(), version):
logger.log_error_and_exit("Database backup failed, abort upgrading database") logger.log_error_and_exit('Database backup failed, abort upgrading database')
else: else:
logger.info("Proceeding with upgrade") logger.info('Proceeding with upgrade')
# ====================== # ======================
@ -23,17 +23,17 @@ def backup_database(version):
class InitialSchema(main_db.SchemaUpgrade): class InitialSchema(main_db.SchemaUpgrade):
def test(self): def test(self):
no_update = False no_update = False
if self.has_table("db_version"): if self.has_table('db_version'):
cur_db_version = self.check_db_version() cur_db_version = self.check_db_version()
no_update = not cur_db_version < MAX_DB_VERSION no_update = not cur_db_version < MAX_DB_VERSION
return no_update return no_update
def execute(self): def execute(self):
if not self.has_table("downloads") and not self.has_table("db_version"): if not self.has_table('downloads') and not self.has_table('db_version'):
queries = [ queries = [
"CREATE TABLE db_version (db_version INTEGER);", 'CREATE TABLE db_version (db_version INTEGER);',
"CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));", 'CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));',
"INSERT INTO db_version (db_version) VALUES (2);" 'INSERT INTO db_version (db_version) VALUES (2);'
] ]
for query in queries: for query in queries:
self.connection.action(query) self.connection.action(query)
@ -42,24 +42,24 @@ class InitialSchema(main_db.SchemaUpgrade):
cur_db_version = self.check_db_version() cur_db_version = self.check_db_version()
if cur_db_version < MIN_DB_VERSION: if cur_db_version < MIN_DB_VERSION:
logger.log_error_and_exit(u"Your database version ({current}) is too old to migrate " logger.log_error_and_exit(u'Your database version ({current}) is too old to migrate '
u"from what this version of nzbToMedia supports ({min})." u'from what this version of nzbToMedia supports ({min}).'
u"\nPlease remove nzbtomedia.db file to begin fresh.".format u'\nPlease remove nzbtomedia.db file to begin fresh.'.format
(current=cur_db_version, min=MIN_DB_VERSION)) (current=cur_db_version, min=MIN_DB_VERSION))
if cur_db_version > MAX_DB_VERSION: if cur_db_version > MAX_DB_VERSION:
logger.log_error_and_exit(u"Your database version ({current}) has been incremented " logger.log_error_and_exit(u'Your database version ({current}) has been incremented '
u"past what this version of nzbToMedia supports ({max})." u'past what this version of nzbToMedia supports ({max}).'
u"\nIf you have used other forks of nzbToMedia, your database " u'\nIf you have used other forks of nzbToMedia, your database '
u"may be unusable due to their modifications.".format u'may be unusable due to their modifications.'.format
(current=cur_db_version, max=MAX_DB_VERSION)) (current=cur_db_version, max=MAX_DB_VERSION))
if cur_db_version < MAX_DB_VERSION: # We need to upgrade. if cur_db_version < MAX_DB_VERSION: # We need to upgrade.
queries = [ queries = [
"CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));", 'CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));',
"INSERT INTO downloads2 SELECT * FROM downloads;", 'INSERT INTO downloads2 SELECT * FROM downloads;',
"DROP TABLE IF EXISTS downloads;", 'DROP TABLE IF EXISTS downloads;',
"ALTER TABLE downloads2 RENAME TO downloads;", 'ALTER TABLE downloads2 RENAME TO downloads;',
"INSERT INTO db_version (db_version) VALUES (2);" 'INSERT INTO db_version (db_version) VALUES (2);'
] ]
for query in queries: for query in queries:
self.connection.action(query) self.connection.action(query)

View file

@ -16,32 +16,32 @@ def extract(file_path, output_destination):
# Using Windows # Using Windows
if platform.system() == 'Windows': if platform.system() == 'Windows':
if not os.path.exists(core.SEVENZIP): if not os.path.exists(core.SEVENZIP):
core.logger.error("EXTRACTOR: Could not find 7-zip, Exiting") core.logger.error('EXTRACTOR: Could not find 7-zip, Exiting')
return False return False
wscriptlocation = os.path.join(os.environ['WINDIR'], 'system32', 'wscript.exe') wscriptlocation = os.path.join(os.environ['WINDIR'], 'system32', 'wscript.exe')
invislocation = os.path.join(core.APP_ROOT, 'core', 'extractor', 'bin', 'invisible.vbs') invislocation = os.path.join(core.APP_ROOT, 'core', 'extractor', 'bin', 'invisible.vbs')
cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, "x", "-y"] cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, 'x', '-y']
ext_7zip = [".rar", ".zip", ".tar.gz", "tgz", ".tar.bz2", ".tbz", ".tar.lzma", ".tlz", ".7z", ".xz"] ext_7zip = ['.rar', '.zip', '.tar.gz', 'tgz', '.tar.bz2', '.tbz', '.tar.lzma', '.tlz', '.7z', '.xz']
extract_commands = dict.fromkeys(ext_7zip, cmd_7zip) extract_commands = dict.fromkeys(ext_7zip, cmd_7zip)
# Using unix # Using unix
else: else:
required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"] required_cmds = ['unrar', 'unzip', 'tar', 'unxz', 'unlzma', '7zr', 'bunzip2']
# ## Possible future suport: # ## Possible future suport:
# gunzip: gz (cmd will delete original archive) # gunzip: gz (cmd will delete original archive)
# ## the following do not extract to dest dir # ## the following do not extract to dest dir
# ".xz": ["xz", "-d --keep"], # '.xz': ['xz', '-d --keep'],
# ".lzma": ["xz", "-d --format=lzma --keep"], # '.lzma': ['xz', '-d --format=lzma --keep'],
# ".bz2": ["bzip2", "-d --keep"], # '.bz2': ['bzip2', '-d --keep'],
extract_commands = { extract_commands = {
".rar": ["unrar", "x", "-o+", "-y"], '.rar': ['unrar', 'x', '-o+', '-y'],
".tar": ["tar", "-xf"], '.tar': ['tar', '-xf'],
".zip": ["unzip"], '.zip': ['unzip'],
".tar.gz": ["tar", "-xzf"], ".tgz": ["tar", "-xzf"], '.tar.gz': ['tar', '-xzf'], '.tgz': ['tar', '-xzf'],
".tar.bz2": ["tar", "-xjf"], ".tbz": ["tar", "-xjf"], '.tar.bz2': ['tar', '-xjf'], '.tbz': ['tar', '-xjf'],
".tar.lzma": ["tar", "--lzma", "-xf"], ".tlz": ["tar", "--lzma", "-xf"], '.tar.lzma': ['tar', '--lzma', '-xf'], '.tlz': ['tar', '--lzma', '-xf'],
".tar.xz": ["tar", "--xz", "-xf"], ".txz": ["tar", "--xz", "-xf"], '.tar.xz': ['tar', '--xz', '-xf'], '.txz': ['tar', '--xz', '-xf'],
".7z": ["7zr", "x"], '.7z': ['7zr', 'x'],
} }
# Test command exists and if not, remove # Test command exists and if not, remove
if not os.getenv('TR_TORRENT_DIR'): if not os.getenv('TR_TORRENT_DIR'):
@ -51,39 +51,39 @@ def extract(file_path, output_destination):
stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist. stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist.
for k, v in extract_commands.items(): for k, v in extract_commands.items():
if cmd in v[0]: if cmd in v[0]:
if not call(["which", "7zr"], stdout=devnull, stderr=devnull): # we do have "7zr" if not call(['which', '7zr'], stdout=devnull, stderr=devnull): # we do have '7zr'
extract_commands[k] = ["7zr", "x", "-y"] extract_commands[k] = ['7zr', 'x', '-y']
elif not call(["which", "7z"], stdout=devnull, stderr=devnull): # we do have "7z" elif not call(['which', '7z'], stdout=devnull, stderr=devnull): # we do have '7z'
extract_commands[k] = ["7z", "x", "-y"] extract_commands[k] = ['7z', 'x', '-y']
elif not call(["which", "7za"], stdout=devnull, stderr=devnull): # we do have "7za" elif not call(['which', '7za'], stdout=devnull, stderr=devnull): # we do have '7za'
extract_commands[k] = ["7za", "x", "-y"] extract_commands[k] = ['7za', 'x', '-y']
else: else:
core.logger.error("EXTRACTOR: {cmd} not found, " core.logger.error('EXTRACTOR: {cmd} not found, '
"disabling support for {feature}".format 'disabling support for {feature}'.format
(cmd=cmd, feature=k)) (cmd=cmd, feature=k))
del extract_commands[k] del extract_commands[k]
devnull.close() devnull.close()
else: else:
core.logger.warning("EXTRACTOR: Cannot determine which tool to use when called from Transmission") core.logger.warning('EXTRACTOR: Cannot determine which tool to use when called from Transmission')
if not extract_commands: if not extract_commands:
core.logger.warning("EXTRACTOR: No archive extracting programs found, plugin will be disabled") core.logger.warning('EXTRACTOR: No archive extracting programs found, plugin will be disabled')
ext = os.path.splitext(file_path) ext = os.path.splitext(file_path)
cmd = [] cmd = []
if ext[1] in (".gz", ".bz2", ".lzma"): if ext[1] in ('.gz', '.bz2', '.lzma'):
# Check if this is a tar # Check if this is a tar
if os.path.splitext(ext[0])[1] == ".tar": if os.path.splitext(ext[0])[1] == '.tar':
cmd = extract_commands[".tar{ext}".format(ext=ext[1])] cmd = extract_commands['.tar{ext}'.format(ext=ext[1])]
elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"): elif ext[1] in ('.1', '.01', '.001') and os.path.splitext(ext[0])[1] in ('.rar', '.zip', '.7z'):
cmd = extract_commands[os.path.splitext(ext[0])[1]] cmd = extract_commands[os.path.splitext(ext[0])[1]]
elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives. elif ext[1] in ('.cb7', '.cba', '.cbr', '.cbt', '.cbz'): # don't extract these comic book archives.
return False return False
else: else:
if ext[1] in extract_commands: if ext[1] in extract_commands:
cmd = extract_commands[ext[1]] cmd = extract_commands[ext[1]]
else: else:
core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format core.logger.debug('EXTRACTOR: Unknown file type: {ext}'.format
(ext=ext[1])) (ext=ext[1]))
return False return False
@ -95,9 +95,9 @@ def extract(file_path, output_destination):
else: else:
passwords = [] passwords = []
core.logger.info("Extracting {file} to {destination}".format core.logger.info('Extracting {file} to {destination}'.format
(file=file_path, destination=output_destination)) (file=file_path, destination=output_destination))
core.logger.debug("Extracting {cmd} {file} {destination}".format core.logger.debug('Extracting {cmd} {file} {destination}'.format
(cmd=cmd, file=file_path, destination=output_destination)) (cmd=cmd, file=file_path, destination=output_destination))
orig_files = [] orig_files = []
@ -121,35 +121,35 @@ def extract(file_path, output_destination):
else: else:
cmd = core.NICENESS + cmd cmd = core.NICENESS + cmd
cmd2 = cmd cmd2 = cmd
cmd2.append("-p-") # don't prompt for password. cmd2.append('-p-') # don't prompt for password.
p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine.
res = p.wait() res = p.wait()
if res == 0: # Both Linux and Windows return 0 for successful. if res == 0: # Both Linux and Windows return 0 for successful.
core.logger.info("EXTRACTOR: Extraction was successful for {file} to {destination}".format core.logger.info('EXTRACTOR: Extraction was successful for {file} to {destination}'.format
(file=file_path, destination=output_destination)) (file=file_path, destination=output_destination))
success = 1 success = 1
elif len(passwords) > 0: elif len(passwords) > 0:
core.logger.info("EXTRACTOR: Attempting to extract with passwords") core.logger.info('EXTRACTOR: Attempting to extract with passwords')
for password in passwords: for password in passwords:
if password == "": # if edited in windows or otherwise if blank lines. if password == '': # if edited in windows or otherwise if blank lines.
continue continue
cmd2 = cmd cmd2 = cmd
# append password here. # append password here.
passcmd = "-p{pwd}".format(pwd=password) passcmd = '-p{pwd}'.format(pwd=password)
cmd2.append(passcmd) cmd2.append(passcmd)
p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine.
res = p.wait() res = p.wait()
if (res >= 0 and platform == 'Windows') or res == 0: if (res >= 0 and platform == 'Windows') or res == 0:
core.logger.info("EXTRACTOR: Extraction was successful " core.logger.info('EXTRACTOR: Extraction was successful '
"for {file} to {destination} using password: {pwd}".format 'for {file} to {destination} using password: {pwd}'.format
(file=file_path, destination=output_destination, pwd=password)) (file=file_path, destination=output_destination, pwd=password))
success = 1 success = 1
break break
else: else:
continue continue
except Exception: except Exception:
core.logger.error("EXTRACTOR: Extraction failed for {file}. " core.logger.error('EXTRACTOR: Extraction failed for {file}. '
"Could not call command {cmd}".format 'Could not call command {cmd}'.format
(file=file_path, cmd=cmd)) (file=file_path, cmd=cmd))
os.chdir(pwd) os.chdir(pwd)
return False return False
@ -175,7 +175,7 @@ def extract(file_path, output_destination):
pass pass
return True return True
else: else:
core.logger.error("EXTRACTOR: Extraction failed for {file}. " core.logger.error('EXTRACTOR: Extraction failed for {file}. '
"Result was {result}".format 'Result was {result}'.format
(file=file_path, result=res)) (file=file_path, result=res))
return False return False

View file

@ -13,59 +13,59 @@ def auto_fork(section, input_category):
cfg = dict(core.CFG[section][input_category]) cfg = dict(core.CFG[section][input_category])
host = cfg.get("host") host = cfg.get('host')
port = cfg.get("port") port = cfg.get('port')
username = cfg.get("username") username = cfg.get('username')
password = cfg.get("password") password = cfg.get('password')
apikey = cfg.get("apikey") apikey = cfg.get('apikey')
ssl = int(cfg.get("ssl", 0)) ssl = int(cfg.get('ssl', 0))
web_root = cfg.get("web_root", "") web_root = cfg.get('web_root', '')
replace = {'sickrage': 'SickRage', 'sickchill': 'SickChill', 'sickgear': 'SickGear', 'medusa': 'Medusa', 'sickbeard-api': 'SickBeard-api'} replace = {'sickrage': 'SickRage', 'sickchill': 'SickChill', 'sickgear': 'SickGear', 'medusa': 'Medusa', 'sickbeard-api': 'SickBeard-api'}
f1 = replace[cfg.get("fork", "auto")] if cfg.get("fork", "auto") in replace else cfg.get("fork", "auto") f1 = replace[cfg.get('fork', 'auto')] if cfg.get('fork', 'auto') in replace else cfg.get('fork', 'auto')
try: try:
fork = f1, core.FORKS[f1] fork = f1, core.FORKS[f1]
except KeyError: except KeyError:
fork = "auto" fork = 'auto'
protocol = "https://" if ssl else "http://" protocol = 'https://' if ssl else 'http://'
detected = False detected = False
if section == "NzbDrone": if section == 'NzbDrone':
logger.info("Attempting to verify {category} fork".format logger.info('Attempting to verify {category} fork'.format
(category=input_category)) (category=input_category))
url = "{protocol}{host}:{port}{root}/api/rootfolder".format( url = '{protocol}{host}:{port}{root}/api/rootfolder'.format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root)
headers = {"X-Api-Key": apikey} headers = {'X-Api-Key': apikey}
try: try:
r = requests.get(url, headers=headers, stream=True, verify=False) r = requests.get(url, headers=headers, stream=True, verify=False)
except requests.ConnectionError: except requests.ConnectionError:
logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, input_category)) logger.warning('Could not connect to {0}:{1} to verify fork!'.format(section, input_category))
if not r.ok: if not r.ok:
logger.warning("Connection to {section}:{category} failed! " logger.warning('Connection to {section}:{category} failed! '
"Check your configuration".format 'Check your configuration'.format
(section=section, category=input_category)) (section=section, category=input_category))
fork = ['default', {}] fork = ['default', {}]
elif fork == "auto": elif fork == 'auto':
params = core.ALL_FORKS params = core.ALL_FORKS
rem_params = [] rem_params = []
logger.info("Attempting to auto-detect {category} fork".format(category=input_category)) logger.info('Attempting to auto-detect {category} fork'.format(category=input_category))
# define the order to test. Default must be first since the default fork doesn't reject parameters. # define the order to test. Default must be first since the default fork doesn't reject parameters.
# then in order of most unique parameters. # then in order of most unique parameters.
if apikey: if apikey:
url = "{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess".format( url = '{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess'.format(
protocol=protocol, host=host, port=port, root=web_root, apikey=apikey) protocol=protocol, host=host, port=port, root=web_root, apikey=apikey)
else: else:
url = "{protocol}{host}:{port}{root}/home/postprocess/".format( url = '{protocol}{host}:{port}{root}/home/postprocess/'.format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root)
# attempting to auto-detect fork # attempting to auto-detect fork
try: try:
s = requests.Session() s = requests.Session()
if not apikey and username and password: if not apikey and username and password:
login = "{protocol}{host}:{port}{root}/login".format( login = '{protocol}{host}:{port}{root}/login'.format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root)
login_params = {'username': username, 'password': password} login_params = {'username': username, 'password': password}
r = s.get(login, verify=False, timeout=(30, 60)) r = s.get(login, verify=False, timeout=(30, 60))
@ -74,7 +74,7 @@ def auto_fork(section, input_category):
s.post(login, data=login_params, stream=True, verify=False) s.post(login, data=login_params, stream=True, verify=False)
r = s.get(url, auth=(username, password), verify=False) r = s.get(url, auth=(username, password), verify=False)
except requests.ConnectionError: except requests.ConnectionError:
logger.info("Could not connect to {section}:{category} to perform auto-fork detection!".format logger.info('Could not connect to {section}:{category} to perform auto-fork detection!'.format
(section=section, category=input_category)) (section=section, category=input_category))
r = [] r = []
if r and r.ok: if r and r.ok:
@ -98,17 +98,17 @@ def auto_fork(section, input_category):
detected = True detected = True
break break
if detected: if detected:
logger.info("{section}:{category} fork auto-detection successful ...".format logger.info('{section}:{category} fork auto-detection successful ...'.format
(section=section, category=input_category)) (section=section, category=input_category))
elif rem_params: elif rem_params:
logger.info("{section}:{category} fork auto-detection found custom params {params}".format logger.info('{section}:{category} fork auto-detection found custom params {params}'.format
(section=section, category=input_category, params=params)) (section=section, category=input_category, params=params))
fork = ['custom', params] fork = ['custom', params]
else: else:
logger.info("{section}:{category} fork auto-detection failed".format logger.info('{section}:{category} fork auto-detection failed'.format
(section=section, category=input_category)) (section=section, category=input_category))
fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)] fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)]
logger.info("{section}:{category} fork set to {fork}".format logger.info('{section}:{category} fork set to {fork}'.format
(section=section, category=input_category, fork=fork[0])) (section=section, category=input_category, fork=fork[0]))
return fork[0], fork[1] return fork[0], fork[1]

View file

@ -193,9 +193,9 @@ class NTMRotatingLogHandler(object):
self.writes_since_check += 1 self.writes_since_check += 1
try: try:
message = u"{0}: {1}".format(section.upper(), to_log) message = u'{0}: {1}'.format(section.upper(), to_log)
except UnicodeError: except UnicodeError:
message = u"{0}: Message contains non-utf-8 string".format(section.upper()) message = u'{0}: Message contains non-utf-8 string'.format(section.upper())
out_line = message out_line = message

View file

@ -12,7 +12,7 @@ import core
from core import logger from core import logger
def db_filename(filename="nzbtomedia.db", suffix=None): def db_filename(filename='nzbtomedia.db', suffix=None):
""" """
@param filename: The sqlite database filename to use. If not specified, @param filename: The sqlite database filename to use. If not specified,
will be made to be nzbtomedia.db will be made to be nzbtomedia.db
@ -21,16 +21,16 @@ def db_filename(filename="nzbtomedia.db", suffix=None):
@return: the correct location of the database file. @return: the correct location of the database file.
""" """
if suffix: if suffix:
filename = "{0}.{1}".format(filename, suffix) filename = '{0}.{1}'.format(filename, suffix)
return core.os.path.join(core.APP_ROOT, filename) return core.os.path.join(core.APP_ROOT, filename)
class DBConnection(object): class DBConnection(object):
def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None): def __init__(self, filename='nzbtomedia.db', suffix=None, row_type=None):
self.filename = filename self.filename = filename
self.connection = sqlite3.connect(db_filename(filename), 20) self.connection = sqlite3.connect(db_filename(filename), 20)
if row_type == "dict": if row_type == 'dict':
self.connection.row_factory = self._dict_factory self.connection.row_factory = self._dict_factory
else: else:
self.connection.row_factory = sqlite3.Row self.connection.row_factory = sqlite3.Row
@ -38,13 +38,13 @@ class DBConnection(object):
def check_db_version(self): def check_db_version(self):
result = None result = None
try: try:
result = self.select("SELECT db_version FROM db_version") result = self.select('SELECT db_version FROM db_version')
except sqlite3.OperationalError as e: except sqlite3.OperationalError as e:
if "no such table: db_version" in e.args[0]: if 'no such table: db_version' in e.args[0]:
return 0 return 0
if result: if result:
return int(result[0]["db_version"]) return int(result[0]['db_version'])
else: else:
return 0 return 0
@ -58,12 +58,12 @@ class DBConnection(object):
while attempt < 5: while attempt < 5:
try: try:
if args is None: if args is None:
logger.log("{name}: {query}".format(name=self.filename, query=query), logger.DB) logger.log('{name}: {query}'.format(name=self.filename, query=query), logger.DB)
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute(query) cursor.execute(query)
sql_result = cursor.fetchone()[0] sql_result = cursor.fetchone()[0]
else: else:
logger.log("{name}: {query} with args {args}".format logger.log('{name}: {query} with args {args}'.format
(name=self.filename, query=query, args=args), logger.DB) (name=self.filename, query=query, args=args), logger.DB)
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute(query, args) cursor.execute(query, args)
@ -72,15 +72,15 @@ class DBConnection(object):
# get out of the connection attempt loop since we were successful # get out of the connection attempt loop since we were successful
break break
except sqlite3.OperationalError as error: except sqlite3.OperationalError as error:
if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]: if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]:
logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING) logger.log(u'DB error: {msg}'.format(msg=error), logger.WARNING)
attempt += 1 attempt += 1
time.sleep(1) time.sleep(1)
else: else:
logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR) logger.log(u'DB error: {msg}'.format(msg=error), logger.ERROR)
raise raise
except sqlite3.DatabaseError as error: except sqlite3.DatabaseError as error:
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) logger.log(u'Fatal error executing query: {msg}'.format(msg=error), logger.ERROR)
raise raise
return sql_result return sql_result
@ -101,26 +101,26 @@ class DBConnection(object):
sql_result.append(self.connection.execute(qu[0])) sql_result.append(self.connection.execute(qu[0]))
elif len(qu) > 1: elif len(qu) > 1:
if log_transaction: if log_transaction:
logger.log(u"{query} with args {args}".format(query=qu[0], args=qu[1]), logger.DEBUG) logger.log(u'{query} with args {args}'.format(query=qu[0], args=qu[1]), logger.DEBUG)
sql_result.append(self.connection.execute(qu[0], qu[1])) sql_result.append(self.connection.execute(qu[0], qu[1]))
self.connection.commit() self.connection.commit()
logger.log(u"Transaction with {x} query's executed".format(x=len(querylist)), logger.DEBUG) logger.log(u'Transaction with {x} query\'s executed'.format(x=len(querylist)), logger.DEBUG)
return sql_result return sql_result
except sqlite3.OperationalError as error: except sqlite3.OperationalError as error:
sql_result = [] sql_result = []
if self.connection: if self.connection:
self.connection.rollback() self.connection.rollback()
if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]: if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]:
logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING) logger.log(u'DB error: {msg}'.format(msg=error), logger.WARNING)
attempt += 1 attempt += 1
time.sleep(1) time.sleep(1)
else: else:
logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR) logger.log(u'DB error: {msg}'.format(msg=error), logger.ERROR)
raise raise
except sqlite3.DatabaseError as error: except sqlite3.DatabaseError as error:
if self.connection: if self.connection:
self.connection.rollback() self.connection.rollback()
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) logger.log(u'Fatal error executing query: {msg}'.format(msg=error), logger.ERROR)
raise raise
return sql_result return sql_result
@ -135,25 +135,25 @@ class DBConnection(object):
while attempt < 5: while attempt < 5:
try: try:
if args is None: if args is None:
logger.log(u"{name}: {query}".format(name=self.filename, query=query), logger.DB) logger.log(u'{name}: {query}'.format(name=self.filename, query=query), logger.DB)
sql_result = self.connection.execute(query) sql_result = self.connection.execute(query)
else: else:
logger.log(u"{name}: {query} with args {args}".format logger.log(u'{name}: {query} with args {args}'.format
(name=self.filename, query=query, args=args), logger.DB) (name=self.filename, query=query, args=args), logger.DB)
sql_result = self.connection.execute(query, args) sql_result = self.connection.execute(query, args)
self.connection.commit() self.connection.commit()
# get out of the connection attempt loop since we were successful # get out of the connection attempt loop since we were successful
break break
except sqlite3.OperationalError as error: except sqlite3.OperationalError as error:
if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]: if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]:
logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING) logger.log(u'DB error: {msg}'.format(msg=error), logger.WARNING)
attempt += 1 attempt += 1
time.sleep(1) time.sleep(1)
else: else:
logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR) logger.log(u'DB error: {msg}'.format(msg=error), logger.ERROR)
raise raise
except sqlite3.DatabaseError as error: except sqlite3.DatabaseError as error:
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) logger.log(u'Fatal error executing query: {msg}'.format(msg=error), logger.ERROR)
raise raise
return sql_result return sql_result
@ -171,37 +171,37 @@ class DBConnection(object):
def gen_params(my_dict): def gen_params(my_dict):
return [ return [
"{key} = ?".format(key=k) '{key} = ?'.format(key=k)
for k in my_dict.keys() for k in my_dict.keys()
] ]
changes_before = self.connection.total_changes changes_before = self.connection.total_changes
items = list(value_dict.values()) + list(key_dict.values()) items = list(value_dict.values()) + list(key_dict.values())
self.action( self.action(
"UPDATE {table} " 'UPDATE {table} '
"SET {params} " 'SET {params} '
"WHERE {conditions}".format( 'WHERE {conditions}'.format(
table=table_name, table=table_name,
params=", ".join(gen_params(value_dict)), params=', '.join(gen_params(value_dict)),
conditions=" AND ".join(gen_params(key_dict)) conditions=' AND '.join(gen_params(key_dict))
), ),
items items
) )
if self.connection.total_changes == changes_before: if self.connection.total_changes == changes_before:
self.action( self.action(
"INSERT OR IGNORE INTO {table} ({columns}) " 'INSERT OR IGNORE INTO {table} ({columns}) '
"VALUES ({values})".format( 'VALUES ({values})'.format(
table=table_name, table=table_name,
columns=", ".join(map(text_type, value_dict.keys())), columns=', '.join(map(text_type, value_dict.keys())),
values=", ".join(["?"] * len(value_dict.values())) values=', '.join(['?'] * len(value_dict.values()))
), ),
list(value_dict.values()) list(value_dict.values())
) )
def table_info(self, table_name): def table_info(self, table_name):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
cursor = self.connection.execute("PRAGMA table_info({0})".format(table_name)) cursor = self.connection.execute('PRAGMA table_info({0})'.format(table_name))
columns = {} columns = {}
for column in cursor: for column in cursor:
columns[column['name']] = {'type': column['type']} columns[column['name']] = {'type': column['type']}
@ -232,31 +232,31 @@ class DBSanityCheck(object):
# =============== # ===============
def upgrade_database(connection, schema): def upgrade_database(connection, schema):
logger.log(u"Checking database structure...", logger.MESSAGE) logger.log(u'Checking database structure...', logger.MESSAGE)
_process_upgrade(connection, schema) _process_upgrade(connection, schema)
def pretty_name(class_name): def pretty_name(class_name):
return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)]) return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)])
def _process_upgrade(connection, upgrade_class): def _process_upgrade(connection, upgrade_class):
instance = upgrade_class(connection) instance = upgrade_class(connection)
logger.log(u"Checking {name} database upgrade".format logger.log(u'Checking {name} database upgrade'.format
(name=pretty_name(upgrade_class.__name__)), logger.DEBUG) (name=pretty_name(upgrade_class.__name__)), logger.DEBUG)
if not instance.test(): if not instance.test():
logger.log(u"Database upgrade required: {name}".format logger.log(u'Database upgrade required: {name}'.format
(name=pretty_name(upgrade_class.__name__)), logger.MESSAGE) (name=pretty_name(upgrade_class.__name__)), logger.MESSAGE)
try: try:
instance.execute() instance.execute()
except sqlite3.DatabaseError as error: except sqlite3.DatabaseError as error:
print(u"Error in {name}: {msg}".format print(u'Error in {name}: {msg}'.format
(name=upgrade_class.__name__, msg=error)) (name=upgrade_class.__name__, msg=error))
raise raise
logger.log(u"{name} upgrade completed".format logger.log(u'{name} upgrade completed'.format
(name=upgrade_class.__name__), logger.DEBUG) (name=upgrade_class.__name__), logger.DEBUG)
else: else:
logger.log(u"{name} upgrade not required".format logger.log(u'{name} upgrade not required'.format
(name=upgrade_class.__name__), logger.DEBUG) (name=upgrade_class.__name__), logger.DEBUG)
for upgradeSubClass in upgrade_class.__subclasses__(): for upgradeSubClass in upgrade_class.__subclasses__():
@ -269,23 +269,23 @@ class SchemaUpgrade(object):
self.connection = connection self.connection = connection
def has_table(self, table_name): def has_table(self, table_name):
return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (table_name,)).fetchall()) > 0 return len(self.connection.action('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,)).fetchall()) > 0
def has_column(self, table_name, column): def has_column(self, table_name, column):
return column in self.connection.table_info(table_name) return column in self.connection.table_info(table_name)
def add_column(self, table, column, data_type="NUMERIC", default=0): def add_column(self, table, column, data_type='NUMERIC', default=0):
self.connection.action("ALTER TABLE {0} ADD {1} {2}".format(table, column, data_type)) self.connection.action('ALTER TABLE {0} ADD {1} {2}'.format(table, column, data_type))
self.connection.action("UPDATE {0} SET {1} = ?".format(table, column), (default,)) self.connection.action('UPDATE {0} SET {1} = ?'.format(table, column), (default,))
def check_db_version(self): def check_db_version(self):
result = self.connection.select("SELECT db_version FROM db_version") result = self.connection.select('SELECT db_version FROM db_version')
if result: if result:
return int(result[-1]["db_version"]) return int(result[-1]['db_version'])
else: else:
return 0 return 0
def inc_db_version(self): def inc_db_version(self):
new_version = self.check_db_version() + 1 new_version = self.check_db_version() + 1
self.connection.action("UPDATE db_version SET db_version = ?", [new_version]) self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
return new_version return new_version

View file

@ -10,22 +10,22 @@ import core
from core import logger from core import logger
from core.utils import list_media_files from core.utils import list_media_files
reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.", reverse_list = [r'\.\d{2}e\d{2}s\.', r'\.[pi]0801\.', r'\.p027\.', r'\.[pi]675\.', r'\.[pi]084\.', r'\.p063\.',
r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.", r'\b[45]62[xh]\.', r'\.yarulb\.', r'\.vtd[hp]\.',
r"\.ld[.-]?bew\.", r"\.pir.?(dov|dvd|bew|db|rb)\.", r"\brdvd\.", r"\.vts\.", r"\.reneercs\.", r'\.ld[.-]?bew\.', r'\.pir.?(dov|dvd|bew|db|rb)\.', r'\brdvd\.', r'\.vts\.', r'\.reneercs\.',
r"\.dcv\.", r"\b(pir|mac)dh\b", r"\.reporp\.", r"\.kcaper\.", r'\.dcv\.', r'\b(pir|mac)dh\b', r'\.reporp\.', r'\.kcaper\.',
r"\.lanretni\.", r"\b3ca\b", r"\.cstn\."] r'\.lanretni\.', r'\b3ca\b', r'\.cstn\.']
reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE) reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE)
season_pattern = re.compile(r"(.*\.\d{2}e\d{2}s\.)(.*)", flags=re.IGNORECASE) season_pattern = re.compile(r'(.*\.\d{2}e\d{2}s\.)(.*)', flags=re.IGNORECASE)
word_pattern = re.compile(r"([^A-Z0-9]*[A-Z0-9]+)") word_pattern = re.compile(r'([^A-Z0-9]*[A-Z0-9]+)')
media_list = [r"\.s\d{2}e\d{2}\.", r"\.1080[pi]\.", r"\.720p\.", r"\.576[pi]", r"\.480[pi]\.", r"\.360p\.", media_list = [r'\.s\d{2}e\d{2}\.', r'\.1080[pi]\.', r'\.720p\.', r'\.576[pi]', r'\.480[pi]\.', r'\.360p\.',
r"\.[xh]26[45]\b", r"\.bluray\.", r"\.[hp]dtv\.", r'\.[xh]26[45]\b', r'\.bluray\.', r'\.[hp]dtv\.',
r"\.web[.-]?dl\.", r"\.(vod|dvd|web|bd|br).?rip\.", r"\.dvdr\b", r"\.stv\.", r"\.screener\.", r"\.vcd\.", r'\.web[.-]?dl\.', r'\.(vod|dvd|web|bd|br).?rip\.', r'\.dvdr\b', r'\.stv\.', r'\.screener\.', r'\.vcd\.',
r"\bhd(cam|rip)\b", r"\.proper\.", r"\.repack\.", r'\bhd(cam|rip)\b', r'\.proper\.', r'\.repack\.',
r"\.internal\.", r"\bac3\b", r"\.ntsc\.", r"\.pal\.", r"\.secam\.", r"\bdivx\b", r"\bxvid\b"] r'\.internal\.', r'\bac3\b', r'\.ntsc\.', r'\.pal\.', r'\.secam\.', r'\bdivx\b', r'\bxvid\b']
media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE) media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE)
garbage_name = re.compile(r"^[a-zA-Z0-9]*$") garbage_name = re.compile(r'^[a-zA-Z0-9]*$')
char_replace = [[r"(\w)1\.(\w)", r"\1i\2"] char_replace = [[r'(\w)1\.(\w)', r'\1i\2']
] ]
@ -67,26 +67,26 @@ def strip_groups(filename):
def rename_file(filename, newfile_path): def rename_file(filename, newfile_path):
if os.path.isfile(newfile_path): if os.path.isfile(newfile_path):
newfile_path = os.path.splitext(newfile_path)[0] + ".NTM" + os.path.splitext(newfile_path)[1] newfile_path = os.path.splitext(newfile_path)[0] + '.NTM' + os.path.splitext(newfile_path)[1]
logger.debug("Replacing file name {old} with download name {new}".format logger.debug('Replacing file name {old} with download name {new}'.format
(old=filename, new=newfile_path), "EXCEPTION") (old=filename, new=newfile_path), 'EXCEPTION')
try: try:
os.rename(filename, newfile_path) os.rename(filename, newfile_path)
except Exception as error: except Exception as error:
logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION") logger.error('Unable to rename file due to: {error}'.format(error=error), 'EXCEPTION')
def replace_filename(filename, dirname, name): def replace_filename(filename, dirname, name):
head, file_extension = os.path.splitext(os.path.basename(filename)) head, file_extension = os.path.splitext(os.path.basename(filename))
if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None: if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
newname = os.path.basename(dirname).replace(' ', '.') newname = os.path.basename(dirname).replace(' ', '.')
logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION") logger.debug('Replacing file name {old} with directory name {new}'.format(old=head, new=newname), 'EXCEPTION')
elif media_pattern.search(name.replace(' ', '.').lower()) is not None: elif media_pattern.search(name.replace(' ', '.').lower()) is not None:
newname = name.replace(' ', '.') newname = name.replace(' ', '.')
logger.debug("Replacing file name {old} with download name {new}".format logger.debug('Replacing file name {old} with download name {new}'.format
(old=head, new=newname), "EXCEPTION") (old=head, new=newname), 'EXCEPTION')
else: else:
logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION") logger.warning('No name replacement determined for {name}'.format(name=head), 'EXCEPTION')
newname = name newname = name
newfile = newname + file_extension newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile) newfile_path = os.path.join(dirname, newfile)
@ -99,11 +99,11 @@ def reverse_filename(filename, dirname, name):
if na_parts is not None: if na_parts is not None:
word_p = word_pattern.findall(na_parts.group(2)) word_p = word_pattern.findall(na_parts.group(2))
if word_p: if word_p:
new_words = "" new_words = ''
for wp in word_p: for wp in word_p:
if wp[0] == ".": if wp[0] == '.':
new_words += "." new_words += '.'
new_words += re.sub(r"\W", "", wp) new_words += re.sub(r'\W', '', wp)
else: else:
new_words = na_parts.group(2) new_words = na_parts.group(2)
for cr in char_replace: for cr in char_replace:
@ -112,15 +112,15 @@ def reverse_filename(filename, dirname, name):
else: else:
newname = head[::-1].title() newname = head[::-1].title()
newname = newname.replace(' ', '.') newname = newname.replace(' ', '.')
logger.debug("Reversing filename {old} to {new}".format logger.debug('Reversing filename {old} to {new}'.format
(old=head, new=newname), "EXCEPTION") (old=head, new=newname), 'EXCEPTION')
newfile = newname + file_extension newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile) newfile_path = os.path.join(dirname, newfile)
return newfile_path return newfile_path
def rename_script(dirname): def rename_script(dirname):
rename_file = "" rename_file = ''
for directory, directories, files in os.walk(dirname): for directory, directories, files in os.walk(dirname):
for file in files: for file in files:
if re.search(r'(rename\S*\.(sh|bat)$)', file, re.IGNORECASE): if re.search(r'(rename\S*\.(sh|bat)$)', file, re.IGNORECASE):
@ -139,23 +139,23 @@ def rename_script(dirname):
dest = os.path.join(dirname, cmd[1].split('\\')[-1].split('/')[-1]) dest = os.path.join(dirname, cmd[1].split('\\')[-1].split('/')[-1])
if os.path.isfile(dest): if os.path.isfile(dest):
continue continue
logger.debug("Renaming file {source} to {destination}".format logger.debug('Renaming file {source} to {destination}'.format
(source=orig, destination=dest), "EXCEPTION") (source=orig, destination=dest), 'EXCEPTION')
try: try:
os.rename(orig, dest) os.rename(orig, dest)
except Exception as error: except Exception as error:
logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION") logger.error('Unable to rename file due to: {error}'.format(error=error), 'EXCEPTION')
def par2(dirname): def par2(dirname):
newlist = [] newlist = []
sofar = 0 sofar = 0
parfile = "" parfile = ''
objects = [] objects = []
if os.path.exists(dirname): if os.path.exists(dirname):
objects = os.listdir(dirname) objects = os.listdir(dirname)
for item in objects: for item in objects:
if item.endswith(".par2"): if item.endswith('.par2'):
size = os.path.getsize(os.path.join(dirname, item)) size = os.path.getsize(os.path.join(dirname, item))
if size > sofar: if size > sofar:
sofar = size sofar = size
@ -167,20 +167,20 @@ def par2(dirname):
bitbucket = open('NUL') bitbucket = open('NUL')
else: else:
bitbucket = open('/dev/null') bitbucket = open('/dev/null')
logger.info("Running par2 on file {0}.".format(parfile), "PAR2") logger.info('Running par2 on file {0}.'.format(parfile), 'PAR2')
command = [core.PAR2CMD, 'r', parfile, "*"] command = [core.PAR2CMD, 'r', parfile, '*']
cmd = "" cmd = ''
for item in command: for item in command:
cmd = "{cmd} {item}".format(cmd=cmd, item=item) cmd = '{cmd} {item}'.format(cmd=cmd, item=item)
logger.debug("calling command:{0}".format(cmd), "PAR2") logger.debug('calling command:{0}'.format(cmd), 'PAR2')
try: try:
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket)
proc.communicate() proc.communicate()
result = proc.returncode result = proc.returncode
except Exception: except Exception:
logger.error("par2 file processing for {0} has failed".format(parfile), "PAR2") logger.error('par2 file processing for {0} has failed'.format(parfile), 'PAR2')
if result == 0: if result == 0:
logger.info("par2 file processing succeeded", "PAR2") logger.info('par2 file processing succeeded', 'PAR2')
os.chdir(pwd) os.chdir(pwd)
bitbucket.close() bitbucket.close()

View file

@ -26,18 +26,18 @@ def is_video_good(videofile, status):
disable = True disable = True
else: else:
test_details, res = get_video_details(core.TEST_FILE) test_details, res = get_video_details(core.TEST_FILE)
if res != 0 or test_details.get("error"): if res != 0 or test_details.get('error'):
disable = True disable = True
logger.info("DISABLED: ffprobe failed to analyse test file. Stopping corruption check.", 'TRANSCODER') logger.info('DISABLED: ffprobe failed to analyse test file. Stopping corruption check.', 'TRANSCODER')
if test_details.get("streams"): if test_details.get('streams'):
vid_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"] vid_streams = [item for item in test_details['streams'] if 'codec_type' in item and item['codec_type'] == 'video']
aud_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"] aud_streams = [item for item in test_details['streams'] if 'codec_type' in item and item['codec_type'] == 'audio']
if not (len(vid_streams) > 0 and len(aud_streams) > 0): if not (len(vid_streams) > 0 and len(aud_streams) > 0):
disable = True disable = True
logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.", logger.info('DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.',
'TRANSCODER') 'TRANSCODER')
if disable: if disable:
if status: # if the download was "failed", assume bad. If it was successful, assume good. if status: # if the download was 'failed', assume bad. If it was successful, assume good.
return False return False
else: else:
return True return True
@ -46,20 +46,20 @@ def is_video_good(videofile, status):
video_details, result = get_video_details(videofile) video_details, result = get_video_details(videofile)
if result != 0: if result != 0:
logger.error("FAILED: [{0}] is corrupted!".format(file_name_ext), 'TRANSCODER') logger.error('FAILED: [{0}] is corrupted!'.format(file_name_ext), 'TRANSCODER')
return False return False
if video_details.get("error"): if video_details.get('error'):
logger.info("FAILED: [{0}] returned error [{1}].".format(file_name_ext, video_details.get("error")), 'TRANSCODER') logger.info('FAILED: [{0}] returned error [{1}].'.format(file_name_ext, video_details.get('error')), 'TRANSCODER')
return False return False
if video_details.get("streams"): if video_details.get('streams'):
video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"] video_streams = [item for item in video_details['streams'] if item['codec_type'] == 'video']
audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] audio_streams = [item for item in video_details['streams'] if item['codec_type'] == 'audio']
if len(video_streams) > 0 and len(audio_streams) > 0: if len(video_streams) > 0 and len(audio_streams) > 0:
logger.info("SUCCESS: [{0}] has no corruption.".format(file_name_ext), 'TRANSCODER') logger.info('SUCCESS: [{0}] has no corruption.'.format(file_name_ext), 'TRANSCODER')
return True return True
else: else:
logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. " logger.info('FAILED: [{0}] has {1} video streams and {2} audio streams. '
"Assume corruption.".format 'Assume corruption.'.format
(file_name_ext, len(video_streams), len(audio_streams)), 'TRANSCODER') (file_name_ext, len(video_streams), len(audio_streams)), 'TRANSCODER')
return False return False
@ -70,7 +70,7 @@ def zip_out(file, img, bitbucket):
try: try:
procin = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) procin = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
except Exception: except Exception:
logger.error("Extracting [{0}] has failed".format(file), 'TRANSCODER') logger.error('Extracting [{0}] has failed'.format(file), 'TRANSCODER')
return procin return procin
@ -114,7 +114,7 @@ def get_video_details(videofile, img=None, bitbucket=None):
result = proc.returncode result = proc.returncode
video_details = json.loads(out) video_details = json.loads(out)
except Exception: except Exception:
logger.error("Checking [{0}] has failed".format(file), 'TRANSCODER') logger.error('Checking [{0}] has failed'.format(file), 'TRANSCODER')
return video_details, result return video_details, result
@ -126,13 +126,13 @@ def build_commands(file, new_dir, movie_name, bitbucket):
video_details, result = get_video_details(file) video_details, result = get_video_details(file)
directory, name = os.path.split(file) directory, name = os.path.split(file)
name, ext = os.path.splitext(name) name, ext = os.path.splitext(name)
check = re.match("VTS_([0-9][0-9])_[0-9]+", name) check = re.match('VTS_([0-9][0-9])_[0-9]+', name)
if check and core.CONCAT: if check and core.CONCAT:
name = movie_name name = movie_name
elif check: elif check:
name = ('{0}.cd{1}'.format(movie_name, check.groups()[0])) name = ('{0}.cd{1}'.format(movie_name, check.groups()[0]))
elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name): elif core.CONCAT and re.match('(.+)[cC][dD][0-9]', name):
name = re.sub('([ ._=:-]+[cC][dD][0-9])', "", name) name = re.sub('([ ._=:-]+[cC][dD][0-9])', '', name)
if ext == core.VEXTENSION and new_dir == directory: # we need to change the name to prevent overwriting itself. if ext == core.VEXTENSION and new_dir == directory: # we need to change the name to prevent overwriting itself.
core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext' core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext'
else: else:
@ -153,7 +153,7 @@ def build_commands(file, new_dir, movie_name, bitbucket):
other_cmd = [] other_cmd = []
if not video_details or not video_details.get( if not video_details or not video_details.get(
"streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding. 'streams'): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
video_streams = [] video_streams = []
audio_streams = [] audio_streams = []
sub_streams = [] sub_streams = []
@ -203,19 +203,19 @@ def build_commands(file, new_dir, movie_name, bitbucket):
other_cmd.extend(['-movflags', '+faststart']) other_cmd.extend(['-movflags', '+faststart'])
else: else:
video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"] video_streams = [item for item in video_details['streams'] if item['codec_type'] == 'video']
audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] audio_streams = [item for item in video_details['streams'] if item['codec_type'] == 'audio']
sub_streams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"] sub_streams = [item for item in video_details['streams'] if item['codec_type'] == 'subtitle']
if core.VEXTENSION not in ['.mkv', '.mpegts']: if core.VEXTENSION not in ['.mkv', '.mpegts']:
sub_streams = [item for item in video_details["streams"] if sub_streams = [item for item in video_details['streams'] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ item['codec_type'] == 'subtitle' and item['codec_name'] != 'hdmv_pgs_subtitle' and item[
"codec_name"] != "pgssub"] 'codec_name'] != 'pgssub']
for video in video_streams: for video in video_streams:
codec = video["codec_name"] codec = video['codec_name']
fr = video.get("avg_frame_rate", 0) fr = video.get('avg_frame_rate', 0)
width = video.get("width", 0) width = video.get('width', 0)
height = video.get("height", 0) height = video.get('height', 0)
scale = core.VRESOLUTION scale = core.VRESOLUTION
if codec in core.VCODEC_ALLOW or not core.VCODEC: if codec in core.VCODEC_ALLOW or not core.VCODEC:
video_cmd.extend(['-c:v', 'copy']) video_cmd.extend(['-c:v', 'copy'])
@ -227,14 +227,14 @@ def build_commands(file, new_dir, movie_name, bitbucket):
w_scale = width / float(scale.split(':')[0]) w_scale = width / float(scale.split(':')[0])
h_scale = height / float(scale.split(':')[1]) h_scale = height / float(scale.split(':')[1])
if w_scale > h_scale: # widescreen, Scale by width only. if w_scale > h_scale: # widescreen, Scale by width only.
scale = "{width}:{height}".format( scale = '{width}:{height}'.format(
width=scale.split(':')[0], width=scale.split(':')[0],
height=int((height / w_scale) / 2) * 2, height=int((height / w_scale) / 2) * 2,
) )
if w_scale > 1: if w_scale > 1:
video_cmd.extend(['-vf', 'scale={width}'.format(width=scale)]) video_cmd.extend(['-vf', 'scale={width}'.format(width=scale)])
else: # lower or matching ratio, scale by height only. else: # lower or matching ratio, scale by height only.
scale = "{width}:{height}".format( scale = '{width}:{height}'.format(
width=int((width / h_scale) / 2) * 2, width=int((width / h_scale) / 2) * 2,
height=scale.split(':')[1], height=scale.split(':')[1],
) )
@ -253,7 +253,7 @@ def build_commands(file, new_dir, movie_name, bitbucket):
video_cmd[1] = core.VCODEC video_cmd[1] = core.VCODEC
if core.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding. if core.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding.
video_cmd = ['-c:v', 'copy'] video_cmd = ['-c:v', 'copy']
map_cmd.extend(['-map', '0:{index}'.format(index=video["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=video['index'])])
break # Only one video needed break # Only one video needed
used_audio = 0 used_audio = 0
@ -262,51 +262,51 @@ def build_commands(file, new_dir, movie_name, bitbucket):
if audio_streams: if audio_streams:
for i, val in reversed(list(enumerate(audio_streams))): for i, val in reversed(list(enumerate(audio_streams))):
try: try:
if "Commentary" in val.get("tags").get("title"): # Split out commentry tracks. if 'Commentary' in val.get('tags').get('title'): # Split out commentry tracks.
commentary.append(val) commentary.append(val)
del audio_streams[i] del audio_streams[i]
except Exception: except Exception:
continue continue
try: try:
audio1 = [item for item in audio_streams if item["tags"]["language"] == core.ALANGUAGE] audio1 = [item for item in audio_streams if item['tags']['language'] == core.ALANGUAGE]
except Exception: # no language tags. Assume only 1 language. except Exception: # no language tags. Assume only 1 language.
audio1 = audio_streams audio1 = audio_streams
try: try:
audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW] audio2 = [item for item in audio1 if item['codec_name'] in core.ACODEC_ALLOW]
except Exception: except Exception:
audio2 = [] audio2 = []
try: try:
audio3 = [item for item in audio_streams if item["tags"]["language"] != core.ALANGUAGE] audio3 = [item for item in audio_streams if item['tags']['language'] != core.ALANGUAGE]
except Exception: except Exception:
audio3 = [] audio3 = []
try: try:
audio4 = [item for item in audio3 if item["codec_name"] in core.ACODEC_ALLOW] audio4 = [item for item in audio3 if item['codec_name'] in core.ACODEC_ALLOW]
except Exception: except Exception:
audio4 = [] audio4 = []
if audio2: # right (or only) language and codec... if audio2: # right (or only) language and codec...
map_cmd.extend(['-map', '0:{index}'.format(index=audio2[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio2[0]['index'])])
a_mapped.extend([audio2[0]["index"]]) a_mapped.extend([audio2[0]['index']])
bitrate = int(float(audio2[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio2[0].get('bit_rate', 0))) / 1000
channels = int(float(audio2[0].get("channels", 0))) channels = int(float(audio2[0].get('channels', 0)))
audio_cmd.extend(['-c:a:{0}'.format(used_audio), 'copy']) audio_cmd.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio1: # right (or only) language, wrong codec. elif audio1: # right (or only) language, wrong codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]['index'])])
a_mapped.extend([audio1[0]["index"]]) a_mapped.extend([audio1[0]['index']])
bitrate = int(float(audio1[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio1[0].get('bit_rate', 0))) / 1000
channels = int(float(audio1[0].get("channels", 0))) channels = int(float(audio1[0].get('channels', 0)))
audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy']) audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy'])
elif audio4: # wrong language, right codec. elif audio4: # wrong language, right codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio4[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio4[0]['index'])])
a_mapped.extend([audio4[0]["index"]]) a_mapped.extend([audio4[0]['index']])
bitrate = int(float(audio4[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio4[0].get('bit_rate', 0))) / 1000
channels = int(float(audio4[0].get("channels", 0))) channels = int(float(audio4[0].get('channels', 0)))
audio_cmd.extend(['-c:a:{0}'.format(used_audio), 'copy']) audio_cmd.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio3: # wrong language, wrong codec. just pick the default audio track elif audio3: # wrong language, wrong codec. just pick the default audio track
map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]['index'])])
a_mapped.extend([audio3[0]["index"]]) a_mapped.extend([audio3[0]['index']])
bitrate = int(float(audio3[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio3[0].get('bit_rate', 0))) / 1000
channels = int(float(audio3[0].get("channels", 0))) channels = int(float(audio3[0].get('channels', 0)))
audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy']) audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy'])
if core.ACHANNELS and channels and channels > core.ACHANNELS: if core.ACHANNELS and channels and channels > core.ACHANNELS:
@ -327,39 +327,39 @@ def build_commands(file, new_dir, movie_name, bitbucket):
if core.ACODEC2_ALLOW: if core.ACODEC2_ALLOW:
used_audio += 1 used_audio += 1
try: try:
audio5 = [item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW] audio5 = [item for item in audio1 if item['codec_name'] in core.ACODEC2_ALLOW]
except Exception: except Exception:
audio5 = [] audio5 = []
try: try:
audio6 = [item for item in audio3 if item["codec_name"] in core.ACODEC2_ALLOW] audio6 = [item for item in audio3 if item['codec_name'] in core.ACODEC2_ALLOW]
except Exception: except Exception:
audio6 = [] audio6 = []
if audio5: # right language and codec. if audio5: # right language and codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio5[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio5[0]['index'])])
a_mapped.extend([audio5[0]["index"]]) a_mapped.extend([audio5[0]['index']])
bitrate = int(float(audio5[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio5[0].get('bit_rate', 0))) / 1000
channels = int(float(audio5[0].get("channels", 0))) channels = int(float(audio5[0].get('channels', 0)))
audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy']) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio1: # right language wrong codec. elif audio1: # right language wrong codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]['index'])])
a_mapped.extend([audio1[0]["index"]]) a_mapped.extend([audio1[0]['index']])
bitrate = int(float(audio1[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio1[0].get('bit_rate', 0))) / 1000
channels = int(float(audio1[0].get("channels", 0))) channels = int(float(audio1[0].get('channels', 0)))
if core.ACODEC2: if core.ACODEC2:
audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2]) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2])
else: else:
audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy']) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio6: # wrong language, right codec elif audio6: # wrong language, right codec
map_cmd.extend(['-map', '0:{index}'.format(index=audio6[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio6[0]['index'])])
a_mapped.extend([audio6[0]["index"]]) a_mapped.extend([audio6[0]['index']])
bitrate = int(float(audio6[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio6[0].get('bit_rate', 0))) / 1000
channels = int(float(audio6[0].get("channels", 0))) channels = int(float(audio6[0].get('channels', 0)))
audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy']) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy'])
elif audio3: # wrong language, wrong codec just pick the default audio track elif audio3: # wrong language, wrong codec just pick the default audio track
map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]['index'])])
a_mapped.extend([audio3[0]["index"]]) a_mapped.extend([audio3[0]['index']])
bitrate = int(float(audio3[0].get("bit_rate", 0))) / 1000 bitrate = int(float(audio3[0].get('bit_rate', 0))) / 1000
channels = int(float(audio3[0].get("channels", 0))) channels = int(float(audio3[0].get('channels', 0)))
if core.ACODEC2: if core.ACODEC2:
audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2]) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2])
else: else:
@ -388,14 +388,14 @@ def build_commands(file, new_dir, movie_name, bitbucket):
if core.AINCLUDE and core.ACODEC3: if core.AINCLUDE and core.ACODEC3:
audio_streams.extend(commentary) # add commentry tracks back here. audio_streams.extend(commentary) # add commentry tracks back here.
for audio in audio_streams: for audio in audio_streams:
if audio["index"] in a_mapped: if audio['index'] in a_mapped:
continue continue
used_audio += 1 used_audio += 1
map_cmd.extend(['-map', '0:{index}'.format(index=audio["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=audio['index'])])
audio_cmd3 = [] audio_cmd3 = []
bitrate = int(float(audio.get("bit_rate", 0))) / 1000 bitrate = int(float(audio.get('bit_rate', 0))) / 1000
channels = int(float(audio.get("channels", 0))) channels = int(float(audio.get('channels', 0)))
if audio["codec_name"] in core.ACODEC3_ALLOW: if audio['codec_name'] in core.ACODEC3_ALLOW:
audio_cmd3.extend(['-c:a:{0}'.format(used_audio), 'copy']) audio_cmd3.extend(['-c:a:{0}'.format(used_audio), 'copy'])
else: else:
if core.ACODEC3: if core.ACODEC3:
@ -424,7 +424,7 @@ def build_commands(file, new_dir, movie_name, bitbucket):
n = 0 n = 0
for lan in core.SLANGUAGES: for lan in core.SLANGUAGES:
try: try:
subs1 = [item for item in sub_streams if item["tags"]["language"] == lan] subs1 = [item for item in sub_streams if item['tags']['language'] == lan]
except Exception: except Exception:
subs1 = [] subs1 = []
if core.BURN and not subs1 and not burnt and os.path.isfile(file): if core.BURN and not subs1 and not burnt and os.path.isfile(file):
@ -436,28 +436,28 @@ def build_commands(file, new_dir, movie_name, bitbucket):
if core.BURN and not burnt and os.path.isfile(input_file): if core.BURN and not burnt and os.path.isfile(input_file):
subloc = 0 subloc = 0
for index in range(len(sub_streams)): for index in range(len(sub_streams)):
if sub_streams[index]["index"] == sub["index"]: if sub_streams[index]['index'] == sub['index']:
subloc = index subloc = index
break break
video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=input_file, loc=subloc)]) video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=input_file, loc=subloc)])
burnt = 1 burnt = 1
if not core.ALLOWSUBS: if not core.ALLOWSUBS:
break break
if sub["codec_name"] in ["dvd_subtitle", "VobSub"] and core.SCODEC == "mov_text": # We can't convert these. if sub['codec_name'] in ['dvd_subtitle', 'VobSub'] and core.SCODEC == 'mov_text': # We can't convert these.
continue continue
map_cmd.extend(['-map', '0:{index}'.format(index=sub["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=sub['index'])])
s_mapped.extend([sub["index"]]) s_mapped.extend([sub['index']])
if core.SINCLUDE: if core.SINCLUDE:
for sub in sub_streams: for sub in sub_streams:
if not core.ALLOWSUBS: if not core.ALLOWSUBS:
break break
if sub["index"] in s_mapped: if sub['index'] in s_mapped:
continue continue
if sub["codec_name"] in ["dvd_subtitle", "VobSub"] and core.SCODEC == "mov_text": # We can't convert these. if sub['codec_name'] in ['dvd_subtitle', 'VobSub'] and core.SCODEC == 'mov_text': # We can't convert these.
continue continue
map_cmd.extend(['-map', '0:{index}'.format(index=sub["index"])]) map_cmd.extend(['-map', '0:{index}'.format(index=sub['index'])])
s_mapped.extend([sub["index"]]) s_mapped.extend([sub['index']])
if core.OUTPUTFASTSTART: if core.OUTPUTFASTSTART:
other_cmd.extend(['-movflags', '+faststart']) other_cmd.extend(['-movflags', '+faststart'])
@ -474,11 +474,11 @@ def build_commands(file, new_dir, movie_name, bitbucket):
if core.SEMBED and os.path.isfile(file): if core.SEMBED and os.path.isfile(file):
for subfile in get_subs(file): for subfile in get_subs(file):
sub_details, result = get_video_details(subfile) sub_details, result = get_video_details(subfile)
if not sub_details or not sub_details.get("streams"): if not sub_details or not sub_details.get('streams'):
continue continue
if core.SCODEC == "mov_text": if core.SCODEC == 'mov_text':
subcode = [stream["codec_name"] for stream in sub_details["streams"]] subcode = [stream['codec_name'] for stream in sub_details['streams']]
if set(subcode).intersection(["dvd_subtitle", "VobSub"]): # We can't convert these. if set(subcode).intersection(['dvd_subtitle', 'VobSub']): # We can't convert these.
continue continue
command.extend(['-i', subfile]) command.extend(['-i', subfile])
lan = os.path.splitext(os.path.splitext(subfile)[0])[1][1:].split('-')[0] lan = os.path.splitext(os.path.splitext(subfile)[0])[1][1:].split('-')[0]
@ -541,34 +541,34 @@ def extract_subs(file, newfile_path, bitbucket):
name = os.path.splitext(os.path.split(newfile_path)[1])[0] name = os.path.splitext(os.path.split(newfile_path)[1])[0]
try: try:
sub_streams = [item for item in video_details["streams"] if sub_streams = [item for item in video_details['streams'] if
item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[ item['codec_type'] == 'subtitle' and item['tags']['language'] in core.SLANGUAGES and item[
"codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"] 'codec_name'] != 'hdmv_pgs_subtitle' and item['codec_name'] != 'pgssub']
except Exception: except Exception:
sub_streams = [item for item in video_details["streams"] if sub_streams = [item for item in video_details['streams'] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ item['codec_type'] == 'subtitle' and item['codec_name'] != 'hdmv_pgs_subtitle' and item[
"codec_name"] != "pgssub"] 'codec_name'] != 'pgssub']
num = len(sub_streams) num = len(sub_streams)
for n in range(num): for n in range(num):
sub = sub_streams[n] sub = sub_streams[n]
idx = sub["index"] idx = sub['index']
lan = sub.get("tags", {}).get("language", "unk") lan = sub.get('tags', {}).get('language', 'unk')
if num == 1: if num == 1:
output_file = os.path.join(subdir, "{0}.srt".format(name)) output_file = os.path.join(subdir, '{0}.srt'.format(name))
if os.path.isfile(output_file): if os.path.isfile(output_file):
output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, n)) output_file = os.path.join(subdir, '{0}.{1}.srt'.format(name, n))
else: else:
output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, lan)) output_file = os.path.join(subdir, '{0}.{1}.srt'.format(name, lan))
if os.path.isfile(output_file): if os.path.isfile(output_file):
output_file = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n)) output_file = os.path.join(subdir, '{0}.{1}.{2}.srt'.format(name, lan, n))
command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an',
'-codec:{index}'.format(index=idx), 'srt', output_file] '-codec:{index}'.format(index=idx), 'srt', output_file]
if platform.system() != 'Windows': if platform.system() != 'Windows':
command = core.NICENESS + command command = core.NICENESS + command
logger.info("Extracting {0} subtitle from: {1}".format(lan, file)) logger.info('Extracting {0} subtitle from: {1}'.format(lan, file))
print_cmd(command) print_cmd(command)
result = 1 # set result to failed in case call fails. result = 1 # set result to failed in case call fails.
try: try:
@ -576,16 +576,16 @@ def extract_subs(file, newfile_path, bitbucket):
proc.communicate() proc.communicate()
result = proc.returncode result = proc.returncode
except Exception: except Exception:
logger.error("Extracting subtitle has failed") logger.error('Extracting subtitle has failed')
if result == 0: if result == 0:
try: try:
shutil.copymode(file, output_file) shutil.copymode(file, output_file)
except Exception: except Exception:
pass pass
logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file)) logger.info('Extracting {0} subtitle from {1} has succeeded'.format(lan, file))
else: else:
logger.error("Extracting subtitles has failed") logger.error('Extracting subtitles has failed')
def process_list(it, new_dir, bitbucket): def process_list(it, new_dir, bitbucket):
@ -597,20 +597,20 @@ def process_list(it, new_dir, bitbucket):
for item in it: for item in it:
ext = os.path.splitext(item)[1].lower() ext = os.path.splitext(item)[1].lower()
if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS: if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS:
logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER") logger.debug('Attempting to rip disk image: {0}'.format(item), 'TRANSCODER')
new_list.extend(rip_iso(item, new_dir, bitbucket)) new_list.extend(rip_iso(item, new_dir, bitbucket))
rem_list.append(item) rem_list.append(item)
elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS: elif re.match('.+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', item) and '.vob' not in core.IGNOREEXTENSIONS:
logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER") logger.debug('Found VIDEO_TS image file: {0}'.format(item), 'TRANSCODER')
if not vts_path: if not vts_path:
try: try:
vts_path = re.match("(.+VIDEO_TS)", item).groups()[0] vts_path = re.match('(.+VIDEO_TS)', item).groups()[0]
except Exception: except Exception:
vts_path = os.path.split(item)[0] vts_path = os.path.split(item)[0]
rem_list.append(item) rem_list.append(item)
elif re.match(".+VIDEO_TS.", item) or re.match(".+VTS_[0-9][0-9]_[0-9].", item): elif re.match('.+VIDEO_TS.', item) or re.match('.+VTS_[0-9][0-9]_[0-9].', item):
rem_list.append(item) rem_list.append(item)
elif core.CONCAT and re.match(".+[cC][dD][0-9].", item): elif core.CONCAT and re.match('.+[cC][dD][0-9].', item):
rem_list.append(item) rem_list.append(item)
combine.append(item) combine.append(item)
else: else:
@ -627,11 +627,11 @@ def process_list(it, new_dir, bitbucket):
it.extend(new_list) it.extend(new_list)
for item in rem_list: for item in rem_list:
it.remove(item) it.remove(item)
logger.debug("Successfully extracted .vob file {0} from disk image".format(new_list[0]), "TRANSCODER") logger.debug('Successfully extracted .vob file {0} from disk image'.format(new_list[0]), 'TRANSCODER')
elif new_list and not success: elif new_list and not success:
new_list = [] new_list = []
rem_list = [] rem_list = []
logger.error("Failed extracting .vob files from disk image. Stopping transcoding.", "TRANSCODER") logger.error('Failed extracting .vob files from disk image. Stopping transcoding.', 'TRANSCODER')
return it, rem_list, new_list, success return it, rem_list, new_list, success
@ -640,17 +640,17 @@ def rip_iso(item, new_dir, bitbucket):
failure_dir = 'failure' failure_dir = 'failure'
# Mount the ISO in your OS and call combineVTS. # Mount the ISO in your OS and call combineVTS.
if not core.SEVENZIP: if not core.SEVENZIP:
logger.error("No 7zip installed. Can't extract image file {0}".format(item), "TRANSCODER") logger.error('No 7zip installed. Can\'t extract image file {0}'.format(item), 'TRANSCODER')
new_files = [failure_dir] new_files = [failure_dir]
return new_files return new_files
cmd = [core.SEVENZIP, 'l', item] cmd = [core.SEVENZIP, 'l', item]
try: try:
logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER") logger.debug('Attempting to extract .vob from image file {0}'.format(item), 'TRANSCODER')
print_cmd(cmd) print_cmd(cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
out, err = proc.communicate() out, err = proc.communicate()
file_list = [re.match(r".+(VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in file_list = [re.match(r'.+(VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])', line).groups()[0] for line in
out.splitlines() if re.match(r".+VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)] out.splitlines() if re.match(r'.+VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', line)]
combined = [] combined = []
for n in range(99): for n in range(99):
concat = [] concat = []
@ -675,10 +675,10 @@ def rip_iso(item, new_dir, bitbucket):
name = os.path.splitext(os.path.split(item)[1])[0] name = os.path.splitext(os.path.split(item)[1])[0]
new_files.append({item: {'name': name, 'files': combined}}) new_files.append({item: {'name': name, 'files': combined}})
if not new_files: if not new_files:
logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER") logger.error('No VIDEO_TS folder found in image file {0}'.format(item), 'TRANSCODER')
new_files = [failure_dir] new_files = [failure_dir]
except Exception: except Exception:
logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER") logger.error('Failed to extract from image file {0}'.format(item), 'TRANSCODER')
new_files = [failure_dir] new_files = [failure_dir]
return new_files return new_files
@ -709,11 +709,11 @@ def combine_vts(vts_path):
def combine_cd(combine): def combine_cd(combine):
new_files = [] new_files = []
for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]): for item in set([re.match('(.+)[cC][dD][0-9].', item).groups()[0] for item in combine]):
concat = '' concat = ''
for n in range(99): for n in range(99):
files = [file for file in combine if files = [file for file in combine if
n + 1 == int(re.match(".+[cC][dD]([0-9]+).", file).groups()[0]) and item in file] n + 1 == int(re.match('.+[cC][dD]([0-9]+).', file).groups()[0]) and item in file]
if files: if files:
concat += '{file}|'.format(file=files[0]) concat += '{file}|'.format(file=files[0])
else: else:
@ -724,16 +724,16 @@ def combine_cd(combine):
def print_cmd(command): def print_cmd(command):
cmd = "" cmd = ''
for item in command: for item in command:
cmd = "{cmd} {item}".format(cmd=cmd, item=item) cmd = '{cmd} {item}'.format(cmd=cmd, item=item)
logger.debug("calling command:{0}".format(cmd)) logger.debug('calling command:{0}'.format(cmd))
def transcode_directory(dir_name): def transcode_directory(dir_name):
if not core.FFMPEG: if not core.FFMPEG:
return 1, dir_name return 1, dir_name
logger.info("Checking for files to be transcoded") logger.info('Checking for files to be transcoded')
final_result = 0 # initialize as successful final_result = 0 # initialize as successful
if core.OUTPUTVIDEOPATH: if core.OUTPUTVIDEOPATH:
new_dir = core.OUTPUTVIDEOPATH new_dir = core.OUTPUTVIDEOPATH
@ -768,11 +768,11 @@ def transcode_directory(dir_name):
os.remove(newfile_path) os.remove(newfile_path)
except OSError as e: except OSError as e:
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
logger.debug("Error when removing transcoding target: {0}".format(e)) logger.debug('Error when removing transcoding target: {0}'.format(e))
except Exception as e: except Exception as e:
logger.debug("Error when removing transcoding target: {0}".format(e)) logger.debug('Error when removing transcoding target: {0}'.format(e))
logger.info("Transcoding video: {0}".format(newfile_path)) logger.info('Transcoding video: {0}'.format(newfile_path))
print_cmd(command) print_cmd(command)
result = 1 # set result to failed in case call fails. result = 1 # set result to failed in case call fails.
try: try:
@ -789,7 +789,7 @@ def transcode_directory(dir_name):
proc.communicate() proc.communicate()
result = proc.returncode result = proc.returncode
except Exception: except Exception:
logger.error("Transcoding of video {0} has failed".format(newfile_path)) logger.error('Transcoding of video {0} has failed'.format(newfile_path))
if core.SUBSDIR and result == 0 and isinstance(file, string_types): if core.SUBSDIR and result == 0 and isinstance(file, string_types):
for sub in get_subs(file): for sub in get_subs(file):
@ -805,14 +805,14 @@ def transcode_directory(dir_name):
shutil.copymode(file, newfile_path) shutil.copymode(file, newfile_path)
except Exception: except Exception:
pass pass
logger.info("Transcoding of video to {0} succeeded".format(newfile_path)) logger.info('Transcoding of video to {0} succeeded'.format(newfile_path))
if os.path.isfile(newfile_path) and (file in new_list or not core.DUPLICATE): if os.path.isfile(newfile_path) and (file in new_list or not core.DUPLICATE):
try: try:
os.unlink(file) os.unlink(file)
except Exception: except Exception:
pass pass
else: else:
logger.error("Transcoding of video to {0} failed with result {1}".format(newfile_path, result)) logger.error('Transcoding of video to {0} failed with result {1}'.format(newfile_path, result))
# this will be 0 (successful) it all are successful, else will return a positive integer for failure. # this will be 0 (successful) it all are successful, else will return a positive integer for failure.
final_result = final_result + result final_result = final_result + result
if final_result == 0 and not core.DUPLICATE: if final_result == 0 and not core.DUPLICATE:

View file

@ -12,38 +12,38 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
final_result = 0 # start at 0. final_result = 0 # start at 0.
num_files = 0 num_files = 0
try: try:
core.USER_SCRIPT_MEDIAEXTENSIONS = settings["user_script_mediaExtensions"].lower() core.USER_SCRIPT_MEDIAEXTENSIONS = settings['user_script_mediaExtensions'].lower()
if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str): if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str):
core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',') core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',')
except Exception: except Exception:
core.USER_SCRIPT_MEDIAEXTENSIONS = [] core.USER_SCRIPT_MEDIAEXTENSIONS = []
core.USER_SCRIPT = settings.get("user_script_path") core.USER_SCRIPT = settings.get('user_script_path')
if not core.USER_SCRIPT or core.USER_SCRIPT == "None": # do nothing and return success. if not core.USER_SCRIPT or core.USER_SCRIPT == 'None': # do nothing and return success.
return [0, ""] return [0, '']
try: try:
core.USER_SCRIPT_PARAM = settings["user_script_param"] core.USER_SCRIPT_PARAM = settings['user_script_param']
if isinstance(core.USER_SCRIPT_PARAM, str): if isinstance(core.USER_SCRIPT_PARAM, str):
core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',') core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',')
except Exception: except Exception:
core.USER_SCRIPT_PARAM = [] core.USER_SCRIPT_PARAM = []
try: try:
core.USER_SCRIPT_SUCCESSCODES = settings["user_script_successCodes"] core.USER_SCRIPT_SUCCESSCODES = settings['user_script_successCodes']
if isinstance(core.USER_SCRIPT_SUCCESSCODES, str): if isinstance(core.USER_SCRIPT_SUCCESSCODES, str):
core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',') core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',')
except Exception: except Exception:
core.USER_SCRIPT_SUCCESSCODES = 0 core.USER_SCRIPT_SUCCESSCODES = 0
core.USER_SCRIPT_CLEAN = int(settings.get("user_script_clean", 1)) core.USER_SCRIPT_CLEAN = int(settings.get('user_script_clean', 1))
core.USER_SCRIPT_RUNONCE = int(settings.get("user_script_runOnce", 1)) core.USER_SCRIPT_RUNONCE = int(settings.get('user_script_runOnce', 1))
if core.CHECK_MEDIA: if core.CHECK_MEDIA:
for video in list_media_files(output_destination, media=True, audio=False, meta=False, archives=False): for video in list_media_files(output_destination, media=True, audio=False, meta=False, archives=False):
if transcoder.is_video_good(video, 0): if transcoder.is_video_good(video, 0):
import_subs(video) import_subs(video)
else: else:
logger.info("Corrupt video file found {0}. Deleting.".format(video), "USERSCRIPT") logger.info('Corrupt video file found {0}. Deleting.'.format(video), 'USERSCRIPT')
os.unlink(video) os.unlink(video)
for dirpath, dirnames, filenames in os.walk(output_destination): for dirpath, dirnames, filenames in os.walk(output_destination):
@ -52,25 +52,25 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
file_path = core.os.path.join(dirpath, file) file_path = core.os.path.join(dirpath, file)
file_name, file_extension = os.path.splitext(file) file_name, file_extension = os.path.splitext(file)
if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS: if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or 'all' in core.USER_SCRIPT_MEDIAEXTENSIONS:
num_files += 1 num_files += 1
if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files. if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files.
continue continue
command = [core.USER_SCRIPT] command = [core.USER_SCRIPT]
for param in core.USER_SCRIPT_PARAM: for param in core.USER_SCRIPT_PARAM:
if param == "FN": if param == 'FN':
command.append('{0}'.format(file)) command.append('{0}'.format(file))
continue continue
elif param == "FP": elif param == 'FP':
command.append('{0}'.format(file_path)) command.append('{0}'.format(file_path))
continue continue
elif param == "TN": elif param == 'TN':
command.append('{0}'.format(torrent_name)) command.append('{0}'.format(torrent_name))
continue continue
elif param == "TL": elif param == 'TL':
command.append('{0}'.format(torrent_label)) command.append('{0}'.format(torrent_label))
continue continue
elif param == "DN": elif param == 'DN':
if core.USER_SCRIPT_RUNONCE == 1: if core.USER_SCRIPT_RUNONCE == 1:
command.append('{0}'.format(output_destination)) command.append('{0}'.format(output_destination))
else: else:
@ -79,24 +79,24 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
else: else:
command.append(param) command.append(param)
continue continue
cmd = "" cmd = ''
for item in command: for item in command:
cmd = "{cmd} {item}".format(cmd=cmd, item=item) cmd = '{cmd} {item}'.format(cmd=cmd, item=item)
logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=file_path), "USERSCRIPT") logger.info('Running script {cmd} on file {path}.'.format(cmd=cmd, path=file_path), 'USERSCRIPT')
try: try:
p = Popen(command) p = Popen(command)
res = p.wait() res = p.wait()
if str(res) in core.USER_SCRIPT_SUCCESSCODES: # Linux returns 0 for successful. if str(res) in core.USER_SCRIPT_SUCCESSCODES: # Linux returns 0 for successful.
logger.info("UserScript {0} was successfull".format(command[0])) logger.info('UserScript {0} was successfull'.format(command[0]))
result = 0 result = 0
else: else:
logger.error("UserScript {0} has failed with return code: {1}".format(command[0], res), "USERSCRIPT") logger.error('UserScript {0} has failed with return code: {1}'.format(command[0], res), 'USERSCRIPT')
logger.info( logger.info(
"If the UserScript completed successfully you should add {0} to the user_script_successCodes".format( 'If the UserScript completed successfully you should add {0} to the user_script_successCodes'.format(
res), "USERSCRIPT") res), 'USERSCRIPT')
result = int(1) result = int(1)
except Exception: except Exception:
logger.error("UserScript {0} has failed".format(command[0]), "USERSCRIPT") logger.error('UserScript {0} has failed'.format(command[0]), 'USERSCRIPT')
result = int(1) result = int(1)
final_result += result final_result += result
@ -105,13 +105,13 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
for file in filenames: for file in filenames:
file_name, file_extension = os.path.splitext(file) file_name, file_extension = os.path.splitext(file)
if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL": if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == 'ALL':
num_files_new += 1 num_files_new += 1
if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0: if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0:
logger.info("All files have been processed. Cleaning outputDirectory {0}".format(output_destination)) logger.info('All files have been processed. Cleaning outputDirectory {0}'.format(output_destination))
remove_dir(output_destination) remove_dir(output_destination)
elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0: elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0:
logger.info("{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.".format( logger.info('{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.'.format(
num_files, num_files_new)) num_files, num_files_new))
return [final_result, ''] return [final_result, '']

View file

@ -56,7 +56,7 @@ shutil.copyfileobj = copyfileobj_fast
def report_nzb(failure_link, client_agent): def report_nzb(failure_link, client_agent):
# Contact indexer site # Contact indexer site
logger.info("Sending failure notification to indexer site") logger.info('Sending failure notification to indexer site')
if client_agent == 'nzbget': if client_agent == 'nzbget':
headers = {'User-Agent': 'NZBGet / nzbToMedia.py'} headers = {'User-Agent': 'NZBGet / nzbToMedia.py'}
elif client_agent == 'sabnzbd': elif client_agent == 'sabnzbd':
@ -66,7 +66,7 @@ def report_nzb(failure_link, client_agent):
try: try:
requests.post(failure_link, headers=headers, timeout=(30, 300)) requests.post(failure_link, headers=headers, timeout=(30, 300))
except Exception as e: except Exception as e:
logger.error("Unable to open URL {0} due to {1}".format(failure_link, e)) logger.error('Unable to open URL {0} due to {1}'.format(failure_link, e))
return return
@ -83,8 +83,8 @@ def sanitize_name(name):
""" """
# remove bad chars from the filename # remove bad chars from the filename
name = re.sub(r'[\\\/*]', '-', name) name = re.sub(r'[\\/*]', '-', name)
name = re.sub(r'[:"<>|?]', '', name) name = re.sub(r'[:\'<>|?]', '', name)
# remove leading/trailing periods and spaces # remove leading/trailing periods and spaces
name = name.strip(' .') name = name.strip(' .')
@ -110,15 +110,15 @@ def remote_dir(path):
return path return path
for local, remote in core.REMOTEPATHS: for local, remote in core.REMOTEPATHS:
if local in path: if local in path:
base_dirs = path.replace(local, "").split(os.sep) base_dirs = path.replace(local, '').split(os.sep)
if '/' in remote: if '/' in remote:
remote_sep = '/' remote_sep = '/'
else: else:
remote_sep = '\\' remote_sep = '\\'
new_path = remote_sep.join([remote] + base_dirs) new_path = remote_sep.join([remote] + base_dirs)
new_path = re.sub(r'(\S)(\\+)', r'\1\\', new_path) new_path = re.sub(r'(\S)(\\+)', r'\1\\', new_path)
new_path = re.sub(r'(\/+)', r'/', new_path) new_path = re.sub(r'(/+)', r'/', new_path)
new_path = re.sub(r'([\/\\])$', r'', new_path) new_path = re.sub(r'([/\\])$', r'', new_path)
return new_path return new_path
return path return path
@ -141,16 +141,16 @@ def category_search(input_directory, input_name, input_category, root, categorie
pathlist = os.path.normpath(input_directory).split(os.sep) pathlist = os.path.normpath(input_directory).split(os.sep)
if input_category and input_category in pathlist: if input_category and input_category in pathlist:
logger.debug("SEARCH: Found the Category: {0} in directory structure".format(input_category)) logger.debug('SEARCH: Found the Category: {0} in directory structure'.format(input_category))
elif input_category: elif input_category:
logger.debug("SEARCH: Could not find the category: {0} in the directory structure".format(input_category)) logger.debug('SEARCH: Could not find the category: {0} in the directory structure'.format(input_category))
else: else:
try: try:
input_category = list(set(pathlist) & set(categories))[-1] # assume last match is most relevant category. input_category = list(set(pathlist) & set(categories))[-1] # assume last match is most relevant category.
logger.debug("SEARCH: Found Category: {0} in directory structure".format(input_category)) logger.debug('SEARCH: Found Category: {0} in directory structure'.format(input_category))
except IndexError: except IndexError:
input_category = "" input_category = ''
logger.debug("SEARCH: Could not find a category in the directory structure") logger.debug('SEARCH: Could not find a category in the directory structure')
if not os.path.isdir(input_directory) and os.path.isfile(input_directory): # If the input directory is a file if not os.path.isdir(input_directory) and os.path.isfile(input_directory): # If the input directory is a file
if not input_name: if not input_name:
input_name = os.path.split(os.path.normpath(input_directory))[1] input_name = os.path.split(os.path.normpath(input_directory))[1]
@ -158,30 +158,30 @@ def category_search(input_directory, input_name, input_category, root, categorie
if input_category and os.path.isdir(os.path.join(input_directory, input_category)): if input_category and os.path.isdir(os.path.join(input_directory, input_category)):
logger.info( logger.info(
"SEARCH: Found category directory {0} in input directory directory {1}".format(input_category, input_directory)) 'SEARCH: Found category directory {0} in input directory directory {1}'.format(input_category, input_directory))
input_directory = os.path.join(input_directory, input_category) input_directory = os.path.join(input_directory, input_category)
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory))
if input_name and os.path.isdir(os.path.join(input_directory, input_name)): if input_name and os.path.isdir(os.path.join(input_directory, input_name)):
logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format(input_name, input_directory)) logger.info('SEARCH: Found torrent directory {0} in input directory directory {1}'.format(input_name, input_directory))
input_directory = os.path.join(input_directory, input_name) input_directory = os.path.join(input_directory, input_name)
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory))
tordir = True tordir = True
elif input_name and os.path.isdir(os.path.join(input_directory, sanitize_name(input_name))): elif input_name and os.path.isdir(os.path.join(input_directory, sanitize_name(input_name))):
logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format( logger.info('SEARCH: Found torrent directory {0} in input directory directory {1}'.format(
sanitize_name(input_name), input_directory)) sanitize_name(input_name), input_directory))
input_directory = os.path.join(input_directory, sanitize_name(input_name)) input_directory = os.path.join(input_directory, sanitize_name(input_name))
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory))
tordir = True tordir = True
elif input_name and os.path.isfile(os.path.join(input_directory, input_name)): elif input_name and os.path.isfile(os.path.join(input_directory, input_name)):
logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format(input_name, input_directory)) logger.info('SEARCH: Found torrent file {0} in input directory directory {1}'.format(input_name, input_directory))
input_directory = os.path.join(input_directory, input_name) input_directory = os.path.join(input_directory, input_name)
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory))
tordir = True tordir = True
elif input_name and os.path.isfile(os.path.join(input_directory, sanitize_name(input_name))): elif input_name and os.path.isfile(os.path.join(input_directory, sanitize_name(input_name))):
logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format( logger.info('SEARCH: Found torrent file {0} in input directory directory {1}'.format(
sanitize_name(input_name), input_directory)) sanitize_name(input_name), input_directory))
input_directory = os.path.join(input_directory, sanitize_name(input_name)) input_directory = os.path.join(input_directory, sanitize_name(input_name))
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory))
tordir = True tordir = True
imdbid = [item for item in pathlist if '.cp(tt' in item] # This looks for the .cp(tt imdb id in the path. imdbid = [item for item in pathlist if '.cp(tt' in item] # This looks for the .cp(tt imdb id in the path.
@ -194,7 +194,7 @@ def category_search(input_directory, input_name, input_category, root, categorie
index = pathlist.index(input_category) index = pathlist.index(input_category)
if index + 1 < len(pathlist): if index + 1 < len(pathlist):
tordir = True tordir = True
logger.info("SEARCH: Found a unique directory {0} in the category directory".format logger.info('SEARCH: Found a unique directory {0} in the category directory'.format
(pathlist[index + 1])) (pathlist[index + 1]))
if not input_name: if not input_name:
input_name = pathlist[index + 1] input_name = pathlist[index + 1]
@ -203,7 +203,7 @@ def category_search(input_directory, input_name, input_category, root, categorie
if input_name and not tordir: if input_name and not tordir:
if input_name in pathlist or sanitize_name(input_name) in pathlist: if input_name in pathlist or sanitize_name(input_name) in pathlist:
logger.info("SEARCH: Found torrent directory {0} in the directory structure".format(input_name)) logger.info('SEARCH: Found torrent directory {0} in the directory structure'.format(input_name))
tordir = True tordir = True
else: else:
root = 1 root = 1
@ -211,8 +211,8 @@ def category_search(input_directory, input_name, input_category, root, categorie
root = 2 root = 2
if root > 0: if root > 0:
logger.info("SEARCH: Could not find a unique directory for this download. Assume a common directory.") logger.info('SEARCH: Could not find a unique directory for this download. Assume a common directory.')
logger.info("SEARCH: We will try and determine which files to process, individually") logger.info('SEARCH: We will try and determine which files to process, individually')
return input_directory, input_name, input_category, root return input_directory, input_name, input_category, root
@ -234,7 +234,7 @@ def is_min_size(input_name, min_size):
try: try:
input_size = get_dir_size(os.path.dirname(input_name)) input_size = get_dir_size(os.path.dirname(input_name))
except Exception: except Exception:
logger.error("Failed to get file size for {0}".format(input_name), 'MINSIZE') logger.error('Failed to get file size for {0}'.format(input_name), 'MINSIZE')
return True return True
# Ignore files under a certain size # Ignore files under a certain size
@ -249,51 +249,51 @@ def is_sample(input_name):
def copy_link(src, target_link, use_link): def copy_link(src, target_link, use_link):
logger.info("MEDIAFILE: [{0}]".format(os.path.basename(target_link)), 'COPYLINK') logger.info('MEDIAFILE: [{0}]'.format(os.path.basename(target_link)), 'COPYLINK')
logger.info("SOURCE FOLDER: [{0}]".format(os.path.dirname(src)), 'COPYLINK') logger.info('SOURCE FOLDER: [{0}]'.format(os.path.dirname(src)), 'COPYLINK')
logger.info("TARGET FOLDER: [{0}]".format(os.path.dirname(target_link)), 'COPYLINK') logger.info('TARGET FOLDER: [{0}]'.format(os.path.dirname(target_link)), 'COPYLINK')
if src != target_link and os.path.exists(target_link): if src != target_link and os.path.exists(target_link):
logger.info("MEDIAFILE already exists in the TARGET folder, skipping ...", 'COPYLINK') logger.info('MEDIAFILE already exists in the TARGET folder, skipping ...', 'COPYLINK')
return True return True
elif src == target_link and os.path.isfile(target_link) and os.path.isfile(src): elif src == target_link and os.path.isfile(target_link) and os.path.isfile(src):
logger.info("SOURCE AND TARGET files are the same, skipping ...", 'COPYLINK') logger.info('SOURCE AND TARGET files are the same, skipping ...', 'COPYLINK')
return True return True
elif src == os.path.dirname(target_link): elif src == os.path.dirname(target_link):
logger.info("SOURCE AND TARGET folders are the same, skipping ...", 'COPYLINK') logger.info('SOURCE AND TARGET folders are the same, skipping ...', 'COPYLINK')
return True return True
make_dir(os.path.dirname(target_link)) make_dir(os.path.dirname(target_link))
try: try:
if use_link == 'dir': if use_link == 'dir':
logger.info("Directory linking SOURCE FOLDER -> TARGET FOLDER", 'COPYLINK') logger.info('Directory linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK')
linktastic.dirlink(src, target_link) linktastic.dirlink(src, target_link)
return True return True
if use_link == 'junction': if use_link == 'junction':
logger.info("Directory junction linking SOURCE FOLDER -> TARGET FOLDER", 'COPYLINK') logger.info('Directory junction linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK')
linktastic.dirlink(src, target_link) linktastic.dirlink(src, target_link)
return True return True
elif use_link == "hard": elif use_link == 'hard':
logger.info("Hard linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') logger.info('Hard linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
linktastic.link(src, target_link) linktastic.link(src, target_link)
return True return True
elif use_link == "sym": elif use_link == 'sym':
logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
linktastic.symlink(src, target_link) linktastic.symlink(src, target_link)
return True return True
elif use_link == "move-sym": elif use_link == 'move-sym':
logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
shutil.move(src, target_link) shutil.move(src, target_link)
linktastic.symlink(target_link, src) linktastic.symlink(target_link, src)
return True return True
elif use_link == "move": elif use_link == 'move':
logger.info("Moving SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') logger.info('Moving SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
shutil.move(src, target_link) shutil.move(src, target_link)
return True return True
except Exception as e: except Exception as e:
logger.warning("Error: {0}, copying instead ... ".format(e), 'COPYLINK') logger.warning('Error: {0}, copying instead ... '.format(e), 'COPYLINK')
logger.info("Copying SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') logger.info('Copying SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
shutil.copy(src, target_link) shutil.copy(src, target_link)
return True return True
@ -317,13 +317,13 @@ def replace_links(link):
target = os.readlink(target) target = os.readlink(target)
n = n + 1 n = n + 1
if n > 1: if n > 1:
logger.info("Changing sym-link: {0} to point directly to file: {1}".format(link, target), 'COPYLINK') logger.info('Changing sym-link: {0} to point directly to file: {1}'.format(link, target), 'COPYLINK')
os.unlink(link) os.unlink(link)
linktastic.symlink(target, link) linktastic.symlink(target, link)
def flatten(output_destination): def flatten(output_destination):
logger.info("FLATTEN: Flattening directory: {0}".format(output_destination)) logger.info('FLATTEN: Flattening directory: {0}'.format(output_destination))
for outputFile in list_media_files(output_destination): for outputFile in list_media_files(output_destination):
dir_path = os.path.dirname(outputFile) dir_path = os.path.dirname(outputFile)
file_name = os.path.basename(outputFile) file_name = os.path.basename(outputFile)
@ -336,7 +336,7 @@ def flatten(output_destination):
try: try:
shutil.move(outputFile, target) shutil.move(outputFile, target)
except Exception: except Exception:
logger.error("Could not flatten {0}".format(outputFile), 'FLATTEN') logger.error('Could not flatten {0}'.format(outputFile), 'FLATTEN')
remove_empty_folders(output_destination) # Cleanup empty directories remove_empty_folders(output_destination) # Cleanup empty directories
@ -347,7 +347,7 @@ def remove_empty_folders(path, remove_root=True):
return return
# remove empty subfolders # remove empty subfolders
logger.debug("Checking for empty folders in:{0}".format(path)) logger.debug('Checking for empty folders in:{0}'.format(path))
files = os.listdir(text_type(path)) files = os.listdir(text_type(path))
if len(files): if len(files):
for f in files: for f in files:
@ -358,7 +358,7 @@ def remove_empty_folders(path, remove_root=True):
# if folder empty, delete it # if folder empty, delete it
files = os.listdir(text_type(path)) files = os.listdir(text_type(path))
if len(files) == 0 and remove_root: if len(files) == 0 and remove_root:
logger.debug("Removing empty folder:{}".format(path)) logger.debug('Removing empty folder:{}'.format(path))
os.rmdir(path) os.rmdir(path)
@ -386,7 +386,7 @@ def wake_on_lan(ethernet_address):
int(addr_byte[4], 16), int(addr_byte[4], 16),
int(addr_byte[5], 16)) int(addr_byte[5], 16))
# Build the Wake-On-LAN "Magic Packet"... # Build the Wake-On-LAN 'Magic Packet'...
msg = b'\xff' * 6 + hw_addr * 16 msg = b'\xff' * 6 + hw_addr * 16
@ -402,28 +402,28 @@ def wake_on_lan(ethernet_address):
def test_connection(host, port): def test_connection(host, port):
try: try:
socket.create_connection((host, port)) socket.create_connection((host, port))
return "Up" return 'Up'
except Exception: except Exception:
return "Down" return 'Down'
def wake_up(): def wake_up():
host = core.CFG["WakeOnLan"]["host"] host = core.CFG['WakeOnLan']['host']
port = int(core.CFG["WakeOnLan"]["port"]) port = int(core.CFG['WakeOnLan']['port'])
mac = core.CFG["WakeOnLan"]["mac"] mac = core.CFG['WakeOnLan']['mac']
i = 1 i = 1
while test_connection(host, port) == "Down" and i < 4: while test_connection(host, port) == 'Down' and i < 4:
logger.info(("Sending WakeOnLan Magic Packet for mac: {0}".format(mac))) logger.info(('Sending WakeOnLan Magic Packet for mac: {0}'.format(mac)))
wake_on_lan(mac) wake_on_lan(mac)
time.sleep(20) time.sleep(20)
i = i + 1 i = i + 1
if test_connection(host, port) == "Down": # final check. if test_connection(host, port) == 'Down': # final check.
logger.warning("System with mac: {0} has not woken after 3 attempts. " logger.warning('System with mac: {0} has not woken after 3 attempts. '
"Continuing with the rest of the script.".format(mac)) 'Continuing with the rest of the script.'.format(mac))
else: else:
logger.info("System with mac: {0} has been woken. Continuing with the rest of the script.".format(mac)) logger.info('System with mac: {0} has been woken. Continuing with the rest of the script.'.format(mac))
def char_replace(name): def char_replace(name):
@ -470,36 +470,36 @@ def char_replace(name):
def convert_to_ascii(input_name, dir_name):
    """Re-encode the download name and directory tree via ``char_replace``.

    Returns the (possibly rewritten) ``input_name`` and ``dir_name``.
    Does nothing when the [ASCII] convert option is off or on Windows.
    """
    if int(core.CFG['ASCII']['convert']) == 0 or os.name == 'nt':
        # just return if we don't want to convert or on windows os and '\' is replaced!.
        return input_name, dir_name

    encoded, input_name = char_replace(input_name)

    parent, leaf = os.path.split(dir_name)
    if not leaf:  # path ended with a trailing separator
        parent, leaf = os.path.split(parent)

    encoded, new_leaf = char_replace(leaf)
    if encoded:
        dir_name = os.path.join(parent, new_leaf)
        logger.info('Renaming directory to: {0}.'.format(new_leaf), 'ENCODER')
        os.rename(os.path.join(parent, leaf), dir_name)
        if 'NZBOP_SCRIPTDIR' in os.environ:
            # running under NZBGet: report the renamed directory back to it
            print('[NZB] DIRECTORY={0}'.format(dir_name))

    # bottom-up walk so nested directories are renamed before their parents
    for root, subdirs, _ in os.walk(dir_name, topdown=False):
        for subdir in subdirs:
            encoded, new_subdir = char_replace(subdir)
            if encoded:
                logger.info('Renaming directory to: {0}.'.format(new_subdir), 'ENCODER')
                os.rename(os.path.join(root, subdir), os.path.join(root, new_subdir))

    for root, _, files in os.walk(dir_name):
        for name in files:
            encoded, new_name = char_replace(name)
            if encoded:
                logger.info('Renaming file to: {0}.'.format(new_name), 'ENCODER')
                os.rename(os.path.join(root, name), os.path.join(root, new_name))

    return input_name, dir_name
@ -511,7 +511,7 @@ def parse_other(args):
def parse_rtorrent(args): def parse_rtorrent(args):
# rtorrent usage: system.method.set_key = event.download.finished,TorrentToMedia, # rtorrent usage: system.method.set_key = event.download.finished,TorrentToMedia,
# "execute={/path/to/nzbToMedia/TorrentToMedia.py,\"$d.get_base_path=\",\"$d.get_name=\",\"$d.get_custom1=\",\"$d.get_hash=\"}" # 'execute={/path/to/nzbToMedia/TorrentToMedia.py,\'$d.get_base_path=\',\'$d.get_name=\',\'$d.get_custom1=\',\'$d.get_hash=\'}'
input_directory = os.path.normpath(args[1]) input_directory = os.path.normpath(args[1])
try: try:
input_name = args[2] input_name = args[2]
@ -534,7 +534,7 @@ def parse_rtorrent(args):
def parse_utorrent(args): def parse_utorrent(args):
# uTorrent usage: call TorrentToMedia.py "%D" "%N" "%L" "%I" # uTorrent usage: call TorrentToMedia.py '%D' '%N' '%L' '%I'
input_directory = os.path.normpath(args[1]) input_directory = os.path.normpath(args[1])
input_name = args[2] input_name = args[2]
try: try:
@ -577,7 +577,7 @@ def parse_transmission(args):
def parse_vuze(args): def parse_vuze(args):
# vuze usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py "%D%N%L%I%K%F" # vuze usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py '%D%N%L%I%K%F'
try: try:
cur_input = args[1].split(',') cur_input = args[1].split(',')
except Exception: except Exception:
@ -612,29 +612,29 @@ def parse_vuze(args):
def parse_qbittorrent(args): def parse_qbittorrent(args):
# qbittorrent usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py "%D|%N|%L|%I" # qbittorrent usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py '%D|%N|%L|%I'
try: try:
cur_input = args[1].split('|') cur_input = args[1].split('|')
except Exception: except Exception:
cur_input = [] cur_input = []
try: try:
input_directory = os.path.normpath(cur_input[0].replace('"', '')) input_directory = os.path.normpath(cur_input[0].replace('\'', ''))
except Exception: except Exception:
input_directory = '' input_directory = ''
try: try:
input_name = cur_input[1].replace('"', '') input_name = cur_input[1].replace('\'', '')
except Exception: except Exception:
input_name = '' input_name = ''
try: try:
input_category = cur_input[2].replace('"', '') input_category = cur_input[2].replace('\'', '')
except Exception: except Exception:
input_category = '' input_category = ''
try: try:
input_hash = cur_input[3].replace('"', '') input_hash = cur_input[3].replace('\'', '')
except Exception: except Exception:
input_hash = '' input_hash = ''
try: try:
input_id = cur_input[3].replace('"', '') input_id = cur_input[3].replace('\'', '')
except Exception: except Exception:
input_id = '' input_id = ''
@ -664,7 +664,7 @@ def get_dirs(section, subsection, link='hard'):
def process_dir(path): def process_dir(path):
folders = [] folders = []
logger.info("Searching {0} for mediafiles to post-process ...".format(path)) logger.info('Searching {0} for mediafiles to post-process ...'.format(path))
sync = [o for o in os.listdir(text_type(path)) if os.path.splitext(o)[1] in ['.!sync', '.bts']] sync = [o for o in os.listdir(text_type(path)) if os.path.splitext(o)[1] in ['.!sync', '.bts']]
# search for single files and move them into their own folder for post-processing # search for single files and move them into their own folder for post-processing
for mediafile in [os.path.join(path, o) for o in os.listdir(text_type(path)) if for mediafile in [os.path.join(path, o) for o in os.listdir(text_type(path)) if
@ -674,7 +674,7 @@ def get_dirs(section, subsection, link='hard'):
if os.path.split(mediafile)[1] in ['Thumbs.db', 'thumbs.db']: if os.path.split(mediafile)[1] in ['Thumbs.db', 'thumbs.db']:
continue continue
try: try:
logger.debug("Found file {0} in root directory {1}.".format(os.path.split(mediafile)[1], path)) logger.debug('Found file {0} in root directory {1}.'.format(os.path.split(mediafile)[1], path))
new_path = None new_path = None
file_ext = os.path.splitext(mediafile)[1] file_ext = os.path.splitext(mediafile)[1]
try: try:
@ -686,7 +686,7 @@ def get_dirs(section, subsection, link='hard'):
album = f.album album = f.album
# create new path # create new path
new_path = os.path.join(path, "{0} - {1}".format(sanitize_name(artist), sanitize_name(album))) new_path = os.path.join(path, '{0} - {1}'.format(sanitize_name(artist), sanitize_name(album)))
elif file_ext in core.MEDIACONTAINER: elif file_ext in core.MEDIACONTAINER:
f = guessit.guessit(mediafile) f = guessit.guessit(mediafile)
@ -698,7 +698,7 @@ def get_dirs(section, subsection, link='hard'):
new_path = os.path.join(path, sanitize_name(title)) new_path = os.path.join(path, sanitize_name(title))
except Exception as e: except Exception as e:
logger.error("Exception parsing name for media file: {0}: {1}".format(os.path.split(mediafile)[1], e)) logger.error('Exception parsing name for media file: {0}: {1}'.format(os.path.split(mediafile)[1], e))
if not new_path: if not new_path:
title = os.path.splitext(os.path.basename(mediafile))[0] title = os.path.splitext(os.path.basename(mediafile))[0]
@ -727,7 +727,7 @@ def get_dirs(section, subsection, link='hard'):
# link file to its new path # link file to its new path
copy_link(mediafile, newfile, link) copy_link(mediafile, newfile, link)
except Exception as e: except Exception as e:
logger.error("Failed to move {0} to its own directory: {1}".format(os.path.split(mediafile)[1], e)) logger.error('Failed to move {0} to its own directory: {1}'.format(os.path.split(mediafile)[1], e))
# removeEmptyFolders(path, removeRoot=False) # removeEmptyFolders(path, removeRoot=False)
@ -741,14 +741,14 @@ def get_dirs(section, subsection, link='hard'):
return folders return folders
try: try:
watch_dir = os.path.join(core.CFG[section][subsection]["watch_dir"], subsection) watch_dir = os.path.join(core.CFG[section][subsection]['watch_dir'], subsection)
if os.path.exists(watch_dir): if os.path.exists(watch_dir):
to_return.extend(process_dir(watch_dir)) to_return.extend(process_dir(watch_dir))
elif os.path.exists(core.CFG[section][subsection]["watch_dir"]): elif os.path.exists(core.CFG[section][subsection]['watch_dir']):
to_return.extend(process_dir(core.CFG[section][subsection]["watch_dir"])) to_return.extend(process_dir(core.CFG[section][subsection]['watch_dir']))
except Exception as e: except Exception as e:
logger.error("Failed to add directories from {0} for post-processing: {1}".format logger.error('Failed to add directories from {0} for post-processing: {1}'.format
(core.CFG[section][subsection]["watch_dir"], e)) (core.CFG[section][subsection]['watch_dir'], e))
if core.USELINK == 'move': if core.USELINK == 'move':
try: try:
@ -756,10 +756,10 @@ def get_dirs(section, subsection, link='hard'):
if os.path.exists(output_directory): if os.path.exists(output_directory):
to_return.extend(process_dir(output_directory)) to_return.extend(process_dir(output_directory))
except Exception as e: except Exception as e:
logger.error("Failed to add directories from {0} for post-processing: {1}".format(core.OUTPUTDIRECTORY, e)) logger.error('Failed to add directories from {0} for post-processing: {1}'.format(core.OUTPUTDIRECTORY, e))
if not to_return: if not to_return:
logger.debug("No directories identified in {0}:{1} for post-processing".format(section, subsection)) logger.debug('No directories identified in {0}:{1} for post-processing'.format(section, subsection))
return list(set(to_return)) return list(set(to_return))
@ -784,11 +784,11 @@ def onerror(func, path, exc_info):
def remove_dir(dir_name):
    """Recursively delete ``dir_name``; log (instead of raising) on failure."""
    logger.info('Deleting {0}'.format(dir_name))
    try:
        # per-file failures are delegated to the module's onerror handler
        shutil.rmtree(text_type(dir_name), onerror=onerror)
    except Exception:
        logger.error('Unable to delete folder {0}'.format(dir_name))
def clean_dir(path, section, subsection): def clean_dir(path, section, subsection):
@ -808,15 +808,15 @@ def clean_dir(path, section, subsection):
num_files = 'unknown' num_files = 'unknown'
if num_files > 0: if num_files > 0:
logger.info( logger.info(
"Directory {0} still contains {1} unprocessed file(s), skipping ...".format(path, num_files), 'Directory {0} still contains {1} unprocessed file(s), skipping ...'.format(path, num_files),
'CLEANDIRS') 'CLEANDIRS')
return return
logger.info("Directory {0} has been processed, removing ...".format(path), 'CLEANDIRS') logger.info('Directory {0} has been processed, removing ...'.format(path), 'CLEANDIRS')
try: try:
shutil.rmtree(path, onerror=onerror) shutil.rmtree(path, onerror=onerror)
except Exception: except Exception:
logger.error("Unable to delete directory {0}".format(path)) logger.error('Unable to delete directory {0}'.format(path))
def create_torrent_class(client_agent): def create_torrent_class(client_agent):
@ -825,97 +825,97 @@ def create_torrent_class(client_agent):
if client_agent == 'utorrent': if client_agent == 'utorrent':
try: try:
logger.debug("Connecting to {0}: {1}".format(client_agent, core.UTORRENTWEBUI)) logger.debug('Connecting to {0}: {1}'.format(client_agent, core.UTORRENTWEBUI))
tc = UTorrentClient(core.UTORRENTWEBUI, core.UTORRENTUSR, core.UTORRENTPWD) tc = UTorrentClient(core.UTORRENTWEBUI, core.UTORRENTUSR, core.UTORRENTPWD)
except Exception: except Exception:
logger.error("Failed to connect to uTorrent") logger.error('Failed to connect to uTorrent')
if client_agent == 'transmission': if client_agent == 'transmission':
try: try:
logger.debug("Connecting to {0}: http://{1}:{2}".format( logger.debug('Connecting to {0}: http://{1}:{2}'.format(
client_agent, core.TRANSMISSIONHOST, core.TRANSMISSIONPORT)) client_agent, core.TRANSMISSIONHOST, core.TRANSMISSIONPORT))
tc = TransmissionClient(core.TRANSMISSIONHOST, core.TRANSMISSIONPORT, tc = TransmissionClient(core.TRANSMISSIONHOST, core.TRANSMISSIONPORT,
core.TRANSMISSIONUSR, core.TRANSMISSIONUSR,
core.TRANSMISSIONPWD) core.TRANSMISSIONPWD)
except Exception: except Exception:
logger.error("Failed to connect to Transmission") logger.error('Failed to connect to Transmission')
if client_agent == 'deluge': if client_agent == 'deluge':
try: try:
logger.debug("Connecting to {0}: http://{1}:{2}".format(client_agent, core.DELUGEHOST, core.DELUGEPORT)) logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.DELUGEHOST, core.DELUGEPORT))
tc = DelugeClient() tc = DelugeClient()
tc.connect(host=core.DELUGEHOST, port=core.DELUGEPORT, username=core.DELUGEUSR, tc.connect(host=core.DELUGEHOST, port=core.DELUGEPORT, username=core.DELUGEUSR,
password=core.DELUGEPWD) password=core.DELUGEPWD)
except Exception: except Exception:
logger.error("Failed to connect to Deluge") logger.error('Failed to connect to Deluge')
if client_agent == 'qbittorrent': if client_agent == 'qbittorrent':
try: try:
logger.debug("Connecting to {0}: http://{1}:{2}".format(client_agent, core.QBITTORRENTHOST, core.QBITTORRENTPORT)) logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.QBITTORRENTHOST, core.QBITTORRENTPORT))
tc = qBittorrentClient("http://{0}:{1}/".format(core.QBITTORRENTHOST, core.QBITTORRENTPORT)) tc = qBittorrentClient('http://{0}:{1}/'.format(core.QBITTORRENTHOST, core.QBITTORRENTPORT))
tc.login(core.QBITTORRENTUSR, core.QBITTORRENTPWD) tc.login(core.QBITTORRENTUSR, core.QBITTORRENTPWD)
except Exception: except Exception:
logger.error("Failed to connect to qBittorrent") logger.error('Failed to connect to qBittorrent')
return tc return tc
def pause_torrent(client_agent, input_hash, input_id, input_name):
    """Ask the torrent client to stop/pause the torrent while it is processed.

    Failures are logged as warnings and never propagated.
    """
    logger.debug('Stopping torrent {0} in {1} while processing'.format(input_name, client_agent))
    try:
        client = core.TORRENT_CLASS
        if client != '':
            if client_agent == 'utorrent':
                client.stop(input_hash)
            elif client_agent == 'transmission':
                client.stop_torrent(input_id)
            elif client_agent == 'deluge':
                client.core.pause_torrent([input_id])
            elif client_agent == 'qbittorrent':
                client.pause(input_hash)
        # give the client a moment to release the files
        time.sleep(5)
    except Exception:
        logger.warning('Failed to stop torrent {0} in {1}'.format(input_name, client_agent))
def resume_torrent(client_agent, input_hash, input_id, input_name):
    """Restart a previously paused torrent, honouring TORRENT_RESUME.

    No-op unless core.TORRENT_RESUME == 1; failures are logged as warnings.
    """
    if core.TORRENT_RESUME != 1:
        return
    logger.debug('Starting torrent {0} in {1}'.format(input_name, client_agent))
    try:
        client = core.TORRENT_CLASS
        if client != '':
            if client_agent == 'utorrent':
                client.start(input_hash)
            elif client_agent == 'transmission':
                client.start_torrent(input_id)
            elif client_agent == 'deluge':
                client.core.resume_torrent([input_id])
            elif client_agent == 'qbittorrent':
                client.resume(input_hash)
        # give the client a moment to act on the request
        time.sleep(5)
    except Exception:
        logger.warning('Failed to start torrent {0} in {1}'.format(input_name, client_agent))
def remove_torrent(client_agent, input_hash, input_id, input_name):
    """Delete the torrent (and its data) when configured to, else resume it.

    Deletion happens when DELETE_ORIGINAL is set or links are 'move'd;
    otherwise the torrent is handed back to resume_torrent.
    """
    if core.DELETE_ORIGINAL == 1 or core.USELINK == 'move':
        logger.debug('Deleting torrent {0} from {1}'.format(input_name, client_agent))
        try:
            client = core.TORRENT_CLASS
            if client != '':
                if client_agent == 'utorrent':
                    client.removedata(input_hash)
                    client.remove(input_hash)
                elif client_agent == 'transmission':
                    client.remove_torrent(input_id, True)
                elif client_agent == 'deluge':
                    client.core.remove_torrent(input_id, True)
                elif client_agent == 'qbittorrent':
                    client.delete_permanently(input_hash)
            # give the client a moment to act on the request
            time.sleep(5)
        except Exception:
            logger.warning('Failed to delete torrent {0} in {1}'.format(input_name, client_agent))
    else:
        resume_torrent(client_agent, input_hash, input_id, input_name)
def find_download(client_agent, download_id): def find_download(client_agent, download_id):
logger.debug("Searching for Download on {0} ...".format(client_agent)) logger.debug('Searching for Download on {0} ...'.format(client_agent))
if client_agent == 'utorrent': if client_agent == 'utorrent':
torrents = core.TORRENT_CLASS.list()[1]['torrents'] torrents = core.TORRENT_CLASS.list()[1]['torrents']
for torrent in torrents: for torrent in torrents:
@ -935,21 +935,21 @@ def find_download(client_agent, download_id):
if torrent['hash'] == download_id: if torrent['hash'] == download_id:
return True return True
if client_agent == 'sabnzbd': if client_agent == 'sabnzbd':
if "http" in core.SABNZBDHOST: if 'http' in core.SABNZBDHOST:
base_url = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) base_url = '{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT)
else: else:
base_url = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) base_url = 'http://{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT)
url = base_url url = base_url
params = { params = {
'apikey': core.SABNZBDAPIKEY, 'apikey': core.SABNZBDAPIKEY,
'mode': "get_files", 'mode': 'get_files',
'output': 'json', 'output': 'json',
'value': download_id, 'value': download_id,
} }
try: try:
r = requests.get(url, params=params, verify=False, timeout=(30, 120)) r = requests.get(url, params=params, verify=False, timeout=(30, 120))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL") logger.error('Unable to open URL')
return False # failure return False # failure
result = r.json() result = r.json()
@ -961,48 +961,48 @@ def find_download(client_agent, download_id):
def get_nzoid(input_name):
    """Find the SABnzbd nzo_id matching ``input_name``.

    Queries both the queue and the history APIs, then matches on the raw
    input name or its extension-stripped base name. Returns the nzo_id
    string, or None when nothing matches or SABnzbd is unreachable.
    """
    nzoid = None
    slots = []
    logger.debug('Searching for nzoid from SAbnzbd ...')
    host_spec = '{0}:{1}'.format(core.SABNZBDHOST, core.SABNZBDPORT)
    # honour a host that already carries its scheme
    url = host_spec + '/api' if 'http' in core.SABNZBDHOST else 'http://' + host_spec + '/api'
    params = {
        'apikey': core.SABNZBDAPIKEY,
        'mode': 'queue',
        'output': 'json',
    }
    try:
        response = requests.get(url, params=params, verify=False, timeout=(30, 120))
    except requests.ConnectionError:
        logger.error('Unable to open URL')
        return nzoid  # failure
    try:
        result = response.json()
        clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
        slots.extend([(slot['nzo_id'], slot['filename']) for slot in result['queue']['slots']])
    except Exception:
        logger.warning('Data from SABnzbd queue could not be parsed')
    params['mode'] = 'history'
    try:
        response = requests.get(url, params=params, verify=False, timeout=(30, 120))
    except requests.ConnectionError:
        logger.error('Unable to open URL')
        return nzoid  # failure
    try:
        result = response.json()
        clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
        slots.extend([(slot['nzo_id'], slot['name']) for slot in result['history']['slots']])
    except Exception:
        logger.warning('Data from SABnzbd history could not be parsed')
    try:
        for nzo_id, name in slots:
            if name in (input_name, clean_name):
                nzoid = nzo_id
                logger.debug('Found nzoid: {0}'.format(nzoid))
                break
    except Exception:
        logger.warning('Data from SABnzbd could not be parsed')
    return nzoid
@ -1014,13 +1014,13 @@ def clean_file_name(filename):
space, but handles decimal numbers in string, for example: space, but handles decimal numbers in string, for example:
""" """
filename = re.sub(r"(\D)\.(?!\s)(\D)", r"\1 \2", filename) filename = re.sub(r'(\D)\.(?!\s)(\D)', r'\1 \2', filename)
filename = re.sub(r"(\d)\.(\d{4})", r"\1 \2", filename) # if it ends in a year then don't keep the dot filename = re.sub(r'(\d)\.(\d{4})', r'\1 \2', filename) # if it ends in a year then don't keep the dot
filename = re.sub(r"(\D)\.(?!\s)", r"\1 ", filename) filename = re.sub(r'(\D)\.(?!\s)', r'\1 ', filename)
filename = re.sub(r"\.(?!\s)(\D)", r" \1", filename) filename = re.sub(r'\.(?!\s)(\D)', r' \1', filename)
filename = filename.replace("_", " ") filename = filename.replace('_', ' ')
filename = re.sub("-$", "", filename) filename = re.sub('-$', '', filename)
filename = re.sub(r"^\[.*]", "", filename) filename = re.sub(r'^\[.*]', '', filename)
return filename.strip() return filename.strip()
@ -1039,7 +1039,7 @@ def is_media_file(mediafile, media=True, audio=True, meta=True, archives=True, o
file_name, file_ext = os.path.splitext(mediafile) file_name, file_ext = os.path.splitext(mediafile)
try: try:
# ignore MAC OS's "resource fork" files # ignore MAC OS's 'resource fork' files
if file_name.startswith('._'): if file_name.startswith('._'):
return False return False
except Exception: except Exception:
@ -1111,14 +1111,14 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
m = re.search(r'(tt\d{7})', dir_name + input_name) m = re.search(r'(tt\d{7})', dir_name + input_name)
if m: if m:
imdbid = m.group(1) imdbid = m.group(1)
logger.info("Found imdbID [{0}]".format(imdbid)) logger.info('Found imdbID [{0}]'.format(imdbid))
return imdbid return imdbid
if os.path.isdir(dir_name): if os.path.isdir(dir_name):
for file in os.listdir(text_type(dir_name)): for file in os.listdir(text_type(dir_name)):
m = re.search(r'(tt\d{7})', file) m = re.search(r'(tt\d{7})', file)
if m: if m:
imdbid = m.group(1) imdbid = m.group(1)
logger.info("Found imdbID [{0}] via file name".format(imdbid)) logger.info('Found imdbID [{0}] via file name'.format(imdbid))
return imdbid return imdbid
if 'NZBPR__DNZB_MOREINFO' in os.environ: if 'NZBPR__DNZB_MOREINFO' in os.environ:
dnzb_more_info = os.environ.get('NZBPR__DNZB_MOREINFO', '') dnzb_more_info = os.environ.get('NZBPR__DNZB_MOREINFO', '')
@ -1127,7 +1127,7 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
m = regex.match(dnzb_more_info) m = regex.match(dnzb_more_info)
if m: if m:
imdbid = m.group(1) imdbid = m.group(1)
logger.info("Found imdbID [{0}] from DNZB-MoreInfo".format(imdbid)) logger.info('Found imdbID [{0}] from DNZB-MoreInfo'.format(imdbid))
return imdbid return imdbid
logger.info('Searching IMDB for imdbID ...') logger.info('Searching IMDB for imdbID ...')
try: try:
@ -1145,33 +1145,33 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
if 'year' in guess: if 'year' in guess:
year = guess['year'] year = guess['year']
url = "http://www.omdbapi.com" url = 'http://www.omdbapi.com'
if not omdb_api_key: if not omdb_api_key:
logger.info("Unable to determine imdbID: No api key provided for ombdapi.com.") logger.info('Unable to determine imdbID: No api key provided for ombdapi.com.')
return return
logger.debug("Opening URL: {0}".format(url)) logger.debug('Opening URL: {0}'.format(url))
try: try:
r = requests.get(url, params={'apikey': omdb_api_key, 'y': year, 't': title}, r = requests.get(url, params={'apikey': omdb_api_key, 'y': year, 't': title},
verify=False, timeout=(60, 300)) verify=False, timeout=(60, 300))
except requests.ConnectionError: except requests.ConnectionError:
logger.error("Unable to open URL {0}".format(url)) logger.error('Unable to open URL {0}'.format(url))
return return
try: try:
results = r.json() results = r.json()
except Exception: except Exception:
logger.error("No json data returned from omdbapi.com") logger.error('No json data returned from omdbapi.com')
try: try:
imdbid = results['imdbID'] imdbid = results['imdbID']
except Exception: except Exception:
logger.error("No imdbID returned from omdbapi.com") logger.error('No imdbID returned from omdbapi.com')
if imdbid: if imdbid:
logger.info("Found imdbID [{0}]".format(imdbid)) logger.info('Found imdbID [{0}]'.format(imdbid))
return imdbid return imdbid
logger.warning('Unable to find a imdbID for {0}'.format(input_name)) logger.warning('Unable to find a imdbID for {0}'.format(input_name))
@ -1186,7 +1186,7 @@ def extract_files(src, dst=None, keep_archive=None):
dir_path = os.path.dirname(inputFile) dir_path = os.path.dirname(inputFile)
full_file_name = os.path.basename(inputFile) full_file_name = os.path.basename(inputFile)
archive_name = os.path.splitext(full_file_name)[0] archive_name = os.path.splitext(full_file_name)[0]
archive_name = re.sub(r"part[0-9]+", "", archive_name) archive_name = re.sub(r'part[0-9]+', '', archive_name)
if dir_path in extracted_folder and archive_name in extracted_archive: if dir_path in extracted_folder and archive_name in extracted_archive:
continue # no need to extract this, but keep going to look for other archives and sub directories. continue # no need to extract this, but keep going to look for other archives and sub directories.
@ -1196,23 +1196,23 @@ def extract_files(src, dst=None, keep_archive=None):
extracted_folder.append(dir_path) extracted_folder.append(dir_path)
extracted_archive.append(archive_name) extracted_archive.append(archive_name)
except Exception: except Exception:
logger.error("Extraction failed for: {0}".format(full_file_name)) logger.error('Extraction failed for: {0}'.format(full_file_name))
for folder in extracted_folder: for folder in extracted_folder:
for inputFile in list_media_files(folder, media=False, audio=False, meta=False, archives=True): for inputFile in list_media_files(folder, media=False, audio=False, meta=False, archives=True):
full_file_name = os.path.basename(inputFile) full_file_name = os.path.basename(inputFile)
archive_name = os.path.splitext(full_file_name)[0] archive_name = os.path.splitext(full_file_name)[0]
archive_name = re.sub(r"part[0-9]+", "", archive_name) archive_name = re.sub(r'part[0-9]+', '', archive_name)
if archive_name not in extracted_archive or keep_archive: if archive_name not in extracted_archive or keep_archive:
continue # don't remove if we haven't extracted this archive, or if we want to preserve them. continue # don't remove if we haven't extracted this archive, or if we want to preserve them.
logger.info("Removing extracted archive {0} from folder {1} ...".format(full_file_name, folder)) logger.info('Removing extracted archive {0} from folder {1} ...'.format(full_file_name, folder))
try: try:
if not os.access(inputFile, os.W_OK): if not os.access(inputFile, os.W_OK):
os.chmod(inputFile, stat.S_IWUSR) os.chmod(inputFile, stat.S_IWUSR)
os.remove(inputFile) os.remove(inputFile)
time.sleep(1) time.sleep(1)
except Exception as e: except Exception as e:
logger.error("Unable to remove file {0} due to: {1}".format(inputFile, e)) logger.error('Unable to remove file {0} due to: {1}'.format(inputFile, e))
def import_subs(filename): def import_subs(filename):
@ -1232,23 +1232,23 @@ def import_subs(filename):
if not languages: if not languages:
return return
logger.info("Attempting to download subtitles for {0}".format(filename), 'SUBTITLES') logger.info('Attempting to download subtitles for {0}'.format(filename), 'SUBTITLES')
try: try:
video = subliminal.scan_video(filename) video = subliminal.scan_video(filename)
subtitles = subliminal.download_best_subtitles({video}, languages) subtitles = subliminal.download_best_subtitles({video}, languages)
subliminal.save_subtitles(video, subtitles[video]) subliminal.save_subtitles(video, subtitles[video])
except Exception as e: except Exception as e:
logger.error("Failed to download subtitles for {0} due to: {1}".format(filename, e), 'SUBTITLES') logger.error('Failed to download subtitles for {0} due to: {1}'.format(filename, e), 'SUBTITLES')
def server_responding(base_url):
    """Return True when an HTTP GET against ``base_url`` succeeds."""
    logger.debug('Attempting to connect to server at {0}'.format(base_url), 'SERVER')
    try:
        requests.get(base_url, timeout=(60, 120), verify=False)
    except (requests.ConnectionError, requests.exceptions.Timeout):
        logger.error('Server failed to respond at {0}'.format(base_url), 'SERVER')
        return False
    else:
        logger.debug('Server responded at {0}'.format(base_url), 'SERVER')
        return True
@ -1263,7 +1263,7 @@ def plex_update(category):
section = None section = None
if not core.PLEXSEC: if not core.PLEXSEC:
return return
logger.debug("Attempting to update Plex Library for category {0}.".format(category), 'PLEX') logger.debug('Attempting to update Plex Library for category {0}.'.format(category), 'PLEX')
for item in core.PLEXSEC: for item in core.PLEXSEC:
if item[0] == category: if item[0] == category:
section = item[1] section = item[1]
@ -1271,9 +1271,9 @@ def plex_update(category):
if section: if section:
url = '{url}{section}/refresh?X-Plex-Token={token}'.format(url=url, section=section, token=core.PLEXTOKEN) url = '{url}{section}/refresh?X-Plex-Token={token}'.format(url=url, section=section, token=core.PLEXTOKEN)
requests.get(url, timeout=(60, 120), verify=False) requests.get(url, timeout=(60, 120), verify=False)
logger.debug("Plex Library has been refreshed.", 'PLEX') logger.debug('Plex Library has been refreshed.', 'PLEX')
else: else:
logger.debug("Could not identify section for plex update", 'PLEX') logger.debug('Could not identify section for plex update', 'PLEX')
def backup_versioned_file(old_file, version): def backup_versioned_file(old_file, version):
@ -1283,41 +1283,41 @@ def backup_versioned_file(old_file, version):
while not os.path.isfile(new_file): while not os.path.isfile(new_file):
if not os.path.isfile(old_file): if not os.path.isfile(old_file):
logger.log(u"Not creating backup, {file} doesn't exist".format(file=old_file), logger.DEBUG) logger.log(u'Not creating backup, {file} doesn\'t exist'.format(file=old_file), logger.DEBUG)
break break
try: try:
logger.log(u"Trying to back up {old} to {new]".format(old=old_file, new=new_file), logger.DEBUG) logger.log(u'Trying to back up {old} to {new]'.format(old=old_file, new=new_file), logger.DEBUG)
shutil.copy(old_file, new_file) shutil.copy(old_file, new_file)
logger.log(u"Backup done", logger.DEBUG) logger.log(u'Backup done', logger.DEBUG)
break break
except Exception as error: except Exception as error:
logger.log(u"Error while trying to back up {old} to {new} : {msg}".format logger.log(u'Error while trying to back up {old} to {new} : {msg}'.format
(old=old_file, new=new_file, msg=error), logger.WARNING) (old=old_file, new=new_file, msg=error), logger.WARNING)
num_tries += 1 num_tries += 1
time.sleep(1) time.sleep(1)
logger.log(u"Trying again.", logger.DEBUG) logger.log(u'Trying again.', logger.DEBUG)
if num_tries >= 10: if num_tries >= 10:
logger.log(u"Unable to back up {old} to {new} please do it manually.".format(old=old_file, new=new_file), logger.ERROR) logger.log(u'Unable to back up {old} to {new} please do it manually.'.format(old=old_file, new=new_file), logger.ERROR)
return False return False
return True return True
def update_download_info_status(input_name, status): def update_download_info_status(input_name, status):
logger.db("Updating status of our download {0} in the DB to {1}".format(input_name, status)) logger.db('Updating status of our download {0} in the DB to {1}'.format(input_name, status))
my_db = main_db.DBConnection() my_db = main_db.DBConnection()
my_db.action("UPDATE downloads SET status=?, last_update=? WHERE input_name=?", my_db.action('UPDATE downloads SET status=?, last_update=? WHERE input_name=?',
[status, datetime.date.today().toordinal(), text_type(input_name)]) [status, datetime.date.today().toordinal(), text_type(input_name)])
def get_download_info(input_name, status): def get_download_info(input_name, status):
logger.db("Getting download info for {0} from the DB".format(input_name)) logger.db('Getting download info for {0} from the DB'.format(input_name))
my_db = main_db.DBConnection() my_db = main_db.DBConnection()
sql_results = my_db.select("SELECT * FROM downloads WHERE input_name=? AND status=?", sql_results = my_db.select('SELECT * FROM downloads WHERE input_name=? AND status=?',
[text_type(input_name), status]) [text_type(input_name), status])
return sql_results return sql_results
@ -1326,7 +1326,7 @@ def get_download_info(input_name, status):
class WindowsProcess(object): class WindowsProcess(object):
def __init__(self): def __init__(self):
self.mutex = None self.mutex = None
self.mutexname = "nzbtomedia_{pid}".format(pid=core.PID_FILE.replace('\\', '/')) # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}" self.mutexname = 'nzbtomedia_{pid}'.format(pid=core.PID_FILE.replace('\\', '/')) # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}'
self.CreateMutex = CreateMutex self.CreateMutex = CreateMutex
self.CloseHandle = CloseHandle self.CloseHandle = CloseHandle
self.GetLastError = GetLastError self.GetLastError = GetLastError
@ -1358,13 +1358,13 @@ class PosixProcess(object):
self.lasterror = False self.lasterror = False
return self.lasterror return self.lasterror
except socket.error as e: except socket.error as e:
if "Address already in use" in e: if 'Address already in use' in e:
self.lasterror = True self.lasterror = True
return self.lasterror return self.lasterror
except AttributeError: except AttributeError:
pass pass
if os.path.exists(self.pidpath): if os.path.exists(self.pidpath):
# Make sure it is not a "stale" pidFile # Make sure it is not a 'stale' pidFile
try: try:
pid = int(open(self.pidpath, 'r').read().strip()) pid = int(open(self.pidpath, 'r').read().strip())
except Exception: except Exception:

View file

@ -66,13 +66,13 @@ class CheckVersion(object):
""" """
if not core.VERSION_NOTIFY and not force: if not core.VERSION_NOTIFY and not force:
logger.log(u"Version checking is disabled, not checking for the newest version") logger.log(u'Version checking is disabled, not checking for the newest version')
return False return False
logger.log(u"Checking if {install} needs an update".format(install=self.install_type)) logger.log(u'Checking if {install} needs an update'.format(install=self.install_type))
if not self.updater.need_update(): if not self.updater.need_update():
core.NEWEST_VERSION_STRING = None core.NEWEST_VERSION_STRING = None
logger.log(u"No update needed") logger.log(u'No update needed')
return False return False
self.updater.set_newest_text() self.updater.set_newest_text()
@ -116,19 +116,19 @@ class GitUpdateManager(UpdateManager):
test_cmd = 'version' test_cmd = 'version'
if core.GIT_PATH: if core.GIT_PATH:
main_git = '"{git}"'.format(git=core.GIT_PATH) main_git = '\'{git}\''.format(git=core.GIT_PATH)
else: else:
main_git = 'git' main_git = 'git'
logger.log(u"Checking if we can use git commands: {git} {cmd}".format logger.log(u'Checking if we can use git commands: {git} {cmd}'.format
(git=main_git, cmd=test_cmd), logger.DEBUG) (git=main_git, cmd=test_cmd), logger.DEBUG)
output, err, exit_status = self._run_git(main_git, test_cmd) output, err, exit_status = self._run_git(main_git, test_cmd)
if exit_status == 0: if exit_status == 0:
logger.log(u"Using: {git}".format(git=main_git), logger.DEBUG) logger.log(u'Using: {git}'.format(git=main_git), logger.DEBUG)
return main_git return main_git
else: else:
logger.log(u"Not using: {git}".format(git=main_git), logger.DEBUG) logger.log(u'Not using: {git}'.format(git=main_git), logger.DEBUG)
# trying alternatives # trying alternatives
@ -143,18 +143,18 @@ class GitUpdateManager(UpdateManager):
alternative_git.append(main_git.lower()) alternative_git.append(main_git.lower())
if alternative_git: if alternative_git:
logger.log(u"Trying known alternative git locations", logger.DEBUG) logger.log(u'Trying known alternative git locations', logger.DEBUG)
for cur_git in alternative_git: for cur_git in alternative_git:
logger.log(u"Checking if we can use git commands: {git} {cmd}".format logger.log(u'Checking if we can use git commands: {git} {cmd}'.format
(git=cur_git, cmd=test_cmd), logger.DEBUG) (git=cur_git, cmd=test_cmd), logger.DEBUG)
output, err, exit_status = self._run_git(cur_git, test_cmd) output, err, exit_status = self._run_git(cur_git, test_cmd)
if exit_status == 0: if exit_status == 0:
logger.log(u"Using: {git}".format(git=cur_git), logger.DEBUG) logger.log(u'Using: {git}'.format(git=cur_git), logger.DEBUG)
return cur_git return cur_git
else: else:
logger.log(u"Not using: {git}".format(git=cur_git), logger.DEBUG) logger.log(u'Not using: {git}'.format(git=cur_git), logger.DEBUG)
# Still haven't found a working git # Still haven't found a working git
logger.debug('Unable to find your git executable - ' logger.debug('Unable to find your git executable - '
@ -169,14 +169,14 @@ class GitUpdateManager(UpdateManager):
err = None err = None
if not git_path: if not git_path:
logger.log(u"No git specified, can't use git commands", logger.DEBUG) logger.log(u'No git specified, can\'t use git commands', logger.DEBUG)
exit_status = 1 exit_status = 1
return output, err, exit_status return output, err, exit_status
cmd = '{git} {args}'.format(git=git_path, args=args) cmd = '{git} {args}'.format(git=git_path, args=args)
try: try:
logger.log(u"Executing {cmd} with your shell in {directory}".format logger.log(u'Executing {cmd} with your shell in {directory}'.format
(cmd=cmd, directory=core.APP_ROOT), logger.DEBUG) (cmd=cmd, directory=core.APP_ROOT), logger.DEBUG)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
shell=True, cwd=core.APP_ROOT) shell=True, cwd=core.APP_ROOT)
@ -188,22 +188,22 @@ class GitUpdateManager(UpdateManager):
if output: if output:
output = output.strip() output = output.strip()
if core.LOG_GIT: if core.LOG_GIT:
logger.log(u"git output: {output}".format(output=output), logger.DEBUG) logger.log(u'git output: {output}'.format(output=output), logger.DEBUG)
except OSError: except OSError:
logger.log(u"Command {cmd} didn't work".format(cmd=cmd)) logger.log(u'Command {cmd} didn\'t work'.format(cmd=cmd))
exit_status = 1 exit_status = 1
exit_status = 128 if ('fatal:' in output) or err else exit_status exit_status = 128 if ('fatal:' in output) or err else exit_status
if exit_status == 0: if exit_status == 0:
logger.log(u"{cmd} : returned successful".format(cmd=cmd), logger.DEBUG) logger.log(u'{cmd} : returned successful'.format(cmd=cmd), logger.DEBUG)
exit_status = 0 exit_status = 0
elif core.LOG_GIT and exit_status in (1, 128): elif core.LOG_GIT and exit_status in (1, 128):
logger.log(u"{cmd} returned : {output}".format logger.log(u'{cmd} returned : {output}'.format
(cmd=cmd, output=output), logger.DEBUG) (cmd=cmd, output=output), logger.DEBUG)
else: else:
if core.LOG_GIT: if core.LOG_GIT:
logger.log(u"{cmd} returned : {output}, treat as error for now".format logger.log(u'{cmd} returned : {output}, treat as error for now'.format
(cmd=cmd, output=output), logger.DEBUG) (cmd=cmd, output=output), logger.DEBUG)
exit_status = 1 exit_status = 1
@ -223,7 +223,7 @@ class GitUpdateManager(UpdateManager):
if exit_status == 0 and output: if exit_status == 0 and output:
cur_commit_hash = output.strip() cur_commit_hash = output.strip()
if not re.match('^[a-z0-9]+$', cur_commit_hash): if not re.match('^[a-z0-9]+$', cur_commit_hash):
logger.log(u"Output doesn't look like a hash, not using it", logger.ERROR) logger.log(u'Output doesn\'t look like a hash, not using it', logger.ERROR)
return False return False
self._cur_commit_hash = cur_commit_hash self._cur_commit_hash = cur_commit_hash
if self._cur_commit_hash: if self._cur_commit_hash:
@ -256,56 +256,56 @@ class GitUpdateManager(UpdateManager):
output, err, exit_status = self._run_git(self._git_path, 'fetch origin') output, err, exit_status = self._run_git(self._git_path, 'fetch origin')
if not exit_status == 0: if not exit_status == 0:
logger.log(u"Unable to contact github, can't check for update", logger.ERROR) logger.log(u'Unable to contact github, can\'t check for update', logger.ERROR)
return return
# get latest commit_hash from remote # get latest commit_hash from remote
output, err, exit_status = self._run_git(self._git_path, 'rev-parse --verify --quiet "@{upstream}"') output, err, exit_status = self._run_git(self._git_path, 'rev-parse --verify --quiet \'@{upstream}\'')
if exit_status == 0 and output: if exit_status == 0 and output:
cur_commit_hash = output.strip() cur_commit_hash = output.strip()
if not re.match('^[a-z0-9]+$', cur_commit_hash): if not re.match('^[a-z0-9]+$', cur_commit_hash):
logger.log(u"Output doesn't look like a hash, not using it", logger.DEBUG) logger.log(u'Output doesn\'t look like a hash, not using it', logger.DEBUG)
return return
else: else:
self._newest_commit_hash = cur_commit_hash self._newest_commit_hash = cur_commit_hash
else: else:
logger.log(u"git didn't return newest commit hash", logger.DEBUG) logger.log(u'git didn\'t return newest commit hash', logger.DEBUG)
return return
# get number of commits behind and ahead (option --count not supported git < 1.7.2) # get number of commits behind and ahead (option --count not supported git < 1.7.2)
output, err, exit_status = self._run_git(self._git_path, 'rev-list --left-right "@{upstream}"...HEAD') output, err, exit_status = self._run_git(self._git_path, 'rev-list --left-right \'@{upstream}\'...HEAD')
if exit_status == 0 and output: if exit_status == 0 and output:
try: try:
self._num_commits_behind = int(output.count("<")) self._num_commits_behind = int(output.count('<'))
self._num_commits_ahead = int(output.count(">")) self._num_commits_ahead = int(output.count('>'))
except Exception: except Exception:
logger.log(u"git didn't return numbers for behind and ahead, not using it", logger.DEBUG) logger.log(u'git didn\'t return numbers for behind and ahead, not using it', logger.DEBUG)
return return
logger.log(u"cur_commit = {current} % (newest_commit)= {new}, " logger.log(u'cur_commit = {current} % (newest_commit)= {new}, '
u"num_commits_behind = {x}, num_commits_ahead = {y}".format u'num_commits_behind = {x}, num_commits_ahead = {y}'.format
(current=self._cur_commit_hash, new=self._newest_commit_hash, (current=self._cur_commit_hash, new=self._newest_commit_hash,
x=self._num_commits_behind, y=self._num_commits_ahead), logger.DEBUG) x=self._num_commits_behind, y=self._num_commits_ahead), logger.DEBUG)
def set_newest_text(self): def set_newest_text(self):
if self._num_commits_ahead: if self._num_commits_ahead:
logger.log(u"Local branch is ahead of {branch}. Automatic update not possible.".format logger.log(u'Local branch is ahead of {branch}. Automatic update not possible.'.format
(branch=self.branch), logger.ERROR) (branch=self.branch), logger.ERROR)
elif self._num_commits_behind: elif self._num_commits_behind:
logger.log(u"There is a newer version available (you're {x} commit{s} behind)".format logger.log(u'There is a newer version available (you\'re {x} commit{s} behind)'.format
(x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE) (x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE)
else: else:
return return
def need_update(self): def need_update(self):
if not self._find_installed_version(): if not self._find_installed_version():
logger.error("Unable to determine installed version via git, please check your logs!") logger.error('Unable to determine installed version via git, please check your logs!')
return False return False
if not self._cur_commit_hash: if not self._cur_commit_hash:
@ -314,7 +314,7 @@ class GitUpdateManager(UpdateManager):
try: try:
self._check_github_for_update() self._check_github_for_update()
except Exception as error: except Exception as error:
logger.log(u"Unable to contact github, can't check for update: {msg!r}".format(msg=error), logger.ERROR) logger.log(u'Unable to contact github, can\'t check for update: {msg!r}'.format(msg=error), logger.ERROR)
return False return False
if self._num_commits_behind > 0: if self._num_commits_behind > 0:
@ -358,7 +358,7 @@ class SourceUpdateManager(UpdateManager):
with open(version_file, 'r') as fp: with open(version_file, 'r') as fp:
self._cur_commit_hash = fp.read().strip(' \n\r') self._cur_commit_hash = fp.read().strip(' \n\r')
except EnvironmentError as error: except EnvironmentError as error:
logger.log(u"Unable to open 'version.txt': {msg}".format(msg=error), logger.DEBUG) logger.log(u'Unable to open \'version.txt\': {msg}'.format(msg=error), logger.DEBUG)
if not self._cur_commit_hash: if not self._cur_commit_hash:
self._cur_commit_hash = None self._cur_commit_hash = None
@ -372,7 +372,7 @@ class SourceUpdateManager(UpdateManager):
try: try:
self._check_github_for_update() self._check_github_for_update()
except Exception as error: except Exception as error:
logger.log(u"Unable to contact github, can't check for update: {msg!r}".format(msg=error), logger.ERROR) logger.log(u'Unable to contact github, can\'t check for update: {msg!r}'.format(msg=error), logger.ERROR)
return False return False
if not self._cur_commit_hash or self._num_commits_behind > 0: if not self._cur_commit_hash or self._num_commits_behind > 0:
@ -418,7 +418,7 @@ class SourceUpdateManager(UpdateManager):
# when _cur_commit_hash doesn't match anything _num_commits_behind == 100 # when _cur_commit_hash doesn't match anything _num_commits_behind == 100
self._num_commits_behind += 1 self._num_commits_behind += 1
logger.log(u"cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}".format logger.log(u'cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}'.format
(current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG) (current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG)
def set_newest_text(self): def set_newest_text(self):
@ -427,9 +427,9 @@ class SourceUpdateManager(UpdateManager):
core.NEWEST_VERSION_STRING = None core.NEWEST_VERSION_STRING = None
if not self._cur_commit_hash: if not self._cur_commit_hash:
logger.log(u"Unknown current version number, don't know if we should update or not", logger.ERROR) logger.log(u'Unknown current version number, don\'t know if we should update or not', logger.ERROR)
elif self._num_commits_behind > 0: elif self._num_commits_behind > 0:
logger.log(u"There is a newer version available (you're {x} commit{s} behind)".format logger.log(u'There is a newer version available (you\'re {x} commit{s} behind)'.format
(x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE) (x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE)
else: else:
return return
@ -447,47 +447,47 @@ class SourceUpdateManager(UpdateManager):
sb_update_dir = os.path.join(core.APP_ROOT, u'sb-update') sb_update_dir = os.path.join(core.APP_ROOT, u'sb-update')
if os.path.isdir(sb_update_dir): if os.path.isdir(sb_update_dir):
logger.log(u"Clearing out update folder {dir} before extracting".format(dir=sb_update_dir)) logger.log(u'Clearing out update folder {dir} before extracting'.format(dir=sb_update_dir))
shutil.rmtree(sb_update_dir) shutil.rmtree(sb_update_dir)
logger.log(u"Creating update folder {dir} before extracting".format(dir=sb_update_dir)) logger.log(u'Creating update folder {dir} before extracting'.format(dir=sb_update_dir))
os.makedirs(sb_update_dir) os.makedirs(sb_update_dir)
# retrieve file # retrieve file
logger.log(u"Downloading update from {url!r}".format(url=tar_download_url)) logger.log(u'Downloading update from {url!r}'.format(url=tar_download_url))
tar_download_path = os.path.join(sb_update_dir, u'nzbtomedia-update.tar') tar_download_path = os.path.join(sb_update_dir, u'nzbtomedia-update.tar')
urlretrieve(tar_download_url, tar_download_path) urlretrieve(tar_download_url, tar_download_path)
if not os.path.isfile(tar_download_path): if not os.path.isfile(tar_download_path):
logger.log(u"Unable to retrieve new version from {url}, can't update".format logger.log(u'Unable to retrieve new version from {url}, can\'t update'.format
(url=tar_download_url), logger.ERROR) (url=tar_download_url), logger.ERROR)
return False return False
if not tarfile.is_tarfile(tar_download_path): if not tarfile.is_tarfile(tar_download_path):
logger.log(u"Retrieved version from {url} is corrupt, can't update".format logger.log(u'Retrieved version from {url} is corrupt, can\'t update'.format
(url=tar_download_url), logger.ERROR) (url=tar_download_url), logger.ERROR)
return False return False
# extract to sb-update dir # extract to sb-update dir
logger.log(u"Extracting file {path}".format(path=tar_download_path)) logger.log(u'Extracting file {path}'.format(path=tar_download_path))
tar = tarfile.open(tar_download_path) tar = tarfile.open(tar_download_path)
tar.extractall(sb_update_dir) tar.extractall(sb_update_dir)
tar.close() tar.close()
# delete .tar.gz # delete .tar.gz
logger.log(u"Deleting file {path}".format(path=tar_download_path)) logger.log(u'Deleting file {path}'.format(path=tar_download_path))
os.remove(tar_download_path) os.remove(tar_download_path)
# find update dir name # find update dir name
update_dir_contents = [x for x in os.listdir(sb_update_dir) if update_dir_contents = [x for x in os.listdir(sb_update_dir) if
os.path.isdir(os.path.join(sb_update_dir, x))] os.path.isdir(os.path.join(sb_update_dir, x))]
if len(update_dir_contents) != 1: if len(update_dir_contents) != 1:
logger.log(u"Invalid update data, update failed: {0}".format(update_dir_contents), logger.ERROR) logger.log(u'Invalid update data, update failed: {0}'.format(update_dir_contents), logger.ERROR)
return False return False
content_dir = os.path.join(sb_update_dir, update_dir_contents[0]) content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
# walk temp folder and move files to main folder # walk temp folder and move files to main folder
logger.log(u"Moving files from {source} to {destination}".format logger.log(u'Moving files from {source} to {destination}'.format
(source=content_dir, destination=core.APP_ROOT)) (source=content_dir, destination=core.APP_ROOT))
for dirname, dirnames, filenames in os.walk(content_dir): # @UnusedVariable for dirname, dirnames, filenames in os.walk(content_dir): # @UnusedVariable
dirname = dirname[len(content_dir) + 1:] dirname = dirname[len(content_dir) + 1:]
@ -504,7 +504,7 @@ class SourceUpdateManager(UpdateManager):
os.remove(new_path) os.remove(new_path)
os.renames(old_path, new_path) os.renames(old_path, new_path)
except Exception as error: except Exception as error:
logger.log(u"Unable to update {path}: {msg}".format logger.log(u'Unable to update {path}: {msg}'.format
(path=new_path, msg=error), logger.DEBUG) (path=new_path, msg=error), logger.DEBUG)
os.remove(old_path) # Trash the updated file without moving in new path os.remove(old_path) # Trash the updated file without moving in new path
continue continue
@ -518,14 +518,14 @@ class SourceUpdateManager(UpdateManager):
with open(version_path, 'w') as ver_file: with open(version_path, 'w') as ver_file:
ver_file.write(self._newest_commit_hash) ver_file.write(self._newest_commit_hash)
except EnvironmentError as error: except EnvironmentError as error:
logger.log(u"Unable to write version file, update not complete: {msg}".format logger.log(u'Unable to write version file, update not complete: {msg}'.format
(msg=error), logger.ERROR) (msg=error), logger.ERROR)
return False return False
except Exception as error: except Exception as error:
logger.log(u"Error while trying to update: {msg}".format logger.log(u'Error while trying to update: {msg}'.format
(msg=error), logger.ERROR) (msg=error), logger.ERROR)
logger.log(u"Traceback: {error}".format(error=traceback.format_exc()), logger.DEBUG) logger.log(u'Traceback: {error}'.format(error=traceback.format_exc()), logger.DEBUG)
return False return False
return True return True

View file

@ -31,7 +31,7 @@ class UTorrentClient(object):
# TODO refresh token, when necessary # TODO refresh token, when necessary
def _make_opener(self, realm, base_url, username, password): def _make_opener(self, realm, base_url, username, password):
'''uTorrent API need HTTP Basic Auth and cookie support for token verify.''' """uTorrent API need HTTP Basic Auth and cookie support for token verify."""
auth_handler = HTTPBasicAuthHandler() auth_handler = HTTPBasicAuthHandler()
auth_handler.add_password(realm=realm, auth_handler.add_password(realm=realm,

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "CouchPotato" section = 'CouchPotato'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "Gamez" section = 'Gamez'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "HeadPhones" section = 'HeadPhones'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "Lidarr" section = 'Lidarr'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -29,7 +29,7 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
logger.error( logger.error(
'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format( 'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format(
input_directory)) input_directory))
return [-1, ""] return [-1, '']
if not download_id and client_agent == 'sabnzbd': if not download_id and client_agent == 'sabnzbd':
download_id = get_nzoid(input_name) download_id = get_nzoid(input_name)
@ -48,16 +48,16 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
except Exception: except Exception:
pass pass
control_value_dict = {"input_directory": text_type(input_directory1)} control_value_dict = {'input_directory': text_type(input_directory1)}
new_value_dict = { new_value_dict = {
"input_name": text_type(input_name1), 'input_name': text_type(input_name1),
"input_hash": text_type(download_id), 'input_hash': text_type(download_id),
"input_id": text_type(download_id), 'input_id': text_type(download_id),
"client_agent": text_type(client_agent), 'client_agent': text_type(client_agent),
"status": 0, 'status': 0,
"last_update": datetime.date.today().toordinal(), 'last_update': datetime.date.today().toordinal(),
} }
my_db.upsert("downloads", new_value_dict, control_value_dict) my_db.upsert('downloads', new_value_dict, control_value_dict)
# auto-detect section # auto-detect section
if input_category is None: if input_category is None:
@ -65,41 +65,41 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
usercat = input_category usercat = input_category
section = core.CFG.findsection(input_category).isenabled() section = core.CFG.findsection(input_category).isenabled()
if section is None: if section is None:
section = core.CFG.findsection("ALL").isenabled() section = core.CFG.findsection('ALL').isenabled()
if section is None: if section is None:
logger.error( logger.error(
'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format( 'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format(
input_category)) input_category))
return [-1, ""] return [-1, '']
else: else:
usercat = "ALL" usercat = 'ALL'
if len(section) > 1: if len(section) > 1:
logger.error( logger.error(
'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format( 'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format(
input_category, section.keys())) input_category, section.keys()))
return [-1, ""] return [-1, '']
if section: if section:
section_name = section.keys()[0] section_name = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(section_name)) logger.info('Auto-detected SECTION:{0}'.format(section_name))
else: else:
logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format( logger.error('Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!'.format(
input_category)) input_category))
return [-1, ""] return [-1, '']
cfg = dict(core.CFG[section_name][usercat]) cfg = dict(core.CFG[section_name][usercat])
extract = int(cfg.get("extract", 0)) extract = int(cfg.get('extract', 0))
try: try:
if int(cfg.get("remote_path")) and not core.REMOTEPATHS: if int(cfg.get('remote_path')) and not core.REMOTEPATHS:
logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format( logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format(
section_name, input_category)) section_name, input_category))
return [-1, ""] return [-1, '']
except Exception: except Exception:
logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format( logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format(
core.get("remote_path"), section_name, input_category)) core.get('remote_path'), section_name, input_category))
input_name, input_directory = convert_to_ascii(input_name, input_directory) input_name, input_directory = convert_to_ascii(input_name, input_directory)
@ -107,23 +107,23 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory)) logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
extract_files(input_directory) extract_files(input_directory)
logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, input_category, input_name)) logger.info('Calling {0}:{1} to post-process:{2}'.format(section_name, input_category, input_name))
if section_name in ["CouchPotato", "Radarr"]: if section_name in ['CouchPotato', 'Radarr']:
result = movies.process(section_name, input_directory, input_name, status, client_agent, download_id, input_category, failure_link) result = movies.process(section_name, input_directory, input_name, status, client_agent, download_id, input_category, failure_link)
elif section_name in ["SickBeard", "NzbDrone", "Sonarr"]: elif section_name in ['SickBeard', 'NzbDrone', 'Sonarr']:
result = tv.process(section_name, input_directory, input_name, status, client_agent, download_id, input_category, failure_link) result = tv.process(section_name, input_directory, input_name, status, client_agent, download_id, input_category, failure_link)
elif section_name in ["HeadPhones", "Lidarr"]: elif section_name in ['HeadPhones', 'Lidarr']:
result = music.process(section_name, input_directory, input_name, status, client_agent, input_category) result = music.process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == "Mylar": elif section_name == 'Mylar':
result = comics.process(section_name, input_directory, input_name, status, client_agent, input_category) result = comics.process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == "Gamez": elif section_name == 'Gamez':
result = games.process(section_name, input_directory, input_name, status, client_agent, input_category) result = games.process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == 'UserScript': elif section_name == 'UserScript':
result = external_script(input_directory, input_name, input_category, section[usercat]) result = external_script(input_directory, input_name, input_category, section[usercat])
else: else:
result = ProcessResult( result = ProcessResult(
message="", message='',
status_code=-1, status_code=-1,
) )
@ -144,16 +144,16 @@ def main(args, section=None):
# Initialize the config # Initialize the config
core.initialize(section) core.initialize(section)
logger.info("#########################################################") logger.info('#########################################################')
logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__))) logger.info('## ..::[{0}]::.. ##'.format(os.path.basename(__file__)))
logger.info("#########################################################") logger.info('#########################################################')
# debug command line options # debug command line options
logger.debug("Options passed into nzbToMedia: {0}".format(args)) logger.debug('Options passed into nzbToMedia: {0}'.format(args))
# Post-Processing Result # Post-Processing Result
result = ProcessResult( result = ProcessResult(
message="", message='',
status_code=0, status_code=0,
) )
status = 0 status = 0
@ -162,26 +162,26 @@ def main(args, section=None):
if 'NZBOP_SCRIPTDIR' in os.environ: if 'NZBOP_SCRIPTDIR' in os.environ:
# Check if the script is called from nzbget 11.0 or later # Check if the script is called from nzbget 11.0 or later
if os.environ['NZBOP_VERSION'][0:5] < '11.0': if os.environ['NZBOP_VERSION'][0:5] < '11.0':
logger.error("NZBGet Version {0} is not supported. Please update NZBGet.".format(os.environ['NZBOP_VERSION'])) logger.error('NZBGet Version {0} is not supported. Please update NZBGet.'.format(os.environ['NZBOP_VERSION']))
sys.exit(core.NZBGET_POSTPROCESS_ERROR) sys.exit(core.NZBGET_POSTPROCESS_ERROR)
logger.info("Script triggered from NZBGet Version {0}.".format(os.environ['NZBOP_VERSION'])) logger.info('Script triggered from NZBGet Version {0}.'.format(os.environ['NZBOP_VERSION']))
# Check if the script is called from nzbget 13.0 or later # Check if the script is called from nzbget 13.0 or later
if 'NZBPP_TOTALSTATUS' in os.environ: if 'NZBPP_TOTALSTATUS' in os.environ:
if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS': if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS':
logger.info("Download failed with status {0}.".format(os.environ['NZBPP_STATUS'])) logger.info('Download failed with status {0}.'.format(os.environ['NZBPP_STATUS']))
status = 1 status = 1
else: else:
# Check par status # Check par status
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4': if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
logger.warning("Par-repair failed, setting status \"failed\"") logger.warning('Par-repair failed, setting status \'failed\'')
status = 1 status = 1
# Check unpack status # Check unpack status
if os.environ['NZBPP_UNPACKSTATUS'] == '1': if os.environ['NZBPP_UNPACKSTATUS'] == '1':
logger.warning("Unpack failed, setting status \"failed\"") logger.warning('Unpack failed, setting status \'failed\'')
status = 1 status = 1
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0': if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
@ -189,17 +189,17 @@ def main(args, section=None):
if os.environ['NZBPP_HEALTH'] < 1000: if os.environ['NZBPP_HEALTH'] < 1000:
logger.warning( logger.warning(
"Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"") 'Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \'failed\'')
logger.info("Please check your Par-check/repair settings for future downloads.") logger.info('Please check your Par-check/repair settings for future downloads.')
status = 1 status = 1
else: else:
logger.info( logger.info(
"Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful") 'Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful')
logger.info("Please check your Par-check/repair settings for future downloads.") logger.info('Please check your Par-check/repair settings for future downloads.')
# Check for download_id to pass to CouchPotato # Check for download_id to pass to CouchPotato
download_id = "" download_id = ''
failure_link = None failure_link = None
if 'NZBPR_COUCHPOTATO' in os.environ: if 'NZBPR_COUCHPOTATO' in os.environ:
download_id = os.environ['NZBPR_COUCHPOTATO'] download_id = os.environ['NZBPR_COUCHPOTATO']
@ -224,13 +224,13 @@ def main(args, section=None):
# SABnzbd argv: # SABnzbd argv:
# 1 The final directory of the job (full path) # 1 The final directory of the job (full path)
# 2 The original name of the NZB file # 2 The original name of the NZB file
# 3 Clean version of the job name (no path info and ".nzb" removed) # 3 Clean version of the job name (no path info and '.nzb' removed)
# 4 Indexer's report number (if supported) # 4 Indexer's report number (if supported)
# 5 User-defined category # 5 User-defined category
# 6 Group that the NZB was posted in e.g. alt.binaries.x # 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2 # 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
client_agent = 'sabnzbd' client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd") logger.info('Script triggered from SABnzbd')
result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent, result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent,
download_id='') download_id='')
# SABnzbd 0.7.17+ # SABnzbd 0.7.17+
@ -238,36 +238,36 @@ def main(args, section=None):
# SABnzbd argv: # SABnzbd argv:
# 1 The final directory of the job (full path) # 1 The final directory of the job (full path)
# 2 The original name of the NZB file # 2 The original name of the NZB file
# 3 Clean version of the job name (no path info and ".nzb" removed) # 3 Clean version of the job name (no path info and '.nzb' removed)
# 4 Indexer's report number (if supported) # 4 Indexer's report number (if supported)
# 5 User-defined category # 5 User-defined category
# 6 Group that the NZB was posted in e.g. alt.binaries.x # 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2 # 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
# 8 Failure URL # 8 Failure URL
client_agent = 'sabnzbd' client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd 0.7.17+") logger.info('Script triggered from SABnzbd 0.7.17+')
result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent, result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent,
download_id='', failure_link=''.join(args[8:])) download_id='', failure_link=''.join(args[8:]))
# Generic program # Generic program
elif len(args) > 5 and args[5] == 'generic': elif len(args) > 5 and args[5] == 'generic':
logger.info("Script triggered from generic program") logger.info('Script triggered from generic program')
result = process(args[1], input_name=args[2], input_category=args[3], download_id=args[4]) result = process(args[1], input_name=args[2], input_category=args[3], download_id=args[4])
else: else:
# Perform Manual Post-Processing # Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...") logger.warning('Invalid number of arguments received from client, Switching to manual run mode ...')
for section, subsections in core.SECTIONS.items(): for section, subsections in core.SECTIONS.items():
for subsection in subsections: for subsection in subsections:
if not core.CFG[section][subsection].isenabled(): if not core.CFG[section][subsection].isenabled():
continue continue
for dir_name in get_dirs(section, subsection, link='move'): for dir_name in get_dirs(section, subsection, link='move'):
logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dir_name)) logger.info('Starting manual run for {0}:{1} - Folder: {2}'.format(section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format(os.path.basename(dir_name))) logger.info('Checking database for download info for {0} ...'.format(os.path.basename(dir_name)))
core.DOWNLOADINFO = get_download_info(os.path.basename(dir_name), 0) core.DOWNLOADINFO = get_download_info(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO: if core.DOWNLOADINFO:
logger.info("Found download info for {0}, " logger.info('Found download info for {0}, '
"setting variables now ...".format 'setting variables now ...'.format
(os.path.basename(dir_name))) (os.path.basename(dir_name)))
client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual')) client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
download_id = text_type(core.DOWNLOADINFO[0].get('input_id', '')) download_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
@ -294,21 +294,21 @@ def main(args, section=None):
results = process(dir_name, input_name, 0, client_agent=client_agent, results = process(dir_name, input_name, 0, client_agent=client_agent,
download_id=download_id or None, input_category=subsection) download_id=download_id or None, input_category=subsection)
if results.status_code != 0: if results.status_code != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format logger.error('A problem was reported when trying to perform a manual run for {0}:{1}.'.format
(section, subsection)) (section, subsection))
result = results result = results
if result.status_code == 0: if result.status_code == 0:
logger.info("The {0} script completed successfully.".format(args[0])) logger.info('The {0} script completed successfully.'.format(args[0]))
if result.message: if result.message:
print(result.message + "!") print(result.message + '!')
if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11 if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11
del core.MYAPP del core.MYAPP
return core.NZBGET_POSTPROCESS_SUCCESS return core.NZBGET_POSTPROCESS_SUCCESS
else: else:
logger.error("A problem was reported in the {0} script.".format(args[0])) logger.error('A problem was reported in the {0} script.'.format(args[0]))
if result.message: if result.message:
print(result.message + "!") print(result.message + '!')
if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11 if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11
del core.MYAPP del core.MYAPP
return core.NZBGET_POSTPROCESS_ERROR return core.NZBGET_POSTPROCESS_ERROR

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "Mylar" section = 'Mylar'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "NzbDrone" section = 'NzbDrone'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "Radarr" section = 'Radarr'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -5,6 +5,6 @@ import sys
import nzbToMedia import nzbToMedia
section = "SickBeard" section = 'SickBeard'
result = nzbToMedia.main(sys.argv, section) result = nzbToMedia.main(sys.argv, section)
sys.exit(result) sys.exit(result)

View file

@ -14,13 +14,13 @@ from core.utils import server_responding
# Initialize the config # Initialize the config
core.initialize() core.initialize()
# label = core.TORRENT_CLASS.core.get_torrent_status("f33a9c4b15cbd9170722d700069af86746817ade", ["label"]).get()['label'] # label = core.TORRENT_CLASS.core.get_torrent_status('f33a9c4b15cbd9170722d700069af86746817ade', ['label']).get()['label']
# print(label) # print(label)
if transcoder.is_video_good(core.TEST_FILE, 0): if transcoder.is_video_good(core.TEST_FILE, 0):
print("FFPROBE Works") print('FFPROBE Works')
else: else:
print("FFPROBE FAILED") print('FFPROBE FAILED')
test = core.CFG['SickBeard', 'NzbDrone']['tv'].isenabled() test = core.CFG['SickBeard', 'NzbDrone']['tv'].isenabled()
print(test) print(test)
@ -29,22 +29,22 @@ print(section)
print(len(section)) print(len(section))
fork, fork_params = auto_fork('SickBeard', 'tv') fork, fork_params = auto_fork('SickBeard', 'tv')
if server_responding("http://127.0.0.1:5050"): if server_responding('http://127.0.0.1:5050'):
print("CouchPotato Running") print('CouchPotato Running')
if server_responding("http://127.0.0.1:7073"): if server_responding('http://127.0.0.1:7073'):
print("SickBeard Running") print('SickBeard Running')
if server_responding("http://127.0.0.1:8181"): if server_responding('http://127.0.0.1:8181'):
print("HeadPhones Running") print('HeadPhones Running')
if server_responding("http://127.0.0.1:8085"): if server_responding('http://127.0.0.1:8085'):
print("Gamez Running") print('Gamez Running')
if server_responding("http://127.0.0.1:8090"): if server_responding('http://127.0.0.1:8090'):
print("Mylar Running") print('Mylar Running')
lan = 'pt' lan = 'pt'
lan = Language.fromalpha2(lan) lan = Language.fromalpha2(lan)
print(lan.alpha3) print(lan.alpha3)
vidName = "/volume1/Public/Movies/A Few Good Men/A Few Good Men(1992).mkv" vidName = '/volume1/Public/Movies/A Few Good Men/A Few Good Men(1992).mkv'
inputName = "in.the.name.of.ben.hur.2016.bdrip.x264-rusted.nzb" inputName = 'in.the.name.of.ben.hur.2016.bdrip.x264-rusted.nzb'
guess = guessit.guessit(inputName) guess = guessit.guessit(inputName)
if guess: if guess:
# Movie Title # Movie Title
@ -55,7 +55,7 @@ if guess:
year = None year = None
if 'year' in guess: if 'year' in guess:
year = guess['year'] year = guess['year']
url = "http://www.omdbapi.com" url = 'http://www.omdbapi.com'
r = requests.get(url, params={'y': year, 't': title}, verify=False, timeout=(60, 300)) r = requests.get(url, params={'y': year, 't': title}, verify=False, timeout=(60, 300))
results = r.json() results = r.json()
print(results) print(results)