From c5343889fb0c038b5382a5eb00fae031a5aec7b0 Mon Sep 17 00:00:00 2001 From: Labrys of Knossos Date: Sat, 29 Dec 2018 14:05:37 -0500 Subject: [PATCH] Fix quotes - standardize to single-quoted strings --- TorrentToMedia.py | 120 +++++------ core/__init__.py | 268 ++++++++++++------------ core/auto_process/comics.py | 46 ++--- core/auto_process/common.py | 14 +- core/auto_process/games.py | 54 ++--- core/auto_process/movies.py | 250 +++++++++++----------- core/auto_process/music.py | 146 ++++++------- core/auto_process/tv.py | 216 +++++++++---------- core/configuration.py | 80 +++---- core/databases.py | 40 ++-- core/extractor/__init__.py | 88 ++++---- core/forks.py | 54 ++--- core/logger.py | 4 +- core/main_db.py | 100 ++++----- core/scene_exceptions.py | 82 ++++---- core/transcoder.py | 268 ++++++++++++------------ core/user_scripts.py | 52 ++--- core/utils.py | 368 ++++++++++++++++----------------- core/version_check.py | 102 ++++----- libs/custom/utorrent/client.py | 2 +- nzbToCouchPotato.py | 2 +- nzbToGamez.py | 2 +- nzbToHeadPhones.py | 2 +- nzbToLidarr.py | 2 +- nzbToMedia.py | 112 +++++----- nzbToMylar.py | 2 +- nzbToNzbDrone.py | 2 +- nzbToRadarr.py | 2 +- nzbToSickBeard.py | 2 +- tests/general.py | 32 +-- 30 files changed, 1257 insertions(+), 1257 deletions(-) diff --git a/TorrentToMedia.py b/TorrentToMedia.py index 11160988..821e60ab 100755 --- a/TorrentToMedia.py +++ b/TorrentToMedia.py @@ -36,24 +36,24 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp except Exception: pass - control_value_dict = {"input_directory": text_type(input_directory1)} + control_value_dict = {'input_directory': text_type(input_directory1)} new_value_dict = { - "input_name": text_type(input_name1), - "input_hash": text_type(input_hash), - "input_id": text_type(input_id), - "client_agent": text_type(client_agent), - "status": 0, - "last_update": datetime.date.today().toordinal(), + 'input_name': text_type(input_name1), + 'input_hash': 
text_type(input_hash), + 'input_id': text_type(input_id), + 'client_agent': text_type(client_agent), + 'status': 0, + 'last_update': datetime.date.today().toordinal(), } - my_db.upsert("downloads", new_value_dict, control_value_dict) + my_db.upsert('downloads', new_value_dict, control_value_dict) - logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(input_directory, input_name, input_category)) + logger.debug('Received Directory: {0} | Name: {1} | Category: {2}'.format(input_directory, input_name, input_category)) # Confirm the category by parsing directory structure input_directory, input_name, input_category, root = core.category_search(input_directory, input_name, input_category, root, core.CATEGORIES) - if input_category == "": - input_category = "UNCAT" + if input_category == '': + input_category = 'UNCAT' usercat = input_category try: @@ -65,45 +65,45 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp except UnicodeError: pass - logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format + logger.debug('Determined Directory: {0} | Name: {1} | Category: {2}'.format (input_directory, input_name, input_category)) # auto-detect section section = core.CFG.findsection(input_category).isenabled() if section is None: - section = core.CFG.findsection("ALL").isenabled() + section = core.CFG.findsection('ALL').isenabled() if section is None: logger.error('Category:[{0}] is not defined or is not enabled. ' 'Please rename it or ensure it is enabled for the appropriate section ' 'in your autoProcessMedia.cfg and try again.'.format (input_category)) - return [-1, ""] + return [-1, ''] else: - usercat = "ALL" + usercat = 'ALL' if len(section) > 1: logger.error('Category:[{0}] is not unique, {1} are using it. 
' 'Please rename it or disable all other sections using the same category name ' 'in your autoProcessMedia.cfg and try again.'.format (usercat, section.keys())) - return [-1, ""] + return [-1, ''] if section: section_name = section.keys()[0] logger.info('Auto-detected SECTION:{0}'.format(section_name)) else: - logger.error("Unable to locate a section with subsection:{0} " - "enabled in your autoProcessMedia.cfg, exiting!".format + logger.error('Unable to locate a section with subsection:{0} ' + 'enabled in your autoProcessMedia.cfg, exiting!'.format (input_category)) - return [-1, ""] + return [-1, ''] section = dict(section[section_name][usercat]) # Type cast to dict() to allow effective usage of .get() - torrent_no_link = int(section.get("Torrent_NoLink", 0)) - keep_archive = int(section.get("keep_archive", 0)) + torrent_no_link = int(section.get('Torrent_NoLink', 0)) + keep_archive = int(section.get('keep_archive', 0)) extract = int(section.get('extract', 0)) - extensions = section.get('user_script_mediaExtensions', "").lower().split(',') - unique_path = int(section.get("unique_path", 1)) + extensions = section.get('user_script_mediaExtensions', '').lower().split(',') + unique_path = int(section.get('unique_path', 1)) if client_agent != 'manual': core.pause_torrent(client_agent, input_hash, input_id, input_name) @@ -117,7 +117,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp output_destination = os.path.join(core.OUTPUTDIRECTORY, input_category, basename) elif unique_path: output_destination = os.path.normpath( - core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitize_name(input_name).replace(" ", "."))) + core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitize_name(input_name).replace(' ', '.'))) else: output_destination = os.path.normpath( core.os.path.join(core.OUTPUTDIRECTORY, input_category)) @@ -129,15 +129,15 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp 
if output_destination in input_directory: output_destination = input_directory - logger.info("Output directory set to: {0}".format(output_destination)) + logger.info('Output directory set to: {0}'.format(output_destination)) if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULTDIR: logger.error('The output directory:[{0}] is the Download Directory. ' 'Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format (input_directory)) - return [-1, ""] + return [-1, ''] - logger.debug("Scanning files in directory: {0}".format(input_directory)) + logger.debug('Scanning files in directory: {0}'.format(input_directory)) if section_name in ['HeadPhones', 'Lidarr']: core.NOFLATTEN.extend( @@ -151,9 +151,9 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp input_files = core.list_media_files(input_directory, other=True, otherext=extensions) if len(input_files) == 0 and os.path.isfile(input_directory): input_files = [input_directory] - logger.debug("Found 1 file to process: {0}".format(input_directory)) + logger.debug('Found 1 file to process: {0}'.format(input_directory)) else: - logger.debug("Found {0} files in {1}".format(len(input_files), input_directory)) + logger.debug('Found {0} files in {1}'.format(len(input_files), input_directory)) for inputFile in input_files: file_path = os.path.dirname(inputFile) file_name, file_ext = os.path.splitext(os.path.basename(inputFile)) @@ -164,7 +164,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp if not os.path.basename(file_path) in output_destination: target_file = core.os.path.join( core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name) - logger.debug("Setting outputDestination to {0} to preserve folder structure".format + logger.debug('Setting outputDestination to {0} to preserve folder structure'.format (os.path.dirname(target_file))) try: target_file = target_file.encode(core.SYS_ENCODING) @@ -172,11 +172,11 @@ def 
process_torrent(input_directory, input_name, input_category, input_hash, inp pass if root == 1: if not found_file: - logger.debug("Looking for {0} in: {1}".format(input_name, inputFile)) + logger.debug('Looking for {0} in: {1}'.format(input_name, inputFile)) if any([core.sanitize_name(input_name) in core.sanitize_name(inputFile), core.sanitize_name(file_name) in core.sanitize_name(input_name)]): found_file = True - logger.debug("Found file {0} that matches Torrent Name {1}".format + logger.debug('Found file {0} that matches Torrent Name {1}'.format (full_file_name, input_name)) else: continue @@ -186,10 +186,10 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(inputFile)) if not found_file: - logger.debug("Looking for files with modified/created dates less than 5 minutes old.") + logger.debug('Looking for files with modified/created dates less than 5 minutes old.') if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)): found_file = True - logger.debug("Found file {0} with date modified/created less than 5 minutes ago.".format + logger.debug('Found file {0} with date modified/created less than 5 minutes ago.'.format (full_file_name)) else: continue # This file has not been recently moved or created, skip it @@ -199,7 +199,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp core.copy_link(inputFile, target_file, core.USELINK) core.remove_read_only(target_file) except Exception: - logger.error("Failed to link: {0} to {1}".format(inputFile, target_file)) + logger.error('Failed to link: {0} to {1}'.format(inputFile, target_file)) input_name, output_destination = convert_to_ascii(input_name, output_destination) @@ -212,30 +212,30 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp core.flatten(output_destination) # Now check if video files exist in destination: 
- if section_name in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]: + if section_name in ['SickBeard', 'NzbDrone', 'Sonarr', 'CouchPotato', 'Radarr']: num_videos = len( core.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False)) if num_videos > 0: - logger.info("Found {0} media files in {1}".format(num_videos, output_destination)) + logger.info('Found {0} media files in {1}'.format(num_videos, output_destination)) status = 0 elif extract != 1: - logger.info("Found no media files in {0}. Sending to {1} to process".format(output_destination, section_name)) + logger.info('Found no media files in {0}. Sending to {1} to process'.format(output_destination, section_name)) status = 0 else: - logger.warning("Found no media files in {0}".format(output_destination)) + logger.warning('Found no media files in {0}'.format(output_destination)) # Only these sections can handling failed downloads # so make sure everything else gets through without the check for failed if section_name not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']: status = 0 - logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, usercat, input_name)) + logger.info('Calling {0}:{1} to post-process:{2}'.format(section_name, usercat, input_name)) if core.TORRENT_CHMOD_DIRECTORY: core.rchmod(output_destination, core.TORRENT_CHMOD_DIRECTORY) result = ProcessResult( - message="", + message='', status_code=0, ) if section_name == 'UserScript': @@ -257,11 +257,11 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp if result.status_code != 0: if not core.TORRENT_RESUME_ON_FAILURE: - logger.error("A problem was reported in the autoProcess* script. " - "Torrent won't resume seeding (settings)") + logger.error('A problem was reported in the autoProcess* script. ' + 'Torrent won\'t resume seeding (settings)') elif client_agent != 'manual': - logger.error("A problem was reported in the autoProcess* script. 
" - "If torrent was paused we will resume seeding") + logger.error('A problem was reported in the autoProcess* script. ' + 'If torrent was paused we will resume seeding') core.resume_torrent(client_agent, input_hash, input_id, input_name) else: @@ -293,48 +293,48 @@ def main(args): # clientAgent for Torrents client_agent = core.TORRENT_CLIENTAGENT - logger.info("#########################################################") - logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__))) - logger.info("#########################################################") + logger.info('#########################################################') + logger.info('## ..::[{0}]::.. ##'.format(os.path.basename(__file__))) + logger.info('#########################################################') # debug command line options - logger.debug("Options passed into TorrentToMedia: {0}".format(args)) + logger.debug('Options passed into TorrentToMedia: {0}'.format(args)) # Post-Processing Result result = ProcessResult( - message="", + message='', status_code=0, ) try: input_directory, input_name, input_category, input_hash, input_id = core.parse_args(client_agent, args) except Exception: - logger.error("There was a problem loading variables") + logger.error('There was a problem loading variables') return -1 if input_directory and input_name and input_hash and input_id: result = process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent) else: # Perform Manual Post-Processing - logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...") + logger.warning('Invalid number of arguments received from client, Switching to manual run mode ...') for section, subsections in core.SECTIONS.items(): for subsection in subsections: if not core.CFG[section][subsection].isenabled(): continue for dir_name in core.get_dirs(section, subsection, link='hard'): - logger.info("Starting manual run for {0}:{1} - Folder:{2}".format + 
logger.info('Starting manual run for {0}:{1} - Folder:{2}'.format (section, subsection, dir_name)) - logger.info("Checking database for download info for {0} ...".format + logger.info('Checking database for download info for {0} ...'.format (os.path.basename(dir_name))) core.DOWNLOADINFO = core.get_download_info(os.path.basename(dir_name), 0) if core.DOWNLOADINFO: client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual')) input_hash = text_type(core.DOWNLOADINFO[0].get('input_hash', '')) input_id = text_type(core.DOWNLOADINFO[0].get('input_id', '')) - logger.info("Found download info for {0}, " - "setting variables now ...".format(os.path.basename(dir_name))) + logger.info('Found download info for {0}, ' + 'setting variables now ...'.format(os.path.basename(dir_name))) else: logger.info('Unable to locate download info for {0}, ' 'continuing to try and process this release ...'.format @@ -359,17 +359,17 @@ def main(args): results = process_torrent(dir_name, input_name, subsection, input_hash or None, input_id or None, client_agent) if results[0] != 0: - logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format + logger.error('A problem was reported when trying to perform a manual run for {0}:{1}.'.format (section, subsection)) result = results if result.status_code == 0: - logger.info("The {0} script completed successfully.".format(args[0])) + logger.info('The {0} script completed successfully.'.format(args[0])) else: - logger.error("A problem was reported in the {0} script.".format(args[0])) + logger.error('A problem was reported in the {0} script.'.format(args[0])) del core.MYAPP return result.status_code -if __name__ == "__main__": +if __name__ == '__main__': exit(main(sys.argv)) diff --git a/core/__init__.py b/core/__init__.py index abccb091..da1d0c37 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -64,22 +64,22 @@ SABNZB_0717_NO_OF_ARGUMENTS = 9 # sickbeard fork/branch constants FORKS = {} 
-FORK_DEFAULT = "default" -FORK_FAILED = "failed" -FORK_FAILED_TORRENT = "failed-torrent" -FORK_SICKRAGE = "SickRage" -FORK_SICKCHILL = "SickChill" -FORK_SICKBEARD_API = "SickBeard-api" -FORK_MEDUSA = "Medusa" -FORK_SICKGEAR = "SickGear" -FORKS[FORK_DEFAULT] = {"dir": None} -FORKS[FORK_FAILED] = {"dirName": None, "failed": None} -FORKS[FORK_FAILED_TORRENT] = {"dir": None, "failed": None, "process_method": None} -FORKS[FORK_SICKRAGE] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None} -FORKS[FORK_SICKCHILL] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "force_next": None} -FORKS[FORK_SICKBEARD_API] = {"path": None, "failed": None, "process_method": None, "force_replace": None, "return_data": None, "type": None, "delete": None, "force_next": None} -FORKS[FORK_MEDUSA] = {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "ignore_subs": None} -FORKS[FORK_SICKGEAR] = {"dir": None, "failed": None, "process_method": None, "force": None} +FORK_DEFAULT = 'default' +FORK_FAILED = 'failed' +FORK_FAILED_TORRENT = 'failed-torrent' +FORK_SICKRAGE = 'SickRage' +FORK_SICKCHILL = 'SickChill' +FORK_SICKBEARD_API = 'SickBeard-api' +FORK_MEDUSA = 'Medusa' +FORK_SICKGEAR = 'SickGear' +FORKS[FORK_DEFAULT] = {'dir': None} +FORKS[FORK_FAILED] = {'dirName': None, 'failed': None} +FORKS[FORK_FAILED_TORRENT] = {'dir': None, 'failed': None, 'process_method': None} +FORKS[FORK_SICKRAGE] = {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None} +FORKS[FORK_SICKCHILL] = {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'force_next': None} +FORKS[FORK_SICKBEARD_API] = {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete': None, 'force_next': None} +FORKS[FORK_MEDUSA] = {'proc_dir': None, 'failed': None, 'process_method': 
None, 'force': None, 'delete_on': None, 'ignore_subs': None} +FORKS[FORK_SICKGEAR] = {'dir': None, 'failed': None, 'process_method': None, 'force': None} ALL_FORKS = {k: None for k in set(list(itertools.chain.from_iterable([FORKS[x].keys() for x in FORKS.keys()])))} # NZBGet Exit Codes @@ -257,15 +257,15 @@ def initialize(section=None): LOG_DIR = os.path.split(LOG_FILE)[0] if not make_dir(LOG_DIR): - print("No log folder, logging to screen only") + print('No log folder, logging to screen only') MYAPP = RunningProcess() while MYAPP.alreadyrunning(): - print("Waiting for existing session to end") + print('Waiting for existing session to end') time.sleep(30) try: - locale.setlocale(locale.LC_ALL, "") + locale.setlocale(locale.LC_ALL, '') SYS_ENCODING = locale.getpreferredencoding() except (locale.Error, IOError): pass @@ -275,7 +275,7 @@ def initialize(section=None): SYS_ENCODING = 'UTF-8' if six.PY2: - if not hasattr(sys, "setdefaultencoding"): + if not hasattr(sys, 'setdefaultencoding'): reload_module(sys) try: @@ -296,7 +296,7 @@ def initialize(section=None): # run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options. if not config.migrate(): - logger.error("Unable to migrate config file {0}, exiting ...".format(CONFIG_FILE)) + logger.error('Unable to migrate config file {0}, exiting ...'.format(CONFIG_FILE)) if 'NZBOP_SCRIPTDIR' in os.environ: pass # We will try and read config from Environment. 
else: @@ -307,7 +307,7 @@ def initialize(section=None): CFG = config.addnzbget() else: # load newly migrated config - logger.info("Loading config from [{0}]".format(CONFIG_FILE)) + logger.info('Loading config from [{0}]'.format(CONFIG_FILE)) CFG = config() # Enable/Disable DEBUG Logging @@ -318,7 +318,7 @@ def initialize(section=None): if LOG_ENV: for item in os.environ: - logger.info("{0}: {1}".format(item, os.environ[item]), "ENVIRONMENT") + logger.info('{0}: {1}'.format(item, os.environ[item]), 'ENVIRONMENT') # initialize the main SB database main_db.upgrade_database(main_db.DBConnection(), databases.InitialSchema) @@ -331,16 +331,16 @@ def initialize(section=None): GIT_PATH = CFG['General']['git_path'] GIT_USER = CFG['General']['git_user'] or 'clinton-hall' GIT_BRANCH = CFG['General']['git_branch'] or 'master' - FORCE_CLEAN = int(CFG["General"]["force_clean"]) - FFMPEG_PATH = CFG["General"]["ffmpeg_path"] - CHECK_MEDIA = int(CFG["General"]["check_media"]) - SAFE_MODE = int(CFG["General"]["safe_mode"]) - NOEXTRACTFAILED = int(CFG["General"]["no_extract_failed"]) + FORCE_CLEAN = int(CFG['General']['force_clean']) + FFMPEG_PATH = CFG['General']['ffmpeg_path'] + CHECK_MEDIA = int(CFG['General']['check_media']) + SAFE_MODE = int(CFG['General']['safe_mode']) + NOEXTRACTFAILED = int(CFG['General']['no_extract_failed']) # Check for updates via GitHUB if version_check.CheckVersion().check_for_new_version(): if AUTO_UPDATE == 1: - logger.info("Auto-Updating nzbToMedia, Please wait ...") + logger.info('Auto-Updating nzbToMedia, Please wait ...') updated = version_check.CheckVersion().update() if updated: # restart nzbToMedia @@ -350,61 +350,61 @@ def initialize(section=None): pass restart() else: - logger.error("Update wasn't successful, not restarting. Check your log for more information.") + logger.error('Update wasn\'t successful, not restarting. 
Check your log for more information.') # Set Current Version logger.info('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format (version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH, system=platform.system(), release=platform.release())) - if int(CFG["WakeOnLan"]["wake"]) == 1: + if int(CFG['WakeOnLan']['wake']) == 1: wake_up() - NZB_CLIENTAGENT = CFG["Nzb"]["clientAgent"] # sabnzbd - SABNZBDHOST = CFG["Nzb"]["sabnzbd_host"] - SABNZBDPORT = int(CFG["Nzb"]["sabnzbd_port"] or 8080) # defaults to accomodate NzbGet - SABNZBDAPIKEY = CFG["Nzb"]["sabnzbd_apikey"] - NZB_DEFAULTDIR = CFG["Nzb"]["default_downloadDirectory"] - GROUPS = CFG["Custom"]["remove_group"] + NZB_CLIENTAGENT = CFG['Nzb']['clientAgent'] # sabnzbd + SABNZBDHOST = CFG['Nzb']['sabnzbd_host'] + SABNZBDPORT = int(CFG['Nzb']['sabnzbd_port'] or 8080) # defaults to accomodate NzbGet + SABNZBDAPIKEY = CFG['Nzb']['sabnzbd_apikey'] + NZB_DEFAULTDIR = CFG['Nzb']['default_downloadDirectory'] + GROUPS = CFG['Custom']['remove_group'] if isinstance(GROUPS, str): GROUPS = GROUPS.split(',') if GROUPS == ['']: GROUPS = None - TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"] # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent |other - USELINK = CFG["Torrent"]["useLink"] # no | hard | sym - OUTPUTDIRECTORY = CFG["Torrent"]["outputDirectory"] # /abs/path/to/complete/ - TORRENT_DEFAULTDIR = CFG["Torrent"]["default_downloadDirectory"] - CATEGORIES = (CFG["Torrent"]["categories"]) # music,music_videos,pictures,software - NOFLATTEN = (CFG["Torrent"]["noFlatten"]) + TORRENT_CLIENTAGENT = CFG['Torrent']['clientAgent'] # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent |other + USELINK = CFG['Torrent']['useLink'] # no | hard | sym + OUTPUTDIRECTORY = CFG['Torrent']['outputDirectory'] # /abs/path/to/complete/ + TORRENT_DEFAULTDIR = CFG['Torrent']['default_downloadDirectory'] + CATEGORIES = (CFG['Torrent']['categories']) # music,music_videos,pictures,software + NOFLATTEN = 
(CFG['Torrent']['noFlatten']) if isinstance(NOFLATTEN, str): NOFLATTEN = NOFLATTEN.split(',') if isinstance(CATEGORIES, str): CATEGORIES = CATEGORIES.split(',') - DELETE_ORIGINAL = int(CFG["Torrent"]["deleteOriginal"]) - TORRENT_CHMOD_DIRECTORY = int(str(CFG["Torrent"]["chmodDirectory"]), 8) - TORRENT_RESUME_ON_FAILURE = int(CFG["Torrent"]["resumeOnFailure"]) - TORRENT_RESUME = int(CFG["Torrent"]["resume"]) - UTORRENTWEBUI = CFG["Torrent"]["uTorrentWEBui"] # http://localhost:8090/gui/ - UTORRENTUSR = CFG["Torrent"]["uTorrentUSR"] # mysecretusr - UTORRENTPWD = CFG["Torrent"]["uTorrentPWD"] # mysecretpwr + DELETE_ORIGINAL = int(CFG['Torrent']['deleteOriginal']) + TORRENT_CHMOD_DIRECTORY = int(str(CFG['Torrent']['chmodDirectory']), 8) + TORRENT_RESUME_ON_FAILURE = int(CFG['Torrent']['resumeOnFailure']) + TORRENT_RESUME = int(CFG['Torrent']['resume']) + UTORRENTWEBUI = CFG['Torrent']['uTorrentWEBui'] # http://localhost:8090/gui/ + UTORRENTUSR = CFG['Torrent']['uTorrentUSR'] # mysecretusr + UTORRENTPWD = CFG['Torrent']['uTorrentPWD'] # mysecretpwr - TRANSMISSIONHOST = CFG["Torrent"]["TransmissionHost"] # localhost - TRANSMISSIONPORT = int(CFG["Torrent"]["TransmissionPort"]) - TRANSMISSIONUSR = CFG["Torrent"]["TransmissionUSR"] # mysecretusr - TRANSMISSIONPWD = CFG["Torrent"]["TransmissionPWD"] # mysecretpwr + TRANSMISSIONHOST = CFG['Torrent']['TransmissionHost'] # localhost + TRANSMISSIONPORT = int(CFG['Torrent']['TransmissionPort']) + TRANSMISSIONUSR = CFG['Torrent']['TransmissionUSR'] # mysecretusr + TRANSMISSIONPWD = CFG['Torrent']['TransmissionPWD'] # mysecretpwr - DELUGEHOST = CFG["Torrent"]["DelugeHost"] # localhost - DELUGEPORT = int(CFG["Torrent"]["DelugePort"]) # 8084 - DELUGEUSR = CFG["Torrent"]["DelugeUSR"] # mysecretusr - DELUGEPWD = CFG["Torrent"]["DelugePWD"] # mysecretpwr + DELUGEHOST = CFG['Torrent']['DelugeHost'] # localhost + DELUGEPORT = int(CFG['Torrent']['DelugePort']) # 8084 + DELUGEUSR = CFG['Torrent']['DelugeUSR'] # mysecretusr + DELUGEPWD = 
CFG['Torrent']['DelugePWD'] # mysecretpwr - QBITTORRENTHOST = CFG["Torrent"]["qBittorrenHost"] # localhost - QBITTORRENTPORT = int(CFG["Torrent"]["qBittorrentPort"]) # 8080 - QBITTORRENTUSR = CFG["Torrent"]["qBittorrentUSR"] # mysecretusr - QBITTORRENTPWD = CFG["Torrent"]["qBittorrentPWD"] # mysecretpwr + QBITTORRENTHOST = CFG['Torrent']['qBittorrenHost'] # localhost + QBITTORRENTPORT = int(CFG['Torrent']['qBittorrentPort']) # 8080 + QBITTORRENTUSR = CFG['Torrent']['qBittorrentUSR'] # mysecretusr + QBITTORRENTPWD = CFG['Torrent']['qBittorrentPWD'] # mysecretpwr - REMOTEPATHS = CFG["Network"]["mount_points"] or [] + REMOTEPATHS = CFG['Network']['mount_points'] or [] if REMOTEPATHS: if isinstance(REMOTEPATHS, list): REMOTEPATHS = ','.join(REMOTEPATHS) # fix in case this imported as list. @@ -413,11 +413,11 @@ def initialize(section=None): REMOTEPATHS = [(local.strip(), remote.strip()) for local, remote in REMOTEPATHS] # strip trailing and leading whitespaces - PLEXSSL = int(CFG["Plex"]["plex_ssl"]) - PLEXHOST = CFG["Plex"]["plex_host"] - PLEXPORT = CFG["Plex"]["plex_port"] - PLEXTOKEN = CFG["Plex"]["plex_token"] - PLEXSEC = CFG["Plex"]["plex_sections"] or [] + PLEXSSL = int(CFG['Plex']['plex_ssl']) + PLEXHOST = CFG['Plex']['plex_host'] + PLEXPORT = CFG['Plex']['plex_port'] + PLEXTOKEN = CFG['Plex']['plex_token'] + PLEXSEC = CFG['Plex']['plex_sections'] or [] if PLEXSEC: if isinstance(PLEXSEC, list): PLEXSEC = ','.join(PLEXSEC) # fix in case this imported as list. 
@@ -425,21 +425,21 @@ def initialize(section=None): devnull = open(os.devnull, 'w') try: - subprocess.Popen(["nice"], stdout=devnull, stderr=devnull).communicate() - NICENESS.extend(['nice', '-n{0}'.format(int(CFG["Posix"]["niceness"]))]) + subprocess.Popen(['nice'], stdout=devnull, stderr=devnull).communicate() + NICENESS.extend(['nice', '-n{0}'.format(int(CFG['Posix']['niceness']))]) except Exception: pass try: - subprocess.Popen(["ionice"], stdout=devnull, stderr=devnull).communicate() + subprocess.Popen(['ionice'], stdout=devnull, stderr=devnull).communicate() try: - NICENESS.extend(['ionice', '-c{0}'.format(int(CFG["Posix"]["ionice_class"]))]) + NICENESS.extend(['ionice', '-c{0}'.format(int(CFG['Posix']['ionice_class']))]) except Exception: pass try: if 'ionice' in NICENESS: - NICENESS.extend(['-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))]) + NICENESS.extend(['-n{0}'.format(int(CFG['Posix']['ionice_classdata']))]) else: - NICENESS.extend(['ionice', '-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))]) + NICENESS.extend(['ionice', '-n{0}'.format(int(CFG['Posix']['ionice_classdata']))]) except Exception: pass except Exception: @@ -449,10 +449,10 @@ def initialize(section=None): COMPRESSEDCONTAINER = [re.compile(r'.r\d{2}$', re.I), re.compile(r'.part\d+.rar$', re.I), re.compile('.rar$', re.I)] - COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG["Extensions"]["compressedExtensions"]] - MEDIACONTAINER = CFG["Extensions"]["mediaExtensions"] - AUDIOCONTAINER = CFG["Extensions"]["audioExtensions"] - METACONTAINER = CFG["Extensions"]["metaExtensions"] # .nfo,.sub,.srt + COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG['Extensions']['compressedExtensions']] + MEDIACONTAINER = CFG['Extensions']['mediaExtensions'] + AUDIOCONTAINER = CFG['Extensions']['audioExtensions'] + METACONTAINER = CFG['Extensions']['metaExtensions'] # .nfo,.sub,.srt if isinstance(COMPRESSEDCONTAINER, str): COMPRESSEDCONTAINER = 
COMPRESSEDCONTAINER.split(',') if isinstance(MEDIACONTAINER, str): @@ -462,15 +462,15 @@ def initialize(section=None): if isinstance(METACONTAINER, str): METACONTAINER = METACONTAINER.split(',') - GETSUBS = int(CFG["Transcoder"]["getSubs"]) - TRANSCODE = int(CFG["Transcoder"]["transcode"]) - DUPLICATE = int(CFG["Transcoder"]["duplicate"]) - CONCAT = int(CFG["Transcoder"]["concat"]) - IGNOREEXTENSIONS = (CFG["Transcoder"]["ignoreExtensions"]) + GETSUBS = int(CFG['Transcoder']['getSubs']) + TRANSCODE = int(CFG['Transcoder']['transcode']) + DUPLICATE = int(CFG['Transcoder']['duplicate']) + CONCAT = int(CFG['Transcoder']['concat']) + IGNOREEXTENSIONS = (CFG['Transcoder']['ignoreExtensions']) if isinstance(IGNOREEXTENSIONS, str): IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',') - OUTPUTFASTSTART = int(CFG["Transcoder"]["outputFastStart"]) - GENERALOPTS = (CFG["Transcoder"]["generalOptions"]) + OUTPUTFASTSTART = int(CFG['Transcoder']['outputFastStart']) + GENERALOPTS = (CFG['Transcoder']['generalOptions']) if isinstance(GENERALOPTS, str): GENERALOPTS = GENERALOPTS.split(',') if GENERALOPTS == ['']: @@ -480,93 +480,93 @@ def initialize(section=None): if '+genpts' not in GENERALOPTS: GENERALOPTS.append('+genpts') try: - OUTPUTQUALITYPERCENT = int(CFG["Transcoder"]["outputQualityPercent"]) + OUTPUTQUALITYPERCENT = int(CFG['Transcoder']['outputQualityPercent']) except Exception: pass - OUTPUTVIDEOPATH = CFG["Transcoder"]["outputVideoPath"] - PROCESSOUTPUT = int(CFG["Transcoder"]["processOutput"]) - ALANGUAGE = CFG["Transcoder"]["audioLanguage"] - AINCLUDE = int(CFG["Transcoder"]["allAudioLanguages"]) - SLANGUAGES = CFG["Transcoder"]["subLanguages"] + OUTPUTVIDEOPATH = CFG['Transcoder']['outputVideoPath'] + PROCESSOUTPUT = int(CFG['Transcoder']['processOutput']) + ALANGUAGE = CFG['Transcoder']['audioLanguage'] + AINCLUDE = int(CFG['Transcoder']['allAudioLanguages']) + SLANGUAGES = CFG['Transcoder']['subLanguages'] if isinstance(SLANGUAGES, str): SLANGUAGES = 
SLANGUAGES.split(',') if SLANGUAGES == ['']: SLANGUAGES = [] - SINCLUDE = int(CFG["Transcoder"]["allSubLanguages"]) - SEXTRACT = int(CFG["Transcoder"]["extractSubs"]) - SEMBED = int(CFG["Transcoder"]["embedSubs"]) - SUBSDIR = CFG["Transcoder"]["externalSubDir"] - VEXTENSION = CFG["Transcoder"]["outputVideoExtension"].strip() - VCODEC = CFG["Transcoder"]["outputVideoCodec"].strip() - VCODEC_ALLOW = CFG["Transcoder"]["VideoCodecAllow"].strip() + SINCLUDE = int(CFG['Transcoder']['allSubLanguages']) + SEXTRACT = int(CFG['Transcoder']['extractSubs']) + SEMBED = int(CFG['Transcoder']['embedSubs']) + SUBSDIR = CFG['Transcoder']['externalSubDir'] + VEXTENSION = CFG['Transcoder']['outputVideoExtension'].strip() + VCODEC = CFG['Transcoder']['outputVideoCodec'].strip() + VCODEC_ALLOW = CFG['Transcoder']['VideoCodecAllow'].strip() if isinstance(VCODEC_ALLOW, str): VCODEC_ALLOW = VCODEC_ALLOW.split(',') if VCODEC_ALLOW == ['']: VCODEC_ALLOW = [] - VPRESET = CFG["Transcoder"]["outputVideoPreset"].strip() + VPRESET = CFG['Transcoder']['outputVideoPreset'].strip() try: - VFRAMERATE = float(CFG["Transcoder"]["outputVideoFramerate"].strip()) + VFRAMERATE = float(CFG['Transcoder']['outputVideoFramerate'].strip()) except Exception: pass try: - VCRF = int(CFG["Transcoder"]["outputVideoCRF"].strip()) + VCRF = int(CFG['Transcoder']['outputVideoCRF'].strip()) except Exception: pass try: - VLEVEL = CFG["Transcoder"]["outputVideoLevel"].strip() + VLEVEL = CFG['Transcoder']['outputVideoLevel'].strip() except Exception: pass try: - VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k', '000')) + VBITRATE = int((CFG['Transcoder']['outputVideoBitrate'].strip()).replace('k', '000')) except Exception: pass - VRESOLUTION = CFG["Transcoder"]["outputVideoResolution"] - ACODEC = CFG["Transcoder"]["outputAudioCodec"].strip() - ACODEC_ALLOW = CFG["Transcoder"]["AudioCodecAllow"].strip() + VRESOLUTION = CFG['Transcoder']['outputVideoResolution'] + ACODEC = 
CFG['Transcoder']['outputAudioCodec'].strip() + ACODEC_ALLOW = CFG['Transcoder']['AudioCodecAllow'].strip() if isinstance(ACODEC_ALLOW, str): ACODEC_ALLOW = ACODEC_ALLOW.split(',') if ACODEC_ALLOW == ['']: ACODEC_ALLOW = [] try: - ACHANNELS = int(CFG["Transcoder"]["outputAudioChannels"].strip()) + ACHANNELS = int(CFG['Transcoder']['outputAudioChannels'].strip()) except Exception: pass try: - ABITRATE = int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k', '000')) + ABITRATE = int((CFG['Transcoder']['outputAudioBitrate'].strip()).replace('k', '000')) except Exception: pass - ACODEC2 = CFG["Transcoder"]["outputAudioTrack2Codec"].strip() - ACODEC2_ALLOW = CFG["Transcoder"]["AudioCodec2Allow"].strip() + ACODEC2 = CFG['Transcoder']['outputAudioTrack2Codec'].strip() + ACODEC2_ALLOW = CFG['Transcoder']['AudioCodec2Allow'].strip() if isinstance(ACODEC2_ALLOW, str): ACODEC2_ALLOW = ACODEC2_ALLOW.split(',') if ACODEC2_ALLOW == ['']: ACODEC2_ALLOW = [] try: - ACHANNELS2 = int(CFG["Transcoder"]["outputAudioTrack2Channels"].strip()) + ACHANNELS2 = int(CFG['Transcoder']['outputAudioTrack2Channels'].strip()) except Exception: pass try: - ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k', '000')) + ABITRATE2 = int((CFG['Transcoder']['outputAudioTrack2Bitrate'].strip()).replace('k', '000')) except Exception: pass - ACODEC3 = CFG["Transcoder"]["outputAudioOtherCodec"].strip() - ACODEC3_ALLOW = CFG["Transcoder"]["AudioOtherCodecAllow"].strip() + ACODEC3 = CFG['Transcoder']['outputAudioOtherCodec'].strip() + ACODEC3_ALLOW = CFG['Transcoder']['AudioOtherCodecAllow'].strip() if isinstance(ACODEC3_ALLOW, str): ACODEC3_ALLOW = ACODEC3_ALLOW.split(',') if ACODEC3_ALLOW == ['']: ACODEC3_ALLOW = [] try: - ACHANNELS3 = int(CFG["Transcoder"]["outputAudioOtherChannels"].strip()) + ACHANNELS3 = int(CFG['Transcoder']['outputAudioOtherChannels'].strip()) except Exception: pass try: - ABITRATE3 = 
int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k', '000')) + ABITRATE3 = int((CFG['Transcoder']['outputAudioOtherBitrate'].strip()).replace('k', '000')) except Exception: pass - SCODEC = CFG["Transcoder"]["outputSubtitleCodec"].strip() - BURN = int(CFG["Transcoder"]["burnInSubtitle"].strip()) - DEFAULTS = CFG["Transcoder"]["outputDefault"].strip() - HWACCEL = int(CFG["Transcoder"]["hwAccel"]) + SCODEC = CFG['Transcoder']['outputSubtitleCodec'].strip() + BURN = int(CFG['Transcoder']['burnInSubtitle'].strip()) + DEFAULTS = CFG['Transcoder']['outputDefault'].strip() + HWACCEL = int(CFG['Transcoder']['hwAccel']) allow_subs = ['.mkv', '.mp4', '.m4v', 'asf', 'wma', 'wmv'] codec_alias = { @@ -743,25 +743,25 @@ def initialize(section=None): ACODEC3_ALLOW.extend(extra) codec_alias = {} # clear memory - PASSWORDSFILE = CFG["passwords"]["PassWordFile"] + PASSWORDSFILE = CFG['passwords']['PassWordFile'] # Setup FFMPEG, FFPROBE and SEVENZIP locations if platform.system() == 'Windows': FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg.exe') FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe.exe') SEVENZIP = os.path.join(APP_ROOT, 'core', 'extractor', 'bin', platform.machine(), '7z.exe') - SHOWEXTRACT = int(str(CFG["Windows"]["show_extraction"]), 0) + SHOWEXTRACT = int(str(CFG['Windows']['show_extraction']), 0) if not (os.path.isfile(FFMPEG)): # problem FFMPEG = None - logger.warning("Failed to locate ffmpeg.exe. Transcoding disabled!") - logger.warning("Install ffmpeg with x264 support to enable this feature ...") + logger.warning('Failed to locate ffmpeg.exe. Transcoding disabled!') + logger.warning('Install ffmpeg with x264 support to enable this feature ...') if not (os.path.isfile(FFPROBE)): FFPROBE = None if CHECK_MEDIA: - logger.warning("Failed to locate ffprobe.exe. Video corruption detection disabled!") - logger.warning("Install ffmpeg with x264 support to enable this feature ...") + logger.warning('Failed to locate ffprobe.exe. 
Video corruption detection disabled!') + logger.warning('Install ffmpeg with x264 support to enable this feature ...') else: try: @@ -781,7 +781,7 @@ def initialize(section=None): if not SEVENZIP: SEVENZIP = None logger.warning( - "Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!") + 'Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!') try: PAR2CMD = subprocess.Popen(['which', 'par2'], stdout=subprocess.PIPE).communicate()[0].strip() except Exception: @@ -789,7 +789,7 @@ def initialize(section=None): if not PAR2CMD: PAR2CMD = None logger.warning( - "Failed to locate par2. Repair and rename using par files will not be possible!") + 'Failed to locate par2. Repair and rename using par files will not be possible!') if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'), os.X_OK): FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg') @@ -808,8 +808,8 @@ def initialize(section=None): pass if not FFMPEG: FFMPEG = None - logger.warning("Failed to locate ffmpeg. Transcoding disabled!") - logger.warning("Install ffmpeg with x264 support to enable this feature ...") + logger.warning('Failed to locate ffmpeg. Transcoding disabled!') + logger.warning('Install ffmpeg with x264 support to enable this feature ...') if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'), os.X_OK): @@ -830,8 +830,8 @@ def initialize(section=None): if not FFPROBE: FFPROBE = None if CHECK_MEDIA: - logger.warning("Failed to locate ffprobe. Video corruption detection disabled!") - logger.warning("Install ffmpeg with x264 support to enable this feature ...") + logger.warning('Failed to locate ffprobe. 
Video corruption detection disabled!') + logger.warning('Install ffmpeg with x264 support to enable this feature ...') # check for script-defied section and if None set to allow sections SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)] @@ -857,7 +857,7 @@ def restart(): if popen_list: popen_list += SYS_ARGV - logger.log(u"Restarting nzbToMedia with {args}".format(args=popen_list)) + logger.log(u'Restarting nzbToMedia with {args}'.format(args=popen_list)) logger.close() p = subprocess.Popen(popen_list, cwd=os.getcwd()) p.wait() @@ -867,7 +867,7 @@ def restart(): def rchmod(path, mod): - logger.log("Changing file mode of {0} to {1}".format(path, oct(mod))) + logger.log('Changing file mode of {0} to {1}'.format(path, oct(mod))) os.chmod(path, mod) if not os.path.isdir(path): return # Skip files diff --git a/core/auto_process/comics.py b/core/auto_process/comics.py index 0dfda8d0..7049704b 100644 --- a/core/auto_process/comics.py +++ b/core/auto_process/comics.py @@ -13,24 +13,24 @@ requests.packages.urllib3.disable_warnings() def process(section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None): - apc_version = "2.04" - comicrn_version = "1.01" + apc_version = '2.04' + comicrn_version = '1.01' cfg = dict(core.CFG[section][input_category]) - host = cfg["host"] - port = cfg["port"] - apikey = cfg["apikey"] - ssl = int(cfg.get("ssl", 0)) - web_root = cfg.get("web_root", "") - remote_path = int(cfg.get("remote_path"), 0) - protocol = "https://" if ssl else "http://" + host = cfg['host'] + port = cfg['port'] + apikey = cfg['apikey'] + ssl = int(cfg.get('ssl', 0)) + web_root = cfg.get('web_root', '') + remote_path = int(cfg.get('remote_path'), 0) + protocol = 'https://' if ssl else 'http://' - url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root) + url = '{0}{1}:{2}{3}/api'.format(protocol, host, port, web_root) if not server_responding(url): - logger.error("Server did 
not respond. Exiting", section) + logger.error('Server did not respond. Exiting', section) return ProcessResult( - message="{0}: Failed to post-process - {0} did not respond.".format(section), + message='{0}: Failed to post-process - {0} did not respond.'.format(section), status_code=1, ) @@ -53,19 +53,19 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual', success = False - logger.debug("Opening URL: {0}".format(url), section) + logger.debug('Opening URL: {0}'.format(url), section) try: r = requests.post(url, params=params, stream=True, verify=False, timeout=(30, 300)) except requests.ConnectionError: - logger.error("Unable to open URL", section) + logger.error('Unable to open URL', section) return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {0}".format(section), + message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), status_code=1 ) if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return ProcessResult( - message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), + message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code), status_code=1, ) @@ -74,19 +74,19 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual', result = result.split('\n') for line in result: if line: - logger.postprocess("{0}".format(line), section) - if "Post Processing SUCCESSFUL" in line: + logger.postprocess('{0}'.format(line), section) + if 'Post Processing SUCCESSFUL' in line: success = True if success: - logger.postprocess("SUCCESS: This issue has been processed successfully", section) + logger.postprocess('SUCCESS: This issue has been processed successfully', section) return ProcessResult( - 
message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) else: - logger.warning("The issue does not appear to have successfully processed. Please check your Logs", section) + logger.warning('The issue does not appear to have successfully processed. Please check your Logs', section) return ProcessResult( - message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), + message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section), status_code=1, ) diff --git a/core/auto_process/common.py b/core/auto_process/common.py index 53e231e0..8da83485 100644 --- a/core/auto_process/common.py +++ b/core/auto_process/common.py @@ -31,32 +31,32 @@ def command_complete(url, params, headers, section): try: r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60)) except requests.ConnectionError: - logger.error("Unable to open URL: {0}".format(url), section) + logger.error('Unable to open URL: {0}'.format(url), section) return None if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return None else: try: return r.json()['state'] except (ValueError, KeyError): # ValueError catches simplejson's JSONDecodeError and json's ValueError - logger.error("{0} did not return expected json data.".format(section), section) + logger.error('{0} did not return expected json data.'.format(section), section) return None -def completed_download_handling(url2, headers, section="MAIN"): +def completed_download_handling(url2, headers, section='MAIN'): try: r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60)) except requests.ConnectionError: - 
logger.error("Unable to open URL: {0}".format(url2), section) + logger.error('Unable to open URL: {0}'.format(url2), section) return False if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return False else: try: - return r.json().get("enableCompletedDownloadHandling", False) + return r.json().get('enableCompletedDownloadHandling', False) except ValueError: # ValueError catches simplejson's JSONDecodeError and json's ValueError return False diff --git a/core/auto_process/games.py b/core/auto_process/games.py index f0cd63a3..c412a690 100644 --- a/core/auto_process/games.py +++ b/core/auto_process/games.py @@ -18,27 +18,27 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual', cfg = dict(core.CFG[section][input_category]) - host = cfg["host"] - port = cfg["port"] - apikey = cfg["apikey"] - library = cfg.get("library") - ssl = int(cfg.get("ssl", 0)) - web_root = cfg.get("web_root", "") - protocol = "https://" if ssl else "http://" + host = cfg['host'] + port = cfg['port'] + apikey = cfg['apikey'] + library = cfg.get('library') + ssl = int(cfg.get('ssl', 0)) + web_root = cfg.get('web_root', '') + protocol = 'https://' if ssl else 'http://' - url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root) + url = '{0}{1}:{2}{3}/api'.format(protocol, host, port, web_root) if not server_responding(url): - logger.error("Server did not respond. Exiting", section) + logger.error('Server did not respond. 
Exiting', section) return ProcessResult( - message="{0}: Failed to post-process - {0} did not respond.".format(section), + message='{0}: Failed to post-process - {0} did not respond.'.format(section), status_code=1, ) input_name, dir_name = convert_to_ascii(input_name, dir_name) - fields = input_name.split("-") + fields = input_name.split('-') - gamez_id = fields[0].replace("[", "").replace("]", "").replace(" ", "") + gamez_id = fields[0].replace('[', '').replace(']', '').replace(' ', '') download_status = 'Downloaded' if status == 0 else 'Wanted' @@ -49,51 +49,51 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual', 'status': download_status } - logger.debug("Opening URL: {0}".format(url), section) + logger.debug('Opening URL: {0}'.format(url), section) try: r = requests.get(url, params=params, verify=False, timeout=(30, 300)) except requests.ConnectionError: - logger.error("Unable to open URL") + logger.error('Unable to open URL') return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {1}".format(section, section), + message='{0}: Failed to post-process - Unable to connect to {1}'.format(section, section), status_code=1, ) result = r.json() - logger.postprocess("{0}".format(result), section) + logger.postprocess('{0}'.format(result), section) if library: - logger.postprocess("moving files to library: {0}".format(library), section) + logger.postprocess('moving files to library: {0}'.format(library), section) try: shutil.move(dir_name, os.path.join(library, input_name)) except Exception: - logger.error("Unable to move {0} to {1}".format(dir_name, os.path.join(library, input_name)), section) + logger.error('Unable to move {0} to {1}'.format(dir_name, os.path.join(library, input_name)), section) return ProcessResult( - message="{0}: Failed to post-process - Unable to move files".format(section), + message='{0}: Failed to post-process - Unable to move files'.format(section), status_code=1, ) else: - 
logger.error("No library specified to move files to. Please edit your configuration.", section) + logger.error('No library specified to move files to. Please edit your configuration.', section) return ProcessResult( - message="{0}: Failed to post-process - No library defined in {0}".format(section), + message='{0}: Failed to post-process - No library defined in {0}'.format(section), status_code=1, ) if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return ProcessResult( - message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), + message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code), status_code=1, ) elif result['success']: - logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamez_id, download_status), section) + logger.postprocess('SUCCESS: Status for {0} has been set to {1} in Gamez'.format(gamez_id, download_status), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) else: - logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamez_id), section) + logger.error('FAILED: Status for {0} has NOT been updated in Gamez'.format(gamez_id), section) return ProcessResult( - message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), + message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section), status_code=1, ) diff --git a/core/auto_process/movies.py b/core/auto_process/movies.py index 22b1fd77..cde56f27 100644 --- a/core/auto_process/movies.py +++ b/core/auto_process/movies.py @@ -15,54 +15,54 @@ from core.utils 
import convert_to_ascii, find_download, find_imdbid, import_subs requests.packages.urllib3.disable_warnings() -def process(section, dir_name, input_name=None, status=0, client_agent="manual", download_id="", input_category=None, failure_link=None): +def process(section, dir_name, input_name=None, status=0, client_agent='manual', download_id='', input_category=None, failure_link=None): cfg = dict(core.CFG[section][input_category]) - host = cfg["host"] - port = cfg["port"] - apikey = cfg["apikey"] - if section == "CouchPotato": - method = cfg["method"] + host = cfg['host'] + port = cfg['port'] + apikey = cfg['apikey'] + if section == 'CouchPotato': + method = cfg['method'] else: method = None # added importMode for Radarr config - if section == "Radarr": - import_mode = cfg.get("importMode", "Move") + if section == 'Radarr': + import_mode = cfg.get('importMode', 'Move') else: import_mode = None - delete_failed = int(cfg["delete_failed"]) - wait_for = int(cfg["wait_for"]) - ssl = int(cfg.get("ssl", 0)) - web_root = cfg.get("web_root", "") - remote_path = int(cfg.get("remote_path", 0)) - protocol = "https://" if ssl else "http://" - omdbapikey = cfg.get("omdbapikey", "") + delete_failed = int(cfg['delete_failed']) + wait_for = int(cfg['wait_for']) + ssl = int(cfg.get('ssl', 0)) + web_root = cfg.get('web_root', '') + remote_path = int(cfg.get('remote_path', 0)) + protocol = 'https://' if ssl else 'http://' + omdbapikey = cfg.get('omdbapikey', '') status = int(status) if status > 0 and core.NOEXTRACTFAILED: extract = 0 else: - extract = int(cfg.get("extract", 0)) + extract = int(cfg.get('extract', 0)) imdbid = find_imdbid(dir_name, input_name, omdbapikey) - if section == "CouchPotato": - base_url = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey) - if section == "Radarr": - base_url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root) - url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root) + if 
section == 'CouchPotato': + base_url = '{0}{1}:{2}{3}/api/{4}/'.format(protocol, host, port, web_root, apikey) + if section == 'Radarr': + base_url = '{0}{1}:{2}{3}/api/command'.format(protocol, host, port, web_root) + url2 = '{0}{1}:{2}{3}/api/config/downloadClient'.format(protocol, host, port, web_root) headers = {'X-Api-Key': apikey} if not apikey: logger.info('No CouchPotato or Radarr apikey entered. Performing transcoder functions only') release = None elif server_responding(base_url): - if section == "CouchPotato": + if section == 'CouchPotato': release = get_release(base_url, imdbid, download_id) else: release = None else: - logger.error("Server did not respond. Exiting", section) + logger.error('Server did not respond. Exiting', section) return ProcessResult( - message="{0}: Failed to post-process - {0} did not respond.".format(section), + message='{0}: Failed to post-process - {0} did not respond.'.format(section), status_code=1, ) @@ -86,7 +86,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", specific_path = os.path.join(dir_name, str(input_name)) clean_name = os.path.splitext(specific_path) - if clean_name[1] == ".nzb": + if clean_name[1] == '.nzb': specific_path = clean_name[0] if os.path.isdir(specific_path): dir_name = specific_path @@ -109,23 +109,23 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", good_files += 1 if num_files and good_files == num_files: if status: - logger.info("Status shown as failed from Downloader, but {0} valid video files found. Setting as success.".format(good_files), section) + logger.info('Status shown as failed from Downloader, but {0} valid video files found. Setting as success.'.format(good_files), section) status = 0 elif num_files and good_files < num_files: - logger.info("Status shown as success from Downloader, but corrupt video files found. 
Setting as failed.", section) + logger.info('Status shown as success from Downloader, but corrupt video files found. Setting as failed.', section) if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': print('[NZB] MARK=BAD') if failure_link: failure_link += '&corrupt=true' status = 1 - elif client_agent == "manual": - logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section) + elif client_agent == 'manual': + logger.warning('No media files found in directory {0} to manually process.'.format(dir_name), section) return ProcessResult( - message="", + message='', status_code=0, # Success (as far as this script is concerned) ) else: - logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section) + logger.warning('No media files found in directory {0}. Processing this as a failed download'.format(dir_name), section) status = 1 if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': print('[NZB] MARK=BAD') @@ -134,24 +134,24 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", if core.TRANSCODE == 1: result, new_dir_name = transcoder.transcode_directory(dir_name) if result == 0: - logger.debug("Transcoding succeeded for files in {0}".format(dir_name), section) + logger.debug('Transcoding succeeded for files in {0}'.format(dir_name), section) dir_name = new_dir_name - chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8) - logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section) + chmod_directory = int(str(cfg.get('chmodDirectory', '0')), 8) + logger.debug('Config setting \'chmodDirectory\' currently set to {0}'.format(oct(chmod_directory)), section) if chmod_directory: - logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section) + logger.info('Attempting to set 
the octal permission of \'{0}\' on directory \'{1}\''.format(oct(chmod_directory), dir_name), section) core.rchmod(dir_name, chmod_directory) else: - logger.error("Transcoding failed for files in {0}".format(dir_name), section) + logger.error('Transcoding failed for files in {0}'.format(dir_name), section) return ProcessResult( - message="{0}: Failed to post-process - Transcoding failed".format(section), + message='{0}: Failed to post-process - Transcoding failed'.format(section), status_code=1, ) for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False): - if not release and ".cp(tt" not in video and imdbid: + if not release and '.cp(tt' not in video and imdbid: video_name, video_ext = os.path.splitext(video) - video2 = "{0}.cp({1}){2}".format(video_name, imdbid, video_ext) + video2 = '{0}.cp({1}){2}'.format(video_name, imdbid, video_ext) if not (client_agent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'): logger.debug('Renaming: {0} to: {1}'.format(video, video2)) os.rename(video, video2) @@ -159,7 +159,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", if not apikey: # If only using Transcoder functions, exit here. logger.info('No CouchPotato or Radarr apikey entered. 
Processing completed.') return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) @@ -170,157 +170,157 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", params['media_folder'] = remote_dir(dir_name) if remote_path else dir_name - if section == "CouchPotato": - if method == "manage": - command = "manage.update" + if section == 'CouchPotato': + if method == 'manage': + command = 'manage.update' params = {} else: - command = "renamer.scan" + command = 'renamer.scan' - url = "{0}{1}".format(base_url, command) - logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) - logger.postprocess("Starting {0} scan for {1}".format(method, input_name), section) + url = '{0}{1}'.format(base_url, command) + logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params), section) + logger.postprocess('Starting {0} scan for {1}'.format(method, input_name), section) - if section == "Radarr": + if section == 'Radarr': payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id, 'importMode': import_mode} if not download_id: - payload.pop("downloadClientId") - logger.debug("Opening URL: {0} with PARAMS: {1}".format(base_url, payload), section) - logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(input_name), section) + payload.pop('downloadClientId') + logger.debug('Opening URL: {0} with PARAMS: {1}'.format(base_url, payload), section) + logger.postprocess('Starting DownloadedMoviesScan scan for {0}'.format(input_name), section) try: - if section == "CouchPotato": + if section == 'CouchPotato': r = requests.get(url, params=params, verify=False, timeout=(30, 1800)) else: r = requests.post(base_url, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800)) except requests.ConnectionError: - 
logger.error("Unable to open URL", section) + logger.error('Unable to open URL', section) return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {0}".format(section), + message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), status_code=1, ) result = r.json() if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return ProcessResult( - message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), + message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code), status_code=1, ) - elif section == "CouchPotato" and result['success']: - logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dir_name), section) - if method == "manage": + elif section == 'CouchPotato' and result['success']: + logger.postprocess('SUCCESS: Finished {0} scan for folder {1}'.format(method, dir_name), section) + if method == 'manage': return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) - elif section == "Radarr": - logger.postprocess("Radarr response: {0}".format(result['state'])) + elif section == 'Radarr': + logger.postprocess('Radarr response: {0}'.format(result['state'])) try: res = json.loads(r.content) scan_id = int(res['id']) - logger.debug("Scan started with id: {0}".format(scan_id), section) + logger.debug('Scan started with id: {0}'.format(scan_id), section) started = True except Exception as e: - logger.warning("No scan id was returned due to: {0}".format(e), section) + logger.warning('No scan id was returned due to: {0}'.format(e), section) scan_id = None else: - logger.error("FAILED: {0} scan was unable 
to finish for folder {1}. exiting!".format(method, dir_name), + logger.error('FAILED: {0} scan was unable to finish for folder {1}. exiting!'.format(method, dir_name), section) return ProcessResult( - message="{0}: Failed to post-process - Server did not return success".format(section), + message='{0}: Failed to post-process - Server did not return success'.format(section), status_code=1, ) else: core.FAILED = True - logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(input_name), section) + logger.postprocess('FAILED DOWNLOAD DETECTED FOR {0}'.format(input_name), section) if failure_link: report_nzb(failure_link, client_agent) - if section == "Radarr": - logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section) + if section == 'Radarr': + logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(section), section) return ProcessResult( - message="{0}: Download Failed. Sending back to {0}".format(section), + message='{0}: Download Failed. Sending back to {0}'.format(section), status_code=1, # Return as failed to flag this in the downloader. 
) if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name: - logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) + logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section) remove_dir(dir_name) if not release_id and not media_id: - logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(input_name), + logger.error('Could not find a downloaded movie in the database matching {0}, exiting!'.format(input_name), section) return ProcessResult( - message="{0}: Failed to post-process - Failed download not found in {0}".format(section), + message='{0}: Failed to post-process - Failed download not found in {0}'.format(section), status_code=1, ) if release_id: - logger.postprocess("Setting failed release {0} to ignored ...".format(input_name), section) + logger.postprocess('Setting failed release {0} to ignored ...'.format(input_name), section) - url = "{url}release.ignore".format(url=base_url) + url = '{url}release.ignore'.format(url=base_url) params = {'id': release_id} - logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) + logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params), section) try: r = requests.get(url, params=params, verify=False, timeout=(30, 120)) except requests.ConnectionError: - logger.error("Unable to open URL {0}".format(url), section) + logger.error('Unable to open URL {0}'.format(url), section) return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {1}".format(section), + message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), status_code=1, ) result = r.json() if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return ProcessResult( 
status_code=1, - message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), + message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code), ) elif result['success']: - logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(input_name), section) + logger.postprocess('SUCCESS: {0} has been set to ignored ...'.format(input_name), section) else: - logger.warning("FAILED: Unable to set {0} to ignored!".format(input_name), section) + logger.warning('FAILED: Unable to set {0} to ignored!'.format(input_name), section) return ProcessResult( - message="{0}: Failed to post-process - Unable to set {1} to ignored".format(section, input_name), + message='{0}: Failed to post-process - Unable to set {1} to ignored'.format(section, input_name), status_code=1, ) - logger.postprocess("Trying to snatch the next highest ranked release.", section) + logger.postprocess('Trying to snatch the next highest ranked release.', section) - url = "{0}movie.searcher.try_next".format(base_url) - logger.debug("Opening URL: {0}".format(url), section) + url = '{0}movie.searcher.try_next'.format(base_url) + logger.debug('Opening URL: {0}'.format(url), section) try: r = requests.get(url, params={'media_id': media_id}, verify=False, timeout=(30, 600)) except requests.ConnectionError: - logger.error("Unable to open URL {0}".format(url), section) + logger.error('Unable to open URL {0}'.format(url), section) return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {0}".format(section), + message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), status_code=1, ) result = r.json() if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return ProcessResult( - message="{0}: Failed to 
post-process - Server returned status {1}".format(section, r.status_code), + message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code), status_code=1, ) elif result['success']: - logger.postprocess("SUCCESS: Snatched the next highest release ...", section) + logger.postprocess('SUCCESS: Snatched the next highest release ...', section) return ProcessResult( - message="{0}: Successfully snatched next highest release".format(section), + message='{0}: Successfully snatched next highest release'.format(section), status_code=0, ) else: - logger.postprocess("SUCCESS: Unable to find a new release to snatch now. CP will keep searching!", section) + logger.postprocess('SUCCESS: Unable to find a new release to snatch now. CP will keep searching!', section) return ProcessResult( status_code=0, - message="{0}: No new release found now. {0} will keep searching".format(section), + message='{0}: No new release found now. {0} will keep searching'.format(section), ) # Added a release that was not in the wanted list so confirm rename successful by finding this movie media.list. @@ -330,8 +330,8 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", # we will now check to see if CPS has finished renaming before returning to TorrentToMedia and unpausing. timeout = time.time() + 60 * wait_for while time.time() < timeout: # only wait 2 (default) minutes, then return. 
- logger.postprocess("Checking for status change, please stand by ...", section) - if section == "CouchPotato": + logger.postprocess('Checking for status change, please stand by ...', section) + if section == 'CouchPotato': release = get_release(base_url, imdbid, download_id, release_id) scan_id = None else: @@ -342,50 +342,50 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", title = release[release_id]['title'] release_status_new = release[release_id]['status'] if release_status_old is None: # we didn't have a release before, but now we do. - logger.postprocess("SUCCESS: Movie {0} has now been added to CouchPotato with release status of [{1}]".format( + logger.postprocess('SUCCESS: Movie {0} has now been added to CouchPotato with release status of [{1}]'.format( title, str(release_status_new).upper()), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) if release_status_new != release_status_old: - logger.postprocess("SUCCESS: Release for {0} has now been marked with a status of [{1}]".format( + logger.postprocess('SUCCESS: Release for {0} has now been marked with a status of [{1}]'.format( title, str(release_status_new).upper()), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) except Exception: pass elif scan_id: - url = "{0}/{1}".format(base_url, scan_id) + url = '{0}/{1}'.format(base_url, scan_id) command_status = command_complete(url, params, headers, section) if command_status: - logger.debug("The Scan command return status: {0}".format(command_status), section) + logger.debug('The Scan command return status: {0}'.format(command_status), section) if command_status in ['completed']: - logger.debug("The Scan command has 
completed successfully. Renaming was successful.", section) - return [0, "{0}: Successfully post-processed {1}".format(section, input_name)] + logger.debug('The Scan command has completed successfully. Renaming was successful.', section) + return [0, '{0}: Successfully post-processed {1}'.format(section, input_name)] elif command_status in ['failed']: - logger.debug("The Scan command has failed. Renaming was not successful.", section) + logger.debug('The Scan command has failed. Renaming was not successful.', section) # return ProcessResult( - # message="{0}: Failed to post-process {1}".format(section, input_name), + # message='{0}: Failed to post-process {1}'.format(section, input_name), # status_code=1, # ) if not os.path.isdir(dir_name): - logger.postprocess("SUCCESS: Input Directory [{0}] has been processed and removed".format( + logger.postprocess('SUCCESS: Input Directory [{0}] has been processed and removed'.format( dir_name), section) return ProcessResult( status_code=0, - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), ) elif not list_media_files(dir_name, media=True, audio=False, meta=False, archives=True): - logger.postprocess("SUCCESS: Input Directory [{0}] has no remaining media files. This has been fully processed.".format( + logger.postprocess('SUCCESS: Input Directory [{0}] has no remaining media files. This has been fully processed.'.format( dir_name), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) @@ -393,19 +393,19 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", time.sleep(10 * wait_for) # The status hasn't changed. we have waited wait_for minutes which is more than enough. uTorrent can resume seeding now. 
- if section == "Radarr" and completed_download_handling(url2, headers, section=section): - logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section) + if section == 'Radarr' and completed_download_handling(url2, headers, section=section): + logger.debug('The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.'.format(section), section) return ProcessResult( - message="{0}: Complete DownLoad Handling is enabled. Passing back to {0}".format(section), + message='{0}: Complete DownLoad Handling is enabled. Passing back to {0}'.format(section), status_code=status, ) logger.warning( - "{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(input_name, wait_for), + '{0} does not appear to have changed status after {1} minutes, Please check your logs.'.format(input_name, wait_for), section, ) return ProcessResult( status_code=1, - message="{0}: Failed to post-process - No change in status".format(section), + message='{0}: Failed to post-process - No change in status'.format(section), ) @@ -415,39 +415,39 @@ def get_release(base_url, imdb_id=None, download_id=None, release_id=None): # determine cmd and params to send to CouchPotato to get our results section = 'movies' - cmd = "media.list" + cmd = 'media.list' if release_id or imdb_id: section = 'media' - cmd = "media.get" + cmd = 'media.get' params['id'] = release_id or imdb_id if not (release_id or imdb_id or download_id): - logger.debug("No information available to filter CP results") + logger.debug('No information available to filter CP results') return results - url = "{0}{1}".format(base_url, cmd) - logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params)) + url = '{0}{1}'.format(base_url, cmd) + logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params)) try: r = requests.get(url, params=params, 
verify=False, timeout=(30, 60)) except requests.ConnectionError: - logger.error("Unable to open URL {0}".format(url)) + logger.error('Unable to open URL {0}'.format(url)) return results try: result = r.json() except ValueError: # ValueError catches simplejson's JSONDecodeError and json's ValueError - logger.error("CouchPotato returned the following non-json data") + logger.error('CouchPotato returned the following non-json data') for line in r.iter_lines(): - logger.error("{0}".format(line)) + logger.error('{0}'.format(line)) return results if not result['success']: if 'error' in result: logger.error('{0}'.format(result['error'])) else: - logger.error("no media found for id {0}".format(params['id'])) + logger.error('no media found for id {0}'.format(params['id'])) return results # Gather release info and return it back, no need to narrow results @@ -489,7 +489,7 @@ def get_release(base_url, imdb_id=None, download_id=None, release_id=None): for id1, x1 in results.items(): for id2, x2 in results.items(): try: - if x2["last_edit"] > x1["last_edit"]: + if x2['last_edit'] > x1['last_edit']: results.pop(id1) except Exception: continue diff --git a/core/auto_process/music.py b/core/auto_process/music.py index dbdf085d..9c5f7048 100644 --- a/core/auto_process/music.py +++ b/core/auto_process/music.py @@ -15,34 +15,34 @@ from core.utils import convert_to_ascii, list_media_files, remote_dir, remove_di requests.packages.urllib3.disable_warnings() -def process(section, dir_name, input_name=None, status=0, client_agent="manual", input_category=None): +def process(section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None): status = int(status) cfg = dict(core.CFG[section][input_category]) - host = cfg["host"] - port = cfg["port"] - apikey = cfg["apikey"] - wait_for = int(cfg["wait_for"]) - ssl = int(cfg.get("ssl", 0)) - delete_failed = int(cfg["delete_failed"]) - web_root = cfg.get("web_root", "") - remote_path = int(cfg.get("remote_path", 0)) - 
protocol = "https://" if ssl else "http://" + host = cfg['host'] + port = cfg['port'] + apikey = cfg['apikey'] + wait_for = int(cfg['wait_for']) + ssl = int(cfg.get('ssl', 0)) + delete_failed = int(cfg['delete_failed']) + web_root = cfg.get('web_root', '') + remote_path = int(cfg.get('remote_path', 0)) + protocol = 'https://' if ssl else 'http://' status = int(status) if status > 0 and core.NOEXTRACTFAILED: extract = 0 else: - extract = int(cfg.get("extract", 0)) + extract = int(cfg.get('extract', 0)) - if section == "Lidarr": - url = "{0}{1}:{2}{3}/api/v1".format(protocol, host, port, web_root) + if section == 'Lidarr': + url = '{0}{1}:{2}{3}/api/v1'.format(protocol, host, port, web_root) else: - url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root) + url = '{0}{1}:{2}{3}/api'.format(protocol, host, port, web_root) if not server_responding(url): - logger.error("Server did not respond. Exiting", section) + logger.error('Server did not respond. Exiting', section) return ProcessResult( - message="{0}: Failed to post-process - {0} did not respond.".format(section), + message='{0}: Failed to post-process - {0} did not respond.'.format(section), status_code=1, ) @@ -51,7 +51,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", specific_path = os.path.join(dir_name, str(input_name)) clean_name = os.path.splitext(specific_path) - if clean_name[1] == ".nzb": + if clean_name[1] == '.nzb': specific_path = clean_name[0] if os.path.isdir(specific_path): dir_name = specific_path @@ -65,14 +65,14 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", input_name, dir_name = convert_to_ascii(input_name, dir_name) # if listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and status: - # logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section) + # logger.info('Status shown as failed from Downloader, but valid video files found. 
Setting as successful.', section) # status = 0 - if status == 0 and section == "HeadPhones": + if status == 0 and section == 'HeadPhones': params = { 'apikey': apikey, - 'cmd': "forceProcess", + 'cmd': 'forceProcess', 'dir': remote_dir(dir_name) if remote_path else dir_name } @@ -82,7 +82,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", params = { 'apikey': apikey, - 'cmd': "forceProcess", + 'cmd': 'forceProcess', 'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0] } @@ -91,29 +91,29 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", return res # The status hasn't changed. uTorrent can resume seeding now. - logger.warning("The music album does not appear to have changed status after {0} minutes. Please check your Logs".format(wait_for), section) + logger.warning('The music album does not appear to have changed status after {0} minutes. Please check your Logs'.format(wait_for), section) return ProcessResult( - message="{0}: Failed to post-process - No change in wanted status".format(section), + message='{0}: Failed to post-process - No change in wanted status'.format(section), status_code=1, ) - elif status == 0 and section == "Lidarr": - url = "{0}{1}:{2}{3}/api/v1/command".format(protocol, host, port, web_root) - headers = {"X-Api-Key": apikey} + elif status == 0 and section == 'Lidarr': + url = '{0}{1}:{2}{3}/api/v1/command'.format(protocol, host, port, web_root) + headers = {'X-Api-Key': apikey} if remote_path: - logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section) - data = {"name": "Rename", "path": remote_dir(dir_name)} + logger.debug('remote_path: {0}'.format(remote_dir(dir_name)), section) + data = {'name': 'Rename', 'path': remote_dir(dir_name)} else: - logger.debug("path: {0}".format(dir_name), section) - data = {"name": "Rename", "path": dir_name} + logger.debug('path: {0}'.format(dir_name), section) + data = {'name': 'Rename', 
'path': dir_name} data = json.dumps(data) try: - logger.debug("Opening URL: {0} with data: {1}".format(url, data), section) + logger.debug('Opening URL: {0} with data: {1}'.format(url, data), section) r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800)) except requests.ConnectionError: - logger.error("Unable to open URL: {0}".format(url), section) + logger.error('Unable to open URL: {0}'.format(url), section) return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {0}".format(section), + message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), status_code=1, ) @@ -123,20 +123,20 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", try: res = json.loads(r.content) scan_id = int(res['id']) - logger.debug("Scan started with id: {0}".format(scan_id), section) + logger.debug('Scan started with id: {0}'.format(scan_id), section) started = True except Exception as e: - logger.warning("No scan id was returned due to: {0}".format(e), section) + logger.warning('No scan id was returned due to: {0}'.format(e), section) scan_id = None started = False return ProcessResult( - message="{0}: Failed to post-process - Unable to start scan".format(section), + message='{0}: Failed to post-process - Unable to start scan'.format(section), status_code=1, ) n = 0 params = {} - url = "{0}/{1}".format(url, scan_id) + url = '{0}/{1}'.format(url, scan_id) while n < 6: # set up wait_for minutes to see if command completes.. 
time.sleep(10 * wait_for) command_status = command_complete(url, params, headers, section) @@ -144,64 +144,64 @@ def process(section, dir_name, input_name=None, status=0, client_agent="manual", break n += 1 if command_status: - logger.debug("The Scan command return status: {0}".format(command_status), section) + logger.debug('The Scan command return status: {0}'.format(command_status), section) if not os.path.exists(dir_name): - logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section) + logger.debug('The directory {0} has been removed. Renaming was successful.'.format(dir_name), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) elif command_status and command_status in ['completed']: - logger.debug("The Scan command has completed successfully. Renaming was successful.", section) + logger.debug('The Scan command has completed successfully. Renaming was successful.', section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) elif command_status and command_status in ['failed']: - logger.debug("The Scan command has failed. Renaming was not successful.", section) + logger.debug('The Scan command has failed. Renaming was not successful.', section) # return ProcessResult( - # message="{0}: Failed to post-process {1}".format(section, input_name), + # message='{0}: Failed to post-process {1}'.format(section, input_name), # status_code=1, # ) else: - logger.debug("The Scan command did not return status completed. Passing back to {0} to attempt complete download handling.".format(section), section) + logger.debug('The Scan command did not return status completed. 
Passing back to {0} to attempt complete download handling.'.format(section), section) return ProcessResult( - message="{0}: Passing back to {0} to attempt Complete Download Handling".format(section), + message='{0}: Passing back to {0} to attempt Complete Download Handling'.format(section), status_code=status, ) else: - if section == "Lidarr": - logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section) + if section == 'Lidarr': + logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(section), section) return ProcessResult( - message="{0}: Download Failed. Sending back to {0}".format(section), + message='{0}: Download Failed. Sending back to {0}'.format(section), status_code=1, # Return as failed to flag this in the downloader. ) else: - logger.warning("FAILED DOWNLOAD DETECTED", section) + logger.warning('FAILED DOWNLOAD DETECTED', section) if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name: - logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) + logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section) remove_dir(dir_name) return ProcessResult( - message="{0}: Failed to post-process. {0} does not support failed downloads".format(section), + message='{0}: Failed to post-process. {0} does not support failed downloads'.format(section), status_code=1, # Return as failed to flag this in the downloader. 
) def get_status(url, apikey, dir_name): - logger.debug("Attempting to get current status for release:{0}".format(os.path.basename(dir_name))) + logger.debug('Attempting to get current status for release:{0}'.format(os.path.basename(dir_name))) params = { 'apikey': apikey, - 'cmd': "getHistory" + 'cmd': 'getHistory' } - logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params)) + logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params)) try: r = requests.get(url, params=params, verify=False, timeout=(30, 120)) except requests.RequestException: - logger.error("Unable to open URL") + logger.error('Unable to open URL') return None try: @@ -212,39 +212,39 @@ def get_status(url, apikey, dir_name): for album in result: if os.path.basename(dir_name) == album['FolderName']: - return album["Status"].lower() + return album['Status'].lower() def force_process(params, url, apikey, input_name, dir_name, section, wait_for): release_status = get_status(url, apikey, dir_name) if not release_status: - logger.error("Could not find a status for {0}, is it in the wanted list ?".format(input_name), section) + logger.error('Could not find a status for {0}, is it in the wanted list ?'.format(input_name), section) - logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) + logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params), section) try: r = requests.get(url, params=params, verify=False, timeout=(30, 300)) except requests.ConnectionError: - logger.error("Unable to open URL {0}".format(url), section) + logger.error('Unable to open URL {0}'.format(url), section) return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {0}".format(section), + message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), status_code=1, ) - logger.debug("Result: {0}".format(r.text), section) + logger.debug('Result: {0}'.format(r.text), section) if r.status_code not in [requests.codes.ok, 
requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return ProcessResult( - message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), + message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code), status_code=1, ) - elif r.text == "OK": - logger.postprocess("SUCCESS: Post-Processing started for {0} in folder {1} ...".format(input_name, dir_name), section) + elif r.text == 'OK': + logger.postprocess('SUCCESS: Post-Processing started for {0} in folder {1} ...'.format(input_name, dir_name), section) else: - logger.error("FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!".format(input_name, dir_name), section) + logger.error('FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!'.format(input_name, dir_name), section) return ProcessResult( - message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), + message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section), status_code=1, ) @@ -253,20 +253,20 @@ def force_process(params, url, apikey, input_name, dir_name, section, wait_for): while time.time() < timeout: current_status = get_status(url, apikey, dir_name) if current_status is not None and current_status != release_status: # Something has changed. CPS must have processed this movie. 
- logger.postprocess("SUCCESS: This release is now marked as status [{0}]".format(current_status), section) + logger.postprocess('SUCCESS: This release is now marked as status [{0}]'.format(current_status), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) if not os.path.isdir(dir_name): - logger.postprocess("SUCCESS: The input directory {0} has been removed Processing must have finished.".format(dir_name), section) + logger.postprocess('SUCCESS: The input directory {0} has been removed Processing must have finished.'.format(dir_name), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) time.sleep(10 * wait_for) # The status hasn't changed. return ProcessResult( - message="no change", + message='no change', status_code=2, ) diff --git a/core/auto_process/tv.py b/core/auto_process/tv.py index 17d7a153..32c83ab4 100644 --- a/core/auto_process/tv.py +++ b/core/auto_process/tv.py @@ -18,56 +18,56 @@ from core.utils import convert_to_ascii, flatten, import_subs, list_media_files, requests.packages.urllib3.disable_warnings() -def process(section, dir_name, input_name=None, failed=False, client_agent="manual", download_id=None, input_category=None, failure_link=None): +def process(section, dir_name, input_name=None, failed=False, client_agent='manual', download_id=None, input_category=None, failure_link=None): cfg = dict(core.CFG[section][input_category]) - host = cfg["host"] - port = cfg["port"] - ssl = int(cfg.get("ssl", 0)) - web_root = cfg.get("web_root", "") - protocol = "https://" if ssl else "http://" - username = cfg.get("username", "") - password = cfg.get("password", "") - apikey = cfg.get("apikey", "") + host = cfg['host'] + port = cfg['port'] + ssl = 
int(cfg.get('ssl', 0)) + web_root = cfg.get('web_root', '') + protocol = 'https://' if ssl else 'http://' + username = cfg.get('username', '') + password = cfg.get('password', '') + apikey = cfg.get('apikey', '') - if server_responding("{0}{1}:{2}{3}".format(protocol, host, port, web_root)): + if server_responding('{0}{1}:{2}{3}'.format(protocol, host, port, web_root)): # auto-detect correct fork fork, fork_params = auto_fork(section, input_category) elif not username and not apikey: logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only') - fork, fork_params = "None", {} + fork, fork_params = 'None', {} else: - logger.error("Server did not respond. Exiting", section) + logger.error('Server did not respond. Exiting', section) return ProcessResult( status_code=1, - message="{0}: Failed to post-process - {0} did not respond.".format(section), + message='{0}: Failed to post-process - {0} did not respond.'.format(section), ) - delete_failed = int(cfg.get("delete_failed", 0)) - nzb_extraction_by = cfg.get("nzbExtractionBy", "Downloader") - process_method = cfg.get("process_method") - if client_agent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym": - process_method = "symlink" - remote_path = int(cfg.get("remote_path", 0)) - wait_for = int(cfg.get("wait_for", 2)) - force = int(cfg.get("force", 0)) - delete_on = int(cfg.get("delete_on", 0)) - ignore_subs = int(cfg.get("ignore_subs", 0)) + delete_failed = int(cfg.get('delete_failed', 0)) + nzb_extraction_by = cfg.get('nzbExtractionBy', 'Downloader') + process_method = cfg.get('process_method') + if client_agent == core.TORRENT_CLIENTAGENT and core.USELINK == 'move-sym': + process_method = 'symlink' + remote_path = int(cfg.get('remote_path', 0)) + wait_for = int(cfg.get('wait_for', 2)) + force = int(cfg.get('force', 0)) + delete_on = int(cfg.get('delete_on', 0)) + ignore_subs = int(cfg.get('ignore_subs', 0)) status = int(failed) if status > 0 and core.NOEXTRACTFAILED: 
extract = 0 else: - extract = int(cfg.get("extract", 0)) - # get importmode, default to "Move" for consistency with legacy - import_mode = cfg.get("importMode", "Move") + extract = int(cfg.get('extract', 0)) + # get importmode, default to 'Move' for consistency with legacy + import_mode = cfg.get('importMode', 'Move') if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name. dir_name = os.path.split(os.path.normpath(dir_name))[0] specific_path = os.path.join(dir_name, str(input_name)) clean_name = os.path.splitext(specific_path) - if clean_name[1] == ".nzb": + if clean_name[1] == '.nzb': specific_path = clean_name[0] if os.path.isdir(specific_path): dir_name = specific_path @@ -82,7 +82,7 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu if e.errno != errno.EEXIST: raise - if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != "Destination"): + if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != 'Destination'): if input_name: process_all_exceptions(input_name, dir_name) input_name, dir_name = convert_to_ascii(input_name, dir_name) @@ -118,24 +118,24 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu print('[NZB] MARK=BAD') if failure_link: failure_link += '&corrupt=true' - elif client_agent == "manual": - logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section) + elif client_agent == 'manual': + logger.warning('No media files found in directory {0} to manually process.'.format(dir_name), section) return ProcessResult( - message="", + message='', status_code=0, # Success (as far as this script is concerned) ) - elif nzb_extraction_by == "Destination": - logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.") + elif 
nzb_extraction_by == 'Destination': + logger.info('Check for media files ignored because nzbExtractionBy is set to Destination.') if int(failed) == 0: - logger.info("Setting Status Success.") + logger.info('Setting Status Success.') status = 0 failed = 0 else: - logger.info("Downloader reported an error during download or verification. Processing this as a failed download.") + logger.info('Downloader reported an error during download or verification. Processing this as a failed download.') status = 1 failed = 1 else: - logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section) + logger.warning('No media files found in directory {0}. Processing this as a failed download'.format(dir_name), section) status = 1 failed = 1 if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': @@ -144,18 +144,18 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu if status == 0 and core.TRANSCODE == 1: # only transcode successful downloads result, new_dir_name = transcoder.transcode_directory(dir_name) if result == 0: - logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dir_name), section) + logger.debug('SUCCESS: Transcoding succeeded for files in {0}'.format(dir_name), section) dir_name = new_dir_name - chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8) - logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section) + chmod_directory = int(str(cfg.get('chmodDirectory', '0')), 8) + logger.debug('Config setting \'chmodDirectory\' currently set to {0}'.format(oct(chmod_directory)), section) if chmod_directory: - logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section) + logger.info('Attempting to set the octal permission of \'{0}\' on directory \'{1}\''.format(oct(chmod_directory), dir_name), section) core.rchmod(dir_name, 
chmod_directory) else: - logger.error("FAILED: Transcoding failed for files in {0}".format(dir_name), section) + logger.error('FAILED: Transcoding failed for files in {0}'.format(dir_name), section) return ProcessResult( - message="{0}: Failed to post-process - Transcoding failed".format(section), + message='{0}: Failed to post-process - Transcoding failed'.format(section), status_code=1, ) @@ -166,140 +166,140 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu fork_params['nzbName'] = input_name for param in copy.copy(fork_params): - if param == "failed": + if param == 'failed': fork_params[param] = failed del fork_params['proc_type'] - if "type" in fork_params: + if 'type' in fork_params: del fork_params['type'] - if param == "return_data": + if param == 'return_data': fork_params[param] = 0 del fork_params['quiet'] - if param == "type": + if param == 'type': fork_params[param] = 'manual' - if "proc_type" in fork_params: + if 'proc_type' in fork_params: del fork_params['proc_type'] - if param in ["dir_name", "dir", "proc_dir", "process_directory", "path"]: + if param in ['dir_name', 'dir', 'proc_dir', 'process_directory', 'path']: fork_params[param] = dir_name if remote_path: fork_params[param] = remote_dir(dir_name) - if param == "process_method": + if param == 'process_method': if process_method: fork_params[param] = process_method else: del fork_params[param] - if param in ["force", "force_replace"]: + if param in ['force', 'force_replace']: if force: fork_params[param] = force else: del fork_params[param] - if param in ["delete_on", "delete"]: + if param in ['delete_on', 'delete']: if delete_on: fork_params[param] = delete_on else: del fork_params[param] - if param == "ignore_subs": + if param == 'ignore_subs': if ignore_subs: fork_params[param] = ignore_subs else: del fork_params[param] - if param == "force_next": + if param == 'force_next': fork_params[param] = 1 # delete any unused params so we don't pass them to SB by 
mistake [fork_params.pop(k) for k, v in fork_params.items() if v is None] if status == 0: - if section == "NzbDrone" and not apikey: + if section == 'NzbDrone' and not apikey: logger.info('No Sonarr apikey entered. Processing completed.') return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) - logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section) + logger.postprocess('SUCCESS: The download succeeded, sending a post-process request', section) else: core.FAILED = True if failure_link: report_nzb(failure_link, client_agent) if 'failed' in fork_params: - logger.postprocess("FAILED: The download failed. Sending 'failed' process request to {0} branch".format(fork), section) - elif section == "NzbDrone": - logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(fork), section) + logger.postprocess('FAILED: The download failed. Sending \'failed\' process request to {0} branch'.format(fork), section) + elif section == 'NzbDrone': + logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(fork), section) return ProcessResult( - message="{0}: Download Failed. Sending back to {0}".format(section), + message='{0}: Download Failed. Sending back to {0}'.format(section), status_code=1, # Return as failed to flag this in the downloader. ) else: - logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section) + logger.postprocess('FAILED: The download failed. {0} branch does not handle failed downloads. 
Nothing to process'.format(fork), section) if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name: - logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) + logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section) remove_dir(dir_name) return ProcessResult( - message="{0}: Failed to post-process. {0} does not support failed downloads".format(section), + message='{0}: Failed to post-process. {0} does not support failed downloads'.format(section), status_code=1, # Return as failed to flag this in the downloader. ) url = None - if section == "SickBeard": + if section == 'SickBeard': if apikey: - url = "{0}{1}:{2}{3}/api/{4}/?cmd=postprocess".format(protocol, host, port, web_root, apikey) + url = '{0}{1}:{2}{3}/api/{4}/?cmd=postprocess'.format(protocol, host, port, web_root, apikey) else: - url = "{0}{1}:{2}{3}/home/postprocess/processEpisode".format(protocol, host, port, web_root) - elif section == "NzbDrone": - url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root) - url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root) - headers = {"X-Api-Key": apikey} + url = '{0}{1}:{2}{3}/home/postprocess/processEpisode'.format(protocol, host, port, web_root) + elif section == 'NzbDrone': + url = '{0}{1}:{2}{3}/api/command'.format(protocol, host, port, web_root) + url2 = '{0}{1}:{2}{3}/api/config/downloadClient'.format(protocol, host, port, web_root) + headers = {'X-Api-Key': apikey} # params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'} if remote_path: - logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section) - data = {"name": "DownloadedEpisodesScan", "path": remote_dir(dir_name), "downloadClientId": download_id, "importMode": import_mode} + logger.debug('remote_path: {0}'.format(remote_dir(dir_name)), section) + data = {'name': 'DownloadedEpisodesScan', 'path': remote_dir(dir_name), 
'downloadClientId': download_id, 'importMode': import_mode} else: - logger.debug("path: {0}".format(dir_name), section) - data = {"name": "DownloadedEpisodesScan", "path": dir_name, "downloadClientId": download_id, "importMode": import_mode} + logger.debug('path: {0}'.format(dir_name), section) + data = {'name': 'DownloadedEpisodesScan', 'path': dir_name, 'downloadClientId': download_id, 'importMode': import_mode} if not download_id: - data.pop("downloadClientId") + data.pop('downloadClientId') data = json.dumps(data) try: - if section == "SickBeard": - logger.debug("Opening URL: {0} with params: {1}".format(url, fork_params), section) + if section == 'SickBeard': + logger.debug('Opening URL: {0} with params: {1}'.format(url, fork_params), section) s = requests.Session() if not apikey and username and password: - login = "{0}{1}:{2}{3}/login".format(protocol, host, port, web_root) + login = '{0}{1}:{2}{3}/login'.format(protocol, host, port, web_root) login_params = {'username': username, 'password': password} r = s.get(login, verify=False, timeout=(30, 60)) if r.status_code == 401 and r.cookies.get('_xsrf'): login_params['_xsrf'] = r.cookies.get('_xsrf') s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60)) r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800)) - elif section == "NzbDrone": - logger.debug("Opening URL: {0} with data: {1}".format(url, data), section) + elif section == 'NzbDrone': + logger.debug('Opening URL: {0} with data: {1}'.format(url, data), section) r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800)) except requests.ConnectionError: - logger.error("Unable to open URL: {0}".format(url), section) + logger.error('Unable to open URL: {0}'.format(url), section) return ProcessResult( - message="{0}: Failed to post-process - Unable to connect to {0}".format(section), + message='{0}: Failed to post-process - Unable to connect to 
{0}'.format(section), status_code=1, ) if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: - logger.error("Server returned status {0}".format(r.status_code), section) + logger.error('Server returned status {0}'.format(r.status_code), section) return ProcessResult( - message="{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code), + message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code), status_code=1, ) success = False queued = False started = False - if section == "SickBeard": + if section == 'SickBeard': if apikey: if r.json()['result'] == 'success': success = True @@ -307,40 +307,40 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu for line in r.iter_lines(): if line: line = line.decode('utf-8') - logger.postprocess("{0}".format(line), section) - if "Moving file from" in line: + logger.postprocess('{0}'.format(line), section) + if 'Moving file from' in line: input_name = os.path.split(line)[1] - if "added to the queue" in line: + if 'added to the queue' in line: queued = True - if "Processing succeeded" in line or "Successfully processed" in line: + if 'Processing succeeded' in line or 'Successfully processed' in line: success = True if queued: time.sleep(60) - elif section == "NzbDrone": + elif section == 'NzbDrone': try: res = json.loads(r.content) scan_id = int(res['id']) - logger.debug("Scan started with id: {0}".format(scan_id), section) + logger.debug('Scan started with id: {0}'.format(scan_id), section) started = True except Exception as e: - logger.warning("No scan id was returned due to: {0}".format(e), section) + logger.warning('No scan id was returned due to: {0}'.format(e), section) scan_id = None started = False if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name: - logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section) + 
logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section) remove_dir(dir_name) if success: return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) - elif section == "NzbDrone" and started: + elif section == 'NzbDrone' and started: n = 0 params = {} - url = "{0}/{1}".format(url, scan_id) + url = '{0}/{1}'.format(url, scan_id) while n < 6: # set up wait_for minutes to see if command completes.. time.sleep(10 * wait_for) command_status = command_complete(url, params, headers, section) @@ -348,39 +348,39 @@ def process(section, dir_name, input_name=None, failed=False, client_agent="manu break n += 1 if command_status: - logger.debug("The Scan command return status: {0}".format(command_status), section) + logger.debug('The Scan command return status: {0}'.format(command_status), section) if not os.path.exists(dir_name): - logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section) + logger.debug('The directory {0} has been removed. Renaming was successful.'.format(dir_name), section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) elif command_status and command_status in ['completed']: - logger.debug("The Scan command has completed successfully. Renaming was successful.", section) + logger.debug('The Scan command has completed successfully. Renaming was successful.', section) return ProcessResult( - message="{0}: Successfully post-processed {1}".format(section, input_name), + message='{0}: Successfully post-processed {1}'.format(section, input_name), status_code=0, ) elif command_status and command_status in ['failed']: - logger.debug("The Scan command has failed. 
Renaming was not successful.", section) + logger.debug('The Scan command has failed. Renaming was not successful.', section) # return ProcessResult( - # message="{0}: Failed to post-process {1}".format(section, input_name), + # message='{0}: Failed to post-process {1}'.format(section, input_name), # status_code=1, # ) if completed_download_handling(url2, headers, section=section): - logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section) + logger.debug('The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.'.format(section), section) return ProcessResult( - message="{0}: Complete DownLoad Handling is enabled. Passing back to {0}".format(section), + message='{0}: Complete DownLoad Handling is enabled. Passing back to {0}'.format(section), status_code=status, ) else: - logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section) + logger.warning('The Scan command did not return a valid status. Renaming was not successful.', section) return ProcessResult( - message="{0}: Failed to post-process {1}".format(section, input_name), + message='{0}: Failed to post-process {1}'.format(section, input_name), status_code=1, ) else: return ProcessResult( - message="{0}: Failed to post-process - Returned log from {0} was not as expected.".format(section), + message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section), status_code=1, # We did not receive Success confirmation. 
) diff --git a/core/configuration.py b/core/configuration.py index d469ad67..31ea4852 100644 --- a/core/configuration.py +++ b/core/configuration.py @@ -120,7 +120,7 @@ class ConfigObj(configobj.ConfigObj, Section): shutil.copyfile(core.CONFIG_SPEC_FILE, core.CONFIG_FILE) CFG_OLD = config(core.CONFIG_FILE) except Exception as error: - logger.debug("Error {msg} when copying to .cfg".format(msg=error)) + logger.debug('Error {msg} when copying to .cfg'.format(msg=error)) try: # check for autoProcessMedia.cfg.spec and create if it does not exist @@ -128,7 +128,7 @@ class ConfigObj(configobj.ConfigObj, Section): shutil.copyfile(core.CONFIG_FILE, core.CONFIG_SPEC_FILE) CFG_NEW = config(core.CONFIG_SPEC_FILE) except Exception as error: - logger.debug("Error {msg} when copying to .spec".format(msg=error)) + logger.debug('Error {msg} when copying to .spec'.format(msg=error)) # check for autoProcessMedia.cfg and autoProcessMedia.cfg.spec and if they don't exist return and fail if CFG_NEW is None or CFG_OLD is None: @@ -143,7 +143,7 @@ class ConfigObj(configobj.ConfigObj, Section): if CFG_OLD[section].sections: subsections.update({section: CFG_OLD[section].sections}) for option, value in CFG_OLD[section].items(): - if option in ["category", "cpsCategory", "sbCategory", "hpCategory", "mlCategory", "gzCategory", "raCategory", "ndCategory"]: + if option in ['category', 'cpsCategory', 'sbCategory', 'hpCategory', 'mlCategory', 'gzCategory', 'raCategory', 'ndCategory']: if not isinstance(value, list): value = [value] @@ -161,34 +161,34 @@ class ConfigObj(configobj.ConfigObj, Section): if section in ['CouchPotato', 'HeadPhones', 'Gamez', 'Mylar']: if option in ['username', 'password']: values.pop(option) - if section in ["SickBeard", "Mylar"]: - if option == "wait_for": # remove old format + if section in ['SickBeard', 'Mylar']: + if option == 'wait_for': # remove old format values.pop(option) - if section in ["SickBeard", "NzbDrone"]: - if option == "failed_fork": # change this old 
format + if section in ['SickBeard', 'NzbDrone']: + if option == 'failed_fork': # change this old format values['failed'] = 'auto' values.pop(option) - if option == "outputDirectory": # move this to new location format + if option == 'outputDirectory': # move this to new location format CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0] values.pop(option) - if section in ["Torrent"]: - if option in ["compressedExtensions", "mediaExtensions", "metaExtensions", "minSampleSize"]: + if section in ['Torrent']: + if option in ['compressedExtensions', 'mediaExtensions', 'metaExtensions', 'minSampleSize']: CFG_NEW['Extensions'][option] = value values.pop(option) - if option == "useLink": # Sym links supported now as well. + if option == 'useLink': # Sym links supported now as well. if value in ['1', 1]: value = 'hard' elif value in ['0', 0]: value = 'no' values[option] = value - if option == "forceClean": + if option == 'forceClean': CFG_NEW['General']['force_clean'] = value values.pop(option) - if section in ["Transcoder"]: - if option in ["niceness"]: + if section in ['Transcoder']: + if option in ['niceness']: CFG_NEW['Posix'][option] = value values.pop(option) - if option == "remote_path": + if option == 'remote_path': if value and value not in ['0', '1', 0, 1]: value = 1 elif not value: @@ -239,7 +239,7 @@ class ConfigObj(configobj.ConfigObj, Section): process_section(section, subsection) # create a backup of our old config - CFG_OLD.filename = "{config}.old".format(config=core.CONFIG_FILE) + CFG_OLD.filename = '{config}.old'.format(config=core.CONFIG_FILE) CFG_OLD.write() # write our new config to autoProcessMedia.cfg @@ -256,27 +256,27 @@ class ConfigObj(configobj.ConfigObj, Section): try: if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ: if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']: - logger.warning("{x} category is set for SickBeard and Sonarr. 
" - "Please check your config in NZBGet".format + logger.warning('{x} category is set for SickBeard and Sonarr. ' + 'Please check your config in NZBGet'.format (x=os.environ['NZBPO_NDCATEGORY'])) if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ: if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_CPSCATEGORY']: - logger.warning("{x} category is set for CouchPotato and Radarr. " - "Please check your config in NZBGet".format + logger.warning('{x} category is set for CouchPotato and Radarr. ' + 'Please check your config in NZBGet'.format (x=os.environ['NZBPO_RACATEGORY'])) if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ: if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']: - logger.warning("{x} category is set for HeadPhones and Lidarr. " - "Please check your config in NZBGet".format + logger.warning('{x} category is set for HeadPhones and Lidarr. ' + 'Please check your config in NZBGet'.format (x=os.environ['NZBPO_LICATEGORY'])) - section = "Nzb" + section = 'Nzb' key = 'NZBOP_DESTDIR' if key in os.environ: option = 'default_downloadDirectory' value = os.environ[key] cfg_new[section][option] = value - section = "General" + section = 'General' env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED'] cfg_keys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed'] for index in range(len(env_keys)): @@ -286,7 +286,7 @@ class ConfigObj(configobj.ConfigObj, Section): value = os.environ[key] cfg_new[section][option] = value - section = "Network" + section = 'Network' env_keys = ['MOUNTPOINTS'] cfg_keys = ['mount_points'] for index in range(len(env_keys)): @@ -296,7 +296,7 @@ class ConfigObj(configobj.ConfigObj, Section): value = os.environ[key] cfg_new[section][option] = value - section = "CouchPotato" + section = 'CouchPotato' env_cat_key = 'NZBPO_CPSCATEGORY' env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', 
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY'] @@ -315,7 +315,7 @@ class ConfigObj(configobj.ConfigObj, Section): if os.environ[env_cat_key] in cfg_new['Radarr'].sections: cfg_new['Radarr'][env_cat_key]['enabled'] = 0 - section = "SickBeard" + section = 'SickBeard' env_cat_key = 'NZBPO_SBCATEGORY' env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD'] @@ -334,7 +334,7 @@ class ConfigObj(configobj.ConfigObj, Section): if os.environ[env_cat_key] in cfg_new['NzbDrone'].sections: cfg_new['NzbDrone'][env_cat_key]['enabled'] = 0 - section = "HeadPhones" + section = 'HeadPhones' env_cat_key = 'NZBPO_HPCATEGORY' env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED'] cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed'] @@ -351,7 +351,7 @@ class ConfigObj(configobj.ConfigObj, Section): if os.environ[env_cat_key] in cfg_new['Lidarr'].sections: cfg_new['Lidarr'][env_cat_key]['enabled'] = 0 - section = "Mylar" + section = 'Mylar' env_cat_key = 'NZBPO_MYCATEGORY' env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH'] @@ -368,7 +368,7 @@ class ConfigObj(configobj.ConfigObj, Section): cfg_new[section][os.environ[env_cat_key]][option] = value cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1 - section = "Gamez" + section = 'Gamez' env_cat_key = 'NZBPO_GZCATEGORY' env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH'] cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path'] @@ -383,7 +383,7 @@ class ConfigObj(configobj.ConfigObj, Section): cfg_new[section][os.environ[env_cat_key]][option] = value 
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1 - section = "NzbDrone" + section = 'NzbDrone' env_cat_key = 'NZBPO_NDCATEGORY' env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE'] @@ -403,7 +403,7 @@ class ConfigObj(configobj.ConfigObj, Section): if os.environ[env_cat_key] in cfg_new['SickBeard'].sections: cfg_new['SickBeard'][env_cat_key]['enabled'] = 0 - section = "Radarr" + section = 'Radarr' env_cat_key = 'NZBPO_RACATEGORY' env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE'] @@ -423,7 +423,7 @@ class ConfigObj(configobj.ConfigObj, Section): if os.environ[env_cat_key] in cfg_new['CouchPotato'].sections: cfg_new['CouchPotato'][env_cat_key]['enabled'] = 0 - section = "Lidarr" + section = 'Lidarr' env_cat_key = 'NZBPO_LICATEGORY' env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH'] @@ -442,7 +442,7 @@ class ConfigObj(configobj.ConfigObj, Section): if os.environ[env_cat_key] in cfg_new['HeadPhones'].sections: cfg_new['HeadPhones'][env_cat_key]['enabled'] = 0 - section = "Extensions" + section = 'Extensions' env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS'] cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions'] for index in range(len(env_keys)): @@ -452,7 +452,7 @@ class ConfigObj(configobj.ConfigObj, Section): value = os.environ[key] cfg_new[section][option] = value - section = "Posix" + section = 'Posix' env_keys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA'] cfg_keys = ['niceness', 'ionice_class', 'ionice_classdata'] for index in range(len(env_keys)): @@ -462,7 +462,7 @@ 
class ConfigObj(configobj.ConfigObj, Section): value = os.environ[key] cfg_new[section][option] = value - section = "Transcoder" + section = 'Transcoder' env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH', 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES', 'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR', @@ -490,7 +490,7 @@ class ConfigObj(configobj.ConfigObj, Section): value = os.environ[key] cfg_new[section][option] = value - section = "WakeOnLan" + section = 'WakeOnLan' env_keys = ['WAKE', 'HOST', 'PORT', 'MAC'] cfg_keys = ['wake', 'host', 'port', 'mac'] for index in range(len(env_keys)): @@ -500,7 +500,7 @@ class ConfigObj(configobj.ConfigObj, Section): value = os.environ[key] cfg_new[section][option] = value - section = "UserScript" + section = 'UserScript' env_cat_key = 'NZBPO_USCATEGORY' env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE', 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH'] @@ -518,14 +518,14 @@ class ConfigObj(configobj.ConfigObj, Section): cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1 except Exception as error: - logger.debug("Error {msg} when applying NZBGet config".format(msg=error)) + logger.debug('Error {msg} when applying NZBGet config'.format(msg=error)) try: # write our new config to autoProcessMedia.cfg cfg_new.filename = core.CONFIG_FILE cfg_new.write() except Exception as error: - logger.debug("Error {msg} when writing changes to .cfg".format(msg=error)) + logger.debug('Error {msg} when writing changes to .cfg'.format(msg=error)) return cfg_new diff --git a/core/databases.py b/core/databases.py index fce380a8..07a78a4d 100644 --- a/core/databases.py +++ b/core/databases.py @@ -8,11 +8,11 @@ MAX_DB_VERSION = 2 def backup_database(version): - logger.info("Backing up database before upgrade") + logger.info('Backing up database before upgrade') if 
not backup_versioned_file(main_db.db_filename(), version): - logger.log_error_and_exit("Database backup failed, abort upgrading database") + logger.log_error_and_exit('Database backup failed, abort upgrading database') else: - logger.info("Proceeding with upgrade") + logger.info('Proceeding with upgrade') # ====================== @@ -23,17 +23,17 @@ def backup_database(version): class InitialSchema(main_db.SchemaUpgrade): def test(self): no_update = False - if self.has_table("db_version"): + if self.has_table('db_version'): cur_db_version = self.check_db_version() no_update = not cur_db_version < MAX_DB_VERSION return no_update def execute(self): - if not self.has_table("downloads") and not self.has_table("db_version"): + if not self.has_table('downloads') and not self.has_table('db_version'): queries = [ - "CREATE TABLE db_version (db_version INTEGER);", - "CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));", - "INSERT INTO db_version (db_version) VALUES (2);" + 'CREATE TABLE db_version (db_version INTEGER);', + 'CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));', + 'INSERT INTO db_version (db_version) VALUES (2);' ] for query in queries: self.connection.action(query) @@ -42,24 +42,24 @@ class InitialSchema(main_db.SchemaUpgrade): cur_db_version = self.check_db_version() if cur_db_version < MIN_DB_VERSION: - logger.log_error_and_exit(u"Your database version ({current}) is too old to migrate " - u"from what this version of nzbToMedia supports ({min})." 
- u"\nPlease remove nzbtomedia.db file to begin fresh.".format + logger.log_error_and_exit(u'Your database version ({current}) is too old to migrate ' + u'from what this version of nzbToMedia supports ({min}).' + u'\nPlease remove nzbtomedia.db file to begin fresh.'.format (current=cur_db_version, min=MIN_DB_VERSION)) if cur_db_version > MAX_DB_VERSION: - logger.log_error_and_exit(u"Your database version ({current}) has been incremented " - u"past what this version of nzbToMedia supports ({max})." - u"\nIf you have used other forks of nzbToMedia, your database " - u"may be unusable due to their modifications.".format + logger.log_error_and_exit(u'Your database version ({current}) has been incremented ' + u'past what this version of nzbToMedia supports ({max}).' + u'\nIf you have used other forks of nzbToMedia, your database ' + u'may be unusable due to their modifications.'.format (current=cur_db_version, max=MAX_DB_VERSION)) if cur_db_version < MAX_DB_VERSION: # We need to upgrade. queries = [ - "CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));", - "INSERT INTO downloads2 SELECT * FROM downloads;", - "DROP TABLE IF EXISTS downloads;", - "ALTER TABLE downloads2 RENAME TO downloads;", - "INSERT INTO db_version (db_version) VALUES (2);" + 'CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));', + 'INSERT INTO downloads2 SELECT * FROM downloads;', + 'DROP TABLE IF EXISTS downloads;', + 'ALTER TABLE downloads2 RENAME TO downloads;', + 'INSERT INTO db_version (db_version) VALUES (2);' ] for query in queries: self.connection.action(query) diff --git a/core/extractor/__init__.py b/core/extractor/__init__.py index b8aceae5..b1090ea6 100644 
--- a/core/extractor/__init__.py +++ b/core/extractor/__init__.py @@ -16,32 +16,32 @@ def extract(file_path, output_destination): # Using Windows if platform.system() == 'Windows': if not os.path.exists(core.SEVENZIP): - core.logger.error("EXTRACTOR: Could not find 7-zip, Exiting") + core.logger.error('EXTRACTOR: Could not find 7-zip, Exiting') return False wscriptlocation = os.path.join(os.environ['WINDIR'], 'system32', 'wscript.exe') invislocation = os.path.join(core.APP_ROOT, 'core', 'extractor', 'bin', 'invisible.vbs') - cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, "x", "-y"] - ext_7zip = [".rar", ".zip", ".tar.gz", "tgz", ".tar.bz2", ".tbz", ".tar.lzma", ".tlz", ".7z", ".xz"] + cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, 'x', '-y'] + ext_7zip = ['.rar', '.zip', '.tar.gz', 'tgz', '.tar.bz2', '.tbz', '.tar.lzma', '.tlz', '.7z', '.xz'] extract_commands = dict.fromkeys(ext_7zip, cmd_7zip) # Using unix else: - required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"] + required_cmds = ['unrar', 'unzip', 'tar', 'unxz', 'unlzma', '7zr', 'bunzip2'] # ## Possible future suport: # gunzip: gz (cmd will delete original archive) # ## the following do not extract to dest dir - # ".xz": ["xz", "-d --keep"], - # ".lzma": ["xz", "-d --format=lzma --keep"], - # ".bz2": ["bzip2", "-d --keep"], + # '.xz': ['xz', '-d --keep'], + # '.lzma': ['xz', '-d --format=lzma --keep'], + # '.bz2': ['bzip2', '-d --keep'], extract_commands = { - ".rar": ["unrar", "x", "-o+", "-y"], - ".tar": ["tar", "-xf"], - ".zip": ["unzip"], - ".tar.gz": ["tar", "-xzf"], ".tgz": ["tar", "-xzf"], - ".tar.bz2": ["tar", "-xjf"], ".tbz": ["tar", "-xjf"], - ".tar.lzma": ["tar", "--lzma", "-xf"], ".tlz": ["tar", "--lzma", "-xf"], - ".tar.xz": ["tar", "--xz", "-xf"], ".txz": ["tar", "--xz", "-xf"], - ".7z": ["7zr", "x"], + '.rar': ['unrar', 'x', '-o+', '-y'], + '.tar': ['tar', '-xf'], + '.zip': ['unzip'], + '.tar.gz': 
['tar', '-xzf'], '.tgz': ['tar', '-xzf'], + '.tar.bz2': ['tar', '-xjf'], '.tbz': ['tar', '-xjf'], + '.tar.lzma': ['tar', '--lzma', '-xf'], '.tlz': ['tar', '--lzma', '-xf'], + '.tar.xz': ['tar', '--xz', '-xf'], '.txz': ['tar', '--xz', '-xf'], + '.7z': ['7zr', 'x'], } # Test command exists and if not, remove if not os.getenv('TR_TORRENT_DIR'): @@ -51,39 +51,39 @@ def extract(file_path, output_destination): stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist. for k, v in extract_commands.items(): if cmd in v[0]: - if not call(["which", "7zr"], stdout=devnull, stderr=devnull): # we do have "7zr" - extract_commands[k] = ["7zr", "x", "-y"] - elif not call(["which", "7z"], stdout=devnull, stderr=devnull): # we do have "7z" - extract_commands[k] = ["7z", "x", "-y"] - elif not call(["which", "7za"], stdout=devnull, stderr=devnull): # we do have "7za" - extract_commands[k] = ["7za", "x", "-y"] + if not call(['which', '7zr'], stdout=devnull, stderr=devnull): # we do have '7zr' + extract_commands[k] = ['7zr', 'x', '-y'] + elif not call(['which', '7z'], stdout=devnull, stderr=devnull): # we do have '7z' + extract_commands[k] = ['7z', 'x', '-y'] + elif not call(['which', '7za'], stdout=devnull, stderr=devnull): # we do have '7za' + extract_commands[k] = ['7za', 'x', '-y'] else: - core.logger.error("EXTRACTOR: {cmd} not found, " - "disabling support for {feature}".format + core.logger.error('EXTRACTOR: {cmd} not found, ' + 'disabling support for {feature}'.format (cmd=cmd, feature=k)) del extract_commands[k] devnull.close() else: - core.logger.warning("EXTRACTOR: Cannot determine which tool to use when called from Transmission") + core.logger.warning('EXTRACTOR: Cannot determine which tool to use when called from Transmission') if not extract_commands: - core.logger.warning("EXTRACTOR: No archive extracting programs found, plugin will be disabled") + core.logger.warning('EXTRACTOR: No archive extracting programs found, plugin will be disabled') ext = 
os.path.splitext(file_path) cmd = [] - if ext[1] in (".gz", ".bz2", ".lzma"): + if ext[1] in ('.gz', '.bz2', '.lzma'): # Check if this is a tar - if os.path.splitext(ext[0])[1] == ".tar": - cmd = extract_commands[".tar{ext}".format(ext=ext[1])] - elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"): + if os.path.splitext(ext[0])[1] == '.tar': + cmd = extract_commands['.tar{ext}'.format(ext=ext[1])] + elif ext[1] in ('.1', '.01', '.001') and os.path.splitext(ext[0])[1] in ('.rar', '.zip', '.7z'): cmd = extract_commands[os.path.splitext(ext[0])[1]] - elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives. + elif ext[1] in ('.cb7', '.cba', '.cbr', '.cbt', '.cbz'): # don't extract these comic book archives. return False else: if ext[1] in extract_commands: cmd = extract_commands[ext[1]] else: - core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format + core.logger.debug('EXTRACTOR: Unknown file type: {ext}'.format (ext=ext[1])) return False @@ -95,9 +95,9 @@ def extract(file_path, output_destination): else: passwords = [] - core.logger.info("Extracting {file} to {destination}".format + core.logger.info('Extracting {file} to {destination}'.format (file=file_path, destination=output_destination)) - core.logger.debug("Extracting {cmd} {file} {destination}".format + core.logger.debug('Extracting {cmd} {file} {destination}'.format (cmd=cmd, file=file_path, destination=output_destination)) orig_files = [] @@ -121,35 +121,35 @@ def extract(file_path, output_destination): else: cmd = core.NICENESS + cmd cmd2 = cmd - cmd2.append("-p-") # don't prompt for password. + cmd2.append('-p-') # don't prompt for password. p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. res = p.wait() if res == 0: # Both Linux and Windows return 0 for successful. 
- core.logger.info("EXTRACTOR: Extraction was successful for {file} to {destination}".format + core.logger.info('EXTRACTOR: Extraction was successful for {file} to {destination}'.format (file=file_path, destination=output_destination)) success = 1 elif len(passwords) > 0: - core.logger.info("EXTRACTOR: Attempting to extract with passwords") + core.logger.info('EXTRACTOR: Attempting to extract with passwords') for password in passwords: - if password == "": # if edited in windows or otherwise if blank lines. + if password == '': # if edited in windows or otherwise if blank lines. continue cmd2 = cmd # append password here. - passcmd = "-p{pwd}".format(pwd=password) + passcmd = '-p{pwd}'.format(pwd=password) cmd2.append(passcmd) p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine. res = p.wait() if (res >= 0 and platform == 'Windows') or res == 0: - core.logger.info("EXTRACTOR: Extraction was successful " - "for {file} to {destination} using password: {pwd}".format + core.logger.info('EXTRACTOR: Extraction was successful ' + 'for {file} to {destination} using password: {pwd}'.format (file=file_path, destination=output_destination, pwd=password)) success = 1 break else: continue except Exception: - core.logger.error("EXTRACTOR: Extraction failed for {file}. " - "Could not call command {cmd}".format + core.logger.error('EXTRACTOR: Extraction failed for {file}. ' + 'Could not call command {cmd}'.format (file=file_path, cmd=cmd)) os.chdir(pwd) return False @@ -175,7 +175,7 @@ def extract(file_path, output_destination): pass return True else: - core.logger.error("EXTRACTOR: Extraction failed for {file}. " - "Result was {result}".format + core.logger.error('EXTRACTOR: Extraction failed for {file}. 
' + 'Result was {result}'.format (file=file_path, result=res)) return False diff --git a/core/forks.py b/core/forks.py index 5cf4e5f1..f1c73cf4 100644 --- a/core/forks.py +++ b/core/forks.py @@ -13,59 +13,59 @@ def auto_fork(section, input_category): cfg = dict(core.CFG[section][input_category]) - host = cfg.get("host") - port = cfg.get("port") - username = cfg.get("username") - password = cfg.get("password") - apikey = cfg.get("apikey") - ssl = int(cfg.get("ssl", 0)) - web_root = cfg.get("web_root", "") + host = cfg.get('host') + port = cfg.get('port') + username = cfg.get('username') + password = cfg.get('password') + apikey = cfg.get('apikey') + ssl = int(cfg.get('ssl', 0)) + web_root = cfg.get('web_root', '') replace = {'sickrage': 'SickRage', 'sickchill': 'SickChill', 'sickgear': 'SickGear', 'medusa': 'Medusa', 'sickbeard-api': 'SickBeard-api'} - f1 = replace[cfg.get("fork", "auto")] if cfg.get("fork", "auto") in replace else cfg.get("fork", "auto") + f1 = replace[cfg.get('fork', 'auto')] if cfg.get('fork', 'auto') in replace else cfg.get('fork', 'auto') try: fork = f1, core.FORKS[f1] except KeyError: - fork = "auto" - protocol = "https://" if ssl else "http://" + fork = 'auto' + protocol = 'https://' if ssl else 'http://' detected = False - if section == "NzbDrone": - logger.info("Attempting to verify {category} fork".format + if section == 'NzbDrone': + logger.info('Attempting to verify {category} fork'.format (category=input_category)) - url = "{protocol}{host}:{port}{root}/api/rootfolder".format( + url = '{protocol}{host}:{port}{root}/api/rootfolder'.format( protocol=protocol, host=host, port=port, root=web_root) - headers = {"X-Api-Key": apikey} + headers = {'X-Api-Key': apikey} try: r = requests.get(url, headers=headers, stream=True, verify=False) except requests.ConnectionError: - logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, input_category)) + logger.warning('Could not connect to {0}:{1} to verify fork!'.format(section, 
input_category)) if not r.ok: - logger.warning("Connection to {section}:{category} failed! " - "Check your configuration".format + logger.warning('Connection to {section}:{category} failed! ' + 'Check your configuration'.format (section=section, category=input_category)) fork = ['default', {}] - elif fork == "auto": + elif fork == 'auto': params = core.ALL_FORKS rem_params = [] - logger.info("Attempting to auto-detect {category} fork".format(category=input_category)) + logger.info('Attempting to auto-detect {category} fork'.format(category=input_category)) # define the order to test. Default must be first since the default fork doesn't reject parameters. # then in order of most unique parameters. if apikey: - url = "{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess".format( + url = '{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess'.format( protocol=protocol, host=host, port=port, root=web_root, apikey=apikey) else: - url = "{protocol}{host}:{port}{root}/home/postprocess/".format( + url = '{protocol}{host}:{port}{root}/home/postprocess/'.format( protocol=protocol, host=host, port=port, root=web_root) # attempting to auto-detect fork try: s = requests.Session() if not apikey and username and password: - login = "{protocol}{host}:{port}{root}/login".format( + login = '{protocol}{host}:{port}{root}/login'.format( protocol=protocol, host=host, port=port, root=web_root) login_params = {'username': username, 'password': password} r = s.get(login, verify=False, timeout=(30, 60)) @@ -74,7 +74,7 @@ def auto_fork(section, input_category): s.post(login, data=login_params, stream=True, verify=False) r = s.get(url, auth=(username, password), verify=False) except requests.ConnectionError: - logger.info("Could not connect to {section}:{category} to perform auto-fork detection!".format + logger.info('Could not connect to {section}:{category} to perform auto-fork detection!'.format (section=section, category=input_category)) r = [] if r 
and r.ok: @@ -98,17 +98,17 @@ def auto_fork(section, input_category): detected = True break if detected: - logger.info("{section}:{category} fork auto-detection successful ...".format + logger.info('{section}:{category} fork auto-detection successful ...'.format (section=section, category=input_category)) elif rem_params: - logger.info("{section}:{category} fork auto-detection found custom params {params}".format + logger.info('{section}:{category} fork auto-detection found custom params {params}'.format (section=section, category=input_category, params=params)) fork = ['custom', params] else: - logger.info("{section}:{category} fork auto-detection failed".format + logger.info('{section}:{category} fork auto-detection failed'.format (section=section, category=input_category)) fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)] - logger.info("{section}:{category} fork set to {fork}".format + logger.info('{section}:{category} fork set to {fork}'.format (section=section, category=input_category, fork=fork[0])) return fork[0], fork[1] diff --git a/core/logger.py b/core/logger.py index 1e3881ed..3305a96e 100644 --- a/core/logger.py +++ b/core/logger.py @@ -193,9 +193,9 @@ class NTMRotatingLogHandler(object): self.writes_since_check += 1 try: - message = u"{0}: {1}".format(section.upper(), to_log) + message = u'{0}: {1}'.format(section.upper(), to_log) except UnicodeError: - message = u"{0}: Message contains non-utf-8 string".format(section.upper()) + message = u'{0}: Message contains non-utf-8 string'.format(section.upper()) out_line = message diff --git a/core/main_db.py b/core/main_db.py index a39b3a82..6d2b6b95 100644 --- a/core/main_db.py +++ b/core/main_db.py @@ -12,7 +12,7 @@ import core from core import logger -def db_filename(filename="nzbtomedia.db", suffix=None): +def db_filename(filename='nzbtomedia.db', suffix=None): """ @param filename: The sqlite database filename to use. 
If not specified, will be made to be nzbtomedia.db @@ -21,16 +21,16 @@ def db_filename(filename="nzbtomedia.db", suffix=None): @return: the correct location of the database file. """ if suffix: - filename = "{0}.{1}".format(filename, suffix) + filename = '{0}.{1}'.format(filename, suffix) return core.os.path.join(core.APP_ROOT, filename) class DBConnection(object): - def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None): + def __init__(self, filename='nzbtomedia.db', suffix=None, row_type=None): self.filename = filename self.connection = sqlite3.connect(db_filename(filename), 20) - if row_type == "dict": + if row_type == 'dict': self.connection.row_factory = self._dict_factory else: self.connection.row_factory = sqlite3.Row @@ -38,13 +38,13 @@ class DBConnection(object): def check_db_version(self): result = None try: - result = self.select("SELECT db_version FROM db_version") + result = self.select('SELECT db_version FROM db_version') except sqlite3.OperationalError as e: - if "no such table: db_version" in e.args[0]: + if 'no such table: db_version' in e.args[0]: return 0 if result: - return int(result[0]["db_version"]) + return int(result[0]['db_version']) else: return 0 @@ -58,12 +58,12 @@ class DBConnection(object): while attempt < 5: try: if args is None: - logger.log("{name}: {query}".format(name=self.filename, query=query), logger.DB) + logger.log('{name}: {query}'.format(name=self.filename, query=query), logger.DB) cursor = self.connection.cursor() cursor.execute(query) sql_result = cursor.fetchone()[0] else: - logger.log("{name}: {query} with args {args}".format + logger.log('{name}: {query} with args {args}'.format (name=self.filename, query=query, args=args), logger.DB) cursor = self.connection.cursor() cursor.execute(query, args) @@ -72,15 +72,15 @@ class DBConnection(object): # get out of the connection attempt loop since we were successful break except sqlite3.OperationalError as error: - if "unable to open database file" in 
error.args[0] or "database is locked" in error.args[0]: - logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING) + if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]: + logger.log(u'DB error: {msg}'.format(msg=error), logger.WARNING) attempt += 1 time.sleep(1) else: - logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR) + logger.log(u'DB error: {msg}'.format(msg=error), logger.ERROR) raise except sqlite3.DatabaseError as error: - logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) + logger.log(u'Fatal error executing query: {msg}'.format(msg=error), logger.ERROR) raise return sql_result @@ -101,26 +101,26 @@ class DBConnection(object): sql_result.append(self.connection.execute(qu[0])) elif len(qu) > 1: if log_transaction: - logger.log(u"{query} with args {args}".format(query=qu[0], args=qu[1]), logger.DEBUG) + logger.log(u'{query} with args {args}'.format(query=qu[0], args=qu[1]), logger.DEBUG) sql_result.append(self.connection.execute(qu[0], qu[1])) self.connection.commit() - logger.log(u"Transaction with {x} query's executed".format(x=len(querylist)), logger.DEBUG) + logger.log(u'Transaction with {x} query\'s executed'.format(x=len(querylist)), logger.DEBUG) return sql_result except sqlite3.OperationalError as error: sql_result = [] if self.connection: self.connection.rollback() - if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]: - logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING) + if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]: + logger.log(u'DB error: {msg}'.format(msg=error), logger.WARNING) attempt += 1 time.sleep(1) else: - logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR) + logger.log(u'DB error: {msg}'.format(msg=error), logger.ERROR) raise except sqlite3.DatabaseError as error: if self.connection: self.connection.rollback() - logger.log(u"Fatal error 
executing query: {msg}".format(msg=error), logger.ERROR) + logger.log(u'Fatal error executing query: {msg}'.format(msg=error), logger.ERROR) raise return sql_result @@ -135,25 +135,25 @@ class DBConnection(object): while attempt < 5: try: if args is None: - logger.log(u"{name}: {query}".format(name=self.filename, query=query), logger.DB) + logger.log(u'{name}: {query}'.format(name=self.filename, query=query), logger.DB) sql_result = self.connection.execute(query) else: - logger.log(u"{name}: {query} with args {args}".format + logger.log(u'{name}: {query} with args {args}'.format (name=self.filename, query=query, args=args), logger.DB) sql_result = self.connection.execute(query, args) self.connection.commit() # get out of the connection attempt loop since we were successful break except sqlite3.OperationalError as error: - if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]: - logger.log(u"DB error: {msg}".format(msg=error), logger.WARNING) + if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]: + logger.log(u'DB error: {msg}'.format(msg=error), logger.WARNING) attempt += 1 time.sleep(1) else: - logger.log(u"DB error: {msg}".format(msg=error), logger.ERROR) + logger.log(u'DB error: {msg}'.format(msg=error), logger.ERROR) raise except sqlite3.DatabaseError as error: - logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) + logger.log(u'Fatal error executing query: {msg}'.format(msg=error), logger.ERROR) raise return sql_result @@ -171,37 +171,37 @@ class DBConnection(object): def gen_params(my_dict): return [ - "{key} = ?".format(key=k) + '{key} = ?'.format(key=k) for k in my_dict.keys() ] changes_before = self.connection.total_changes items = list(value_dict.values()) + list(key_dict.values()) self.action( - "UPDATE {table} " - "SET {params} " - "WHERE {conditions}".format( + 'UPDATE {table} ' + 'SET {params} ' + 'WHERE {conditions}'.format( table=table_name, - 
params=", ".join(gen_params(value_dict)), - conditions=" AND ".join(gen_params(key_dict)) + params=', '.join(gen_params(value_dict)), + conditions=' AND '.join(gen_params(key_dict)) ), items ) if self.connection.total_changes == changes_before: self.action( - "INSERT OR IGNORE INTO {table} ({columns}) " - "VALUES ({values})".format( + 'INSERT OR IGNORE INTO {table} ({columns}) ' + 'VALUES ({values})'.format( table=table_name, - columns=", ".join(map(text_type, value_dict.keys())), - values=", ".join(["?"] * len(value_dict.values())) + columns=', '.join(map(text_type, value_dict.keys())), + values=', '.join(['?'] * len(value_dict.values())) ), list(value_dict.values()) ) def table_info(self, table_name): # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually - cursor = self.connection.execute("PRAGMA table_info({0})".format(table_name)) + cursor = self.connection.execute('PRAGMA table_info({0})'.format(table_name)) columns = {} for column in cursor: columns[column['name']] = {'type': column['type']} @@ -232,31 +232,31 @@ class DBSanityCheck(object): # =============== def upgrade_database(connection, schema): - logger.log(u"Checking database structure...", logger.MESSAGE) + logger.log(u'Checking database structure...', logger.MESSAGE) _process_upgrade(connection, schema) def pretty_name(class_name): - return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)]) + return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)]) def _process_upgrade(connection, upgrade_class): instance = upgrade_class(connection) - logger.log(u"Checking {name} database upgrade".format + logger.log(u'Checking {name} database upgrade'.format (name=pretty_name(upgrade_class.__name__)), logger.DEBUG) if not instance.test(): - logger.log(u"Database upgrade required: {name}".format + logger.log(u'Database upgrade required: {name}'.format (name=pretty_name(upgrade_class.__name__)), logger.MESSAGE) try: 
instance.execute() except sqlite3.DatabaseError as error: - print(u"Error in {name}: {msg}".format + print(u'Error in {name}: {msg}'.format (name=upgrade_class.__name__, msg=error)) raise - logger.log(u"{name} upgrade completed".format + logger.log(u'{name} upgrade completed'.format (name=upgrade_class.__name__), logger.DEBUG) else: - logger.log(u"{name} upgrade not required".format + logger.log(u'{name} upgrade not required'.format (name=upgrade_class.__name__), logger.DEBUG) for upgradeSubClass in upgrade_class.__subclasses__(): @@ -269,23 +269,23 @@ class SchemaUpgrade(object): self.connection = connection def has_table(self, table_name): - return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (table_name,)).fetchall()) > 0 + return len(self.connection.action('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,)).fetchall()) > 0 def has_column(self, table_name, column): return column in self.connection.table_info(table_name) - def add_column(self, table, column, data_type="NUMERIC", default=0): - self.connection.action("ALTER TABLE {0} ADD {1} {2}".format(table, column, data_type)) - self.connection.action("UPDATE {0} SET {1} = ?".format(table, column), (default,)) + def add_column(self, table, column, data_type='NUMERIC', default=0): + self.connection.action('ALTER TABLE {0} ADD {1} {2}'.format(table, column, data_type)) + self.connection.action('UPDATE {0} SET {1} = ?'.format(table, column), (default,)) def check_db_version(self): - result = self.connection.select("SELECT db_version FROM db_version") + result = self.connection.select('SELECT db_version FROM db_version') if result: - return int(result[-1]["db_version"]) + return int(result[-1]['db_version']) else: return 0 def inc_db_version(self): new_version = self.check_db_version() + 1 - self.connection.action("UPDATE db_version SET db_version = ?", [new_version]) + self.connection.action('UPDATE db_version SET db_version = ?', [new_version]) return new_version diff --git 
a/core/scene_exceptions.py b/core/scene_exceptions.py index 0c888953..5d830012 100644 --- a/core/scene_exceptions.py +++ b/core/scene_exceptions.py @@ -10,22 +10,22 @@ import core from core import logger from core.utils import list_media_files -reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.", - r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.", - r"\.ld[.-]?bew\.", r"\.pir.?(dov|dvd|bew|db|rb)\.", r"\brdvd\.", r"\.vts\.", r"\.reneercs\.", - r"\.dcv\.", r"\b(pir|mac)dh\b", r"\.reporp\.", r"\.kcaper\.", - r"\.lanretni\.", r"\b3ca\b", r"\.cstn\."] +reverse_list = [r'\.\d{2}e\d{2}s\.', r'\.[pi]0801\.', r'\.p027\.', r'\.[pi]675\.', r'\.[pi]084\.', r'\.p063\.', + r'\b[45]62[xh]\.', r'\.yarulb\.', r'\.vtd[hp]\.', + r'\.ld[.-]?bew\.', r'\.pir.?(dov|dvd|bew|db|rb)\.', r'\brdvd\.', r'\.vts\.', r'\.reneercs\.', + r'\.dcv\.', r'\b(pir|mac)dh\b', r'\.reporp\.', r'\.kcaper\.', + r'\.lanretni\.', r'\b3ca\b', r'\.cstn\.'] reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE) -season_pattern = re.compile(r"(.*\.\d{2}e\d{2}s\.)(.*)", flags=re.IGNORECASE) -word_pattern = re.compile(r"([^A-Z0-9]*[A-Z0-9]+)") -media_list = [r"\.s\d{2}e\d{2}\.", r"\.1080[pi]\.", r"\.720p\.", r"\.576[pi]", r"\.480[pi]\.", r"\.360p\.", - r"\.[xh]26[45]\b", r"\.bluray\.", r"\.[hp]dtv\.", - r"\.web[.-]?dl\.", r"\.(vod|dvd|web|bd|br).?rip\.", r"\.dvdr\b", r"\.stv\.", r"\.screener\.", r"\.vcd\.", - r"\bhd(cam|rip)\b", r"\.proper\.", r"\.repack\.", - r"\.internal\.", r"\bac3\b", r"\.ntsc\.", r"\.pal\.", r"\.secam\.", r"\bdivx\b", r"\bxvid\b"] +season_pattern = re.compile(r'(.*\.\d{2}e\d{2}s\.)(.*)', flags=re.IGNORECASE) +word_pattern = re.compile(r'([^A-Z0-9]*[A-Z0-9]+)') +media_list = [r'\.s\d{2}e\d{2}\.', r'\.1080[pi]\.', r'\.720p\.', r'\.576[pi]', r'\.480[pi]\.', r'\.360p\.', + r'\.[xh]26[45]\b', r'\.bluray\.', r'\.[hp]dtv\.', + r'\.web[.-]?dl\.', r'\.(vod|dvd|web|bd|br).?rip\.', r'\.dvdr\b', r'\.stv\.', r'\.screener\.', 
r'\.vcd\.', + r'\bhd(cam|rip)\b', r'\.proper\.', r'\.repack\.', + r'\.internal\.', r'\bac3\b', r'\.ntsc\.', r'\.pal\.', r'\.secam\.', r'\bdivx\b', r'\bxvid\b'] media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE) -garbage_name = re.compile(r"^[a-zA-Z0-9]*$") -char_replace = [[r"(\w)1\.(\w)", r"\1i\2"] +garbage_name = re.compile(r'^[a-zA-Z0-9]*$') +char_replace = [[r'(\w)1\.(\w)', r'\1i\2'] ] @@ -67,26 +67,26 @@ def strip_groups(filename): def rename_file(filename, newfile_path): if os.path.isfile(newfile_path): - newfile_path = os.path.splitext(newfile_path)[0] + ".NTM" + os.path.splitext(newfile_path)[1] - logger.debug("Replacing file name {old} with download name {new}".format - (old=filename, new=newfile_path), "EXCEPTION") + newfile_path = os.path.splitext(newfile_path)[0] + '.NTM' + os.path.splitext(newfile_path)[1] + logger.debug('Replacing file name {old} with download name {new}'.format + (old=filename, new=newfile_path), 'EXCEPTION') try: os.rename(filename, newfile_path) except Exception as error: - logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION") + logger.error('Unable to rename file due to: {error}'.format(error=error), 'EXCEPTION') def replace_filename(filename, dirname, name): head, file_extension = os.path.splitext(os.path.basename(filename)) if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None: newname = os.path.basename(dirname).replace(' ', '.') - logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION") + logger.debug('Replacing file name {old} with directory name {new}'.format(old=head, new=newname), 'EXCEPTION') elif media_pattern.search(name.replace(' ', '.').lower()) is not None: newname = name.replace(' ', '.') - logger.debug("Replacing file name {old} with download name {new}".format - (old=head, new=newname), "EXCEPTION") + logger.debug('Replacing file name {old} with download name {new}'.format + 
(old=head, new=newname), 'EXCEPTION') else: - logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION") + logger.warning('No name replacement determined for {name}'.format(name=head), 'EXCEPTION') newname = name newfile = newname + file_extension newfile_path = os.path.join(dirname, newfile) @@ -99,11 +99,11 @@ def reverse_filename(filename, dirname, name): if na_parts is not None: word_p = word_pattern.findall(na_parts.group(2)) if word_p: - new_words = "" + new_words = '' for wp in word_p: - if wp[0] == ".": - new_words += "." - new_words += re.sub(r"\W", "", wp) + if wp[0] == '.': + new_words += '.' + new_words += re.sub(r'\W', '', wp) else: new_words = na_parts.group(2) for cr in char_replace: @@ -112,15 +112,15 @@ def reverse_filename(filename, dirname, name): else: newname = head[::-1].title() newname = newname.replace(' ', '.') - logger.debug("Reversing filename {old} to {new}".format - (old=head, new=newname), "EXCEPTION") + logger.debug('Reversing filename {old} to {new}'.format + (old=head, new=newname), 'EXCEPTION') newfile = newname + file_extension newfile_path = os.path.join(dirname, newfile) return newfile_path def rename_script(dirname): - rename_file = "" + rename_file = '' for directory, directories, files in os.walk(dirname): for file in files: if re.search(r'(rename\S*\.(sh|bat)$)', file, re.IGNORECASE): @@ -139,23 +139,23 @@ def rename_script(dirname): dest = os.path.join(dirname, cmd[1].split('\\')[-1].split('/')[-1]) if os.path.isfile(dest): continue - logger.debug("Renaming file {source} to {destination}".format - (source=orig, destination=dest), "EXCEPTION") + logger.debug('Renaming file {source} to {destination}'.format + (source=orig, destination=dest), 'EXCEPTION') try: os.rename(orig, dest) except Exception as error: - logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION") + logger.error('Unable to rename file due to: {error}'.format(error=error), 'EXCEPTION') def 
par2(dirname): newlist = [] sofar = 0 - parfile = "" + parfile = '' objects = [] if os.path.exists(dirname): objects = os.listdir(dirname) for item in objects: - if item.endswith(".par2"): + if item.endswith('.par2'): size = os.path.getsize(os.path.join(dirname, item)) if size > sofar: sofar = size @@ -167,20 +167,20 @@ def par2(dirname): bitbucket = open('NUL') else: bitbucket = open('/dev/null') - logger.info("Running par2 on file {0}.".format(parfile), "PAR2") - command = [core.PAR2CMD, 'r', parfile, "*"] - cmd = "" + logger.info('Running par2 on file {0}.'.format(parfile), 'PAR2') + command = [core.PAR2CMD, 'r', parfile, '*'] + cmd = '' for item in command: - cmd = "{cmd} {item}".format(cmd=cmd, item=item) - logger.debug("calling command:{0}".format(cmd), "PAR2") + cmd = '{cmd} {item}'.format(cmd=cmd, item=item) + logger.debug('calling command:{0}'.format(cmd), 'PAR2') try: proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) proc.communicate() result = proc.returncode except Exception: - logger.error("par2 file processing for {0} has failed".format(parfile), "PAR2") + logger.error('par2 file processing for {0} has failed'.format(parfile), 'PAR2') if result == 0: - logger.info("par2 file processing succeeded", "PAR2") + logger.info('par2 file processing succeeded', 'PAR2') os.chdir(pwd) bitbucket.close() diff --git a/core/transcoder.py b/core/transcoder.py index 08ed6077..38973a33 100644 --- a/core/transcoder.py +++ b/core/transcoder.py @@ -26,18 +26,18 @@ def is_video_good(videofile, status): disable = True else: test_details, res = get_video_details(core.TEST_FILE) - if res != 0 or test_details.get("error"): + if res != 0 or test_details.get('error'): disable = True - logger.info("DISABLED: ffprobe failed to analyse test file. 
Stopping corruption check.", 'TRANSCODER') - if test_details.get("streams"): - vid_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"] - aud_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"] + logger.info('DISABLED: ffprobe failed to analyse test file. Stopping corruption check.', 'TRANSCODER') + if test_details.get('streams'): + vid_streams = [item for item in test_details['streams'] if 'codec_type' in item and item['codec_type'] == 'video'] + aud_streams = [item for item in test_details['streams'] if 'codec_type' in item and item['codec_type'] == 'audio'] if not (len(vid_streams) > 0 and len(aud_streams) > 0): disable = True - logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.", + logger.info('DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.', 'TRANSCODER') if disable: - if status: # if the download was "failed", assume bad. If it was successful, assume good. + if status: # if the download was 'failed', assume bad. If it was successful, assume good. 
return False else: return True @@ -46,20 +46,20 @@ def is_video_good(videofile, status): video_details, result = get_video_details(videofile) if result != 0: - logger.error("FAILED: [{0}] is corrupted!".format(file_name_ext), 'TRANSCODER') + logger.error('FAILED: [{0}] is corrupted!'.format(file_name_ext), 'TRANSCODER') return False - if video_details.get("error"): - logger.info("FAILED: [{0}] returned error [{1}].".format(file_name_ext, video_details.get("error")), 'TRANSCODER') + if video_details.get('error'): + logger.info('FAILED: [{0}] returned error [{1}].'.format(file_name_ext, video_details.get('error')), 'TRANSCODER') return False - if video_details.get("streams"): - video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"] - audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] + if video_details.get('streams'): + video_streams = [item for item in video_details['streams'] if item['codec_type'] == 'video'] + audio_streams = [item for item in video_details['streams'] if item['codec_type'] == 'audio'] if len(video_streams) > 0 and len(audio_streams) > 0: - logger.info("SUCCESS: [{0}] has no corruption.".format(file_name_ext), 'TRANSCODER') + logger.info('SUCCESS: [{0}] has no corruption.'.format(file_name_ext), 'TRANSCODER') return True else: - logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. " - "Assume corruption.".format + logger.info('FAILED: [{0}] has {1} video streams and {2} audio streams. 
' + 'Assume corruption.'.format (file_name_ext, len(video_streams), len(audio_streams)), 'TRANSCODER') return False @@ -70,7 +70,7 @@ def zip_out(file, img, bitbucket): try: procin = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) except Exception: - logger.error("Extracting [{0}] has failed".format(file), 'TRANSCODER') + logger.error('Extracting [{0}] has failed'.format(file), 'TRANSCODER') return procin @@ -114,7 +114,7 @@ def get_video_details(videofile, img=None, bitbucket=None): result = proc.returncode video_details = json.loads(out) except Exception: - logger.error("Checking [{0}] has failed".format(file), 'TRANSCODER') + logger.error('Checking [{0}] has failed'.format(file), 'TRANSCODER') return video_details, result @@ -126,13 +126,13 @@ def build_commands(file, new_dir, movie_name, bitbucket): video_details, result = get_video_details(file) directory, name = os.path.split(file) name, ext = os.path.splitext(name) - check = re.match("VTS_([0-9][0-9])_[0-9]+", name) + check = re.match('VTS_([0-9][0-9])_[0-9]+', name) if check and core.CONCAT: name = movie_name elif check: name = ('{0}.cd{1}'.format(movie_name, check.groups()[0])) - elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name): - name = re.sub('([ ._=:-]+[cC][dD][0-9])', "", name) + elif core.CONCAT and re.match('(.+)[cC][dD][0-9]', name): + name = re.sub('([ ._=:-]+[cC][dD][0-9])', '', name) if ext == core.VEXTENSION and new_dir == directory: # we need to change the name to prevent overwriting itself. core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext' else: @@ -153,7 +153,7 @@ def build_commands(file, new_dir, movie_name, bitbucket): other_cmd = [] if not video_details or not video_details.get( - "streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding. + 'streams'): # we couldn't read streams with ffprobe. Set defaults to try transcoding. 
video_streams = [] audio_streams = [] sub_streams = [] @@ -203,19 +203,19 @@ def build_commands(file, new_dir, movie_name, bitbucket): other_cmd.extend(['-movflags', '+faststart']) else: - video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"] - audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] - sub_streams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"] + video_streams = [item for item in video_details['streams'] if item['codec_type'] == 'video'] + audio_streams = [item for item in video_details['streams'] if item['codec_type'] == 'audio'] + sub_streams = [item for item in video_details['streams'] if item['codec_type'] == 'subtitle'] if core.VEXTENSION not in ['.mkv', '.mpegts']: - sub_streams = [item for item in video_details["streams"] if - item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ - "codec_name"] != "pgssub"] + sub_streams = [item for item in video_details['streams'] if + item['codec_type'] == 'subtitle' and item['codec_name'] != 'hdmv_pgs_subtitle' and item[ + 'codec_name'] != 'pgssub'] for video in video_streams: - codec = video["codec_name"] - fr = video.get("avg_frame_rate", 0) - width = video.get("width", 0) - height = video.get("height", 0) + codec = video['codec_name'] + fr = video.get('avg_frame_rate', 0) + width = video.get('width', 0) + height = video.get('height', 0) scale = core.VRESOLUTION if codec in core.VCODEC_ALLOW or not core.VCODEC: video_cmd.extend(['-c:v', 'copy']) @@ -227,14 +227,14 @@ def build_commands(file, new_dir, movie_name, bitbucket): w_scale = width / float(scale.split(':')[0]) h_scale = height / float(scale.split(':')[1]) if w_scale > h_scale: # widescreen, Scale by width only. 
- scale = "{width}:{height}".format( + scale = '{width}:{height}'.format( width=scale.split(':')[0], height=int((height / w_scale) / 2) * 2, ) if w_scale > 1: video_cmd.extend(['-vf', 'scale={width}'.format(width=scale)]) else: # lower or matching ratio, scale by height only. - scale = "{width}:{height}".format( + scale = '{width}:{height}'.format( width=int((width / h_scale) / 2) * 2, height=scale.split(':')[1], ) @@ -253,7 +253,7 @@ def build_commands(file, new_dir, movie_name, bitbucket): video_cmd[1] = core.VCODEC if core.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding. video_cmd = ['-c:v', 'copy'] - map_cmd.extend(['-map', '0:{index}'.format(index=video["index"])]) + map_cmd.extend(['-map', '0:{index}'.format(index=video['index'])]) break # Only one video needed used_audio = 0 @@ -262,51 +262,51 @@ def build_commands(file, new_dir, movie_name, bitbucket): if audio_streams: for i, val in reversed(list(enumerate(audio_streams))): try: - if "Commentary" in val.get("tags").get("title"): # Split out commentry tracks. + if 'Commentary' in val.get('tags').get('title'): # Split out commentry tracks. commentary.append(val) del audio_streams[i] except Exception: continue try: - audio1 = [item for item in audio_streams if item["tags"]["language"] == core.ALANGUAGE] + audio1 = [item for item in audio_streams if item['tags']['language'] == core.ALANGUAGE] except Exception: # no language tags. Assume only 1 language. 
audio1 = audio_streams try: - audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW] + audio2 = [item for item in audio1 if item['codec_name'] in core.ACODEC_ALLOW] except Exception: audio2 = [] try: - audio3 = [item for item in audio_streams if item["tags"]["language"] != core.ALANGUAGE] + audio3 = [item for item in audio_streams if item['tags']['language'] != core.ALANGUAGE] except Exception: audio3 = [] try: - audio4 = [item for item in audio3 if item["codec_name"] in core.ACODEC_ALLOW] + audio4 = [item for item in audio3 if item['codec_name'] in core.ACODEC_ALLOW] except Exception: audio4 = [] if audio2: # right (or only) language and codec... - map_cmd.extend(['-map', '0:{index}'.format(index=audio2[0]["index"])]) - a_mapped.extend([audio2[0]["index"]]) - bitrate = int(float(audio2[0].get("bit_rate", 0))) / 1000 - channels = int(float(audio2[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio2[0]['index'])]) + a_mapped.extend([audio2[0]['index']]) + bitrate = int(float(audio2[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio2[0].get('channels', 0))) audio_cmd.extend(['-c:a:{0}'.format(used_audio), 'copy']) elif audio1: # right (or only) language, wrong codec. - map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]["index"])]) - a_mapped.extend([audio1[0]["index"]]) - bitrate = int(float(audio1[0].get("bit_rate", 0))) / 1000 - channels = int(float(audio1[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]['index'])]) + a_mapped.extend([audio1[0]['index']]) + bitrate = int(float(audio1[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio1[0].get('channels', 0))) audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy']) elif audio4: # wrong language, right codec. 
- map_cmd.extend(['-map', '0:{index}'.format(index=audio4[0]["index"])]) - a_mapped.extend([audio4[0]["index"]]) - bitrate = int(float(audio4[0].get("bit_rate", 0))) / 1000 - channels = int(float(audio4[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio4[0]['index'])]) + a_mapped.extend([audio4[0]['index']]) + bitrate = int(float(audio4[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio4[0].get('channels', 0))) audio_cmd.extend(['-c:a:{0}'.format(used_audio), 'copy']) elif audio3: # wrong language, wrong codec. just pick the default audio track - map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]["index"])]) - a_mapped.extend([audio3[0]["index"]]) - bitrate = int(float(audio3[0].get("bit_rate", 0))) / 1000 - channels = int(float(audio3[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]['index'])]) + a_mapped.extend([audio3[0]['index']]) + bitrate = int(float(audio3[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio3[0].get('channels', 0))) audio_cmd.extend(['-c:a:{0}'.format(used_audio), core.ACODEC if core.ACODEC else 'copy']) if core.ACHANNELS and channels and channels > core.ACHANNELS: @@ -327,39 +327,39 @@ def build_commands(file, new_dir, movie_name, bitbucket): if core.ACODEC2_ALLOW: used_audio += 1 try: - audio5 = [item for item in audio1 if item["codec_name"] in core.ACODEC2_ALLOW] + audio5 = [item for item in audio1 if item['codec_name'] in core.ACODEC2_ALLOW] except Exception: audio5 = [] try: - audio6 = [item for item in audio3 if item["codec_name"] in core.ACODEC2_ALLOW] + audio6 = [item for item in audio3 if item['codec_name'] in core.ACODEC2_ALLOW] except Exception: audio6 = [] if audio5: # right language and codec. 
- map_cmd.extend(['-map', '0:{index}'.format(index=audio5[0]["index"])]) - a_mapped.extend([audio5[0]["index"]]) - bitrate = int(float(audio5[0].get("bit_rate", 0))) / 1000 - channels = int(float(audio5[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio5[0]['index'])]) + a_mapped.extend([audio5[0]['index']]) + bitrate = int(float(audio5[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio5[0].get('channels', 0))) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy']) elif audio1: # right language wrong codec. - map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]["index"])]) - a_mapped.extend([audio1[0]["index"]]) - bitrate = int(float(audio1[0].get("bit_rate", 0))) / 1000 - channels = int(float(audio1[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]['index'])]) + a_mapped.extend([audio1[0]['index']]) + bitrate = int(float(audio1[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio1[0].get('channels', 0))) if core.ACODEC2: audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2]) else: audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy']) elif audio6: # wrong language, right codec - map_cmd.extend(['-map', '0:{index}'.format(index=audio6[0]["index"])]) - a_mapped.extend([audio6[0]["index"]]) - bitrate = int(float(audio6[0].get("bit_rate", 0))) / 1000 - channels = int(float(audio6[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio6[0]['index'])]) + a_mapped.extend([audio6[0]['index']]) + bitrate = int(float(audio6[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio6[0].get('channels', 0))) audio_cmd2.extend(['-c:a:{0}'.format(used_audio), 'copy']) elif audio3: # wrong language, wrong codec just pick the default audio track - map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]["index"])]) - a_mapped.extend([audio3[0]["index"]]) - bitrate = int(float(audio3[0].get("bit_rate", 0))) / 1000 - channels = 
int(float(audio3[0].get("channels", 0))) + map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]['index'])]) + a_mapped.extend([audio3[0]['index']]) + bitrate = int(float(audio3[0].get('bit_rate', 0))) / 1000 + channels = int(float(audio3[0].get('channels', 0))) if core.ACODEC2: audio_cmd2.extend(['-c:a:{0}'.format(used_audio), core.ACODEC2]) else: @@ -388,14 +388,14 @@ def build_commands(file, new_dir, movie_name, bitbucket): if core.AINCLUDE and core.ACODEC3: audio_streams.extend(commentary) # add commentry tracks back here. for audio in audio_streams: - if audio["index"] in a_mapped: + if audio['index'] in a_mapped: continue used_audio += 1 - map_cmd.extend(['-map', '0:{index}'.format(index=audio["index"])]) + map_cmd.extend(['-map', '0:{index}'.format(index=audio['index'])]) audio_cmd3 = [] - bitrate = int(float(audio.get("bit_rate", 0))) / 1000 - channels = int(float(audio.get("channels", 0))) - if audio["codec_name"] in core.ACODEC3_ALLOW: + bitrate = int(float(audio.get('bit_rate', 0))) / 1000 + channels = int(float(audio.get('channels', 0))) + if audio['codec_name'] in core.ACODEC3_ALLOW: audio_cmd3.extend(['-c:a:{0}'.format(used_audio), 'copy']) else: if core.ACODEC3: @@ -424,7 +424,7 @@ def build_commands(file, new_dir, movie_name, bitbucket): n = 0 for lan in core.SLANGUAGES: try: - subs1 = [item for item in sub_streams if item["tags"]["language"] == lan] + subs1 = [item for item in sub_streams if item['tags']['language'] == lan] except Exception: subs1 = [] if core.BURN and not subs1 and not burnt and os.path.isfile(file): @@ -436,28 +436,28 @@ def build_commands(file, new_dir, movie_name, bitbucket): if core.BURN and not burnt and os.path.isfile(input_file): subloc = 0 for index in range(len(sub_streams)): - if sub_streams[index]["index"] == sub["index"]: + if sub_streams[index]['index'] == sub['index']: subloc = index break video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=input_file, loc=subloc)]) burnt = 1 if not core.ALLOWSUBS: 
break - if sub["codec_name"] in ["dvd_subtitle", "VobSub"] and core.SCODEC == "mov_text": # We can't convert these. + if sub['codec_name'] in ['dvd_subtitle', 'VobSub'] and core.SCODEC == 'mov_text': # We can't convert these. continue - map_cmd.extend(['-map', '0:{index}'.format(index=sub["index"])]) - s_mapped.extend([sub["index"]]) + map_cmd.extend(['-map', '0:{index}'.format(index=sub['index'])]) + s_mapped.extend([sub['index']]) if core.SINCLUDE: for sub in sub_streams: if not core.ALLOWSUBS: break - if sub["index"] in s_mapped: + if sub['index'] in s_mapped: continue - if sub["codec_name"] in ["dvd_subtitle", "VobSub"] and core.SCODEC == "mov_text": # We can't convert these. + if sub['codec_name'] in ['dvd_subtitle', 'VobSub'] and core.SCODEC == 'mov_text': # We can't convert these. continue - map_cmd.extend(['-map', '0:{index}'.format(index=sub["index"])]) - s_mapped.extend([sub["index"]]) + map_cmd.extend(['-map', '0:{index}'.format(index=sub['index'])]) + s_mapped.extend([sub['index']]) if core.OUTPUTFASTSTART: other_cmd.extend(['-movflags', '+faststart']) @@ -474,11 +474,11 @@ def build_commands(file, new_dir, movie_name, bitbucket): if core.SEMBED and os.path.isfile(file): for subfile in get_subs(file): sub_details, result = get_video_details(subfile) - if not sub_details or not sub_details.get("streams"): + if not sub_details or not sub_details.get('streams'): continue - if core.SCODEC == "mov_text": - subcode = [stream["codec_name"] for stream in sub_details["streams"]] - if set(subcode).intersection(["dvd_subtitle", "VobSub"]): # We can't convert these. + if core.SCODEC == 'mov_text': + subcode = [stream['codec_name'] for stream in sub_details['streams']] + if set(subcode).intersection(['dvd_subtitle', 'VobSub']): # We can't convert these. 
continue command.extend(['-i', subfile]) lan = os.path.splitext(os.path.splitext(subfile)[0])[1][1:].split('-')[0] @@ -541,34 +541,34 @@ def extract_subs(file, newfile_path, bitbucket): name = os.path.splitext(os.path.split(newfile_path)[1])[0] try: - sub_streams = [item for item in video_details["streams"] if - item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[ - "codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"] + sub_streams = [item for item in video_details['streams'] if + item['codec_type'] == 'subtitle' and item['tags']['language'] in core.SLANGUAGES and item[ + 'codec_name'] != 'hdmv_pgs_subtitle' and item['codec_name'] != 'pgssub'] except Exception: - sub_streams = [item for item in video_details["streams"] if - item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ - "codec_name"] != "pgssub"] + sub_streams = [item for item in video_details['streams'] if + item['codec_type'] == 'subtitle' and item['codec_name'] != 'hdmv_pgs_subtitle' and item[ + 'codec_name'] != 'pgssub'] num = len(sub_streams) for n in range(num): sub = sub_streams[n] - idx = sub["index"] - lan = sub.get("tags", {}).get("language", "unk") + idx = sub['index'] + lan = sub.get('tags', {}).get('language', 'unk') if num == 1: - output_file = os.path.join(subdir, "{0}.srt".format(name)) + output_file = os.path.join(subdir, '{0}.srt'.format(name)) if os.path.isfile(output_file): - output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, n)) + output_file = os.path.join(subdir, '{0}.{1}.srt'.format(name, n)) else: - output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, lan)) + output_file = os.path.join(subdir, '{0}.{1}.srt'.format(name, lan)) if os.path.isfile(output_file): - output_file = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n)) + output_file = os.path.join(subdir, '{0}.{1}.{2}.srt'.format(name, lan, n)) command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, 
'-vn', '-an', '-codec:{index}'.format(index=idx), 'srt', output_file] if platform.system() != 'Windows': command = core.NICENESS + command - logger.info("Extracting {0} subtitle from: {1}".format(lan, file)) + logger.info('Extracting {0} subtitle from: {1}'.format(lan, file)) print_cmd(command) result = 1 # set result to failed in case call fails. try: @@ -576,16 +576,16 @@ def extract_subs(file, newfile_path, bitbucket): proc.communicate() result = proc.returncode except Exception: - logger.error("Extracting subtitle has failed") + logger.error('Extracting subtitle has failed') if result == 0: try: shutil.copymode(file, output_file) except Exception: pass - logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file)) + logger.info('Extracting {0} subtitle from {1} has succeeded'.format(lan, file)) else: - logger.error("Extracting subtitles has failed") + logger.error('Extracting subtitles has failed') def process_list(it, new_dir, bitbucket): @@ -597,20 +597,20 @@ def process_list(it, new_dir, bitbucket): for item in it: ext = os.path.splitext(item)[1].lower() if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS: - logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER") + logger.debug('Attempting to rip disk image: {0}'.format(item), 'TRANSCODER') new_list.extend(rip_iso(item, new_dir, bitbucket)) rem_list.append(item) - elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS: - logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER") + elif re.match('.+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', item) and '.vob' not in core.IGNOREEXTENSIONS: + logger.debug('Found VIDEO_TS image file: {0}'.format(item), 'TRANSCODER') if not vts_path: try: - vts_path = re.match("(.+VIDEO_TS)", item).groups()[0] + vts_path = re.match('(.+VIDEO_TS)', item).groups()[0] except Exception: vts_path = os.path.split(item)[0] rem_list.append(item) - elif re.match(".+VIDEO_TS.", 
item) or re.match(".+VTS_[0-9][0-9]_[0-9].", item): + elif re.match('.+VIDEO_TS.', item) or re.match('.+VTS_[0-9][0-9]_[0-9].', item): rem_list.append(item) - elif core.CONCAT and re.match(".+[cC][dD][0-9].", item): + elif core.CONCAT and re.match('.+[cC][dD][0-9].', item): rem_list.append(item) combine.append(item) else: @@ -627,11 +627,11 @@ def process_list(it, new_dir, bitbucket): it.extend(new_list) for item in rem_list: it.remove(item) - logger.debug("Successfully extracted .vob file {0} from disk image".format(new_list[0]), "TRANSCODER") + logger.debug('Successfully extracted .vob file {0} from disk image'.format(new_list[0]), 'TRANSCODER') elif new_list and not success: new_list = [] rem_list = [] - logger.error("Failed extracting .vob files from disk image. Stopping transcoding.", "TRANSCODER") + logger.error('Failed extracting .vob files from disk image. Stopping transcoding.', 'TRANSCODER') return it, rem_list, new_list, success @@ -640,17 +640,17 @@ def rip_iso(item, new_dir, bitbucket): failure_dir = 'failure' # Mount the ISO in your OS and call combineVTS. if not core.SEVENZIP: - logger.error("No 7zip installed. Can't extract image file {0}".format(item), "TRANSCODER") + logger.error('No 7zip installed. 
Can\'t extract image file {0}'.format(item), 'TRANSCODER') new_files = [failure_dir] return new_files cmd = [core.SEVENZIP, 'l', item] try: - logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER") + logger.debug('Attempting to extract .vob from image file {0}'.format(item), 'TRANSCODER') print_cmd(cmd) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) out, err = proc.communicate() - file_list = [re.match(r".+(VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in - out.splitlines() if re.match(r".+VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)] + file_list = [re.match(r'.+(VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])', line).groups()[0] for line in + out.splitlines() if re.match(r'.+VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', line)] combined = [] for n in range(99): concat = [] @@ -675,10 +675,10 @@ def rip_iso(item, new_dir, bitbucket): name = os.path.splitext(os.path.split(item)[1])[0] new_files.append({item: {'name': name, 'files': combined}}) if not new_files: - logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER") + logger.error('No VIDEO_TS folder found in image file {0}'.format(item), 'TRANSCODER') new_files = [failure_dir] except Exception: - logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER") + logger.error('Failed to extract from image file {0}'.format(item), 'TRANSCODER') new_files = [failure_dir] return new_files @@ -709,11 +709,11 @@ def combine_vts(vts_path): def combine_cd(combine): new_files = [] - for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]): + for item in set([re.match('(.+)[cC][dD][0-9].', item).groups()[0] for item in combine]): concat = '' for n in range(99): files = [file for file in combine if - n + 1 == int(re.match(".+[cC][dD]([0-9]+).", file).groups()[0]) and item in file] + n + 1 == int(re.match('.+[cC][dD]([0-9]+).', file).groups()[0]) and 
item in file] if files: concat += '{file}|'.format(file=files[0]) else: @@ -724,16 +724,16 @@ def combine_cd(combine): def print_cmd(command): - cmd = "" + cmd = '' for item in command: - cmd = "{cmd} {item}".format(cmd=cmd, item=item) - logger.debug("calling command:{0}".format(cmd)) + cmd = '{cmd} {item}'.format(cmd=cmd, item=item) + logger.debug('calling command:{0}'.format(cmd)) def transcode_directory(dir_name): if not core.FFMPEG: return 1, dir_name - logger.info("Checking for files to be transcoded") + logger.info('Checking for files to be transcoded') final_result = 0 # initialize as successful if core.OUTPUTVIDEOPATH: new_dir = core.OUTPUTVIDEOPATH @@ -768,11 +768,11 @@ def transcode_directory(dir_name): os.remove(newfile_path) except OSError as e: if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist - logger.debug("Error when removing transcoding target: {0}".format(e)) + logger.debug('Error when removing transcoding target: {0}'.format(e)) except Exception as e: - logger.debug("Error when removing transcoding target: {0}".format(e)) + logger.debug('Error when removing transcoding target: {0}'.format(e)) - logger.info("Transcoding video: {0}".format(newfile_path)) + logger.info('Transcoding video: {0}'.format(newfile_path)) print_cmd(command) result = 1 # set result to failed in case call fails. 
try: @@ -789,7 +789,7 @@ def transcode_directory(dir_name): proc.communicate() result = proc.returncode except Exception: - logger.error("Transcoding of video {0} has failed".format(newfile_path)) + logger.error('Transcoding of video {0} has failed'.format(newfile_path)) if core.SUBSDIR and result == 0 and isinstance(file, string_types): for sub in get_subs(file): @@ -805,14 +805,14 @@ def transcode_directory(dir_name): shutil.copymode(file, newfile_path) except Exception: pass - logger.info("Transcoding of video to {0} succeeded".format(newfile_path)) + logger.info('Transcoding of video to {0} succeeded'.format(newfile_path)) if os.path.isfile(newfile_path) and (file in new_list or not core.DUPLICATE): try: os.unlink(file) except Exception: pass else: - logger.error("Transcoding of video to {0} failed with result {1}".format(newfile_path, result)) + logger.error('Transcoding of video to {0} failed with result {1}'.format(newfile_path, result)) # this will be 0 (successful) it all are successful, else will return a positive integer for failure. final_result = final_result + result if final_result == 0 and not core.DUPLICATE: diff --git a/core/user_scripts.py b/core/user_scripts.py index 1fc97dc5..83f17d60 100644 --- a/core/user_scripts.py +++ b/core/user_scripts.py @@ -12,38 +12,38 @@ def external_script(output_destination, torrent_name, torrent_label, settings): final_result = 0 # start at 0. num_files = 0 try: - core.USER_SCRIPT_MEDIAEXTENSIONS = settings["user_script_mediaExtensions"].lower() + core.USER_SCRIPT_MEDIAEXTENSIONS = settings['user_script_mediaExtensions'].lower() if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str): core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',') except Exception: core.USER_SCRIPT_MEDIAEXTENSIONS = [] - core.USER_SCRIPT = settings.get("user_script_path") + core.USER_SCRIPT = settings.get('user_script_path') - if not core.USER_SCRIPT or core.USER_SCRIPT == "None": # do nothing and return success. 
- return [0, ""] + if not core.USER_SCRIPT or core.USER_SCRIPT == 'None': # do nothing and return success. + return [0, ''] try: - core.USER_SCRIPT_PARAM = settings["user_script_param"] + core.USER_SCRIPT_PARAM = settings['user_script_param'] if isinstance(core.USER_SCRIPT_PARAM, str): core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',') except Exception: core.USER_SCRIPT_PARAM = [] try: - core.USER_SCRIPT_SUCCESSCODES = settings["user_script_successCodes"] + core.USER_SCRIPT_SUCCESSCODES = settings['user_script_successCodes'] if isinstance(core.USER_SCRIPT_SUCCESSCODES, str): core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',') except Exception: core.USER_SCRIPT_SUCCESSCODES = 0 - core.USER_SCRIPT_CLEAN = int(settings.get("user_script_clean", 1)) - core.USER_SCRIPT_RUNONCE = int(settings.get("user_script_runOnce", 1)) + core.USER_SCRIPT_CLEAN = int(settings.get('user_script_clean', 1)) + core.USER_SCRIPT_RUNONCE = int(settings.get('user_script_runOnce', 1)) if core.CHECK_MEDIA: for video in list_media_files(output_destination, media=True, audio=False, meta=False, archives=False): if transcoder.is_video_good(video, 0): import_subs(video) else: - logger.info("Corrupt video file found {0}. Deleting.".format(video), "USERSCRIPT") + logger.info('Corrupt video file found {0}. Deleting.'.format(video), 'USERSCRIPT') os.unlink(video) for dirpath, dirnames, filenames in os.walk(output_destination): @@ -52,25 +52,25 @@ def external_script(output_destination, torrent_name, torrent_label, settings): file_path = core.os.path.join(dirpath, file) file_name, file_extension = os.path.splitext(file) - if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS: + if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or 'all' in core.USER_SCRIPT_MEDIAEXTENSIONS: num_files += 1 if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files. 
continue command = [core.USER_SCRIPT] for param in core.USER_SCRIPT_PARAM: - if param == "FN": + if param == 'FN': command.append('{0}'.format(file)) continue - elif param == "FP": + elif param == 'FP': command.append('{0}'.format(file_path)) continue - elif param == "TN": + elif param == 'TN': command.append('{0}'.format(torrent_name)) continue - elif param == "TL": + elif param == 'TL': command.append('{0}'.format(torrent_label)) continue - elif param == "DN": + elif param == 'DN': if core.USER_SCRIPT_RUNONCE == 1: command.append('{0}'.format(output_destination)) else: @@ -79,24 +79,24 @@ def external_script(output_destination, torrent_name, torrent_label, settings): else: command.append(param) continue - cmd = "" + cmd = '' for item in command: - cmd = "{cmd} {item}".format(cmd=cmd, item=item) - logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=file_path), "USERSCRIPT") + cmd = '{cmd} {item}'.format(cmd=cmd, item=item) + logger.info('Running script {cmd} on file {path}.'.format(cmd=cmd, path=file_path), 'USERSCRIPT') try: p = Popen(command) res = p.wait() if str(res) in core.USER_SCRIPT_SUCCESSCODES: # Linux returns 0 for successful. 
- logger.info("UserScript {0} was successfull".format(command[0])) + logger.info('UserScript {0} was successfull'.format(command[0])) result = 0 else: - logger.error("UserScript {0} has failed with return code: {1}".format(command[0], res), "USERSCRIPT") + logger.error('UserScript {0} has failed with return code: {1}'.format(command[0], res), 'USERSCRIPT') logger.info( - "If the UserScript completed successfully you should add {0} to the user_script_successCodes".format( - res), "USERSCRIPT") + 'If the UserScript completed successfully you should add {0} to the user_script_successCodes'.format( + res), 'USERSCRIPT') result = int(1) except Exception: - logger.error("UserScript {0} has failed".format(command[0]), "USERSCRIPT") + logger.error('UserScript {0} has failed'.format(command[0]), 'USERSCRIPT') result = int(1) final_result += result @@ -105,13 +105,13 @@ def external_script(output_destination, torrent_name, torrent_label, settings): for file in filenames: file_name, file_extension = os.path.splitext(file) - if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL": + if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == 'ALL': num_files_new += 1 if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0: - logger.info("All files have been processed. Cleaning outputDirectory {0}".format(output_destination)) + logger.info('All files have been processed. Cleaning outputDirectory {0}'.format(output_destination)) remove_dir(output_destination) elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0: - logger.info("{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.".format( + logger.info('{0} files were processed, but {1} still remain. 
 outputDirectory will not be cleaned.'.format(
             num_files, num_files_new))
 
     return [final_result, '']
diff --git a/core/utils.py b/core/utils.py
index a8590f43..9c8d81cc 100644
--- a/core/utils.py
+++ b/core/utils.py
@@ -56,7 +56,7 @@ shutil.copyfileobj = copyfileobj_fast
 
 def report_nzb(failure_link, client_agent):
     # Contact indexer site
-    logger.info("Sending failure notification to indexer site")
+    logger.info('Sending failure notification to indexer site')
     if client_agent == 'nzbget':
         headers = {'User-Agent': 'NZBGet / nzbToMedia.py'}
     elif client_agent == 'sabnzbd':
@@ -66,7 +66,7 @@ def report_nzb(failure_link, client_agent):
     try:
         requests.post(failure_link, headers=headers, timeout=(30, 300))
     except Exception as e:
-        logger.error("Unable to open URL {0} due to {1}".format(failure_link, e))
+        logger.error('Unable to open URL {0} due to {1}'.format(failure_link, e))
 
     return
 
@@ -83,8 +83,8 @@ def sanitize_name(name):
     """
 
     # remove bad chars from the filename
-    name = re.sub(r'[\\\/*]', '-', name)
-    name = re.sub(r'[:"<>|?]', '', name)
+    name = re.sub(r'[\\/*]', '-', name)
+    name = re.sub(r'[:"<>|?]', '', name)
 
     # remove leading/trailing periods and spaces
     name = name.strip(' .')
@@ -110,15 +110,15 @@ def remote_dir(path):
         return path
     for local, remote in core.REMOTEPATHS:
         if local in path:
-            base_dirs = path.replace(local, "").split(os.sep)
+            base_dirs = path.replace(local, '').split(os.sep)
             if '/' in remote:
                 remote_sep = '/'
             else:
                 remote_sep = '\\'
             new_path = remote_sep.join([remote] + base_dirs)
             new_path = re.sub(r'(\S)(\\+)', r'\1\\', new_path)
-            new_path = re.sub(r'(\/+)', r'/', new_path)
-            new_path = re.sub(r'([\/\\])$', r'', new_path)
+            new_path = re.sub(r'(/+)', r'/', new_path)
+            new_path = re.sub(r'([/\\])$', r'', new_path)
             return new_path
     return path
@@ -141,16 +141,16 @@ def category_search(input_directory, input_name, input_category, root, categorie
     pathlist = os.path.normpath(input_directory).split(os.sep)
 
     if input_category and input_category in pathlist: 
- logger.debug("SEARCH: Found the Category: {0} in directory structure".format(input_category)) + logger.debug('SEARCH: Found the Category: {0} in directory structure'.format(input_category)) elif input_category: - logger.debug("SEARCH: Could not find the category: {0} in the directory structure".format(input_category)) + logger.debug('SEARCH: Could not find the category: {0} in the directory structure'.format(input_category)) else: try: input_category = list(set(pathlist) & set(categories))[-1] # assume last match is most relevant category. - logger.debug("SEARCH: Found Category: {0} in directory structure".format(input_category)) + logger.debug('SEARCH: Found Category: {0} in directory structure'.format(input_category)) except IndexError: - input_category = "" - logger.debug("SEARCH: Could not find a category in the directory structure") + input_category = '' + logger.debug('SEARCH: Could not find a category in the directory structure') if not os.path.isdir(input_directory) and os.path.isfile(input_directory): # If the input directory is a file if not input_name: input_name = os.path.split(os.path.normpath(input_directory))[1] @@ -158,30 +158,30 @@ def category_search(input_directory, input_name, input_category, root, categorie if input_category and os.path.isdir(os.path.join(input_directory, input_category)): logger.info( - "SEARCH: Found category directory {0} in input directory directory {1}".format(input_category, input_directory)) + 'SEARCH: Found category directory {0} in input directory directory {1}'.format(input_category, input_directory)) input_directory = os.path.join(input_directory, input_category) - logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) + logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory)) if input_name and os.path.isdir(os.path.join(input_directory, input_name)): - logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format(input_name, input_directory)) 
+ logger.info('SEARCH: Found torrent directory {0} in input directory directory {1}'.format(input_name, input_directory)) input_directory = os.path.join(input_directory, input_name) - logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) + logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory)) tordir = True elif input_name and os.path.isdir(os.path.join(input_directory, sanitize_name(input_name))): - logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format( + logger.info('SEARCH: Found torrent directory {0} in input directory directory {1}'.format( sanitize_name(input_name), input_directory)) input_directory = os.path.join(input_directory, sanitize_name(input_name)) - logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) + logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory)) tordir = True elif input_name and os.path.isfile(os.path.join(input_directory, input_name)): - logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format(input_name, input_directory)) + logger.info('SEARCH: Found torrent file {0} in input directory directory {1}'.format(input_name, input_directory)) input_directory = os.path.join(input_directory, input_name) - logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) + logger.info('SEARCH: Setting input_directory to {0}'.format(input_directory)) tordir = True elif input_name and os.path.isfile(os.path.join(input_directory, sanitize_name(input_name))): - logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format( + logger.info('SEARCH: Found torrent file {0} in input directory directory {1}'.format( sanitize_name(input_name), input_directory)) input_directory = os.path.join(input_directory, sanitize_name(input_name)) - logger.info("SEARCH: Setting input_directory to {0}".format(input_directory)) + logger.info('SEARCH: Setting input_directory to 
{0}'.format(input_directory)) tordir = True imdbid = [item for item in pathlist if '.cp(tt' in item] # This looks for the .cp(tt imdb id in the path. @@ -194,7 +194,7 @@ def category_search(input_directory, input_name, input_category, root, categorie index = pathlist.index(input_category) if index + 1 < len(pathlist): tordir = True - logger.info("SEARCH: Found a unique directory {0} in the category directory".format + logger.info('SEARCH: Found a unique directory {0} in the category directory'.format (pathlist[index + 1])) if not input_name: input_name = pathlist[index + 1] @@ -203,7 +203,7 @@ def category_search(input_directory, input_name, input_category, root, categorie if input_name and not tordir: if input_name in pathlist or sanitize_name(input_name) in pathlist: - logger.info("SEARCH: Found torrent directory {0} in the directory structure".format(input_name)) + logger.info('SEARCH: Found torrent directory {0} in the directory structure'.format(input_name)) tordir = True else: root = 1 @@ -211,8 +211,8 @@ def category_search(input_directory, input_name, input_category, root, categorie root = 2 if root > 0: - logger.info("SEARCH: Could not find a unique directory for this download. Assume a common directory.") - logger.info("SEARCH: We will try and determine which files to process, individually") + logger.info('SEARCH: Could not find a unique directory for this download. 
Assume a common directory.') + logger.info('SEARCH: We will try and determine which files to process, individually') return input_directory, input_name, input_category, root @@ -234,7 +234,7 @@ def is_min_size(input_name, min_size): try: input_size = get_dir_size(os.path.dirname(input_name)) except Exception: - logger.error("Failed to get file size for {0}".format(input_name), 'MINSIZE') + logger.error('Failed to get file size for {0}'.format(input_name), 'MINSIZE') return True # Ignore files under a certain size @@ -249,51 +249,51 @@ def is_sample(input_name): def copy_link(src, target_link, use_link): - logger.info("MEDIAFILE: [{0}]".format(os.path.basename(target_link)), 'COPYLINK') - logger.info("SOURCE FOLDER: [{0}]".format(os.path.dirname(src)), 'COPYLINK') - logger.info("TARGET FOLDER: [{0}]".format(os.path.dirname(target_link)), 'COPYLINK') + logger.info('MEDIAFILE: [{0}]'.format(os.path.basename(target_link)), 'COPYLINK') + logger.info('SOURCE FOLDER: [{0}]'.format(os.path.dirname(src)), 'COPYLINK') + logger.info('TARGET FOLDER: [{0}]'.format(os.path.dirname(target_link)), 'COPYLINK') if src != target_link and os.path.exists(target_link): - logger.info("MEDIAFILE already exists in the TARGET folder, skipping ...", 'COPYLINK') + logger.info('MEDIAFILE already exists in the TARGET folder, skipping ...', 'COPYLINK') return True elif src == target_link and os.path.isfile(target_link) and os.path.isfile(src): - logger.info("SOURCE AND TARGET files are the same, skipping ...", 'COPYLINK') + logger.info('SOURCE AND TARGET files are the same, skipping ...', 'COPYLINK') return True elif src == os.path.dirname(target_link): - logger.info("SOURCE AND TARGET folders are the same, skipping ...", 'COPYLINK') + logger.info('SOURCE AND TARGET folders are the same, skipping ...', 'COPYLINK') return True make_dir(os.path.dirname(target_link)) try: if use_link == 'dir': - logger.info("Directory linking SOURCE FOLDER -> TARGET FOLDER", 'COPYLINK') + logger.info('Directory 
linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK') linktastic.dirlink(src, target_link) return True if use_link == 'junction': - logger.info("Directory junction linking SOURCE FOLDER -> TARGET FOLDER", 'COPYLINK') + logger.info('Directory junction linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK') linktastic.dirlink(src, target_link) return True - elif use_link == "hard": - logger.info("Hard linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') + elif use_link == 'hard': + logger.info('Hard linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK') linktastic.link(src, target_link) return True - elif use_link == "sym": - logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') + elif use_link == 'sym': + logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK') linktastic.symlink(src, target_link) return True - elif use_link == "move-sym": - logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') + elif use_link == 'move-sym': + logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK') shutil.move(src, target_link) linktastic.symlink(target_link, src) return True - elif use_link == "move": - logger.info("Moving SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') + elif use_link == 'move': + logger.info('Moving SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK') shutil.move(src, target_link) return True except Exception as e: - logger.warning("Error: {0}, copying instead ... ".format(e), 'COPYLINK') + logger.warning('Error: {0}, copying instead ... 
'.format(e), 'COPYLINK') - logger.info("Copying SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK') + logger.info('Copying SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK') shutil.copy(src, target_link) return True @@ -317,13 +317,13 @@ def replace_links(link): target = os.readlink(target) n = n + 1 if n > 1: - logger.info("Changing sym-link: {0} to point directly to file: {1}".format(link, target), 'COPYLINK') + logger.info('Changing sym-link: {0} to point directly to file: {1}'.format(link, target), 'COPYLINK') os.unlink(link) linktastic.symlink(target, link) def flatten(output_destination): - logger.info("FLATTEN: Flattening directory: {0}".format(output_destination)) + logger.info('FLATTEN: Flattening directory: {0}'.format(output_destination)) for outputFile in list_media_files(output_destination): dir_path = os.path.dirname(outputFile) file_name = os.path.basename(outputFile) @@ -336,7 +336,7 @@ def flatten(output_destination): try: shutil.move(outputFile, target) except Exception: - logger.error("Could not flatten {0}".format(outputFile), 'FLATTEN') + logger.error('Could not flatten {0}'.format(outputFile), 'FLATTEN') remove_empty_folders(output_destination) # Cleanup empty directories @@ -347,7 +347,7 @@ def remove_empty_folders(path, remove_root=True): return # remove empty subfolders - logger.debug("Checking for empty folders in:{0}".format(path)) + logger.debug('Checking for empty folders in:{0}'.format(path)) files = os.listdir(text_type(path)) if len(files): for f in files: @@ -358,7 +358,7 @@ def remove_empty_folders(path, remove_root=True): # if folder empty, delete it files = os.listdir(text_type(path)) if len(files) == 0 and remove_root: - logger.debug("Removing empty folder:{}".format(path)) + logger.debug('Removing empty folder:{}'.format(path)) os.rmdir(path) @@ -386,7 +386,7 @@ def wake_on_lan(ethernet_address): int(addr_byte[4], 16), int(addr_byte[5], 16)) - # Build the Wake-On-LAN "Magic Packet"... + # Build the Wake-On-LAN 'Magic Packet'... 
msg = b'\xff' * 6 + hw_addr * 16 @@ -402,28 +402,28 @@ def wake_on_lan(ethernet_address): def test_connection(host, port): try: socket.create_connection((host, port)) - return "Up" + return 'Up' except Exception: - return "Down" + return 'Down' def wake_up(): - host = core.CFG["WakeOnLan"]["host"] - port = int(core.CFG["WakeOnLan"]["port"]) - mac = core.CFG["WakeOnLan"]["mac"] + host = core.CFG['WakeOnLan']['host'] + port = int(core.CFG['WakeOnLan']['port']) + mac = core.CFG['WakeOnLan']['mac'] i = 1 - while test_connection(host, port) == "Down" and i < 4: - logger.info(("Sending WakeOnLan Magic Packet for mac: {0}".format(mac))) + while test_connection(host, port) == 'Down' and i < 4: + logger.info(('Sending WakeOnLan Magic Packet for mac: {0}'.format(mac))) wake_on_lan(mac) time.sleep(20) i = i + 1 - if test_connection(host, port) == "Down": # final check. - logger.warning("System with mac: {0} has not woken after 3 attempts. " - "Continuing with the rest of the script.".format(mac)) + if test_connection(host, port) == 'Down': # final check. + logger.warning('System with mac: {0} has not woken after 3 attempts. ' + 'Continuing with the rest of the script.'.format(mac)) else: - logger.info("System with mac: {0} has been woken. Continuing with the rest of the script.".format(mac)) + logger.info('System with mac: {0} has been woken. Continuing with the rest of the script.'.format(mac)) def char_replace(name): @@ -470,36 +470,36 @@ def char_replace(name): def convert_to_ascii(input_name, dir_name): - ascii_convert = int(core.CFG["ASCII"]["convert"]) - if ascii_convert == 0 or os.name == 'nt': # just return if we don't want to convert or on windows os and "\" is replaced!. + ascii_convert = int(core.CFG['ASCII']['convert']) + if ascii_convert == 0 or os.name == 'nt': # just return if we don't want to convert or on windows os and '\' is replaced!. 
return input_name, dir_name encoded, input_name = char_replace(input_name) directory, base = os.path.split(dir_name) - if not base: # ended with "/" + if not base: # ended with '/' directory, base = os.path.split(directory) encoded, base2 = char_replace(base) if encoded: dir_name = os.path.join(directory, base2) - logger.info("Renaming directory to: {0}.".format(base2), 'ENCODER') + logger.info('Renaming directory to: {0}.'.format(base2), 'ENCODER') os.rename(os.path.join(directory, base), dir_name) if 'NZBOP_SCRIPTDIR' in os.environ: - print("[NZB] DIRECTORY={0}".format(dir_name)) + print('[NZB] DIRECTORY={0}'.format(dir_name)) for dirname, dirnames, filenames in os.walk(dir_name, topdown=False): for subdirname in dirnames: encoded, subdirname2 = char_replace(subdirname) if encoded: - logger.info("Renaming directory to: {0}.".format(subdirname2), 'ENCODER') + logger.info('Renaming directory to: {0}.'.format(subdirname2), 'ENCODER') os.rename(os.path.join(dirname, subdirname), os.path.join(dirname, subdirname2)) for dirname, dirnames, filenames in os.walk(dir_name): for filename in filenames: encoded, filename2 = char_replace(filename) if encoded: - logger.info("Renaming file to: {0}.".format(filename2), 'ENCODER') + logger.info('Renaming file to: {0}.'.format(filename2), 'ENCODER') os.rename(os.path.join(dirname, filename), os.path.join(dirname, filename2)) return input_name, dir_name @@ -511,7 +511,7 @@ def parse_other(args): def parse_rtorrent(args): # rtorrent usage: system.method.set_key = event.download.finished,TorrentToMedia, - # "execute={/path/to/nzbToMedia/TorrentToMedia.py,\"$d.get_base_path=\",\"$d.get_name=\",\"$d.get_custom1=\",\"$d.get_hash=\"}" + # 'execute={/path/to/nzbToMedia/TorrentToMedia.py,\'$d.get_base_path=\',\'$d.get_name=\',\'$d.get_custom1=\',\'$d.get_hash=\'}' input_directory = os.path.normpath(args[1]) try: input_name = args[2] @@ -534,7 +534,7 @@ def parse_rtorrent(args): def parse_utorrent(args): - # uTorrent usage: call 
 TorrentToMedia.py "%D" "%N" "%L" "%I"
+    # uTorrent usage: call TorrentToMedia.py '%D' '%N' '%L' '%I'
     input_directory = os.path.normpath(args[1])
     input_name = args[2]
     try:
@@ -577,7 +577,7 @@ def parse_transmission(args):
 
 
 def parse_vuze(args):
-    # vuze usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py "%D%N%L%I%K%F"
+    # vuze usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py '%D%N%L%I%K%F'
     try:
         cur_input = args[1].split(',')
     except Exception:
@@ -612,29 +612,29 @@ def parse_vuze(args):
 
 
 def parse_qbittorrent(args):
-    # qbittorrent usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py "%D|%N|%L|%I"
+    # qbittorrent usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py '%D|%N|%L|%I'
     try:
         cur_input = args[1].split('|')
     except Exception:
         cur_input = []
     try:
-        input_directory = os.path.normpath(cur_input[0].replace('"', ''))
+        input_directory = os.path.normpath(cur_input[0].replace('"', ''))
     except Exception:
         input_directory = ''
     try:
-        input_name = cur_input[1].replace('"', '')
+        input_name = cur_input[1].replace('"', '')
     except Exception:
         input_name = ''
     try:
-        input_category = cur_input[2].replace('"', '')
+        input_category = cur_input[2].replace('"', '')
     except Exception:
         input_category = ''
     try:
-        input_hash = cur_input[3].replace('"', '')
+        input_hash = cur_input[3].replace('"', '')
     except Exception:
         input_hash = ''
     try:
-        input_id = cur_input[3].replace('"', '')
+        input_id = cur_input[3].replace('"', '')
     except Exception:
         input_id = ''
 
@@ -664,7 +664,7 @@ def get_dirs(section, subsection, link='hard'):
 
     def process_dir(path):
         folders = []
-        logger.info("Searching {0} for mediafiles to post-process ...".format(path))
+        logger.info('Searching {0} for mediafiles to post-process ...'.format(path))
         sync = [o for o in os.listdir(text_type(path)) if os.path.splitext(o)[1] in ['.!sync', '.bts']]
         # search for single files and move them into their own folder for post-processing
         for mediafile in [os.path.join(path, o) for o in os.listdir(text_type(path)) if
@@ -674,7 +674,7 
@@ def get_dirs(section, subsection, link='hard'): if os.path.split(mediafile)[1] in ['Thumbs.db', 'thumbs.db']: continue try: - logger.debug("Found file {0} in root directory {1}.".format(os.path.split(mediafile)[1], path)) + logger.debug('Found file {0} in root directory {1}.'.format(os.path.split(mediafile)[1], path)) new_path = None file_ext = os.path.splitext(mediafile)[1] try: @@ -686,7 +686,7 @@ def get_dirs(section, subsection, link='hard'): album = f.album # create new path - new_path = os.path.join(path, "{0} - {1}".format(sanitize_name(artist), sanitize_name(album))) + new_path = os.path.join(path, '{0} - {1}'.format(sanitize_name(artist), sanitize_name(album))) elif file_ext in core.MEDIACONTAINER: f = guessit.guessit(mediafile) @@ -698,7 +698,7 @@ def get_dirs(section, subsection, link='hard'): new_path = os.path.join(path, sanitize_name(title)) except Exception as e: - logger.error("Exception parsing name for media file: {0}: {1}".format(os.path.split(mediafile)[1], e)) + logger.error('Exception parsing name for media file: {0}: {1}'.format(os.path.split(mediafile)[1], e)) if not new_path: title = os.path.splitext(os.path.basename(mediafile))[0] @@ -727,7 +727,7 @@ def get_dirs(section, subsection, link='hard'): # link file to its new path copy_link(mediafile, newfile, link) except Exception as e: - logger.error("Failed to move {0} to its own directory: {1}".format(os.path.split(mediafile)[1], e)) + logger.error('Failed to move {0} to its own directory: {1}'.format(os.path.split(mediafile)[1], e)) # removeEmptyFolders(path, removeRoot=False) @@ -741,14 +741,14 @@ def get_dirs(section, subsection, link='hard'): return folders try: - watch_dir = os.path.join(core.CFG[section][subsection]["watch_dir"], subsection) + watch_dir = os.path.join(core.CFG[section][subsection]['watch_dir'], subsection) if os.path.exists(watch_dir): to_return.extend(process_dir(watch_dir)) - elif os.path.exists(core.CFG[section][subsection]["watch_dir"]): - 
to_return.extend(process_dir(core.CFG[section][subsection]["watch_dir"])) + elif os.path.exists(core.CFG[section][subsection]['watch_dir']): + to_return.extend(process_dir(core.CFG[section][subsection]['watch_dir'])) except Exception as e: - logger.error("Failed to add directories from {0} for post-processing: {1}".format - (core.CFG[section][subsection]["watch_dir"], e)) + logger.error('Failed to add directories from {0} for post-processing: {1}'.format + (core.CFG[section][subsection]['watch_dir'], e)) if core.USELINK == 'move': try: @@ -756,10 +756,10 @@ def get_dirs(section, subsection, link='hard'): if os.path.exists(output_directory): to_return.extend(process_dir(output_directory)) except Exception as e: - logger.error("Failed to add directories from {0} for post-processing: {1}".format(core.OUTPUTDIRECTORY, e)) + logger.error('Failed to add directories from {0} for post-processing: {1}'.format(core.OUTPUTDIRECTORY, e)) if not to_return: - logger.debug("No directories identified in {0}:{1} for post-processing".format(section, subsection)) + logger.debug('No directories identified in {0}:{1} for post-processing'.format(section, subsection)) return list(set(to_return)) @@ -784,11 +784,11 @@ def onerror(func, path, exc_info): def remove_dir(dir_name): - logger.info("Deleting {0}".format(dir_name)) + logger.info('Deleting {0}'.format(dir_name)) try: shutil.rmtree(text_type(dir_name), onerror=onerror) except Exception: - logger.error("Unable to delete folder {0}".format(dir_name)) + logger.error('Unable to delete folder {0}'.format(dir_name)) def clean_dir(path, section, subsection): @@ -808,15 +808,15 @@ def clean_dir(path, section, subsection): num_files = 'unknown' if num_files > 0: logger.info( - "Directory {0} still contains {1} unprocessed file(s), skipping ...".format(path, num_files), + 'Directory {0} still contains {1} unprocessed file(s), skipping ...'.format(path, num_files), 'CLEANDIRS') return - logger.info("Directory {0} has been processed, removing 
...".format(path), 'CLEANDIRS') + logger.info('Directory {0} has been processed, removing ...'.format(path), 'CLEANDIRS') try: shutil.rmtree(path, onerror=onerror) except Exception: - logger.error("Unable to delete directory {0}".format(path)) + logger.error('Unable to delete directory {0}'.format(path)) def create_torrent_class(client_agent): @@ -825,97 +825,97 @@ def create_torrent_class(client_agent): if client_agent == 'utorrent': try: - logger.debug("Connecting to {0}: {1}".format(client_agent, core.UTORRENTWEBUI)) + logger.debug('Connecting to {0}: {1}'.format(client_agent, core.UTORRENTWEBUI)) tc = UTorrentClient(core.UTORRENTWEBUI, core.UTORRENTUSR, core.UTORRENTPWD) except Exception: - logger.error("Failed to connect to uTorrent") + logger.error('Failed to connect to uTorrent') if client_agent == 'transmission': try: - logger.debug("Connecting to {0}: http://{1}:{2}".format( + logger.debug('Connecting to {0}: http://{1}:{2}'.format( client_agent, core.TRANSMISSIONHOST, core.TRANSMISSIONPORT)) tc = TransmissionClient(core.TRANSMISSIONHOST, core.TRANSMISSIONPORT, core.TRANSMISSIONUSR, core.TRANSMISSIONPWD) except Exception: - logger.error("Failed to connect to Transmission") + logger.error('Failed to connect to Transmission') if client_agent == 'deluge': try: - logger.debug("Connecting to {0}: http://{1}:{2}".format(client_agent, core.DELUGEHOST, core.DELUGEPORT)) + logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.DELUGEHOST, core.DELUGEPORT)) tc = DelugeClient() tc.connect(host=core.DELUGEHOST, port=core.DELUGEPORT, username=core.DELUGEUSR, password=core.DELUGEPWD) except Exception: - logger.error("Failed to connect to Deluge") + logger.error('Failed to connect to Deluge') if client_agent == 'qbittorrent': try: - logger.debug("Connecting to {0}: http://{1}:{2}".format(client_agent, core.QBITTORRENTHOST, core.QBITTORRENTPORT)) - tc = qBittorrentClient("http://{0}:{1}/".format(core.QBITTORRENTHOST, core.QBITTORRENTPORT)) + 
logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.QBITTORRENTHOST, core.QBITTORRENTPORT)) + tc = qBittorrentClient('http://{0}:{1}/'.format(core.QBITTORRENTHOST, core.QBITTORRENTPORT)) tc.login(core.QBITTORRENTUSR, core.QBITTORRENTPWD) except Exception: - logger.error("Failed to connect to qBittorrent") + logger.error('Failed to connect to qBittorrent') return tc def pause_torrent(client_agent, input_hash, input_id, input_name): - logger.debug("Stopping torrent {0} in {1} while processing".format(input_name, client_agent)) + logger.debug('Stopping torrent {0} in {1} while processing'.format(input_name, client_agent)) try: - if client_agent == 'utorrent' and core.TORRENT_CLASS != "": + if client_agent == 'utorrent' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.stop(input_hash) - if client_agent == 'transmission' and core.TORRENT_CLASS != "": + if client_agent == 'transmission' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.stop_torrent(input_id) - if client_agent == 'deluge' and core.TORRENT_CLASS != "": + if client_agent == 'deluge' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.core.pause_torrent([input_id]) - if client_agent == 'qbittorrent' and core.TORRENT_CLASS != "": + if client_agent == 'qbittorrent' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.pause(input_hash) time.sleep(5) except Exception: - logger.warning("Failed to stop torrent {0} in {1}".format(input_name, client_agent)) + logger.warning('Failed to stop torrent {0} in {1}'.format(input_name, client_agent)) def resume_torrent(client_agent, input_hash, input_id, input_name): if not core.TORRENT_RESUME == 1: return - logger.debug("Starting torrent {0} in {1}".format(input_name, client_agent)) + logger.debug('Starting torrent {0} in {1}'.format(input_name, client_agent)) try: - if client_agent == 'utorrent' and core.TORRENT_CLASS != "": + if client_agent == 'utorrent' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.start(input_hash) - if client_agent == 'transmission' 
and core.TORRENT_CLASS != "": + if client_agent == 'transmission' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.start_torrent(input_id) - if client_agent == 'deluge' and core.TORRENT_CLASS != "": + if client_agent == 'deluge' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.core.resume_torrent([input_id]) - if client_agent == 'qbittorrent' and core.TORRENT_CLASS != "": + if client_agent == 'qbittorrent' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.resume(input_hash) time.sleep(5) except Exception: - logger.warning("Failed to start torrent {0} in {1}".format(input_name, client_agent)) + logger.warning('Failed to start torrent {0} in {1}'.format(input_name, client_agent)) def remove_torrent(client_agent, input_hash, input_id, input_name): if core.DELETE_ORIGINAL == 1 or core.USELINK == 'move': - logger.debug("Deleting torrent {0} from {1}".format(input_name, client_agent)) + logger.debug('Deleting torrent {0} from {1}'.format(input_name, client_agent)) try: - if client_agent == 'utorrent' and core.TORRENT_CLASS != "": + if client_agent == 'utorrent' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.removedata(input_hash) core.TORRENT_CLASS.remove(input_hash) - if client_agent == 'transmission' and core.TORRENT_CLASS != "": + if client_agent == 'transmission' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.remove_torrent(input_id, True) - if client_agent == 'deluge' and core.TORRENT_CLASS != "": + if client_agent == 'deluge' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.core.remove_torrent(input_id, True) - if client_agent == 'qbittorrent' and core.TORRENT_CLASS != "": + if client_agent == 'qbittorrent' and core.TORRENT_CLASS != '': core.TORRENT_CLASS.delete_permanently(input_hash) time.sleep(5) except Exception: - logger.warning("Failed to delete torrent {0} in {1}".format(input_name, client_agent)) + logger.warning('Failed to delete torrent {0} in {1}'.format(input_name, client_agent)) else: resume_torrent(client_agent, input_hash, input_id, input_name) 
def find_download(client_agent, download_id): - logger.debug("Searching for Download on {0} ...".format(client_agent)) + logger.debug('Searching for Download on {0} ...'.format(client_agent)) if client_agent == 'utorrent': torrents = core.TORRENT_CLASS.list()[1]['torrents'] for torrent in torrents: @@ -935,21 +935,21 @@ def find_download(client_agent, download_id): if torrent['hash'] == download_id: return True if client_agent == 'sabnzbd': - if "http" in core.SABNZBDHOST: - base_url = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) + if 'http' in core.SABNZBDHOST: + base_url = '{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT) else: - base_url = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) + base_url = 'http://{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT) url = base_url params = { 'apikey': core.SABNZBDAPIKEY, - 'mode': "get_files", + 'mode': 'get_files', 'output': 'json', 'value': download_id, } try: r = requests.get(url, params=params, verify=False, timeout=(30, 120)) except requests.ConnectionError: - logger.error("Unable to open URL") + logger.error('Unable to open URL') return False # failure result = r.json() @@ -961,48 +961,48 @@ def find_download(client_agent, download_id): def get_nzoid(input_name): nzoid = None slots = [] - logger.debug("Searching for nzoid from SAbnzbd ...") - if "http" in core.SABNZBDHOST: - base_url = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) + logger.debug('Searching for nzoid from SAbnzbd ...') + if 'http' in core.SABNZBDHOST: + base_url = '{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT) else: - base_url = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT) + base_url = 'http://{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT) url = base_url params = { 'apikey': core.SABNZBDAPIKEY, - 'mode': "queue", + 'mode': 'queue', 'output': 'json', } try: r = requests.get(url, params=params, verify=False, timeout=(30, 120)) except requests.ConnectionError: - 
logger.error("Unable to open URL") + logger.error('Unable to open URL') return nzoid # failure try: result = r.json() clean_name = os.path.splitext(os.path.split(input_name)[1])[0] slots.extend([(slot['nzo_id'], slot['filename']) for slot in result['queue']['slots']]) except Exception: - logger.warning("Data from SABnzbd queue could not be parsed") - params['mode'] = "history" + logger.warning('Data from SABnzbd queue could not be parsed') + params['mode'] = 'history' try: r = requests.get(url, params=params, verify=False, timeout=(30, 120)) except requests.ConnectionError: - logger.error("Unable to open URL") + logger.error('Unable to open URL') return nzoid # failure try: result = r.json() clean_name = os.path.splitext(os.path.split(input_name)[1])[0] slots.extend([(slot['nzo_id'], slot['name']) for slot in result['history']['slots']]) except Exception: - logger.warning("Data from SABnzbd history could not be parsed") + logger.warning('Data from SABnzbd history could not be parsed') try: for nzo_id, name in slots: if name in [input_name, clean_name]: nzoid = nzo_id - logger.debug("Found nzoid: {0}".format(nzoid)) + logger.debug('Found nzoid: {0}'.format(nzoid)) break except Exception: - logger.warning("Data from SABnzbd could not be parsed") + logger.warning('Data from SABnzbd could not be parsed') return nzoid @@ -1014,13 +1014,13 @@ def clean_file_name(filename): space, but handles decimal numbers in string, for example: """ - filename = re.sub(r"(\D)\.(?!\s)(\D)", r"\1 \2", filename) - filename = re.sub(r"(\d)\.(\d{4})", r"\1 \2", filename) # if it ends in a year then don't keep the dot - filename = re.sub(r"(\D)\.(?!\s)", r"\1 ", filename) - filename = re.sub(r"\.(?!\s)(\D)", r" \1", filename) - filename = filename.replace("_", " ") - filename = re.sub("-$", "", filename) - filename = re.sub(r"^\[.*]", "", filename) + filename = re.sub(r'(\D)\.(?!\s)(\D)', r'\1 \2', filename) + filename = re.sub(r'(\d)\.(\d{4})', r'\1 \2', filename) # if it ends in a year 
then don't keep the dot + filename = re.sub(r'(\D)\.(?!\s)', r'\1 ', filename) + filename = re.sub(r'\.(?!\s)(\D)', r' \1', filename) + filename = filename.replace('_', ' ') + filename = re.sub('-$', '', filename) + filename = re.sub(r'^\[.*]', '', filename) return filename.strip() @@ -1039,7 +1039,7 @@ def is_media_file(mediafile, media=True, audio=True, meta=True, archives=True, o file_name, file_ext = os.path.splitext(mediafile) try: - # ignore MAC OS's "resource fork" files + # ignore MAC OS's 'resource fork' files if file_name.startswith('._'): return False except Exception: @@ -1111,14 +1111,14 @@ def find_imdbid(dir_name, input_name, omdb_api_key): m = re.search(r'(tt\d{7})', dir_name + input_name) if m: imdbid = m.group(1) - logger.info("Found imdbID [{0}]".format(imdbid)) + logger.info('Found imdbID [{0}]'.format(imdbid)) return imdbid if os.path.isdir(dir_name): for file in os.listdir(text_type(dir_name)): m = re.search(r'(tt\d{7})', file) if m: imdbid = m.group(1) - logger.info("Found imdbID [{0}] via file name".format(imdbid)) + logger.info('Found imdbID [{0}] via file name'.format(imdbid)) return imdbid if 'NZBPR__DNZB_MOREINFO' in os.environ: dnzb_more_info = os.environ.get('NZBPR__DNZB_MOREINFO', '') @@ -1127,7 +1127,7 @@ def find_imdbid(dir_name, input_name, omdb_api_key): m = regex.match(dnzb_more_info) if m: imdbid = m.group(1) - logger.info("Found imdbID [{0}] from DNZB-MoreInfo".format(imdbid)) + logger.info('Found imdbID [{0}] from DNZB-MoreInfo'.format(imdbid)) return imdbid logger.info('Searching IMDB for imdbID ...') try: @@ -1145,33 +1145,33 @@ def find_imdbid(dir_name, input_name, omdb_api_key): if 'year' in guess: year = guess['year'] - url = "http://www.omdbapi.com" + url = 'http://www.omdbapi.com' if not omdb_api_key: - logger.info("Unable to determine imdbID: No api key provided for ombdapi.com.") + logger.info('Unable to determine imdbID: No api key provided for ombdapi.com.') return - logger.debug("Opening URL: {0}".format(url)) + 
logger.debug('Opening URL: {0}'.format(url)) try: r = requests.get(url, params={'apikey': omdb_api_key, 'y': year, 't': title}, verify=False, timeout=(60, 300)) except requests.ConnectionError: - logger.error("Unable to open URL {0}".format(url)) + logger.error('Unable to open URL {0}'.format(url)) return try: results = r.json() except Exception: - logger.error("No json data returned from omdbapi.com") + logger.error('No json data returned from omdbapi.com') try: imdbid = results['imdbID'] except Exception: - logger.error("No imdbID returned from omdbapi.com") + logger.error('No imdbID returned from omdbapi.com') if imdbid: - logger.info("Found imdbID [{0}]".format(imdbid)) + logger.info('Found imdbID [{0}]'.format(imdbid)) return imdbid logger.warning('Unable to find a imdbID for {0}'.format(input_name)) @@ -1186,7 +1186,7 @@ def extract_files(src, dst=None, keep_archive=None): dir_path = os.path.dirname(inputFile) full_file_name = os.path.basename(inputFile) archive_name = os.path.splitext(full_file_name)[0] - archive_name = re.sub(r"part[0-9]+", "", archive_name) + archive_name = re.sub(r'part[0-9]+', '', archive_name) if dir_path in extracted_folder and archive_name in extracted_archive: continue # no need to extract this, but keep going to look for other archives and sub directories. 
@@ -1196,23 +1196,23 @@ def extract_files(src, dst=None, keep_archive=None): extracted_folder.append(dir_path) extracted_archive.append(archive_name) except Exception: - logger.error("Extraction failed for: {0}".format(full_file_name)) + logger.error('Extraction failed for: {0}'.format(full_file_name)) for folder in extracted_folder: for inputFile in list_media_files(folder, media=False, audio=False, meta=False, archives=True): full_file_name = os.path.basename(inputFile) archive_name = os.path.splitext(full_file_name)[0] - archive_name = re.sub(r"part[0-9]+", "", archive_name) + archive_name = re.sub(r'part[0-9]+', '', archive_name) if archive_name not in extracted_archive or keep_archive: continue # don't remove if we haven't extracted this archive, or if we want to preserve them. - logger.info("Removing extracted archive {0} from folder {1} ...".format(full_file_name, folder)) + logger.info('Removing extracted archive {0} from folder {1} ...'.format(full_file_name, folder)) try: if not os.access(inputFile, os.W_OK): os.chmod(inputFile, stat.S_IWUSR) os.remove(inputFile) time.sleep(1) except Exception as e: - logger.error("Unable to remove file {0} due to: {1}".format(inputFile, e)) + logger.error('Unable to remove file {0} due to: {1}'.format(inputFile, e)) def import_subs(filename): @@ -1232,23 +1232,23 @@ def import_subs(filename): if not languages: return - logger.info("Attempting to download subtitles for {0}".format(filename), 'SUBTITLES') + logger.info('Attempting to download subtitles for {0}'.format(filename), 'SUBTITLES') try: video = subliminal.scan_video(filename) subtitles = subliminal.download_best_subtitles({video}, languages) subliminal.save_subtitles(video, subtitles[video]) except Exception as e: - logger.error("Failed to download subtitles for {0} due to: {1}".format(filename, e), 'SUBTITLES') + logger.error('Failed to download subtitles for {0} due to: {1}'.format(filename, e), 'SUBTITLES') def server_responding(base_url): - 
logger.debug("Attempting to connect to server at {0}".format(base_url), 'SERVER') +    logger.debug('Attempting to connect to server at {0}'.format(base_url), 'SERVER')     try:         requests.get(base_url, timeout=(60, 120), verify=False) -        logger.debug("Server responded at {0}".format(base_url), 'SERVER') +        logger.debug('Server responded at {0}'.format(base_url), 'SERVER')         return True     except (requests.ConnectionError, requests.exceptions.Timeout): -        logger.error("Server failed to respond at {0}".format(base_url), 'SERVER') +        logger.error('Server failed to respond at {0}'.format(base_url), 'SERVER')         return False   @@ -1263,7 +1263,7 @@ def plex_update(category):     section = None     if not core.PLEXSEC:         return -    logger.debug("Attempting to update Plex Library for category {0}.".format(category), 'PLEX') +    logger.debug('Attempting to update Plex Library for category {0}.'.format(category), 'PLEX')     for item in core.PLEXSEC:         if item[0] == category:             section = item[1] @@ -1271,9 +1271,9 @@     if section:         url = '{url}{section}/refresh?X-Plex-Token={token}'.format(url=url, section=section, token=core.PLEXTOKEN)         requests.get(url, timeout=(60, 120), verify=False) -        logger.debug("Plex Library has been refreshed.", 'PLEX') +        logger.debug('Plex Library has been refreshed.', 'PLEX')     else: -        logger.debug("Could not identify section for plex update", 'PLEX') +        logger.debug('Could not identify section for plex update', 'PLEX')  def backup_versioned_file(old_file, version): @@ -1283,41 +1283,41 @@      while not os.path.isfile(new_file):         if not os.path.isfile(old_file): -            logger.log(u"Not creating backup, {file} doesn't exist".format(file=old_file), logger.DEBUG) +            logger.log(u'Not creating backup, {file} doesn\'t exist'.format(file=old_file), logger.DEBUG)             break          try: -            logger.log(u"Trying to back up {old} to {new]".format(old=old_file, new=new_file), logger.DEBUG) +            logger.log(u'Trying to back up {old} to {new}'.format(old=old_file, 
new=new_file), logger.DEBUG) shutil.copy(old_file, new_file) - logger.log(u"Backup done", logger.DEBUG) + logger.log(u'Backup done', logger.DEBUG) break except Exception as error: - logger.log(u"Error while trying to back up {old} to {new} : {msg}".format + logger.log(u'Error while trying to back up {old} to {new} : {msg}'.format (old=old_file, new=new_file, msg=error), logger.WARNING) num_tries += 1 time.sleep(1) - logger.log(u"Trying again.", logger.DEBUG) + logger.log(u'Trying again.', logger.DEBUG) if num_tries >= 10: - logger.log(u"Unable to back up {old} to {new} please do it manually.".format(old=old_file, new=new_file), logger.ERROR) + logger.log(u'Unable to back up {old} to {new} please do it manually.'.format(old=old_file, new=new_file), logger.ERROR) return False return True def update_download_info_status(input_name, status): - logger.db("Updating status of our download {0} in the DB to {1}".format(input_name, status)) + logger.db('Updating status of our download {0} in the DB to {1}'.format(input_name, status)) my_db = main_db.DBConnection() - my_db.action("UPDATE downloads SET status=?, last_update=? WHERE input_name=?", + my_db.action('UPDATE downloads SET status=?, last_update=? WHERE input_name=?', [status, datetime.date.today().toordinal(), text_type(input_name)]) def get_download_info(input_name, status): - logger.db("Getting download info for {0} from the DB".format(input_name)) + logger.db('Getting download info for {0} from the DB'.format(input_name)) my_db = main_db.DBConnection() - sql_results = my_db.select("SELECT * FROM downloads WHERE input_name=? AND status=?", + sql_results = my_db.select('SELECT * FROM downloads WHERE input_name=? 
AND status=?', [text_type(input_name), status]) return sql_results @@ -1326,7 +1326,7 @@ def get_download_info(input_name, status): class WindowsProcess(object): def __init__(self): self.mutex = None - self.mutexname = "nzbtomedia_{pid}".format(pid=core.PID_FILE.replace('\\', '/')) # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}" + self.mutexname = 'nzbtomedia_{pid}'.format(pid=core.PID_FILE.replace('\\', '/')) # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}' self.CreateMutex = CreateMutex self.CloseHandle = CloseHandle self.GetLastError = GetLastError @@ -1358,13 +1358,13 @@ class PosixProcess(object): self.lasterror = False return self.lasterror except socket.error as e: - if "Address already in use" in e: + if 'Address already in use' in e: self.lasterror = True return self.lasterror except AttributeError: pass if os.path.exists(self.pidpath): - # Make sure it is not a "stale" pidFile + # Make sure it is not a 'stale' pidFile try: pid = int(open(self.pidpath, 'r').read().strip()) except Exception: diff --git a/core/version_check.py b/core/version_check.py index e818c907..a414f4b9 100644 --- a/core/version_check.py +++ b/core/version_check.py @@ -66,13 +66,13 @@ class CheckVersion(object): """ if not core.VERSION_NOTIFY and not force: - logger.log(u"Version checking is disabled, not checking for the newest version") + logger.log(u'Version checking is disabled, not checking for the newest version') return False - logger.log(u"Checking if {install} needs an update".format(install=self.install_type)) + logger.log(u'Checking if {install} needs an update'.format(install=self.install_type)) if not self.updater.need_update(): core.NEWEST_VERSION_STRING = None - logger.log(u"No update needed") + logger.log(u'No update needed') return False self.updater.set_newest_text() @@ -116,19 +116,19 @@ class GitUpdateManager(UpdateManager): test_cmd = 'version' if core.GIT_PATH: - main_git = '"{git}"'.format(git=core.GIT_PATH) + main_git = '"{git}"'.format(git=core.GIT_PATH) else: main_git = 
'git' - logger.log(u"Checking if we can use git commands: {git} {cmd}".format + logger.log(u'Checking if we can use git commands: {git} {cmd}'.format (git=main_git, cmd=test_cmd), logger.DEBUG) output, err, exit_status = self._run_git(main_git, test_cmd) if exit_status == 0: - logger.log(u"Using: {git}".format(git=main_git), logger.DEBUG) + logger.log(u'Using: {git}'.format(git=main_git), logger.DEBUG) return main_git else: - logger.log(u"Not using: {git}".format(git=main_git), logger.DEBUG) + logger.log(u'Not using: {git}'.format(git=main_git), logger.DEBUG) # trying alternatives @@ -143,18 +143,18 @@ class GitUpdateManager(UpdateManager): alternative_git.append(main_git.lower()) if alternative_git: - logger.log(u"Trying known alternative git locations", logger.DEBUG) + logger.log(u'Trying known alternative git locations', logger.DEBUG) for cur_git in alternative_git: - logger.log(u"Checking if we can use git commands: {git} {cmd}".format + logger.log(u'Checking if we can use git commands: {git} {cmd}'.format (git=cur_git, cmd=test_cmd), logger.DEBUG) output, err, exit_status = self._run_git(cur_git, test_cmd) if exit_status == 0: - logger.log(u"Using: {git}".format(git=cur_git), logger.DEBUG) + logger.log(u'Using: {git}'.format(git=cur_git), logger.DEBUG) return cur_git else: - logger.log(u"Not using: {git}".format(git=cur_git), logger.DEBUG) + logger.log(u'Not using: {git}'.format(git=cur_git), logger.DEBUG) # Still haven't found a working git logger.debug('Unable to find your git executable - ' @@ -169,14 +169,14 @@ class GitUpdateManager(UpdateManager): err = None if not git_path: - logger.log(u"No git specified, can't use git commands", logger.DEBUG) + logger.log(u'No git specified, can\'t use git commands', logger.DEBUG) exit_status = 1 return output, err, exit_status cmd = '{git} {args}'.format(git=git_path, args=args) try: - logger.log(u"Executing {cmd} with your shell in {directory}".format + logger.log(u'Executing {cmd} with your shell in 
{directory}'.format (cmd=cmd, directory=core.APP_ROOT), logger.DEBUG) p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=core.APP_ROOT) @@ -188,22 +188,22 @@ class GitUpdateManager(UpdateManager): if output: output = output.strip() if core.LOG_GIT: - logger.log(u"git output: {output}".format(output=output), logger.DEBUG) + logger.log(u'git output: {output}'.format(output=output), logger.DEBUG) except OSError: - logger.log(u"Command {cmd} didn't work".format(cmd=cmd)) + logger.log(u'Command {cmd} didn\'t work'.format(cmd=cmd)) exit_status = 1 exit_status = 128 if ('fatal:' in output) or err else exit_status if exit_status == 0: - logger.log(u"{cmd} : returned successful".format(cmd=cmd), logger.DEBUG) + logger.log(u'{cmd} : returned successful'.format(cmd=cmd), logger.DEBUG) exit_status = 0 elif core.LOG_GIT and exit_status in (1, 128): - logger.log(u"{cmd} returned : {output}".format + logger.log(u'{cmd} returned : {output}'.format (cmd=cmd, output=output), logger.DEBUG) else: if core.LOG_GIT: - logger.log(u"{cmd} returned : {output}, treat as error for now".format + logger.log(u'{cmd} returned : {output}, treat as error for now'.format (cmd=cmd, output=output), logger.DEBUG) exit_status = 1 @@ -223,7 +223,7 @@ class GitUpdateManager(UpdateManager): if exit_status == 0 and output: cur_commit_hash = output.strip() if not re.match('^[a-z0-9]+$', cur_commit_hash): - logger.log(u"Output doesn't look like a hash, not using it", logger.ERROR) + logger.log(u'Output doesn\'t look like a hash, not using it', logger.ERROR) return False self._cur_commit_hash = cur_commit_hash if self._cur_commit_hash: @@ -256,56 +256,56 @@ class GitUpdateManager(UpdateManager): output, err, exit_status = self._run_git(self._git_path, 'fetch origin') if not exit_status == 0: - logger.log(u"Unable to contact github, can't check for update", logger.ERROR) + logger.log(u'Unable to contact github, can\'t check for update', 
logger.ERROR) return # get latest commit_hash from remote - output, err, exit_status = self._run_git(self._git_path, 'rev-parse --verify --quiet "@{upstream}"') + output, err, exit_status = self._run_git(self._git_path, 'rev-parse --verify --quiet "@{upstream}"') if exit_status == 0 and output: cur_commit_hash = output.strip() if not re.match('^[a-z0-9]+$', cur_commit_hash): - logger.log(u"Output doesn't look like a hash, not using it", logger.DEBUG) + logger.log(u'Output doesn\'t look like a hash, not using it', logger.DEBUG) return else: self._newest_commit_hash = cur_commit_hash else: - logger.log(u"git didn't return newest commit hash", logger.DEBUG) + logger.log(u'git didn\'t return newest commit hash', logger.DEBUG) return # get number of commits behind and ahead (option --count not supported git < 1.7.2) - output, err, exit_status = self._run_git(self._git_path, 'rev-list --left-right "@{upstream}"...HEAD') + output, err, exit_status = self._run_git(self._git_path, 'rev-list --left-right "@{upstream}"...HEAD') if exit_status == 0 and output: try: - self._num_commits_behind = int(output.count("<")) - self._num_commits_ahead = int(output.count(">")) + self._num_commits_behind = int(output.count('<')) + self._num_commits_ahead = int(output.count('>')) except Exception: - logger.log(u"git didn't return numbers for behind and ahead, not using it", logger.DEBUG) + logger.log(u'git didn\'t return numbers for behind and ahead, not using it', logger.DEBUG) return - logger.log(u"cur_commit = {current} % (newest_commit)= {new}, " - u"num_commits_behind = {x}, num_commits_ahead = {y}".format + logger.log(u'cur_commit = {current} % (newest_commit)= {new}, ' + u'num_commits_behind = {x}, num_commits_ahead = {y}'.format (current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind, y=self._num_commits_ahead), logger.DEBUG) def set_newest_text(self): if self._num_commits_ahead: - logger.log(u"Local branch is ahead of {branch}. 
Automatic update not possible.".format + logger.log(u'Local branch is ahead of {branch}. Automatic update not possible.'.format (branch=self.branch), logger.ERROR) elif self._num_commits_behind: - logger.log(u"There is a newer version available (you're {x} commit{s} behind)".format + logger.log(u'There is a newer version available (you\'re {x} commit{s} behind)'.format (x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE) else: return def need_update(self): if not self._find_installed_version(): - logger.error("Unable to determine installed version via git, please check your logs!") + logger.error('Unable to determine installed version via git, please check your logs!') return False if not self._cur_commit_hash: @@ -314,7 +314,7 @@ class GitUpdateManager(UpdateManager): try: self._check_github_for_update() except Exception as error: - logger.log(u"Unable to contact github, can't check for update: {msg!r}".format(msg=error), logger.ERROR) + logger.log(u'Unable to contact github, can\'t check for update: {msg!r}'.format(msg=error), logger.ERROR) return False if self._num_commits_behind > 0: @@ -358,7 +358,7 @@ class SourceUpdateManager(UpdateManager): with open(version_file, 'r') as fp: self._cur_commit_hash = fp.read().strip(' \n\r') except EnvironmentError as error: - logger.log(u"Unable to open 'version.txt': {msg}".format(msg=error), logger.DEBUG) + logger.log(u'Unable to open \'version.txt\': {msg}'.format(msg=error), logger.DEBUG) if not self._cur_commit_hash: self._cur_commit_hash = None @@ -372,7 +372,7 @@ class SourceUpdateManager(UpdateManager): try: self._check_github_for_update() except Exception as error: - logger.log(u"Unable to contact github, can't check for update: {msg!r}".format(msg=error), logger.ERROR) + logger.log(u'Unable to contact github, can\'t check for update: {msg!r}'.format(msg=error), logger.ERROR) return False if not self._cur_commit_hash or self._num_commits_behind > 0: @@ -418,7 +418,7 @@ 
class SourceUpdateManager(UpdateManager): # when _cur_commit_hash doesn't match anything _num_commits_behind == 100 self._num_commits_behind += 1 - logger.log(u"cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}".format + logger.log(u'cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}'.format (current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG) def set_newest_text(self): @@ -427,9 +427,9 @@ class SourceUpdateManager(UpdateManager): core.NEWEST_VERSION_STRING = None if not self._cur_commit_hash: - logger.log(u"Unknown current version number, don't know if we should update or not", logger.ERROR) + logger.log(u'Unknown current version number, don\'t know if we should update or not', logger.ERROR) elif self._num_commits_behind > 0: - logger.log(u"There is a newer version available (you're {x} commit{s} behind)".format + logger.log(u'There is a newer version available (you\'re {x} commit{s} behind)'.format (x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE) else: return @@ -447,47 +447,47 @@ class SourceUpdateManager(UpdateManager): sb_update_dir = os.path.join(core.APP_ROOT, u'sb-update') if os.path.isdir(sb_update_dir): - logger.log(u"Clearing out update folder {dir} before extracting".format(dir=sb_update_dir)) + logger.log(u'Clearing out update folder {dir} before extracting'.format(dir=sb_update_dir)) shutil.rmtree(sb_update_dir) - logger.log(u"Creating update folder {dir} before extracting".format(dir=sb_update_dir)) + logger.log(u'Creating update folder {dir} before extracting'.format(dir=sb_update_dir)) os.makedirs(sb_update_dir) # retrieve file - logger.log(u"Downloading update from {url!r}".format(url=tar_download_url)) + logger.log(u'Downloading update from {url!r}'.format(url=tar_download_url)) tar_download_path = os.path.join(sb_update_dir, u'nzbtomedia-update.tar') urlretrieve(tar_download_url, tar_download_path) if 
not os.path.isfile(tar_download_path): - logger.log(u"Unable to retrieve new version from {url}, can't update".format + logger.log(u'Unable to retrieve new version from {url}, can\'t update'.format (url=tar_download_url), logger.ERROR) return False if not tarfile.is_tarfile(tar_download_path): - logger.log(u"Retrieved version from {url} is corrupt, can't update".format + logger.log(u'Retrieved version from {url} is corrupt, can\'t update'.format (url=tar_download_url), logger.ERROR) return False # extract to sb-update dir - logger.log(u"Extracting file {path}".format(path=tar_download_path)) + logger.log(u'Extracting file {path}'.format(path=tar_download_path)) tar = tarfile.open(tar_download_path) tar.extractall(sb_update_dir) tar.close() # delete .tar.gz - logger.log(u"Deleting file {path}".format(path=tar_download_path)) + logger.log(u'Deleting file {path}'.format(path=tar_download_path)) os.remove(tar_download_path) # find update dir name update_dir_contents = [x for x in os.listdir(sb_update_dir) if os.path.isdir(os.path.join(sb_update_dir, x))] if len(update_dir_contents) != 1: - logger.log(u"Invalid update data, update failed: {0}".format(update_dir_contents), logger.ERROR) + logger.log(u'Invalid update data, update failed: {0}'.format(update_dir_contents), logger.ERROR) return False content_dir = os.path.join(sb_update_dir, update_dir_contents[0]) # walk temp folder and move files to main folder - logger.log(u"Moving files from {source} to {destination}".format + logger.log(u'Moving files from {source} to {destination}'.format (source=content_dir, destination=core.APP_ROOT)) for dirname, dirnames, filenames in os.walk(content_dir): # @UnusedVariable dirname = dirname[len(content_dir) + 1:] @@ -504,7 +504,7 @@ class SourceUpdateManager(UpdateManager): os.remove(new_path) os.renames(old_path, new_path) except Exception as error: - logger.log(u"Unable to update {path}: {msg}".format + logger.log(u'Unable to update {path}: {msg}'.format (path=new_path, 
msg=error), logger.DEBUG) os.remove(old_path) # Trash the updated file without moving in new path continue @@ -518,14 +518,14 @@ class SourceUpdateManager(UpdateManager): with open(version_path, 'w') as ver_file: ver_file.write(self._newest_commit_hash) except EnvironmentError as error: - logger.log(u"Unable to write version file, update not complete: {msg}".format + logger.log(u'Unable to write version file, update not complete: {msg}'.format (msg=error), logger.ERROR) return False except Exception as error: - logger.log(u"Error while trying to update: {msg}".format + logger.log(u'Error while trying to update: {msg}'.format (msg=error), logger.ERROR) - logger.log(u"Traceback: {error}".format(error=traceback.format_exc()), logger.DEBUG) + logger.log(u'Traceback: {error}'.format(error=traceback.format_exc()), logger.DEBUG) return False return True diff --git a/libs/custom/utorrent/client.py b/libs/custom/utorrent/client.py index a429f1ed..2be51c6d 100644 --- a/libs/custom/utorrent/client.py +++ b/libs/custom/utorrent/client.py @@ -31,7 +31,7 @@ class UTorrentClient(object): # TODO refresh token, when necessary def _make_opener(self, realm, base_url, username, password): - '''uTorrent API need HTTP Basic Auth and cookie support for token verify.''' + """uTorrent API need HTTP Basic Auth and cookie support for token verify.""" auth_handler = HTTPBasicAuthHandler() auth_handler.add_password(realm=realm, diff --git a/nzbToCouchPotato.py b/nzbToCouchPotato.py index 00ffbd13..0cf8cd2e 100755 --- a/nzbToCouchPotato.py +++ b/nzbToCouchPotato.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "CouchPotato" +section = 'CouchPotato' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git a/nzbToGamez.py b/nzbToGamez.py index 31a3512c..a297bc56 100755 --- a/nzbToGamez.py +++ b/nzbToGamez.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "Gamez" +section = 'Gamez' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git 
a/nzbToHeadPhones.py b/nzbToHeadPhones.py index 0d85cdf4..530a3eb2 100755 --- a/nzbToHeadPhones.py +++ b/nzbToHeadPhones.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "HeadPhones" +section = 'HeadPhones' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git a/nzbToLidarr.py b/nzbToLidarr.py index 8567e658..870f7a60 100755 --- a/nzbToLidarr.py +++ b/nzbToLidarr.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "Lidarr" +section = 'Lidarr' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git a/nzbToMedia.py b/nzbToMedia.py index 5e904c91..de591d84 100755 --- a/nzbToMedia.py +++ b/nzbToMedia.py @@ -29,7 +29,7 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d logger.error( 'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format( input_directory)) - return [-1, ""] + return [-1, ''] if not download_id and client_agent == 'sabnzbd': download_id = get_nzoid(input_name) @@ -48,16 +48,16 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d except Exception: pass - control_value_dict = {"input_directory": text_type(input_directory1)} + control_value_dict = {'input_directory': text_type(input_directory1)} new_value_dict = { - "input_name": text_type(input_name1), - "input_hash": text_type(download_id), - "input_id": text_type(download_id), - "client_agent": text_type(client_agent), - "status": 0, - "last_update": datetime.date.today().toordinal(), + 'input_name': text_type(input_name1), + 'input_hash': text_type(download_id), + 'input_id': text_type(download_id), + 'client_agent': text_type(client_agent), + 'status': 0, + 'last_update': datetime.date.today().toordinal(), } - my_db.upsert("downloads", new_value_dict, control_value_dict) + my_db.upsert('downloads', new_value_dict, control_value_dict) # auto-detect section if input_category is None: @@ -65,41 +65,41 @@ 
def process(input_directory, input_name=None, status=0, client_agent='manual', d usercat = input_category section = core.CFG.findsection(input_category).isenabled() if section is None: - section = core.CFG.findsection("ALL").isenabled() + section = core.CFG.findsection('ALL').isenabled() if section is None: logger.error( 'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format( input_category)) - return [-1, ""] + return [-1, ''] else: - usercat = "ALL" + usercat = 'ALL' if len(section) > 1: logger.error( 'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format( input_category, section.keys())) - return [-1, ""] + return [-1, ''] if section: section_name = section.keys()[0] logger.info('Auto-detected SECTION:{0}'.format(section_name)) else: - logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format( + logger.error('Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!'.format( input_category)) - return [-1, ""] + return [-1, ''] cfg = dict(core.CFG[section_name][usercat]) - extract = int(cfg.get("extract", 0)) + extract = int(cfg.get('extract', 0)) try: - if int(cfg.get("remote_path")) and not core.REMOTEPATHS: + if int(cfg.get('remote_path')) and not core.REMOTEPATHS: logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. 
Please check your autoProcessMedia.cfg, exiting!'.format( section_name, input_category)) - return [-1, ""] + return [-1, ''] except Exception: logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format( - core.get("remote_path"), section_name, input_category)) + core.get('remote_path'), section_name, input_category)) input_name, input_directory = convert_to_ascii(input_name, input_directory) @@ -107,23 +107,23 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory)) extract_files(input_directory) - logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, input_category, input_name)) + logger.info('Calling {0}:{1} to post-process:{2}'.format(section_name, input_category, input_name)) - if section_name in ["CouchPotato", "Radarr"]: + if section_name in ['CouchPotato', 'Radarr']: result = movies.process(section_name, input_directory, input_name, status, client_agent, download_id, input_category, failure_link) - elif section_name in ["SickBeard", "NzbDrone", "Sonarr"]: + elif section_name in ['SickBeard', 'NzbDrone', 'Sonarr']: result = tv.process(section_name, input_directory, input_name, status, client_agent, download_id, input_category, failure_link) - elif section_name in ["HeadPhones", "Lidarr"]: + elif section_name in ['HeadPhones', 'Lidarr']: result = music.process(section_name, input_directory, input_name, status, client_agent, input_category) - elif section_name == "Mylar": + elif section_name == 'Mylar': result = comics.process(section_name, input_directory, input_name, status, client_agent, input_category) - elif section_name == "Gamez": + elif section_name == 'Gamez': result = games.process(section_name, input_directory, input_name, status, client_agent, input_category) elif section_name == 'UserScript': result = external_script(input_directory, input_name, 
input_category, section[usercat]) else: result = ProcessResult( - message="", + message='', status_code=-1, ) @@ -144,16 +144,16 @@ def main(args, section=None): # Initialize the config core.initialize(section) - logger.info("#########################################################") - logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__))) - logger.info("#########################################################") + logger.info('#########################################################') + logger.info('## ..::[{0}]::.. ##'.format(os.path.basename(__file__))) + logger.info('#########################################################') # debug command line options - logger.debug("Options passed into nzbToMedia: {0}".format(args)) + logger.debug('Options passed into nzbToMedia: {0}'.format(args)) # Post-Processing Result result = ProcessResult( - message="", + message='', status_code=0, ) status = 0 @@ -162,26 +162,26 @@ def main(args, section=None): if 'NZBOP_SCRIPTDIR' in os.environ: # Check if the script is called from nzbget 11.0 or later if os.environ['NZBOP_VERSION'][0:5] < '11.0': - logger.error("NZBGet Version {0} is not supported. Please update NZBGet.".format(os.environ['NZBOP_VERSION'])) + logger.error('NZBGet Version {0} is not supported. 
Please update NZBGet.'.format(os.environ['NZBOP_VERSION'])) sys.exit(core.NZBGET_POSTPROCESS_ERROR) - logger.info("Script triggered from NZBGet Version {0}.".format(os.environ['NZBOP_VERSION'])) + logger.info('Script triggered from NZBGet Version {0}.'.format(os.environ['NZBOP_VERSION'])) # Check if the script is called from nzbget 13.0 or later if 'NZBPP_TOTALSTATUS' in os.environ: if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS': - logger.info("Download failed with status {0}.".format(os.environ['NZBPP_STATUS'])) + logger.info('Download failed with status {0}.'.format(os.environ['NZBPP_STATUS'])) status = 1 else: # Check par status if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4': - logger.warning("Par-repair failed, setting status \"failed\"") + logger.warning('Par-repair failed, setting status \'failed\'') status = 1 # Check unpack status if os.environ['NZBPP_UNPACKSTATUS'] == '1': - logger.warning("Unpack failed, setting status \"failed\"") + logger.warning('Unpack failed, setting status \'failed\'') status = 1 if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0': @@ -189,17 +189,17 @@ def main(args, section=None): if os.environ['NZBPP_HEALTH'] < 1000: logger.warning( - "Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"") - logger.info("Please check your Par-check/repair settings for future downloads.") + 'Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \'failed\'') + logger.info('Please check your Par-check/repair settings for future downloads.') status = 1 else: logger.info( - "Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful") - logger.info("Please check your Par-check/repair settings for future downloads.") + 'Par-check/repair disabled or no .par2 files found, and Unpack not required. 
Health is ok so handle as though download successful') + logger.info('Please check your Par-check/repair settings for future downloads.') # Check for download_id to pass to CouchPotato - download_id = "" + download_id = '' failure_link = None if 'NZBPR_COUCHPOTATO' in os.environ: download_id = os.environ['NZBPR_COUCHPOTATO'] @@ -224,13 +224,13 @@ def main(args, section=None): # SABnzbd argv: # 1 The final directory of the job (full path) # 2 The original name of the NZB file - # 3 Clean version of the job name (no path info and ".nzb" removed) + # 3 Clean version of the job name (no path info and '.nzb' removed) # 4 Indexer's report number (if supported) # 5 User-defined category # 6 Group that the NZB was posted in e.g. alt.binaries.x # 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2 client_agent = 'sabnzbd' - logger.info("Script triggered from SABnzbd") + logger.info('Script triggered from SABnzbd') result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent, download_id='') # SABnzbd 0.7.17+ @@ -238,36 +238,36 @@ def main(args, section=None): # SABnzbd argv: # 1 The final directory of the job (full path) # 2 The original name of the NZB file - # 3 Clean version of the job name (no path info and ".nzb" removed) + # 3 Clean version of the job name (no path info and '.nzb' removed) # 4 Indexer's report number (if supported) # 5 User-defined category # 6 Group that the NZB was posted in e.g. alt.binaries.x # 7 Status of post processing. 
0 = OK, 1=failed verification, 2=failed unpack, 3=1+2 # 8 Failure URL client_agent = 'sabnzbd' - logger.info("Script triggered from SABnzbd 0.7.17+") + logger.info('Script triggered from SABnzbd 0.7.17+') result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent, download_id='', failure_link=''.join(args[8:])) # Generic program elif len(args) > 5 and args[5] == 'generic': - logger.info("Script triggered from generic program") + logger.info('Script triggered from generic program') result = process(args[1], input_name=args[2], input_category=args[3], download_id=args[4]) else: # Perform Manual Post-Processing - logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...") + logger.warning('Invalid number of arguments received from client, Switching to manual run mode ...') for section, subsections in core.SECTIONS.items(): for subsection in subsections: if not core.CFG[section][subsection].isenabled(): continue for dir_name in get_dirs(section, subsection, link='move'): - logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dir_name)) - logger.info("Checking database for download info for {0} ...".format(os.path.basename(dir_name))) + logger.info('Starting manual run for {0}:{1} - Folder: {2}'.format(section, subsection, dir_name)) + logger.info('Checking database for download info for {0} ...'.format(os.path.basename(dir_name))) core.DOWNLOADINFO = get_download_info(os.path.basename(dir_name), 0) if core.DOWNLOADINFO: - logger.info("Found download info for {0}, " - "setting variables now ...".format + logger.info('Found download info for {0}, ' + 'setting variables now ...'.format (os.path.basename(dir_name))) client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual')) download_id = text_type(core.DOWNLOADINFO[0].get('input_id', '')) @@ -294,21 +294,21 @@ def main(args, section=None): results = process(dir_name, input_name, 
0, client_agent=client_agent, download_id=download_id or None, input_category=subsection) if results.status_code != 0: - logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format + logger.error('A problem was reported when trying to perform a manual run for {0}:{1}.'.format (section, subsection)) result = results if result.status_code == 0: - logger.info("The {0} script completed successfully.".format(args[0])) + logger.info('The {0} script completed successfully.'.format(args[0])) if result.message: - print(result.message + "!") + print(result.message + '!') if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11 del core.MYAPP return core.NZBGET_POSTPROCESS_SUCCESS else: - logger.error("A problem was reported in the {0} script.".format(args[0])) + logger.error('A problem was reported in the {0} script.'.format(args[0])) if result.message: - print(result.message + "!") + print(result.message + '!') if 'NZBOP_SCRIPTDIR' in os.environ: # return code for nzbget v11 del core.MYAPP return core.NZBGET_POSTPROCESS_ERROR diff --git a/nzbToMylar.py b/nzbToMylar.py index 57628cdc..0dd205c2 100755 --- a/nzbToMylar.py +++ b/nzbToMylar.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "Mylar" +section = 'Mylar' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git a/nzbToNzbDrone.py b/nzbToNzbDrone.py index e300c764..3c721381 100755 --- a/nzbToNzbDrone.py +++ b/nzbToNzbDrone.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "NzbDrone" +section = 'NzbDrone' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git a/nzbToRadarr.py b/nzbToRadarr.py index d3c86006..d73a20e1 100755 --- a/nzbToRadarr.py +++ b/nzbToRadarr.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "Radarr" +section = 'Radarr' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git a/nzbToSickBeard.py b/nzbToSickBeard.py index 294b6c69..78dc7285 100755 --- a/nzbToSickBeard.py +++ 
b/nzbToSickBeard.py @@ -5,6 +5,6 @@ import sys import nzbToMedia -section = "SickBeard" +section = 'SickBeard' result = nzbToMedia.main(sys.argv, section) sys.exit(result) diff --git a/tests/general.py b/tests/general.py index 5552e196..ed0ef06c 100755 --- a/tests/general.py +++ b/tests/general.py @@ -14,13 +14,13 @@ from core.utils import server_responding # Initialize the config core.initialize() -# label = core.TORRENT_CLASS.core.get_torrent_status("f33a9c4b15cbd9170722d700069af86746817ade", ["label"]).get()['label'] +# label = core.TORRENT_CLASS.core.get_torrent_status('f33a9c4b15cbd9170722d700069af86746817ade', ['label']).get()['label'] # print(label) if transcoder.is_video_good(core.TEST_FILE, 0): - print("FFPROBE Works") + print('FFPROBE Works') else: - print("FFPROBE FAILED") + print('FFPROBE FAILED') test = core.CFG['SickBeard', 'NzbDrone']['tv'].isenabled() print(test) @@ -29,22 +29,22 @@ print(section) print(len(section)) fork, fork_params = auto_fork('SickBeard', 'tv') -if server_responding("http://127.0.0.1:5050"): - print("CouchPotato Running") -if server_responding("http://127.0.0.1:7073"): - print("SickBeard Running") -if server_responding("http://127.0.0.1:8181"): - print("HeadPhones Running") -if server_responding("http://127.0.0.1:8085"): - print("Gamez Running") -if server_responding("http://127.0.0.1:8090"): - print("Mylar Running") +if server_responding('http://127.0.0.1:5050'): + print('CouchPotato Running') +if server_responding('http://127.0.0.1:7073'): + print('SickBeard Running') +if server_responding('http://127.0.0.1:8181'): + print('HeadPhones Running') +if server_responding('http://127.0.0.1:8085'): + print('Gamez Running') +if server_responding('http://127.0.0.1:8090'): + print('Mylar Running') lan = 'pt' lan = Language.fromalpha2(lan) print(lan.alpha3) -vidName = "/volume1/Public/Movies/A Few Good Men/A Few Good Men(1992).mkv" -inputName = "in.the.name.of.ben.hur.2016.bdrip.x264-rusted.nzb" +vidName = '/volume1/Public/Movies/A Few 
Good Men/A Few Good Men(1992).mkv' +inputName = 'in.the.name.of.ben.hur.2016.bdrip.x264-rusted.nzb' guess = guessit.guessit(inputName) if guess: # Movie Title @@ -55,7 +55,7 @@ if guess: year = None if 'year' in guess: year = guess['year'] - url = "http://www.omdbapi.com" + url = 'http://www.omdbapi.com' r = requests.get(url, params={'y': year, 't': title}, verify=False, timeout=(60, 300)) results = r.json() print(results)