Merge pull request #1431 from clinton-hall/quality/pep8

Various PEP8 fixes
Commit 7798a71448 — authored by Labrys of Knossos, 2018-12-16 23:40:11 -05:00, committed via GitHub
20 changed files with 1341 additions and 1341 deletions
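The changes below are almost entirely mechanical PEP8 renames: camelCase functions and variables become snake_case (processTorrent → process_torrent, inputDirectory → input_directory), and the verb-style autoProcess* helper classes become CapWords nouns (autoProcessMovie → Movie, autoProcessTV → TV). As an illustration of the convention only — this helper is not part of the PR and the names in the loop are just samples pulled from the diff — a small, hypothetical converter captures the camelCase-to-snake_case rule applied throughout:

import re

def camel_to_snake(name):
    # Hypothetical helper, not from this PR: insert an underscore before each
    # capital letter that follows a lowercase letter or digit, then lowercase.
    return re.sub(r'(?<=[a-z0-9])([A-Z])', r'_\1', name).lower()

# A few names taken from the diff below:
for old in ["processTorrent", "inputDirectory", "outputDestination", "sanitizeName"]:
    print(old, "->", camel_to_snake(old))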


@@ -7,69 +7,69 @@ import sys
import core
from core import logger, nzbToMediaDB
from core.nzbToMediaUserScript import external_script
-from core.nzbToMediaUtil import CharReplace, convert_to_ascii, plex_update, replace_links
+from core.nzbToMediaUtil import char_replace, convert_to_ascii, plex_update, replace_links
from libs.six import text_type
-def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent):
+def process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent):
status = 1 # 1 = failed | 0 = success
root = 0
-foundFile = 0
+found_file = 0
-if clientAgent != 'manual' and not core.DOWNLOADINFO:
+if client_agent != 'manual' and not core.DOWNLOADINFO:
-logger.debug('Adding TORRENT download info for directory {0} to database'.format(inputDirectory))
+logger.debug('Adding TORRENT download info for directory {0} to database'.format(input_directory))
-myDB = nzbToMediaDB.DBConnection()
+my_db = nzbToMediaDB.DBConnection()
-inputDirectory1 = inputDirectory
+input_directory1 = input_directory
-inputName1 = inputName
+input_name1 = input_name
try:
-encoded, inputDirectory1 = CharReplace(inputDirectory)
+encoded, input_directory1 = char_replace(input_directory)
-encoded, inputName1 = CharReplace(inputName)
+encoded, input_name1 = char_replace(input_name)
except:
pass
-controlValueDict = {"input_directory": text_type(inputDirectory1)}
+control_value_dict = {"input_directory": text_type(input_directory1)}
-newValueDict = {"input_name": text_type(inputName1),
+new_value_dict = {"input_name": text_type(input_name1),
-"input_hash": text_type(inputHash),
+"input_hash": text_type(input_hash),
-"input_id": text_type(inputID),
+"input_id": text_type(input_id),
-"client_agent": text_type(clientAgent),
+"client_agent": text_type(client_agent),
"status": 0,
"last_update": datetime.date.today().toordinal()
}
-myDB.upsert("downloads", newValueDict, controlValueDict)
+my_db.upsert("downloads", new_value_dict, control_value_dict)
-logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory))
+logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(input_directory, input_name, input_category))
# Confirm the category by parsing directory structure
-inputDirectory, inputName, inputCategory, root = core.category_search(inputDirectory, inputName, inputCategory,
+input_directory, input_name, input_category, root = core.category_search(input_directory, input_name, input_category,
root, core.CATEGORIES)
-if inputCategory == "":
+if input_category == "":
-inputCategory = "UNCAT"
+input_category = "UNCAT"
-usercat = inputCategory
+usercat = input_category
try:
-inputName = inputName.encode(core.SYS_ENCODING)
+input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
try:
-inputDirectory = inputDirectory.encode(core.SYS_ENCODING)
+input_directory = input_directory.encode(core.SYS_ENCODING)
except UnicodeError:
pass
logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format
-(inputDirectory, inputName, inputCategory))
+(input_directory, input_name, input_category))
# auto-detect section
-section = core.CFG.findsection(inputCategory).isenabled()
+section = core.CFG.findsection(input_category).isenabled()
if section is None:
section = core.CFG.findsection("ALL").isenabled()
if section is None:
logger.error('Category:[{0}] is not defined or is not enabled. '
'Please rename it or ensure it is enabled for the appropriate section '
'in your autoProcessMedia.cfg and try again.'.format
-(inputCategory))
+(input_category))
return [-1, ""]
else:
usercat = "ALL"
@@ -82,95 +82,95 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
return [-1, ""]
if section:
-sectionName = section.keys()[0]
+section_name = section.keys()[0]
-logger.info('Auto-detected SECTION:{0}'.format(sectionName))
+logger.info('Auto-detected SECTION:{0}'.format(section_name))
else:
logger.error("Unable to locate a section with subsection:{0} "
"enabled in your autoProcessMedia.cfg, exiting!".format
-(inputCategory))
+(input_category))
return [-1, ""]
-section = dict(section[sectionName][usercat]) # Type cast to dict() to allow effective usage of .get()
+section = dict(section[section_name][usercat]) # Type cast to dict() to allow effective usage of .get()
-Torrent_NoLink = int(section.get("Torrent_NoLink", 0))
+torrent_no_link = int(section.get("Torrent_NoLink", 0))
keep_archive = int(section.get("keep_archive", 0))
extract = int(section.get('extract', 0))
extensions = section.get('user_script_mediaExtensions', "").lower().split(',')
-uniquePath = int(section.get("unique_path", 1))
+unique_path = int(section.get("unique_path", 1))
-if clientAgent != 'manual':
+if client_agent != 'manual':
-core.pause_torrent(clientAgent, inputHash, inputID, inputName)
+core.pause_torrent(client_agent, input_hash, input_id, input_name)
# In case input is not directory, make sure to create one.
# This way Processing is isolated.
-if not os.path.isdir(os.path.join(inputDirectory, inputName)):
+if not os.path.isdir(os.path.join(input_directory, input_name)):
-basename = os.path.basename(inputDirectory)
+basename = os.path.basename(input_directory)
-basename = core.sanitizeName(inputName) \
+basename = core.sanitize_name(input_name) \
-if inputName == basename else os.path.splitext(core.sanitizeName(inputName))[0]
+if input_name == basename else os.path.splitext(core.sanitize_name(input_name))[0]
-outputDestination = os.path.join(core.OUTPUTDIRECTORY, inputCategory, basename)
+output_destination = os.path.join(core.OUTPUTDIRECTORY, input_category, basename)
-elif uniquePath:
+elif unique_path:
-outputDestination = os.path.normpath(
+output_destination = os.path.normpath(
-core.os.path.join(core.OUTPUTDIRECTORY, inputCategory, core.sanitizeName(inputName).replace(" ",".")))
+core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitize_name(input_name).replace(" ", ".")))
else:
-outputDestination = os.path.normpath(
+output_destination = os.path.normpath(
-core.os.path.join(core.OUTPUTDIRECTORY, inputCategory))
+core.os.path.join(core.OUTPUTDIRECTORY, input_category))
try:
-outputDestination = outputDestination.encode(core.SYS_ENCODING)
+output_destination = output_destination.encode(core.SYS_ENCODING)
except UnicodeError:
pass
-if outputDestination in inputDirectory:
+if output_destination in input_directory:
-outputDestination = inputDirectory
+output_destination = input_directory
-logger.info("Output directory set to: {0}".format(outputDestination))
+logger.info("Output directory set to: {0}".format(output_destination))
-if core.SAFE_MODE and outputDestination == core.TORRENT_DEFAULTDIR:
+if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULTDIR:
logger.error('The output directory:[{0}] is the Download Directory. '
'Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format
-(inputDirectory))
+(input_directory))
return [-1, ""]
-logger.debug("Scanning files in directory: {0}".format(inputDirectory))
+logger.debug("Scanning files in directory: {0}".format(input_directory))
-if sectionName in ['HeadPhones', 'Lidarr']:
+if section_name in ['HeadPhones', 'Lidarr']:
core.NOFLATTEN.extend(
-inputCategory) # Make sure we preserve folder structure for HeadPhones.
+input_category) # Make sure we preserve folder structure for HeadPhones.
now = datetime.datetime.now()
if extract == 1:
-inputFiles = core.listMediaFiles(inputDirectory, archives=False, other=True, otherext=extensions)
+input_files = core.list_media_files(input_directory, archives=False, other=True, otherext=extensions)
else:
-inputFiles = core.listMediaFiles(inputDirectory, other=True, otherext=extensions)
+input_files = core.list_media_files(input_directory, other=True, otherext=extensions)
-if len(inputFiles) == 0 and os.path.isfile(inputDirectory):
+if len(input_files) == 0 and os.path.isfile(input_directory):
-inputFiles = [inputDirectory]
+input_files = [input_directory]
-logger.debug("Found 1 file to process: {0}".format(inputDirectory))
+logger.debug("Found 1 file to process: {0}".format(input_directory))
else:
-logger.debug("Found {0} files in {1}".format(len(inputFiles), inputDirectory))
+logger.debug("Found {0} files in {1}".format(len(input_files), input_directory))
-for inputFile in inputFiles:
+for inputFile in input_files:
-filePath = os.path.dirname(inputFile)
+file_path = os.path.dirname(inputFile)
-fileName, fileExt = os.path.splitext(os.path.basename(inputFile))
+file_name, file_ext = os.path.splitext(os.path.basename(inputFile))
-fullFileName = os.path.basename(inputFile)
+full_file_name = os.path.basename(inputFile)
-targetFile = core.os.path.join(outputDestination, fullFileName)
+target_file = core.os.path.join(output_destination, full_file_name)
-if inputCategory in core.NOFLATTEN:
+if input_category in core.NOFLATTEN:
-if not os.path.basename(filePath) in outputDestination:
+if not os.path.basename(file_path) in output_destination:
-targetFile = core.os.path.join(
+target_file = core.os.path.join(
-core.os.path.join(outputDestination, os.path.basename(filePath)), fullFileName)
+core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name)
logger.debug("Setting outputDestination to {0} to preserve folder structure".format
-(os.path.dirname(targetFile)))
+(os.path.dirname(target_file)))
try:
-targetFile = targetFile.encode(core.SYS_ENCODING)
+target_file = target_file.encode(core.SYS_ENCODING)
except UnicodeError:
pass
if root == 1:
-if not foundFile:
+if not found_file:
-logger.debug("Looking for {0} in: {1}".format(inputName, inputFile))
+logger.debug("Looking for {0} in: {1}".format(input_name, inputFile))
-if any([core.sanitizeName(inputName) in core.sanitizeName(inputFile),
+if any([core.sanitize_name(input_name) in core.sanitize_name(inputFile),
-core.sanitizeName(fileName) in core.sanitizeName(inputName)]):
+core.sanitize_name(file_name) in core.sanitize_name(input_name)]):
-foundFile = True
+found_file = True
logger.debug("Found file {0} that matches Torrent Name {1}".format
-(fullFileName, inputName))
+(full_file_name, input_name))
else:
continue
@@ -178,106 +178,106 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
mtime_lapse = now - datetime.datetime.fromtimestamp(os.path.getmtime(inputFile))
ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(inputFile))
-if not foundFile:
+if not found_file:
logger.debug("Looking for files with modified/created dates less than 5 minutes old.")
if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)):
-foundFile = True
+found_file = True
logger.debug("Found file {0} with date modified/created less than 5 minutes ago.".format
-(fullFileName))
+(full_file_name))
else:
continue # This file has not been recently moved or created, skip it
-if Torrent_NoLink == 0:
+if torrent_no_link == 0:
try:
-core.copy_link(inputFile, targetFile, core.USELINK)
+core.copy_link(inputFile, target_file, core.USELINK)
-core.rmReadOnly(targetFile)
+core.remove_read_only(target_file)
except:
-logger.error("Failed to link: {0} to {1}".format(inputFile, targetFile))
+logger.error("Failed to link: {0} to {1}".format(inputFile, target_file))
-inputName, outputDestination = convert_to_ascii(inputName, outputDestination)
+input_name, output_destination = convert_to_ascii(input_name, output_destination)
if extract == 1:
-logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
+logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
-core.extractFiles(inputDirectory, outputDestination, keep_archive)
+core.extract_files(input_directory, output_destination, keep_archive)
-if inputCategory not in core.NOFLATTEN:
+if input_category not in core.NOFLATTEN:
# don't flatten hp in case multi cd albums, and we need to copy this back later.
-core.flatten(outputDestination)
+core.flatten(output_destination)
# Now check if video files exist in destination:
-if sectionName in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]:
+if section_name in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]:
-numVideos = len(
+num_videos = len(
-core.listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False))
+core.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False))
-if numVideos > 0:
+if num_videos > 0:
-logger.info("Found {0} media files in {1}".format(numVideos, outputDestination))
+logger.info("Found {0} media files in {1}".format(num_videos, output_destination))
status = 0
elif extract != 1:
-logger.info("Found no media files in {0}. Sending to {1} to process".format(outputDestination, sectionName))
+logger.info("Found no media files in {0}. Sending to {1} to process".format(output_destination, section_name))
status = 0
else:
-logger.warning("Found no media files in {0}".format(outputDestination))
+logger.warning("Found no media files in {0}".format(output_destination))
# Only these sections can handling failed downloads
# so make sure everything else gets through without the check for failed
-if sectionName not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']:
+if section_name not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']:
status = 0
-logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, usercat, inputName))
+logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, usercat, input_name))
if core.TORRENT_CHMOD_DIRECTORY:
-core.rchmod(outputDestination, core.TORRENT_CHMOD_DIRECTORY)
+core.rchmod(output_destination, core.TORRENT_CHMOD_DIRECTORY)
result = [0, ""]
-if sectionName == 'UserScript':
+if section_name == 'UserScript':
-result = external_script(outputDestination, inputName, inputCategory, section)
+result = external_script(output_destination, input_name, input_category, section)
-elif sectionName in ['CouchPotato', 'Radarr']:
+elif section_name in ['CouchPotato', 'Radarr']:
-result = core.autoProcessMovie().process(sectionName, outputDestination, inputName,
+result = core.Movie().process(section_name, output_destination, input_name,
-status, clientAgent, inputHash, inputCategory)
+status, client_agent, input_hash, input_category)
-elif sectionName in ['SickBeard', 'NzbDrone', 'Sonarr']:
+elif section_name in ['SickBeard', 'NzbDrone', 'Sonarr']:
-if inputHash:
+if input_hash:
-inputHash = inputHash.upper()
+input_hash = input_hash.upper()
-result = core.autoProcessTV().processEpisode(sectionName, outputDestination, inputName,
+result = core.TV().process_episode(section_name, output_destination, input_name,
-status, clientAgent, inputHash, inputCategory)
+status, client_agent, input_hash, input_category)
-elif sectionName in ['HeadPhones', 'Lidarr']:
+elif section_name in ['HeadPhones', 'Lidarr']:
-result = core.autoProcessMusic().process(sectionName, outputDestination, inputName,
+result = core.Music().process(section_name, output_destination, input_name,
-status, clientAgent, inputCategory)
+status, client_agent, input_category)
-elif sectionName == 'Mylar':
+elif section_name == 'Mylar':
-result = core.autoProcessComics().processEpisode(sectionName, outputDestination, inputName,
+result = core.Comic().process_episode(section_name, output_destination, input_name,
-status, clientAgent, inputCategory)
+status, client_agent, input_category)
-elif sectionName == 'Gamez':
+elif section_name == 'Gamez':
-result = core.autoProcessGames().process(sectionName, outputDestination, inputName,
+result = core.Game().process(section_name, output_destination, input_name,
-status, clientAgent, inputCategory)
+status, client_agent, input_category)
-plex_update(inputCategory)
+plex_update(input_category)
if result[0] != 0:
if not core.TORRENT_RESUME_ON_FAILURE:
logger.error("A problem was reported in the autoProcess* script. "
"Torrent won't resume seeding (settings)")
-elif clientAgent != 'manual':
+elif client_agent != 'manual':
logger.error("A problem was reported in the autoProcess* script. "
"If torrent was paused we will resume seeding")
-core.resume_torrent(clientAgent, inputHash, inputID, inputName)
+core.resume_torrent(client_agent, input_hash, input_id, input_name)
else:
-if clientAgent != 'manual':
+if client_agent != 'manual':
# update download status in our DB
-core.update_downloadInfoStatus(inputName, 1)
+core.update_download_info_status(input_name, 1)
# remove torrent
if core.USELINK == 'move-sym' and not core.DELETE_ORIGINAL == 1:
-logger.debug('Checking for sym-links to re-direct in: {0}'.format(inputDirectory))
+logger.debug('Checking for sym-links to re-direct in: {0}'.format(input_directory))
-for dirpath, dirs, files in os.walk(inputDirectory):
+for dirpath, dirs, files in os.walk(input_directory):
for file in files:
logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath, file)))
replace_links(os.path.join(dirpath, file))
-core.remove_torrent(clientAgent, inputHash, inputID, inputName)
+core.remove_torrent(client_agent, input_hash, input_id, input_name)
-if not sectionName == 'UserScript':
+if not section_name == 'UserScript':
# for user script, we assume this is cleaned by the script or option USER_SCRIPT_CLEAN
# cleanup our processing folders of any misc unwanted files and empty directories
-core.cleanDir(outputDestination, sectionName, inputCategory)
+core.clean_dir(output_destination, section_name, input_category)
return result
@@ -287,7 +287,7 @@ def main(args):
core.initialize()
# clientAgent for Torrents
-clientAgent = core.TORRENT_CLIENTAGENT
+client_agent = core.TORRENT_CLIENTAGENT
logger.info("#########################################################")
logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__)))
@@ -300,13 +300,13 @@ def main(args):
result = [0, ""]
try:
-inputDirectory, inputName, inputCategory, inputHash, inputID = core.parse_args(clientAgent, args)
+input_directory, input_name, input_category, input_hash, input_id = core.parse_args(client_agent, args)
except:
logger.error("There was a problem loading variables")
return -1
-if inputDirectory and inputName and inputHash and inputID:
+if input_directory and input_name and input_hash and input_id:
-result = processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent)
+result = process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent)
else:
# Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")
@@ -315,42 +315,42 @@ def main(args):
for subsection in subsections:
if not core.CFG[section][subsection].isenabled():
continue
-for dirName in core.getDirs(section, subsection, link='hard'):
+for dir_name in core.get_dirs(section, subsection, link='hard'):
logger.info("Starting manual run for {0}:{1} - Folder:{2}".format
-(section, subsection, dirName))
+(section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format
-(os.path.basename(dirName)))
+(os.path.basename(dir_name)))
-core.DOWNLOADINFO = core.get_downloadInfo(os.path.basename(dirName), 0)
+core.DOWNLOADINFO = core.get_download_info(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO:
-clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
+client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
-inputHash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
+input_hash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
-inputID = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
+input_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
logger.info("Found download info for {0}, "
-"setting variables now ...".format(os.path.basename(dirName)))
+"setting variables now ...".format(os.path.basename(dir_name)))
else:
logger.info('Unable to locate download info for {0}, '
'continuing to try and process this release ...'.format
-(os.path.basename(dirName)))
+(os.path.basename(dir_name)))
-clientAgent = 'manual'
+client_agent = 'manual'
-inputHash = ''
+input_hash = ''
-inputID = ''
+input_id = ''
-if clientAgent.lower() not in core.TORRENT_CLIENTS:
+if client_agent.lower() not in core.TORRENT_CLIENTS:
continue
try:
-dirName = dirName.encode(core.SYS_ENCODING)
+dir_name = dir_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
-inputName = os.path.basename(dirName)
+input_name = os.path.basename(dir_name)
try:
-inputName = inputName.encode(core.SYS_ENCODING)
+input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
-results = processTorrent(dirName, inputName, subsection, inputHash or None, inputID or None,
+results = process_torrent(dir_name, input_name, subsection, input_hash or None, input_id or None,
-clientAgent)
+client_agent)
if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format
(section, subsection))


@@ -37,18 +37,18 @@ import six
from six.moves import reload_module
from core import logger, nzbToMediaDB, versionCheck
-from core.autoProcess.autoProcessComics import autoProcessComics
+from core.autoProcess.autoProcessComics import Comic
-from core.autoProcess.autoProcessGames import autoProcessGames
+from core.autoProcess.autoProcessGames import Game
-from core.autoProcess.autoProcessMovie import autoProcessMovie
+from core.autoProcess.autoProcessMovie import Movie
-from core.autoProcess.autoProcessMusic import autoProcessMusic
+from core.autoProcess.autoProcessMusic import Music
-from core.autoProcess.autoProcessTV import autoProcessTV
+from core.autoProcess.autoProcessTV import TV
from core.databases import mainDB
from core.nzbToMediaConfig import config
from core.nzbToMediaUtil import (
-RunningProcess, WakeUp, category_search, cleanDir, cleanDir, copy_link,
+RunningProcess, wake_up, category_search, clean_dir, clean_dir, copy_link,
-create_torrent_class, extractFiles, flatten, getDirs, get_downloadInfo,
+create_torrent_class, extract_files, flatten, get_dirs, get_download_info,
-listMediaFiles, makeDir, parse_args, pause_torrent, remove_torrent,
+list_media_files, make_dir, parse_args, pause_torrent, remove_torrent,
-resume_torrent, rmDir, rmReadOnly, sanitizeName, update_downloadInfoStatus,
+resume_torrent, remove_dir, remove_read_only, sanitize_name, update_download_info_status,
)
from core.transcoder import transcoder
@@ -255,7 +255,7 @@ def initialize(section=None):
LOG_FILE = os.environ['NTM_LOGFILE']
LOG_DIR = os.path.split(LOG_FILE)[0]
-if not makeDir(LOG_DIR):
+if not make_dir(LOG_DIR):
print("No log folder, logging to screen only")
MYAPP = RunningProcess()
@@ -291,7 +291,7 @@ def initialize(section=None):
sys.exit(1)
# init logging
-logger.ntm_log_instance.initLogging()
+logger.ntm_log_instance.init_logging()
# run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options.
if not config.migrate():
@@ -320,7 +320,7 @@ def initialize(section=None):
logger.info("{0}: {1}".format(item, os.environ[item]), "ENVIRONMENT")
# initialize the main SB database
-nzbToMediaDB.upgradeDatabase(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)
+nzbToMediaDB.upgrade_database(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)
# Set Version and GIT variables
NZBTOMEDIA_VERSION = '11.06'
@@ -357,7 +357,7 @@ def initialize(section=None):
system=platform.system(), release=platform.release()))
if int(CFG["WakeOnLan"]["wake"]) == 1:
-WakeUp()
+wake_up()
NZB_CLIENTAGENT = CFG["Nzb"]["clientAgent"] # sabnzbd
SABNZBDHOST = CFG["Nzb"]["sabnzbd_host"]
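Because these imports now pull the renamed classes and helpers (Comic, Game, Movie, Music, TV, make_dir, wake_up, and so on) into the package namespace, every call site has to move in the same commit; the first file's diff above shows exactly that, with core.autoProcessMovie().process(...) becoming core.Movie().process(...). A minimal, hypothetical before/after sketch of the class-level pattern, using invented names rather than code from this repository:

# Sketch only; the class and method names below are made up for illustration.

# Before: verb-style class name and camelCase method/arguments.
class autoProcessExample(object):
    def processItem(self, dirName, inputName=None):
        return "{0}/{1}".format(dirName, inputName)

# After: CapWords noun for the class, snake_case method and arguments.
class Example(object):
    def process_item(self, dir_name, input_name=None):
        return "{0}/{1}".format(dir_name, input_name)

if __name__ == "__main__":
    print(Example().process_item("/downloads/movies", "example.mkv"))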


@@ -6,18 +6,17 @@ import requests
import core
from core import logger
-from core.nzbToMediaUtil import convert_to_ascii, remoteDir, server_responding
+from core.nzbToMediaUtil import convert_to_ascii, remote_dir, server_responding
requests.packages.urllib3.disable_warnings()
-class autoProcessComics(object):
+class Comic(object):
-def processEpisode(self, section, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
+def process_episode(self, section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None):
apc_version = "2.04"
comicrn_version = "1.01"
-cfg = dict(core.CFG[section][inputCategory])
+cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -32,19 +31,19 @@ class autoProcessComics(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
-inputName, dirName = convert_to_ascii(inputName, dirName)
+input_name, dir_name = convert_to_ascii(input_name, dir_name)
-clean_name, ext = os.path.splitext(inputName)
+clean_name, ext = os.path.splitext(input_name)
if len(ext) == 4: # we assume this was a standard extension.
-inputName = clean_name
+input_name = clean_name
params = {
'cmd': 'forceProcess',
'apikey': apikey,
-'nzb_folder': remoteDir(dirName) if remote_path else dirName,
+'nzb_folder': remote_dir(dir_name) if remote_path else dir_name,
}
-if inputName is not None:
+if input_name is not None:
-params['nzb_name'] = inputName
+params['nzb_name'] = input_name
params['failed'] = int(status)
params['apc_version'] = apc_version
params['comicrn_version'] = comicrn_version
@@ -72,7 +71,7 @@ class autoProcessComics(object):
if success:
logger.postprocess("SUCCESS: This issue has been processed successfully", section)
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
else:
logger.warning("The issue does not appear to have successfully processed. Please check your Logs", section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]


@@ -12,11 +12,11 @@ from core.nzbToMediaUtil import convert_to_ascii, server_responding
requests.packages.urllib3.disable_warnings()
-class autoProcessGames(object):
+class Game(object):
-def process(self, section, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
+def process(self, section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None):
status = int(status)
-cfg = dict(core.CFG[section][inputCategory])
+cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -31,19 +31,19 @@ class autoProcessGames(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
-inputName, dirName = convert_to_ascii(inputName, dirName)
+input_name, dir_name = convert_to_ascii(input_name, dir_name)
-fields = inputName.split("-")
+fields = input_name.split("-")
-gamezID = fields[0].replace("[", "").replace("]", "").replace(" ", "")
+gamez_id = fields[0].replace("[", "").replace("]", "").replace(" ", "")
-downloadStatus = 'Downloaded' if status == 0 else 'Wanted'
+download_status = 'Downloaded' if status == 0 else 'Wanted'
params = {
'api_key': apikey,
'mode': 'UPDATEREQUESTEDSTATUS',
-'db_id': gamezID,
+'db_id': gamez_id,
-'status': downloadStatus
+'status': download_status
}
logger.debug("Opening URL: {0}".format(url), section)
@@ -59,9 +59,9 @@ class autoProcessGames(object):
if library:
logger.postprocess("moving files to library: {0}".format(library), section)
try:
-shutil.move(dirName, os.path.join(library, inputName))
+shutil.move(dir_name, os.path.join(library, input_name))
except:
-logger.error("Unable to move {0} to {1}".format(dirName, os.path.join(library, inputName)), section)
+logger.error("Unable to move {0} to {1}".format(dir_name, os.path.join(library, input_name)), section)
return [1, "{0}: Failed to post-process - Unable to move files".format(section)]
else:
logger.error("No library specified to move files to. Please edit your configuration.", section)
@@ -71,8 +71,8 @@ class autoProcessGames(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif result['success']:
-logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamezID, downloadStatus), section)
+logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamez_id, download_status), section)
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
else:
-logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamezID), section)
+logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamez_id), section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]


@@ -9,30 +9,30 @@ import requests
import core
from core import logger
from core.nzbToMediaSceneExceptions import process_all_exceptions
-from core.nzbToMediaUtil import convert_to_ascii, find_download, find_imdbid, import_subs, listMediaFiles, remoteDir, reportNzb, rmDir, server_responding
+from core.nzbToMediaUtil import convert_to_ascii, find_download, find_imdbid, import_subs, list_media_files, remote_dir, report_nzb, remove_dir, server_responding
from core.transcoder import transcoder
requests.packages.urllib3.disable_warnings()
-class autoProcessMovie(object):
+class Movie(object):
-def get_release(self, baseURL, imdbid=None, download_id=None, release_id=None):
+def get_release(self, base_url, imdb_id=None, download_id=None, release_id=None):
results = {}
params = {}
# determine cmd and params to send to CouchPotato to get our results
section = 'movies'
cmd = "media.list"
-if release_id or imdbid:
+if release_id or imdb_id:
section = 'media'
cmd = "media.get"
-params['id'] = release_id or imdbid
+params['id'] = release_id or imdb_id
-if not (release_id or imdbid or download_id):
+if not (release_id or imdb_id or download_id):
logger.debug("No information available to filter CP results")
return results
-url = "{0}{1}".format(baseURL, cmd)
+url = "{0}{1}".format(base_url, cmd)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params))
try:
@@ -129,7 +129,7 @@ class autoProcessMovie(object):
logger.error("{0} did not return expected json data.".format(section), section)
return None
-def CDH(self, url2, headers, section="MAIN"):
+def completed_download_handling(self, url2, headers, section="MAIN"):
try:
r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
except requests.ConnectionError:
@@ -145,9 +145,9 @@ class autoProcessMovie(object):
# ValueError catches simplejson's JSONDecodeError and json's ValueError
return False
-def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", download_id="", inputCategory=None, failureLink=None):
+def process(self, section, dir_name, input_name=None, status=0, client_agent="manual", download_id="", input_category=None, failure_link=None):
-cfg = dict(core.CFG[section][inputCategory])
+cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -158,9 +158,9 @@ class autoProcessMovie(object):
method = None
#added importMode for Radarr config
if section == "Radarr":
-importMode = cfg.get("importMode","Move")
+import_mode = cfg.get("importMode","Move")
else:
-importMode = None
+import_mode = None
delete_failed = int(cfg["delete_failed"])
wait_for = int(cfg["wait_for"])
ssl = int(cfg.get("ssl", 0))
@@ -174,19 +174,19 @@ class autoProcessMovie(object):
else:
extract = int(cfg.get("extract", 0))
-imdbid = find_imdbid(dirName, inputName, omdbapikey)
+imdbid = find_imdbid(dir_name, input_name, omdbapikey)
if section == "CouchPotato":
-baseURL = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey)
+base_url = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey)
if section == "Radarr":
-baseURL = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
+base_url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root)
headers = {'X-Api-Key': apikey}
if not apikey:
logger.info('No CouchPotato or Radarr apikey entered. Performing transcoder functions only')
release = None
-elif server_responding(baseURL):
+elif server_responding(base_url):
if section == "CouchPotato":
-release = self.get_release(baseURL, imdbid, download_id)
+release = self.get_release(base_url, imdbid, download_id)
else:
release = None
else:
@@ -208,30 +208,30 @@ class autoProcessMovie(object):
except:
pass
-if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
+if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
-dirName = os.path.split(os.path.normpath(dirName))[0]
+dir_name = os.path.split(os.path.normpath(dir_name))[0]
-SpecificPath = os.path.join(dirName, str(inputName))
+specific_path = os.path.join(dir_name, str(input_name))
-cleanName = os.path.splitext(SpecificPath)
+clean_name = os.path.splitext(specific_path)
-if cleanName[1] == ".nzb":
+if clean_name[1] == ".nzb":
-SpecificPath = cleanName[0]
+specific_path = clean_name[0]
-if os.path.isdir(SpecificPath):
+if os.path.isdir(specific_path):
-dirName = SpecificPath
+dir_name = specific_path
-process_all_exceptions(inputName, dirName)
+process_all_exceptions(input_name, dir_name)
-inputName, dirName = convert_to_ascii(inputName, dirName)
+input_name, dir_name = convert_to_ascii(input_name, dir_name)
-if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
+if not list_media_files(dir_name, media=True, audio=False, meta=False, archives=False) and list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
-logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
+logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
-core.extractFiles(dirName)
+core.extract_files(dir_name)
-inputName, dirName = convert_to_ascii(inputName, dirName)
+input_name, dir_name = convert_to_ascii(input_name, dir_name)
good_files = 0
num_files = 0
# Check video files for corruption
-for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
+for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
num_files += 1
-if transcoder.isVideoGood(video, status):
+if transcoder.is_video_good(video, status):
import_subs(video)
good_files += 1
if num_files and good_files == num_files:
@@ -242,51 +242,51 @@ class autoProcessMovie(object):
logger.info("Status shown as success from Downloader, but corrupt video files found. Setting as failed.", section)
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
-if failureLink:
+if failure_link:
-failureLink += '&corrupt=true'
+failure_link += '&corrupt=true'
status = 1
-elif clientAgent == "manual":
+elif client_agent == "manual":
-logger.warning("No media files found in directory {0} to manually process.".format(dirName), section)
+logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section)
return [0, ""] # Success (as far as this script is concerned)
else:
-logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section)
+logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section)
status = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
if status == 0:
if core.TRANSCODE == 1:
-result, newDirName = transcoder.Transcode_directory(dirName)
+result, new_dir_name = transcoder.transcode_directory(dir_name)
if result == 0:
-logger.debug("Transcoding succeeded for files in {0}".format(dirName), section)
+logger.debug("Transcoding succeeded for files in {0}".format(dir_name), section)
-dirName = newDirName
+dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
if chmod_directory:
-logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section)
+logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section)
-core.rchmod(dirName, chmod_directory)
+core.rchmod(dir_name, chmod_directory)
else:
-logger.error("Transcoding failed for files in {0}".format(dirName), section)
+logger.error("Transcoding failed for files in {0}".format(dir_name), section)
return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]
-for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
+for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
if not release and ".cp(tt" not in video and imdbid:
-videoName, videoExt = os.path.splitext(video)
+video_name, video_ext = os.path.splitext(video)
-video2 = "{0}.cp({1}){2}".format(videoName, imdbid, videoExt)
+video2 = "{0}.cp({1}){2}".format(video_name, imdbid, video_ext)
-if not (clientAgent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'):
+if not (client_agent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'):
logger.debug('Renaming: {0} to: {1}'.format(video, video2))
os.rename(video, video2)
if not apikey: #If only using Transcoder functions, exit here.
logger.info('No CouchPotato or Radarr apikey entered. Processing completed.')
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
params = {}
if download_id and release_id:
-params['downloader'] = downloader or clientAgent
+params['downloader'] = downloader or client_agent
params['download_id'] = download_id
-params['media_folder'] = remoteDir(dirName) if remote_path else dirName
+params['media_folder'] = remote_dir(dir_name) if remote_path else dir_name
if section == "CouchPotato":
if method == "manage":
@@ -295,22 +295,22 @@ class autoProcessMovie(object):
else:
command = "renamer.scan"
-url = "{0}{1}".format(baseURL, command)
+url = "{0}{1}".format(base_url, command)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
-logger.postprocess("Starting {0} scan for {1}".format(method, inputName), section)
+logger.postprocess("Starting {0} scan for {1}".format(method, input_name), section)
if section == "Radarr":
-payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id,'importMode' : importMode}
+payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id,'importMode' : import_mode}
if not download_id:
payload.pop("downloadClientId")
-logger.debug("Opening URL: {0} with PARAMS: {1}".format(baseURL, payload), section)
+logger.debug("Opening URL: {0} with PARAMS: {1}".format(base_url, payload), section)
-logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(inputName), section)
+logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(input_name), section)
try:
if section == "CouchPotato":
r = requests.get(url, params=params, verify=False, timeout=(30, 1800))
else:
-r = requests.post(baseURL, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
+r = requests.post(base_url, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
except requests.ConnectionError:
logger.error("Unable to open URL", section)
return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
@@ -320,47 +320,47 @@ class autoProcessMovie(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif section == "CouchPotato" and result['success']:
-logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dirName), section)
+logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dir_name), section)
if method == "manage":
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif section == "Radarr":
logger.postprocess("Radarr response: {0}".format(result['state']))
try:
res = json.loads(r.content)
scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section)
-Started = True
+started = True
except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None
else:
-logger.error("FAILED: {0} scan was unable to finish for folder {1}. exiting!".format(method, dirName),
+logger.error("FAILED: {0} scan was unable to finish for folder {1}. exiting!".format(method, dir_name),
section)
return [1, "{0}: Failed to post-process - Server did not return success".format(section)]
else:
core.FAILED = True
-logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(inputName), section)
+logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(input_name), section)
-if failureLink:
+if failure_link:
-reportNzb(failureLink, clientAgent)
+report_nzb(failure_link, client_agent)
if section == "Radarr":
logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section)
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
-if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
+if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
-logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
+logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
-rmDir(dirName)
+remove_dir(dir_name)
if not release_id and not media_id:
-logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(inputName),
+logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(input_name),
section)
return [1, "{0}: Failed to post-process - Failed download not found in {1}".format(section, section)]
if release_id:
-logger.postprocess("Setting failed release {0} to ignored ...".format(inputName), section)
+logger.postprocess("Setting failed release {0} to ignored ...".format(input_name), section)
-url = "{url}release.ignore".format(url=baseURL)
+url = "{url}release.ignore".format(url=base_url)
params = {'id': release_id}
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
@@ -376,14 +376,14 @@ class autoProcessMovie(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif result['success']:
-logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(inputName), section)
+logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(input_name), section)
else:
-logger.warning("FAILED: Unable to set {0} to ignored!".format(inputName), section)
+logger.warning("FAILED: Unable to set {0} to ignored!".format(input_name), section)
-return [1, "{0}: Failed to post-process - Unable to set {1} to ignored".format(section, inputName)]
+return [1, "{0}: Failed to post-process - Unable to set {1} to ignored".format(section, input_name)]
logger.postprocess("Trying to snatch the next highest ranked release.", section)
-url = "{0}movie.searcher.try_next".format(baseURL)
+url = "{0}movie.searcher.try_next".format(base_url)
logger.debug("Opening URL: {0}".format(url), section)
try:
@@ -412,7 +412,7 @@ class autoProcessMovie(object):
while time.time() < timeout: # only wait 2 (default) minutes, then return.
logger.postprocess("Checking for status change, please stand by ...", section)
if section == "CouchPotato":
-release = self.get_release(baseURL, imdbid, download_id, release_id)
+release = self.get_release(base_url, imdbid, download_id, release_id)
scan_id = None
else:
release = None
@@ -424,44 +424,44 @@ class autoProcessMovie(object):
if release_status_old is None: # we didn't have a release before, but now we do.
logger.postprocess("SUCCESS: Movie {0} has now been added to CouchPotato with release status of [{1}]".format(
title, str(release_status_new).upper()), section)
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
if release_status_new != release_status_old:
logger.postprocess("SUCCESS: Release for {0} has now been marked with a status of [{1}]".format(
title, str(release_status_new).upper()), section)
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
except:
pass
elif scan_id:
-url = "{0}/{1}".format(baseURL, scan_id)
+url = "{0}/{1}".format(base_url, scan_id)
command_status = self.command_complete(url, params, headers, section)
if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section)
if command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section)
-# return [1, "%s: Failed to post-process %s" % (section, inputName) ]
+# return [1, "%s: Failed to post-process %s" % (section, input_name) ]
-if not os.path.isdir(dirName):
+if not os.path.isdir(dir_name):
logger.postprocess("SUCCESS: Input Directory [{0}] has been processed and removed".format(
-dirName), section)
+dir_name), section)
-return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
-elif not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=True):
+elif not list_media_files(dir_name, media=True, audio=False, meta=False, archives=True):
logger.postprocess("SUCCESS: Input Directory [{0}] has no remaining media files. This has been fully processed.".format(
-dirName), section)
+dir_name), section)
-return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
# pause and let CouchPotatoServer/Radarr catch its breath
time.sleep(10 * wait_for)
# The status hasn't changed. we have waited wait_for minutes which is more than enough. uTorrent can resume seeding now.
if section == "Radarr" and self.CDH(url2, headers, section=section): if section == "Radarr" and self.completed_download_handling(url2, headers, section=section):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section) logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)] return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
logger.warning( logger.warning(
"{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(inputName, wait_for), "{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(input_name, wait_for),
section) section)
return [1, "{0}: Failed to post-process - No change in status".format(section)] return [1, "{0}: Failed to post-process - No change in status".format(section)]
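The movie post-processor above follows the module-wide convention of returning a two-item [exit_code, message] list and polling the indexer until either the release status changes or the wait_for window expires. A minimal, self-contained sketch of that wait loop follows; the function and argument names are illustrative only and not part of the repository.

import time

def wait_for_status_change(get_status, old_status, wait_for=2, section="CouchPotato"):
    # Poll get_status() until the reported status differs from old_status,
    # giving up after roughly wait_for minutes (mirroring the loop above).
    timeout = time.time() + 60 * wait_for
    while time.time() < timeout:
        new_status = get_status()
        if new_status is not None and new_status != old_status:
            return [0, "{0}: Successfully post-processed".format(section)]
        # pause and let the indexer catch its breath, as the real loop does
        time.sleep(10 * wait_for)
    return [1, "{0}: Failed to post-process - No change in status".format(section)]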
View file
@@ -9,12 +9,12 @@ import requests
import core import core
from core import logger from core import logger
from core.nzbToMediaSceneExceptions import process_all_exceptions from core.nzbToMediaSceneExceptions import process_all_exceptions
from core.nzbToMediaUtil import convert_to_ascii, listMediaFiles, remoteDir, rmDir, server_responding from core.nzbToMediaUtil import convert_to_ascii, list_media_files, remote_dir, remove_dir, server_responding
requests.packages.urllib3.disable_warnings() requests.packages.urllib3.disable_warnings()
class autoProcessMusic(object): class Music(object):
def command_complete(self, url, params, headers, section): def command_complete(self, url, params, headers, section):
try: try:
r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60)) r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60))
@@ -32,8 +32,8 @@ class autoProcessMusic(object):
logger.error("{0} did not return expected json data.".format(section), section) logger.error("{0} did not return expected json data.".format(section), section)
return None return None
def get_status(self, url, apikey, dirName): def get_status(self, url, apikey, dir_name):
logger.debug("Attempting to get current status for release:{0}".format(os.path.basename(dirName))) logger.debug("Attempting to get current status for release:{0}".format(os.path.basename(dir_name)))
params = { params = {
'apikey': apikey, 'apikey': apikey,
@@ -55,13 +55,13 @@ class autoProcessMusic(object):
return None return None
for album in result: for album in result:
if os.path.basename(dirName) == album['FolderName']: if os.path.basename(dir_name) == album['FolderName']:
return album["Status"].lower() return album["Status"].lower()
def forceProcess(self, params, url, apikey, inputName, dirName, section, wait_for): def force_process(self, params, url, apikey, input_name, dir_name, section, wait_for):
release_status = self.get_status(url, apikey, dirName) release_status = self.get_status(url, apikey, dir_name)
if not release_status: if not release_status:
logger.error("Could not find a status for {0}, is it in the wanted list ?".format(inputName), section) logger.error("Could not find a status for {0}, is it in the wanted list ?".format(input_name), section)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section) logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
@@ -77,29 +77,29 @@ class autoProcessMusic(object):
logger.error("Server returned status {0}".format(r.status_code), section) logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)] return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif r.text == "OK": elif r.text == "OK":
logger.postprocess("SUCCESS: Post-Processing started for {0} in folder {1} ...".format(inputName, dirName), section) logger.postprocess("SUCCESS: Post-Processing started for {0} in folder {1} ...".format(input_name, dir_name), section)
else: else:
logger.error("FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!".format(inputName, dirName), section) logger.error("FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!".format(input_name, dir_name), section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)] return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]
# we will now wait for this album to be processed before returning to TorrentToMedia and unpausing. # we will now wait for this album to be processed before returning to TorrentToMedia and unpausing.
timeout = time.time() + 60 * wait_for timeout = time.time() + 60 * wait_for
while time.time() < timeout: while time.time() < timeout:
current_status = self.get_status(url, apikey, dirName) current_status = self.get_status(url, apikey, dir_name)
if current_status is not None and current_status != release_status: # Something has changed. CPS must have processed this movie. if current_status is not None and current_status != release_status: # Something has changed. CPS must have processed this movie.
logger.postprocess("SUCCESS: This release is now marked as status [{0}]".format(current_status), section) logger.postprocess("SUCCESS: This release is now marked as status [{0}]".format(current_status), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
if not os.path.isdir(dirName): if not os.path.isdir(dir_name):
logger.postprocess("SUCCESS: The input directory {0} has been removed Processing must have finished.".format(dirName), section) logger.postprocess("SUCCESS: The input directory {0} has been removed Processing must have finished.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
time.sleep(10 * wait_for) time.sleep(10 * wait_for)
# The status hasn't changed. # The status hasn't changed.
return [2, "no change"] return [2, "no change"]
def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", inputCategory=None): def process(self, section, dir_name, input_name=None, status=0, client_agent="manual", input_category=None):
status = int(status) status = int(status)
cfg = dict(core.CFG[section][inputCategory]) cfg = dict(core.CFG[section][input_category])
host = cfg["host"] host = cfg["host"]
port = cfg["port"] port = cfg["port"]
@@ -124,25 +124,25 @@ class autoProcessMusic(object):
logger.error("Server did not respond. Exiting", section) logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)] return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name. if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0] dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName)) specific_path = os.path.join(dir_name, str(input_name))
cleanName = os.path.splitext(SpecificPath) clean_name = os.path.splitext(specific_path)
if cleanName[1] == ".nzb": if clean_name[1] == ".nzb":
SpecificPath = cleanName[0] specific_path = clean_name[0]
if os.path.isdir(SpecificPath): if os.path.isdir(specific_path):
dirName = SpecificPath dir_name = specific_path
process_all_exceptions(inputName, dirName) process_all_exceptions(input_name, dir_name)
inputName, dirName = convert_to_ascii(inputName, dirName) input_name, dir_name = convert_to_ascii(input_name, dir_name)
if not listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract: if not list_media_files(dir_name, media=False, audio=True, meta=False, archives=False) and list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName)) logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extractFiles(dirName) core.extract_files(dir_name)
inputName, dirName = convert_to_ascii(inputName, dirName) input_name, dir_name = convert_to_ascii(input_name, dir_name)
#if listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and status: #if listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and status:
# logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section) # logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section)
# status = 0 # status = 0
@@ -151,20 +151,20 @@ class autoProcessMusic(object):
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': "forceProcess", 'cmd': "forceProcess",
'dir': remoteDir(dirName) if remote_path else dirName 'dir': remote_dir(dir_name) if remote_path else dir_name
} }
res = self.forceProcess(params, url, apikey, inputName, dirName, section, wait_for) res = self.force_process(params, url, apikey, input_name, dir_name, section, wait_for)
if res[0] in [0, 1]: if res[0] in [0, 1]:
return res return res
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': "forceProcess", 'cmd': "forceProcess",
'dir': os.path.split(remoteDir(dirName))[0] if remote_path else os.path.split(dirName)[0] 'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0]
} }
res = self.forceProcess(params, url, apikey, inputName, dirName, section, wait_for) res = self.force_process(params, url, apikey, input_name, dir_name, section, wait_for)
if res[0] in [0, 1]: if res[0] in [0, 1]:
return res return res
@@ -176,11 +176,11 @@ class autoProcessMusic(object):
url = "{0}{1}:{2}{3}/api/v1/command".format(protocol, host, port, web_root) url = "{0}{1}:{2}{3}/api/v1/command".format(protocol, host, port, web_root)
headers = {"X-Api-Key": apikey} headers = {"X-Api-Key": apikey}
if remote_path: if remote_path:
logger.debug("remote_path: {0}".format(remoteDir(dirName)), section) logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section)
data = {"name": "Rename", "path": remoteDir(dirName)} data = {"name": "Rename", "path": remote_dir(dir_name)}
else: else:
logger.debug("path: {0}".format(dirName), section) logger.debug("path: {0}".format(dir_name), section)
data = {"name": "Rename", "path": dirName} data = {"name": "Rename", "path": dir_name}
data = json.dumps(data) data = json.dumps(data)
try: try:
logger.debug("Opening URL: {0} with data: {1}".format(url, data), section) logger.debug("Opening URL: {0} with data: {1}".format(url, data), section)
@@ -189,18 +189,18 @@ class autoProcessMusic(object):
logger.error("Unable to open URL: {0}".format(url), section) logger.error("Unable to open URL: {0}".format(url), section)
return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)] return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
Success = False success = False
Queued = False queued = False
Started = False started = False
try: try:
res = json.loads(r.content) res = json.loads(r.content)
scan_id = int(res['id']) scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section) logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True started = True
except Exception as e: except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section) logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None scan_id = None
Started = False started = False
return [1, "{0}: Failed to post-process - Unable to start scan".format(section)] return [1, "{0}: Failed to post-process - Unable to start scan".format(section)]
n = 0 n = 0
@@ -214,15 +214,15 @@ class autoProcessMusic(object):
n += 1 n += 1
if command_status: if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section) logger.debug("The Scan command return status: {0}".format(command_status), section)
if not os.path.exists(dirName): if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section) logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['completed']: elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section) logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['failed']: elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section) logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ] # return [1, "%s: Failed to post-process %s" % (section, input_name) ]
else: else:
logger.debug("The Scan command did not return status completed. Passing back to {0} to attempt complete download handling.".format(section), section) logger.debug("The Scan command did not return status completed. Passing back to {0} to attempt complete download handling.".format(section), section)
return [status, "{0}: Passing back to {1} to attempt Complete Download Handling".format(section, section)] return [status, "{0}: Passing back to {1} to attempt Complete Download Handling".format(section, section)]
@@ -233,7 +233,7 @@ class autoProcessMusic(object):
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader. return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
else: else:
logger.warning("FAILED DOWNLOAD DETECTED", section) logger.warning("FAILED DOWNLOAD DETECTED", section)
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName: if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section) logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
rmDir(dirName) remove_dir(dir_name)
return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader. return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader.
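Because this commit renames autoProcessMusic to Music and its camelCase arguments to snake_case, any external caller has to be updated to match. A hedged example of a caller after the rename, built from the process() signature in the hunk above; the import path and the category/path values are assumptions for illustration only.

from core.autoProcessMusic import Music  # import path is an assumption; adjust to wherever Music lives

result = Music().process(
    section="HeadPhones",
    dir_name="/downloads/music/Some.Album.2018",   # placeholder path
    input_name="Some.Album.2018",
    status=0,
    client_agent="manual",
    input_category="music",
)
# result follows the [exit_code, message] convention used throughout these modules
print(result)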
View file
@@ -10,15 +10,15 @@ import requests
import core import core
from core import logger from core import logger
from core.nzbToMediaAutoFork import autoFork from core.nzbToMediaAutoFork import auto_fork
from core.nzbToMediaSceneExceptions import process_all_exceptions from core.nzbToMediaSceneExceptions import process_all_exceptions
from core.nzbToMediaUtil import convert_to_ascii, flatten, import_subs, listMediaFiles, remoteDir, reportNzb, rmDir, server_responding from core.nzbToMediaUtil import convert_to_ascii, flatten, import_subs, list_media_files, remote_dir, report_nzb, remove_dir, server_responding
from core.transcoder import transcoder from core.transcoder import transcoder
requests.packages.urllib3.disable_warnings() requests.packages.urllib3.disable_warnings()
class autoProcessTV(object): class TV(object):
def command_complete(self, url, params, headers, section): def command_complete(self, url, params, headers, section):
try: try:
r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60)) r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60))
@@ -36,7 +36,7 @@ class autoProcessTV(object):
logger.error("{0} did not return expected json data.".format(section), section) logger.error("{0} did not return expected json data.".format(section), section)
return None return None
def CDH(self, url2, headers, section="MAIN"): def completed_download_handling(self, url2, headers, section="MAIN"):
try: try:
r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60)) r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
except requests.ConnectionError: except requests.ConnectionError:
@@ -52,9 +52,9 @@ class autoProcessTV(object):
# ValueError catches simplejson's JSONDecodeError and json's ValueError # ValueError catches simplejson's JSONDecodeError and json's ValueError
return False return False
def processEpisode(self, section, dirName, inputName=None, failed=False, clientAgent="manual", download_id=None, inputCategory=None, failureLink=None): def process_episode(self, section, dir_name, input_name=None, failed=False, client_agent="manual", download_id=None, input_category=None, failure_link=None):
cfg = dict(core.CFG[section][inputCategory]) cfg = dict(core.CFG[section][input_category])
host = cfg["host"] host = cfg["host"]
port = cfg["port"] port = cfg["port"]
@@ -67,7 +67,7 @@ class autoProcessTV(object):
if server_responding("{0}{1}:{2}{3}".format(protocol, host, port, web_root)): if server_responding("{0}{1}:{2}{3}".format(protocol, host, port, web_root)):
# auto-detect correct fork # auto-detect correct fork
fork, fork_params = autoFork(section, inputCategory) fork, fork_params = auto_fork(section, input_category)
elif not username and not apikey: elif not username and not apikey:
logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only') logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only')
fork, fork_params = "None", {} fork, fork_params = "None", {}
@@ -76,9 +76,9 @@ class autoProcessTV(object):
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)] return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
delete_failed = int(cfg.get("delete_failed", 0)) delete_failed = int(cfg.get("delete_failed", 0))
nzbExtractionBy = cfg.get("nzbExtractionBy", "Downloader") nzb_extraction_by = cfg.get("nzbExtractionBy", "Downloader")
process_method = cfg.get("process_method") process_method = cfg.get("process_method")
if clientAgent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym": if client_agent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym":
process_method = "symlink" process_method = "symlink"
remote_path = int(cfg.get("remote_path", 0)) remote_path = int(cfg.get("remote_path", 0))
wait_for = int(cfg.get("wait_for", 2)) wait_for = int(cfg.get("wait_for", 2))
@@ -91,49 +91,49 @@ class autoProcessTV(object):
else: else:
extract = int(cfg.get("extract", 0)) extract = int(cfg.get("extract", 0))
#get importmode, default to "Move" for consistency with legacy #get importmode, default to "Move" for consistency with legacy
importMode = cfg.get("importMode","Move") import_mode = cfg.get("importMode","Move")
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name. if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0] dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName)) specific_path = os.path.join(dir_name, str(input_name))
cleanName = os.path.splitext(SpecificPath) clean_name = os.path.splitext(specific_path)
if cleanName[1] == ".nzb": if clean_name[1] == ".nzb":
SpecificPath = cleanName[0] specific_path = clean_name[0]
if os.path.isdir(SpecificPath): if os.path.isdir(specific_path):
dirName = SpecificPath dir_name = specific_path
# Attempt to create the directory if it doesn't exist and ignore any # Attempt to create the directory if it doesn't exist and ignore any
# error stating that it already exists. This fixes a bug where SickRage # error stating that it already exists. This fixes a bug where SickRage
# won't process the directory because it doesn't exist. # won't process the directory because it doesn't exist.
try: try:
os.makedirs(dirName) # Attempt to create the directory os.makedirs(dir_name) # Attempt to create the directory
except OSError as e: except OSError as e:
# Re-raise the error if it wasn't about the directory not existing # Re-raise the error if it wasn't about the directory not existing
if e.errno != errno.EEXIST: if e.errno != errno.EEXIST:
raise raise
if 'process_method' not in fork_params or (clientAgent in ['nzbget', 'sabnzbd'] and nzbExtractionBy != "Destination"): if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != "Destination"):
if inputName: if input_name:
process_all_exceptions(inputName, dirName) process_all_exceptions(input_name, dir_name)
inputName, dirName = convert_to_ascii(inputName, dirName) input_name, dir_name = convert_to_ascii(input_name, dir_name)
# Now check if tv files exist in destination. # Now check if tv files exist in destination.
if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False): if not list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
if listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract: if list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName)) logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extractFiles(dirName) core.extract_files(dir_name)
inputName, dirName = convert_to_ascii(inputName, dirName) input_name, dir_name = convert_to_ascii(input_name, dir_name)
if listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False): # Check that a video exists. if not, assume failed. if list_media_files(dir_name, media=True, audio=False, meta=False, archives=False): # Check that a video exists. if not, assume failed.
flatten(dirName) flatten(dir_name)
# Check video files for corruption # Check video files for corruption
good_files = 0 good_files = 0
num_files = 0 num_files = 0
for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False): for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
num_files += 1 num_files += 1
if transcoder.isVideoGood(video, status): if transcoder.is_video_good(video, status):
good_files += 1 good_files += 1
import_subs(video) import_subs(video)
if num_files > 0: if num_files > 0:
@@ -147,12 +147,12 @@ class autoProcessTV(object):
failed = 1 failed = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD') print('[NZB] MARK=BAD')
if failureLink: if failure_link:
failureLink += '&corrupt=true' failure_link += '&corrupt=true'
elif clientAgent == "manual": elif client_agent == "manual":
logger.warning("No media files found in directory {0} to manually process.".format(dirName), section) logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section)
return [0, ""] # Success (as far as this script is concerned) return [0, ""] # Success (as far as this script is concerned)
elif nzbExtractionBy == "Destination": elif nzb_extraction_by == "Destination":
logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.") logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.")
if int(failed) == 0: if int(failed) == 0:
logger.info("Setting Status Success.") logger.info("Setting Status Success.")
@@ -163,32 +163,32 @@ class autoProcessTV(object):
status = 1 status = 1
failed = 1 failed = 1
else: else:
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section) logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section)
status = 1 status = 1
failed = 1 failed = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0': if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD') print('[NZB] MARK=BAD')
if status == 0 and core.TRANSCODE == 1: # only transcode successful downloads if status == 0 and core.TRANSCODE == 1: # only transcode successful downloads
result, newDirName = transcoder.Transcode_directory(dirName) result, new_dir_name = transcoder.transcode_directory(dir_name)
if result == 0: if result == 0:
logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dirName), section) logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dir_name), section)
dirName = newDirName dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8) chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section) logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
if chmod_directory: if chmod_directory:
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section) logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section)
core.rchmod(dirName, chmod_directory) core.rchmod(dir_name, chmod_directory)
else: else:
logger.error("FAILED: Transcoding failed for files in {0}".format(dirName), section) logger.error("FAILED: Transcoding failed for files in {0}".format(dir_name), section)
return [1, "{0}: Failed to post-process - Transcoding failed".format(section)] return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]
# configure SB params to pass # configure SB params to pass
fork_params['quiet'] = 1 fork_params['quiet'] = 1
fork_params['proc_type'] = 'manual' fork_params['proc_type'] = 'manual'
if inputName is not None: if input_name is not None:
fork_params['nzbName'] = inputName fork_params['nzbName'] = input_name
for param in copy.copy(fork_params): for param in copy.copy(fork_params):
if param == "failed": if param == "failed":
@@ -206,10 +206,10 @@ class autoProcessTV(object):
if "proc_type" in fork_params: if "proc_type" in fork_params:
del fork_params['proc_type'] del fork_params['proc_type']
if param in ["dirName", "dir", "proc_dir", "process_directory", "path"]: if param in ["dir_name", "dir", "proc_dir", "process_directory", "path"]:
fork_params[param] = dirName fork_params[param] = dir_name
if remote_path: if remote_path:
fork_params[param] = remoteDir(dirName) fork_params[param] = remote_dir(dir_name)
if param == "process_method": if param == "process_method":
if process_method: if process_method:
@@ -244,12 +244,12 @@ class autoProcessTV(object):
if status == 0: if status == 0:
if section == "NzbDrone" and not apikey: if section == "NzbDrone" and not apikey:
logger.info('No Sonarr apikey entered. Processing completed.') logger.info('No Sonarr apikey entered. Processing completed.')
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section) logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section)
else: else:
core.FAILED = True core.FAILED = True
if failureLink: if failure_link:
reportNzb(failureLink, clientAgent) report_nzb(failure_link, client_agent)
if 'failed' in fork_params: if 'failed' in fork_params:
logger.postprocess("FAILED: The download failed. Sending 'failed' process request to {0} branch".format(fork), section) logger.postprocess("FAILED: The download failed. Sending 'failed' process request to {0} branch".format(fork), section)
elif section == "NzbDrone": elif section == "NzbDrone":
@@ -257,9 +257,9 @@ class autoProcessTV(object):
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader. return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
else: else:
logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section) logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section)
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName: if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section) logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
rmDir(dirName) remove_dir(dir_name)
return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader. return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader.
url = None url = None
@@ -274,11 +274,11 @@ class autoProcessTV(object):
headers = {"X-Api-Key": apikey} headers = {"X-Api-Key": apikey}
# params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'} # params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
if remote_path: if remote_path:
logger.debug("remote_path: {0}".format(remoteDir(dirName)), section) logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section)
data = {"name": "DownloadedEpisodesScan", "path": remoteDir(dirName), "downloadClientId": download_id, "importMode": importMode} data = {"name": "DownloadedEpisodesScan", "path": remote_dir(dir_name), "downloadClientId": download_id, "importMode": import_mode}
else: else:
logger.debug("path: {0}".format(dirName), section) logger.debug("path: {0}".format(dir_name), section)
data = {"name": "DownloadedEpisodesScan", "path": dirName, "downloadClientId": download_id, "importMode": importMode} data = {"name": "DownloadedEpisodesScan", "path": dir_name, "downloadClientId": download_id, "importMode": import_mode}
if not download_id: if not download_id:
data.pop("downloadClientId") data.pop("downloadClientId")
data = json.dumps(data) data = json.dumps(data)
@@ -306,45 +306,45 @@ class autoProcessTV(object):
logger.error("Server returned status {0}".format(r.status_code), section) logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)] return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
Success = False success = False
Queued = False queued = False
Started = False started = False
if section == "SickBeard": if section == "SickBeard":
if apikey: if apikey:
if r.json()['result'] == 'success': if r.json()['result'] == 'success':
Success = True success = True
else: else:
for line in r.iter_lines(): for line in r.iter_lines():
if line: if line:
line = line.decode('utf-8') line = line.decode('utf-8')
logger.postprocess("{0}".format(line), section) logger.postprocess("{0}".format(line), section)
if "Moving file from" in line: if "Moving file from" in line:
inputName = os.path.split(line)[1] input_name = os.path.split(line)[1]
if "added to the queue" in line: if "added to the queue" in line:
Queued = True queued = True
if "Processing succeeded" in line or "Successfully processed" in line: if "Processing succeeded" in line or "Successfully processed" in line:
Success = True success = True
if Queued: if queued:
time.sleep(60) time.sleep(60)
elif section == "NzbDrone": elif section == "NzbDrone":
try: try:
res = json.loads(r.content) res = json.loads(r.content)
scan_id = int(res['id']) scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section) logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True started = True
except Exception as e: except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section) logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None scan_id = None
Started = False started = False
if status != 0 and delete_failed and not os.path.dirname(dirName) == dirName: if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section) logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
rmDir(dirName) remove_dir(dir_name)
if Success: if success:
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif section == "NzbDrone" and Started: elif section == "NzbDrone" and started:
n = 0 n = 0
params = {} params = {}
url = "{0}/{1}".format(url, scan_id) url = "{0}/{1}".format(url, scan_id)
@@ -356,20 +356,20 @@ class autoProcessTV(object):
n += 1 n += 1
if command_status: if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section) logger.debug("The Scan command return status: {0}".format(command_status), section)
if not os.path.exists(dirName): if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section) logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['completed']: elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section) logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)] return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['failed']: elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section) logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ] # return [1, "%s: Failed to post-process %s" % (section, input_name) ]
if self.CDH(url2, headers, section=section): if self.completed_download_handling(url2, headers, section=section):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section) logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)] return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
else: else:
logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section) logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section)
return [1, "{0}: Failed to post-process {1}".format(section, inputName)] return [1, "{0}: Failed to post-process {1}".format(section, input_name)]
else: else:
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)] # We did not receive Success confirmation. return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)] # We did not receive Success confirmation.
View file
@@ -1,15 +1,15 @@
# coding=utf-8 # coding=utf-8
from core import logger, nzbToMediaDB from core import logger, nzbToMediaDB
from core.nzbToMediaUtil import backupVersionedFile from core.nzbToMediaUtil import backup_versioned_file
MIN_DB_VERSION = 1 # oldest db version we support migrating from MIN_DB_VERSION = 1 # oldest db version we support migrating from
MAX_DB_VERSION = 2 MAX_DB_VERSION = 2
def backupDatabase(version): def backup_database(version):
logger.info("Backing up database before upgrade") logger.info("Backing up database before upgrade")
if not backupVersionedFile(nzbToMediaDB.dbFilename(), version): if not backup_versioned_file(nzbToMediaDB.db_filename(), version):
logger.log_error_and_exit("Database backup failed, abort upgrading database") logger.log_error_and_exit("Database backup failed, abort upgrading database")
else: else:
logger.info("Proceeding with upgrade") logger.info("Proceeding with upgrade")
@@ -23,13 +23,13 @@ def backupDatabase(version):
class InitialSchema(nzbToMediaDB.SchemaUpgrade): class InitialSchema(nzbToMediaDB.SchemaUpgrade):
def test(self): def test(self):
no_update = False no_update = False
if self.hasTable("db_version"): if self.has_table("db_version"):
cur_db_version = self.checkDBVersion() cur_db_version = self.check_db_version()
no_update = not cur_db_version < MAX_DB_VERSION no_update = not cur_db_version < MAX_DB_VERSION
return no_update return no_update
def execute(self): def execute(self):
if not self.hasTable("downloads") and not self.hasTable("db_version"): if not self.has_table("downloads") and not self.has_table("db_version"):
queries = [ queries = [
"CREATE TABLE db_version (db_version INTEGER);", "CREATE TABLE db_version (db_version INTEGER);",
"CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));", "CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));",
@@ -39,7 +39,7 @@ class InitialSchema(nzbToMediaDB.SchemaUpgrade):
self.connection.action(query) self.connection.action(query)
else: else:
cur_db_version = self.checkDBVersion() cur_db_version = self.check_db_version()
if cur_db_version < MIN_DB_VERSION: if cur_db_version < MIN_DB_VERSION:
logger.log_error_and_exit(u"Your database version ({current}) is too old to migrate " logger.log_error_and_exit(u"Your database version ({current}) is too old to migrate "
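The initial-schema migration above keys everything off a one-row db_version table bounded by MIN_DB_VERSION and MAX_DB_VERSION. A small, self-contained sqlite3 sketch of that version check; the table and column names are taken from the hunk, while the helper itself and the database filename are illustrative.

import sqlite3

MIN_DB_VERSION = 1  # oldest db version we support migrating from
MAX_DB_VERSION = 2

def check_db_version(db_path="nzbtomedia.db"):  # filename is an assumption
    # Read the stored schema version, creating the table on first run,
    # mirroring the CREATE TABLE statements in the migration above.
    conn = sqlite3.connect(db_path)
    conn.execute("CREATE TABLE IF NOT EXISTS db_version (db_version INTEGER);")
    row = conn.execute("SELECT db_version FROM db_version").fetchone()
    if row is None:
        conn.execute("INSERT INTO db_version (db_version) VALUES (?)", (MAX_DB_VERSION,))
        conn.commit()
        return MAX_DB_VERSION
    return row[0]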
View file
@@ -11,7 +11,7 @@ from time import sleep
import core import core
def extract(filePath, outputDestination): def extract(file_path, output_destination):
success = 0 success = 0
# Using Windows # Using Windows
if platform.system() == 'Windows': if platform.system() == 'Windows':
@@ -22,7 +22,7 @@ def extract(filePath, outputDestination):
invislocation = os.path.join(core.PROGRAM_DIR, 'core', 'extractor', 'bin', 'invisible.vbs') invislocation = os.path.join(core.PROGRAM_DIR, 'core', 'extractor', 'bin', 'invisible.vbs')
cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, "x", "-y"] cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, "x", "-y"]
ext_7zip = [".rar", ".zip", ".tar.gz", "tgz", ".tar.bz2", ".tbz", ".tar.lzma", ".tlz", ".7z", ".xz"] ext_7zip = [".rar", ".zip", ".tar.gz", "tgz", ".tar.bz2", ".tbz", ".tar.lzma", ".tlz", ".7z", ".xz"]
EXTRACT_COMMANDS = dict.fromkeys(ext_7zip, cmd_7zip) extract_commands = dict.fromkeys(ext_7zip, cmd_7zip)
# Using unix # Using unix
else: else:
required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"] required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"]
@@ -33,7 +33,7 @@ def extract(filePath, outputDestination):
# ".lzma": ["xz", "-d --format=lzma --keep"], # ".lzma": ["xz", "-d --format=lzma --keep"],
# ".bz2": ["bzip2", "-d --keep"], # ".bz2": ["bzip2", "-d --keep"],
EXTRACT_COMMANDS = { extract_commands = {
".rar": ["unrar", "x", "-o+", "-y"], ".rar": ["unrar", "x", "-o+", "-y"],
".tar": ["tar", "-xf"], ".tar": ["tar", "-xf"],
".zip": ["unzip"], ".zip": ["unzip"],
@@ -49,46 +49,46 @@ def extract(filePath, outputDestination):
for cmd in required_cmds: for cmd in required_cmds:
if call(['which', cmd], stdout=devnull, if call(['which', cmd], stdout=devnull,
stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist. stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist.
for k, v in EXTRACT_COMMANDS.items(): for k, v in extract_commands.items():
if cmd in v[0]: if cmd in v[0]:
if not call(["which", "7zr"], stdout=devnull, stderr=devnull): # we do have "7zr" if not call(["which", "7zr"], stdout=devnull, stderr=devnull): # we do have "7zr"
EXTRACT_COMMANDS[k] = ["7zr", "x", "-y"] extract_commands[k] = ["7zr", "x", "-y"]
elif not call(["which", "7z"], stdout=devnull, stderr=devnull): # we do have "7z" elif not call(["which", "7z"], stdout=devnull, stderr=devnull): # we do have "7z"
EXTRACT_COMMANDS[k] = ["7z", "x", "-y"] extract_commands[k] = ["7z", "x", "-y"]
elif not call(["which", "7za"], stdout=devnull, stderr=devnull): # we do have "7za" elif not call(["which", "7za"], stdout=devnull, stderr=devnull): # we do have "7za"
EXTRACT_COMMANDS[k] = ["7za", "x", "-y"] extract_commands[k] = ["7za", "x", "-y"]
else: else:
core.logger.error("EXTRACTOR: {cmd} not found, " core.logger.error("EXTRACTOR: {cmd} not found, "
"disabling support for {feature}".format "disabling support for {feature}".format
(cmd=cmd, feature=k)) (cmd=cmd, feature=k))
del EXTRACT_COMMANDS[k] del extract_commands[k]
devnull.close() devnull.close()
else: else:
core.logger.warning("EXTRACTOR: Cannot determine which tool to use when called from Transmission") core.logger.warning("EXTRACTOR: Cannot determine which tool to use when called from Transmission")
if not EXTRACT_COMMANDS: if not extract_commands:
core.logger.warning("EXTRACTOR: No archive extracting programs found, plugin will be disabled") core.logger.warning("EXTRACTOR: No archive extracting programs found, plugin will be disabled")
ext = os.path.splitext(filePath) ext = os.path.splitext(file_path)
cmd = [] cmd = []
if ext[1] in (".gz", ".bz2", ".lzma"): if ext[1] in (".gz", ".bz2", ".lzma"):
# Check if this is a tar # Check if this is a tar
if os.path.splitext(ext[0])[1] == ".tar": if os.path.splitext(ext[0])[1] == ".tar":
cmd = EXTRACT_COMMANDS[".tar{ext}".format(ext=ext[1])] cmd = extract_commands[".tar{ext}".format(ext=ext[1])]
elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"): elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"):
cmd = EXTRACT_COMMANDS[os.path.splitext(ext[0])[1]] cmd = extract_commands[os.path.splitext(ext[0])[1]]
elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives. elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives.
return False return False
else: else:
if ext[1] in EXTRACT_COMMANDS: if ext[1] in extract_commands:
cmd = EXTRACT_COMMANDS[ext[1]] cmd = extract_commands[ext[1]]
else: else:
core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format
(ext=ext[1])) (ext=ext[1]))
return False return False
# Create outputDestination folder # Create outputDestination folder
core.makeDir(outputDestination) core.make_dir(output_destination)
if core.PASSWORDSFILE and os.path.isfile(os.path.normpath(core.PASSWORDSFILE)): if core.PASSWORDSFILE and os.path.isfile(os.path.normpath(core.PASSWORDSFILE)):
passwords = [line.strip() for line in open(os.path.normpath(core.PASSWORDSFILE))] passwords = [line.strip() for line in open(os.path.normpath(core.PASSWORDSFILE))]
@@ -96,25 +96,25 @@ def extract(filePath, outputDestination):
passwords = [] passwords = []
core.logger.info("Extracting {file} to {destination}".format core.logger.info("Extracting {file} to {destination}".format
(file=filePath, destination=outputDestination)) (file=file_path, destination=output_destination))
core.logger.debug("Extracting {cmd} {file} {destination}".format core.logger.debug("Extracting {cmd} {file} {destination}".format
(cmd=cmd, file=filePath, destination=outputDestination)) (cmd=cmd, file=file_path, destination=output_destination))
origFiles = [] orig_files = []
origDirs = [] orig_dirs = []
for dir, subdirs, files in os.walk(outputDestination): for dir, subdirs, files in os.walk(output_destination):
for subdir in subdirs: for subdir in subdirs:
origDirs.append(os.path.join(dir, subdir)) orig_dirs.append(os.path.join(dir, subdir))
for file in files: for file in files:
origFiles.append(os.path.join(dir, file)) orig_files.append(os.path.join(dir, file))
pwd = os.getcwd() # Get our Present Working Directory pwd = os.getcwd() # Get our Present Working Directory
os.chdir(outputDestination) # Not all unpack commands accept full paths, so just extract into this directory os.chdir(output_destination) # Not all unpack commands accept full paths, so just extract into this directory
devnull = open(os.devnull, 'w') devnull = open(os.devnull, 'w')
try: # now works same for nt and *nix try: # now works same for nt and *nix
info = None info = None
cmd.append(filePath) # add filePath to final cmd arg. cmd.append(file_path) # add filePath to final cmd arg.
if platform.system() == 'Windows': if platform.system() == 'Windows':
info = subprocess.STARTUPINFO() info = subprocess.STARTUPINFO()
info.dwFlags |= subprocess.STARTF_USESHOWWINDOW info.dwFlags |= subprocess.STARTF_USESHOWWINDOW
@@ -126,7 +126,7 @@ def extract(filePath, outputDestination):
res = p.wait() res = p.wait()
if res == 0: # Both Linux and Windows return 0 for successful. if res == 0: # Both Linux and Windows return 0 for successful.
core.logger.info("EXTRACTOR: Extraction was successful for {file} to {destination}".format core.logger.info("EXTRACTOR: Extraction was successful for {file} to {destination}".format
(file=filePath, destination=outputDestination)) (file=file_path, destination=output_destination))
success = 1 success = 1
elif len(passwords) > 0: elif len(passwords) > 0:
core.logger.info("EXTRACTOR: Attempting to extract with passwords") core.logger.info("EXTRACTOR: Attempting to extract with passwords")
@@ -142,7 +142,7 @@ def extract(filePath, outputDestination):
if (res >= 0 and platform == 'Windows') or res == 0: if (res >= 0 and platform == 'Windows') or res == 0:
core.logger.info("EXTRACTOR: Extraction was successful " core.logger.info("EXTRACTOR: Extraction was successful "
"for {file} to {destination} using password: {pwd}".format "for {file} to {destination} using password: {pwd}".format
(file=filePath, destination=outputDestination, pwd=password)) (file=file_path, destination=output_destination, pwd=password))
success = 1 success = 1
break break
else: else:
@@ -150,7 +150,7 @@ def extract(filePath, outputDestination):
except: except:
core.logger.error("EXTRACTOR: Extraction failed for {file}. " core.logger.error("EXTRACTOR: Extraction failed for {file}. "
"Could not call command {cmd}".format "Could not call command {cmd}".format
(file=filePath, cmd=cmd)) (file=file_path, cmd=cmd))
os.chdir(pwd) os.chdir(pwd)
return False return False
@@ -159,23 +159,23 @@ def extract(filePath, outputDestination):
if success: if success:
# sleep to let files finish writing to disk # sleep to let files finish writing to disk
sleep(3) sleep(3)
perms = stat.S_IMODE(os.lstat(os.path.split(filePath)[0]).st_mode) perms = stat.S_IMODE(os.lstat(os.path.split(file_path)[0]).st_mode)
for dir, subdirs, files in os.walk(outputDestination): for dir, subdirs, files in os.walk(output_destination):
for subdir in subdirs: for subdir in subdirs:
if not os.path.join(dir, subdir) in origFiles: if not os.path.join(dir, subdir) in orig_files:
try: try:
os.chmod(os.path.join(dir, subdir), perms) os.chmod(os.path.join(dir, subdir), perms)
except: except:
pass pass
for file in files: for file in files:
if not os.path.join(dir, file) in origFiles: if not os.path.join(dir, file) in orig_files:
try: try:
shutil.copymode(filePath, os.path.join(dir, file)) shutil.copymode(file_path, os.path.join(dir, file))
except: except:
pass pass
return True return True
else: else:
core.logger.error("EXTRACTOR: Extraction failed for {file}. " core.logger.error("EXTRACTOR: Extraction failed for {file}. "
"Result was {result}".format "Result was {result}".format
(file=filePath, result=res)) (file=file_path, result=res))
return False return False
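A short usage note for the extract helper above: it changes into output_destination, runs the matching external unpacker (optionally retrying with passwords), and returns True or False. A hedged example call; the import path and the paths used are assumptions for illustration.

from core.extractor import extract  # import path is an assumption

if extract("/downloads/archive/release.rar", "/downloads/unpacked/release"):
    print("archive unpacked")
else:
    print("extraction failed or unsupported archive type")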
View file
@@ -14,7 +14,7 @@ class GitHub(object):
self.github_repo = github_repo self.github_repo = github_repo
self.branch = branch self.branch = branch
def _access_API(self, path, params=None): def _access_api(self, path, params=None):
""" """
Access the API at the path given and with the optional params given. Access the API at the path given and with the optional params given.
""" """
@@ -32,7 +32,7 @@ class GitHub(object):
Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/ Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
""" """
return self._access_API( return self._access_api(
['repos', self.github_repo_user, self.github_repo, 'commits'], ['repos', self.github_repo_user, self.github_repo, 'commits'],
params={'per_page': 100, 'sha': self.branch}, params={'per_page': 100, 'sha': self.branch},
) )
@@ -49,7 +49,7 @@ class GitHub(object):
Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/ Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/
""" """
return self._access_API( return self._access_api(
['repos', self.github_repo_user, self.github_repo, 'compare', ['repos', self.github_repo_user, self.github_repo, 'compare',
'{base}...{head}'.format(base=base, head=head)], '{base}...{head}'.format(base=base, head=head)],
params={'per_page': per_page}, params={'per_page': per_page},
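_access_api, as used by the commits and compare calls above, joins the path segments into a GitHub REST URL and fetches JSON. The body of the helper is not shown in this hunk, so the sketch below is only an approximation of that flow with hypothetical argument values.

import requests

def _access_api(path, params=None):
    # Join path segments into a GitHub API URL and return the decoded JSON,
    # e.g. _access_api(['repos', 'clinton-hall', 'nzbToMedia', 'commits']).
    url = "https://api.github.com/{0}".format("/".join(path))
    r = requests.get(url, params=params)
    return r.json()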
View file
@@ -58,10 +58,10 @@ class NTMRotatingLogHandler(object):
handler.flush() handler.flush()
handler.close() handler.close()
def initLogging(self, consoleLogging=True): def init_logging(self, console_logging=True):
if consoleLogging: if console_logging:
self.console_logging = consoleLogging self.console_logging = console_logging
old_handler = None old_handler = None
@@ -180,7 +180,7 @@ class NTMRotatingLogHandler(object):
pp_logger.addHandler(new_file_handler) pp_logger.addHandler(new_file_handler)
db_logger.addHandler(new_file_handler) db_logger.addHandler(new_file_handler)
def log(self, toLog, logLevel=MESSAGE, section='MAIN'): def log(self, to_log, log_level=MESSAGE, section='MAIN'):
with self.log_lock: with self.log_lock:
@@ -193,7 +193,7 @@ class NTMRotatingLogHandler(object):
self.writes_since_check += 1 self.writes_since_check += 1
try: try:
message = u"{0}: {1}".format(section.upper(), toLog) message = u"{0}: {1}".format(section.upper(), to_log)
except UnicodeError: except UnicodeError:
message = u"{0}: Message contains non-utf-8 string".format(section.upper()) message = u"{0}: Message contains non-utf-8 string".format(section.upper())
@@ -206,22 +206,22 @@ class NTMRotatingLogHandler(object):
setattr(db_logger, 'db', lambda *args: db_logger.log(DB, *args)) setattr(db_logger, 'db', lambda *args: db_logger.log(DB, *args))
try: try:
if logLevel == DEBUG: if log_level == DEBUG:
if core.LOG_DEBUG == 1: if core.LOG_DEBUG == 1:
ntm_logger.debug(out_line) ntm_logger.debug(out_line)
elif logLevel == MESSAGE: elif log_level == MESSAGE:
ntm_logger.info(out_line) ntm_logger.info(out_line)
elif logLevel == WARNING: elif log_level == WARNING:
ntm_logger.warning(out_line) ntm_logger.warning(out_line)
elif logLevel == ERROR: elif log_level == ERROR:
ntm_logger.error(out_line) ntm_logger.error(out_line)
elif logLevel == POSTPROCESS: elif log_level == POSTPROCESS:
pp_logger.postprocess(out_line) pp_logger.postprocess(out_line)
elif logLevel == DB: elif log_level == DB:
if core.LOG_DB == 1: if core.LOG_DB == 1:
db_logger.db(out_line) db_logger.db(out_line)
else: else:
ntm_logger.info(logLevel, out_line) ntm_logger.info(log_level, out_line)
except ValueError: except ValueError:
pass pass
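
The renamed if/elif ladder routes each line to one of three loggers; note that the final else passes the level value as the message argument of Logger.info, with the text as a formatting parameter, so it only fires for unknown levels. Purely as a hedged alternative shape (the core.LOG_DEBUG / core.LOG_DB gating is omitted, and the constant values below are placeholders, not the module's real ones):

    # Placeholder level constants standing in for the module's own.
    DB, DEBUG, MESSAGE, WARNING, ERROR, POSTPROCESS = 5, 10, 20, 30, 40, 21

    def dispatch(log_level, out_line, ntm_logger, pp_logger, db_logger):
        """Route one formatted line to the matching logger call, like the chain above."""
        table = {
            DEBUG: lambda: ntm_logger.debug(out_line),
            MESSAGE: lambda: ntm_logger.info(out_line),
            WARNING: lambda: ntm_logger.warning(out_line),
            ERROR: lambda: ntm_logger.error(out_line),
            POSTPROCESS: lambda: pp_logger.postprocess(out_line),
            DB: lambda: db_logger.db(out_line),
        }
        table.get(log_level, lambda: ntm_logger.info(out_line))()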
@ -249,32 +249,32 @@ class DispatchingFormatter(object):
ntm_log_instance = NTMRotatingLogHandler(core.LOG_FILE, NUM_LOGS, LOG_SIZE) ntm_log_instance = NTMRotatingLogHandler(core.LOG_FILE, NUM_LOGS, LOG_SIZE)
def log(toLog, logLevel=MESSAGE, section='MAIN'): def log(to_log, log_level=MESSAGE, section='MAIN'):
ntm_log_instance.log(toLog, logLevel, section) ntm_log_instance.log(to_log, log_level, section)
def info(toLog, section='MAIN'): def info(to_log, section='MAIN'):
log(toLog, MESSAGE, section) log(to_log, MESSAGE, section)
def error(toLog, section='MAIN'): def error(to_log, section='MAIN'):
log(toLog, ERROR, section) log(to_log, ERROR, section)
def warning(toLog, section='MAIN'): def warning(to_log, section='MAIN'):
log(toLog, WARNING, section) log(to_log, WARNING, section)
def debug(toLog, section='MAIN'): def debug(to_log, section='MAIN'):
log(toLog, DEBUG, section) log(to_log, DEBUG, section)
def postprocess(toLog, section='POSTPROCESS'): def postprocess(to_log, section='POSTPROCESS'):
log(toLog, POSTPROCESS, section) log(to_log, POSTPROCESS, section)
def db(toLog, section='DB'): def db(to_log, section='DB'):
log(toLog, DB, section) log(to_log, DB, section)
def log_error_and_exit(error_msg): def log_error_and_exit(error_msg):
@ -7,11 +7,11 @@ import core
from core import logger from core import logger
def autoFork(section, inputCategory): def auto_fork(section, input_category):
# auto-detect correct section # auto-detect correct section
# config settings # config settings
cfg = dict(core.CFG[section][inputCategory]) cfg = dict(core.CFG[section][input_category])
host = cfg.get("host") host = cfg.get("host")
port = cfg.get("port") port = cfg.get("port")
@ -31,26 +31,26 @@ def autoFork(section, inputCategory):
detected = False detected = False
if section == "NzbDrone": if section == "NzbDrone":
logger.info("Attempting to verify {category} fork".format logger.info("Attempting to verify {category} fork".format
(category=inputCategory)) (category=input_category))
url = "{protocol}{host}:{port}{root}/api/rootfolder".format( url = "{protocol}{host}:{port}{root}/api/rootfolder".format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root)
headers = {"X-Api-Key": apikey} headers = {"X-Api-Key": apikey}
try: try:
r = requests.get(url, headers=headers, stream=True, verify=False) r = requests.get(url, headers=headers, stream=True, verify=False)
except requests.ConnectionError: except requests.ConnectionError:
logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, inputCategory)) logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, input_category))
if not r.ok: if not r.ok:
logger.warning("Connection to {section}:{category} failed! " logger.warning("Connection to {section}:{category} failed! "
"Check your configuration".format "Check your configuration".format
(section=section, category=inputCategory)) (section=section, category=input_category))
fork = ['default', {}] fork = ['default', {}]
elif fork == "auto": elif fork == "auto":
params = core.ALL_FORKS params = core.ALL_FORKS
rem_params = [] rem_params = []
logger.info("Attempting to auto-detect {category} fork".format(category=inputCategory)) logger.info("Attempting to auto-detect {category} fork".format(category=input_category))
# define the order to test. Default must be first since the default fork doesn't reject parameters. # define the order to test. Default must be first since the default fork doesn't reject parameters.
# then in order of most unique parameters. # then in order of most unique parameters.
@ -75,17 +75,17 @@ def autoFork(section, inputCategory):
r = s.get(url, auth=(username, password), verify=False) r = s.get(url, auth=(username, password), verify=False)
except requests.ConnectionError: except requests.ConnectionError:
logger.info("Could not connect to {section}:{category} to perform auto-fork detection!".format logger.info("Could not connect to {section}:{category} to perform auto-fork detection!".format
(section=section, category=inputCategory)) (section=section, category=input_category))
r = [] r = []
if r and r.ok: if r and r.ok:
if apikey: if apikey:
optionalParameters = [] optional_parameters = []
try: try:
optionalParameters = r.json()['data']['optionalParameters'].keys() optional_parameters = r.json()['data']['optionalParameters'].keys()
except: except:
optionalParameters = r.json()['data']['data']['optionalParameters'].keys() optional_parameters = r.json()['data']['data']['optionalParameters'].keys()
for param in params: for param in params:
if param not in optionalParameters: if param not in optional_parameters:
rem_params.append(param) rem_params.append(param)
else: else:
for param in params: for param in params:
@ -99,16 +99,16 @@ def autoFork(section, inputCategory):
break break
if detected: if detected:
logger.info("{section}:{category} fork auto-detection successful ...".format logger.info("{section}:{category} fork auto-detection successful ...".format
(section=section, category=inputCategory)) (section=section, category=input_category))
elif rem_params: elif rem_params:
logger.info("{section}:{category} fork auto-detection found custom params {params}".format logger.info("{section}:{category} fork auto-detection found custom params {params}".format
(section=section, category=inputCategory, params=params)) (section=section, category=input_category, params=params))
fork = ['custom', params] fork = ['custom', params]
else: else:
logger.info("{section}:{category} fork auto-detection failed".format logger.info("{section}:{category} fork auto-detection failed".format
(section=section, category=inputCategory)) (section=section, category=input_category))
fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)] fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)]
logger.info("{section}:{category} fork set to {fork}".format logger.info("{section}:{category} fork set to {fork}".format
(section=section, category=inputCategory, fork=fork[0])) (section=section, category=input_category, fork=fork[0]))
return fork[0], fork[1] return fork[0], fork[1]
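
auto_fork narrows core.ALL_FORKS down to whatever parameters the target server reports as optional, then settles on the first known fork whose parameters all survive. A condensed sketch of that selection step only (connection handling, the api-key branch and the NzbDrone path are left out, and the fork table in the comment is invented):

    def detect_fork(known_forks, optional_parameters):
        """Return the first fork whose whole parameter set is accepted by the server."""
        for name, params in known_forks.items():
            if all(param in optional_parameters for param in params):
                return name, params
        return 'default', {}

    # detect_fork({'default': {'dir': None}, 'fork-a': {'dir': None, 'proc_type': None}},
    #             optional_parameters={'dir', 'proc_type'})   ->   ('default', {'dir': None})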
@ -251,7 +251,7 @@ class ConfigObj(configobj.ConfigObj, Section):
@staticmethod @staticmethod
def addnzbget(): def addnzbget():
# load configs into memory # load configs into memory
CFG_NEW = config() cfg_new = config()
try: try:
if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ: if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
@ -274,196 +274,196 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ: if key in os.environ:
option = 'default_downloadDirectory' option = 'default_downloadDirectory'
value = os.environ[key] value = os.environ[key]
CFG_NEW[section][option] = value cfg_new[section][option] = value
section = "General" section = "General"
envKeys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED'] env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfgKeys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed'] cfg_keys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed']
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=envKeys[index]) key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
CFG_NEW[section][option] = value cfg_new[section][option] = value
section = "Network" section = "Network"
envKeys = ['MOUNTPOINTS'] env_keys = ['MOUNTPOINTS']
cfgKeys = ['mount_points'] cfg_keys = ['mount_points']
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=envKeys[index]) key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
CFG_NEW[section][option] = value cfg_new[section][option] = value
section = "CouchPotato" section = "CouchPotato"
envCatKey = 'NZBPO_CPSCATEGORY' env_cat_key = 'NZBPO_CPSCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY'] 'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path', cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
'wait_for', 'watch_dir', 'omdbapikey'] 'wait_for', 'watch_dir', 'omdbapikey']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_CPS{index}'.format(index=envKeys[index]) key = 'NZBPO_CPS{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['Radarr'].sections: if os.environ[env_cat_key] in cfg_new['Radarr'].sections:
CFG_NEW['Radarr'][envCatKey]['enabled'] = 0 cfg_new['Radarr'][env_cat_key]['enabled'] = 0
section = "SickBeard" section = "SickBeard"
envCatKey = 'NZBPO_SBCATEGORY' env_cat_key = 'NZBPO_SBCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD'] 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
cfgKeys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', cfg_keys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork',
'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method'] 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_SB{index}'.format(index=envKeys[index]) key = 'NZBPO_SB{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['NzbDrone'].sections: if os.environ[env_cat_key] in cfg_new['NzbDrone'].sections:
CFG_NEW['NzbDrone'][envCatKey]['enabled'] = 0 cfg_new['NzbDrone'][env_cat_key]['enabled'] = 0
section = "HeadPhones" section = "HeadPhones"
envCatKey = 'NZBPO_HPCATEGORY' env_cat_key = 'NZBPO_HPCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED'] env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed'] cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_HP{index}'.format(index=envKeys[index]) key = 'NZBPO_HP{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['Lidarr'].sections: if os.environ[env_cat_key] in cfg_new['Lidarr'].sections:
CFG_NEW['Lidarr'][envCatKey]['enabled'] = 0 cfg_new['Lidarr'][env_cat_key]['enabled'] = 0
section = "Mylar" section = "Mylar"
envCatKey = 'NZBPO_MYCATEGORY' env_cat_key = 'NZBPO_MYCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR', env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
'REMOTE_PATH'] 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir', cfg_keys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
'remote_path'] 'remote_path']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_MY{index}'.format(index=envKeys[index]) key = 'NZBPO_MY{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "Gamez" section = "Gamez"
envCatKey = 'NZBPO_GZCATEGORY' env_cat_key = 'NZBPO_GZCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH'] env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path'] cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_GZ{index}'.format(index=envKeys[index]) key = 'NZBPO_GZ{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "NzbDrone" section = "NzbDrone"
envCatKey = 'NZBPO_NDCATEGORY' env_cat_key = 'NZBPO_NDCATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE'] 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE']
#new cfgKey added for importMode #new cfgKey added for importMode
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path','importMode'] 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path','importMode']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_ND{index}'.format(index=envKeys[index]) key = 'NZBPO_ND{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['SickBeard'].sections: if os.environ[env_cat_key] in cfg_new['SickBeard'].sections:
CFG_NEW['SickBeard'][envCatKey]['enabled'] = 0 cfg_new['SickBeard'][env_cat_key]['enabled'] = 0
section = "Radarr" section = "Radarr"
envCatKey = 'NZBPO_RACATEGORY' env_cat_key = 'NZBPO_RACATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE'] 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE']
#new cfgKey added for importMode #new cfgKey added for importMode
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey','importMode'] 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey','importMode']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_RA{index}'.format(index=envKeys[index]) key = 'NZBPO_RA{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['CouchPotato'].sections: if os.environ[env_cat_key] in cfg_new['CouchPotato'].sections:
CFG_NEW['CouchPotato'][envCatKey]['enabled'] = 0 cfg_new['CouchPotato'][env_cat_key]['enabled'] = 0
section = "Lidarr" section = "Lidarr"
envCatKey = 'NZBPO_LICATEGORY' env_cat_key = 'NZBPO_LICATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH'] 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path'] 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_LI{index}'.format(index=envKeys[index]) key = 'NZBPO_LI{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['HeadPhones'].sections: if os.environ[env_cat_key] in cfg_new['HeadPhones'].sections:
CFG_NEW['HeadPhones'][envCatKey]['enabled'] = 0 cfg_new['HeadPhones'][env_cat_key]['enabled'] = 0
section = "Extensions" section = "Extensions"
envKeys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS'] env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions'] cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=envKeys[index]) key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
CFG_NEW[section][option] = value cfg_new[section][option] = value
section = "Posix" section = "Posix"
envKeys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA'] env_keys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata'] cfg_keys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=envKeys[index]) key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
CFG_NEW[section][option] = value cfg_new[section][option] = value
section = "Transcoder" section = "Transcoder"
envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH', env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES', 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR', 'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW', 'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
@ -473,7 +473,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE', 'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS', 'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS',
'OUTPUTAUDIOOTHERCHANNELS','OUTPUTVIDEORESOLUTION'] 'OUTPUTAUDIOOTHERCHANNELS','OUTPUTVIDEORESOLUTION']
cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath', cfg_keys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages', 'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir', 'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir',
'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow', 'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
@ -483,51 +483,51 @@ class ConfigObj(configobj.ConfigObj, Section):
'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate', 'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels', 'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
'outputAudioOtherChannels', 'outputVideoResolution'] 'outputAudioOtherChannels', 'outputVideoResolution']
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=envKeys[index]) key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
CFG_NEW[section][option] = value cfg_new[section][option] = value
section = "WakeOnLan" section = "WakeOnLan"
envKeys = ['WAKE', 'HOST', 'PORT', 'MAC'] env_keys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfgKeys = ['wake', 'host', 'port', 'mac'] cfg_keys = ['wake', 'host', 'port', 'mac']
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_WOL{index}'.format(index=envKeys[index]) key = 'NZBPO_WOL{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
CFG_NEW[section][option] = value cfg_new[section][option] = value
section = "UserScript" section = "UserScript"
envCatKey = 'NZBPO_USCATEGORY' env_cat_key = 'NZBPO_USCATEGORY'
envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE', env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH'] 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce', cfg_keys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path'] 'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
if envCatKey in os.environ: if env_cat_key in os.environ:
for index in range(len(envKeys)): for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=envKeys[index]) key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ: if key in os.environ:
option = cfgKeys[index] option = cfg_keys[index]
value = os.environ[key] value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections: if os.environ[env_cat_key] not in cfg_new[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {} cfg_new[section][os.environ[env_cat_key]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
except Exception as error: except Exception as error:
logger.debug("Error {msg} when applying NZBGet config".format(msg=error)) logger.debug("Error {msg} when applying NZBGet config".format(msg=error))
try: try:
# write our new config to autoProcessMedia.cfg # write our new config to autoProcessMedia.cfg
CFG_NEW.filename = core.CONFIG_FILE cfg_new.filename = core.CONFIG_FILE
CFG_NEW.write() cfg_new.write()
except Exception as error: except Exception as error:
logger.debug("Error {msg} when writing changes to .cfg".format(msg=error)) logger.debug("Error {msg} when writing changes to .cfg".format(msg=error))
return CFG_NEW return cfg_new
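
Every service block in addnzbget repeats the same index loop over parallel env_keys / cfg_keys lists. Offered only as a sketch of how that pattern could be factored (the helper name and the zip pairing are mine, not part of this change):

    import os

    def apply_env_overrides(cfg_section, prefix, env_keys, cfg_keys):
        """Copy NZBGet post-processing variables (NZBPO_<prefix><KEY>) into one config section."""
        for env_key, cfg_key in zip(env_keys, cfg_keys):
            value = os.environ.get('NZBPO_{0}{1}'.format(prefix, env_key))
            if value is not None:
                cfg_section[cfg_key] = value

    # The Posix block above would then read:
    # apply_env_overrides(cfg_new['Posix'], '',
    #                     ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA'],
    #                     ['niceness', 'ionice_class', 'ionice_classdata'])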
configobj.Section = Section configobj.Section = Section
@ -12,7 +12,7 @@ import core
from core import logger from core import logger
def dbFilename(filename="nzbtomedia.db", suffix=None): def db_filename(filename="nzbtomedia.db", suffix=None):
""" """
@param filename: The sqlite database filename to use. If not specified, @param filename: The sqlite database filename to use. If not specified,
will be made to be nzbtomedia.db will be made to be nzbtomedia.db
@ -29,13 +29,13 @@ class DBConnection(object):
def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None): def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None):
self.filename = filename self.filename = filename
self.connection = sqlite3.connect(dbFilename(filename), 20) self.connection = sqlite3.connect(db_filename(filename), 20)
if row_type == "dict": if row_type == "dict":
self.connection.row_factory = self._dict_factory self.connection.row_factory = self._dict_factory
else: else:
self.connection.row_factory = sqlite3.Row self.connection.row_factory = sqlite3.Row
def checkDBVersion(self): def check_db_version(self):
result = None result = None
try: try:
result = self.select("SELECT db_version FROM db_version") result = self.select("SELECT db_version FROM db_version")
@ -52,7 +52,7 @@ class DBConnection(object):
if query is None: if query is None:
return return
sqlResult = None sql_result = None
attempt = 0 attempt = 0
while attempt < 5: while attempt < 5:
@ -61,13 +61,13 @@ class DBConnection(object):
logger.log("{name}: {query}".format(name=self.filename, query=query), logger.DB) logger.log("{name}: {query}".format(name=self.filename, query=query), logger.DB)
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute(query) cursor.execute(query)
sqlResult = cursor.fetchone()[0] sql_result = cursor.fetchone()[0]
else: else:
logger.log("{name}: {query} with args {args}".format logger.log("{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB) (name=self.filename, query=query, args=args), logger.DB)
cursor = self.connection.cursor() cursor = self.connection.cursor()
cursor.execute(query, args) cursor.execute(query, args)
sqlResult = cursor.fetchone()[0] sql_result = cursor.fetchone()[0]
# get out of the connection attempt loop since we were successful # get out of the connection attempt loop since we were successful
break break
@ -83,31 +83,31 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise raise
return sqlResult return sql_result
def mass_action(self, querylist, logTransaction=False): def mass_action(self, querylist, log_transaction=False):
if querylist is None: if querylist is None:
return return
sqlResult = [] sql_result = []
attempt = 0 attempt = 0
while attempt < 5: while attempt < 5:
try: try:
for qu in querylist: for qu in querylist:
if len(qu) == 1: if len(qu) == 1:
if logTransaction: if log_transaction:
logger.log(qu[0], logger.DEBUG) logger.log(qu[0], logger.DEBUG)
sqlResult.append(self.connection.execute(qu[0])) sql_result.append(self.connection.execute(qu[0]))
elif len(qu) > 1: elif len(qu) > 1:
if logTransaction: if log_transaction:
logger.log(u"{query} with args {args}".format(query=qu[0], args=qu[1]), logger.DEBUG) logger.log(u"{query} with args {args}".format(query=qu[0], args=qu[1]), logger.DEBUG)
sqlResult.append(self.connection.execute(qu[0], qu[1])) sql_result.append(self.connection.execute(qu[0], qu[1]))
self.connection.commit() self.connection.commit()
logger.log(u"Transaction with {x} query's executed".format(x=len(querylist)), logger.DEBUG) logger.log(u"Transaction with {x} query's executed".format(x=len(querylist)), logger.DEBUG)
return sqlResult return sql_result
except sqlite3.OperationalError as error: except sqlite3.OperationalError as error:
sqlResult = [] sql_result = []
if self.connection: if self.connection:
self.connection.rollback() self.connection.rollback()
if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]: if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]:
@ -123,24 +123,24 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise raise
return sqlResult return sql_result
def action(self, query, args=None): def action(self, query, args=None):
if query is None: if query is None:
return return
sqlResult = None sql_result = None
attempt = 0 attempt = 0
while attempt < 5: while attempt < 5:
try: try:
if args is None: if args is None:
logger.log(u"{name}: {query}".format(name=self.filename, query=query), logger.DB) logger.log(u"{name}: {query}".format(name=self.filename, query=query), logger.DB)
sqlResult = self.connection.execute(query) sql_result = self.connection.execute(query)
else: else:
logger.log(u"{name}: {query} with args {args}".format logger.log(u"{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB) (name=self.filename, query=query, args=args), logger.DB)
sqlResult = self.connection.execute(query, args) sql_result = self.connection.execute(query, args)
self.connection.commit() self.connection.commit()
# get out of the connection attempt loop since we were successful # get out of the connection attempt loop since we were successful
break break
@ -156,49 +156,49 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR) logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise raise
return sqlResult return sql_result
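
Both mass_action and action retry up to five times when SQLite reports a locked or unopenable database, rolling back before the next attempt. A stripped-down version of that retry shape; the one-second back-off is an assumption, since the sleep itself sits outside this hunk:

    import sqlite3
    import time

    def run_with_retry(connection, query, args=None, attempts=5, delay=1):
        """Execute one statement, retrying while SQLite reports the database as locked."""
        for _ in range(attempts):
            try:
                cursor = connection.execute(query, args or [])
                connection.commit()
                return cursor
            except sqlite3.OperationalError as error:
                connection.rollback()
                if ("database is locked" not in error.args[0]
                        and "unable to open database file" not in error.args[0]):
                    raise
                time.sleep(delay)
        raise sqlite3.OperationalError("gave up after {0} attempts".format(attempts))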
def select(self, query, args=None): def select(self, query, args=None):
sqlResults = self.action(query, args).fetchall() sql_results = self.action(query, args).fetchall()
if sqlResults is None: if sql_results is None:
return [] return []
return sqlResults return sql_results
def upsert(self, tableName, valueDict, keyDict): def upsert(self, table_name, value_dict, key_dict):
changesBefore = self.connection.total_changes changes_before = self.connection.total_changes
genParams = lambda myDict: ["{key} = ?".format(key=k) for k in myDict.keys()] gen_params = lambda my_dict: ["{key} = ?".format(key=k) for k in my_dict.keys()]
items = list(valueDict.values()) + list(keyDict.values()) items = list(value_dict.values()) + list(key_dict.values())
self.action( self.action(
"UPDATE {table} " "UPDATE {table} "
"SET {params} " "SET {params} "
"WHERE {conditions}".format( "WHERE {conditions}".format(
table=tableName, table=table_name,
params=", ".join(genParams(valueDict)), params=", ".join(gen_params(value_dict)),
conditions=" AND ".join(genParams(keyDict)) conditions=" AND ".join(gen_params(key_dict))
), ),
items items
) )
if self.connection.total_changes == changesBefore: if self.connection.total_changes == changes_before:
self.action( self.action(
"INSERT OR IGNORE INTO {table} ({columns}) " "INSERT OR IGNORE INTO {table} ({columns}) "
"VALUES ({values})".format( "VALUES ({values})".format(
table=tableName, table=table_name,
columns=", ".join(map(text_type, valueDict.keys())), columns=", ".join(map(text_type, value_dict.keys())),
values=", ".join(["?"] * len(valueDict.values())) values=", ".join(["?"] * len(value_dict.values()))
), ),
list(valueDict.values()) list(value_dict.values())
) )
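
upsert() issues an UPDATE first and only falls back to INSERT OR IGNORE when total_changes did not move; note the fallback inserts only the value_dict columns, not the key columns. A usage sketch (table and column names are illustrative, not this project's schema):

    db = DBConnection()                       # the class defined above
    db.upsert("history", value_dict={"status": 1}, key_dict={"name": "example"})
    # runs:  UPDATE history SET status = ? WHERE name = ?      with [1, 'example']
    # then, only if no row was changed:
    #        INSERT OR IGNORE INTO history (status) VALUES (?)  with [1]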
def tableInfo(self, tableName): def table_info(self, table_name):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
cursor = self.connection.execute("PRAGMA table_info({0})".format(tableName)) cursor = self.connection.execute("PRAGMA table_info({0})".format(table_name))
columns = {} columns = {}
for column in cursor: for column in cursor:
columns[column['name']] = {'type': column['type']} columns[column['name']] = {'type': column['type']}
@ -212,7 +212,7 @@ class DBConnection(object):
return d return d
def sanityCheckDatabase(connection, sanity_check): def sanity_check_database(connection, sanity_check):
sanity_check(connection).check() sanity_check(connection).check()
@ -228,36 +228,36 @@ class DBSanityCheck(object):
# = Upgrade API = # = Upgrade API =
# =============== # ===============
def upgradeDatabase(connection, schema): def upgrade_database(connection, schema):
logger.log(u"Checking database structure...", logger.MESSAGE) logger.log(u"Checking database structure...", logger.MESSAGE)
_processUpgrade(connection, schema) _process_upgrade(connection, schema)
def prettyName(class_name): def pretty_name(class_name):
return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)]) return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])
def _processUpgrade(connection, upgradeClass): def _process_upgrade(connection, upgrade_class):
instance = upgradeClass(connection) instance = upgrade_class(connection)
logger.log(u"Checking {name} database upgrade".format logger.log(u"Checking {name} database upgrade".format
(name=prettyName(upgradeClass.__name__)), logger.DEBUG) (name=pretty_name(upgrade_class.__name__)), logger.DEBUG)
if not instance.test(): if not instance.test():
logger.log(u"Database upgrade required: {name}".format logger.log(u"Database upgrade required: {name}".format
(name=prettyName(upgradeClass.__name__)), logger.MESSAGE) (name=pretty_name(upgrade_class.__name__)), logger.MESSAGE)
try: try:
instance.execute() instance.execute()
except sqlite3.DatabaseError as error: except sqlite3.DatabaseError as error:
print(u"Error in {name}: {msg}".format print(u"Error in {name}: {msg}".format
(name=upgradeClass.__name__, msg=error)) (name=upgrade_class.__name__, msg=error))
raise raise
logger.log(u"{name} upgrade completed".format logger.log(u"{name} upgrade completed".format
(name=upgradeClass.__name__), logger.DEBUG) (name=upgrade_class.__name__), logger.DEBUG)
else: else:
logger.log(u"{name} upgrade not required".format logger.log(u"{name} upgrade not required".format
(name=upgradeClass.__name__), logger.DEBUG) (name=upgrade_class.__name__), logger.DEBUG)
for upgradeSubClass in upgradeClass.__subclasses__(): for upgradeSubClass in upgrade_class.__subclasses__():
_processUpgrade(connection, upgradeSubClass) _process_upgrade(connection, upgradeSubClass)
# Base migration class. All future DB changes should be subclassed from this class # Base migration class. All future DB changes should be subclassed from this class
@ -265,24 +265,24 @@ class SchemaUpgrade(object):
def __init__(self, connection): def __init__(self, connection):
self.connection = connection self.connection = connection
def hasTable(self, tableName): def has_table(self, table_name):
return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName,)).fetchall()) > 0 return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (table_name,)).fetchall()) > 0
def hasColumn(self, tableName, column): def has_column(self, table_name, column):
return column in self.connection.tableInfo(tableName) return column in self.connection.table_info(table_name)
def addColumn(self, table, column, type="NUMERIC", default=0): def add_column(self, table, column, type="NUMERIC", default=0):
self.connection.action("ALTER TABLE {0} ADD {1} {2}".format(table, column, type)) self.connection.action("ALTER TABLE {0} ADD {1} {2}".format(table, column, type))
self.connection.action("UPDATE {0} SET {1} = ?".format(table, column), (default,)) self.connection.action("UPDATE {0} SET {1} = ?".format(table, column), (default,))
def checkDBVersion(self): def check_db_version(self):
result = self.connection.select("SELECT db_version FROM db_version") result = self.connection.select("SELECT db_version FROM db_version")
if result: if result:
return int(result[-1]["db_version"]) return int(result[-1]["db_version"])
else: else:
return 0 return 0
def incDBVersion(self): def inc_db_version(self):
new_version = self.checkDBVersion() + 1 new_version = self.check_db_version() + 1
self.connection.action("UPDATE db_version SET db_version = ?", [new_version]) self.connection.action("UPDATE db_version SET db_version = ?", [new_version])
return new_version return new_version
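
SchemaUpgrade hands migrations has_table / has_column / add_column plus the version counter, and _process_upgrade above drives every subclass through test() and execute(). A minimal subclass sketch (class, table and column names are invented for illustration):

    class AddNotesColumn(SchemaUpgrade):
        """Example migration: add a column when it is missing, then bump db_version."""

        def test(self):
            # _process_upgrade() runs execute() only when this returns False.
            return self.has_column("history", "notes")

        def execute(self):
            self.add_column("history", "notes", type="TEXT", default="")
            self.inc_db_version()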
@ -8,7 +8,7 @@ import subprocess
import core import core
from core import logger from core import logger
from core.nzbToMediaUtil import listMediaFiles from core.nzbToMediaUtil import list_media_files
reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.", reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.",
r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.", r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.",
@ -32,10 +32,10 @@ char_replace = [[r"(\w)1\.(\w)", r"\1i\2"]
def process_all_exceptions(name, dirname): def process_all_exceptions(name, dirname):
par2(dirname) par2(dirname)
rename_script(dirname) rename_script(dirname)
for filename in listMediaFiles(dirname): for filename in list_media_files(dirname):
newfilename = None newfilename = None
parentDir = os.path.dirname(filename) parent_dir = os.path.dirname(filename)
head, fileExtension = os.path.splitext(os.path.basename(filename)) head, file_extension = os.path.splitext(os.path.basename(filename))
if reverse_pattern.search(head) is not None: if reverse_pattern.search(head) is not None:
exception = reverse_filename exception = reverse_filename
elif garbage_name.search(head) is not None: elif garbage_name.search(head) is not None:
@ -44,7 +44,7 @@ def process_all_exceptions(name, dirname):
exception = None exception = None
newfilename = filename newfilename = filename
if not newfilename: if not newfilename:
newfilename = exception(filename, parentDir, name) newfilename = exception(filename, parent_dir, name)
if core.GROUPS: if core.GROUPS:
newfilename = strip_groups(newfilename) newfilename = strip_groups(newfilename)
if newfilename != filename: if newfilename != filename:
@ -55,29 +55,29 @@ def strip_groups(filename):
if not core.GROUPS: if not core.GROUPS:
return filename return filename
dirname, file = os.path.split(filename) dirname, file = os.path.split(filename)
head, fileExtension = os.path.splitext(file) head, file_extension = os.path.splitext(file)
newname = head.replace(' ', '.') newname = head.replace(' ', '.')
for group in core.GROUPS: for group in core.GROUPS:
newname = newname.replace(group, '') newname = newname.replace(group, '')
newname = newname.replace('[]', '') newname = newname.replace('[]', '')
newfile = newname + fileExtension newfile = newname + file_extension
newfilePath = os.path.join(dirname, newfile) newfile_path = os.path.join(dirname, newfile)
return newfilePath return newfile_path
def rename_file(filename, newfilePath): def rename_file(filename, newfile_path):
if os.path.isfile(newfilePath): if os.path.isfile(newfile_path):
newfilePath = os.path.splitext(newfilePath)[0] + ".NTM" + os.path.splitext(newfilePath)[1] newfile_path = os.path.splitext(newfile_path)[0] + ".NTM" + os.path.splitext(newfile_path)[1]
logger.debug("Replacing file name {old} with download name {new}".format logger.debug("Replacing file name {old} with download name {new}".format
(old=filename, new=newfilePath), "EXCEPTION") (old=filename, new=newfile_path), "EXCEPTION")
try: try:
os.rename(filename, newfilePath) os.rename(filename, newfile_path)
except Exception as error: except Exception as error:
logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION") logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION")
def replace_filename(filename, dirname, name): def replace_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename)) head, file_extension = os.path.splitext(os.path.basename(filename))
if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None: if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
newname = os.path.basename(dirname).replace(' ', '.') newname = os.path.basename(dirname).replace(' ', '.')
logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION") logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION")
@ -88,13 +88,13 @@ def replace_filename(filename, dirname, name):
else: else:
logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION") logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION")
newname = name newname = name
newfile = newname + fileExtension newfile = newname + file_extension
newfilePath = os.path.join(dirname, newfile) newfile_path = os.path.join(dirname, newfile)
return newfilePath return newfile_path
def reverse_filename(filename, dirname, name): def reverse_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename)) head, file_extension = os.path.splitext(os.path.basename(filename))
na_parts = season_pattern.search(head) na_parts = season_pattern.search(head)
if na_parts is not None: if na_parts is not None:
word_p = word_pattern.findall(na_parts.group(2)) word_p = word_pattern.findall(na_parts.group(2))
@ -114,9 +114,9 @@ def reverse_filename(filename, dirname, name):
newname = newname.replace(' ', '.') newname = newname.replace(' ', '.')
logger.debug("Reversing filename {old} to {new}".format logger.debug("Reversing filename {old} to {new}".format
(old=head, new=newname), "EXCEPTION") (old=head, new=newname), "EXCEPTION")
newfile = newname + fileExtension newfile = newname + file_extension
newfilePath = os.path.join(dirname, newfile) newfile_path = os.path.join(dirname, newfile)
return newfilePath return newfile_path
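
strip_groups() dot-separates the base name, deletes every configured release-group tag, and re-attaches the extension. A self-contained sketch of that behaviour, with the groups list passed in instead of read from core.GROUPS:

    import os

    def strip_groups(filename, groups):
        """Remove release-group tags from a file name, keeping its directory and extension."""
        dirname, basename = os.path.split(filename)
        head, file_extension = os.path.splitext(basename)
        newname = head.replace(' ', '.')
        for group in groups:
            newname = newname.replace(group, '').replace('[]', '')
        return os.path.join(dirname, newname + file_extension)

    # strip_groups('/tv/Show.S01E01-RlsGrp.mkv', ['-RlsGrp'])  ->  '/tv/Show.S01E01.mkv'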
def rename_script(dirname): def rename_script(dirname):
@ -5,11 +5,11 @@ from subprocess import Popen
import core import core
from core import logger from core import logger
from core.nzbToMediaUtil import import_subs, listMediaFiles, rmDir from core.nzbToMediaUtil import import_subs, list_media_files, remove_dir
from core.transcoder import transcoder from core.transcoder import transcoder
def external_script(outputDestination, torrentName, torrentLabel, settings): def external_script(output_destination, torrent_name, torrent_label, settings):
final_result = 0 # start at 0. final_result = 0 # start at 0.
num_files = 0 num_files = 0
try: try:
@ -40,20 +40,20 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
core.USER_SCRIPT_RUNONCE = int(settings.get("user_script_runOnce", 1)) core.USER_SCRIPT_RUNONCE = int(settings.get("user_script_runOnce", 1))
if core.CHECK_MEDIA: if core.CHECK_MEDIA:
for video in listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False): for video in list_media_files(output_destination, media=True, audio=False, meta=False, archives=False):
if transcoder.isVideoGood(video, 0): if transcoder.is_video_good(video, 0):
import_subs(video) import_subs(video)
else: else:
logger.info("Corrupt video file found {0}. Deleting.".format(video), "USERSCRIPT") logger.info("Corrupt video file found {0}. Deleting.".format(video), "USERSCRIPT")
os.unlink(video) os.unlink(video)
for dirpath, dirnames, filenames in os.walk(outputDestination): for dirpath, dirnames, filenames in os.walk(output_destination):
for file in filenames: for file in filenames:
filePath = core.os.path.join(dirpath, file) file_path = core.os.path.join(dirpath, file)
fileName, fileExtension = os.path.splitext(file) file_name, file_extension = os.path.splitext(file)
if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS: if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS:
num_files += 1 num_files += 1
if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files. if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files.
continue continue
@ -63,17 +63,17 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
command.append('{0}'.format(file)) command.append('{0}'.format(file))
continue continue
elif param == "FP": elif param == "FP":
command.append('{0}'.format(filePath)) command.append('{0}'.format(file_path))
continue continue
elif param == "TN": elif param == "TN":
command.append('{0}'.format(torrentName)) command.append('{0}'.format(torrent_name))
continue continue
elif param == "TL": elif param == "TL":
command.append('{0}'.format(torrentLabel)) command.append('{0}'.format(torrent_label))
continue continue
elif param == "DN": elif param == "DN":
if core.USER_SCRIPT_RUNONCE == 1: if core.USER_SCRIPT_RUNONCE == 1:
command.append('{0}'.format(outputDestination)) command.append('{0}'.format(output_destination))
else: else:
command.append('{0}'.format(dirpath)) command.append('{0}'.format(dirpath))
continue continue
@ -83,7 +83,7 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
cmd = "" cmd = ""
for item in command: for item in command:
cmd = "{cmd} {item}".format(cmd=cmd, item=item) cmd = "{cmd} {item}".format(cmd=cmd, item=item)
logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=filePath), "USERSCRIPT") logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=file_path), "USERSCRIPT")
try: try:
p = Popen(command) p = Popen(command)
res = p.wait() res = p.wait()
@ -102,16 +102,16 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
final_result += result final_result += result
num_files_new = 0 num_files_new = 0
for dirpath, dirnames, filenames in os.walk(outputDestination): for dirpath, dirnames, filenames in os.walk(output_destination):
for file in filenames: for file in filenames:
fileName, fileExtension = os.path.splitext(file) file_name, file_extension = os.path.splitext(file)
if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL": if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL":
num_files_new += 1 num_files_new += 1
if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0: if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0:
logger.info("All files have been processed. Cleaning outputDirectory {0}".format(outputDestination)) logger.info("All files have been processed. Cleaning outputDirectory {0}".format(output_destination))
rmDir(outputDestination) remove_dir(output_destination)
elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0: elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0:
logger.info("{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.".format( logger.info("{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.".format(
num_files, num_files_new)) num_files, num_files_new))
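
external_script expands the placeholder tokens in user_script_param (FN, FP, TN, TL, DN above) into concrete values before launching the command. A compact sketch of just that substitution step; the surrounding directory walk, run-once handling and exit-code bookkeeping are omitted, and the script path in the comment is made up:

    import subprocess

    def build_user_command(script, params, file_name, file_path,
                           torrent_name, torrent_label, output_destination):
        """Expand user-script placeholder tokens into a subprocess argument list."""
        tokens = {
            'FN': file_name,
            'FP': file_path,
            'TN': torrent_name,
            'TL': torrent_label,
            'DN': output_destination,
        }
        return [script] + [tokens.get(param, param) for param in params]

    # p = subprocess.Popen(build_user_command('/config/my_script.sh', ['FP', 'TL'],
    #                                         'show.mkv', '/downloads/show.mkv',
    #                                         'Show.S01', 'tv', '/downloads'))
    # p.wait()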
File diff suppressed because it is too large.
@ -13,24 +13,24 @@ from six import iteritems, text_type, string_types
import core import core
from core import logger from core import logger
from core.nzbToMediaUtil import makeDir from core.nzbToMediaUtil import make_dir
def isVideoGood(videofile, status): def is_video_good(videofile, status):
fileNameExt = os.path.basename(videofile) file_name_ext = os.path.basename(videofile)
fileName, fileExt = os.path.splitext(fileNameExt) file_name, file_ext = os.path.splitext(file_name_ext)
disable = False disable = False
if fileExt not in core.MEDIACONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or fileExt in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED): if file_ext not in core.MEDIACONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or file_ext in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED):
disable = True disable = True
else: else:
test_details, res = getVideoDetails(core.TEST_FILE) test_details, res = get_video_details(core.TEST_FILE)
if res != 0 or test_details.get("error"): if res != 0 or test_details.get("error"):
disable = True disable = True
logger.info("DISABLED: ffprobe failed to analyse test file. Stopping corruption check.", 'TRANSCODER') logger.info("DISABLED: ffprobe failed to analyse test file. Stopping corruption check.", 'TRANSCODER')
if test_details.get("streams"): if test_details.get("streams"):
vidStreams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"] vid_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"]
audStreams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"] aud_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"]
if not (len(vidStreams) > 0 and len(audStreams) > 0): if not (len(vid_streams) > 0 and len(aud_streams) > 0):
disable = True disable = True
logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.", logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.",
'TRANSCODER') 'TRANSCODER')
@ -40,25 +40,25 @@ def isVideoGood(videofile, status):
else: else:
return True return True
logger.info('Checking [{0}] for corruption, please stand by ...'.format(fileNameExt), 'TRANSCODER') logger.info('Checking [{0}] for corruption, please stand by ...'.format(file_name_ext), 'TRANSCODER')
video_details, result = getVideoDetails(videofile) video_details, result = get_video_details(videofile)
if result != 0: if result != 0:
logger.error("FAILED: [{0}] is corrupted!".format(fileNameExt), 'TRANSCODER') logger.error("FAILED: [{0}] is corrupted!".format(file_name_ext), 'TRANSCODER')
return False return False
if video_details.get("error"): if video_details.get("error"):
logger.info("FAILED: [{0}] returned error [{1}].".format(fileNameExt, video_details.get("error")), 'TRANSCODER') logger.info("FAILED: [{0}] returned error [{1}].".format(file_name_ext, video_details.get("error")), 'TRANSCODER')
return False return False
if video_details.get("streams"): if video_details.get("streams"):
videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"] video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
if len(videoStreams) > 0 and len(audioStreams) > 0: if len(video_streams) > 0 and len(audio_streams) > 0:
logger.info("SUCCESS: [{0}] has no corruption.".format(fileNameExt), 'TRANSCODER') logger.info("SUCCESS: [{0}] has no corruption.".format(file_name_ext), 'TRANSCODER')
return True return True
else: else:
logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. " logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. "
"Assume corruption.".format "Assume corruption.".format
(fileNameExt, len(videoStreams), len(audioStreams)), 'TRANSCODER') (file_name_ext, len(video_streams), len(audio_streams)), 'TRANSCODER')
return False return False
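For orientation, a minimal sketch of the ffprobe stream check that the corruption test above relies on; it is not part of this commit, assumes ffprobe is on PATH, and uses a placeholder file name.

import json
import subprocess

# Probe a file and confirm it exposes at least one video and one audio stream,
# the same pass/fail criterion is_video_good applies.
def has_video_and_audio(path):
    cmd = ['ffprobe', '-v', 'quiet', '-print_format', 'json', '-show_streams', path]
    out = subprocess.check_output(cmd)
    streams = json.loads(out.decode('utf-8')).get('streams', [])
    kinds = [s.get('codec_type') for s in streams]
    return 'video' in kinds and 'audio' in kinds

print(has_video_and_audio('sample.mkv'))  # 'sample.mkv' is a placeholder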
@ -72,7 +72,7 @@ def zip_out(file, img, bitbucket):
return procin return procin
def getVideoDetails(videofile, img=None, bitbucket=None): def get_video_details(videofile, img=None, bitbucket=None):
video_details = {} video_details = {}
result = 1 result = 1
file = videofile file = videofile
@ -116,31 +116,31 @@ def getVideoDetails(videofile, img=None, bitbucket=None):
return video_details, result return video_details, result
def buildCommands(file, newDir, movieName, bitbucket): def build_commands(file, new_dir, movie_name, bitbucket):
if isinstance(file, string_types): if isinstance(file, string_types):
inputFile = file input_file = file
if 'concat:' in file: if 'concat:' in file:
file = file.split('|')[0].replace('concat:', '') file = file.split('|')[0].replace('concat:', '')
video_details, result = getVideoDetails(file) video_details, result = get_video_details(file)
dir, name = os.path.split(file) dir, name = os.path.split(file)
name, ext = os.path.splitext(name) name, ext = os.path.splitext(name)
check = re.match("VTS_([0-9][0-9])_[0-9]+", name) check = re.match("VTS_([0-9][0-9])_[0-9]+", name)
if check and core.CONCAT: if check and core.CONCAT:
name = movieName name = movie_name
elif check: elif check:
name = ('{0}.cd{1}'.format(movieName, check.groups()[0])) name = ('{0}.cd{1}'.format(movie_name, check.groups()[0]))
elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name): elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name):
name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name) name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name)
if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself. if ext == core.VEXTENSION and new_dir == dir: # we need to change the name to prevent overwriting itself.
core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext' core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext'
else: else:
img, data = next(iteritems(file)) img, data = next(iteritems(file))
name = data['name'] name = data['name']
video_details, result = getVideoDetails(data['files'][0], img, bitbucket) video_details, result = get_video_details(data['files'][0], img, bitbucket)
inputFile = '-' input_file = '-'
file = '-' file = '-'
newfilePath = os.path.normpath(os.path.join(newDir, name) + core.VEXTENSION) newfile_path = os.path.normpath(os.path.join(new_dir, name) + core.VEXTENSION)
map_cmd = [] map_cmd = []
video_cmd = [] video_cmd = []
@ -152,9 +152,9 @@ def buildCommands(file, newDir, movieName, bitbucket):
if not video_details or not video_details.get( if not video_details or not video_details.get(
"streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding. "streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
videoStreams = [] video_streams = []
audioStreams = [] audio_streams = []
subStreams = [] sub_streams = []
map_cmd.extend(['-map', '0']) map_cmd.extend(['-map', '0'])
if core.VCODEC: if core.VCODEC:
@ -201,15 +201,15 @@ def buildCommands(file, newDir, movieName, bitbucket):
other_cmd.extend(['-movflags', '+faststart']) other_cmd.extend(['-movflags', '+faststart'])
else: else:
videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"] video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"] audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"] sub_streams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"]
if core.VEXTENSION not in ['.mkv', '.mpegts']: if core.VEXTENSION not in ['.mkv', '.mpegts']:
subStreams = [item for item in video_details["streams"] if sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"] "codec_name"] != "pgssub"]
for video in videoStreams: for video in video_streams:
codec = video["codec_name"] codec = video["codec_name"]
fr = video.get("avg_frame_rate", 0) fr = video.get("avg_frame_rate", 0)
width = video.get("width", 0) width = video.get("width", 0)
@ -257,24 +257,24 @@ def buildCommands(file, newDir, movieName, bitbucket):
used_audio = 0 used_audio = 0
a_mapped = [] a_mapped = []
commentary = [] commentary = []
if audioStreams: if audio_streams:
for i, val in reversed(list(enumerate(audioStreams))): for i, val in reversed(list(enumerate(audio_streams))):
try: try:
if "Commentary" in val.get("tags").get("title"): # Split out commentry tracks. if "Commentary" in val.get("tags").get("title"): # Split out commentry tracks.
commentary.append(val) commentary.append(val)
del audioStreams[i] del audio_streams[i]
except: except:
continue continue
try: try:
audio1 = [item for item in audioStreams if item["tags"]["language"] == core.ALANGUAGE] audio1 = [item for item in audio_streams if item["tags"]["language"] == core.ALANGUAGE]
except: # no language tags. Assume only 1 language. except: # no language tags. Assume only 1 language.
audio1 = audioStreams audio1 = audio_streams
try: try:
audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW] audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW]
except: except:
audio2 = [] audio2 = []
try: try:
audio3 = [item for item in audioStreams if item["tags"]["language"] != core.ALANGUAGE] audio3 = [item for item in audio_streams if item["tags"]["language"] != core.ALANGUAGE]
except: except:
audio3 = [] audio3 = []
try: try:
@ -384,8 +384,8 @@ def buildCommands(file, newDir, movieName, bitbucket):
audio_cmd.extend(audio_cmd2) audio_cmd.extend(audio_cmd2)
if core.AINCLUDE and core.ACODEC3: if core.AINCLUDE and core.ACODEC3:
audioStreams.extend(commentary) #add commentary tracks back here. audio_streams.extend(commentary) #add commentary tracks back here.
for audio in audioStreams: for audio in audio_streams:
if audio["index"] in a_mapped: if audio["index"] in a_mapped:
continue continue
used_audio += 1 used_audio += 1
@ -422,7 +422,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
n = 0 n = 0
for lan in core.SLANGUAGES: for lan in core.SLANGUAGES:
try: try:
subs1 = [item for item in subStreams if item["tags"]["language"] == lan] subs1 = [item for item in sub_streams if item["tags"]["language"] == lan]
except: except:
subs1 = [] subs1 = []
if core.BURN and not subs1 and not burnt and os.path.isfile(file): if core.BURN and not subs1 and not burnt and os.path.isfile(file):
@ -431,13 +431,13 @@ def buildCommands(file, newDir, movieName, bitbucket):
video_cmd.extend(['-vf', 'subtitles={subs}'.format(subs=subfile)]) video_cmd.extend(['-vf', 'subtitles={subs}'.format(subs=subfile)])
burnt = 1 burnt = 1
for sub in subs1: for sub in subs1:
if core.BURN and not burnt and os.path.isfile(inputFile): if core.BURN and not burnt and os.path.isfile(input_file):
subloc = 0 subloc = 0
for index in range(len(subStreams)): for index in range(len(sub_streams)):
if subStreams[index]["index"] == sub["index"]: if sub_streams[index]["index"] == sub["index"]:
subloc = index subloc = index
break break
video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=inputFile, loc=subloc)]) video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=input_file, loc=subloc)])
burnt = 1 burnt = 1
if not core.ALLOWSUBS: if not core.ALLOWSUBS:
break break
@ -447,7 +447,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
s_mapped.extend([sub["index"]]) s_mapped.extend([sub["index"]])
if core.SINCLUDE: if core.SINCLUDE:
for sub in subStreams: for sub in sub_streams:
if not core.ALLOWSUBS: if not core.ALLOWSUBS:
break break
if sub["index"] in s_mapped: if sub["index"] in s_mapped:
@ -467,11 +467,11 @@ def buildCommands(file, newDir, movieName, bitbucket):
if core.GENERALOPTS: if core.GENERALOPTS:
command.extend(core.GENERALOPTS) command.extend(core.GENERALOPTS)
command.extend(['-i', inputFile]) command.extend(['-i', input_file])
if core.SEMBED and os.path.isfile(file): if core.SEMBED and os.path.isfile(file):
for subfile in get_subs(file): for subfile in get_subs(file):
sub_details, result = getVideoDetails(subfile) sub_details, result = get_video_details(subfile)
if not sub_details or not sub_details.get("streams"): if not sub_details or not sub_details.get("streams"):
continue continue
if core.SCODEC == "mov_text": if core.SCODEC == "mov_text":
@ -509,7 +509,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
command.extend(sub_cmd) command.extend(sub_cmd)
command.extend(meta_cmd) command.extend(meta_cmd)
command.extend(other_cmd) command.extend(other_cmd)
command.append(newfilePath) command.append(newfile_path)
if platform.system() != 'Windows': if platform.system() != 'Windows':
command = core.NICENESS + command command = core.NICENESS + command
return command return command
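As a rough illustration (not part of this commit), a command list like the one build_commands returns can be executed the way transcode_directory does further down; the file names and niceness value here are placeholders.

import platform
import subprocess

# Placeholder ffmpeg invocation; build_commands assembles a far longer list.
command = ['ffmpeg', '-loglevel', 'warning', '-i', 'input.mkv', 'output.mp4']
if platform.system() != 'Windows':
    command = ['nice', '-n', '19'] + command  # stand-in for core.NICENESS
exit_code = subprocess.call(command)
print('ffmpeg exit code: {0}'.format(exit_code))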
@ -517,52 +517,52 @@ def buildCommands(file, newDir, movieName, bitbucket):
def get_subs(file): def get_subs(file):
filepaths = [] filepaths = []
subExt = ['.srt', '.sub', '.idx'] sub_ext = ['.srt', '.sub', '.idx']
name = os.path.splitext(os.path.split(file)[1])[0] name = os.path.splitext(os.path.split(file)[1])[0]
dir = os.path.split(file)[0] dir = os.path.split(file)[0]
for dirname, dirs, filenames in os.walk(dir): for dirname, dirs, filenames in os.walk(dir):
for filename in filenames: for filename in filenames:
filepaths.extend([os.path.join(dirname, filename)]) filepaths.extend([os.path.join(dirname, filename)])
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in subExt and name in item] subfiles = [item for item in filepaths if os.path.splitext(item)[1] in sub_ext and name in item]
return subfiles return subfiles
def extract_subs(file, newfilePath, bitbucket): def extract_subs(file, newfile_path, bitbucket):
video_details, result = getVideoDetails(file) video_details, result = get_video_details(file)
if not video_details: if not video_details:
return return
if core.SUBSDIR: if core.SUBSDIR:
subdir = core.SUBSDIR subdir = core.SUBSDIR
else: else:
subdir = os.path.split(newfilePath)[0] subdir = os.path.split(newfile_path)[0]
name = os.path.splitext(os.path.split(newfilePath)[1])[0] name = os.path.splitext(os.path.split(newfile_path)[1])[0]
try: try:
subStreams = [item for item in video_details["streams"] if sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[ item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[
"codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"] "codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"]
except: except:
subStreams = [item for item in video_details["streams"] if sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[ item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"] "codec_name"] != "pgssub"]
num = len(subStreams) num = len(sub_streams)
for n in range(num): for n in range(num):
sub = subStreams[n] sub = sub_streams[n]
idx = sub["index"] idx = sub["index"]
lan = sub.get("tags", {}).get("language", "unk") lan = sub.get("tags", {}).get("language", "unk")
if num == 1: if num == 1:
outputFile = os.path.join(subdir, "{0}.srt".format(name)) output_file = os.path.join(subdir, "{0}.srt".format(name))
if os.path.isfile(outputFile): if os.path.isfile(output_file):
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, n)) output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, n))
else: else:
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, lan)) output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, lan))
if os.path.isfile(outputFile): if os.path.isfile(output_file):
outputFile = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n)) output_file = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n))
command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an', command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an',
'-codec:{index}'.format(index=idx), 'srt', outputFile] '-codec:{index}'.format(index=idx), 'srt', output_file]
if platform.system() != 'Windows': if platform.system() != 'Windows':
command = core.NICENESS + command command = core.NICENESS + command
@ -578,7 +578,7 @@ def extract_subs(file, newfilePath, bitbucket):
if result == 0: if result == 0:
try: try:
shutil.copymode(file, outputFile) shutil.copymode(file, output_file)
except: except:
pass pass
logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file)) logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file))
@ -586,77 +586,77 @@ def extract_subs(file, newfilePath, bitbucket):
logger.error("Extracting subtitles has failed") logger.error("Extracting subtitles has failed")
def processList(List, newDir, bitbucket): def process_list(it, new_dir, bitbucket):
remList = [] rem_list = []
newList = [] new_list = []
combine = [] combine = []
vtsPath = None vts_path = None
success = True success = True
for item in List: for item in it:
ext = os.path.splitext(item)[1].lower() ext = os.path.splitext(item)[1].lower()
if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS: if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS:
logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER") logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER")
newList.extend(ripISO(item, newDir, bitbucket)) new_list.extend(rip_iso(item, new_dir, bitbucket))
remList.append(item) rem_list.append(item)
elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS: elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS:
logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER") logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER")
if not vtsPath: if not vts_path:
try: try:
vtsPath = re.match("(.+VIDEO_TS)", item).groups()[0] vts_path = re.match("(.+VIDEO_TS)", item).groups()[0]
except: except:
vtsPath = os.path.split(item)[0] vts_path = os.path.split(item)[0]
remList.append(item) rem_list.append(item)
elif re.match(".+VIDEO_TS.", item) or re.match(".+VTS_[0-9][0-9]_[0-9].", item): elif re.match(".+VIDEO_TS.", item) or re.match(".+VTS_[0-9][0-9]_[0-9].", item):
remList.append(item) rem_list.append(item)
elif core.CONCAT and re.match(".+[cC][dD][0-9].", item): elif core.CONCAT and re.match(".+[cC][dD][0-9].", item):
remList.append(item) rem_list.append(item)
combine.append(item) combine.append(item)
else: else:
continue continue
if vtsPath: if vts_path:
newList.extend(combineVTS(vtsPath)) new_list.extend(combine_vts(vts_path))
if combine: if combine:
newList.extend(combineCD(combine)) new_list.extend(combine_cd(combine))
for file in newList: for file in new_list:
if isinstance(file, string_types) and 'concat:' not in file and not os.path.isfile(file): if isinstance(file, string_types) and 'concat:' not in file and not os.path.isfile(file):
success = False success = False
break break
if success and newList: if success and new_list:
List.extend(newList) it.extend(new_list)
for item in remList: for item in rem_list:
List.remove(item) it.remove(item)
logger.debug("Successfully extracted .vob file {0} from disk image".format(newList[0]), "TRANSCODER") logger.debug("Successfully extracted .vob file {0} from disk image".format(new_list[0]), "TRANSCODER")
elif newList and not success: elif new_list and not success:
newList = [] new_list = []
remList = [] rem_list = []
logger.error("Failed extracting .vob files from disk image. Stopping transcoding.", "TRANSCODER") logger.error("Failed extracting .vob files from disk image. Stopping transcoding.", "TRANSCODER")
return List, remList, newList, success return it, rem_list, new_list, success
def ripISO(item, newDir, bitbucket): def rip_iso(item, new_dir, bitbucket):
newFiles = [] new_files = []
failure_dir = 'failure' failure_dir = 'failure'
# Mount the ISO in your OS and call combineVTS. # Mount the ISO in your OS and call combineVTS.
if not core.SEVENZIP: if not core.SEVENZIP:
logger.error("No 7zip installed. Can't extract image file {0}".format(item), "TRANSCODER") logger.error("No 7zip installed. Can't extract image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir] new_files = [failure_dir]
return newFiles return new_files
cmd = [core.SEVENZIP, 'l', item] cmd = [core.SEVENZIP, 'l', item]
try: try:
logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER") logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER")
print_cmd(cmd) print_cmd(cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
out, err = proc.communicate() out, err = proc.communicate()
fileList = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in file_list = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in
out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)] out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)]
combined = [] combined = []
for n in range(99): for n in range(99):
concat = [] concat = []
m = 1 m = 1
while True: while True:
vtsName = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m) vts_name = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m)
if vtsName in fileList: if vts_name in file_list:
concat.append(vtsName) concat.append(vts_name)
m += 1 m += 1
else: else:
break break
@ -668,29 +668,29 @@ def ripISO(item, newDir, bitbucket):
name = '{name}.cd{x}'.format( name = '{name}.cd{x}'.format(
name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1 name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1
) )
newFiles.append({item: {'name': name, 'files': concat}}) new_files.append({item: {'name': name, 'files': concat}})
if core.CONCAT: if core.CONCAT:
name = os.path.splitext(os.path.split(item)[1])[0] name = os.path.splitext(os.path.split(item)[1])[0]
newFiles.append({item: {'name': name, 'files': combined}}) new_files.append({item: {'name': name, 'files': combined}})
if not newFiles: if not new_files:
logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER") logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir] new_files = [failure_dir]
except: except:
logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER") logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir] new_files = [failure_dir]
return newFiles return new_files
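For context, a simplified sketch of the 7-Zip listing step rip_iso performs before extracting individual .VOB files; the '7z' binary name and image path are assumptions, and the parsing is far cruder than the regex above.

import subprocess

# List the archive and keep the trailing .VOB entry names (simplified parsing).
out = subprocess.check_output(['7z', 'l', 'movie.iso']).decode('utf-8', 'ignore')
vobs = [line.split()[-1] for line in out.splitlines() if line.upper().endswith('.VOB')]
print(vobs)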
def combineVTS(vtsPath): def combine_vts(vts_path):
newFiles = [] new_files = []
combined = '' combined = ''
for n in range(99): for n in range(99):
concat = '' concat = ''
m = 1 m = 1
while True: while True:
vtsName = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m) vts_name = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
if os.path.isfile(os.path.join(vtsPath, vtsName)): if os.path.isfile(os.path.join(vts_path, vts_name)):
concat += '{file}|'.format(file=os.path.join(vtsPath, vtsName)) concat += '{file}|'.format(file=os.path.join(vts_path, vts_name))
m += 1 m += 1
else: else:
break break
@ -699,14 +699,14 @@ def combineVTS(vtsPath):
if core.CONCAT: if core.CONCAT:
combined += '{files}|'.format(files=concat) combined += '{files}|'.format(files=concat)
continue continue
newFiles.append('concat:{0}'.format(concat[:-1])) new_files.append('concat:{0}'.format(concat[:-1]))
if core.CONCAT: if core.CONCAT:
newFiles.append('concat:{0}'.format(combined[:-1])) new_files.append('concat:{0}'.format(combined[:-1]))
return newFiles return new_files
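The 'concat:' strings assembled above use ffmpeg's concat protocol. A standalone, hypothetical example with placeholder file names:

import subprocess

# Join two VOB parts losslessly via the concat protocol and stream copy.
parts = ['VTS_01_1.VOB', 'VTS_01_2.VOB']  # placeholder VOB files
source = 'concat:{0}'.format('|'.join(parts))
subprocess.call(['ffmpeg', '-i', source, '-c', 'copy', 'title1.mpg'])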
def combineCD(combine): def combine_cd(combine):
newFiles = [] new_files = []
for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]): for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]):
concat = '' concat = ''
for n in range(99): for n in range(99):
@ -717,8 +717,8 @@ def combineCD(combine):
else: else:
break break
if concat: if concat:
newFiles.append('concat:{0}'.format(concat[:-1])) new_files.append('concat:{0}'.format(concat[:-1]))
return newFiles return new_files
def print_cmd(command): def print_cmd(command):
@ -728,49 +728,49 @@ def print_cmd(command):
logger.debug("calling command:{0}".format(cmd)) logger.debug("calling command:{0}".format(cmd))
def Transcode_directory(dirName): def transcode_directory(dir_name):
if not core.FFMPEG: if not core.FFMPEG:
return 1, dirName return 1, dir_name
logger.info("Checking for files to be transcoded") logger.info("Checking for files to be transcoded")
final_result = 0 # initialize as successful final_result = 0 # initialize as successful
if core.OUTPUTVIDEOPATH: if core.OUTPUTVIDEOPATH:
newDir = core.OUTPUTVIDEOPATH new_dir = core.OUTPUTVIDEOPATH
makeDir(newDir) make_dir(new_dir)
name = os.path.splitext(os.path.split(dirName)[1])[0] name = os.path.splitext(os.path.split(dir_name)[1])[0]
newDir = os.path.join(newDir, name) new_dir = os.path.join(new_dir, name)
makeDir(newDir) make_dir(new_dir)
else: else:
newDir = dirName new_dir = dir_name
if platform.system() == 'Windows': if platform.system() == 'Windows':
bitbucket = open('NUL') bitbucket = open('NUL')
else: else:
bitbucket = open('/dev/null') bitbucket = open('/dev/null')
movieName = os.path.splitext(os.path.split(dirName)[1])[0] movie_name = os.path.splitext(os.path.split(dir_name)[1])[0]
List = core.listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False) file_list = core.list_media_files(dir_name, media=True, audio=False, meta=False, archives=False)
List, remList, newList, success = processList(List, newDir, bitbucket) file_list, rem_list, new_list, success = process_list(file_list, new_dir, bitbucket)
if not success: if not success:
bitbucket.close() bitbucket.close()
return 1, dirName return 1, dir_name
for file in List: for file in file_list:
if isinstance(file, string_types) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS: if isinstance(file, string_types) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS:
continue continue
command = buildCommands(file, newDir, movieName, bitbucket) command = build_commands(file, new_dir, movie_name, bitbucket)
newfilePath = command[-1] newfile_path = command[-1]
# transcoding files may remove the original file, so make sure to extract subtitles first # transcoding files may remove the original file, so make sure to extract subtitles first
if core.SEXTRACT and isinstance(file, string_types): if core.SEXTRACT and isinstance(file, string_types):
extract_subs(file, newfilePath, bitbucket) extract_subs(file, newfile_path, bitbucket)
try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason) try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)
os.remove(newfilePath) os.remove(newfile_path)
except OSError as e: except OSError as e:
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
logger.debug("Error when removing transcoding target: {0}".format(e)) logger.debug("Error when removing transcoding target: {0}".format(e))
except Exception as e: except Exception as e:
logger.debug("Error when removing transcoding target: {0}".format(e)) logger.debug("Error when removing transcoding target: {0}".format(e))
logger.info("Transcoding video: {0}".format(newfilePath)) logger.info("Transcoding video: {0}".format(newfile_path))
print_cmd(command) print_cmd(command)
result = 1 # set result to failed in case call fails. result = 1 # set result to failed in case call fails.
try: try:
@ -787,42 +787,42 @@ def Transcode_directory(dirName):
proc.communicate() proc.communicate()
result = proc.returncode result = proc.returncode
except: except:
logger.error("Transcoding of video {0} has failed".format(newfilePath)) logger.error("Transcoding of video {0} has failed".format(newfile_path))
if core.SUBSDIR and result == 0 and isinstance(file, string_types): if core.SUBSDIR and result == 0 and isinstance(file, string_types):
for sub in get_subs(file): for sub in get_subs(file):
name = os.path.splitext(os.path.split(file)[1])[0] name = os.path.splitext(os.path.split(file)[1])[0]
subname = os.path.split(sub)[1] subname = os.path.split(sub)[1]
newname = os.path.splitext(os.path.split(newfilePath)[1])[0] newname = os.path.splitext(os.path.split(newfile_path)[1])[0]
newpath = os.path.join(core.SUBSDIR, subname.replace(name, newname)) newpath = os.path.join(core.SUBSDIR, subname.replace(name, newname))
if not os.path.isfile(newpath): if not os.path.isfile(newpath):
os.rename(sub, newpath) os.rename(sub, newpath)
if result == 0: if result == 0:
try: try:
shutil.copymode(file, newfilePath) shutil.copymode(file, newfile_path)
except: except:
pass pass
logger.info("Transcoding of video to {0} succeeded".format(newfilePath)) logger.info("Transcoding of video to {0} succeeded".format(newfile_path))
if os.path.isfile(newfilePath) and (file in newList or not core.DUPLICATE): if os.path.isfile(newfile_path) and (file in new_list or not core.DUPLICATE):
try: try:
os.unlink(file) os.unlink(file)
except: except:
pass pass
else: else:
logger.error("Transcoding of video to {0} failed with result {1}".format(newfilePath, result)) logger.error("Transcoding of video to {0} failed with result {1}".format(newfile_path, result))
# this will be 0 (successful) if all are successful, else will return a positive integer for failure. # this will be 0 (successful) if all are successful, else will return a positive integer for failure.
final_result = final_result + result final_result = final_result + result
if final_result == 0 and not core.DUPLICATE: if final_result == 0 and not core.DUPLICATE:
for file in remList: for file in rem_list:
try: try:
os.unlink(file) os.unlink(file)
except: except:
pass pass
if not os.listdir(text_type(newDir)): # this is an empty directory and we didn't transcode into it. if not os.listdir(text_type(new_dir)): # this is an empty directory and we didn't transcode into it.
os.rmdir(newDir) os.rmdir(new_dir)
newDir = dirName new_dir = dir_name
if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB
newDir = dirName new_dir = dir_name
bitbucket.close() bitbucket.close()
return final_result, newDir return final_result, new_dir
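A brief, hypothetical driver for the renamed entry point; the directory path is a placeholder, while the import and initialize call mirror the test script near the end of this diff.

import core
from core.transcoder import transcoder

core.initialize()
# transcode_directory returns (final_result, new_dir); 0 means every file succeeded.
result, output_dir = transcoder.transcode_directory('/downloads/movies/Example.Movie.2018')
if result == 0:
    print('Transcoded into {0}'.format(output_dir))
else:
    print('Transcoding failed with exit status {0}'.format(result))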
View file
@ -629,13 +629,13 @@ import sys
import core import core
from core import logger, nzbToMediaDB from core import logger, nzbToMediaDB
from core.autoProcess.autoProcessComics import autoProcessComics from core.autoProcess.autoProcessComics import Comic
from core.autoProcess.autoProcessGames import autoProcessGames from core.autoProcess.autoProcessGames import Game
from core.autoProcess.autoProcessMovie import autoProcessMovie from core.autoProcess.autoProcessMovie import Movie
from core.autoProcess.autoProcessMusic import autoProcessMusic from core.autoProcess.autoProcessMusic import Music
from core.autoProcess.autoProcessTV import autoProcessTV from core.autoProcess.autoProcessTV import TV
from core.nzbToMediaUserScript import external_script from core.nzbToMediaUserScript import external_script
from core.nzbToMediaUtil import CharReplace, cleanDir, convert_to_ascii, extractFiles, getDirs, get_downloadInfo, get_nzoid, plex_update, update_downloadInfoStatus from core.nzbToMediaUtil import char_replace, clean_dir, convert_to_ascii, extract_files, get_dirs, get_download_info, get_nzoid, plex_update, update_download_info_status
try: try:
text_type = unicode text_type = unicode
@ -644,51 +644,51 @@ except NameError:
# post-processing # post-processing
def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None): def process(input_directory, input_name=None, status=0, client_agent='manual', download_id=None, input_category=None, failure_link=None):
if core.SAFE_MODE and inputDirectory == core.NZB_DEFAULTDIR: if core.SAFE_MODE and input_directory == core.NZB_DEFAULTDIR:
logger.error( logger.error(
'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format( 'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format(
inputDirectory)) input_directory))
return [-1, ""] return [-1, ""]
if not download_id and clientAgent == 'sabnzbd': if not download_id and client_agent == 'sabnzbd':
download_id = get_nzoid(inputName) download_id = get_nzoid(input_name)
if clientAgent != 'manual' and not core.DOWNLOADINFO: if client_agent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding NZB download info for directory {0} to database'.format(inputDirectory)) logger.debug('Adding NZB download info for directory {0} to database'.format(input_directory))
myDB = nzbToMediaDB.DBConnection() my_db = nzbToMediaDB.DBConnection()
inputDirectory1 = inputDirectory input_directory1 = input_directory
inputName1 = inputName input_name1 = input_name
try: try:
encoded, inputDirectory1 = CharReplace(inputDirectory) encoded, input_directory1 = char_replace(input_directory)
encoded, inputName1 = CharReplace(inputName) encoded, input_name1 = char_replace(input_name)
except: except:
pass pass
controlValueDict = {"input_directory": text_type(inputDirectory1)} control_value_dict = {"input_directory": text_type(input_directory1)}
newValueDict = {"input_name": text_type(inputName1), new_value_dict = {"input_name": text_type(input_name1),
"input_hash": text_type(download_id), "input_hash": text_type(download_id),
"input_id": text_type(download_id), "input_id": text_type(download_id),
"client_agent": text_type(clientAgent), "client_agent": text_type(client_agent),
"status": 0, "status": 0,
"last_update": datetime.date.today().toordinal() "last_update": datetime.date.today().toordinal()
} }
myDB.upsert("downloads", newValueDict, controlValueDict) my_db.upsert("downloads", new_value_dict, control_value_dict)
# auto-detect section # auto-detect section
if inputCategory is None: if input_category is None:
inputCategory = 'UNCAT' input_category = 'UNCAT'
usercat = inputCategory usercat = input_category
section = core.CFG.findsection(inputCategory).isenabled() section = core.CFG.findsection(input_category).isenabled()
if section is None: if section is None:
section = core.CFG.findsection("ALL").isenabled() section = core.CFG.findsection("ALL").isenabled()
if section is None: if section is None:
logger.error( logger.error(
'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format( 'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format(
inputCategory)) input_category))
return [-1, ""] return [-1, ""]
else: else:
usercat = "ALL" usercat = "ALL"
@ -696,65 +696,65 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down
if len(section) > 1: if len(section) > 1:
logger.error( logger.error(
'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format( 'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format(
inputCategory, section.keys())) input_category, section.keys()))
return [-1, ""] return [-1, ""]
if section: if section:
sectionName = section.keys()[0] section_name = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(sectionName)) logger.info('Auto-detected SECTION:{0}'.format(section_name))
else: else:
logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format( logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format(
inputCategory)) input_category))
return [-1, ""] return [-1, ""]
cfg = dict(core.CFG[sectionName][usercat]) cfg = dict(core.CFG[section_name][usercat])
extract = int(cfg.get("extract", 0)) extract = int(cfg.get("extract", 0))
try: try:
if int(cfg.get("remote_path")) and not core.REMOTEPATHS: if int(cfg.get("remote_path")) and not core.REMOTEPATHS:
logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format( logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format(
sectionName, inputCategory)) section_name, input_category))
return [-1, ""] return [-1, ""]
except: except:
logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format( logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format(
core.get("remote_path"), sectionName, inputCategory)) core.get("remote_path"), section_name, input_category))
inputName, inputDirectory = convert_to_ascii(inputName, inputDirectory) input_name, input_directory = convert_to_ascii(input_name, input_directory)
if extract == 1: if extract == 1:
logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory)) logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
extractFiles(inputDirectory) extract_files(input_directory)
logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, inputCategory, inputName)) logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, input_category, input_name))
if sectionName in ["CouchPotato", "Radarr"]: if section_name in ["CouchPotato", "Radarr"]:
result = autoProcessMovie().process(sectionName, inputDirectory, inputName, status, clientAgent, download_id, result = Movie().process(section_name, input_directory, input_name, status, client_agent, download_id,
inputCategory, failureLink) input_category, failure_link)
elif sectionName in ["SickBeard", "NzbDrone", "Sonarr"]: elif section_name in ["SickBeard", "NzbDrone", "Sonarr"]:
result = autoProcessTV().processEpisode(sectionName, inputDirectory, inputName, status, clientAgent, result = TV().process_episode(section_name, input_directory, input_name, status, client_agent,
download_id, inputCategory, failureLink) download_id, input_category, failure_link)
elif sectionName in ["HeadPhones", "Lidarr"]: elif section_name in ["HeadPhones", "Lidarr"]:
result = autoProcessMusic().process(sectionName, inputDirectory, inputName, status, clientAgent, inputCategory) result = Music().process(section_name, input_directory, input_name, status, client_agent, input_category)
elif sectionName == "Mylar": elif section_name == "Mylar":
result = autoProcessComics().processEpisode(sectionName, inputDirectory, inputName, status, clientAgent, result = Comic().process_episode(section_name, input_directory, input_name, status, client_agent,
inputCategory) input_category)
elif sectionName == "Gamez": elif section_name == "Gamez":
result = autoProcessGames().process(sectionName, inputDirectory, inputName, status, clientAgent, inputCategory) result = Game().process(section_name, input_directory, input_name, status, client_agent, input_category)
elif sectionName == 'UserScript': elif section_name == 'UserScript':
result = external_script(inputDirectory, inputName, inputCategory, section[usercat]) result = external_script(input_directory, input_name, input_category, section[usercat])
else: else:
result = [-1, ""] result = [-1, ""]
plex_update(inputCategory) plex_update(input_category)
if result[0] == 0: if result[0] == 0:
if clientAgent != 'manual': if client_agent != 'manual':
# update download status in our DB # update download status in our DB
update_downloadInfoStatus(inputName, 1) update_download_info_status(input_name, 1)
if sectionName not in ['UserScript', 'NzbDrone', 'Sonarr', 'Radarr', 'Lidarr']: if section_name not in ['UserScript', 'NzbDrone', 'Sonarr', 'Radarr', 'Lidarr']:
# cleanup our processing folders of any misc unwanted files and empty directories # cleanup our processing folders of any misc unwanted files and empty directories
cleanDir(inputDirectory, sectionName, inputCategory) clean_dir(input_directory, section_name, input_category)
return result return result
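For reference, a hedged example of calling the renamed process() directly, mirroring the manual-run path handled below; the directory, name and category are placeholders and assume the module has already been initialized.

# Hypothetical manual invocation of process(); result is a [status, message] pair.
result = process('/downloads/tv/Example.Show.S01E01.720p',
                 input_name='Example.Show.S01E01.720p', status=0,
                 client_agent='manual', input_category='tv')
if result[0] != 0:
    print('Post-processing reported a failure')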
@ -816,7 +816,7 @@ def main(args, section=None):
# Check for download_id to pass to CouchPotato # Check for download_id to pass to CouchPotato
download_id = "" download_id = ""
failureLink = None failure_link = None
if 'NZBPR_COUCHPOTATO' in os.environ: if 'NZBPR_COUCHPOTATO' in os.environ:
download_id = os.environ['NZBPR_COUCHPOTATO'] download_id = os.environ['NZBPR_COUCHPOTATO']
elif 'NZBPR_DRONE' in os.environ: elif 'NZBPR_DRONE' in os.environ:
@ -828,13 +828,13 @@ def main(args, section=None):
elif 'NZBPR_LIDARR' in os.environ: elif 'NZBPR_LIDARR' in os.environ:
download_id = os.environ['NZBPR_LIDARR'] download_id = os.environ['NZBPR_LIDARR']
if 'NZBPR__DNZB_FAILURE' in os.environ: if 'NZBPR__DNZB_FAILURE' in os.environ:
failureLink = os.environ['NZBPR__DNZB_FAILURE'] failure_link = os.environ['NZBPR__DNZB_FAILURE']
# All checks done, now launching the script. # All checks done, now launching the script.
clientAgent = 'nzbget' client_agent = 'nzbget'
result = process(os.environ['NZBPP_DIRECTORY'], inputName=os.environ['NZBPP_NZBNAME'], status=status, result = process(os.environ['NZBPP_DIRECTORY'], input_name=os.environ['NZBPP_NZBNAME'], status=status,
clientAgent=clientAgent, download_id=download_id, inputCategory=os.environ['NZBPP_CATEGORY'], client_agent=client_agent, download_id=download_id, input_category=os.environ['NZBPP_CATEGORY'],
failureLink=failureLink) failure_link=failure_link)
# SABnzbd Pre 0.7.17 # SABnzbd Pre 0.7.17
elif len(args) == core.SABNZB_NO_OF_ARGUMENTS: elif len(args) == core.SABNZB_NO_OF_ARGUMENTS:
# SABnzbd argv: # SABnzbd argv:
@ -845,9 +845,9 @@ def main(args, section=None):
# 5 User-defined category # 5 User-defined category
# 6 Group that the NZB was posted in e.g. alt.binaries.x # 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2 # 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
clientAgent = 'sabnzbd' client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd") logger.info("Script triggered from SABnzbd")
result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent, result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent,
download_id='') download_id='')
# SABnzbd 0.7.17+ # SABnzbd 0.7.17+
elif len(args) >= core.SABNZB_0717_NO_OF_ARGUMENTS: elif len(args) >= core.SABNZB_0717_NO_OF_ARGUMENTS:
@ -860,14 +860,14 @@ def main(args, section=None):
# 6 Group that the NZB was posted in e.g. alt.binaries.x # 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2 # 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
# 8 Failure URL # 8 Failure URL
clientAgent = 'sabnzbd' client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd 0.7.17+") logger.info("Script triggered from SABnzbd 0.7.17+")
result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent, result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent,
download_id='', failureLink=''.join(args[8:])) download_id='', failure_link=''.join(args[8:]))
# Generic program # Generic program
elif len(args) > 5 and args[5] == 'generic': elif len(args) > 5 and args[5] == 'generic':
logger.info("Script triggered from generic program") logger.info("Script triggered from generic program")
result = process(args[1], inputName=args[2], inputCategory=args[3], download_id=args[4]) result = process(args[1], input_name=args[2], input_category=args[3], download_id=args[4])
else: else:
# Perform Manual Post-Processing # Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...") logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")
@ -876,39 +876,39 @@ def main(args, section=None):
for subsection in subsections: for subsection in subsections:
if not core.CFG[section][subsection].isenabled(): if not core.CFG[section][subsection].isenabled():
continue continue
for dirName in getDirs(section, subsection, link='move'): for dir_name in get_dirs(section, subsection, link='move'):
logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dirName)) logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format(os.path.basename(dirName))) logger.info("Checking database for download info for {0} ...".format(os.path.basename(dir_name)))
core.DOWNLOADINFO = get_downloadInfo(os.path.basename(dirName), 0) core.DOWNLOADINFO = get_download_info(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO: if core.DOWNLOADINFO:
logger.info("Found download info for {0}, " logger.info("Found download info for {0}, "
"setting variables now ...".format "setting variables now ...".format
(os.path.basename(dirName))) (os.path.basename(dir_name)))
clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual')) client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
download_id = text_type(core.DOWNLOADINFO[0].get('input_id', '')) download_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
else: else:
logger.info('Unable to locate download info for {0}, ' logger.info('Unable to locate download info for {0}, '
'continuing to try and process this release ...'.format 'continuing to try and process this release ...'.format
(os.path.basename(dirName))) (os.path.basename(dir_name)))
clientAgent = 'manual' client_agent = 'manual'
download_id = '' download_id = ''
if clientAgent and clientAgent.lower() not in core.NZB_CLIENTS: if client_agent and client_agent.lower() not in core.NZB_CLIENTS:
continue continue
try: try:
dirName = dirName.encode(core.SYS_ENCODING) dir_name = dir_name.encode(core.SYS_ENCODING)
except UnicodeError: except UnicodeError:
pass pass
inputName = os.path.basename(dirName) input_name = os.path.basename(dir_name)
try: try:
inputName = inputName.encode(core.SYS_ENCODING) input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError: except UnicodeError:
pass pass
results = process(dirName, inputName, 0, clientAgent=clientAgent, results = process(dir_name, input_name, 0, client_agent=client_agent,
download_id=download_id or None, inputCategory=subsection) download_id=download_id or None, input_category=subsection)
if results[0] != 0: if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format
(section, subsection)) (section, subsection))
View file
@ -5,7 +5,7 @@ import guessit
import requests import requests
import core import core
from core.nzbToMediaAutoFork import autoFork from core.nzbToMediaAutoFork import auto_fork
from core.nzbToMediaUtil import server_responding from core.nzbToMediaUtil import server_responding
from core.transcoder import transcoder from core.transcoder import transcoder
@ -15,7 +15,7 @@ core.initialize()
#label = core.TORRENT_CLASS.core.get_torrent_status("f33a9c4b15cbd9170722d700069af86746817ade", ["label"]).get()['label'] #label = core.TORRENT_CLASS.core.get_torrent_status("f33a9c4b15cbd9170722d700069af86746817ade", ["label"]).get()['label']
#print label #print label
if transcoder.isVideoGood(core.TEST_FILE, 0): if transcoder.is_video_good(core.TEST_FILE, 0):
print("FFPROBE Works") print("FFPROBE Works")
else: else:
print("FFPROBE FAILED") print("FFPROBE FAILED")
@ -25,7 +25,7 @@ print(test)
section = core.CFG.findsection('tv').isenabled() section = core.CFG.findsection('tv').isenabled()
print(section) print(section)
print(len(section)) print(len(section))
fork, fork_params = autoFork('SickBeard', 'tv') fork, fork_params = auto_fork('SickBeard', 'tv')
if server_responding("http://127.0.0.1:5050"): if server_responding("http://127.0.0.1:5050"):
print("CouchPotato Running") print("CouchPotato Running")