PEP8 Variable in function should be lowercase

Labrys of Knossos 2018-12-16 20:00:13 -05:00
commit 97e1ed71b3
15 changed files with 977 additions and 953 deletions
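
The commit title echoes the pep8-naming (flake8 plugin) check N806, "variable in function should be lowercase": locals assigned inside a function should use lowercase_with_underscores rather than camelCase. As a minimal sketch of the rename pattern applied throughout this commit (the function and variable names below are illustrative only, not taken from the repository):

    # Illustrative names only -- a minimal sketch of the rename pattern,
    # not code from this repository.

    # Before: camelCase locals inside a function.
    def process_example_before(inputDirectory, inputName):
        outputDestination = inputDirectory.rstrip("/") + "/processed"
        foundFile = bool(inputName)
        return outputDestination, foundFile

    # After: the same logic with snake_case locals. The camelCase
    # parameters are kept and aliased (as this commit does, e.g.
    # input_directory = inputDirectory) so callers are unaffected.
    def process_example_after(inputDirectory, inputName):
        input_directory = inputDirectory
        input_name = inputName
        output_destination = input_directory.rstrip("/") + "/processed"
        found_file = bool(input_name)
        return output_destination, found_file

Throughout the diff, camelCase locals are replaced with snake_case equivalents, while the camelCase function parameters are kept and aliased to snake_case names at the top of each function so callers are unaffected.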


@@ -12,64 +12,68 @@ from libs.six import text_type
def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent):
input_directory = inputDirectory
input_name = inputName
input_category = inputCategory
input_hash = inputHash
status = 1 # 1 = failed | 0 = success
root = 0
foundFile = 0
found_file = 0
if clientAgent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding TORRENT download info for directory {0} to database'.format(inputDirectory))
logger.debug('Adding TORRENT download info for directory {0} to database'.format(input_directory))
myDB = nzbToMediaDB.DBConnection()
my_db = nzbToMediaDB.DBConnection()
inputDirectory1 = inputDirectory
inputName1 = inputName
input_directory1 = input_directory
input_name1 = input_name
try:
encoded, inputDirectory1 = CharReplace(inputDirectory)
encoded, inputName1 = CharReplace(inputName)
encoded, input_directory1 = CharReplace(input_directory)
encoded, input_name1 = CharReplace(input_name)
except:
pass
controlValueDict = {"input_directory": text_type(inputDirectory1)}
newValueDict = {"input_name": text_type(inputName1),
"input_hash": text_type(inputHash),
control_value_dict = {"input_directory": text_type(input_directory1)}
new_value_dict = {"input_name": text_type(input_name1),
"input_hash": text_type(input_hash),
"input_id": text_type(inputID),
"client_agent": text_type(clientAgent),
"status": 0,
"last_update": datetime.date.today().toordinal()
}
myDB.upsert("downloads", newValueDict, controlValueDict)
my_db.upsert("downloads", new_value_dict, control_value_dict)
logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory))
logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(input_directory, input_name, input_category))
# Confirm the category by parsing directory structure
inputDirectory, inputName, inputCategory, root = core.category_search(inputDirectory, inputName, inputCategory,
root, core.CATEGORIES)
if inputCategory == "":
inputCategory = "UNCAT"
input_directory, input_name, input_category, root = core.category_search(input_directory, input_name, input_category,
root, core.CATEGORIES)
if input_category == "":
input_category = "UNCAT"
usercat = inputCategory
usercat = input_category
try:
inputName = inputName.encode(core.SYS_ENCODING)
input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
try:
inputDirectory = inputDirectory.encode(core.SYS_ENCODING)
input_directory = input_directory.encode(core.SYS_ENCODING)
except UnicodeError:
pass
logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format
(inputDirectory, inputName, inputCategory))
(input_directory, input_name, input_category))
# auto-detect section
section = core.CFG.findsection(inputCategory).isenabled()
section = core.CFG.findsection(input_category).isenabled()
if section is None:
section = core.CFG.findsection("ALL").isenabled()
if section is None:
logger.error('Category:[{0}] is not defined or is not enabled. '
'Please rename it or ensure it is enabled for the appropriate section '
'in your autoProcessMedia.cfg and try again.'.format
(inputCategory))
(input_category))
return [-1, ""]
else:
usercat = "ALL"
@@ -82,95 +86,95 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
return [-1, ""]
if section:
sectionName = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(sectionName))
section_name = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(section_name))
else:
logger.error("Unable to locate a section with subsection:{0} "
"enabled in your autoProcessMedia.cfg, exiting!".format
(inputCategory))
(input_category))
return [-1, ""]
section = dict(section[sectionName][usercat]) # Type cast to dict() to allow effective usage of .get()
section = dict(section[section_name][usercat]) # Type cast to dict() to allow effective usage of .get()
Torrent_NoLink = int(section.get("Torrent_NoLink", 0))
torrent_no_link = int(section.get("Torrent_NoLink", 0))
keep_archive = int(section.get("keep_archive", 0))
extract = int(section.get('extract', 0))
extensions = section.get('user_script_mediaExtensions', "").lower().split(',')
uniquePath = int(section.get("unique_path", 1))
unique_path = int(section.get("unique_path", 1))
if clientAgent != 'manual':
core.pause_torrent(clientAgent, inputHash, inputID, inputName)
core.pause_torrent(clientAgent, input_hash, inputID, input_name)
# In case input is not directory, make sure to create one.
# This way Processing is isolated.
if not os.path.isdir(os.path.join(inputDirectory, inputName)):
basename = os.path.basename(inputDirectory)
basename = core.sanitizeName(inputName) \
if inputName == basename else os.path.splitext(core.sanitizeName(inputName))[0]
outputDestination = os.path.join(core.OUTPUTDIRECTORY, inputCategory, basename)
elif uniquePath:
outputDestination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, inputCategory, core.sanitizeName(inputName).replace(" ",".")))
if not os.path.isdir(os.path.join(input_directory, input_name)):
basename = os.path.basename(input_directory)
basename = core.sanitizeName(input_name) \
if input_name == basename else os.path.splitext(core.sanitizeName(input_name))[0]
output_destination = os.path.join(core.OUTPUTDIRECTORY, input_category, basename)
elif unique_path:
output_destination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitizeName(input_name).replace(" ",".")))
else:
outputDestination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, inputCategory))
output_destination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, input_category))
try:
outputDestination = outputDestination.encode(core.SYS_ENCODING)
output_destination = output_destination.encode(core.SYS_ENCODING)
except UnicodeError:
pass
if outputDestination in inputDirectory:
outputDestination = inputDirectory
if output_destination in input_directory:
output_destination = input_directory
logger.info("Output directory set to: {0}".format(outputDestination))
logger.info("Output directory set to: {0}".format(output_destination))
if core.SAFE_MODE and outputDestination == core.TORRENT_DEFAULTDIR:
if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULTDIR:
logger.error('The output directory:[{0}] is the Download Directory. '
'Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format
(inputDirectory))
(input_directory))
return [-1, ""]
logger.debug("Scanning files in directory: {0}".format(inputDirectory))
logger.debug("Scanning files in directory: {0}".format(input_directory))
if sectionName in ['HeadPhones', 'Lidarr']:
if section_name in ['HeadPhones', 'Lidarr']:
core.NOFLATTEN.extend(
inputCategory) # Make sure we preserve folder structure for HeadPhones.
input_category) # Make sure we preserve folder structure for HeadPhones.
now = datetime.datetime.now()
if extract == 1:
inputFiles = core.listMediaFiles(inputDirectory, archives=False, other=True, otherext=extensions)
input_files = core.listMediaFiles(input_directory, archives=False, other=True, otherext=extensions)
else:
inputFiles = core.listMediaFiles(inputDirectory, other=True, otherext=extensions)
if len(inputFiles) == 0 and os.path.isfile(inputDirectory):
inputFiles = [inputDirectory]
logger.debug("Found 1 file to process: {0}".format(inputDirectory))
input_files = core.listMediaFiles(input_directory, other=True, otherext=extensions)
if len(input_files) == 0 and os.path.isfile(input_directory):
input_files = [input_directory]
logger.debug("Found 1 file to process: {0}".format(input_directory))
else:
logger.debug("Found {0} files in {1}".format(len(inputFiles), inputDirectory))
for inputFile in inputFiles:
filePath = os.path.dirname(inputFile)
fileName, fileExt = os.path.splitext(os.path.basename(inputFile))
fullFileName = os.path.basename(inputFile)
logger.debug("Found {0} files in {1}".format(len(input_files), input_directory))
for inputFile in input_files:
file_path = os.path.dirname(inputFile)
file_name, file_ext = os.path.splitext(os.path.basename(inputFile))
full_file_name = os.path.basename(inputFile)
targetFile = core.os.path.join(outputDestination, fullFileName)
if inputCategory in core.NOFLATTEN:
if not os.path.basename(filePath) in outputDestination:
targetFile = core.os.path.join(
core.os.path.join(outputDestination, os.path.basename(filePath)), fullFileName)
target_file = core.os.path.join(output_destination, full_file_name)
if input_category in core.NOFLATTEN:
if not os.path.basename(file_path) in output_destination:
target_file = core.os.path.join(
core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name)
logger.debug("Setting outputDestination to {0} to preserve folder structure".format
(os.path.dirname(targetFile)))
(os.path.dirname(target_file)))
try:
targetFile = targetFile.encode(core.SYS_ENCODING)
target_file = target_file.encode(core.SYS_ENCODING)
except UnicodeError:
pass
if root == 1:
if not foundFile:
logger.debug("Looking for {0} in: {1}".format(inputName, inputFile))
if any([core.sanitizeName(inputName) in core.sanitizeName(inputFile),
core.sanitizeName(fileName) in core.sanitizeName(inputName)]):
foundFile = True
if not found_file:
logger.debug("Looking for {0} in: {1}".format(input_name, inputFile))
if any([core.sanitizeName(input_name) in core.sanitizeName(inputFile),
core.sanitizeName(file_name) in core.sanitizeName(input_name)]):
found_file = True
logger.debug("Found file {0} that matches Torrent Name {1}".format
(fullFileName, inputName))
(full_file_name, input_name))
else:
continue
@@ -178,78 +182,78 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
mtime_lapse = now - datetime.datetime.fromtimestamp(os.path.getmtime(inputFile))
ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(inputFile))
if not foundFile:
if not found_file:
logger.debug("Looking for files with modified/created dates less than 5 minutes old.")
if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)):
foundFile = True
found_file = True
logger.debug("Found file {0} with date modified/created less than 5 minutes ago.".format
(fullFileName))
(full_file_name))
else:
continue # This file has not been recently moved or created, skip it
if Torrent_NoLink == 0:
if torrent_no_link == 0:
try:
core.copy_link(inputFile, targetFile, core.USELINK)
core.rmReadOnly(targetFile)
core.copy_link(inputFile, target_file, core.USELINK)
core.rmReadOnly(target_file)
except:
logger.error("Failed to link: {0} to {1}".format(inputFile, targetFile))
logger.error("Failed to link: {0} to {1}".format(inputFile, target_file))
inputName, outputDestination = convert_to_ascii(inputName, outputDestination)
input_name, output_destination = convert_to_ascii(input_name, output_destination)
if extract == 1:
logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
core.extractFiles(inputDirectory, outputDestination, keep_archive)
logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
core.extractFiles(input_directory, output_destination, keep_archive)
if inputCategory not in core.NOFLATTEN:
if input_category not in core.NOFLATTEN:
# don't flatten hp in case multi cd albums, and we need to copy this back later.
core.flatten(outputDestination)
core.flatten(output_destination)
# Now check if video files exist in destination:
if sectionName in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]:
numVideos = len(
core.listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False))
if numVideos > 0:
logger.info("Found {0} media files in {1}".format(numVideos, outputDestination))
if section_name in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]:
num_videos = len(
core.listMediaFiles(output_destination, media=True, audio=False, meta=False, archives=False))
if num_videos > 0:
logger.info("Found {0} media files in {1}".format(num_videos, output_destination))
status = 0
elif extract != 1:
logger.info("Found no media files in {0}. Sending to {1} to process".format(outputDestination, sectionName))
logger.info("Found no media files in {0}. Sending to {1} to process".format(output_destination, section_name))
status = 0
else:
logger.warning("Found no media files in {0}".format(outputDestination))
logger.warning("Found no media files in {0}".format(output_destination))
# Only these sections can handling failed downloads
# so make sure everything else gets through without the check for failed
if sectionName not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']:
if section_name not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']:
status = 0
logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, usercat, inputName))
logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, usercat, input_name))
if core.TORRENT_CHMOD_DIRECTORY:
core.rchmod(outputDestination, core.TORRENT_CHMOD_DIRECTORY)
core.rchmod(output_destination, core.TORRENT_CHMOD_DIRECTORY)
result = [0, ""]
if sectionName == 'UserScript':
result = external_script(outputDestination, inputName, inputCategory, section)
if section_name == 'UserScript':
result = external_script(output_destination, input_name, input_category, section)
elif sectionName in ['CouchPotato', 'Radarr']:
result = core.autoProcessMovie().process(sectionName, outputDestination, inputName,
status, clientAgent, inputHash, inputCategory)
elif sectionName in ['SickBeard', 'NzbDrone', 'Sonarr']:
if inputHash:
inputHash = inputHash.upper()
result = core.autoProcessTV().processEpisode(sectionName, outputDestination, inputName,
status, clientAgent, inputHash, inputCategory)
elif sectionName in ['HeadPhones', 'Lidarr']:
result = core.autoProcessMusic().process(sectionName, outputDestination, inputName,
status, clientAgent, inputCategory)
elif sectionName == 'Mylar':
result = core.autoProcessComics().processEpisode(sectionName, outputDestination, inputName,
status, clientAgent, inputCategory)
elif sectionName == 'Gamez':
result = core.autoProcessGames().process(sectionName, outputDestination, inputName,
status, clientAgent, inputCategory)
elif section_name in ['CouchPotato', 'Radarr']:
result = core.autoProcessMovie().process(section_name, output_destination, input_name,
status, clientAgent, input_hash, input_category)
elif section_name in ['SickBeard', 'NzbDrone', 'Sonarr']:
if input_hash:
input_hash = input_hash.upper()
result = core.autoProcessTV().processEpisode(section_name, output_destination, input_name,
status, clientAgent, input_hash, input_category)
elif section_name in ['HeadPhones', 'Lidarr']:
result = core.autoProcessMusic().process(section_name, output_destination, input_name,
status, clientAgent, input_category)
elif section_name == 'Mylar':
result = core.autoProcessComics().processEpisode(section_name, output_destination, input_name,
status, clientAgent, input_category)
elif section_name == 'Gamez':
result = core.autoProcessGames().process(section_name, output_destination, input_name,
status, clientAgent, input_category)
plex_update(inputCategory)
plex_update(input_category)
if result[0] != 0:
if not core.TORRENT_RESUME_ON_FAILURE:
@@ -258,26 +262,26 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
elif clientAgent != 'manual':
logger.error("A problem was reported in the autoProcess* script. "
"If torrent was paused we will resume seeding")
core.resume_torrent(clientAgent, inputHash, inputID, inputName)
core.resume_torrent(clientAgent, input_hash, inputID, input_name)
else:
if clientAgent != 'manual':
# update download status in our DB
core.update_downloadInfoStatus(inputName, 1)
core.update_downloadInfoStatus(input_name, 1)
# remove torrent
if core.USELINK == 'move-sym' and not core.DELETE_ORIGINAL == 1:
logger.debug('Checking for sym-links to re-direct in: {0}'.format(inputDirectory))
for dirpath, dirs, files in os.walk(inputDirectory):
logger.debug('Checking for sym-links to re-direct in: {0}'.format(input_directory))
for dirpath, dirs, files in os.walk(input_directory):
for file in files:
logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath, file)))
replace_links(os.path.join(dirpath, file))
core.remove_torrent(clientAgent, inputHash, inputID, inputName)
core.remove_torrent(clientAgent, input_hash, inputID, input_name)
if not sectionName == 'UserScript':
if not section_name == 'UserScript':
# for user script, we assume this is cleaned by the script or option USER_SCRIPT_CLEAN
# cleanup our processing folders of any misc unwanted files and empty directories
core.cleanDir(outputDestination, sectionName, inputCategory)
core.cleanDir(output_destination, section_name, input_category)
return result
@@ -287,7 +291,7 @@ def main(args):
core.initialize()
# clientAgent for Torrents
clientAgent = core.TORRENT_CLIENTAGENT
client_agent = core.TORRENT_CLIENTAGENT
logger.info("#########################################################")
logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__)))
@@ -300,13 +304,13 @@ def main(args):
result = [0, ""]
try:
inputDirectory, inputName, inputCategory, inputHash, inputID = core.parse_args(clientAgent, args)
input_directory, input_name, input_category, input_hash, input_id = core.parse_args(client_agent, args)
except:
logger.error("There was a problem loading variables")
return -1
if inputDirectory and inputName and inputHash and inputID:
result = processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent)
if input_directory and input_name and input_hash and input_id:
result = processTorrent(input_directory, input_name, input_category, input_hash, input_id, client_agent)
else:
# Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")
@@ -315,42 +319,42 @@ def main(args):
for subsection in subsections:
if not core.CFG[section][subsection].isenabled():
continue
for dirName in core.getDirs(section, subsection, link='hard'):
for dir_name in core.getDirs(section, subsection, link='hard'):
logger.info("Starting manual run for {0}:{1} - Folder:{2}".format
(section, subsection, dirName))
(section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format
(os.path.basename(dirName)))
core.DOWNLOADINFO = core.get_downloadInfo(os.path.basename(dirName), 0)
(os.path.basename(dir_name)))
core.DOWNLOADINFO = core.get_downloadInfo(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO:
clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
inputHash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
inputID = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
input_hash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
input_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
logger.info("Found download info for {0}, "
"setting variables now ...".format(os.path.basename(dirName)))
"setting variables now ...".format(os.path.basename(dir_name)))
else:
logger.info('Unable to locate download info for {0}, '
'continuing to try and process this release ...'.format
(os.path.basename(dirName)))
clientAgent = 'manual'
inputHash = ''
inputID = ''
(os.path.basename(dir_name)))
client_agent = 'manual'
input_hash = ''
input_id = ''
if clientAgent.lower() not in core.TORRENT_CLIENTS:
if client_agent.lower() not in core.TORRENT_CLIENTS:
continue
try:
dirName = dirName.encode(core.SYS_ENCODING)
dir_name = dir_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
inputName = os.path.basename(dirName)
input_name = os.path.basename(dir_name)
try:
inputName = inputName.encode(core.SYS_ENCODING)
input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
results = processTorrent(dirName, inputName, subsection, inputHash or None, inputID or None,
clientAgent)
results = processTorrent(dir_name, input_name, subsection, input_hash or None, input_id or None,
client_agent)
if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format
(section, subsection))


@@ -13,6 +13,8 @@ requests.packages.urllib3.disable_warnings()
class autoProcessComics(object):
def processEpisode(self, section, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
dir_name = dirName
input_name = inputName
apc_version = "2.04"
comicrn_version = "1.01"
@@ -32,19 +34,19 @@ class autoProcessComics(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
inputName, dirName = convert_to_ascii(inputName, dirName)
clean_name, ext = os.path.splitext(inputName)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
clean_name, ext = os.path.splitext(input_name)
if len(ext) == 4: # we assume this was a standard extension.
inputName = clean_name
input_name = clean_name
params = {
'cmd': 'forceProcess',
'apikey': apikey,
'nzb_folder': remoteDir(dirName) if remote_path else dirName,
'nzb_folder': remoteDir(dir_name) if remote_path else dir_name,
}
if inputName is not None:
params['nzb_name'] = inputName
if input_name is not None:
params['nzb_name'] = input_name
params['failed'] = int(status)
params['apc_version'] = apc_version
params['comicrn_version'] = comicrn_version
@@ -72,7 +74,7 @@ class autoProcessComics(object):
if success:
logger.postprocess("SUCCESS: This issue has been processed successfully", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
else:
logger.warning("The issue does not appear to have successfully processed. Please check your Logs", section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]

View file

@@ -14,6 +14,8 @@ requests.packages.urllib3.disable_warnings()
class autoProcessGames(object):
def process(self, section, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
dir_name = dirName
input_name = inputName
status = int(status)
cfg = dict(core.CFG[section][inputCategory])
@@ -31,19 +33,19 @@ class autoProcessGames(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
inputName, dirName = convert_to_ascii(inputName, dirName)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
fields = inputName.split("-")
fields = input_name.split("-")
gamezID = fields[0].replace("[", "").replace("]", "").replace(" ", "")
gamez_id = fields[0].replace("[", "").replace("]", "").replace(" ", "")
downloadStatus = 'Downloaded' if status == 0 else 'Wanted'
download_status = 'Downloaded' if status == 0 else 'Wanted'
params = {
'api_key': apikey,
'mode': 'UPDATEREQUESTEDSTATUS',
'db_id': gamezID,
'status': downloadStatus
'db_id': gamez_id,
'status': download_status
}
logger.debug("Opening URL: {0}".format(url), section)
@@ -59,9 +61,9 @@ class autoProcessGames(object):
if library:
logger.postprocess("moving files to library: {0}".format(library), section)
try:
shutil.move(dirName, os.path.join(library, inputName))
shutil.move(dir_name, os.path.join(library, input_name))
except:
logger.error("Unable to move {0} to {1}".format(dirName, os.path.join(library, inputName)), section)
logger.error("Unable to move {0} to {1}".format(dir_name, os.path.join(library, input_name)), section)
return [1, "{0}: Failed to post-process - Unable to move files".format(section)]
else:
logger.error("No library specified to move files to. Please edit your configuration.", section)
@@ -71,8 +73,8 @@ class autoProcessGames(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif result['success']:
logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamezID, downloadStatus), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamez_id, download_status), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
else:
logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamezID), section)
logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamez_id), section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]


@@ -146,6 +146,8 @@ class autoProcessMovie(object):
return False
def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", download_id="", inputCategory=None, failureLink=None):
dir_name = dirName
input_name = inputName
cfg = dict(core.CFG[section][inputCategory])
@@ -158,9 +160,9 @@ class autoProcessMovie(object):
method = None
#added importMode for Radarr config
if section == "Radarr":
importMode = cfg.get("importMode","Move")
import_mode = cfg.get("importMode","Move")
else:
importMode = None
import_mode = None
delete_failed = int(cfg["delete_failed"])
wait_for = int(cfg["wait_for"])
ssl = int(cfg.get("ssl", 0))
@@ -174,19 +176,19 @@ class autoProcessMovie(object):
else:
extract = int(cfg.get("extract", 0))
imdbid = find_imdbid(dirName, inputName, omdbapikey)
imdbid = find_imdbid(dir_name, input_name, omdbapikey)
if section == "CouchPotato":
baseURL = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey)
base_url = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey)
if section == "Radarr":
baseURL = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
base_url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root)
headers = {'X-Api-Key': apikey}
if not apikey:
logger.info('No CouchPotato or Radarr apikey entered. Performing transcoder functions only')
release = None
elif server_responding(baseURL):
elif server_responding(base_url):
if section == "CouchPotato":
release = self.get_release(baseURL, imdbid, download_id)
release = self.get_release(base_url, imdbid, download_id)
else:
release = None
else:
@@ -208,28 +210,28 @@ class autoProcessMovie(object):
except:
pass
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0]
if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName))
cleanName = os.path.splitext(SpecificPath)
if cleanName[1] == ".nzb":
SpecificPath = cleanName[0]
if os.path.isdir(SpecificPath):
dirName = SpecificPath
specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb":
specific_path = clean_name[0]
if os.path.isdir(specific_path):
dir_name = specific_path
process_all_exceptions(inputName, dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
process_all_exceptions(input_name, dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
core.extractFiles(dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if not listMediaFiles(dir_name, media=True, audio=False, meta=False, archives=False) and listMediaFiles(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extractFiles(dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
good_files = 0
num_files = 0
# Check video files for corruption
for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
for video in listMediaFiles(dir_name, media=True, audio=False, meta=False, archives=False):
num_files += 1
if transcoder.isVideoGood(video, status):
import_subs(video)
@@ -246,47 +248,47 @@ class autoProcessMovie(object):
failureLink += '&corrupt=true'
status = 1
elif clientAgent == "manual":
logger.warning("No media files found in directory {0} to manually process.".format(dirName), section)
logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section)
return [0, ""] # Success (as far as this script is concerned)
else:
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section)
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section)
status = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
if status == 0:
if core.TRANSCODE == 1:
result, newDirName = transcoder.Transcode_directory(dirName)
result, new_dir_name = transcoder.Transcode_directory(dir_name)
if result == 0:
logger.debug("Transcoding succeeded for files in {0}".format(dirName), section)
dirName = newDirName
logger.debug("Transcoding succeeded for files in {0}".format(dir_name), section)
dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
if chmod_directory:
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section)
core.rchmod(dirName, chmod_directory)
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section)
core.rchmod(dir_name, chmod_directory)
else:
logger.error("Transcoding failed for files in {0}".format(dirName), section)
logger.error("Transcoding failed for files in {0}".format(dir_name), section)
return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]
for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
for video in listMediaFiles(dir_name, media=True, audio=False, meta=False, archives=False):
if not release and ".cp(tt" not in video and imdbid:
videoName, videoExt = os.path.splitext(video)
video2 = "{0}.cp({1}){2}".format(videoName, imdbid, videoExt)
video_name, video_ext = os.path.splitext(video)
video2 = "{0}.cp({1}){2}".format(video_name, imdbid, video_ext)
if not (clientAgent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'):
logger.debug('Renaming: {0} to: {1}'.format(video, video2))
os.rename(video, video2)
if not apikey: #If only using Transcoder functions, exit here.
logger.info('No CouchPotato or Radarr apikey entered. Processing completed.')
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
params = {}
if download_id and release_id:
params['downloader'] = downloader or clientAgent
params['download_id'] = download_id
params['media_folder'] = remoteDir(dirName) if remote_path else dirName
params['media_folder'] = remoteDir(dir_name) if remote_path else dir_name
if section == "CouchPotato":
if method == "manage":
@@ -295,22 +297,22 @@ class autoProcessMovie(object):
else:
command = "renamer.scan"
url = "{0}{1}".format(baseURL, command)
url = "{0}{1}".format(base_url, command)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
logger.postprocess("Starting {0} scan for {1}".format(method, inputName), section)
logger.postprocess("Starting {0} scan for {1}".format(method, input_name), section)
if section == "Radarr":
payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id,'importMode' : importMode}
payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id,'importMode' : import_mode}
if not download_id:
payload.pop("downloadClientId")
logger.debug("Opening URL: {0} with PARAMS: {1}".format(baseURL, payload), section)
logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(inputName), section)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(base_url, payload), section)
logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(input_name), section)
try:
if section == "CouchPotato":
r = requests.get(url, params=params, verify=False, timeout=(30, 1800))
else:
r = requests.post(baseURL, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
r = requests.post(base_url, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
except requests.ConnectionError:
logger.error("Unable to open URL", section)
return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
@@ -320,27 +322,27 @@ class autoProcessMovie(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif section == "CouchPotato" and result['success']:
logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dirName), section)
logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dir_name), section)
if method == "manage":
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif section == "Radarr":
logger.postprocess("Radarr response: {0}".format(result['state']))
try:
res = json.loads(r.content)
scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True
started = True
except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None
else:
logger.error("FAILED: {0} scan was unable to finish for folder {1}. exiting!".format(method, dirName),
logger.error("FAILED: {0} scan was unable to finish for folder {1}. exiting!".format(method, dir_name),
section)
return [1, "{0}: Failed to post-process - Server did not return success".format(section)]
else:
core.FAILED = True
logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(inputName), section)
logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(input_name), section)
if failureLink:
reportNzb(failureLink, clientAgent)
@@ -348,19 +350,19 @@ class autoProcessMovie(object):
logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section)
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
rmDir(dir_name)
if not release_id and not media_id:
logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(inputName),
logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(input_name),
section)
return [1, "{0}: Failed to post-process - Failed download not found in {1}".format(section, section)]
if release_id:
logger.postprocess("Setting failed release {0} to ignored ...".format(inputName), section)
logger.postprocess("Setting failed release {0} to ignored ...".format(input_name), section)
url = "{url}release.ignore".format(url=baseURL)
url = "{url}release.ignore".format(url=base_url)
params = {'id': release_id}
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
@@ -376,14 +378,14 @@ class autoProcessMovie(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif result['success']:
logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(inputName), section)
logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(input_name), section)
else:
logger.warning("FAILED: Unable to set {0} to ignored!".format(inputName), section)
return [1, "{0}: Failed to post-process - Unable to set {1} to ignored".format(section, inputName)]
logger.warning("FAILED: Unable to set {0} to ignored!".format(input_name), section)
return [1, "{0}: Failed to post-process - Unable to set {1} to ignored".format(section, input_name)]
logger.postprocess("Trying to snatch the next highest ranked release.", section)
url = "{0}movie.searcher.try_next".format(baseURL)
url = "{0}movie.searcher.try_next".format(base_url)
logger.debug("Opening URL: {0}".format(url), section)
try:
@@ -412,7 +414,7 @@ class autoProcessMovie(object):
while time.time() < timeout: # only wait 2 (default) minutes, then return.
logger.postprocess("Checking for status change, please stand by ...", section)
if section == "CouchPotato":
release = self.get_release(baseURL, imdbid, download_id, release_id)
release = self.get_release(base_url, imdbid, download_id, release_id)
scan_id = None
else:
release = None
@@ -424,35 +426,35 @@ class autoProcessMovie(object):
if release_status_old is None: # we didn't have a release before, but now we do.
logger.postprocess("SUCCESS: Movie {0} has now been added to CouchPotato with release status of [{1}]".format(
title, str(release_status_new).upper()), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
if release_status_new != release_status_old:
logger.postprocess("SUCCESS: Release for {0} has now been marked with a status of [{1}]".format(
title, str(release_status_new).upper()), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
except:
pass
elif scan_id:
url = "{0}/{1}".format(baseURL, scan_id)
url = "{0}/{1}".format(base_url, scan_id)
command_status = self.command_complete(url, params, headers, section)
if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section)
if command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ]
# return [1, "%s: Failed to post-process %s" % (section, input_name) ]
if not os.path.isdir(dirName):
if not os.path.isdir(dir_name):
logger.postprocess("SUCCESS: Input Directory [{0}] has been processed and removed".format(
dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=True):
elif not listMediaFiles(dir_name, media=True, audio=False, meta=False, archives=True):
logger.postprocess("SUCCESS: Input Directory [{0}] has no remaining media files. This has been fully processed.".format(
dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
# pause and let CouchPotatoServer/Radarr catch its breath
time.sleep(10 * wait_for)
@@ -462,6 +464,6 @@ class autoProcessMovie(object):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
logger.warning(
"{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(inputName, wait_for),
"{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(input_name, wait_for),
section)
return [1, "{0}: Failed to post-process - No change in status".format(section)]


@@ -97,6 +97,9 @@ class autoProcessMusic(object):
return [2, "no change"]
def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", inputCategory=None):
dir_name = dirName
input_name = inputName
status = int(status)
cfg = dict(core.CFG[section][inputCategory])
@@ -124,25 +127,25 @@ class autoProcessMusic(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0]
if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName))
cleanName = os.path.splitext(SpecificPath)
if cleanName[1] == ".nzb":
SpecificPath = cleanName[0]
if os.path.isdir(SpecificPath):
dirName = SpecificPath
specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb":
specific_path = clean_name[0]
if os.path.isdir(specific_path):
dir_name = specific_path
process_all_exceptions(inputName, dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
process_all_exceptions(input_name, dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
if not listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
core.extractFiles(dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if not listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and listMediaFiles(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extractFiles(dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
#if listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and status:
#if listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and status:
# logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section)
# status = 0
@@ -151,20 +154,20 @@ class autoProcessMusic(object):
params = {
'apikey': apikey,
'cmd': "forceProcess",
'dir': remoteDir(dirName) if remote_path else dirName
'dir': remoteDir(dir_name) if remote_path else dir_name
}
res = self.forceProcess(params, url, apikey, inputName, dirName, section, wait_for)
res = self.forceProcess(params, url, apikey, input_name, dir_name, section, wait_for)
if res[0] in [0, 1]:
return res
params = {
'apikey': apikey,
'cmd': "forceProcess",
'dir': os.path.split(remoteDir(dirName))[0] if remote_path else os.path.split(dirName)[0]
'dir': os.path.split(remoteDir(dir_name))[0] if remote_path else os.path.split(dir_name)[0]
}
res = self.forceProcess(params, url, apikey, inputName, dirName, section, wait_for)
res = self.forceProcess(params, url, apikey, input_name, dir_name, section, wait_for)
if res[0] in [0, 1]:
return res
@@ -176,11 +179,11 @@ class autoProcessMusic(object):
url = "{0}{1}:{2}{3}/api/v1/command".format(protocol, host, port, web_root)
headers = {"X-Api-Key": apikey}
if remote_path:
logger.debug("remote_path: {0}".format(remoteDir(dirName)), section)
data = {"name": "Rename", "path": remoteDir(dirName)}
logger.debug("remote_path: {0}".format(remoteDir(dir_name)), section)
data = {"name": "Rename", "path": remoteDir(dir_name)}
else:
logger.debug("path: {0}".format(dirName), section)
data = {"name": "Rename", "path": dirName}
logger.debug("path: {0}".format(dir_name), section)
data = {"name": "Rename", "path": dir_name}
data = json.dumps(data)
try:
logger.debug("Opening URL: {0} with data: {1}".format(url, data), section)
@@ -189,18 +192,18 @@ class autoProcessMusic(object):
logger.error("Unable to open URL: {0}".format(url), section)
return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
Success = False
Queued = False
Started = False
success = False
queued = False
started = False
try:
res = json.loads(r.content)
scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True
started = True
except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None
Started = False
started = False
return [1, "{0}: Failed to post-process - Unable to start scan".format(section)]
n = 0
@@ -214,15 +217,15 @@ class autoProcessMusic(object):
n += 1
if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section)
if not os.path.exists(dirName):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ]
# return [1, "%s: Failed to post-process %s" % (section, input_name) ]
else:
logger.debug("The Scan command did not return status completed. Passing back to {0} to attempt complete download handling.".format(section), section)
return [status, "{0}: Passing back to {1} to attempt Complete Download Handling".format(section, section)]
@@ -233,7 +236,7 @@ class autoProcessMusic(object):
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
else:
logger.warning("FAILED DOWNLOAD DETECTED", section)
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
rmDir(dir_name)
return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader.


@@ -76,7 +76,7 @@ class autoProcessTV(object):
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
delete_failed = int(cfg.get("delete_failed", 0))
nzbExtractionBy = cfg.get("nzbExtractionBy", "Downloader")
nzb_extraction_by = cfg.get("nzbExtractionBy", "Downloader")
process_method = cfg.get("process_method")
if clientAgent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym":
process_method = "symlink"
@@ -91,47 +91,47 @@ class autoProcessTV(object):
else:
extract = int(cfg.get("extract", 0))
#get importmode, default to "Move" for consistency with legacy
importMode = cfg.get("importMode","Move")
import_mode = cfg.get("importMode","Move")
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0]
if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName))
cleanName = os.path.splitext(SpecificPath)
if cleanName[1] == ".nzb":
SpecificPath = cleanName[0]
if os.path.isdir(SpecificPath):
dirName = SpecificPath
specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb":
specific_path = clean_name[0]
if os.path.isdir(specific_path):
dir_name = specific_path
# Attempt to create the directory if it doesn't exist and ignore any
# error stating that it already exists. This fixes a bug where SickRage
# won't process the directory because it doesn't exist.
try:
os.makedirs(dirName) # Attempt to create the directory
os.makedirs(dir_name) # Attempt to create the directory
except OSError as e:
# Re-raise the error if it wasn't about the directory not existing
if e.errno != errno.EEXIST:
raise
if 'process_method' not in fork_params or (clientAgent in ['nzbget', 'sabnzbd'] and nzbExtractionBy != "Destination"):
if inputName:
process_all_exceptions(inputName, dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if 'process_method' not in fork_params or (clientAgent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != "Destination"):
if input_name:
process_all_exceptions(input_name, dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
# Now check if tv files exist in destination.
if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
if listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
core.extractFiles(dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if not listMediaFiles(dir_name, media=True, audio=False, meta=False, archives=False):
if listMediaFiles(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extractFiles(dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
if listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False): # Check that a video exists. if not, assume failed.
flatten(dirName)
if listMediaFiles(dir_name, media=True, audio=False, meta=False, archives=False): # Check that a video exists. if not, assume failed.
flatten(dir_name)
# Check video files for corruption
good_files = 0
num_files = 0
for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
for video in listMediaFiles(dir_name, media=True, audio=False, meta=False, archives=False):
num_files += 1
if transcoder.isVideoGood(video, status):
good_files += 1
@@ -150,9 +150,9 @@ class autoProcessTV(object):
if failureLink:
failureLink += '&corrupt=true'
elif clientAgent == "manual":
logger.warning("No media files found in directory {0} to manually process.".format(dirName), section)
logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section)
return [0, ""] # Success (as far as this script is concerned)
elif nzbExtractionBy == "Destination":
elif nzb_extraction_by == "Destination":
logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.")
if int(failed) == 0:
logger.info("Setting Status Success.")
@@ -163,32 +163,32 @@ class autoProcessTV(object):
status = 1
failed = 1
else:
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section)
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section)
status = 1
failed = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
if status == 0 and core.TRANSCODE == 1: # only transcode successful downloads
result, newDirName = transcoder.Transcode_directory(dirName)
result, new_dir_name = transcoder.Transcode_directory(dir_name)
if result == 0:
logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dirName), section)
dirName = newDirName
logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dir_name), section)
dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
if chmod_directory:
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section)
core.rchmod(dirName, chmod_directory)
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section)
core.rchmod(dir_name, chmod_directory)
else:
logger.error("FAILED: Transcoding failed for files in {0}".format(dirName), section)
logger.error("FAILED: Transcoding failed for files in {0}".format(dir_name), section)
return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]
# configure SB params to pass
fork_params['quiet'] = 1
fork_params['proc_type'] = 'manual'
if inputName is not None:
fork_params['nzbName'] = inputName
if input_name is not None:
fork_params['nzbName'] = input_name
for param in copy.copy(fork_params):
if param == "failed":
@@ -206,10 +206,10 @@ class autoProcessTV(object):
if "proc_type" in fork_params:
del fork_params['proc_type']
if param in ["dirName", "dir", "proc_dir", "process_directory", "path"]:
fork_params[param] = dirName
if param in ["dir_name", "dir", "proc_dir", "process_directory", "path"]:
fork_params[param] = dir_name
if remote_path:
fork_params[param] = remoteDir(dirName)
fork_params[param] = remoteDir(dir_name)
if param == "process_method":
if process_method:
@@ -244,7 +244,7 @@ class autoProcessTV(object):
if status == 0:
if section == "NzbDrone" and not apikey:
logger.info('No Sonarr apikey entered. Processing completed.')
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section)
else:
core.FAILED = True
@@ -257,9 +257,9 @@ class autoProcessTV(object):
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
else:
logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section)
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
rmDir(dir_name)
return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader.
url = None
@@ -274,11 +274,11 @@ class autoProcessTV(object):
headers = {"X-Api-Key": apikey}
# params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
if remote_path:
logger.debug("remote_path: {0}".format(remoteDir(dirName)), section)
data = {"name": "DownloadedEpisodesScan", "path": remoteDir(dirName), "downloadClientId": download_id, "importMode": importMode}
logger.debug("remote_path: {0}".format(remoteDir(dir_name)), section)
data = {"name": "DownloadedEpisodesScan", "path": remoteDir(dir_name), "downloadClientId": download_id, "importMode": import_mode}
else:
logger.debug("path: {0}".format(dirName), section)
data = {"name": "DownloadedEpisodesScan", "path": dirName, "downloadClientId": download_id, "importMode": importMode}
logger.debug("path: {0}".format(dir_name), section)
data = {"name": "DownloadedEpisodesScan", "path": dir_name, "downloadClientId": download_id, "importMode": import_mode}
if not download_id:
data.pop("downloadClientId")
data = json.dumps(data)
@@ -306,45 +306,45 @@ class autoProcessTV(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
Success = False
Queued = False
Started = False
success = False
queued = False
started = False
if section == "SickBeard":
if apikey:
if r.json()['result'] == 'success':
Success = True
success = True
else:
for line in r.iter_lines():
if line:
line = line.decode('utf-8')
logger.postprocess("{0}".format(line), section)
if "Moving file from" in line:
inputName = os.path.split(line)[1]
input_name = os.path.split(line)[1]
if "added to the queue" in line:
Queued = True
queued = True
if "Processing succeeded" in line or "Successfully processed" in line:
Success = True
success = True
if Queued:
if queued:
time.sleep(60)
elif section == "NzbDrone":
try:
res = json.loads(r.content)
scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True
started = True
except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None
Started = False
started = False
if status != 0 and delete_failed and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
rmDir(dir_name)
if Success:
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
elif section == "NzbDrone" and Started:
if success:
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif section == "NzbDrone" and started:
n = 0
params = {}
url = "{0}/{1}".format(url, scan_id)
@@ -356,20 +356,20 @@ class autoProcessTV(object):
n += 1
if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section)
if not os.path.exists(dirName):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ]
# return [1, "%s: Failed to post-process %s" % (section, input_name) ]
if self.CDH(url2, headers, section=section):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
else:
logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section)
return [1, "{0}: Failed to post-process {1}".format(section, inputName)]
return [1, "{0}: Failed to post-process {1}".format(section, input_name)]
else:
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)] # We did not receive Success confirmation.
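For reference, the request-and-poll flow shown in the hunks above can be sketched on its own. This is a minimal illustration, not the project's implementation: the /api/command path, the 'state' field name, the timeouts, and the poll interval are assumptions, while the payload fields and the X-Api-Key header mirror the diff.

import time
import requests

def request_episode_scan(base_url, apikey, dir_name, download_id=None, poll_interval=10, max_polls=30):
    # Submit a DownloadedEpisodesScan command, then poll the returned command id
    # until it reports a terminal state. Endpoint path and timing are illustrative.
    headers = {"X-Api-Key": apikey}
    payload = {"name": "DownloadedEpisodesScan", "path": dir_name}
    if download_id:
        payload["downloadClientId"] = download_id
    r = requests.post("{0}/api/command".format(base_url), json=payload, headers=headers, timeout=60)
    r.raise_for_status()
    scan_id = int(r.json()["id"])
    for _ in range(max_polls):
        state = requests.get("{0}/api/command/{1}".format(base_url, scan_id),
                             headers=headers, timeout=60).json().get("state")
        if state in ("completed", "failed"):
            return state
        time.sleep(poll_interval)
    return "unknown"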

View file

@@ -22,7 +22,7 @@ def extract(filePath, outputDestination):
invislocation = os.path.join(core.PROGRAM_DIR, 'core', 'extractor', 'bin', 'invisible.vbs')
cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, "x", "-y"]
ext_7zip = [".rar", ".zip", ".tar.gz", "tgz", ".tar.bz2", ".tbz", ".tar.lzma", ".tlz", ".7z", ".xz"]
EXTRACT_COMMANDS = dict.fromkeys(ext_7zip, cmd_7zip)
extract_commands = dict.fromkeys(ext_7zip, cmd_7zip)
# Using unix
else:
required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"]
@@ -33,7 +33,7 @@ def extract(filePath, outputDestination):
# ".lzma": ["xz", "-d --format=lzma --keep"],
# ".bz2": ["bzip2", "-d --keep"],
EXTRACT_COMMANDS = {
extract_commands = {
".rar": ["unrar", "x", "-o+", "-y"],
".tar": ["tar", "-xf"],
".zip": ["unzip"],
@@ -49,24 +49,24 @@ def extract(filePath, outputDestination):
for cmd in required_cmds:
if call(['which', cmd], stdout=devnull,
stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist.
for k, v in EXTRACT_COMMANDS.items():
for k, v in extract_commands.items():
if cmd in v[0]:
if not call(["which", "7zr"], stdout=devnull, stderr=devnull): # we do have "7zr"
EXTRACT_COMMANDS[k] = ["7zr", "x", "-y"]
extract_commands[k] = ["7zr", "x", "-y"]
elif not call(["which", "7z"], stdout=devnull, stderr=devnull): # we do have "7z"
EXTRACT_COMMANDS[k] = ["7z", "x", "-y"]
extract_commands[k] = ["7z", "x", "-y"]
elif not call(["which", "7za"], stdout=devnull, stderr=devnull): # we do have "7za"
EXTRACT_COMMANDS[k] = ["7za", "x", "-y"]
extract_commands[k] = ["7za", "x", "-y"]
else:
core.logger.error("EXTRACTOR: {cmd} not found, "
"disabling support for {feature}".format
(cmd=cmd, feature=k))
del EXTRACT_COMMANDS[k]
del extract_commands[k]
devnull.close()
else:
core.logger.warning("EXTRACTOR: Cannot determine which tool to use when called from Transmission")
if not EXTRACT_COMMANDS:
if not extract_commands:
core.logger.warning("EXTRACTOR: No archive extracting programs found, plugin will be disabled")
ext = os.path.splitext(filePath)
@@ -74,14 +74,14 @@ def extract(filePath, outputDestination):
if ext[1] in (".gz", ".bz2", ".lzma"):
# Check if this is a tar
if os.path.splitext(ext[0])[1] == ".tar":
cmd = EXTRACT_COMMANDS[".tar{ext}".format(ext=ext[1])]
cmd = extract_commands[".tar{ext}".format(ext=ext[1])]
elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"):
cmd = EXTRACT_COMMANDS[os.path.splitext(ext[0])[1]]
cmd = extract_commands[os.path.splitext(ext[0])[1]]
elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives.
return False
else:
if ext[1] in EXTRACT_COMMANDS:
cmd = EXTRACT_COMMANDS[ext[1]]
if ext[1] in extract_commands:
cmd = extract_commands[ext[1]]
else:
core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format
(ext=ext[1]))
@@ -100,13 +100,13 @@ def extract(filePath, outputDestination):
core.logger.debug("Extracting {cmd} {file} {destination}".format
(cmd=cmd, file=filePath, destination=outputDestination))
origFiles = []
origDirs = []
orig_files = []
orig_dirs = []
for dir, subdirs, files in os.walk(outputDestination):
for subdir in subdirs:
origDirs.append(os.path.join(dir, subdir))
orig_dirs.append(os.path.join(dir, subdir))
for file in files:
origFiles.append(os.path.join(dir, file))
orig_files.append(os.path.join(dir, file))
pwd = os.getcwd() # Get our Present Working Directory
os.chdir(outputDestination) # Not all unpack commands accept full paths, so just extract into this directory
@@ -162,13 +162,13 @@ def extract(filePath, outputDestination):
perms = stat.S_IMODE(os.lstat(os.path.split(filePath)[0]).st_mode)
for dir, subdirs, files in os.walk(outputDestination):
for subdir in subdirs:
if not os.path.join(dir, subdir) in origFiles:
if not os.path.join(dir, subdir) in orig_files:
try:
os.chmod(os.path.join(dir, subdir), perms)
except:
pass
for file in files:
if not os.path.join(dir, file) in origFiles:
if not os.path.join(dir, file) in orig_files:
try:
shutil.copymode(filePath, os.path.join(dir, file))
except:
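As an aside, the renamed extract_commands mapping above pairs archive extensions with extractor command lines and swaps in whichever 7-Zip binary is actually installed. A standalone sketch of that probing idea follows; it uses shutil.which in place of the call(['which', ...]) check in the diff, and the extension list and flags are illustrative rather than the project's full set.

import shutil

def build_extract_commands():
    # Map archive extensions to extractor invocations, then verify each tool
    # exists; fall back to an available 7-Zip variant or drop the extension.
    commands = {
        ".rar": ["unrar", "x", "-o+", "-y"],
        ".zip": ["unzip"],
        ".tar": ["tar", "-xf"],
        ".7z": ["7zr", "x", "-y"],
    }
    for ext, cmd in list(commands.items()):
        if shutil.which(cmd[0]):
            continue
        fallback = next((tool for tool in ("7zr", "7z", "7za") if shutil.which(tool)), None)
        if fallback:
            commands[ext] = [fallback, "x", "-y"]
        else:
            del commands[ext]
    return commands

print(build_extract_commands())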

View file

@@ -79,13 +79,13 @@ def autoFork(section, inputCategory):
r = []
if r and r.ok:
if apikey:
optionalParameters = []
optional_parameters = []
try:
optionalParameters = r.json()['data']['optionalParameters'].keys()
optional_parameters = r.json()['data']['optionalParameters'].keys()
except:
optionalParameters = r.json()['data']['data']['optionalParameters'].keys()
optional_parameters = r.json()['data']['data']['optionalParameters'].keys()
for param in params:
if param not in optionalParameters:
if param not in optional_parameters:
rem_params.append(param)
else:
for param in params:
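The hunk above reads the optionalParameters list advertised by the SickBeard API and drops any fork parameter the server does not accept. A small sketch of that filtering, assuming api_response is the parsed JSON from that request (both key layouts tried in the diff are handled; the function name is made up):

def filter_supported_params(params, api_response):
    # Keep only the fork parameters that the API reports as supported.
    try:
        optional = api_response['data']['optionalParameters'].keys()
    except (KeyError, TypeError):
        optional = api_response['data']['data']['optionalParameters'].keys()
    return {name: value for name, value in params.items() if name in optional}

# Hypothetical usage:
# filter_supported_params({'proc_type': 'manual', 'quiet': 1},
#                         {'data': {'optionalParameters': {'proc_type': None}}})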

View file

@@ -251,7 +251,7 @@ class ConfigObj(configobj.ConfigObj, Section):
@staticmethod
def addnzbget():
# load configs into memory
CFG_NEW = config()
cfg_new = config()
try:
if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
@@ -274,196 +274,196 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = 'default_downloadDirectory'
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "General"
envKeys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfgKeys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfg_keys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "Network"
envKeys = ['MOUNTPOINTS']
cfgKeys = ['mount_points']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['MOUNTPOINTS']
cfg_keys = ['mount_points']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "CouchPotato"
envCatKey = 'NZBPO_CPSCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
env_cat_key = 'NZBPO_CPSCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
'wait_for', 'watch_dir', 'omdbapikey']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_CPS{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_CPS{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['Radarr'].sections:
CFG_NEW['Radarr'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['Radarr'].sections:
cfg_new['Radarr'][env_cat_key]['enabled'] = 0
section = "SickBeard"
envCatKey = 'NZBPO_SBCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
env_cat_key = 'NZBPO_SBCATEGORY'
env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
cfgKeys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork',
cfg_keys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork',
'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_SB{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_SB{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['NzbDrone'].sections:
CFG_NEW['NzbDrone'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['NzbDrone'].sections:
cfg_new['NzbDrone'][env_cat_key]['enabled'] = 0
section = "HeadPhones"
envCatKey = 'NZBPO_HPCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_HP{index}'.format(index=envKeys[index])
env_cat_key = 'NZBPO_HPCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_HP{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['Lidarr'].sections:
CFG_NEW['Lidarr'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['Lidarr'].sections:
cfg_new['Lidarr'][env_cat_key]['enabled'] = 0
section = "Mylar"
envCatKey = 'NZBPO_MYCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
env_cat_key = 'NZBPO_MYCATEGORY'
env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
cfg_keys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_MY{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_MY{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "Gamez"
envCatKey = 'NZBPO_GZCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_GZ{index}'.format(index=envKeys[index])
env_cat_key = 'NZBPO_GZCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_GZ{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "NzbDrone"
envCatKey = 'NZBPO_NDCATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
env_cat_key = 'NZBPO_NDCATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE']
#new cfgKey added for importMode
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path','importMode']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_ND{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_ND{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['SickBeard'].sections:
CFG_NEW['SickBeard'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['SickBeard'].sections:
cfg_new['SickBeard'][env_cat_key]['enabled'] = 0
section = "Radarr"
envCatKey = 'NZBPO_RACATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
env_cat_key = 'NZBPO_RACATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE']
#new cfgKey added for importMode
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey','importMode']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_RA{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_RA{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['CouchPotato'].sections:
CFG_NEW['CouchPotato'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['CouchPotato'].sections:
cfg_new['CouchPotato'][env_cat_key]['enabled'] = 0
section = "Lidarr"
envCatKey = 'NZBPO_LICATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
env_cat_key = 'NZBPO_LICATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_LI{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_LI{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['HeadPhones'].sections:
CFG_NEW['HeadPhones'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['HeadPhones'].sections:
cfg_new['HeadPhones'][env_cat_key]['enabled'] = 0
section = "Extensions"
envKeys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "Posix"
envKeys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfg_keys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "Transcoder"
envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
@@ -473,7 +473,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS',
'OUTPUTAUDIOOTHERCHANNELS','OUTPUTVIDEORESOLUTION']
cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
cfg_keys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir',
'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
@@ -483,51 +483,51 @@ class ConfigObj(configobj.ConfigObj, Section):
'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
'outputAudioOtherChannels', 'outputVideoResolution']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "WakeOnLan"
envKeys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfgKeys = ['wake', 'host', 'port', 'mac']
for index in range(len(envKeys)):
key = 'NZBPO_WOL{index}'.format(index=envKeys[index])
env_keys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfg_keys = ['wake', 'host', 'port', 'mac']
for index in range(len(env_keys)):
key = 'NZBPO_WOL{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "UserScript"
envCatKey = 'NZBPO_USCATEGORY'
envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
env_cat_key = 'NZBPO_USCATEGORY'
env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
cfg_keys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
except Exception as error:
logger.debug("Error {msg} when applying NZBGet config".format(msg=error))
try:
# write our new config to autoProcessMedia.cfg
CFG_NEW.filename = core.CONFIG_FILE
CFG_NEW.write()
cfg_new.filename = core.CONFIG_FILE
cfg_new.write()
except Exception as error:
logger.debug("Error {msg} when writing changes to .cfg".format(msg=error))
return CFG_NEW
return cfg_new
configobj.Section = Section
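All of the addnzbget() hunks above repeat one pattern: two parallel lists (env_keys and cfg_keys) are walked together, and any NZBPO_<PREFIX><KEY> variable found in the environment overrides the matching config option. A generic sketch of that mapping follows; a plain dict stands in for the configobj section used in the diff, and the helper name and example values are illustrative.

import os

def apply_env_overrides(cfg, section, prefix, env_keys, cfg_keys):
    # Copy NZBPO_<prefix><ENV_KEY> environment values onto cfg[section][cfg_key].
    for env_key, cfg_key in zip(env_keys, cfg_keys):
        key = 'NZBPO_{0}{1}'.format(prefix, env_key)
        if key in os.environ:
            cfg.setdefault(section, {})[cfg_key] = os.environ[key]
    return cfg

# Hypothetical usage:
os.environ.setdefault('NZBPO_WOLWAKE', '0')
print(apply_env_overrides({}, 'WakeOnLan', 'WOL', ['WAKE', 'HOST'], ['wake', 'host']))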

View file

@@ -52,7 +52,7 @@ class DBConnection(object):
if query is None:
return
sqlResult = None
sql_result = None
attempt = 0
while attempt < 5:
@@ -61,13 +61,13 @@ class DBConnection(object):
logger.log("{name}: {query}".format(name=self.filename, query=query), logger.DB)
cursor = self.connection.cursor()
cursor.execute(query)
sqlResult = cursor.fetchone()[0]
sql_result = cursor.fetchone()[0]
else:
logger.log("{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB)
cursor = self.connection.cursor()
cursor.execute(query, args)
sqlResult = cursor.fetchone()[0]
sql_result = cursor.fetchone()[0]
# get out of the connection attempt loop since we were successful
break
@@ -83,13 +83,13 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise
return sqlResult
return sql_result
def mass_action(self, querylist, logTransaction=False):
if querylist is None:
return
sqlResult = []
sql_result = []
attempt = 0
while attempt < 5:
@@ -98,16 +98,16 @@ class DBConnection(object):
if len(qu) == 1:
if logTransaction:
logger.log(qu[0], logger.DEBUG)
sqlResult.append(self.connection.execute(qu[0]))
sql_result.append(self.connection.execute(qu[0]))
elif len(qu) > 1:
if logTransaction:
logger.log(u"{query} with args {args}".format(query=qu[0], args=qu[1]), logger.DEBUG)
sqlResult.append(self.connection.execute(qu[0], qu[1]))
sql_result.append(self.connection.execute(qu[0], qu[1]))
self.connection.commit()
logger.log(u"Transaction with {x} query's executed".format(x=len(querylist)), logger.DEBUG)
return sqlResult
return sql_result
except sqlite3.OperationalError as error:
sqlResult = []
sql_result = []
if self.connection:
self.connection.rollback()
if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]:
@@ -123,24 +123,24 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise
return sqlResult
return sql_result
def action(self, query, args=None):
if query is None:
return
sqlResult = None
sql_result = None
attempt = 0
while attempt < 5:
try:
if args is None:
logger.log(u"{name}: {query}".format(name=self.filename, query=query), logger.DB)
sqlResult = self.connection.execute(query)
sql_result = self.connection.execute(query)
else:
logger.log(u"{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB)
sqlResult = self.connection.execute(query, args)
sql_result = self.connection.execute(query, args)
self.connection.commit()
# get out of the connection attempt loop since we were successful
break
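The DBConnection hunks above wrap every statement in the same retry loop: execute, and on an OperationalError whose message points at a locked or unopenable database, wait and try again (up to five attempts in the diff). A self-contained sketch of that pattern; the one-second delay and the wrapper name are assumptions.

import sqlite3
import time

def execute_with_retry(connection, query, args=None, attempts=5, delay=1):
    # Execute a statement, retrying transient "locked"/"unable to open" errors.
    for _ in range(attempts):
        try:
            result = connection.execute(query, args or [])
            connection.commit()
            return result
        except sqlite3.OperationalError as error:
            if "database is locked" in error.args[0] or "unable to open database file" in error.args[0]:
                time.sleep(delay)
            else:
                raise
    raise sqlite3.OperationalError("query failed after {0} attempts".format(attempts))

# Hypothetical usage:
conn = sqlite3.connect(":memory:")
execute_with_retry(conn, "CREATE TABLE downloads (input_name TEXT)")
execute_with_retry(conn, "INSERT INTO downloads VALUES (?)", ["example.nzb"])
print(conn.execute("SELECT input_name FROM downloads").fetchall())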
@@ -156,22 +156,22 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise
return sqlResult
return sql_result
def select(self, query, args=None):
sqlResults = self.action(query, args).fetchall()
sql_results = self.action(query, args).fetchall()
if sqlResults is None:
if sql_results is None:
return []
return sqlResults
return sql_results
def upsert(self, tableName, valueDict, keyDict):
changesBefore = self.connection.total_changes
changes_before = self.connection.total_changes
genParams = lambda myDict: ["{key} = ?".format(key=k) for k in myDict.keys()]
gen_params = lambda myDict: ["{key} = ?".format(key=k) for k in myDict.keys()]
items = list(valueDict.values()) + list(keyDict.values())
self.action(
@@ -179,13 +179,13 @@ class DBConnection(object):
"SET {params} "
"WHERE {conditions}".format(
table=tableName,
params=", ".join(genParams(valueDict)),
conditions=" AND ".join(genParams(keyDict))
params=", ".join(gen_params(valueDict)),
conditions=" AND ".join(gen_params(keyDict))
),
items
)
if self.connection.total_changes == changesBefore:
if self.connection.total_changes == changes_before:
self.action(
"INSERT OR IGNORE INTO {table} ({columns}) "
"VALUES ({values})".format(

View file

@@ -34,8 +34,8 @@ def process_all_exceptions(name, dirname):
rename_script(dirname)
for filename in listMediaFiles(dirname):
newfilename = None
parentDir = os.path.dirname(filename)
head, fileExtension = os.path.splitext(os.path.basename(filename))
parent_dir = os.path.dirname(filename)
head, file_extension = os.path.splitext(os.path.basename(filename))
if reverse_pattern.search(head) is not None:
exception = reverse_filename
elif garbage_name.search(head) is not None:
@@ -44,7 +44,7 @@ def process_all_exceptions(name, dirname):
exception = None
newfilename = filename
if not newfilename:
newfilename = exception(filename, parentDir, name)
newfilename = exception(filename, parent_dir, name)
if core.GROUPS:
newfilename = strip_groups(newfilename)
if newfilename != filename:
@@ -55,29 +55,30 @@ def strip_groups(filename):
if not core.GROUPS:
return filename
dirname, file = os.path.split(filename)
head, fileExtension = os.path.splitext(file)
head, file_extension = os.path.splitext(file)
newname = head.replace(' ', '.')
for group in core.GROUPS:
newname = newname.replace(group, '')
newname = newname.replace('[]', '')
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)
return newfile_path
def rename_file(filename, newfilePath):
if os.path.isfile(newfilePath):
newfilePath = os.path.splitext(newfilePath)[0] + ".NTM" + os.path.splitext(newfilePath)[1]
newfile_path = newfilePath
if os.path.isfile(newfile_path):
newfile_path = os.path.splitext(newfile_path)[0] + ".NTM" + os.path.splitext(newfile_path)[1]
logger.debug("Replacing file name {old} with download name {new}".format
(old=filename, new=newfilePath), "EXCEPTION")
(old=filename, new=newfile_path), "EXCEPTION")
try:
os.rename(filename, newfilePath)
os.rename(filename, newfile_path)
except Exception as error:
logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION")
def replace_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename))
head, file_extension = os.path.splitext(os.path.basename(filename))
if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
newname = os.path.basename(dirname).replace(' ', '.')
logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION")
@@ -88,13 +89,13 @@ def replace_filename(filename, dirname, name):
else:
logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION")
newname = name
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)
return newfile_path
def reverse_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename))
head, file_extension = os.path.splitext(os.path.basename(filename))
na_parts = season_pattern.search(head)
if na_parts is not None:
word_p = word_pattern.findall(na_parts.group(2))
@@ -114,9 +115,9 @@ def reverse_filename(filename, dirname, name):
newname = newname.replace(' ', '.')
logger.debug("Reversing filename {old} to {new}".format
(old=head, new=newname), "EXCEPTION")
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)
return newfile_path
def rename_script(dirname):
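The rename helpers in this file all end the same way: rebuild the new name from the cleaned head plus the original extension, then hand it to rename_file, which avoids clobbering an existing target by tagging the new name. A tiny sketch of that collision handling; the ".NTM" marker comes from the diff, the function name and everything else is illustrative.

import os

def collision_safe_rename(src, dst):
    # If the destination already exists, insert a marker before the extension
    # instead of overwriting it, then perform the rename.
    if os.path.isfile(dst):
        root, ext = os.path.splitext(dst)
        dst = root + ".NTM" + ext
    os.rename(src, dst)
    return dst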

View file

@@ -50,10 +50,10 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
for dirpath, dirnames, filenames in os.walk(outputDestination):
for file in filenames:
filePath = core.os.path.join(dirpath, file)
fileName, fileExtension = os.path.splitext(file)
file_path = core.os.path.join(dirpath, file)
file_name, file_extension = os.path.splitext(file)
if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS:
if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS:
num_files += 1
if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files.
continue
@@ -63,7 +63,7 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
command.append('{0}'.format(file))
continue
elif param == "FP":
command.append('{0}'.format(filePath))
command.append('{0}'.format(file_path))
continue
elif param == "TN":
command.append('{0}'.format(torrentName))
@@ -83,7 +83,7 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
cmd = ""
for item in command:
cmd = "{cmd} {item}".format(cmd=cmd, item=item)
logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=filePath), "USERSCRIPT")
logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=file_path), "USERSCRIPT")
try:
p = Popen(command)
res = p.wait()
@@ -104,9 +104,9 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
num_files_new = 0
for dirpath, dirnames, filenames in os.walk(outputDestination):
for file in filenames:
fileName, fileExtension = os.path.splitext(file)
file_name, file_extension = os.path.splitext(file)
if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL":
if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL":
num_files_new += 1
if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0:
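external_script() above expands single-letter placeholders from the user's parameter list (the diff shows 'FN' for the file name, 'FP' for the full path, 'TN' for the torrent name) into a command that is then run with Popen. A reduced sketch of that substitution; the function signature and the pass-through of unknown parameters are assumptions.

from subprocess import Popen

def run_user_script(script, params, file_name, file_path, torrent_name):
    # Build the command by replacing known placeholders, then run it and
    # return the script's exit code.
    substitutions = {"FN": file_name, "FP": file_path, "TN": torrent_name}
    command = [script] + [substitutions.get(param, param) for param in params]
    return Popen(command).wait()

# Hypothetical usage:
# run_user_script("/usr/local/bin/notify.sh", ["FP", "TN"],
#                 "episode.mkv", "/downloads/tv/episode.mkv", "Example.Torrent")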

View file

@@ -106,86 +106,89 @@ def remoteDir(path):
def category_search(inputDirectory, inputName, inputCategory, root, categories):
input_directory = inputDirectory
input_category = inputCategory
input_name = inputName
tordir = False
try:
inputName = inputName.encode(core.SYS_ENCODING)
input_name = input_name.encode(core.SYS_ENCODING)
except:
pass
try:
inputDirectory = inputDirectory.encode(core.SYS_ENCODING)
input_directory = input_directory.encode(core.SYS_ENCODING)
except:
pass
if inputDirectory is None: # =Nothing to process here.
return inputDirectory, inputName, inputCategory, root
if input_directory is None: # =Nothing to process here.
return input_directory, input_name, input_category, root
pathlist = os.path.normpath(inputDirectory).split(os.sep)
pathlist = os.path.normpath(input_directory).split(os.sep)
if inputCategory and inputCategory in pathlist:
logger.debug("SEARCH: Found the Category: {0} in directory structure".format(inputCategory))
elif inputCategory:
logger.debug("SEARCH: Could not find the category: {0} in the directory structure".format(inputCategory))
if input_category and input_category in pathlist:
logger.debug("SEARCH: Found the Category: {0} in directory structure".format(input_category))
elif input_category:
logger.debug("SEARCH: Could not find the category: {0} in the directory structure".format(input_category))
else:
try:
inputCategory = list(set(pathlist) & set(categories))[-1] # assume last match is most relevant category.
logger.debug("SEARCH: Found Category: {0} in directory structure".format(inputCategory))
input_category = list(set(pathlist) & set(categories))[-1] # assume last match is most relevant category.
logger.debug("SEARCH: Found Category: {0} in directory structure".format(input_category))
except IndexError:
inputCategory = ""
input_category = ""
logger.debug("SEARCH: Could not find a category in the directory structure")
if not os.path.isdir(inputDirectory) and os.path.isfile(inputDirectory): # If the input directory is a file
if not inputName:
inputName = os.path.split(os.path.normpath(inputDirectory))[1]
return inputDirectory, inputName, inputCategory, root
if not os.path.isdir(input_directory) and os.path.isfile(input_directory): # If the input directory is a file
if not input_name:
input_name = os.path.split(os.path.normpath(input_directory))[1]
return input_directory, input_name, input_category, root
if inputCategory and os.path.isdir(os.path.join(inputDirectory, inputCategory)):
if input_category and os.path.isdir(os.path.join(input_directory, input_category)):
logger.info(
"SEARCH: Found category directory {0} in input directory directory {1}".format(inputCategory, inputDirectory))
inputDirectory = os.path.join(inputDirectory, inputCategory)
logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory))
if inputName and os.path.isdir(os.path.join(inputDirectory, inputName)):
logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format(inputName, inputDirectory))
inputDirectory = os.path.join(inputDirectory, inputName)
logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory))
"SEARCH: Found category directory {0} in input directory directory {1}".format(input_category, input_directory))
input_directory = os.path.join(input_directory, input_category)
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory))
if input_name and os.path.isdir(os.path.join(input_directory, input_name)):
logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format(input_name, input_directory))
input_directory = os.path.join(input_directory, input_name)
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory))
tordir = True
elif inputName and os.path.isdir(os.path.join(inputDirectory, sanitizeName(inputName))):
elif input_name and os.path.isdir(os.path.join(input_directory, sanitizeName(input_name))):
logger.info("SEARCH: Found torrent directory {0} in input directory directory {1}".format(
sanitizeName(inputName), inputDirectory))
inputDirectory = os.path.join(inputDirectory, sanitizeName(inputName))
logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory))
sanitizeName(input_name), input_directory))
input_directory = os.path.join(input_directory, sanitizeName(input_name))
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory))
tordir = True
elif inputName and os.path.isfile(os.path.join(inputDirectory, inputName)):
logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format(inputName, inputDirectory))
inputDirectory = os.path.join(inputDirectory, inputName)
logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory))
elif input_name and os.path.isfile(os.path.join(input_directory, input_name)):
logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format(input_name, input_directory))
input_directory = os.path.join(input_directory, input_name)
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory))
tordir = True
elif inputName and os.path.isfile(os.path.join(inputDirectory, sanitizeName(inputName))):
elif input_name and os.path.isfile(os.path.join(input_directory, sanitizeName(input_name))):
logger.info("SEARCH: Found torrent file {0} in input directory directory {1}".format(
sanitizeName(inputName), inputDirectory))
inputDirectory = os.path.join(inputDirectory, sanitizeName(inputName))
logger.info("SEARCH: Setting inputDirectory to {0}".format(inputDirectory))
sanitizeName(input_name), input_directory))
input_directory = os.path.join(input_directory, sanitizeName(input_name))
logger.info("SEARCH: Setting input_directory to {0}".format(input_directory))
tordir = True
imdbid = [item for item in pathlist if '.cp(tt' in item] # This looks for the .cp(tt imdb id in the path.
if imdbid and '.cp(tt' not in inputName:
inputName = imdbid[0] # This ensures the imdb id is preserved and passed to CP
if imdbid and '.cp(tt' not in input_name:
input_name = imdbid[0] # This ensures the imdb id is preserved and passed to CP
tordir = True
if inputCategory and not tordir:
if input_category and not tordir:
try:
index = pathlist.index(inputCategory)
index = pathlist.index(input_category)
if index + 1 < len(pathlist):
tordir = True
logger.info("SEARCH: Found a unique directory {0} in the category directory".format
(pathlist[index + 1]))
if not inputName:
inputName = pathlist[index + 1]
if not input_name:
input_name = pathlist[index + 1]
except ValueError:
pass
if inputName and not tordir:
if inputName in pathlist or sanitizeName(inputName) in pathlist:
logger.info("SEARCH: Found torrent directory {0} in the directory structure".format(inputName))
if input_name and not tordir:
if input_name in pathlist or sanitizeName(input_name) in pathlist:
logger.info("SEARCH: Found torrent directory {0} in the directory structure".format(input_name))
tordir = True
else:
root = 1
@@ -196,7 +199,7 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
logger.info("SEARCH: Could not find a unique directory for this download. Assume a common directory.")
logger.info("SEARCH: We will try and determine which files to process, individually")
return inputDirectory, inputName, inputCategory, root
return input_directory, input_name, input_category, root
def getDirSize(inputPath):
@@ -209,19 +212,19 @@ def getDirSize(inputPath):
def is_minSize(inputName, minSize):
fileName, fileExt = os.path.splitext(os.path.basename(inputName))
file_name, file_ext = os.path.splitext(os.path.basename(inputName))
# audio files we need to check directory size not file size
inputSize = os.path.getsize(inputName)
if fileExt in core.AUDIOCONTAINER:
input_size = os.path.getsize(inputName)
if file_ext in core.AUDIOCONTAINER:
try:
inputSize = getDirSize(os.path.dirname(inputName))
input_size = getDirSize(os.path.dirname(inputName))
except:
logger.error("Failed to get file size for {0}".format(inputName), 'MINSIZE')
return True
# Ignore files under a certain size
if inputSize > minSize * 1048576:
if input_size > minSize * 1048576:
return True
@@ -309,13 +312,13 @@ def replace_links(link):
def flatten(outputDestination):
logger.info("FLATTEN: Flattening directory: {0}".format(outputDestination))
for outputFile in listMediaFiles(outputDestination):
dirPath = os.path.dirname(outputFile)
fileName = os.path.basename(outputFile)
dir_path = os.path.dirname(outputFile)
file_name = os.path.basename(outputFile)
if dirPath == outputDestination:
if dir_path == outputDestination:
continue
target = os.path.join(outputDestination, fileName)
target = os.path.join(outputDestination, file_name)
try:
shutil.move(outputFile, target)
@@ -411,6 +414,7 @@ def WakeUp():
def CharReplace(Name):
name = Name
# Special character hex range:
# CP850: 0x80-0xA5 (fortunately not used in ISO-8859-15)
# UTF-8: 1st hex code 0xC2-0xC3 followed by a 2nd hex code 0xA1-0xFF
@@ -419,73 +423,76 @@ def CharReplace(Name):
# If there is special character, detects if it is a UTF-8, CP850 or ISO-8859-15 encoding
encoded = False
encoding = None
if isinstance(Name, text_type):
return encoded, Name.encode(core.SYS_ENCODING)
for Idx in range(len(Name)):
if isinstance(name, text_type):
return encoded, name.encode(core.SYS_ENCODING)
for Idx in range(len(name)):
# /!\ detection is done 2char by 2char for UTF-8 special character
if (len(Name) != 1) & (Idx < (len(Name) - 1)):
if (len(name) != 1) & (Idx < (len(name) - 1)):
# Detect UTF-8
if ((Name[Idx] == '\xC2') | (Name[Idx] == '\xC3')) & (
(Name[Idx + 1] >= '\xA0') & (Name[Idx + 1] <= '\xFF')):
if ((name[Idx] == '\xC2') | (name[Idx] == '\xC3')) & (
(name[Idx + 1] >= '\xA0') & (name[Idx + 1] <= '\xFF')):
encoding = 'utf-8'
break
# Detect CP850
elif (Name[Idx] >= '\x80') & (Name[Idx] <= '\xA5'):
elif (name[Idx] >= '\x80') & (name[Idx] <= '\xA5'):
encoding = 'cp850'
break
# Detect ISO-8859-15
elif (Name[Idx] >= '\xA6') & (Name[Idx] <= '\xFF'):
elif (name[Idx] >= '\xA6') & (name[Idx] <= '\xFF'):
encoding = 'iso-8859-15'
break
else:
# Detect CP850
if (Name[Idx] >= '\x80') & (Name[Idx] <= '\xA5'):
if (name[Idx] >= '\x80') & (name[Idx] <= '\xA5'):
encoding = 'cp850'
break
# Detect ISO-8859-15
elif (Name[Idx] >= '\xA6') & (Name[Idx] <= '\xFF'):
elif (name[Idx] >= '\xA6') & (name[Idx] <= '\xFF'):
encoding = 'iso-8859-15'
break
if encoding and not encoding == core.SYS_ENCODING:
encoded = True
Name = Name.decode(encoding).encode(core.SYS_ENCODING)
return encoded, Name
name = name.decode(encoding).encode(core.SYS_ENCODING)
return encoded, name
def convert_to_ascii(inputName, dirName):
input_name = inputName
dir_name = dirName
ascii_convert = int(core.CFG["ASCII"]["convert"])
if ascii_convert == 0 or os.name == 'nt': # just return if we don't want to convert or on windows os and "\" is replaced!.
return inputName, dirName
return input_name, dir_name
encoded, inputName = CharReplace(inputName)
encoded, input_name = CharReplace(input_name)
dir, base = os.path.split(dirName)
dir, base = os.path.split(dir_name)
if not base: # ended with "/"
dir, base = os.path.split(dir)
encoded, base2 = CharReplace(base)
if encoded:
dirName = os.path.join(dir, base2)
dir_name = os.path.join(dir, base2)
logger.info("Renaming directory to: {0}.".format(base2), 'ENCODER')
os.rename(os.path.join(dir, base), dirName)
os.rename(os.path.join(dir, base), dir_name)
if 'NZBOP_SCRIPTDIR' in os.environ:
print("[NZB] DIRECTORY={0}".format(dirName))
print("[NZB] DIRECTORY={0}".format(dir_name))
for dirname, dirnames, filenames in os.walk(dirName, topdown=False):
for dirname, dirnames, filenames in os.walk(dir_name, topdown=False):
for subdirname in dirnames:
encoded, subdirname2 = CharReplace(subdirname)
if encoded:
logger.info("Renaming directory to: {0}.".format(subdirname2), 'ENCODER')
os.rename(os.path.join(dirname, subdirname), os.path.join(dirname, subdirname2))
for dirname, dirnames, filenames in os.walk(dirName):
for dirname, dirnames, filenames in os.walk(dir_name):
for filename in filenames:
encoded, filename2 = CharReplace(filename)
if encoded:
logger.info("Renaming file to: {0}.".format(filename2), 'ENCODER')
os.rename(os.path.join(dirname, filename), os.path.join(dirname, filename2))
return inputName, dirName
return input_name, dir_name
def parse_other(args):
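CharReplace() above works byte by byte: UTF-8 is assumed when a 0xC2/0xC3 lead byte is followed by a byte of 0xA0 or above, CP850 when a byte falls in 0x80-0xA5, and ISO-8859-15 for 0xA6-0xFF, after which the name is re-encoded to the system encoding. A Python 3 sketch of that heuristic over a bytes value follows; the ranges mirror the diff, but treating them as a reliable detector is the original code's assumption, not a guarantee.

def detect_legacy_encoding(raw):
    # Scan raw bytes for ranges characteristic of UTF-8, CP850 or ISO-8859-15.
    for index, byte in enumerate(raw):
        if byte in (0xC2, 0xC3) and index + 1 < len(raw) and raw[index + 1] >= 0xA0:
            return "utf-8"
        if 0x80 <= byte <= 0xA5:
            return "cp850"
        if byte >= 0xA6:
            return "iso-8859-15"
    return None

print(detect_legacy_encoding("café".encode("utf-8")))        # -> utf-8
print(detect_legacy_encoding("café".encode("iso-8859-15")))  # -> iso-8859-15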
@@ -495,68 +502,68 @@ def parse_other(args):
def parse_rtorrent(args):
# rtorrent usage: system.method.set_key = event.download.finished,TorrentToMedia,
# "execute={/path/to/nzbToMedia/TorrentToMedia.py,\"$d.get_base_path=\",\"$d.get_name=\",\"$d.get_custom1=\",\"$d.get_hash=\"}"
inputDirectory = os.path.normpath(args[1])
input_directory = os.path.normpath(args[1])
try:
inputName = args[2]
input_name = args[2]
except:
inputName = ''
input_name = ''
try:
inputCategory = args[3]
input_category = args[3]
except:
inputCategory = ''
input_category = ''
try:
inputHash = args[4]
input_hash = args[4]
except:
inputHash = ''
input_hash = ''
try:
inputID = args[4]
input_id = args[4]
except:
inputID = ''
input_id = ''
return inputDirectory, inputName, inputCategory, inputHash, inputID
return input_directory, input_name, input_category, input_hash, input_id
def parse_utorrent(args):
# uTorrent usage: call TorrentToMedia.py "%D" "%N" "%L" "%I"
inputDirectory = os.path.normpath(args[1])
inputName = args[2]
input_directory = os.path.normpath(args[1])
input_name = args[2]
try:
inputCategory = args[3]
input_category = args[3]
except:
inputCategory = ''
input_category = ''
try:
inputHash = args[4]
input_hash = args[4]
except:
inputHash = ''
input_hash = ''
try:
inputID = args[4]
input_id = args[4]
except:
inputID = ''
input_id = ''
return inputDirectory, inputName, inputCategory, inputHash, inputID
return input_directory, input_name, input_category, input_hash, input_id
def parse_deluge(args):
# Deluge usage: call TorrentToMedia.py TORRENT_ID TORRENT_NAME TORRENT_DIR
inputDirectory = os.path.normpath(args[3])
inputName = args[2]
inputHash = args[1]
inputID = args[1]
input_directory = os.path.normpath(args[3])
input_name = args[2]
input_hash = args[1]
input_id = args[1]
try:
inputCategory = core.TORRENT_CLASS.core.get_torrent_status(inputID, ['label']).get()['label']
input_category = core.TORRENT_CLASS.core.get_torrent_status(input_id, ['label']).get()['label']
except:
inputCategory = ''
return inputDirectory, inputName, inputCategory, inputHash, inputID
input_category = ''
return input_directory, input_name, input_category, input_hash, input_id
def parse_transmission(args):
# Transmission usage: call TorrenToMedia.py (%TR_TORRENT_DIR% %TR_TORRENT_NAME% is passed on as environmental variables)
inputDirectory = os.path.normpath(os.getenv('TR_TORRENT_DIR'))
inputName = os.getenv('TR_TORRENT_NAME')
inputCategory = '' # We dont have a category yet
inputHash = os.getenv('TR_TORRENT_HASH')
inputID = os.getenv('TR_TORRENT_ID')
return inputDirectory, inputName, inputCategory, inputHash, inputID
input_directory = os.path.normpath(os.getenv('TR_TORRENT_DIR'))
input_name = os.getenv('TR_TORRENT_NAME')
input_category = '' # We dont have a category yet
input_hash = os.getenv('TR_TORRENT_HASH')
input_id = os.getenv('TR_TORRENT_ID')
return input_directory, input_name, input_category, input_hash, input_id
def parse_vuze(args):
@@ -566,32 +573,32 @@ def parse_vuze(args):
except:
input = []
try:
inputDirectory = os.path.normpath(input[0])
input_directory = os.path.normpath(input[0])
except:
inputDirectory = ''
input_directory = ''
try:
inputName = input[1]
input_name = input[1]
except:
inputName = ''
input_name = ''
try:
inputCategory = input[2]
input_category = input[2]
except:
inputCategory = ''
input_category = ''
try:
inputHash = input[3]
input_hash = input[3]
except:
inputHash = ''
input_hash = ''
try:
inputID = input[3]
input_id = input[3]
except:
inputID = ''
input_id = ''
try:
if input[4] == 'single':
inputName = input[5]
input_name = input[5]
except:
pass
return inputDirectory, inputName, inputCategory, inputHash, inputID
return input_directory, input_name, input_category, input_hash, input_id
def parse_qbittorrent(args):
# qbittorrent usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py "%D|%N|%L|%I"
@@ -600,27 +607,27 @@ def parse_qbittorrent(args):
except:
input = []
try:
inputDirectory = os.path.normpath(input[0].replace('"',''))
input_directory = os.path.normpath(input[0].replace('"',''))
except:
inputDirectory = ''
input_directory = ''
try:
inputName = input[1].replace('"','')
input_name = input[1].replace('"','')
except:
inputName = ''
input_name = ''
try:
inputCategory = input[2].replace('"','')
input_category = input[2].replace('"','')
except:
inputCategory = ''
input_category = ''
try:
inputHash = input[3].replace('"','')
input_hash = input[3].replace('"','')
except:
inputHash = ''
input_hash = ''
try:
inputID = input[3].replace('"','')
input_id = input[3].replace('"','')
except:
inputID = ''
input_id = ''
return inputDirectory, inputName, inputCategory, inputHash, inputID
return input_directory, input_name, input_category, input_hash, input_id
def parse_args(clientAgent, args):
clients = {
@@ -656,10 +663,10 @@ def getDirs(section, subsection, link='hard'):
continue
try:
logger.debug("Found file {0} in root directory {1}.".format(os.path.split(mediafile)[1], path))
newPath = None
fileExt = os.path.splitext(mediafile)[1]
new_path = None
file_ext = os.path.splitext(mediafile)[1]
try:
if fileExt in core.AUDIOCONTAINER:
if file_ext in core.AUDIOCONTAINER:
f = beets.mediafile.MediaFile(mediafile)
# get artist and album info
@@ -667,8 +674,8 @@ def getDirs(section, subsection, link='hard'):
album = f.album
# create new path
newPath = os.path.join(path, "{0} - {1}".format(sanitizeName(artist), sanitizeName(album)))
elif fileExt in core.MEDIACONTAINER:
new_path = os.path.join(path, "{0} - {1}".format(sanitizeName(artist), sanitizeName(album)))
elif file_ext in core.MEDIACONTAINER:
f = guessit.guessit(mediafile)
# get title
@@ -677,29 +684,29 @@ def getDirs(section, subsection, link='hard'):
if not title:
title = os.path.splitext(os.path.basename(mediafile))[0]
newPath = os.path.join(path, sanitizeName(title))
new_path = os.path.join(path, sanitizeName(title))
except Exception as e:
logger.error("Exception parsing name for media file: {0}: {1}".format(os.path.split(mediafile)[1], e))
if not newPath:
if not new_path:
title = os.path.splitext(os.path.basename(mediafile))[0]
newPath = os.path.join(path, sanitizeName(title))
new_path = os.path.join(path, sanitizeName(title))
try:
newPath = newPath.encode(core.SYS_ENCODING)
new_path = new_path.encode(core.SYS_ENCODING)
except:
pass
# Just fail-safe incase we already have afile with this clean-name (was actually a bug from earlier code, but let's be safe).
if os.path.isfile(newPath):
newPath2 = os.path.join(os.path.join(os.path.split(newPath)[0], 'new'), os.path.split(newPath)[1])
newPath = newPath2
if os.path.isfile(new_path):
new_path2 = os.path.join(os.path.join(os.path.split(new_path)[0], 'new'), os.path.split(new_path)[1])
new_path = new_path2
# create new path if it does not exist
if not os.path.exists(newPath):
makeDir(newPath)
if not os.path.exists(new_path):
makeDir(new_path)
newfile = os.path.join(newPath, sanitizeName(os.path.split(mediafile)[1]))
newfile = os.path.join(new_path, sanitizeName(os.path.split(mediafile)[1]))
try:
newfile = newfile.encode(core.SYS_ENCODING)
except:
@ -733,9 +740,9 @@ def getDirs(section, subsection, link='hard'):
if core.USELINK == 'move':
try:
outputDirectory = os.path.join(core.OUTPUTDIRECTORY, subsection)
if os.path.exists(outputDirectory):
to_return.extend(processDir(outputDirectory))
output_directory = os.path.join(core.OUTPUTDIRECTORY, subsection)
if os.path.exists(output_directory):
to_return.extend(processDir(output_directory))
except Exception as e:
logger.error("Failed to add directories from {0} for post-processing: {1}".format(core.OUTPUTDIRECTORY, e))
@ -781,10 +788,10 @@ def cleanDir(path, section, subsection):
logger.info('Doing Forceful Clean of {0}'.format(path), 'CLEANDIR')
rmDir(path)
return
minSize = int(cfg.get('minSize', 0))
min_size = int(cfg.get('minSize', 0))
delete_ignored = int(cfg.get('delete_ignored', 0))
try:
num_files = len(listMediaFiles(path, minSize=minSize, delete_ignored=delete_ignored))
num_files = len(listMediaFiles(path, minSize=min_size, delete_ignored=delete_ignored))
except:
num_files = 'unknown'
if num_files > 0:
@ -917,10 +924,10 @@ def find_download(clientAgent, download_id):
return True
if clientAgent == 'sabnzbd':
if "http" in core.SABNZBDHOST:
baseURL = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
base_url = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
else:
baseURL = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
url = baseURL
base_url = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
url = base_url
params = {
'apikey': core.SABNZBDAPIKEY,
'mode': "get_files",
@ -944,10 +951,10 @@ def get_nzoid(inputName):
slots = []
logger.debug("Searching for nzoid from SAbnzbd ...")
if "http" in core.SABNZBDHOST:
baseURL = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
base_url = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
else:
baseURL = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
url = baseURL
base_url = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
url = base_url
params = {
'apikey': core.SABNZBDAPIKEY,
'mode': "queue",
@ -960,7 +967,7 @@ def get_nzoid(inputName):
return nzoid # failure
try:
result = r.json()
cleanName = os.path.splitext(os.path.split(inputName)[1])[0]
clean_name = os.path.splitext(os.path.split(inputName)[1])[0]
slots.extend([(slot['nzo_id'], slot['filename']) for slot in result['queue']['slots']])
except:
logger.warning("Data from SABnzbd queue could not be parsed")
@ -972,13 +979,13 @@ def get_nzoid(inputName):
return nzoid # failure
try:
result = r.json()
cleanName = os.path.splitext(os.path.split(inputName)[1])[0]
clean_name = os.path.splitext(os.path.split(inputName)[1])[0]
slots.extend([(slot['nzo_id'], slot['name']) for slot in result['history']['slots']])
except:
logger.warning("Data from SABnzbd history could not be parsed")
try:
for nzo_id, name in slots:
if name in [inputName, cleanName]:
if name in [inputName, clean_name]:
nzoid = nzo_id
logger.debug("Found nzoid: {0}".format(nzoid))
break
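
The lookup above ends by comparing each slot name against both the raw inputName and its extension-stripped clean_name; in isolation the match looks like this (slot data is invented):

import os

input_name = 'Some.Release.S01E01.nzb'                    # example value
clean_name = os.path.splitext(os.path.split(input_name)[1])[0]

# (nzo_id, name) pairs as collected from the queue and history calls above.
slots = [('SABnzbd_nzo_abc123', 'Some.Release.S01E01')]

nzoid = None
for nzo_id, name in slots:
    if name in (input_name, clean_name):
        nzoid = nzo_id
        break
print(nzoid)  # -> SABnzbd_nzo_abc123
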
@ -1014,19 +1021,19 @@ def is_archive_file(filename):
def isMediaFile(mediafile, media=True, audio=True, meta=True, archives=True, other=False, otherext=[]):
fileName, fileExt = os.path.splitext(mediafile)
file_name, file_ext = os.path.splitext(mediafile)
try:
# ignore MAC OS's "resource fork" files
if fileName.startswith('._'):
if file_name.startswith('._'):
return False
except:
pass
if (media and fileExt.lower() in core.MEDIACONTAINER) \
or (audio and fileExt.lower() in core.AUDIOCONTAINER) \
or (meta and fileExt.lower() in core.METACONTAINER) \
if (media and file_ext.lower() in core.MEDIACONTAINER) \
or (audio and file_ext.lower() in core.AUDIOCONTAINER) \
or (meta and file_ext.lower() in core.METACONTAINER) \
or (archives and is_archive_file(mediafile)) \
or (other and (fileExt.lower() in otherext or 'all' in otherext)):
or (other and (file_ext.lower() in otherext or 'all' in otherext)):
return True
else:
return False
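
For reference, a minimal stand-alone version of the same extension test; the container lists here are an illustrative subset, not the full core defaults:

import os

MEDIA = ['.mkv', '.mp4', '.avi']   # illustrative subset of core.MEDIACONTAINER
AUDIO = ['.mp3', '.flac']          # illustrative subset of core.AUDIOCONTAINER

def looks_like_media(filename):
    name, ext = os.path.splitext(filename)
    if name.startswith('._'):      # skip macOS resource-fork files
        return False
    return ext.lower() in MEDIA or ext.lower() in AUDIO

print(looks_like_media('Show.S01E01.mkv'))    # True
print(looks_like_media('._Show.S01E01.mkv'))  # False
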
@ -1036,15 +1043,15 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me
files = []
if not os.path.isdir(path):
if os.path.isfile(path): # Single file downloads.
curFile = os.path.split(path)[1]
if isMediaFile(curFile, media, audio, meta, archives, other, otherext):
cur_file = os.path.split(path)[1]
if isMediaFile(cur_file, media, audio, meta, archives, other, otherext):
# Optionally ignore sample files
if is_sample(path) or not is_minSize(path, minSize):
if delete_ignored == 1:
try:
os.unlink(path)
logger.debug('Ignored file {0} has been removed ...'.format
(curFile))
(cur_file))
except:
pass
else:
@ -1052,26 +1059,26 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me
return files
for curFile in os.listdir(text_type(path)):
fullCurFile = os.path.join(path, curFile)
for cur_file in os.listdir(text_type(path)):
full_cur_file = os.path.join(path, cur_file)
# if it's a folder do it recursively
if os.path.isdir(fullCurFile) and not curFile.startswith('.'):
files += listMediaFiles(fullCurFile, minSize, delete_ignored, media, audio, meta, archives, other, otherext)
if os.path.isdir(full_cur_file) and not cur_file.startswith('.'):
files += listMediaFiles(full_cur_file, minSize, delete_ignored, media, audio, meta, archives, other, otherext)
elif isMediaFile(curFile, media, audio, meta, archives, other, otherext):
elif isMediaFile(cur_file, media, audio, meta, archives, other, otherext):
# Optionally ignore sample files
if is_sample(fullCurFile) or not is_minSize(fullCurFile, minSize):
if is_sample(full_cur_file) or not is_minSize(full_cur_file, minSize):
if delete_ignored == 1:
try:
os.unlink(fullCurFile)
os.unlink(full_cur_file)
logger.debug('Ignored file {0} has been removed ...'.format
(curFile))
(cur_file))
except:
pass
continue
files.append(fullCurFile)
files.append(full_cur_file)
return sorted(files, key=len)
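
Stripped of the sample, minimum-size, and delete handling, the traversal above reduces to a recursive walk that keeps matching extensions; a hedged sketch:

import os

def list_files(path, exts=('.mkv', '.mp4')):
    # exts is an illustrative stand-in for the core container lists.
    found = []
    if os.path.isfile(path):
        if os.path.splitext(path)[1].lower() in exts:
            found.append(path)
        return found
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if os.path.isdir(full) and not entry.startswith('.'):
            found += list_files(full, exts)          # recurse into sub-folders
        elif os.path.splitext(entry)[1].lower() in exts:
            found.append(full)
    return sorted(found, key=len)                    # shortest paths first, like above
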
@ -1158,29 +1165,29 @@ def extractFiles(src, dst=None, keep_archive=None):
extracted_archive = []
for inputFile in listMediaFiles(src, media=False, audio=False, meta=False, archives=True):
dirPath = os.path.dirname(inputFile)
fullFileName = os.path.basename(inputFile)
archiveName = os.path.splitext(fullFileName)[0]
archiveName = re.sub(r"part[0-9]+", "", archiveName)
dir_path = os.path.dirname(inputFile)
full_file_name = os.path.basename(inputFile)
archive_name = os.path.splitext(full_file_name)[0]
archive_name = re.sub(r"part[0-9]+", "", archive_name)
if dirPath in extracted_folder and archiveName in extracted_archive:
if dir_path in extracted_folder and archive_name in extracted_archive:
            continue  # no need to extract this, but keep going to look for other archives and subdirectories.
try:
if extractor.extract(inputFile, dst or dirPath):
extracted_folder.append(dirPath)
extracted_archive.append(archiveName)
if extractor.extract(inputFile, dst or dir_path):
extracted_folder.append(dir_path)
extracted_archive.append(archive_name)
except Exception:
logger.error("Extraction failed for: {0}".format(fullFileName))
logger.error("Extraction failed for: {0}".format(full_file_name))
for folder in extracted_folder:
for inputFile in listMediaFiles(folder, media=False, audio=False, meta=False, archives=True):
fullFileName = os.path.basename(inputFile)
archiveName = os.path.splitext(fullFileName)[0]
archiveName = re.sub(r"part[0-9]+", "", archiveName)
if archiveName not in extracted_archive or keep_archive:
full_file_name = os.path.basename(inputFile)
archive_name = os.path.splitext(full_file_name)[0]
archive_name = re.sub(r"part[0-9]+", "", archive_name)
if archive_name not in extracted_archive or keep_archive:
continue # don't remove if we haven't extracted this archive, or if we want to preserve them.
logger.info("Removing extracted archive {0} from folder {1} ...".format(fullFileName, folder))
logger.info("Removing extracted archive {0} from folder {1} ...".format(full_file_name, folder))
try:
if not os.access(inputFile, os.W_OK):
os.chmod(inputFile, stat.S_IWUSR)
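
The de-duplication above works by stripping a trailing partNN token so every volume of a multi-part archive maps to the same key; for example (file names are invented):

import re

archive_files = ['Some.Release.part01.rar', 'Some.Release.part02.rar', 'Other.Release.rar']
seen = set()
for name in archive_files:
    base = re.sub(r"part[0-9]+", "", name.rsplit('.', 1)[0])  # part01/part02 collapse to one key
    if base in seen:
        print('skip', name, '- same archive as an earlier part')
        continue
    seen.add(base)
    print('extract', name)
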
@ -1252,7 +1259,7 @@ def plex_update(category):
def backupVersionedFile(old_file, version):
numTries = 0
num_tries = 0
new_file = '{old}.v{version}'.format(old=old_file, version=version)
@ -1269,11 +1276,11 @@ def backupVersionedFile(old_file, version):
except Exception as error:
logger.log(u"Error while trying to back up {old} to {new} : {msg}".format
(old=old_file, new=new_file, msg=error), logger.WARNING)
numTries += 1
num_tries += 1
time.sleep(1)
logger.log(u"Trying again.", logger.DEBUG)
if numTries >= 10:
if num_tries >= 10:
logger.log(u"Unable to back up {old} to {new} please do it manually.".format(old=old_file, new=new_file), logger.ERROR)
return False
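
The loop above is a plain bounded-retry pattern (up to ten attempts, one second apart); the same shape in isolation, with shutil.copy standing in for the backup step:

import shutil
import time

def copy_with_retries(src, dst, attempts=10):
    for _ in range(attempts):
        try:
            shutil.copy(src, dst)
            return True
        except OSError:
            time.sleep(1)   # brief pause before the next attempt
    return False            # give up after `attempts` failures
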
@ -1283,19 +1290,19 @@ def backupVersionedFile(old_file, version):
def update_downloadInfoStatus(inputName, status):
logger.db("Updating status of our download {0} in the DB to {1}".format(inputName, status))
myDB = nzbToMediaDB.DBConnection()
myDB.action("UPDATE downloads SET status=?, last_update=? WHERE input_name=?",
my_db = nzbToMediaDB.DBConnection()
my_db.action("UPDATE downloads SET status=?, last_update=? WHERE input_name=?",
[status, datetime.date.today().toordinal(), text_type(inputName)])
def get_downloadInfo(inputName, status):
logger.db("Getting download info for {0} from the DB".format(inputName))
myDB = nzbToMediaDB.DBConnection()
sqlResults = myDB.select("SELECT * FROM downloads WHERE input_name=? AND status=?",
my_db = nzbToMediaDB.DBConnection()
sql_results = my_db.select("SELECT * FROM downloads WHERE input_name=? AND status=?",
[text_type(inputName), status])
return sqlResults
return sql_results
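
These helpers boil down to an upsert keyed on input_directory followed by a select on input_name and status; a minimal sqlite3 sketch of the same round trip, with an invented schema that mirrors the columns used above:

import datetime
import sqlite3

con = sqlite3.connect(':memory:')
con.execute("CREATE TABLE downloads (input_directory TEXT PRIMARY KEY, input_name TEXT, "
            "input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update INTEGER)")

# Record the download (INSERT OR REPLACE stands in for the DBConnection.upsert helper).
con.execute("INSERT OR REPLACE INTO downloads VALUES (?, ?, ?, ?, ?, ?, ?)",
            ('/downloads/Some.Release', 'Some.Release', 'abc123', 'abc123',
             'sabnzbd', 0, datetime.date.today().toordinal()))

# Later, look it back up by name and status, as get_downloadInfo does.
rows = con.execute("SELECT * FROM downloads WHERE input_name=? AND status=?",
                   ('Some.Release', 0)).fetchall()
print(rows)
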
class RunningProcess(object):

View file

@ -17,10 +17,10 @@ from core.nzbToMediaUtil import makeDir
def isVideoGood(videofile, status):
fileNameExt = os.path.basename(videofile)
fileName, fileExt = os.path.splitext(fileNameExt)
file_name_ext = os.path.basename(videofile)
file_name, file_ext = os.path.splitext(file_name_ext)
disable = False
if fileExt not in core.MEDIACONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or fileExt in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED):
if file_ext not in core.MEDIACONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or file_ext in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED):
disable = True
else:
test_details, res = getVideoDetails(core.TEST_FILE)
@ -28,9 +28,9 @@ def isVideoGood(videofile, status):
disable = True
logger.info("DISABLED: ffprobe failed to analyse test file. Stopping corruption check.", 'TRANSCODER')
if test_details.get("streams"):
vidStreams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"]
audStreams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"]
if not (len(vidStreams) > 0 and len(audStreams) > 0):
vid_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"]
aud_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"]
if not (len(vid_streams) > 0 and len(aud_streams) > 0):
disable = True
logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.",
'TRANSCODER')
@ -40,25 +40,25 @@ def isVideoGood(videofile, status):
else:
return True
logger.info('Checking [{0}] for corruption, please stand by ...'.format(fileNameExt), 'TRANSCODER')
logger.info('Checking [{0}] for corruption, please stand by ...'.format(file_name_ext), 'TRANSCODER')
video_details, result = getVideoDetails(videofile)
if result != 0:
logger.error("FAILED: [{0}] is corrupted!".format(fileNameExt), 'TRANSCODER')
logger.error("FAILED: [{0}] is corrupted!".format(file_name_ext), 'TRANSCODER')
return False
if video_details.get("error"):
logger.info("FAILED: [{0}] returned error [{1}].".format(fileNameExt, video_details.get("error")), 'TRANSCODER')
logger.info("FAILED: [{0}] returned error [{1}].".format(file_name_ext, video_details.get("error")), 'TRANSCODER')
return False
if video_details.get("streams"):
videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
if len(videoStreams) > 0 and len(audioStreams) > 0:
logger.info("SUCCESS: [{0}] has no corruption.".format(fileNameExt), 'TRANSCODER')
video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
if len(video_streams) > 0 and len(audio_streams) > 0:
logger.info("SUCCESS: [{0}] has no corruption.".format(file_name_ext), 'TRANSCODER')
return True
else:
logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. "
"Assume corruption.".format
(fileNameExt, len(videoStreams), len(audioStreams)), 'TRANSCODER')
(file_name_ext, len(video_streams), len(audio_streams)), 'TRANSCODER')
return False
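
The corruption check ultimately asks ffprobe whether the file still reports at least one video and one audio stream; a rough stand-alone equivalent using ffprobe's JSON output:

import json
import subprocess

def count_streams(videofile):
    # Ask ffprobe for stream metadata as JSON (assumes ffprobe is on the PATH).
    out = subprocess.check_output(
        ['ffprobe', '-v', 'quiet', '-print_format', 'json', '-show_streams', videofile])
    streams = json.loads(out).get('streams', [])
    video = [s for s in streams if s.get('codec_type') == 'video']
    audio = [s for s in streams if s.get('codec_type') == 'audio']
    return len(video), len(audio)

# The check above treats a file as healthy only when both counts are non-zero.
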
@ -118,7 +118,7 @@ def getVideoDetails(videofile, img=None, bitbucket=None):
def buildCommands(file, newDir, movieName, bitbucket):
if isinstance(file, string_types):
inputFile = file
input_file = file
if 'concat:' in file:
file = file.split('|')[0].replace('concat:', '')
video_details, result = getVideoDetails(file)
@ -137,10 +137,10 @@ def buildCommands(file, newDir, movieName, bitbucket):
img, data = next(iteritems(file))
name = data['name']
video_details, result = getVideoDetails(data['files'][0], img, bitbucket)
inputFile = '-'
input_file = '-'
file = '-'
newfilePath = os.path.normpath(os.path.join(newDir, name) + core.VEXTENSION)
newfile_path = os.path.normpath(os.path.join(newDir, name) + core.VEXTENSION)
map_cmd = []
video_cmd = []
@ -152,9 +152,9 @@ def buildCommands(file, newDir, movieName, bitbucket):
if not video_details or not video_details.get(
"streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
videoStreams = []
audioStreams = []
subStreams = []
video_streams = []
audio_streams = []
sub_streams = []
map_cmd.extend(['-map', '0'])
if core.VCODEC:
@ -201,15 +201,15 @@ def buildCommands(file, newDir, movieName, bitbucket):
other_cmd.extend(['-movflags', '+faststart'])
else:
videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"]
video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
sub_streams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"]
if core.VEXTENSION not in ['.mkv', '.mpegts']:
subStreams = [item for item in video_details["streams"] if
sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"]
for video in videoStreams:
for video in video_streams:
codec = video["codec_name"]
fr = video.get("avg_frame_rate", 0)
width = video.get("width", 0)
@ -257,24 +257,24 @@ def buildCommands(file, newDir, movieName, bitbucket):
used_audio = 0
a_mapped = []
commentary = []
if audioStreams:
for i, val in reversed(list(enumerate(audioStreams))):
if audio_streams:
for i, val in reversed(list(enumerate(audio_streams))):
try:
if "Commentary" in val.get("tags").get("title"): # Split out commentry tracks.
commentary.append(val)
del audioStreams[i]
del audio_streams[i]
except:
continue
try:
audio1 = [item for item in audioStreams if item["tags"]["language"] == core.ALANGUAGE]
audio1 = [item for item in audio_streams if item["tags"]["language"] == core.ALANGUAGE]
except: # no language tags. Assume only 1 language.
audio1 = audioStreams
audio1 = audio_streams
try:
audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW]
except:
audio2 = []
try:
audio3 = [item for item in audioStreams if item["tags"]["language"] != core.ALANGUAGE]
audio3 = [item for item in audio_streams if item["tags"]["language"] != core.ALANGUAGE]
except:
audio3 = []
try:
@ -384,8 +384,8 @@ def buildCommands(file, newDir, movieName, bitbucket):
audio_cmd.extend(audio_cmd2)
if core.AINCLUDE and core.ACODEC3:
        audioStreams.extend(commentary)  # add commentary tracks back here.
for audio in audioStreams:
        audio_streams.extend(commentary)  # add commentary tracks back here.
for audio in audio_streams:
if audio["index"] in a_mapped:
continue
used_audio += 1
@ -422,7 +422,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
n = 0
for lan in core.SLANGUAGES:
try:
subs1 = [item for item in subStreams if item["tags"]["language"] == lan]
subs1 = [item for item in sub_streams if item["tags"]["language"] == lan]
except:
subs1 = []
if core.BURN and not subs1 and not burnt and os.path.isfile(file):
@ -431,13 +431,13 @@ def buildCommands(file, newDir, movieName, bitbucket):
video_cmd.extend(['-vf', 'subtitles={subs}'.format(subs=subfile)])
burnt = 1
for sub in subs1:
if core.BURN and not burnt and os.path.isfile(inputFile):
if core.BURN and not burnt and os.path.isfile(input_file):
subloc = 0
for index in range(len(subStreams)):
if subStreams[index]["index"] == sub["index"]:
for index in range(len(sub_streams)):
if sub_streams[index]["index"] == sub["index"]:
subloc = index
break
video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=inputFile, loc=subloc)])
video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=input_file, loc=subloc)])
burnt = 1
if not core.ALLOWSUBS:
break
@ -447,7 +447,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
s_mapped.extend([sub["index"]])
if core.SINCLUDE:
for sub in subStreams:
for sub in sub_streams:
if not core.ALLOWSUBS:
break
if sub["index"] in s_mapped:
@ -467,7 +467,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
if core.GENERALOPTS:
command.extend(core.GENERALOPTS)
command.extend(['-i', inputFile])
command.extend(['-i', input_file])
if core.SEMBED and os.path.isfile(file):
for subfile in get_subs(file):
@ -509,7 +509,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
command.extend(sub_cmd)
command.extend(meta_cmd)
command.extend(other_cmd)
command.append(newfilePath)
command.append(newfile_path)
if platform.system() != 'Windows':
command = core.NICENESS + command
return command
@ -517,13 +517,13 @@ def buildCommands(file, newDir, movieName, bitbucket):
def get_subs(file):
filepaths = []
subExt = ['.srt', '.sub', '.idx']
sub_ext = ['.srt', '.sub', '.idx']
name = os.path.splitext(os.path.split(file)[1])[0]
dir = os.path.split(file)[0]
for dirname, dirs, filenames in os.walk(dir):
for filename in filenames:
filepaths.extend([os.path.join(dirname, filename)])
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in subExt and name in item]
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in sub_ext and name in item]
return subfiles
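
get_subs just walks the release folder and keeps sidecar files whose name contains the video's base name; a compact sketch of the same idea:

import os

def sidecar_subs(videofile, exts=('.srt', '.sub', '.idx')):
    name = os.path.splitext(os.path.basename(videofile))[0]
    folder = os.path.dirname(videofile) or '.'
    subs = []
    for dirname, _dirs, filenames in os.walk(folder):
        for filename in filenames:
            if os.path.splitext(filename)[1] in exts and name in filename:
                subs.append(os.path.join(dirname, filename))
    return subs
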
@ -539,30 +539,30 @@ def extract_subs(file, newfilePath, bitbucket):
name = os.path.splitext(os.path.split(newfilePath)[1])[0]
try:
subStreams = [item for item in video_details["streams"] if
sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[
"codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"]
except:
subStreams = [item for item in video_details["streams"] if
sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"]
num = len(subStreams)
num = len(sub_streams)
for n in range(num):
sub = subStreams[n]
sub = sub_streams[n]
idx = sub["index"]
lan = sub.get("tags", {}).get("language", "unk")
if num == 1:
outputFile = os.path.join(subdir, "{0}.srt".format(name))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, n))
output_file = os.path.join(subdir, "{0}.srt".format(name))
if os.path.isfile(output_file):
output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, n))
else:
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, lan))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n))
output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, lan))
if os.path.isfile(output_file):
output_file = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n))
command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an',
'-codec:{index}'.format(index=idx), 'srt', outputFile]
'-codec:{index}'.format(index=idx), 'srt', output_file]
if platform.system() != 'Windows':
command = core.NICENESS + command
@ -578,7 +578,7 @@ def extract_subs(file, newfilePath, bitbucket):
if result == 0:
try:
shutil.copymode(file, outputFile)
shutil.copymode(file, output_file)
except:
pass
logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file))
@ -587,76 +587,76 @@ def extract_subs(file, newfilePath, bitbucket):
def processList(List, newDir, bitbucket):
remList = []
newList = []
rem_list = []
new_list = []
combine = []
vtsPath = None
vts_path = None
success = True
for item in List:
ext = os.path.splitext(item)[1].lower()
if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS:
logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER")
newList.extend(ripISO(item, newDir, bitbucket))
remList.append(item)
new_list.extend(ripISO(item, newDir, bitbucket))
rem_list.append(item)
elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS:
logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER")
if not vtsPath:
if not vts_path:
try:
vtsPath = re.match("(.+VIDEO_TS)", item).groups()[0]
vts_path = re.match("(.+VIDEO_TS)", item).groups()[0]
except:
vtsPath = os.path.split(item)[0]
remList.append(item)
vts_path = os.path.split(item)[0]
rem_list.append(item)
elif re.match(".+VIDEO_TS.", item) or re.match(".+VTS_[0-9][0-9]_[0-9].", item):
remList.append(item)
rem_list.append(item)
elif core.CONCAT and re.match(".+[cC][dD][0-9].", item):
remList.append(item)
rem_list.append(item)
combine.append(item)
else:
continue
if vtsPath:
newList.extend(combineVTS(vtsPath))
if vts_path:
new_list.extend(combineVTS(vts_path))
if combine:
newList.extend(combineCD(combine))
for file in newList:
new_list.extend(combineCD(combine))
for file in new_list:
if isinstance(file, string_types) and 'concat:' not in file and not os.path.isfile(file):
success = False
break
if success and newList:
List.extend(newList)
for item in remList:
if success and new_list:
List.extend(new_list)
for item in rem_list:
List.remove(item)
logger.debug("Successfully extracted .vob file {0} from disk image".format(newList[0]), "TRANSCODER")
elif newList and not success:
newList = []
remList = []
logger.debug("Successfully extracted .vob file {0} from disk image".format(new_list[0]), "TRANSCODER")
elif new_list and not success:
new_list = []
rem_list = []
logger.error("Failed extracting .vob files from disk image. Stopping transcoding.", "TRANSCODER")
return List, remList, newList, success
return List, rem_list, new_list, success
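
processList classifies each file purely by name: disk images are ripped, VIDEO_TS VOB sets are combined, and cd1/cd2 style parts are concatenated. A small illustration of those patterns with invented paths:

import re

samples = [
    '/downloads/Movie/VIDEO_TS/VTS_01_1.VOB',
    '/downloads/Movie/movie.cd1.avi',
    '/downloads/Movie/movie.iso',
]

for item in samples:
    if item.lower().endswith(('.iso', '.bin', '.img')):
        print(item, '-> rip the disk image')
    elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item):
        print(item, '-> combine as part of a VIDEO_TS set')
    elif re.match(".+[cC][dD][0-9].", item):
        print(item, '-> concatenate with its other CD parts')
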
def ripISO(item, newDir, bitbucket):
newFiles = []
new_files = []
failure_dir = 'failure'
# Mount the ISO in your OS and call combineVTS.
if not core.SEVENZIP:
logger.error("No 7zip installed. Can't extract image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir]
return newFiles
new_files = [failure_dir]
return new_files
cmd = [core.SEVENZIP, 'l', item]
try:
logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER")
print_cmd(cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
out, err = proc.communicate()
fileList = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in
file_list = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in
out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)]
combined = []
for n in range(99):
concat = []
m = 1
while True:
vtsName = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m)
if vtsName in fileList:
concat.append(vtsName)
vts_name = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m)
if vts_name in file_list:
concat.append(vts_name)
m += 1
else:
break
@ -668,29 +668,29 @@ def ripISO(item, newDir, bitbucket):
name = '{name}.cd{x}'.format(
name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1
)
newFiles.append({item: {'name': name, 'files': concat}})
new_files.append({item: {'name': name, 'files': concat}})
if core.CONCAT:
name = os.path.splitext(os.path.split(item)[1])[0]
newFiles.append({item: {'name': name, 'files': combined}})
if not newFiles:
new_files.append({item: {'name': name, 'files': combined}})
if not new_files:
logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir]
new_files = [failure_dir]
except:
logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir]
return newFiles
new_files = [failure_dir]
return new_files
def combineVTS(vtsPath):
newFiles = []
new_files = []
combined = ''
for n in range(99):
concat = ''
m = 1
while True:
vtsName = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
if os.path.isfile(os.path.join(vtsPath, vtsName)):
concat += '{file}|'.format(file=os.path.join(vtsPath, vtsName))
vts_name = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
if os.path.isfile(os.path.join(vtsPath, vts_name)):
concat += '{file}|'.format(file=os.path.join(vtsPath, vts_name))
m += 1
else:
break
@ -699,14 +699,14 @@ def combineVTS(vtsPath):
if core.CONCAT:
combined += '{files}|'.format(files=concat)
continue
newFiles.append('concat:{0}'.format(concat[:-1]))
new_files.append('concat:{0}'.format(concat[:-1]))
if core.CONCAT:
newFiles.append('concat:{0}'.format(combined[:-1]))
return newFiles
new_files.append('concat:{0}'.format(combined[:-1]))
return new_files
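
The strings built here use ffmpeg's concat protocol, a single pseudo-input of the form concat:file1|file2 passed to -i; a hedged sketch of what a consumer looks like (paths and output name are examples):

# One logical title assembled from two VOB parts, as combineVTS returns it:
pseudo_input = 'concat:/downloads/Movie/VIDEO_TS/VTS_01_1.VOB|/downloads/Movie/VIDEO_TS/VTS_01_2.VOB'

# Fed to ffmpeg roughly the way the transcoder does with any other input file:
command = ['ffmpeg', '-i', pseudo_input, '-c', 'copy', '/tmp/title_01.mpg']
print(' '.join(command))
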
def combineCD(combine):
newFiles = []
new_files = []
for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]):
concat = ''
for n in range(99):
@ -717,8 +717,8 @@ def combineCD(combine):
else:
break
if concat:
newFiles.append('concat:{0}'.format(concat[:-1]))
return newFiles
new_files.append('concat:{0}'.format(concat[:-1]))
return new_files
def print_cmd(command):
@ -734,43 +734,43 @@ def Transcode_directory(dirName):
logger.info("Checking for files to be transcoded")
final_result = 0 # initialize as successful
if core.OUTPUTVIDEOPATH:
newDir = core.OUTPUTVIDEOPATH
makeDir(newDir)
new_dir = core.OUTPUTVIDEOPATH
makeDir(new_dir)
name = os.path.splitext(os.path.split(dirName)[1])[0]
newDir = os.path.join(newDir, name)
makeDir(newDir)
new_dir = os.path.join(new_dir, name)
makeDir(new_dir)
else:
newDir = dirName
new_dir = dirName
if platform.system() == 'Windows':
bitbucket = open('NUL')
else:
bitbucket = open('/dev/null')
movieName = os.path.splitext(os.path.split(dirName)[1])[0]
List = core.listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False)
List, remList, newList, success = processList(List, newDir, bitbucket)
movie_name = os.path.splitext(os.path.split(dirName)[1])[0]
file_list = core.listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False)
file_list, rem_list, new_list, success = processList(file_list, new_dir, bitbucket)
if not success:
bitbucket.close()
return 1, dirName
for file in List:
for file in file_list:
if isinstance(file, string_types) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS:
continue
command = buildCommands(file, newDir, movieName, bitbucket)
newfilePath = command[-1]
command = buildCommands(file, new_dir, movie_name, bitbucket)
newfile_path = command[-1]
# transcoding files may remove the original file, so make sure to extract subtitles first
if core.SEXTRACT and isinstance(file, string_types):
extract_subs(file, newfilePath, bitbucket)
extract_subs(file, newfile_path, bitbucket)
try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)
os.remove(newfilePath)
os.remove(newfile_path)
except OSError as e:
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
logger.debug("Error when removing transcoding target: {0}".format(e))
except Exception as e:
logger.debug("Error when removing transcoding target: {0}".format(e))
logger.info("Transcoding video: {0}".format(newfilePath))
logger.info("Transcoding video: {0}".format(newfile_path))
print_cmd(command)
result = 1 # set result to failed in case call fails.
try:
@ -787,42 +787,42 @@ def Transcode_directory(dirName):
proc.communicate()
result = proc.returncode
except:
logger.error("Transcoding of video {0} has failed".format(newfilePath))
logger.error("Transcoding of video {0} has failed".format(newfile_path))
if core.SUBSDIR and result == 0 and isinstance(file, string_types):
for sub in get_subs(file):
name = os.path.splitext(os.path.split(file)[1])[0]
subname = os.path.split(sub)[1]
newname = os.path.splitext(os.path.split(newfilePath)[1])[0]
newname = os.path.splitext(os.path.split(newfile_path)[1])[0]
newpath = os.path.join(core.SUBSDIR, subname.replace(name, newname))
if not os.path.isfile(newpath):
os.rename(sub, newpath)
if result == 0:
try:
shutil.copymode(file, newfilePath)
shutil.copymode(file, newfile_path)
except:
pass
logger.info("Transcoding of video to {0} succeeded".format(newfilePath))
if os.path.isfile(newfilePath) and (file in newList or not core.DUPLICATE):
logger.info("Transcoding of video to {0} succeeded".format(newfile_path))
if os.path.isfile(newfile_path) and (file in new_list or not core.DUPLICATE):
try:
os.unlink(file)
except:
pass
else:
logger.error("Transcoding of video to {0} failed with result {1}".format(newfilePath, result))
logger.error("Transcoding of video to {0} failed with result {1}".format(newfile_path, result))
        # this will be 0 (successful) if all are successful, else will return a positive integer for failure.
final_result = final_result + result
if final_result == 0 and not core.DUPLICATE:
for file in remList:
for file in rem_list:
try:
os.unlink(file)
except:
pass
if not os.listdir(text_type(newDir)): # this is an empty directory and we didn't transcode into it.
os.rmdir(newDir)
newDir = dirName
if not os.listdir(text_type(new_dir)): # this is an empty directory and we didn't transcode into it.
os.rmdir(new_dir)
new_dir = dirName
if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB
newDir = dirName
new_dir = dirName
bitbucket.close()
return final_result, newDir
return final_result, new_dir
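
A side note on the bitbucket handle opened near the top of Transcode_directory: the platform branch between 'NUL' and '/dev/null' is equivalent to using the standard library's os.devnull constant, e.g.:

import os

# Same effect as opening 'NUL' on Windows and '/dev/null' elsewhere, without branching.
bitbucket = open(os.devnull, 'w')
bitbucket.write('ffmpeg noise goes nowhere\n')
bitbucket.close()
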

View file

@ -645,50 +645,53 @@ except NameError:
# post-processing
def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):
if core.SAFE_MODE and inputDirectory == core.NZB_DEFAULTDIR:
input_directory = inputDirectory
input_name = inputName
input_category = inputCategory
if core.SAFE_MODE and input_directory == core.NZB_DEFAULTDIR:
logger.error(
'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format(
inputDirectory))
input_directory))
return [-1, ""]
if not download_id and clientAgent == 'sabnzbd':
download_id = get_nzoid(inputName)
download_id = get_nzoid(input_name)
if clientAgent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding NZB download info for directory {0} to database'.format(inputDirectory))
logger.debug('Adding NZB download info for directory {0} to database'.format(input_directory))
myDB = nzbToMediaDB.DBConnection()
my_db = nzbToMediaDB.DBConnection()
inputDirectory1 = inputDirectory
inputName1 = inputName
input_directory1 = input_directory
input_name1 = input_name
try:
encoded, inputDirectory1 = CharReplace(inputDirectory)
encoded, inputName1 = CharReplace(inputName)
encoded, input_directory1 = CharReplace(input_directory)
encoded, input_name1 = CharReplace(input_name)
except:
pass
controlValueDict = {"input_directory": text_type(inputDirectory1)}
newValueDict = {"input_name": text_type(inputName1),
control_value_dict = {"input_directory": text_type(input_directory1)}
new_value_dict = {"input_name": text_type(input_name1),
"input_hash": text_type(download_id),
"input_id": text_type(download_id),
"client_agent": text_type(clientAgent),
"status": 0,
"last_update": datetime.date.today().toordinal()
}
myDB.upsert("downloads", newValueDict, controlValueDict)
my_db.upsert("downloads", new_value_dict, control_value_dict)
# auto-detect section
if inputCategory is None:
inputCategory = 'UNCAT'
usercat = inputCategory
section = core.CFG.findsection(inputCategory).isenabled()
if input_category is None:
input_category = 'UNCAT'
usercat = input_category
section = core.CFG.findsection(input_category).isenabled()
if section is None:
section = core.CFG.findsection("ALL").isenabled()
if section is None:
logger.error(
'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format(
inputCategory))
input_category))
return [-1, ""]
else:
usercat = "ALL"
@ -696,65 +699,65 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down
if len(section) > 1:
logger.error(
'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format(
inputCategory, section.keys()))
input_category, section.keys()))
return [-1, ""]
if section:
sectionName = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(sectionName))
section_name = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(section_name))
else:
logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format(
inputCategory))
input_category))
return [-1, ""]
cfg = dict(core.CFG[sectionName][usercat])
cfg = dict(core.CFG[section_name][usercat])
extract = int(cfg.get("extract", 0))
try:
if int(cfg.get("remote_path")) and not core.REMOTEPATHS:
logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format(
sectionName, inputCategory))
section_name, input_category))
return [-1, ""]
except:
logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format(
core.get("remote_path"), sectionName, inputCategory))
core.get("remote_path"), section_name, input_category))
inputName, inputDirectory = convert_to_ascii(inputName, inputDirectory)
input_name, input_directory = convert_to_ascii(input_name, input_directory)
if extract == 1:
logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
extractFiles(inputDirectory)
logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
extractFiles(input_directory)
logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, inputCategory, inputName))
logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, input_category, input_name))
if sectionName in ["CouchPotato", "Radarr"]:
result = autoProcessMovie().process(sectionName, inputDirectory, inputName, status, clientAgent, download_id,
inputCategory, failureLink)
elif sectionName in ["SickBeard", "NzbDrone", "Sonarr"]:
result = autoProcessTV().processEpisode(sectionName, inputDirectory, inputName, status, clientAgent,
download_id, inputCategory, failureLink)
elif sectionName in ["HeadPhones", "Lidarr"]:
result = autoProcessMusic().process(sectionName, inputDirectory, inputName, status, clientAgent, inputCategory)
elif sectionName == "Mylar":
result = autoProcessComics().processEpisode(sectionName, inputDirectory, inputName, status, clientAgent,
inputCategory)
elif sectionName == "Gamez":
result = autoProcessGames().process(sectionName, inputDirectory, inputName, status, clientAgent, inputCategory)
elif sectionName == 'UserScript':
result = external_script(inputDirectory, inputName, inputCategory, section[usercat])
if section_name in ["CouchPotato", "Radarr"]:
result = autoProcessMovie().process(section_name, input_directory, input_name, status, clientAgent, download_id,
input_category, failureLink)
elif section_name in ["SickBeard", "NzbDrone", "Sonarr"]:
result = autoProcessTV().processEpisode(section_name, input_directory, input_name, status, clientAgent,
download_id, input_category, failureLink)
elif section_name in ["HeadPhones", "Lidarr"]:
result = autoProcessMusic().process(section_name, input_directory, input_name, status, clientAgent, input_category)
elif section_name == "Mylar":
result = autoProcessComics().processEpisode(section_name, input_directory, input_name, status, clientAgent,
input_category)
elif section_name == "Gamez":
result = autoProcessGames().process(section_name, input_directory, input_name, status, clientAgent, input_category)
elif section_name == 'UserScript':
result = external_script(input_directory, input_name, input_category, section[usercat])
else:
result = [-1, ""]
plex_update(inputCategory)
plex_update(input_category)
if result[0] == 0:
if clientAgent != 'manual':
# update download status in our DB
update_downloadInfoStatus(inputName, 1)
if sectionName not in ['UserScript', 'NzbDrone', 'Sonarr', 'Radarr', 'Lidarr']:
update_downloadInfoStatus(input_name, 1)
if section_name not in ['UserScript', 'NzbDrone', 'Sonarr', 'Radarr', 'Lidarr']:
# cleanup our processing folders of any misc unwanted files and empty directories
cleanDir(inputDirectory, sectionName, inputCategory)
cleanDir(input_directory, section_name, input_category)
return result
@ -816,7 +819,7 @@ def main(args, section=None):
# Check for download_id to pass to CouchPotato
download_id = ""
failureLink = None
failure_link = None
if 'NZBPR_COUCHPOTATO' in os.environ:
download_id = os.environ['NZBPR_COUCHPOTATO']
elif 'NZBPR_DRONE' in os.environ:
@ -828,13 +831,13 @@ def main(args, section=None):
elif 'NZBPR_LIDARR' in os.environ:
download_id = os.environ['NZBPR_LIDARR']
if 'NZBPR__DNZB_FAILURE' in os.environ:
failureLink = os.environ['NZBPR__DNZB_FAILURE']
failure_link = os.environ['NZBPR__DNZB_FAILURE']
# All checks done, now launching the script.
clientAgent = 'nzbget'
result = process(os.environ['NZBPP_DIRECTORY'], inputName=os.environ['NZBPP_NZBNAME'], status=status,
clientAgent=clientAgent, download_id=download_id, inputCategory=os.environ['NZBPP_CATEGORY'],
failureLink=failureLink)
client_agent = 'nzbget'
result = process(os.environ['NZBPP_DIRECTORY'], input_name=os.environ['NZBPP_NZBNAME'], status=status,
clientAgent=client_agent, download_id=download_id, input_category=os.environ['NZBPP_CATEGORY'],
failureLink=failure_link)
# SABnzbd Pre 0.7.17
elif len(args) == core.SABNZB_NO_OF_ARGUMENTS:
# SABnzbd argv:
@ -845,9 +848,9 @@ def main(args, section=None):
# 5 User-defined category
# 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
clientAgent = 'sabnzbd'
client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd")
result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent,
result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], clientAgent=client_agent,
download_id='')
# SABnzbd 0.7.17+
elif len(args) >= core.SABNZB_0717_NO_OF_ARGUMENTS:
@ -860,14 +863,14 @@ def main(args, section=None):
# 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
# 8 Failure URL
clientAgent = 'sabnzbd'
client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd 0.7.17+")
result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent,
result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], clientAgent=client_agent,
download_id='', failureLink=''.join(args[8:]))
# Generic program
elif len(args) > 5 and args[5] == 'generic':
logger.info("Script triggered from generic program")
result = process(args[1], inputName=args[2], inputCategory=args[3], download_id=args[4])
result = process(args[1], input_name=args[2], input_category=args[3], download_id=args[4])
else:
# Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")
@ -876,39 +879,39 @@ def main(args, section=None):
for subsection in subsections:
if not core.CFG[section][subsection].isenabled():
continue
for dirName in getDirs(section, subsection, link='move'):
logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dirName))
logger.info("Checking database for download info for {0} ...".format(os.path.basename(dirName)))
for dir_name in getDirs(section, subsection, link='move'):
logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format(os.path.basename(dir_name)))
core.DOWNLOADINFO = get_downloadInfo(os.path.basename(dirName), 0)
core.DOWNLOADINFO = get_downloadInfo(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO:
logger.info("Found download info for {0}, "
"setting variables now ...".format
(os.path.basename(dirName)))
clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
(os.path.basename(dir_name)))
client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
download_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
else:
logger.info('Unable to locate download info for {0}, '
'continuing to try and process this release ...'.format
(os.path.basename(dirName)))
clientAgent = 'manual'
(os.path.basename(dir_name)))
client_agent = 'manual'
download_id = ''
if clientAgent and clientAgent.lower() not in core.NZB_CLIENTS:
if client_agent and client_agent.lower() not in core.NZB_CLIENTS:
continue
try:
dirName = dirName.encode(core.SYS_ENCODING)
dir_name = dir_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
inputName = os.path.basename(dirName)
input_name = os.path.basename(dir_name)
try:
inputName = inputName.encode(core.SYS_ENCODING)
input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
results = process(dirName, inputName, 0, clientAgent=clientAgent,
download_id=download_id or None, inputCategory=subsection)
results = process(dir_name, input_name, 0, clientAgent=client_agent,
download_id=download_id or None, input_category=subsection)
if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format
(section, subsection))