Merge pull request #1431 from clinton-hall/quality/pep8

Various PEP8 fixes: module-level functions, methods, and variables are renamed from camelCase to lowercase_with_underscores (processTorrent -> process_torrent, CharReplace -> char_replace, listMediaFiles -> list_media_files, and so on), and the autoProcess* handler classes are renamed to conventional class names (autoProcessComics -> Comic, autoProcessGames -> Game, autoProcessMovie -> Movie, autoProcessMusic -> Music, autoProcessTV -> TV).
Labrys of Knossos authored 2018-12-16 23:40:11 -05:00 (committed by GitHub)
commit 7798a71448
20 changed files with 1341 additions and 1341 deletions
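A minimal before/after sketch of a caller picking up the renamed nzbToMediaUtil helpers. This assumes the nzbToMedia package is importable; the directory path and release name are placeholders, not values taken from this PR.

# Old names removed by this PR: CharReplace, listMediaFiles, rmDir.
from core.nzbToMediaUtil import char_replace, list_media_files, remove_dir

encoded, clean = char_replace('Some.Release.Name')          # was CharReplace()
videos = list_media_files('/downloads/complete', media=True, audio=False,
                          meta=False, archives=False)       # was listMediaFiles()
if not videos:
    remove_dir('/downloads/complete')                       # was rmDir()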

View file

@@ -7,69 +7,69 @@ import sys
import core
from core import logger, nzbToMediaDB
from core.nzbToMediaUserScript import external_script
from core.nzbToMediaUtil import CharReplace, convert_to_ascii, plex_update, replace_links
from core.nzbToMediaUtil import char_replace, convert_to_ascii, plex_update, replace_links
from libs.six import text_type
def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent):
def process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent):
status = 1 # 1 = failed | 0 = success
root = 0
foundFile = 0
found_file = 0
if clientAgent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding TORRENT download info for directory {0} to database'.format(inputDirectory))
if client_agent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding TORRENT download info for directory {0} to database'.format(input_directory))
myDB = nzbToMediaDB.DBConnection()
my_db = nzbToMediaDB.DBConnection()
inputDirectory1 = inputDirectory
inputName1 = inputName
input_directory1 = input_directory
input_name1 = input_name
try:
encoded, inputDirectory1 = CharReplace(inputDirectory)
encoded, inputName1 = CharReplace(inputName)
encoded, input_directory1 = char_replace(input_directory)
encoded, input_name1 = char_replace(input_name)
except:
pass
controlValueDict = {"input_directory": text_type(inputDirectory1)}
newValueDict = {"input_name": text_type(inputName1),
"input_hash": text_type(inputHash),
"input_id": text_type(inputID),
"client_agent": text_type(clientAgent),
control_value_dict = {"input_directory": text_type(input_directory1)}
new_value_dict = {"input_name": text_type(input_name1),
"input_hash": text_type(input_hash),
"input_id": text_type(input_id),
"client_agent": text_type(client_agent),
"status": 0,
"last_update": datetime.date.today().toordinal()
}
myDB.upsert("downloads", newValueDict, controlValueDict)
my_db.upsert("downloads", new_value_dict, control_value_dict)
logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(inputDirectory, inputName, inputCategory))
logger.debug("Received Directory: {0} | Name: {1} | Category: {2}".format(input_directory, input_name, input_category))
# Confirm the category by parsing directory structure
inputDirectory, inputName, inputCategory, root = core.category_search(inputDirectory, inputName, inputCategory,
root, core.CATEGORIES)
if inputCategory == "":
inputCategory = "UNCAT"
input_directory, input_name, input_category, root = core.category_search(input_directory, input_name, input_category,
root, core.CATEGORIES)
if input_category == "":
input_category = "UNCAT"
usercat = inputCategory
usercat = input_category
try:
inputName = inputName.encode(core.SYS_ENCODING)
input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
try:
inputDirectory = inputDirectory.encode(core.SYS_ENCODING)
input_directory = input_directory.encode(core.SYS_ENCODING)
except UnicodeError:
pass
logger.debug("Determined Directory: {0} | Name: {1} | Category: {2}".format
(inputDirectory, inputName, inputCategory))
(input_directory, input_name, input_category))
# auto-detect section
section = core.CFG.findsection(inputCategory).isenabled()
section = core.CFG.findsection(input_category).isenabled()
if section is None:
section = core.CFG.findsection("ALL").isenabled()
if section is None:
logger.error('Category:[{0}] is not defined or is not enabled. '
'Please rename it or ensure it is enabled for the appropriate section '
'in your autoProcessMedia.cfg and try again.'.format
(inputCategory))
(input_category))
return [-1, ""]
else:
usercat = "ALL"
@@ -82,95 +82,95 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
return [-1, ""]
if section:
sectionName = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(sectionName))
section_name = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(section_name))
else:
logger.error("Unable to locate a section with subsection:{0} "
"enabled in your autoProcessMedia.cfg, exiting!".format
(inputCategory))
(input_category))
return [-1, ""]
section = dict(section[sectionName][usercat]) # Type cast to dict() to allow effective usage of .get()
section = dict(section[section_name][usercat]) # Type cast to dict() to allow effective usage of .get()
Torrent_NoLink = int(section.get("Torrent_NoLink", 0))
torrent_no_link = int(section.get("Torrent_NoLink", 0))
keep_archive = int(section.get("keep_archive", 0))
extract = int(section.get('extract', 0))
extensions = section.get('user_script_mediaExtensions', "").lower().split(',')
uniquePath = int(section.get("unique_path", 1))
unique_path = int(section.get("unique_path", 1))
if clientAgent != 'manual':
core.pause_torrent(clientAgent, inputHash, inputID, inputName)
if client_agent != 'manual':
core.pause_torrent(client_agent, input_hash, input_id, input_name)
# In case input is not directory, make sure to create one.
# This way Processing is isolated.
if not os.path.isdir(os.path.join(inputDirectory, inputName)):
basename = os.path.basename(inputDirectory)
basename = core.sanitizeName(inputName) \
if inputName == basename else os.path.splitext(core.sanitizeName(inputName))[0]
outputDestination = os.path.join(core.OUTPUTDIRECTORY, inputCategory, basename)
elif uniquePath:
outputDestination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, inputCategory, core.sanitizeName(inputName).replace(" ",".")))
if not os.path.isdir(os.path.join(input_directory, input_name)):
basename = os.path.basename(input_directory)
basename = core.sanitize_name(input_name) \
if input_name == basename else os.path.splitext(core.sanitize_name(input_name))[0]
output_destination = os.path.join(core.OUTPUTDIRECTORY, input_category, basename)
elif unique_path:
output_destination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitize_name(input_name).replace(" ", ".")))
else:
outputDestination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, inputCategory))
output_destination = os.path.normpath(
core.os.path.join(core.OUTPUTDIRECTORY, input_category))
try:
outputDestination = outputDestination.encode(core.SYS_ENCODING)
output_destination = output_destination.encode(core.SYS_ENCODING)
except UnicodeError:
pass
if outputDestination in inputDirectory:
outputDestination = inputDirectory
if output_destination in input_directory:
output_destination = input_directory
logger.info("Output directory set to: {0}".format(outputDestination))
logger.info("Output directory set to: {0}".format(output_destination))
if core.SAFE_MODE and outputDestination == core.TORRENT_DEFAULTDIR:
if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULTDIR:
logger.error('The output directory:[{0}] is the Download Directory. '
'Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format
(inputDirectory))
(input_directory))
return [-1, ""]
logger.debug("Scanning files in directory: {0}".format(inputDirectory))
logger.debug("Scanning files in directory: {0}".format(input_directory))
if sectionName in ['HeadPhones', 'Lidarr']:
if section_name in ['HeadPhones', 'Lidarr']:
core.NOFLATTEN.extend(
inputCategory) # Make sure we preserve folder structure for HeadPhones.
input_category) # Make sure we preserve folder structure for HeadPhones.
now = datetime.datetime.now()
if extract == 1:
inputFiles = core.listMediaFiles(inputDirectory, archives=False, other=True, otherext=extensions)
input_files = core.list_media_files(input_directory, archives=False, other=True, otherext=extensions)
else:
inputFiles = core.listMediaFiles(inputDirectory, other=True, otherext=extensions)
if len(inputFiles) == 0 and os.path.isfile(inputDirectory):
inputFiles = [inputDirectory]
logger.debug("Found 1 file to process: {0}".format(inputDirectory))
input_files = core.list_media_files(input_directory, other=True, otherext=extensions)
if len(input_files) == 0 and os.path.isfile(input_directory):
input_files = [input_directory]
logger.debug("Found 1 file to process: {0}".format(input_directory))
else:
logger.debug("Found {0} files in {1}".format(len(inputFiles), inputDirectory))
for inputFile in inputFiles:
filePath = os.path.dirname(inputFile)
fileName, fileExt = os.path.splitext(os.path.basename(inputFile))
fullFileName = os.path.basename(inputFile)
logger.debug("Found {0} files in {1}".format(len(input_files), input_directory))
for inputFile in input_files:
file_path = os.path.dirname(inputFile)
file_name, file_ext = os.path.splitext(os.path.basename(inputFile))
full_file_name = os.path.basename(inputFile)
targetFile = core.os.path.join(outputDestination, fullFileName)
if inputCategory in core.NOFLATTEN:
if not os.path.basename(filePath) in outputDestination:
targetFile = core.os.path.join(
core.os.path.join(outputDestination, os.path.basename(filePath)), fullFileName)
target_file = core.os.path.join(output_destination, full_file_name)
if input_category in core.NOFLATTEN:
if not os.path.basename(file_path) in output_destination:
target_file = core.os.path.join(
core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name)
logger.debug("Setting outputDestination to {0} to preserve folder structure".format
(os.path.dirname(targetFile)))
(os.path.dirname(target_file)))
try:
targetFile = targetFile.encode(core.SYS_ENCODING)
target_file = target_file.encode(core.SYS_ENCODING)
except UnicodeError:
pass
if root == 1:
if not foundFile:
logger.debug("Looking for {0} in: {1}".format(inputName, inputFile))
if any([core.sanitizeName(inputName) in core.sanitizeName(inputFile),
core.sanitizeName(fileName) in core.sanitizeName(inputName)]):
foundFile = True
if not found_file:
logger.debug("Looking for {0} in: {1}".format(input_name, inputFile))
if any([core.sanitize_name(input_name) in core.sanitize_name(inputFile),
core.sanitize_name(file_name) in core.sanitize_name(input_name)]):
found_file = True
logger.debug("Found file {0} that matches Torrent Name {1}".format
(fullFileName, inputName))
(full_file_name, input_name))
else:
continue
@@ -178,106 +178,106 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
mtime_lapse = now - datetime.datetime.fromtimestamp(os.path.getmtime(inputFile))
ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(inputFile))
if not foundFile:
if not found_file:
logger.debug("Looking for files with modified/created dates less than 5 minutes old.")
if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)):
foundFile = True
found_file = True
logger.debug("Found file {0} with date modified/created less than 5 minutes ago.".format
(fullFileName))
(full_file_name))
else:
continue # This file has not been recently moved or created, skip it
if Torrent_NoLink == 0:
if torrent_no_link == 0:
try:
core.copy_link(inputFile, targetFile, core.USELINK)
core.rmReadOnly(targetFile)
core.copy_link(inputFile, target_file, core.USELINK)
core.remove_read_only(target_file)
except:
logger.error("Failed to link: {0} to {1}".format(inputFile, targetFile))
logger.error("Failed to link: {0} to {1}".format(inputFile, target_file))
inputName, outputDestination = convert_to_ascii(inputName, outputDestination)
input_name, output_destination = convert_to_ascii(input_name, output_destination)
if extract == 1:
logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
core.extractFiles(inputDirectory, outputDestination, keep_archive)
logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
core.extract_files(input_directory, output_destination, keep_archive)
if inputCategory not in core.NOFLATTEN:
if input_category not in core.NOFLATTEN:
# don't flatten hp in case multi cd albums, and we need to copy this back later.
core.flatten(outputDestination)
core.flatten(output_destination)
# Now check if video files exist in destination:
if sectionName in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]:
numVideos = len(
core.listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False))
if numVideos > 0:
logger.info("Found {0} media files in {1}".format(numVideos, outputDestination))
if section_name in ["SickBeard", "NzbDrone", "Sonarr", "CouchPotato", "Radarr"]:
num_videos = len(
core.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False))
if num_videos > 0:
logger.info("Found {0} media files in {1}".format(num_videos, output_destination))
status = 0
elif extract != 1:
logger.info("Found no media files in {0}. Sending to {1} to process".format(outputDestination, sectionName))
logger.info("Found no media files in {0}. Sending to {1} to process".format(output_destination, section_name))
status = 0
else:
logger.warning("Found no media files in {0}".format(outputDestination))
logger.warning("Found no media files in {0}".format(output_destination))
# Only these sections can handling failed downloads
# so make sure everything else gets through without the check for failed
if sectionName not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']:
if section_name not in ['CouchPotato', 'Radarr', 'SickBeard', 'NzbDrone', 'Sonarr']:
status = 0
logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, usercat, inputName))
logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, usercat, input_name))
if core.TORRENT_CHMOD_DIRECTORY:
core.rchmod(outputDestination, core.TORRENT_CHMOD_DIRECTORY)
core.rchmod(output_destination, core.TORRENT_CHMOD_DIRECTORY)
result = [0, ""]
if sectionName == 'UserScript':
result = external_script(outputDestination, inputName, inputCategory, section)
if section_name == 'UserScript':
result = external_script(output_destination, input_name, input_category, section)
elif sectionName in ['CouchPotato', 'Radarr']:
result = core.autoProcessMovie().process(sectionName, outputDestination, inputName,
status, clientAgent, inputHash, inputCategory)
elif sectionName in ['SickBeard', 'NzbDrone', 'Sonarr']:
if inputHash:
inputHash = inputHash.upper()
result = core.autoProcessTV().processEpisode(sectionName, outputDestination, inputName,
status, clientAgent, inputHash, inputCategory)
elif sectionName in ['HeadPhones', 'Lidarr']:
result = core.autoProcessMusic().process(sectionName, outputDestination, inputName,
status, clientAgent, inputCategory)
elif sectionName == 'Mylar':
result = core.autoProcessComics().processEpisode(sectionName, outputDestination, inputName,
status, clientAgent, inputCategory)
elif sectionName == 'Gamez':
result = core.autoProcessGames().process(sectionName, outputDestination, inputName,
status, clientAgent, inputCategory)
elif section_name in ['CouchPotato', 'Radarr']:
result = core.Movie().process(section_name, output_destination, input_name,
status, client_agent, input_hash, input_category)
elif section_name in ['SickBeard', 'NzbDrone', 'Sonarr']:
if input_hash:
input_hash = input_hash.upper()
result = core.TV().process_episode(section_name, output_destination, input_name,
status, client_agent, input_hash, input_category)
elif section_name in ['HeadPhones', 'Lidarr']:
result = core.Music().process(section_name, output_destination, input_name,
status, client_agent, input_category)
elif section_name == 'Mylar':
result = core.Comic().process_episode(section_name, output_destination, input_name,
status, client_agent, input_category)
elif section_name == 'Gamez':
result = core.Game().process(section_name, output_destination, input_name,
status, client_agent, input_category)
plex_update(inputCategory)
plex_update(input_category)
if result[0] != 0:
if not core.TORRENT_RESUME_ON_FAILURE:
logger.error("A problem was reported in the autoProcess* script. "
"Torrent won't resume seeding (settings)")
elif clientAgent != 'manual':
elif client_agent != 'manual':
logger.error("A problem was reported in the autoProcess* script. "
"If torrent was paused we will resume seeding")
core.resume_torrent(clientAgent, inputHash, inputID, inputName)
core.resume_torrent(client_agent, input_hash, input_id, input_name)
else:
if clientAgent != 'manual':
if client_agent != 'manual':
# update download status in our DB
core.update_downloadInfoStatus(inputName, 1)
core.update_download_info_status(input_name, 1)
# remove torrent
if core.USELINK == 'move-sym' and not core.DELETE_ORIGINAL == 1:
logger.debug('Checking for sym-links to re-direct in: {0}'.format(inputDirectory))
for dirpath, dirs, files in os.walk(inputDirectory):
logger.debug('Checking for sym-links to re-direct in: {0}'.format(input_directory))
for dirpath, dirs, files in os.walk(input_directory):
for file in files:
logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath, file)))
replace_links(os.path.join(dirpath, file))
core.remove_torrent(clientAgent, inputHash, inputID, inputName)
core.remove_torrent(client_agent, input_hash, input_id, input_name)
if not sectionName == 'UserScript':
if not section_name == 'UserScript':
# for user script, we assume this is cleaned by the script or option USER_SCRIPT_CLEAN
# cleanup our processing folders of any misc unwanted files and empty directories
core.cleanDir(outputDestination, sectionName, inputCategory)
core.clean_dir(output_destination, section_name, input_category)
return result
@@ -287,7 +287,7 @@ def main(args):
core.initialize()
# clientAgent for Torrents
clientAgent = core.TORRENT_CLIENTAGENT
client_agent = core.TORRENT_CLIENTAGENT
logger.info("#########################################################")
logger.info("## ..::[{0}]::.. ##".format(os.path.basename(__file__)))
@@ -300,13 +300,13 @@ def main(args):
result = [0, ""]
try:
inputDirectory, inputName, inputCategory, inputHash, inputID = core.parse_args(clientAgent, args)
input_directory, input_name, input_category, input_hash, input_id = core.parse_args(client_agent, args)
except:
logger.error("There was a problem loading variables")
return -1
if inputDirectory and inputName and inputHash and inputID:
result = processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent)
if input_directory and input_name and input_hash and input_id:
result = process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent)
else:
# Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")
@@ -315,42 +315,42 @@ def main(args):
for subsection in subsections:
if not core.CFG[section][subsection].isenabled():
continue
for dirName in core.getDirs(section, subsection, link='hard'):
for dir_name in core.get_dirs(section, subsection, link='hard'):
logger.info("Starting manual run for {0}:{1} - Folder:{2}".format
(section, subsection, dirName))
(section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format
(os.path.basename(dirName)))
core.DOWNLOADINFO = core.get_downloadInfo(os.path.basename(dirName), 0)
(os.path.basename(dir_name)))
core.DOWNLOADINFO = core.get_download_info(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO:
clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
inputHash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
inputID = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
input_hash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
input_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
logger.info("Found download info for {0}, "
"setting variables now ...".format(os.path.basename(dirName)))
"setting variables now ...".format(os.path.basename(dir_name)))
else:
logger.info('Unable to locate download info for {0}, '
'continuing to try and process this release ...'.format
(os.path.basename(dirName)))
clientAgent = 'manual'
inputHash = ''
inputID = ''
(os.path.basename(dir_name)))
client_agent = 'manual'
input_hash = ''
input_id = ''
if clientAgent.lower() not in core.TORRENT_CLIENTS:
if client_agent.lower() not in core.TORRENT_CLIENTS:
continue
try:
dirName = dirName.encode(core.SYS_ENCODING)
dir_name = dir_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
inputName = os.path.basename(dirName)
input_name = os.path.basename(dir_name)
try:
inputName = inputName.encode(core.SYS_ENCODING)
input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
results = processTorrent(dirName, inputName, subsection, inputHash or None, inputID or None,
clientAgent)
results = process_torrent(dir_name, input_name, subsection, input_hash or None, input_id or None,
client_agent)
if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format
(section, subsection))
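For reference, a sketch of the manual-run lookup using the helpers renamed above (get_dirs, get_download_info, update_download_info_status replace getDirs, get_downloadInfo, update_downloadInfoStatus). The section and subsection values are placeholders.

import os

import core

core.initialize()
for dir_name in core.get_dirs('SickBeard', 'tv', link='hard'):   # placeholder section/subsection
    info = core.get_download_info(os.path.basename(dir_name), 0)
    client_agent = info[0].get('client_agent', 'manual') if info else 'manual'
    # ... pass client_agent and the stored hash/id to process_torrent() as above,
    # then mark the download as processed in the database:
    core.update_download_info_status(os.path.basename(dir_name), 1)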

View file

@@ -37,18 +37,18 @@ import six
from six.moves import reload_module
from core import logger, nzbToMediaDB, versionCheck
from core.autoProcess.autoProcessComics import autoProcessComics
from core.autoProcess.autoProcessGames import autoProcessGames
from core.autoProcess.autoProcessMovie import autoProcessMovie
from core.autoProcess.autoProcessMusic import autoProcessMusic
from core.autoProcess.autoProcessTV import autoProcessTV
from core.autoProcess.autoProcessComics import Comic
from core.autoProcess.autoProcessGames import Game
from core.autoProcess.autoProcessMovie import Movie
from core.autoProcess.autoProcessMusic import Music
from core.autoProcess.autoProcessTV import TV
from core.databases import mainDB
from core.nzbToMediaConfig import config
from core.nzbToMediaUtil import (
RunningProcess, WakeUp, category_search, cleanDir, cleanDir, copy_link,
create_torrent_class, extractFiles, flatten, getDirs, get_downloadInfo,
listMediaFiles, makeDir, parse_args, pause_torrent, remove_torrent,
resume_torrent, rmDir, rmReadOnly, sanitizeName, update_downloadInfoStatus,
RunningProcess, wake_up, category_search, clean_dir, clean_dir, copy_link,
create_torrent_class, extract_files, flatten, get_dirs, get_download_info,
list_media_files, make_dir, parse_args, pause_torrent, remove_torrent,
resume_torrent, remove_dir, remove_read_only, sanitize_name, update_download_info_status,
)
from core.transcoder import transcoder
@@ -255,7 +255,7 @@ def initialize(section=None):
LOG_FILE = os.environ['NTM_LOGFILE']
LOG_DIR = os.path.split(LOG_FILE)[0]
if not makeDir(LOG_DIR):
if not make_dir(LOG_DIR):
print("No log folder, logging to screen only")
MYAPP = RunningProcess()
@@ -291,7 +291,7 @@ def initialize(section=None):
sys.exit(1)
# init logging
logger.ntm_log_instance.initLogging()
logger.ntm_log_instance.init_logging()
# run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options.
if not config.migrate():
@@ -320,7 +320,7 @@ def initialize(section=None):
logger.info("{0}: {1}".format(item, os.environ[item]), "ENVIRONMENT")
# initialize the main SB database
nzbToMediaDB.upgradeDatabase(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)
nzbToMediaDB.upgrade_database(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)
# Set Version and GIT variables
NZBTOMEDIA_VERSION = '11.06'
@@ -357,7 +357,7 @@ def initialize(section=None):
system=platform.system(), release=platform.release()))
if int(CFG["WakeOnLan"]["wake"]) == 1:
WakeUp()
wake_up()
NZB_CLIENTAGENT = CFG["Nzb"]["clientAgent"] # sabnzbd
SABNZBDHOST = CFG["Nzb"]["sabnzbd_host"]
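core now re-exports the renamed handler classes, so callers dispatch on core.Comic, core.Game, core.Movie, core.Music, and core.TV instead of the old autoProcess* names. An illustrative dispatch assembled from the calls in the updated torrent entry point earlier in this diff; this is a sketch, not code added by the PR.

import core


def post_process(section_name, output_destination, input_name, status,
                 client_agent, input_hash, input_category):
    # Mirrors the section dispatch in the renamed process_torrent() above.
    if section_name in ['CouchPotato', 'Radarr']:
        return core.Movie().process(section_name, output_destination, input_name,
                                    status, client_agent, input_hash, input_category)
    if section_name in ['SickBeard', 'NzbDrone', 'Sonarr']:
        return core.TV().process_episode(section_name, output_destination, input_name,
                                         status, client_agent, input_hash, input_category)
    if section_name in ['HeadPhones', 'Lidarr']:
        return core.Music().process(section_name, output_destination, input_name,
                                    status, client_agent, input_category)
    if section_name == 'Mylar':
        return core.Comic().process_episode(section_name, output_destination, input_name,
                                            status, client_agent, input_category)
    if section_name == 'Gamez':
        return core.Game().process(section_name, output_destination, input_name,
                                   status, client_agent, input_category)
    return [0, ""]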

View file

@@ -6,18 +6,17 @@ import requests
import core
from core import logger
from core.nzbToMediaUtil import convert_to_ascii, remoteDir, server_responding
from core.nzbToMediaUtil import convert_to_ascii, remote_dir, server_responding
requests.packages.urllib3.disable_warnings()
class autoProcessComics(object):
def processEpisode(self, section, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
class Comic(object):
def process_episode(self, section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None):
apc_version = "2.04"
comicrn_version = "1.01"
cfg = dict(core.CFG[section][inputCategory])
cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -32,19 +31,19 @@ class autoProcessComics(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
inputName, dirName = convert_to_ascii(inputName, dirName)
clean_name, ext = os.path.splitext(inputName)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
clean_name, ext = os.path.splitext(input_name)
if len(ext) == 4: # we assume this was a standard extension.
inputName = clean_name
input_name = clean_name
params = {
'cmd': 'forceProcess',
'apikey': apikey,
'nzb_folder': remoteDir(dirName) if remote_path else dirName,
'nzb_folder': remote_dir(dir_name) if remote_path else dir_name,
}
if inputName is not None:
params['nzb_name'] = inputName
if input_name is not None:
params['nzb_name'] = input_name
params['failed'] = int(status)
params['apc_version'] = apc_version
params['comicrn_version'] = comicrn_version
@@ -72,7 +71,7 @@ class autoProcessComics(object):
if success:
logger.postprocess("SUCCESS: This issue has been processed successfully", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
else:
logger.warning("The issue does not appear to have successfully processed. Please check your Logs", section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]

View file

@@ -12,11 +12,11 @@ from core.nzbToMediaUtil import convert_to_ascii, server_responding
requests.packages.urllib3.disable_warnings()
class autoProcessGames(object):
def process(self, section, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
class Game(object):
def process(self, section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None):
status = int(status)
cfg = dict(core.CFG[section][inputCategory])
cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -31,19 +31,19 @@ class autoProcessGames(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
inputName, dirName = convert_to_ascii(inputName, dirName)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
fields = inputName.split("-")
fields = input_name.split("-")
gamezID = fields[0].replace("[", "").replace("]", "").replace(" ", "")
gamez_id = fields[0].replace("[", "").replace("]", "").replace(" ", "")
downloadStatus = 'Downloaded' if status == 0 else 'Wanted'
download_status = 'Downloaded' if status == 0 else 'Wanted'
params = {
'api_key': apikey,
'mode': 'UPDATEREQUESTEDSTATUS',
'db_id': gamezID,
'status': downloadStatus
'db_id': gamez_id,
'status': download_status
}
logger.debug("Opening URL: {0}".format(url), section)
@@ -59,9 +59,9 @@ class autoProcessGames(object):
if library:
logger.postprocess("moving files to library: {0}".format(library), section)
try:
shutil.move(dirName, os.path.join(library, inputName))
shutil.move(dir_name, os.path.join(library, input_name))
except:
logger.error("Unable to move {0} to {1}".format(dirName, os.path.join(library, inputName)), section)
logger.error("Unable to move {0} to {1}".format(dir_name, os.path.join(library, input_name)), section)
return [1, "{0}: Failed to post-process - Unable to move files".format(section)]
else:
logger.error("No library specified to move files to. Please edit your configuration.", section)
@@ -71,8 +71,8 @@ class autoProcessGames(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif result['success']:
logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamezID, downloadStatus), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
logger.postprocess("SUCCESS: Status for {0} has been set to {1} in Gamez".format(gamez_id, download_status), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
else:
logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamezID), section)
logger.error("FAILED: Status for {0} has NOT been updated in Gamez".format(gamez_id), section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]

View file

@@ -9,30 +9,30 @@ import requests
import core
from core import logger
from core.nzbToMediaSceneExceptions import process_all_exceptions
from core.nzbToMediaUtil import convert_to_ascii, find_download, find_imdbid, import_subs, listMediaFiles, remoteDir, reportNzb, rmDir, server_responding
from core.nzbToMediaUtil import convert_to_ascii, find_download, find_imdbid, import_subs, list_media_files, remote_dir, report_nzb, remove_dir, server_responding
from core.transcoder import transcoder
requests.packages.urllib3.disable_warnings()
class autoProcessMovie(object):
def get_release(self, baseURL, imdbid=None, download_id=None, release_id=None):
class Movie(object):
def get_release(self, base_url, imdb_id=None, download_id=None, release_id=None):
results = {}
params = {}
# determine cmd and params to send to CouchPotato to get our results
section = 'movies'
cmd = "media.list"
if release_id or imdbid:
if release_id or imdb_id:
section = 'media'
cmd = "media.get"
params['id'] = release_id or imdbid
params['id'] = release_id or imdb_id
if not (release_id or imdbid or download_id):
if not (release_id or imdb_id or download_id):
logger.debug("No information available to filter CP results")
return results
url = "{0}{1}".format(baseURL, cmd)
url = "{0}{1}".format(base_url, cmd)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params))
try:
@@ -129,7 +129,7 @@ class autoProcessMovie(object):
logger.error("{0} did not return expected json data.".format(section), section)
return None
def CDH(self, url2, headers, section="MAIN"):
def completed_download_handling(self, url2, headers, section="MAIN"):
try:
r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
except requests.ConnectionError:
@@ -145,9 +145,9 @@ class autoProcessMovie(object):
# ValueError catches simplejson's JSONDecodeError and json's ValueError
return False
def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", download_id="", inputCategory=None, failureLink=None):
def process(self, section, dir_name, input_name=None, status=0, client_agent="manual", download_id="", input_category=None, failure_link=None):
cfg = dict(core.CFG[section][inputCategory])
cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -158,9 +158,9 @@ class autoProcessMovie(object):
method = None
#added importMode for Radarr config
if section == "Radarr":
importMode = cfg.get("importMode","Move")
import_mode = cfg.get("importMode","Move")
else:
importMode = None
import_mode = None
delete_failed = int(cfg["delete_failed"])
wait_for = int(cfg["wait_for"])
ssl = int(cfg.get("ssl", 0))
@@ -174,19 +174,19 @@ class autoProcessMovie(object):
else:
extract = int(cfg.get("extract", 0))
imdbid = find_imdbid(dirName, inputName, omdbapikey)
imdbid = find_imdbid(dir_name, input_name, omdbapikey)
if section == "CouchPotato":
baseURL = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey)
base_url = "{0}{1}:{2}{3}/api/{4}/".format(protocol, host, port, web_root, apikey)
if section == "Radarr":
baseURL = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
base_url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root)
headers = {'X-Api-Key': apikey}
if not apikey:
logger.info('No CouchPotato or Radarr apikey entered. Performing transcoder functions only')
release = None
elif server_responding(baseURL):
elif server_responding(base_url):
if section == "CouchPotato":
release = self.get_release(baseURL, imdbid, download_id)
release = self.get_release(base_url, imdbid, download_id)
else:
release = None
else:
@@ -208,30 +208,30 @@ class autoProcessMovie(object):
except:
pass
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0]
if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName))
cleanName = os.path.splitext(SpecificPath)
if cleanName[1] == ".nzb":
SpecificPath = cleanName[0]
if os.path.isdir(SpecificPath):
dirName = SpecificPath
specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb":
specific_path = clean_name[0]
if os.path.isdir(specific_path):
dir_name = specific_path
process_all_exceptions(inputName, dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
process_all_exceptions(input_name, dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
core.extractFiles(dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if not list_media_files(dir_name, media=True, audio=False, meta=False, archives=False) and list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extract_files(dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
good_files = 0
num_files = 0
# Check video files for corruption
for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
num_files += 1
if transcoder.isVideoGood(video, status):
if transcoder.is_video_good(video, status):
import_subs(video)
good_files += 1
if num_files and good_files == num_files:
@@ -242,51 +242,51 @@ class autoProcessMovie(object):
logger.info("Status shown as success from Downloader, but corrupt video files found. Setting as failed.", section)
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
if failureLink:
failureLink += '&corrupt=true'
if failure_link:
failure_link += '&corrupt=true'
status = 1
elif clientAgent == "manual":
logger.warning("No media files found in directory {0} to manually process.".format(dirName), section)
elif client_agent == "manual":
logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section)
return [0, ""] # Success (as far as this script is concerned)
else:
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section)
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section)
status = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
if status == 0:
if core.TRANSCODE == 1:
result, newDirName = transcoder.Transcode_directory(dirName)
result, new_dir_name = transcoder.transcode_directory(dir_name)
if result == 0:
logger.debug("Transcoding succeeded for files in {0}".format(dirName), section)
dirName = newDirName
logger.debug("Transcoding succeeded for files in {0}".format(dir_name), section)
dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
if chmod_directory:
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section)
core.rchmod(dirName, chmod_directory)
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section)
core.rchmod(dir_name, chmod_directory)
else:
logger.error("Transcoding failed for files in {0}".format(dirName), section)
logger.error("Transcoding failed for files in {0}".format(dir_name), section)
return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]
for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
if not release and ".cp(tt" not in video and imdbid:
videoName, videoExt = os.path.splitext(video)
video2 = "{0}.cp({1}){2}".format(videoName, imdbid, videoExt)
if not (clientAgent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'):
video_name, video_ext = os.path.splitext(video)
video2 = "{0}.cp({1}){2}".format(video_name, imdbid, video_ext)
if not (client_agent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'):
logger.debug('Renaming: {0} to: {1}'.format(video, video2))
os.rename(video, video2)
if not apikey: #If only using Transcoder functions, exit here.
logger.info('No CouchPotato or Radarr apikey entered. Processing completed.')
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
params = {}
if download_id and release_id:
params['downloader'] = downloader or clientAgent
params['downloader'] = downloader or client_agent
params['download_id'] = download_id
params['media_folder'] = remoteDir(dirName) if remote_path else dirName
params['media_folder'] = remote_dir(dir_name) if remote_path else dir_name
if section == "CouchPotato":
if method == "manage":
@@ -295,22 +295,22 @@ class autoProcessMovie(object):
else:
command = "renamer.scan"
url = "{0}{1}".format(baseURL, command)
url = "{0}{1}".format(base_url, command)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
logger.postprocess("Starting {0} scan for {1}".format(method, inputName), section)
logger.postprocess("Starting {0} scan for {1}".format(method, input_name), section)
if section == "Radarr":
payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id,'importMode' : importMode}
payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id,'importMode' : import_mode}
if not download_id:
payload.pop("downloadClientId")
logger.debug("Opening URL: {0} with PARAMS: {1}".format(baseURL, payload), section)
logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(inputName), section)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(base_url, payload), section)
logger.postprocess("Starting DownloadedMoviesScan scan for {0}".format(input_name), section)
try:
if section == "CouchPotato":
r = requests.get(url, params=params, verify=False, timeout=(30, 1800))
else:
r = requests.post(baseURL, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
r = requests.post(base_url, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
except requests.ConnectionError:
logger.error("Unable to open URL", section)
return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
@@ -320,47 +320,47 @@ class autoProcessMovie(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif section == "CouchPotato" and result['success']:
logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dirName), section)
logger.postprocess("SUCCESS: Finished {0} scan for folder {1}".format(method, dir_name), section)
if method == "manage":
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif section == "Radarr":
logger.postprocess("Radarr response: {0}".format(result['state']))
try:
res = json.loads(r.content)
scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True
started = True
except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None
else:
logger.error("FAILED: {0} scan was unable to finish for folder {1}. exiting!".format(method, dirName),
logger.error("FAILED: {0} scan was unable to finish for folder {1}. exiting!".format(method, dir_name),
section)
return [1, "{0}: Failed to post-process - Server did not return success".format(section)]
else:
core.FAILED = True
logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(inputName), section)
if failureLink:
reportNzb(failureLink, clientAgent)
logger.postprocess("FAILED DOWNLOAD DETECTED FOR {0}".format(input_name), section)
if failure_link:
report_nzb(failure_link, client_agent)
if section == "Radarr":
logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section)
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
remove_dir(dir_name)
if not release_id and not media_id:
logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(inputName),
logger.error("Could not find a downloaded movie in the database matching {0}, exiting!".format(input_name),
section)
return [1, "{0}: Failed to post-process - Failed download not found in {1}".format(section, section)]
if release_id:
logger.postprocess("Setting failed release {0} to ignored ...".format(inputName), section)
logger.postprocess("Setting failed release {0} to ignored ...".format(input_name), section)
url = "{url}release.ignore".format(url=baseURL)
url = "{url}release.ignore".format(url=base_url)
params = {'id': release_id}
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
@@ -376,14 +376,14 @@ class autoProcessMovie(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif result['success']:
logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(inputName), section)
logger.postprocess("SUCCESS: {0} has been set to ignored ...".format(input_name), section)
else:
logger.warning("FAILED: Unable to set {0} to ignored!".format(inputName), section)
return [1, "{0}: Failed to post-process - Unable to set {1} to ignored".format(section, inputName)]
logger.warning("FAILED: Unable to set {0} to ignored!".format(input_name), section)
return [1, "{0}: Failed to post-process - Unable to set {1} to ignored".format(section, input_name)]
logger.postprocess("Trying to snatch the next highest ranked release.", section)
url = "{0}movie.searcher.try_next".format(baseURL)
url = "{0}movie.searcher.try_next".format(base_url)
logger.debug("Opening URL: {0}".format(url), section)
try:
@@ -412,7 +412,7 @@ class autoProcessMovie(object):
while time.time() < timeout: # only wait 2 (default) minutes, then return.
logger.postprocess("Checking for status change, please stand by ...", section)
if section == "CouchPotato":
release = self.get_release(baseURL, imdbid, download_id, release_id)
release = self.get_release(base_url, imdbid, download_id, release_id)
scan_id = None
else:
release = None
@@ -424,44 +424,44 @@ class autoProcessMovie(object):
if release_status_old is None: # we didn't have a release before, but now we do.
logger.postprocess("SUCCESS: Movie {0} has now been added to CouchPotato with release status of [{1}]".format(
title, str(release_status_new).upper()), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
if release_status_new != release_status_old:
logger.postprocess("SUCCESS: Release for {0} has now been marked with a status of [{1}]".format(
title, str(release_status_new).upper()), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
except:
pass
elif scan_id:
url = "{0}/{1}".format(baseURL, scan_id)
url = "{0}/{1}".format(base_url, scan_id)
command_status = self.command_complete(url, params, headers, section)
if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section)
if command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ]
# return [1, "%s: Failed to post-process %s" % (section, input_name) ]
if not os.path.isdir(dirName):
if not os.path.isdir(dir_name):
logger.postprocess("SUCCESS: Input Directory [{0}] has been processed and removed".format(
dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=True):
elif not list_media_files(dir_name, media=True, audio=False, meta=False, archives=True):
logger.postprocess("SUCCESS: Input Directory [{0}] has no remaining media files. This has been fully processed.".format(
dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
# pause and let CouchPotatoServer/Radarr catch its breath
time.sleep(10 * wait_for)
# The status hasn't changed. we have waited wait_for minutes which is more than enough. uTorrent can resume seeding now.
if section == "Radarr" and self.CDH(url2, headers, section=section):
if section == "Radarr" and self.completed_download_handling(url2, headers, section=section):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
logger.warning(
"{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(inputName, wait_for),
"{0} does not appear to have changed status after {1} minutes, Please check your logs.".format(input_name, wait_for),
section)
return [1, "{0}: Failed to post-process - No change in status".format(section)]
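The old CDH() helper is now completed_download_handling(); a standalone sketch of the Radarr check, with placeholder host, port, and API key (no web_root assumed).

import core

headers = {'X-Api-Key': 'placeholder-radarr-apikey'}
url2 = 'http://localhost:7878/api/config/downloadClient'   # placeholder host:port
if core.Movie().completed_download_handling(url2, headers, section='Radarr'):
    print('Complete Download Handling is enabled; passing the release back to Radarr.')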

View file

@@ -9,12 +9,12 @@ import requests
import core
from core import logger
from core.nzbToMediaSceneExceptions import process_all_exceptions
from core.nzbToMediaUtil import convert_to_ascii, listMediaFiles, remoteDir, rmDir, server_responding
from core.nzbToMediaUtil import convert_to_ascii, list_media_files, remote_dir, remove_dir, server_responding
requests.packages.urllib3.disable_warnings()
class autoProcessMusic(object):
class Music(object):
def command_complete(self, url, params, headers, section):
try:
r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60))
@@ -32,8 +32,8 @@ class autoProcessMusic(object):
logger.error("{0} did not return expected json data.".format(section), section)
return None
def get_status(self, url, apikey, dirName):
logger.debug("Attempting to get current status for release:{0}".format(os.path.basename(dirName)))
def get_status(self, url, apikey, dir_name):
logger.debug("Attempting to get current status for release:{0}".format(os.path.basename(dir_name)))
params = {
'apikey': apikey,
@@ -55,13 +55,13 @@ class autoProcessMusic(object):
return None
for album in result:
if os.path.basename(dirName) == album['FolderName']:
if os.path.basename(dir_name) == album['FolderName']:
return album["Status"].lower()
def forceProcess(self, params, url, apikey, inputName, dirName, section, wait_for):
release_status = self.get_status(url, apikey, dirName)
def force_process(self, params, url, apikey, input_name, dir_name, section, wait_for):
release_status = self.get_status(url, apikey, dir_name)
if not release_status:
logger.error("Could not find a status for {0}, is it in the wanted list ?".format(inputName), section)
logger.error("Could not find a status for {0}, is it in the wanted list ?".format(input_name), section)
logger.debug("Opening URL: {0} with PARAMS: {1}".format(url, params), section)
@@ -77,29 +77,29 @@ class autoProcessMusic(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
elif r.text == "OK":
logger.postprocess("SUCCESS: Post-Processing started for {0} in folder {1} ...".format(inputName, dirName), section)
logger.postprocess("SUCCESS: Post-Processing started for {0} in folder {1} ...".format(input_name, dir_name), section)
else:
logger.error("FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!".format(inputName, dirName), section)
logger.error("FAILED: Post-Processing has NOT started for {0} in folder {1}. exiting!".format(input_name, dir_name), section)
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]
# we will now wait for this album to be processed before returning to TorrentToMedia and unpausing.
timeout = time.time() + 60 * wait_for
while time.time() < timeout:
current_status = self.get_status(url, apikey, dirName)
current_status = self.get_status(url, apikey, dir_name)
if current_status is not None and current_status != release_status: # Something has changed. CPS must have processed this movie.
logger.postprocess("SUCCESS: This release is now marked as status [{0}]".format(current_status), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
if not os.path.isdir(dirName):
logger.postprocess("SUCCESS: The input directory {0} has been removed Processing must have finished.".format(dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
if not os.path.isdir(dir_name):
logger.postprocess("SUCCESS: The input directory {0} has been removed Processing must have finished.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
time.sleep(10 * wait_for)
# The status hasn't changed.
return [2, "no change"]
def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", inputCategory=None):
def process(self, section, dir_name, input_name=None, status=0, client_agent="manual", input_category=None):
status = int(status)
cfg = dict(core.CFG[section][inputCategory])
cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -124,25 +124,25 @@ class autoProcessMusic(object):
logger.error("Server did not respond. Exiting", section)
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0]
if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName))
cleanName = os.path.splitext(SpecificPath)
if cleanName[1] == ".nzb":
SpecificPath = cleanName[0]
if os.path.isdir(SpecificPath):
dirName = SpecificPath
specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb":
specific_path = clean_name[0]
if os.path.isdir(specific_path):
dir_name = specific_path
process_all_exceptions(inputName, dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
process_all_exceptions(input_name, dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
if not listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
core.extractFiles(dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if not list_media_files(dir_name, media=False, audio=True, meta=False, archives=False) and list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extract_files(dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
#if listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and status:
#if listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and status:
# logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section)
# status = 0
@@ -151,20 +151,20 @@ class autoProcessMusic(object):
params = {
'apikey': apikey,
'cmd': "forceProcess",
'dir': remoteDir(dirName) if remote_path else dirName
'dir': remote_dir(dir_name) if remote_path else dir_name
}
res = self.forceProcess(params, url, apikey, inputName, dirName, section, wait_for)
res = self.force_process(params, url, apikey, input_name, dir_name, section, wait_for)
if res[0] in [0, 1]:
return res
params = {
'apikey': apikey,
'cmd': "forceProcess",
'dir': os.path.split(remoteDir(dirName))[0] if remote_path else os.path.split(dirName)[0]
'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0]
}
res = self.forceProcess(params, url, apikey, inputName, dirName, section, wait_for)
res = self.force_process(params, url, apikey, input_name, dir_name, section, wait_for)
if res[0] in [0, 1]:
return res
@@ -176,11 +176,11 @@ class autoProcessMusic(object):
url = "{0}{1}:{2}{3}/api/v1/command".format(protocol, host, port, web_root)
headers = {"X-Api-Key": apikey}
if remote_path:
logger.debug("remote_path: {0}".format(remoteDir(dirName)), section)
data = {"name": "Rename", "path": remoteDir(dirName)}
logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section)
data = {"name": "Rename", "path": remote_dir(dir_name)}
else:
logger.debug("path: {0}".format(dirName), section)
data = {"name": "Rename", "path": dirName}
logger.debug("path: {0}".format(dir_name), section)
data = {"name": "Rename", "path": dir_name}
data = json.dumps(data)
try:
logger.debug("Opening URL: {0} with data: {1}".format(url, data), section)
@@ -189,18 +189,18 @@ class autoProcessMusic(object):
logger.error("Unable to open URL: {0}".format(url), section)
return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
Success = False
Queued = False
Started = False
success = False
queued = False
started = False
try:
res = json.loads(r.content)
scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True
started = True
except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None
Started = False
started = False
return [1, "{0}: Failed to post-process - Unable to start scan".format(section)]
n = 0
@@ -214,15 +214,15 @@ class autoProcessMusic(object):
n += 1
if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section)
if not os.path.exists(dirName):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ]
# return [1, "%s: Failed to post-process %s" % (section, input_name) ]
else:
logger.debug("The Scan command did not return status completed. Passing back to {0} to attempt complete download handling.".format(section), section)
return [status, "{0}: Passing back to {1} to attempt Complete Download Handling".format(section, section)]
@@ -233,7 +233,7 @@ class autoProcessMusic(object):
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
else:
logger.warning("FAILED DOWNLOAD DETECTED", section)
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
remove_dir(dir_name)
return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader.
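The hunks above keep the same command flow through the rename: POST a Rename command, remember the returned scan id, then poll that command until it reports completed/failed (or the directory disappears). A rough, self-contained sketch of the polling half; the `fetch_status` callable stands in for the HTTP status lookup and is an assumption, not the real client:

import time


def wait_for_command(fetch_status, timeout=300, interval=10):
    """Poll `fetch_status()` until it returns 'completed' or 'failed'.

    Returns the final status string, or None if the timeout is reached.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        status = fetch_status()
        if status in ('completed', 'failed'):
            return status
        time.sleep(interval)
    return None


# Fake status source that completes on the third poll.
states = iter(['queued', 'started', 'completed'])
print(wait_for_command(lambda: next(states), timeout=5, interval=0))  # completed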

View file

@@ -10,15 +10,15 @@ import requests
import core
from core import logger
from core.nzbToMediaAutoFork import autoFork
from core.nzbToMediaAutoFork import auto_fork
from core.nzbToMediaSceneExceptions import process_all_exceptions
from core.nzbToMediaUtil import convert_to_ascii, flatten, import_subs, listMediaFiles, remoteDir, reportNzb, rmDir, server_responding
from core.nzbToMediaUtil import convert_to_ascii, flatten, import_subs, list_media_files, remote_dir, report_nzb, remove_dir, server_responding
from core.transcoder import transcoder
requests.packages.urllib3.disable_warnings()
class autoProcessTV(object):
class TV(object):
def command_complete(self, url, params, headers, section):
try:
r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60))
@@ -36,7 +36,7 @@ class autoProcessTV(object):
logger.error("{0} did not return expected json data.".format(section), section)
return None
def CDH(self, url2, headers, section="MAIN"):
def completed_download_handling(self, url2, headers, section="MAIN"):
try:
r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
except requests.ConnectionError:
@@ -52,9 +52,9 @@ class autoProcessTV(object):
# ValueError catches simplejson's JSONDecodeError and json's ValueError
return False
def processEpisode(self, section, dirName, inputName=None, failed=False, clientAgent="manual", download_id=None, inputCategory=None, failureLink=None):
def process_episode(self, section, dir_name, input_name=None, failed=False, client_agent="manual", download_id=None, input_category=None, failure_link=None):
cfg = dict(core.CFG[section][inputCategory])
cfg = dict(core.CFG[section][input_category])
host = cfg["host"]
port = cfg["port"]
@@ -67,7 +67,7 @@ class autoProcessTV(object):
if server_responding("{0}{1}:{2}{3}".format(protocol, host, port, web_root)):
# auto-detect correct fork
fork, fork_params = autoFork(section, inputCategory)
fork, fork_params = auto_fork(section, input_category)
elif not username and not apikey:
logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only')
fork, fork_params = "None", {}
@@ -76,9 +76,9 @@ class autoProcessTV(object):
return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
delete_failed = int(cfg.get("delete_failed", 0))
nzbExtractionBy = cfg.get("nzbExtractionBy", "Downloader")
nzb_extraction_by = cfg.get("nzbExtractionBy", "Downloader")
process_method = cfg.get("process_method")
if clientAgent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym":
if client_agent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym":
process_method = "symlink"
remote_path = int(cfg.get("remote_path", 0))
wait_for = int(cfg.get("wait_for", 2))
@@ -91,49 +91,49 @@ class autoProcessTV(object):
else:
extract = int(cfg.get("extract", 0))
#get importmode, default to "Move" for consistency with legacy
importMode = cfg.get("importMode","Move")
import_mode = cfg.get("importMode","Move")
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
dirName = os.path.split(os.path.normpath(dirName))[0]
if not os.path.isdir(dir_name) and os.path.isfile(dir_name): # If the input directory is a file, assume single file download and split dir/name.
dir_name = os.path.split(os.path.normpath(dir_name))[0]
SpecificPath = os.path.join(dirName, str(inputName))
cleanName = os.path.splitext(SpecificPath)
if cleanName[1] == ".nzb":
SpecificPath = cleanName[0]
if os.path.isdir(SpecificPath):
dirName = SpecificPath
specific_path = os.path.join(dir_name, str(input_name))
clean_name = os.path.splitext(specific_path)
if clean_name[1] == ".nzb":
specific_path = clean_name[0]
if os.path.isdir(specific_path):
dir_name = specific_path
# Attempt to create the directory if it doesn't exist and ignore any
# error stating that it already exists. This fixes a bug where SickRage
# won't process the directory because it doesn't exist.
try:
os.makedirs(dirName) # Attempt to create the directory
os.makedirs(dir_name) # Attempt to create the directory
except OSError as e:
# Re-raise the error if it wasn't about the directory not existing
if e.errno != errno.EEXIST:
raise
if 'process_method' not in fork_params or (clientAgent in ['nzbget', 'sabnzbd'] and nzbExtractionBy != "Destination"):
if inputName:
process_all_exceptions(inputName, dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != "Destination"):
if input_name:
process_all_exceptions(input_name, dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
# Now check if tv files exist in destination.
if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
if listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
core.extractFiles(dirName)
inputName, dirName = convert_to_ascii(inputName, dirName)
if not list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
if list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
core.extract_files(dir_name)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
if listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False): # Check that a video exists. if not, assume failed.
flatten(dirName)
if list_media_files(dir_name, media=True, audio=False, meta=False, archives=False): # Check that a video exists. if not, assume failed.
flatten(dir_name)
# Check video files for corruption
good_files = 0
num_files = 0
for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
num_files += 1
if transcoder.isVideoGood(video, status):
if transcoder.is_video_good(video, status):
good_files += 1
import_subs(video)
if num_files > 0:
@@ -147,12 +147,12 @@ class autoProcessTV(object):
failed = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
if failureLink:
failureLink += '&corrupt=true'
elif clientAgent == "manual":
logger.warning("No media files found in directory {0} to manually process.".format(dirName), section)
if failure_link:
failure_link += '&corrupt=true'
elif client_agent == "manual":
logger.warning("No media files found in directory {0} to manually process.".format(dir_name), section)
return [0, ""] # Success (as far as this script is concerned)
elif nzbExtractionBy == "Destination":
elif nzb_extraction_by == "Destination":
logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.")
if int(failed) == 0:
logger.info("Setting Status Success.")
@@ -163,32 +163,32 @@ class autoProcessTV(object):
status = 1
failed = 1
else:
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section)
logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dir_name), section)
status = 1
failed = 1
if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
print('[NZB] MARK=BAD')
if status == 0 and core.TRANSCODE == 1: # only transcode successful downloads
result, newDirName = transcoder.Transcode_directory(dirName)
result, new_dir_name = transcoder.transcode_directory(dir_name)
if result == 0:
logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dirName), section)
dirName = newDirName
logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dir_name), section)
dir_name = new_dir_name
chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
if chmod_directory:
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section)
core.rchmod(dirName, chmod_directory)
logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dir_name), section)
core.rchmod(dir_name, chmod_directory)
else:
logger.error("FAILED: Transcoding failed for files in {0}".format(dirName), section)
logger.error("FAILED: Transcoding failed for files in {0}".format(dir_name), section)
return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]
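One detail in the block above worth spelling out: `chmodDirectory` is read as a string and parsed with base 8, so a config value of "775" becomes the octal mode 0o775 before it is applied recursively, and the default "0" means "do not chmod". A small sketch of that parse-and-apply step; the recursive walk here is an illustrative stand-in for the core.rchmod call shown in the hunk:

import os


def parse_octal_mode(value, default='0'):
    """Parse a config value like '775' as an octal permission mode."""
    return int(str(value or default), 8)


def rchmod_sketch(root, mode):
    """Apply `mode` to a directory tree (directories and files alike)."""
    os.chmod(root, mode)
    for base, dirs, files in os.walk(root):
        for name in dirs + files:
            os.chmod(os.path.join(base, name), mode)


print(oct(parse_octal_mode('775')))   # 0o775
print(parse_octal_mode('0'))          # 0 -> falsy, so the chmod step is skipped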
# configure SB params to pass
fork_params['quiet'] = 1
fork_params['proc_type'] = 'manual'
if inputName is not None:
fork_params['nzbName'] = inputName
if input_name is not None:
fork_params['nzbName'] = input_name
for param in copy.copy(fork_params):
if param == "failed":
@@ -206,10 +206,10 @@ class autoProcessTV(object):
if "proc_type" in fork_params:
del fork_params['proc_type']
if param in ["dirName", "dir", "proc_dir", "process_directory", "path"]:
fork_params[param] = dirName
if param in ["dir_name", "dir", "proc_dir", "process_directory", "path"]:
fork_params[param] = dir_name
if remote_path:
fork_params[param] = remoteDir(dirName)
fork_params[param] = remote_dir(dir_name)
if param == "process_method":
if process_method:
@@ -244,12 +244,12 @@ class autoProcessTV(object):
if status == 0:
if section == "NzbDrone" and not apikey:
logger.info('No Sonarr apikey entered. Processing completed.')
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section)
else:
core.FAILED = True
if failureLink:
reportNzb(failureLink, clientAgent)
if failure_link:
report_nzb(failure_link, client_agent)
if 'failed' in fork_params:
logger.postprocess("FAILED: The download failed. Sending 'failed' process request to {0} branch".format(fork), section)
elif section == "NzbDrone":
@@ -257,9 +257,9 @@ class autoProcessTV(object):
return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)] # Return as failed to flag this in the downloader.
else:
logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section)
if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
remove_dir(dir_name)
return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)] # Return as failed to flag this in the downloader.
url = None
@@ -274,11 +274,11 @@ class autoProcessTV(object):
headers = {"X-Api-Key": apikey}
# params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
if remote_path:
logger.debug("remote_path: {0}".format(remoteDir(dirName)), section)
data = {"name": "DownloadedEpisodesScan", "path": remoteDir(dirName), "downloadClientId": download_id, "importMode": importMode}
logger.debug("remote_path: {0}".format(remote_dir(dir_name)), section)
data = {"name": "DownloadedEpisodesScan", "path": remote_dir(dir_name), "downloadClientId": download_id, "importMode": import_mode}
else:
logger.debug("path: {0}".format(dirName), section)
data = {"name": "DownloadedEpisodesScan", "path": dirName, "downloadClientId": download_id, "importMode": importMode}
logger.debug("path: {0}".format(dir_name), section)
data = {"name": "DownloadedEpisodesScan", "path": dir_name, "downloadClientId": download_id, "importMode": import_mode}
if not download_id:
data.pop("downloadClientId")
data = json.dumps(data)
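The payload handling above follows a common pattern: the DownloadedEpisodesScan command always carries name/path/importMode, but downloadClientId is only meaningful when a download id is known, so it is popped before serialising. A minimal sketch of that payload builder; the field names mirror the hunk, the function itself is illustrative:

import json


def build_scan_payload(path, import_mode='Move', download_id=None):
    """Build the JSON body for a DownloadedEpisodesScan-style command."""
    data = {
        'name': 'DownloadedEpisodesScan',
        'path': path,
        'downloadClientId': download_id,
        'importMode': import_mode,
    }
    if not download_id:
        data.pop('downloadClientId')   # optional field: omit it rather than send null
    return json.dumps(data)


print(build_scan_payload('/downloads/tv/show.s01e01'))
print(build_scan_payload('/downloads/tv/show.s01e01', download_id='SABnzbd_nzo_abc123'))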
@@ -306,45 +306,45 @@ class autoProcessTV(object):
logger.error("Server returned status {0}".format(r.status_code), section)
return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]
Success = False
Queued = False
Started = False
success = False
queued = False
started = False
if section == "SickBeard":
if apikey:
if r.json()['result'] == 'success':
Success = True
success = True
else:
for line in r.iter_lines():
if line:
line = line.decode('utf-8')
logger.postprocess("{0}".format(line), section)
if "Moving file from" in line:
inputName = os.path.split(line)[1]
input_name = os.path.split(line)[1]
if "added to the queue" in line:
Queued = True
queued = True
if "Processing succeeded" in line or "Successfully processed" in line:
Success = True
success = True
if Queued:
if queued:
time.sleep(60)
elif section == "NzbDrone":
try:
res = json.loads(r.content)
scan_id = int(res['id'])
logger.debug("Scan started with id: {0}".format(scan_id), section)
Started = True
started = True
except Exception as e:
logger.warning("No scan id was returned due to: {0}".format(e), section)
scan_id = None
Started = False
started = False
if status != 0 and delete_failed and not os.path.dirname(dirName) == dirName:
logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
rmDir(dirName)
if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name:
logger.postprocess("Deleting failed files and folder {0}".format(dir_name), section)
remove_dir(dir_name)
if Success:
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
elif section == "NzbDrone" and Started:
if success:
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif section == "NzbDrone" and started:
n = 0
params = {}
url = "{0}/{1}".format(url, scan_id)
@@ -356,20 +356,20 @@ class autoProcessTV(object):
n += 1
if command_status:
logger.debug("The Scan command return status: {0}".format(command_status), section)
if not os.path.exists(dirName):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
if not os.path.exists(dir_name):
logger.debug("The directory {0} has been removed. Renaming was successful.".format(dir_name), section)
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['completed']:
logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
return [0, "{0}: Successfully post-processed {1}".format(section, input_name)]
elif command_status and command_status in ['failed']:
logger.debug("The Scan command has failed. Renaming was not successful.", section)
# return [1, "%s: Failed to post-process %s" % (section, inputName) ]
if self.CDH(url2, headers, section=section):
# return [1, "%s: Failed to post-process %s" % (section, input_name) ]
if self.completed_download_handling(url2, headers, section=section):
logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
else:
logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section)
return [1, "{0}: Failed to post-process {1}".format(section, inputName)]
return [1, "{0}: Failed to post-process {1}".format(section, input_name)]
else:
return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)] # We did not receive Success confirmation.
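For the fork-based SickBeard path above, success is inferred by scanning the streamed response line by line for a handful of markers ("added to the queue", "Processing succeeded", "Successfully processed"). A compact sketch of that line scan, detached from the HTTP call; the marker strings are copied from the hunk, the function name is illustrative:

def scan_process_log(lines):
    """Return (success, queued) flags derived from a processing log."""
    success = queued = False
    for raw in lines:
        line = raw.decode('utf-8') if isinstance(raw, bytes) else raw
        if 'added to the queue' in line:
            queued = True
        if 'Processing succeeded' in line or 'Successfully processed' in line:
            success = True
    return success, queued


log = [b'Processing folder /downloads/tv/show',
       b'Moving file from /downloads/tv/show/ep.mkv',
       b'Processing succeeded for /tv/Show/Season 01/ep.mkv']
print(scan_process_log(log))   # (True, False)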

View file

@@ -1,15 +1,15 @@
# coding=utf-8
from core import logger, nzbToMediaDB
from core.nzbToMediaUtil import backupVersionedFile
from core.nzbToMediaUtil import backup_versioned_file
MIN_DB_VERSION = 1 # oldest db version we support migrating from
MAX_DB_VERSION = 2
def backupDatabase(version):
def backup_database(version):
logger.info("Backing up database before upgrade")
if not backupVersionedFile(nzbToMediaDB.dbFilename(), version):
if not backup_versioned_file(nzbToMediaDB.db_filename(), version):
logger.log_error_and_exit("Database backup failed, abort upgrading database")
else:
logger.info("Proceeding with upgrade")
@@ -23,13 +23,13 @@ def backupDatabase(version):
class InitialSchema(nzbToMediaDB.SchemaUpgrade):
def test(self):
no_update = False
if self.hasTable("db_version"):
cur_db_version = self.checkDBVersion()
if self.has_table("db_version"):
cur_db_version = self.check_db_version()
no_update = not cur_db_version < MAX_DB_VERSION
return no_update
def execute(self):
if not self.hasTable("downloads") and not self.hasTable("db_version"):
if not self.has_table("downloads") and not self.has_table("db_version"):
queries = [
"CREATE TABLE db_version (db_version INTEGER);",
"CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));",
@@ -39,7 +39,7 @@ class InitialSchema(nzbToMediaDB.SchemaUpgrade):
self.connection.action(query)
else:
cur_db_version = self.checkDBVersion()
cur_db_version = self.check_db_version()
if cur_db_version < MIN_DB_VERSION:
logger.log_error_and_exit(u"Your database version ({current}) is too old to migrate "
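The migration module above is a small versioned-schema pattern: a db_version table records the current version, test() decides whether an upgrade is needed, and execute() creates the tables and stamps the version. A self-contained sqlite3 sketch of the same idea; the table layout is copied from the hunk, the helper names are illustrative:

import sqlite3

MIN_DB_VERSION = 1
MAX_DB_VERSION = 2


def check_db_version(conn):
    try:
        return conn.execute('SELECT db_version FROM db_version').fetchone()[0]
    except sqlite3.OperationalError:
        return 0   # no db_version table yet -> brand new database


def initial_schema(conn):
    if check_db_version(conn) >= MAX_DB_VERSION:
        return   # nothing to do
    conn.executescript(
        'CREATE TABLE IF NOT EXISTS db_version (db_version INTEGER);'
        'CREATE TABLE IF NOT EXISTS downloads ('
        ' input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT,'
        ' client_agent TEXT, status INTEGER, last_update NUMERIC,'
        ' CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));'
        'DELETE FROM db_version;'
    )
    conn.execute('INSERT INTO db_version (db_version) VALUES (?)', (MAX_DB_VERSION,))
    conn.commit()


conn = sqlite3.connect(':memory:')
initial_schema(conn)
print(check_db_version(conn))   # 2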

View file

@@ -11,7 +11,7 @@ from time import sleep
import core
def extract(filePath, outputDestination):
def extract(file_path, output_destination):
success = 0
# Using Windows
if platform.system() == 'Windows':
@@ -22,7 +22,7 @@ def extract(filePath, outputDestination):
invislocation = os.path.join(core.PROGRAM_DIR, 'core', 'extractor', 'bin', 'invisible.vbs')
cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, "x", "-y"]
ext_7zip = [".rar", ".zip", ".tar.gz", "tgz", ".tar.bz2", ".tbz", ".tar.lzma", ".tlz", ".7z", ".xz"]
EXTRACT_COMMANDS = dict.fromkeys(ext_7zip, cmd_7zip)
extract_commands = dict.fromkeys(ext_7zip, cmd_7zip)
# Using unix
else:
required_cmds = ["unrar", "unzip", "tar", "unxz", "unlzma", "7zr", "bunzip2"]
@@ -33,7 +33,7 @@ def extract(filePath, outputDestination):
# ".lzma": ["xz", "-d --format=lzma --keep"],
# ".bz2": ["bzip2", "-d --keep"],
EXTRACT_COMMANDS = {
extract_commands = {
".rar": ["unrar", "x", "-o+", "-y"],
".tar": ["tar", "-xf"],
".zip": ["unzip"],
@@ -49,46 +49,46 @@ def extract(filePath, outputDestination):
for cmd in required_cmds:
if call(['which', cmd], stdout=devnull,
stderr=devnull): # note, returns 0 if exists, or 1 if doesn't exist.
for k, v in EXTRACT_COMMANDS.items():
for k, v in extract_commands.items():
if cmd in v[0]:
if not call(["which", "7zr"], stdout=devnull, stderr=devnull): # we do have "7zr"
EXTRACT_COMMANDS[k] = ["7zr", "x", "-y"]
extract_commands[k] = ["7zr", "x", "-y"]
elif not call(["which", "7z"], stdout=devnull, stderr=devnull): # we do have "7z"
EXTRACT_COMMANDS[k] = ["7z", "x", "-y"]
extract_commands[k] = ["7z", "x", "-y"]
elif not call(["which", "7za"], stdout=devnull, stderr=devnull): # we do have "7za"
EXTRACT_COMMANDS[k] = ["7za", "x", "-y"]
extract_commands[k] = ["7za", "x", "-y"]
else:
core.logger.error("EXTRACTOR: {cmd} not found, "
"disabling support for {feature}".format
(cmd=cmd, feature=k))
del EXTRACT_COMMANDS[k]
del extract_commands[k]
devnull.close()
else:
core.logger.warning("EXTRACTOR: Cannot determine which tool to use when called from Transmission")
if not EXTRACT_COMMANDS:
if not extract_commands:
core.logger.warning("EXTRACTOR: No archive extracting programs found, plugin will be disabled")
ext = os.path.splitext(filePath)
ext = os.path.splitext(file_path)
cmd = []
if ext[1] in (".gz", ".bz2", ".lzma"):
# Check if this is a tar
if os.path.splitext(ext[0])[1] == ".tar":
cmd = EXTRACT_COMMANDS[".tar{ext}".format(ext=ext[1])]
cmd = extract_commands[".tar{ext}".format(ext=ext[1])]
elif ext[1] in (".1", ".01", ".001") and os.path.splitext(ext[0])[1] in (".rar", ".zip", ".7z"):
cmd = EXTRACT_COMMANDS[os.path.splitext(ext[0])[1]]
cmd = extract_commands[os.path.splitext(ext[0])[1]]
elif ext[1] in (".cb7", ".cba", ".cbr", ".cbt", ".cbz"): # don't extract these comic book archives.
return False
else:
if ext[1] in EXTRACT_COMMANDS:
cmd = EXTRACT_COMMANDS[ext[1]]
if ext[1] in extract_commands:
cmd = extract_commands[ext[1]]
else:
core.logger.debug("EXTRACTOR: Unknown file type: {ext}".format
(ext=ext[1]))
return False
# Create outputDestination folder
core.makeDir(outputDestination)
core.make_dir(output_destination)
if core.PASSWORDSFILE and os.path.isfile(os.path.normpath(core.PASSWORDSFILE)):
passwords = [line.strip() for line in open(os.path.normpath(core.PASSWORDSFILE))]
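The extractor above builds one dict mapping archive extensions to the command line that unpacks them, prunes entries whose tool is missing, and then picks a command off the file's (possibly double) extension. A reduced sketch of that lookup, without the tool-availability probing; the command lists mirror the hunk, the dispatch helper is illustrative:

import os

EXTRACT_COMMANDS = {
    '.rar': ['unrar', 'x', '-o+', '-y'],
    '.zip': ['unzip'],
    '.tar': ['tar', '-xf'],
    '.tar.gz': ['tar', '-xzf'],
    '.tar.bz2': ['tar', '-xjf'],
    '.7z': ['7z', 'x', '-y'],
}


def command_for(file_path):
    """Return the extraction command for file_path, or None if unsupported."""
    base, ext = os.path.splitext(file_path)
    if ext in ('.gz', '.bz2') and os.path.splitext(base)[1] == '.tar':
        return EXTRACT_COMMANDS['.tar{0}'.format(ext)]
    if ext in ('.001', '.01', '.1') and os.path.splitext(base)[1] in ('.rar', '.zip', '.7z'):
        return EXTRACT_COMMANDS[os.path.splitext(base)[1]]   # first volume of a split archive
    return EXTRACT_COMMANDS.get(ext)


print(command_for('album.tar.gz'))     # ['tar', '-xzf']
print(command_for('movie.rar'))        # ['unrar', 'x', '-o+', '-y']
print(command_for('comic.cbz'))        # None (unsupported in this sketch)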
@@ -96,25 +96,25 @@ def extract(filePath, outputDestination):
passwords = []
core.logger.info("Extracting {file} to {destination}".format
(file=filePath, destination=outputDestination))
(file=file_path, destination=output_destination))
core.logger.debug("Extracting {cmd} {file} {destination}".format
(cmd=cmd, file=filePath, destination=outputDestination))
(cmd=cmd, file=file_path, destination=output_destination))
origFiles = []
origDirs = []
for dir, subdirs, files in os.walk(outputDestination):
orig_files = []
orig_dirs = []
for dir, subdirs, files in os.walk(output_destination):
for subdir in subdirs:
origDirs.append(os.path.join(dir, subdir))
orig_dirs.append(os.path.join(dir, subdir))
for file in files:
origFiles.append(os.path.join(dir, file))
orig_files.append(os.path.join(dir, file))
pwd = os.getcwd() # Get our Present Working Directory
os.chdir(outputDestination) # Not all unpack commands accept full paths, so just extract into this directory
os.chdir(output_destination) # Not all unpack commands accept full paths, so just extract into this directory
devnull = open(os.devnull, 'w')
try: # now works same for nt and *nix
info = None
cmd.append(filePath) # add filePath to final cmd arg.
cmd.append(file_path) # add filePath to final cmd arg.
if platform.system() == 'Windows':
info = subprocess.STARTUPINFO()
info.dwFlags |= subprocess.STARTF_USESHOWWINDOW
@@ -126,7 +126,7 @@ def extract(filePath, outputDestination):
res = p.wait()
if res == 0: # Both Linux and Windows return 0 for successful.
core.logger.info("EXTRACTOR: Extraction was successful for {file} to {destination}".format
(file=filePath, destination=outputDestination))
(file=file_path, destination=output_destination))
success = 1
elif len(passwords) > 0:
core.logger.info("EXTRACTOR: Attempting to extract with passwords")
@@ -142,7 +142,7 @@ def extract(filePath, outputDestination):
if (res >= 0 and platform == 'Windows') or res == 0:
core.logger.info("EXTRACTOR: Extraction was successful "
"for {file} to {destination} using password: {pwd}".format
(file=filePath, destination=outputDestination, pwd=password))
(file=file_path, destination=output_destination, pwd=password))
success = 1
break
else:
@@ -150,7 +150,7 @@ def extract(filePath, outputDestination):
except:
core.logger.error("EXTRACTOR: Extraction failed for {file}. "
"Could not call command {cmd}".format
(file=filePath, cmd=cmd))
(file=file_path, cmd=cmd))
os.chdir(pwd)
return False
@@ -159,23 +159,23 @@ def extract(filePath, outputDestination):
if success:
# sleep to let files finish writing to disk
sleep(3)
perms = stat.S_IMODE(os.lstat(os.path.split(filePath)[0]).st_mode)
for dir, subdirs, files in os.walk(outputDestination):
perms = stat.S_IMODE(os.lstat(os.path.split(file_path)[0]).st_mode)
for dir, subdirs, files in os.walk(output_destination):
for subdir in subdirs:
if not os.path.join(dir, subdir) in origFiles:
if not os.path.join(dir, subdir) in orig_files:
try:
os.chmod(os.path.join(dir, subdir), perms)
except:
pass
for file in files:
if not os.path.join(dir, file) in origFiles:
if not os.path.join(dir, file) in orig_files:
try:
shutil.copymode(filePath, os.path.join(dir, file))
shutil.copymode(file_path, os.path.join(dir, file))
except:
pass
return True
else:
core.logger.error("EXTRACTOR: Extraction failed for {file}. "
"Result was {result}".format
(file=filePath, result=res))
(file=file_path, result=res))
return False

View file

@@ -14,7 +14,7 @@ class GitHub(object):
self.github_repo = github_repo
self.branch = branch
def _access_API(self, path, params=None):
def _access_api(self, path, params=None):
"""
Access the API at the path given and with the optional params given.
"""
@@ -32,7 +32,7 @@ class GitHub(object):
Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
"""
return self._access_API(
return self._access_api(
['repos', self.github_repo_user, self.github_repo, 'commits'],
params={'per_page': 100, 'sha': self.branch},
)
@@ -49,7 +49,7 @@ class GitHub(object):
Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/
"""
return self._access_API(
return self._access_api(
['repos', self.github_repo_user, self.github_repo, 'compare',
'{base}...{head}'.format(base=base, head=head)],
params={'per_page': per_page},
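For reference, the renamed _access_api wrapper is joining path segments onto the public GitHub REST endpoints (commits and compare). A direct requests-based sketch of the compare call it ends up making; treat it as an approximation of the wrapper, not the project's code, and note it needs network access and example branch names:

import requests


def compare_branches(user, repo, base, head, per_page=100):
    """Fetch GitHub's compare view for base...head (needs network access)."""
    url = 'https://api.github.com/repos/{0}/{1}/compare/{2}...{3}'.format(
        user, repo, base, head)
    r = requests.get(url, params={'per_page': per_page}, timeout=30)
    r.raise_for_status()
    return r.json()


if __name__ == '__main__':
    # Repo and branch names are examples only.
    info = compare_branches('clinton-hall', 'nzbToMedia', 'master', 'nightly')
    print(info.get('status'), info.get('ahead_by'), info.get('behind_by'))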

View file

@@ -58,10 +58,10 @@ class NTMRotatingLogHandler(object):
handler.flush()
handler.close()
def initLogging(self, consoleLogging=True):
def init_logging(self, console_logging=True):
if consoleLogging:
self.console_logging = consoleLogging
if console_logging:
self.console_logging = console_logging
old_handler = None
@@ -180,7 +180,7 @@ class NTMRotatingLogHandler(object):
pp_logger.addHandler(new_file_handler)
db_logger.addHandler(new_file_handler)
def log(self, toLog, logLevel=MESSAGE, section='MAIN'):
def log(self, to_log, log_level=MESSAGE, section='MAIN'):
with self.log_lock:
@@ -193,7 +193,7 @@ class NTMRotatingLogHandler(object):
self.writes_since_check += 1
try:
message = u"{0}: {1}".format(section.upper(), toLog)
message = u"{0}: {1}".format(section.upper(), to_log)
except UnicodeError:
message = u"{0}: Message contains non-utf-8 string".format(section.upper())
@@ -206,22 +206,22 @@ class NTMRotatingLogHandler(object):
setattr(db_logger, 'db', lambda *args: db_logger.log(DB, *args))
try:
if logLevel == DEBUG:
if log_level == DEBUG:
if core.LOG_DEBUG == 1:
ntm_logger.debug(out_line)
elif logLevel == MESSAGE:
elif log_level == MESSAGE:
ntm_logger.info(out_line)
elif logLevel == WARNING:
elif log_level == WARNING:
ntm_logger.warning(out_line)
elif logLevel == ERROR:
elif log_level == ERROR:
ntm_logger.error(out_line)
elif logLevel == POSTPROCESS:
elif log_level == POSTPROCESS:
pp_logger.postprocess(out_line)
elif logLevel == DB:
elif log_level == DB:
if core.LOG_DB == 1:
db_logger.db(out_line)
else:
ntm_logger.info(logLevel, out_line)
ntm_logger.info(log_level, out_line)
except ValueError:
pass
@@ -249,32 +249,32 @@ class DispatchingFormatter(object):
ntm_log_instance = NTMRotatingLogHandler(core.LOG_FILE, NUM_LOGS, LOG_SIZE)
def log(toLog, logLevel=MESSAGE, section='MAIN'):
ntm_log_instance.log(toLog, logLevel, section)
def log(to_log, log_level=MESSAGE, section='MAIN'):
ntm_log_instance.log(to_log, log_level, section)
def info(toLog, section='MAIN'):
log(toLog, MESSAGE, section)
def info(to_log, section='MAIN'):
log(to_log, MESSAGE, section)
def error(toLog, section='MAIN'):
log(toLog, ERROR, section)
def error(to_log, section='MAIN'):
log(to_log, ERROR, section)
def warning(toLog, section='MAIN'):
log(toLog, WARNING, section)
def warning(to_log, section='MAIN'):
log(to_log, WARNING, section)
def debug(toLog, section='MAIN'):
log(toLog, DEBUG, section)
def debug(to_log, section='MAIN'):
log(to_log, DEBUG, section)
def postprocess(toLog, section='POSTPROCESS'):
log(toLog, POSTPROCESS, section)
def postprocess(to_log, section='POSTPROCESS'):
log(to_log, POSTPROCESS, section)
def db(toLog, section='DB'):
log(toLog, DB, section)
def db(to_log, section='DB'):
log(to_log, DB, section)
def log_error_and_exit(error_msg):
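The module-level helpers above (info/error/warning/debug/postprocess/db) are thin wrappers that all funnel into one log(to_log, level, section) call. A tiny sketch of that funnel using the standard logging module; the level names mirror the hunk, the implementation is illustrative:

import logging

POSTPROCESS = 21   # custom level between INFO (20) and WARNING (30)
logging.addLevelName(POSTPROCESS, 'POSTPROCESS')
logging.basicConfig(level=logging.DEBUG, format='%(levelname)s::%(message)s')
_logger = logging.getLogger('nzbtomedia')


def log(to_log, log_level=logging.INFO, section='MAIN'):
    _logger.log(log_level, u'{0}: {1}'.format(section.upper(), to_log))


def info(to_log, section='MAIN'):
    log(to_log, logging.INFO, section)


def warning(to_log, section='MAIN'):
    log(to_log, logging.WARNING, section)


def postprocess(to_log, section='POSTPROCESS'):
    log(to_log, POSTPROCESS, section)


info('Starting run')
postprocess('Download handed to SickBeard', section='SickBeard')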

View file

@@ -7,11 +7,11 @@ import core
from core import logger
def autoFork(section, inputCategory):
def auto_fork(section, input_category):
# auto-detect correct section
# config settings
cfg = dict(core.CFG[section][inputCategory])
cfg = dict(core.CFG[section][input_category])
host = cfg.get("host")
port = cfg.get("port")
@@ -31,26 +31,26 @@ def autoFork(section, inputCategory):
detected = False
if section == "NzbDrone":
logger.info("Attempting to verify {category} fork".format
(category=inputCategory))
(category=input_category))
url = "{protocol}{host}:{port}{root}/api/rootfolder".format(
protocol=protocol, host=host, port=port, root=web_root)
headers = {"X-Api-Key": apikey}
try:
r = requests.get(url, headers=headers, stream=True, verify=False)
except requests.ConnectionError:
logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, inputCategory))
logger.warning("Could not connect to {0}:{1} to verify fork!".format(section, input_category))
if not r.ok:
logger.warning("Connection to {section}:{category} failed! "
"Check your configuration".format
(section=section, category=inputCategory))
(section=section, category=input_category))
fork = ['default', {}]
elif fork == "auto":
params = core.ALL_FORKS
rem_params = []
logger.info("Attempting to auto-detect {category} fork".format(category=inputCategory))
logger.info("Attempting to auto-detect {category} fork".format(category=input_category))
# define the order to test. Default must be first since the default fork doesn't reject parameters.
# then in order of most unique parameters.
@@ -75,17 +75,17 @@ def autoFork(section, inputCategory):
r = s.get(url, auth=(username, password), verify=False)
except requests.ConnectionError:
logger.info("Could not connect to {section}:{category} to perform auto-fork detection!".format
(section=section, category=inputCategory))
(section=section, category=input_category))
r = []
if r and r.ok:
if apikey:
optionalParameters = []
optional_parameters = []
try:
optionalParameters = r.json()['data']['optionalParameters'].keys()
optional_parameters = r.json()['data']['optionalParameters'].keys()
except:
optionalParameters = r.json()['data']['data']['optionalParameters'].keys()
optional_parameters = r.json()['data']['data']['optionalParameters'].keys()
for param in params:
if param not in optionalParameters:
if param not in optional_parameters:
rem_params.append(param)
else:
for param in params:
@@ -99,16 +99,16 @@ def autoFork(section, inputCategory):
break
if detected:
logger.info("{section}:{category} fork auto-detection successful ...".format
(section=section, category=inputCategory))
(section=section, category=input_category))
elif rem_params:
logger.info("{section}:{category} fork auto-detection found custom params {params}".format
(section=section, category=inputCategory, params=params))
(section=section, category=input_category, params=params))
fork = ['custom', params]
else:
logger.info("{section}:{category} fork auto-detection failed".format
(section=section, category=inputCategory))
(section=section, category=input_category))
fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)]
logger.info("{section}:{category} fork set to {fork}".format
(section=section, category=inputCategory, fork=fork[0]))
(section=section, category=input_category, fork=fork[0]))
return fork[0], fork[1]
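The auto-detection above boils down to set arithmetic: ask the server which optional parameters it advertises, drop every candidate parameter it does not know, and pick the fork whose parameter set survives intact (falling back to a "custom" parameter list otherwise). A stripped-down sketch of that selection; the fork definitions here are made up for the example and the logic is simplified relative to the real ordering:

def detect_fork(forks, advertised_params):
    """Return the first (name, params) pair fully supported by the server."""
    for name, params in forks:
        if set(params) <= set(advertised_params):
            return name, list(params)
    # nothing matched completely: keep whatever subset the server accepts
    name, params = forks[0]
    return 'custom', [p for p in params if p in advertised_params]


forks = [
    ('default', ['dir', 'nzbName', 'quiet']),
    ('failed-branch', ['dir', 'nzbName', 'quiet', 'failed']),
]
print(detect_fork(forks, ['dir', 'nzbName', 'quiet', 'failed', 'proc_type']))
print(detect_fork(forks, ['dir', 'nzbName']))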

View file

@@ -251,7 +251,7 @@ class ConfigObj(configobj.ConfigObj, Section):
@staticmethod
def addnzbget():
# load configs into memory
CFG_NEW = config()
cfg_new = config()
try:
if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
@@ -274,196 +274,196 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = 'default_downloadDirectory'
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "General"
envKeys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfgKeys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfg_keys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "Network"
envKeys = ['MOUNTPOINTS']
cfgKeys = ['mount_points']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['MOUNTPOINTS']
cfg_keys = ['mount_points']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "CouchPotato"
envCatKey = 'NZBPO_CPSCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
env_cat_key = 'NZBPO_CPSCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
'wait_for', 'watch_dir', 'omdbapikey']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_CPS{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_CPS{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['Radarr'].sections:
CFG_NEW['Radarr'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['Radarr'].sections:
cfg_new['Radarr'][env_cat_key]['enabled'] = 0
section = "SickBeard"
envCatKey = 'NZBPO_SBCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
env_cat_key = 'NZBPO_SBCATEGORY'
env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
cfgKeys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork',
cfg_keys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork',
'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_SB{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_SB{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['NzbDrone'].sections:
CFG_NEW['NzbDrone'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['NzbDrone'].sections:
cfg_new['NzbDrone'][env_cat_key]['enabled'] = 0
section = "HeadPhones"
envCatKey = 'NZBPO_HPCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_HP{index}'.format(index=envKeys[index])
env_cat_key = 'NZBPO_HPCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_HP{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['Lidarr'].sections:
CFG_NEW['Lidarr'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['Lidarr'].sections:
cfg_new['Lidarr'][env_cat_key]['enabled'] = 0
section = "Mylar"
envCatKey = 'NZBPO_MYCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
env_cat_key = 'NZBPO_MYCATEGORY'
env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
cfg_keys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_MY{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_MY{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "Gamez"
envCatKey = 'NZBPO_GZCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_GZ{index}'.format(index=envKeys[index])
env_cat_key = 'NZBPO_GZCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_GZ{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = "NzbDrone"
envCatKey = 'NZBPO_NDCATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
env_cat_key = 'NZBPO_NDCATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE']
#new cfgKey added for importMode
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path','importMode']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_ND{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_ND{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['SickBeard'].sections:
CFG_NEW['SickBeard'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['SickBeard'].sections:
cfg_new['SickBeard'][env_cat_key]['enabled'] = 0
section = "Radarr"
envCatKey = 'NZBPO_RACATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
env_cat_key = 'NZBPO_RACATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE']
#new cfgKey added for importMode
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey','importMode']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_RA{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_RA{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['CouchPotato'].sections:
CFG_NEW['CouchPotato'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['CouchPotato'].sections:
cfg_new['CouchPotato'][env_cat_key]['enabled'] = 0
section = "Lidarr"
envCatKey = 'NZBPO_LICATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
env_cat_key = 'NZBPO_LICATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_LI{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_LI{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['HeadPhones'].sections:
CFG_NEW['HeadPhones'][envCatKey]['enabled'] = 0
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['HeadPhones'].sections:
cfg_new['HeadPhones'][env_cat_key]['enabled'] = 0
section = "Extensions"
envKeys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "Posix"
envKeys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
env_keys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfg_keys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "Transcoder"
envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
@@ -473,7 +473,7 @@ class ConfigObj(configobj.ConfigObj, Section):
'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS',
'OUTPUTAUDIOOTHERCHANNELS','OUTPUTVIDEORESOLUTION']
cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
cfg_keys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir',
'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
@@ -483,51 +483,51 @@ class ConfigObj(configobj.ConfigObj, Section):
'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
'outputAudioOtherChannels', 'outputVideoResolution']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "WakeOnLan"
envKeys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfgKeys = ['wake', 'host', 'port', 'mac']
for index in range(len(envKeys)):
key = 'NZBPO_WOL{index}'.format(index=envKeys[index])
env_keys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfg_keys = ['wake', 'host', 'port', 'mac']
for index in range(len(env_keys)):
key = 'NZBPO_WOL{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
cfg_new[section][option] = value
section = "UserScript"
envCatKey = 'NZBPO_USCATEGORY'
envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
env_cat_key = 'NZBPO_USCATEGORY'
env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
cfg_keys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfgKeys[index]
option = cfg_keys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
except Exception as error:
logger.debug("Error {msg} when applying NZBGet config".format(msg=error))
try:
# write our new config to autoProcessMedia.cfg
CFG_NEW.filename = core.CONFIG_FILE
CFG_NEW.write()
cfg_new.filename = core.CONFIG_FILE
cfg_new.write()
except Exception as error:
logger.debug("Error {msg} when writing changes to .cfg".format(msg=error))
return CFG_NEW
return cfg_new
configobj.Section = Section
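Every section in addnzbget() above repeats the same shape: a list of NZBPO_* environment-variable suffixes, a parallel list of config option names, and a loop that copies any variable that is actually set into the matching config section. A condensed sketch of that mapping loop; the environment-variable names are NZBGet-style keys taken from the hunk, while the apply_env helper itself is illustrative:

import os


def apply_env(cfg, section, prefix, env_keys, cfg_keys, environ=None):
    """Copy NZBPO-style environment variables into cfg[section]."""
    environ = os.environ if environ is None else environ
    for env_key, option in zip(env_keys, cfg_keys):
        key = 'NZBPO_{0}{1}'.format(prefix, env_key)
        if key in environ:
            cfg.setdefault(section, {})[option] = environ[key]
    return cfg


fake_env = {'NZBPO_WOLWAKE': '1', 'NZBPO_WOLMAC': '00:11:22:33:44:55'}
cfg = apply_env({}, 'WakeOnLan', 'WOL',
                ['WAKE', 'HOST', 'PORT', 'MAC'],
                ['wake', 'host', 'port', 'mac'],
                environ=fake_env)
print(cfg)   # {'WakeOnLan': {'wake': '1', 'mac': '00:11:22:33:44:55'}}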

View file

@@ -12,7 +12,7 @@ import core
from core import logger
def dbFilename(filename="nzbtomedia.db", suffix=None):
def db_filename(filename="nzbtomedia.db", suffix=None):
"""
@param filename: The sqlite database filename to use. If not specified,
will be made to be nzbtomedia.db
@@ -29,13 +29,13 @@ class DBConnection(object):
def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None):
self.filename = filename
self.connection = sqlite3.connect(dbFilename(filename), 20)
self.connection = sqlite3.connect(db_filename(filename), 20)
if row_type == "dict":
self.connection.row_factory = self._dict_factory
else:
self.connection.row_factory = sqlite3.Row
def checkDBVersion(self):
def check_db_version(self):
result = None
try:
result = self.select("SELECT db_version FROM db_version")
@@ -52,7 +52,7 @@ class DBConnection(object):
if query is None:
return
sqlResult = None
sql_result = None
attempt = 0
while attempt < 5:
@@ -61,13 +61,13 @@ class DBConnection(object):
logger.log("{name}: {query}".format(name=self.filename, query=query), logger.DB)
cursor = self.connection.cursor()
cursor.execute(query)
sqlResult = cursor.fetchone()[0]
sql_result = cursor.fetchone()[0]
else:
logger.log("{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB)
cursor = self.connection.cursor()
cursor.execute(query, args)
sqlResult = cursor.fetchone()[0]
sql_result = cursor.fetchone()[0]
# get out of the connection attempt loop since we were successful
break
@@ -83,31 +83,31 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise
return sqlResult
return sql_result
def mass_action(self, querylist, logTransaction=False):
def mass_action(self, querylist, log_transaction=False):
if querylist is None:
return
sqlResult = []
sql_result = []
attempt = 0
while attempt < 5:
try:
for qu in querylist:
if len(qu) == 1:
if logTransaction:
if log_transaction:
logger.log(qu[0], logger.DEBUG)
sqlResult.append(self.connection.execute(qu[0]))
sql_result.append(self.connection.execute(qu[0]))
elif len(qu) > 1:
if logTransaction:
if log_transaction:
logger.log(u"{query} with args {args}".format(query=qu[0], args=qu[1]), logger.DEBUG)
sqlResult.append(self.connection.execute(qu[0], qu[1]))
sql_result.append(self.connection.execute(qu[0], qu[1]))
self.connection.commit()
logger.log(u"Transaction with {x} query's executed".format(x=len(querylist)), logger.DEBUG)
return sqlResult
return sql_result
except sqlite3.OperationalError as error:
sqlResult = []
sql_result = []
if self.connection:
self.connection.rollback()
if "unable to open database file" in error.args[0] or "database is locked" in error.args[0]:
@@ -123,24 +123,24 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise
return sqlResult
return sql_result
def action(self, query, args=None):
if query is None:
return
sqlResult = None
sql_result = None
attempt = 0
while attempt < 5:
try:
if args is None:
logger.log(u"{name}: {query}".format(name=self.filename, query=query), logger.DB)
sqlResult = self.connection.execute(query)
sql_result = self.connection.execute(query)
else:
logger.log(u"{name}: {query} with args {args}".format
(name=self.filename, query=query, args=args), logger.DB)
sqlResult = self.connection.execute(query, args)
sql_result = self.connection.execute(query, args)
self.connection.commit()
# get out of the connection attempt loop since we were successful
break
@@ -156,49 +156,49 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: {msg}".format(msg=error), logger.ERROR)
raise
return sqlResult
return sql_result
def select(self, query, args=None):
sqlResults = self.action(query, args).fetchall()
sql_results = self.action(query, args).fetchall()
if sqlResults is None:
if sql_results is None:
return []
return sqlResults
return sql_results
def upsert(self, tableName, valueDict, keyDict):
def upsert(self, table_name, value_dict, key_dict):
changesBefore = self.connection.total_changes
changes_before = self.connection.total_changes
genParams = lambda myDict: ["{key} = ?".format(key=k) for k in myDict.keys()]
gen_params = lambda my_dict: ["{key} = ?".format(key=k) for k in my_dict.keys()]
items = list(valueDict.values()) + list(keyDict.values())
items = list(value_dict.values()) + list(key_dict.values())
self.action(
"UPDATE {table} "
"SET {params} "
"WHERE {conditions}".format(
table=tableName,
params=", ".join(genParams(valueDict)),
conditions=" AND ".join(genParams(keyDict))
table=table_name,
params=", ".join(gen_params(value_dict)),
conditions=" AND ".join(gen_params(key_dict))
),
items
)
if self.connection.total_changes == changesBefore:
if self.connection.total_changes == changes_before:
self.action(
"INSERT OR IGNORE INTO {table} ({columns}) "
"VALUES ({values})".format(
table=tableName,
columns=", ".join(map(text_type, valueDict.keys())),
values=", ".join(["?"] * len(valueDict.values()))
table=table_name,
columns=", ".join(map(text_type, value_dict.keys())),
values=", ".join(["?"] * len(value_dict.values()))
),
list(valueDict.values())
list(value_dict.values())
)
def tableInfo(self, tableName):
def table_info(self, table_name):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
cursor = self.connection.execute("PRAGMA table_info({0})".format(tableName))
cursor = self.connection.execute("PRAGMA table_info({0})".format(table_name))
columns = {}
for column in cursor:
columns[column['name']] = {'type': column['type']}
@ -212,7 +212,7 @@ class DBConnection(object):
return d
def sanityCheckDatabase(connection, sanity_check):
def sanity_check_database(connection, sanity_check):
sanity_check(connection).check()
@ -228,36 +228,36 @@ class DBSanityCheck(object):
# = Upgrade API =
# ===============
def upgradeDatabase(connection, schema):
def upgrade_database(connection, schema):
logger.log(u"Checking database structure...", logger.MESSAGE)
_processUpgrade(connection, schema)
_process_upgrade(connection, schema)
def prettyName(class_name):
def pretty_name(class_name):
return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])
def _processUpgrade(connection, upgradeClass):
instance = upgradeClass(connection)
def _process_upgrade(connection, upgrade_class):
instance = upgrade_class(connection)
logger.log(u"Checking {name} database upgrade".format
(name=prettyName(upgradeClass.__name__)), logger.DEBUG)
(name=pretty_name(upgrade_class.__name__)), logger.DEBUG)
if not instance.test():
logger.log(u"Database upgrade required: {name}".format
(name=prettyName(upgradeClass.__name__)), logger.MESSAGE)
(name=pretty_name(upgrade_class.__name__)), logger.MESSAGE)
try:
instance.execute()
except sqlite3.DatabaseError as error:
print(u"Error in {name}: {msg}".format
(name=upgradeClass.__name__, msg=error))
(name=upgrade_class.__name__, msg=error))
raise
logger.log(u"{name} upgrade completed".format
(name=upgradeClass.__name__), logger.DEBUG)
(name=upgrade_class.__name__), logger.DEBUG)
else:
logger.log(u"{name} upgrade not required".format
(name=upgradeClass.__name__), logger.DEBUG)
(name=upgrade_class.__name__), logger.DEBUG)
for upgradeSubClass in upgradeClass.__subclasses__():
_processUpgrade(connection, upgradeSubClass)
for upgradeSubClass in upgrade_class.__subclasses__():
_process_upgrade(connection, upgradeSubClass)
# Base migration class. All future DB changes should be subclassed from this class
@ -265,24 +265,24 @@ class SchemaUpgrade(object):
def __init__(self, connection):
self.connection = connection
def hasTable(self, tableName):
return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName,)).fetchall()) > 0
def has_table(self, table_name):
return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (table_name,)).fetchall()) > 0
def hasColumn(self, tableName, column):
return column in self.connection.tableInfo(tableName)
def has_column(self, table_name, column):
return column in self.connection.table_info(table_name)
def addColumn(self, table, column, type="NUMERIC", default=0):
def add_column(self, table, column, type="NUMERIC", default=0):
self.connection.action("ALTER TABLE {0} ADD {1} {2}".format(table, column, type))
self.connection.action("UPDATE {0} SET {1} = ?".format(table, column), (default,))
def checkDBVersion(self):
def check_db_version(self):
result = self.connection.select("SELECT db_version FROM db_version")
if result:
return int(result[-1]["db_version"])
else:
return 0
def incDBVersion(self):
new_version = self.checkDBVersion() + 1
def inc_db_version(self):
new_version = self.check_db_version() + 1
self.connection.action("UPDATE db_version SET db_version = ?", [new_version])
return new_version
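
For reference, a minimal sketch of a migration written against the renamed helpers (has_table, has_column, add_column, check_db_version, inc_db_version). The AddDummyColumn class and the "dummy" column are hypothetical and not part of this commit; only the SchemaUpgrade/test/execute pattern shown above is assumed.

from core import nzbToMediaDB

class AddDummyColumn(nzbToMediaDB.SchemaUpgrade):
    # Hypothetical migration: _process_upgrade() calls test() and, when it
    # returns False, runs execute().
    def test(self):
        return self.has_column("downloads", "dummy")

    def execute(self):
        # add_column() defaults to a NUMERIC column with a default of 0.
        self.add_column("downloads", "dummy")
        self.inc_db_version()

# Hypothetical invocation:
nzbToMediaDB.upgrade_database(nzbToMediaDB.DBConnection(), AddDummyColumn)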


@ -8,7 +8,7 @@ import subprocess
import core
from core import logger
from core.nzbToMediaUtil import listMediaFiles
from core.nzbToMediaUtil import list_media_files
reverse_list = [r"\.\d{2}e\d{2}s\.", r"\.[pi]0801\.", r"\.p027\.", r"\.[pi]675\.", r"\.[pi]084\.", r"\.p063\.",
r"\b[45]62[xh]\.", r"\.yarulb\.", r"\.vtd[hp]\.",
@ -32,10 +32,10 @@ char_replace = [[r"(\w)1\.(\w)", r"\1i\2"]
def process_all_exceptions(name, dirname):
par2(dirname)
rename_script(dirname)
for filename in listMediaFiles(dirname):
for filename in list_media_files(dirname):
newfilename = None
parentDir = os.path.dirname(filename)
head, fileExtension = os.path.splitext(os.path.basename(filename))
parent_dir = os.path.dirname(filename)
head, file_extension = os.path.splitext(os.path.basename(filename))
if reverse_pattern.search(head) is not None:
exception = reverse_filename
elif garbage_name.search(head) is not None:
@ -44,7 +44,7 @@ def process_all_exceptions(name, dirname):
exception = None
newfilename = filename
if not newfilename:
newfilename = exception(filename, parentDir, name)
newfilename = exception(filename, parent_dir, name)
if core.GROUPS:
newfilename = strip_groups(newfilename)
if newfilename != filename:
@ -55,29 +55,29 @@ def strip_groups(filename):
if not core.GROUPS:
return filename
dirname, file = os.path.split(filename)
head, fileExtension = os.path.splitext(file)
head, file_extension = os.path.splitext(file)
newname = head.replace(' ', '.')
for group in core.GROUPS:
newname = newname.replace(group, '')
newname = newname.replace('[]', '')
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)
return newfile_path
def rename_file(filename, newfilePath):
if os.path.isfile(newfilePath):
newfilePath = os.path.splitext(newfilePath)[0] + ".NTM" + os.path.splitext(newfilePath)[1]
def rename_file(filename, newfile_path):
if os.path.isfile(newfile_path):
newfile_path = os.path.splitext(newfile_path)[0] + ".NTM" + os.path.splitext(newfile_path)[1]
logger.debug("Replacing file name {old} with download name {new}".format
(old=filename, new=newfilePath), "EXCEPTION")
(old=filename, new=newfile_path), "EXCEPTION")
try:
os.rename(filename, newfilePath)
os.rename(filename, newfile_path)
except Exception as error:
logger.error("Unable to rename file due to: {error}".format(error=error), "EXCEPTION")
def replace_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename))
head, file_extension = os.path.splitext(os.path.basename(filename))
if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
newname = os.path.basename(dirname).replace(' ', '.')
logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION")
@ -88,13 +88,13 @@ def replace_filename(filename, dirname, name):
else:
logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION")
newname = name
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)
return newfile_path
def reverse_filename(filename, dirname, name):
head, fileExtension = os.path.splitext(os.path.basename(filename))
head, file_extension = os.path.splitext(os.path.basename(filename))
na_parts = season_pattern.search(head)
if na_parts is not None:
word_p = word_pattern.findall(na_parts.group(2))
@ -114,9 +114,9 @@ def reverse_filename(filename, dirname, name):
newname = newname.replace(' ', '.')
logger.debug("Reversing filename {old} to {new}".format
(old=head, new=newname), "EXCEPTION")
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
return newfilePath
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)
return newfile_path
def rename_script(dirname):
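
To make the renaming behaviour above concrete, a small standalone sketch of the group-stripping logic; the groups list and file path are invented examples (the real strip_groups() reads core.GROUPS):

import os

def strip_groups_example(filename, groups):
    # Mirrors strip_groups(): drop release-group tags from the file name,
    # keeping the directory and the extension intact.
    dirname, file = os.path.split(filename)
    head, file_extension = os.path.splitext(file)
    newname = head.replace(' ', '.')
    for group in groups:
        newname = newname.replace(group, '')
        newname = newname.replace('[]', '')
    return os.path.join(dirname, newname + file_extension)

print(strip_groups_example('/downloads/Show.S01E01.720p[GRP].mkv', ['[GRP]']))
# -> /downloads/Show.S01E01.720p.mkv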


@ -5,11 +5,11 @@ from subprocess import Popen
import core
from core import logger
from core.nzbToMediaUtil import import_subs, listMediaFiles, rmDir
from core.nzbToMediaUtil import import_subs, list_media_files, remove_dir
from core.transcoder import transcoder
def external_script(outputDestination, torrentName, torrentLabel, settings):
def external_script(output_destination, torrent_name, torrent_label, settings):
final_result = 0 # start at 0.
num_files = 0
try:
@ -40,20 +40,20 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
core.USER_SCRIPT_RUNONCE = int(settings.get("user_script_runOnce", 1))
if core.CHECK_MEDIA:
for video in listMediaFiles(outputDestination, media=True, audio=False, meta=False, archives=False):
if transcoder.isVideoGood(video, 0):
for video in list_media_files(output_destination, media=True, audio=False, meta=False, archives=False):
if transcoder.is_video_good(video, 0):
import_subs(video)
else:
logger.info("Corrupt video file found {0}. Deleting.".format(video), "USERSCRIPT")
os.unlink(video)
for dirpath, dirnames, filenames in os.walk(outputDestination):
for dirpath, dirnames, filenames in os.walk(output_destination):
for file in filenames:
filePath = core.os.path.join(dirpath, file)
fileName, fileExtension = os.path.splitext(file)
file_path = core.os.path.join(dirpath, file)
file_name, file_extension = os.path.splitext(file)
if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS:
if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or "all" in core.USER_SCRIPT_MEDIAEXTENSIONS:
num_files += 1
if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1: # we have already run once, so just continue to get number of files.
continue
@ -63,17 +63,17 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
command.append('{0}'.format(file))
continue
elif param == "FP":
command.append('{0}'.format(filePath))
command.append('{0}'.format(file_path))
continue
elif param == "TN":
command.append('{0}'.format(torrentName))
command.append('{0}'.format(torrent_name))
continue
elif param == "TL":
command.append('{0}'.format(torrentLabel))
command.append('{0}'.format(torrent_label))
continue
elif param == "DN":
if core.USER_SCRIPT_RUNONCE == 1:
command.append('{0}'.format(outputDestination))
command.append('{0}'.format(output_destination))
else:
command.append('{0}'.format(dirpath))
continue
@ -83,7 +83,7 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
cmd = ""
for item in command:
cmd = "{cmd} {item}".format(cmd=cmd, item=item)
logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=filePath), "USERSCRIPT")
logger.info("Running script {cmd} on file {path}.".format(cmd=cmd, path=file_path), "USERSCRIPT")
try:
p = Popen(command)
res = p.wait()
@ -102,16 +102,16 @@ def external_script(outputDestination, torrentName, torrentLabel, settings):
final_result += result
num_files_new = 0
for dirpath, dirnames, filenames in os.walk(outputDestination):
for dirpath, dirnames, filenames in os.walk(output_destination):
for file in filenames:
fileName, fileExtension = os.path.splitext(file)
file_name, file_extension = os.path.splitext(file)
if fileExtension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL":
if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == "ALL":
num_files_new += 1
if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0:
logger.info("All files have been processed. Cleaning outputDirectory {0}".format(outputDestination))
rmDir(outputDestination)
logger.info("All files have been processed. Cleaning outputDirectory {0}".format(output_destination))
remove_dir(output_destination)
elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0:
logger.info("{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.".format(
num_files, num_files_new))
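
As a quick reference for the placeholder handling above, a simplified sketch of how the configured user_script_param values are expanded; the script path and parameter list are invented for illustration, and the USER_SCRIPT_RUNONCE handling is omitted:

def build_user_script_command(script, params, file, file_path, torrent_name, torrent_label, destination):
    # Mirrors the substitution in external_script(): each placeholder in the
    # configured parameter list is replaced with the matching runtime value.
    mapping = {
        'FN': file,            # file name only
        'FP': file_path,       # full path to the file
        'TN': torrent_name,    # torrent name
        'TL': torrent_label,   # torrent label / category
        'DN': destination,     # output directory being processed
    }
    return [script] + [mapping.get(param, param) for param in params]

print(build_user_script_command('/scripts/notify.sh', ['TN', 'FP'],
                                'episode.mkv', '/downloads/tv/episode.mkv',
                                'Show.S01E01', 'tv', '/downloads/tv'))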

File diff suppressed because it is too large


@ -13,24 +13,24 @@ from six import iteritems, text_type, string_types
import core
from core import logger
from core.nzbToMediaUtil import makeDir
from core.nzbToMediaUtil import make_dir
def isVideoGood(videofile, status):
fileNameExt = os.path.basename(videofile)
fileName, fileExt = os.path.splitext(fileNameExt)
def is_video_good(videofile, status):
file_name_ext = os.path.basename(videofile)
file_name, file_ext = os.path.splitext(file_name_ext)
disable = False
if fileExt not in core.MEDIACONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or fileExt in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED):
if file_ext not in core.MEDIACONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or file_ext in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED):
disable = True
else:
test_details, res = getVideoDetails(core.TEST_FILE)
test_details, res = get_video_details(core.TEST_FILE)
if res != 0 or test_details.get("error"):
disable = True
logger.info("DISABLED: ffprobe failed to analyse test file. Stopping corruption check.", 'TRANSCODER')
if test_details.get("streams"):
vidStreams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"]
audStreams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"]
if not (len(vidStreams) > 0 and len(audStreams) > 0):
vid_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "video"]
aud_streams = [item for item in test_details["streams"] if "codec_type" in item and item["codec_type"] == "audio"]
if not (len(vid_streams) > 0 and len(aud_streams) > 0):
disable = True
logger.info("DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.",
'TRANSCODER')
@ -40,25 +40,25 @@ def isVideoGood(videofile, status):
else:
return True
logger.info('Checking [{0}] for corruption, please stand by ...'.format(fileNameExt), 'TRANSCODER')
video_details, result = getVideoDetails(videofile)
logger.info('Checking [{0}] for corruption, please stand by ...'.format(file_name_ext), 'TRANSCODER')
video_details, result = get_video_details(videofile)
if result != 0:
logger.error("FAILED: [{0}] is corrupted!".format(fileNameExt), 'TRANSCODER')
logger.error("FAILED: [{0}] is corrupted!".format(file_name_ext), 'TRANSCODER')
return False
if video_details.get("error"):
logger.info("FAILED: [{0}] returned error [{1}].".format(fileNameExt, video_details.get("error")), 'TRANSCODER')
logger.info("FAILED: [{0}] returned error [{1}].".format(file_name_ext, video_details.get("error")), 'TRANSCODER')
return False
if video_details.get("streams"):
videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
if len(videoStreams) > 0 and len(audioStreams) > 0:
logger.info("SUCCESS: [{0}] has no corruption.".format(fileNameExt), 'TRANSCODER')
video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
if len(video_streams) > 0 and len(audio_streams) > 0:
logger.info("SUCCESS: [{0}] has no corruption.".format(file_name_ext), 'TRANSCODER')
return True
else:
logger.info("FAILED: [{0}] has {1} video streams and {2} audio streams. "
"Assume corruption.".format
(fileNameExt, len(videoStreams), len(audioStreams)), 'TRANSCODER')
(file_name_ext, len(video_streams), len(audio_streams)), 'TRANSCODER')
return False
@ -72,7 +72,7 @@ def zip_out(file, img, bitbucket):
return procin
def getVideoDetails(videofile, img=None, bitbucket=None):
def get_video_details(videofile, img=None, bitbucket=None):
video_details = {}
result = 1
file = videofile
@ -116,31 +116,31 @@ def getVideoDetails(videofile, img=None, bitbucket=None):
return video_details, result
def buildCommands(file, newDir, movieName, bitbucket):
def build_commands(file, new_dir, movie_name, bitbucket):
if isinstance(file, string_types):
inputFile = file
input_file = file
if 'concat:' in file:
file = file.split('|')[0].replace('concat:', '')
video_details, result = getVideoDetails(file)
video_details, result = get_video_details(file)
dir, name = os.path.split(file)
name, ext = os.path.splitext(name)
check = re.match("VTS_([0-9][0-9])_[0-9]+", name)
if check and core.CONCAT:
name = movieName
name = movie_name
elif check:
name = ('{0}.cd{1}'.format(movieName, check.groups()[0]))
name = ('{0}.cd{1}'.format(movie_name, check.groups()[0]))
elif core.CONCAT and re.match("(.+)[cC][dD][0-9]", name):
name = re.sub("([\ \.\-\_\=\:]+[cC][dD][0-9])", "", name)
if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself.
if ext == core.VEXTENSION and new_dir == dir: # we need to change the name to prevent overwriting itself.
core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext'
else:
img, data = next(iteritems(file))
name = data['name']
video_details, result = getVideoDetails(data['files'][0], img, bitbucket)
inputFile = '-'
video_details, result = get_video_details(data['files'][0], img, bitbucket)
input_file = '-'
file = '-'
newfilePath = os.path.normpath(os.path.join(newDir, name) + core.VEXTENSION)
newfile_path = os.path.normpath(os.path.join(new_dir, name) + core.VEXTENSION)
map_cmd = []
video_cmd = []
@ -152,9 +152,9 @@ def buildCommands(file, newDir, movieName, bitbucket):
if not video_details or not video_details.get(
"streams"): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
videoStreams = []
audioStreams = []
subStreams = []
video_streams = []
audio_streams = []
sub_streams = []
map_cmd.extend(['-map', '0'])
if core.VCODEC:
@ -201,15 +201,15 @@ def buildCommands(file, newDir, movieName, bitbucket):
other_cmd.extend(['-movflags', '+faststart'])
else:
videoStreams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audioStreams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
subStreams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"]
video_streams = [item for item in video_details["streams"] if item["codec_type"] == "video"]
audio_streams = [item for item in video_details["streams"] if item["codec_type"] == "audio"]
sub_streams = [item for item in video_details["streams"] if item["codec_type"] == "subtitle"]
if core.VEXTENSION not in ['.mkv', '.mpegts']:
subStreams = [item for item in video_details["streams"] if
sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"]
for video in videoStreams:
for video in video_streams:
codec = video["codec_name"]
fr = video.get("avg_frame_rate", 0)
width = video.get("width", 0)
@ -257,24 +257,24 @@ def buildCommands(file, newDir, movieName, bitbucket):
used_audio = 0
a_mapped = []
commentary = []
if audioStreams:
for i, val in reversed(list(enumerate(audioStreams))):
if audio_streams:
for i, val in reversed(list(enumerate(audio_streams))):
try:
if "Commentary" in val.get("tags").get("title"): # Split out commentry tracks.
commentary.append(val)
del audioStreams[i]
del audio_streams[i]
except:
continue
try:
audio1 = [item for item in audioStreams if item["tags"]["language"] == core.ALANGUAGE]
audio1 = [item for item in audio_streams if item["tags"]["language"] == core.ALANGUAGE]
except: # no language tags. Assume only 1 language.
audio1 = audioStreams
audio1 = audio_streams
try:
audio2 = [item for item in audio1 if item["codec_name"] in core.ACODEC_ALLOW]
except:
audio2 = []
try:
audio3 = [item for item in audioStreams if item["tags"]["language"] != core.ALANGUAGE]
audio3 = [item for item in audio_streams if item["tags"]["language"] != core.ALANGUAGE]
except:
audio3 = []
try:
@ -384,8 +384,8 @@ def buildCommands(file, newDir, movieName, bitbucket):
audio_cmd.extend(audio_cmd2)
if core.AINCLUDE and core.ACODEC3:
audioStreams.extend(commentary) #add commentry tracks back here.
for audio in audioStreams:
audio_streams.extend(commentary)  # add commentary tracks back here.
for audio in audio_streams:
if audio["index"] in a_mapped:
continue
used_audio += 1
@ -422,7 +422,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
n = 0
for lan in core.SLANGUAGES:
try:
subs1 = [item for item in subStreams if item["tags"]["language"] == lan]
subs1 = [item for item in sub_streams if item["tags"]["language"] == lan]
except:
subs1 = []
if core.BURN and not subs1 and not burnt and os.path.isfile(file):
@ -431,13 +431,13 @@ def buildCommands(file, newDir, movieName, bitbucket):
video_cmd.extend(['-vf', 'subtitles={subs}'.format(subs=subfile)])
burnt = 1
for sub in subs1:
if core.BURN and not burnt and os.path.isfile(inputFile):
if core.BURN and not burnt and os.path.isfile(input_file):
subloc = 0
for index in range(len(subStreams)):
if subStreams[index]["index"] == sub["index"]:
for index in range(len(sub_streams)):
if sub_streams[index]["index"] == sub["index"]:
subloc = index
break
video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=inputFile, loc=subloc)])
video_cmd.extend(['-vf', 'subtitles={sub}:si={loc}'.format(sub=input_file, loc=subloc)])
burnt = 1
if not core.ALLOWSUBS:
break
@ -447,7 +447,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
s_mapped.extend([sub["index"]])
if core.SINCLUDE:
for sub in subStreams:
for sub in sub_streams:
if not core.ALLOWSUBS:
break
if sub["index"] in s_mapped:
@ -467,11 +467,11 @@ def buildCommands(file, newDir, movieName, bitbucket):
if core.GENERALOPTS:
command.extend(core.GENERALOPTS)
command.extend(['-i', inputFile])
command.extend(['-i', input_file])
if core.SEMBED and os.path.isfile(file):
for subfile in get_subs(file):
sub_details, result = getVideoDetails(subfile)
sub_details, result = get_video_details(subfile)
if not sub_details or not sub_details.get("streams"):
continue
if core.SCODEC == "mov_text":
@ -509,7 +509,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
command.extend(sub_cmd)
command.extend(meta_cmd)
command.extend(other_cmd)
command.append(newfilePath)
command.append(newfile_path)
if platform.system() != 'Windows':
command = core.NICENESS + command
return command
@ -517,52 +517,52 @@ def buildCommands(file, newDir, movieName, bitbucket):
def get_subs(file):
filepaths = []
subExt = ['.srt', '.sub', '.idx']
sub_ext = ['.srt', '.sub', '.idx']
name = os.path.splitext(os.path.split(file)[1])[0]
dir = os.path.split(file)[0]
for dirname, dirs, filenames in os.walk(dir):
for filename in filenames:
filepaths.extend([os.path.join(dirname, filename)])
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in subExt and name in item]
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in sub_ext and name in item]
return subfiles
def extract_subs(file, newfilePath, bitbucket):
video_details, result = getVideoDetails(file)
def extract_subs(file, newfile_path, bitbucket):
video_details, result = get_video_details(file)
if not video_details:
return
if core.SUBSDIR:
subdir = core.SUBSDIR
else:
subdir = os.path.split(newfilePath)[0]
name = os.path.splitext(os.path.split(newfilePath)[1])[0]
subdir = os.path.split(newfile_path)[0]
name = os.path.splitext(os.path.split(newfile_path)[1])[0]
try:
subStreams = [item for item in video_details["streams"] if
sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["tags"]["language"] in core.SLANGUAGES and item[
"codec_name"] != "hdmv_pgs_subtitle" and item["codec_name"] != "pgssub"]
except:
subStreams = [item for item in video_details["streams"] if
sub_streams = [item for item in video_details["streams"] if
item["codec_type"] == "subtitle" and item["codec_name"] != "hdmv_pgs_subtitle" and item[
"codec_name"] != "pgssub"]
num = len(subStreams)
num = len(sub_streams)
for n in range(num):
sub = subStreams[n]
sub = sub_streams[n]
idx = sub["index"]
lan = sub.get("tags", {}).get("language", "unk")
if num == 1:
outputFile = os.path.join(subdir, "{0}.srt".format(name))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, n))
output_file = os.path.join(subdir, "{0}.srt".format(name))
if os.path.isfile(output_file):
output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, n))
else:
outputFile = os.path.join(subdir, "{0}.{1}.srt".format(name, lan))
if os.path.isfile(outputFile):
outputFile = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n))
output_file = os.path.join(subdir, "{0}.{1}.srt".format(name, lan))
if os.path.isfile(output_file):
output_file = os.path.join(subdir, "{0}.{1}.{2}.srt".format(name, lan, n))
command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an',
'-codec:{index}'.format(index=idx), 'srt', outputFile]
'-codec:{index}'.format(index=idx), 'srt', output_file]
if platform.system() != 'Windows':
command = core.NICENESS + command
@ -578,7 +578,7 @@ def extract_subs(file, newfilePath, bitbucket):
if result == 0:
try:
shutil.copymode(file, outputFile)
shutil.copymode(file, output_file)
except:
pass
logger.info("Extracting {0} subtitle from {1} has succeeded".format(lan, file))
@ -586,77 +586,77 @@ def extract_subs(file, newfilePath, bitbucket):
logger.error("Extracting subtitles has failed")
def processList(List, newDir, bitbucket):
remList = []
newList = []
def process_list(it, new_dir, bitbucket):
rem_list = []
new_list = []
combine = []
vtsPath = None
vts_path = None
success = True
for item in List:
for item in it:
ext = os.path.splitext(item)[1].lower()
if ext in ['.iso', '.bin', '.img'] and ext not in core.IGNOREEXTENSIONS:
logger.debug("Attempting to rip disk image: {0}".format(item), "TRANSCODER")
newList.extend(ripISO(item, newDir, bitbucket))
remList.append(item)
new_list.extend(rip_iso(item, new_dir, bitbucket))
rem_list.append(item)
elif re.match(".+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", item) and '.vob' not in core.IGNOREEXTENSIONS:
logger.debug("Found VIDEO_TS image file: {0}".format(item), "TRANSCODER")
if not vtsPath:
if not vts_path:
try:
vtsPath = re.match("(.+VIDEO_TS)", item).groups()[0]
vts_path = re.match("(.+VIDEO_TS)", item).groups()[0]
except:
vtsPath = os.path.split(item)[0]
remList.append(item)
vts_path = os.path.split(item)[0]
rem_list.append(item)
elif re.match(".+VIDEO_TS.", item) or re.match(".+VTS_[0-9][0-9]_[0-9].", item):
remList.append(item)
rem_list.append(item)
elif core.CONCAT and re.match(".+[cC][dD][0-9].", item):
remList.append(item)
rem_list.append(item)
combine.append(item)
else:
continue
if vtsPath:
newList.extend(combineVTS(vtsPath))
if vts_path:
new_list.extend(combine_vts(vts_path))
if combine:
newList.extend(combineCD(combine))
for file in newList:
new_list.extend(combine_cd(combine))
for file in new_list:
if isinstance(file, string_types) and 'concat:' not in file and not os.path.isfile(file):
success = False
break
if success and newList:
List.extend(newList)
for item in remList:
List.remove(item)
logger.debug("Successfully extracted .vob file {0} from disk image".format(newList[0]), "TRANSCODER")
elif newList and not success:
newList = []
remList = []
if success and new_list:
it.extend(new_list)
for item in rem_list:
it.remove(item)
logger.debug("Successfully extracted .vob file {0} from disk image".format(new_list[0]), "TRANSCODER")
elif new_list and not success:
new_list = []
rem_list = []
logger.error("Failed extracting .vob files from disk image. Stopping transcoding.", "TRANSCODER")
return List, remList, newList, success
return it, rem_list, new_list, success
def ripISO(item, newDir, bitbucket):
newFiles = []
def rip_iso(item, new_dir, bitbucket):
new_files = []
failure_dir = 'failure'
# Mount the ISO in your OS and call combineVTS.
if not core.SEVENZIP:
logger.error("No 7zip installed. Can't extract image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir]
return newFiles
new_files = [failure_dir]
return new_files
cmd = [core.SEVENZIP, 'l', item]
try:
logger.debug("Attempting to extract .vob from image file {0}".format(item), "TRANSCODER")
print_cmd(cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
out, err = proc.communicate()
fileList = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in
file_list = [re.match(".+(VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])", line).groups()[0] for line in
out.splitlines() if re.match(".+VIDEO_TS[\\\/]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]", line)]
combined = []
for n in range(99):
concat = []
m = 1
while True:
vtsName = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m)
if vtsName in fileList:
concat.append(vtsName)
vts_name = 'VIDEO_TS{0}VTS_{1:02d}_{2:d}.VOB'.format(os.sep, n + 1, m)
if vts_name in file_list:
concat.append(vts_name)
m += 1
else:
break
@ -668,29 +668,29 @@ def ripISO(item, newDir, bitbucket):
name = '{name}.cd{x}'.format(
name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1
)
newFiles.append({item: {'name': name, 'files': concat}})
new_files.append({item: {'name': name, 'files': concat}})
if core.CONCAT:
name = os.path.splitext(os.path.split(item)[1])[0]
newFiles.append({item: {'name': name, 'files': combined}})
if not newFiles:
new_files.append({item: {'name': name, 'files': combined}})
if not new_files:
logger.error("No VIDEO_TS folder found in image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir]
new_files = [failure_dir]
except:
logger.error("Failed to extract from image file {0}".format(item), "TRANSCODER")
newFiles = [failure_dir]
return newFiles
new_files = [failure_dir]
return new_files
def combineVTS(vtsPath):
newFiles = []
def combine_vts(vts_path):
new_files = []
combined = ''
for n in range(99):
concat = ''
m = 1
while True:
vtsName = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
if os.path.isfile(os.path.join(vtsPath, vtsName)):
concat += '{file}|'.format(file=os.path.join(vtsPath, vtsName))
vts_name = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
if os.path.isfile(os.path.join(vts_path, vts_name)):
concat += '{file}|'.format(file=os.path.join(vts_path, vts_name))
m += 1
else:
break
@ -699,14 +699,14 @@ def combineVTS(vtsPath):
if core.CONCAT:
combined += '{files}|'.format(files=concat)
continue
newFiles.append('concat:{0}'.format(concat[:-1]))
new_files.append('concat:{0}'.format(concat[:-1]))
if core.CONCAT:
newFiles.append('concat:{0}'.format(combined[:-1]))
return newFiles
new_files.append('concat:{0}'.format(combined[:-1]))
return new_files
def combineCD(combine):
newFiles = []
def combine_cd(combine):
new_files = []
for item in set([re.match("(.+)[cC][dD][0-9].", item).groups()[0] for item in combine]):
concat = ''
for n in range(99):
@ -717,8 +717,8 @@ def combineCD(combine):
else:
break
if concat:
newFiles.append('concat:{0}'.format(concat[:-1]))
return newFiles
new_files.append('concat:{0}'.format(concat[:-1]))
return new_files
def print_cmd(command):
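
For context on what process_list(), rip_iso(), combine_vts() and combine_cd() hand back: they build ffmpeg "concat:" protocol inputs rather than real files. A hedged illustration with invented paths:

# Invented example of the string shape returned by combine_vts()/combine_cd();
# build_commands() passes it to ffmpeg unchanged as the '-i' input.
vob_files = ['/dvd/VIDEO_TS/VTS_01_1.VOB', '/dvd/VIDEO_TS/VTS_01_2.VOB']
concat_input = 'concat:' + '|'.join(vob_files)
print(concat_input)  # concat:/dvd/VIDEO_TS/VTS_01_1.VOB|/dvd/VIDEO_TS/VTS_01_2.VOB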
@ -728,49 +728,49 @@ def print_cmd(command):
logger.debug("calling command:{0}".format(cmd))
def Transcode_directory(dirName):
def transcode_directory(dir_name):
if not core.FFMPEG:
return 1, dirName
return 1, dir_name
logger.info("Checking for files to be transcoded")
final_result = 0 # initialize as successful
if core.OUTPUTVIDEOPATH:
newDir = core.OUTPUTVIDEOPATH
makeDir(newDir)
name = os.path.splitext(os.path.split(dirName)[1])[0]
newDir = os.path.join(newDir, name)
makeDir(newDir)
new_dir = core.OUTPUTVIDEOPATH
make_dir(new_dir)
name = os.path.splitext(os.path.split(dir_name)[1])[0]
new_dir = os.path.join(new_dir, name)
make_dir(new_dir)
else:
newDir = dirName
new_dir = dir_name
if platform.system() == 'Windows':
bitbucket = open('NUL')
else:
bitbucket = open('/dev/null')
movieName = os.path.splitext(os.path.split(dirName)[1])[0]
List = core.listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False)
List, remList, newList, success = processList(List, newDir, bitbucket)
movie_name = os.path.splitext(os.path.split(dir_name)[1])[0]
file_list = core.list_media_files(dir_name, media=True, audio=False, meta=False, archives=False)
file_list, rem_list, new_list, success = process_list(file_list, new_dir, bitbucket)
if not success:
bitbucket.close()
return 1, dirName
return 1, dir_name
for file in List:
for file in file_list:
if isinstance(file, string_types) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS:
continue
command = buildCommands(file, newDir, movieName, bitbucket)
newfilePath = command[-1]
command = build_commands(file, new_dir, movie_name, bitbucket)
newfile_path = command[-1]
# transcoding files may remove the original file, so make sure to extract subtitles first
if core.SEXTRACT and isinstance(file, string_types):
extract_subs(file, newfilePath, bitbucket)
extract_subs(file, newfile_path, bitbucket)
try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)
os.remove(newfilePath)
os.remove(newfile_path)
except OSError as e:
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
logger.debug("Error when removing transcoding target: {0}".format(e))
except Exception as e:
logger.debug("Error when removing transcoding target: {0}".format(e))
logger.info("Transcoding video: {0}".format(newfilePath))
logger.info("Transcoding video: {0}".format(newfile_path))
print_cmd(command)
result = 1 # set result to failed in case call fails.
try:
@ -787,42 +787,42 @@ def Transcode_directory(dirName):
proc.communicate()
result = proc.returncode
except:
logger.error("Transcoding of video {0} has failed".format(newfilePath))
logger.error("Transcoding of video {0} has failed".format(newfile_path))
if core.SUBSDIR and result == 0 and isinstance(file, string_types):
for sub in get_subs(file):
name = os.path.splitext(os.path.split(file)[1])[0]
subname = os.path.split(sub)[1]
newname = os.path.splitext(os.path.split(newfilePath)[1])[0]
newname = os.path.splitext(os.path.split(newfile_path)[1])[0]
newpath = os.path.join(core.SUBSDIR, subname.replace(name, newname))
if not os.path.isfile(newpath):
os.rename(sub, newpath)
if result == 0:
try:
shutil.copymode(file, newfilePath)
shutil.copymode(file, newfile_path)
except:
pass
logger.info("Transcoding of video to {0} succeeded".format(newfilePath))
if os.path.isfile(newfilePath) and (file in newList or not core.DUPLICATE):
logger.info("Transcoding of video to {0} succeeded".format(newfile_path))
if os.path.isfile(newfile_path) and (file in new_list or not core.DUPLICATE):
try:
os.unlink(file)
except:
pass
else:
logger.error("Transcoding of video to {0} failed with result {1}".format(newfilePath, result))
logger.error("Transcoding of video to {0} failed with result {1}".format(newfile_path, result))
# this will be 0 (successful) if all are successful, else will return a positive integer for failure.
final_result = final_result + result
if final_result == 0 and not core.DUPLICATE:
for file in remList:
for file in rem_list:
try:
os.unlink(file)
except:
pass
if not os.listdir(text_type(newDir)): # this is an empty directory and we didn't transcode into it.
os.rmdir(newDir)
newDir = dirName
if not os.listdir(text_type(new_dir)): # this is an empty directory and we didn't transcode into it.
os.rmdir(new_dir)
new_dir = dir_name
if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB
newDir = dirName
new_dir = dir_name
bitbucket.close()
return final_result, newDir
return final_result, new_dir
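
A minimal sketch of how the renamed transcoder entry points are driven, assuming core.initialize() has been run and FFmpeg/ffprobe are configured; the paths are invented:

import core
from core.transcoder import transcoder

core.initialize()
video = '/downloads/movies/Example.Movie.2018.mkv'  # invented path
# Corruption check with the renamed helper, then transcode the whole folder.
if transcoder.is_video_good(video, 0):
    result, new_dir = transcoder.transcode_directory('/downloads/movies')  # invented path
    print(result, new_dir)  # 0 on success, plus the directory holding the output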


@ -629,13 +629,13 @@ import sys
import core
from core import logger, nzbToMediaDB
from core.autoProcess.autoProcessComics import autoProcessComics
from core.autoProcess.autoProcessGames import autoProcessGames
from core.autoProcess.autoProcessMovie import autoProcessMovie
from core.autoProcess.autoProcessMusic import autoProcessMusic
from core.autoProcess.autoProcessTV import autoProcessTV
from core.autoProcess.autoProcessComics import Comic
from core.autoProcess.autoProcessGames import Game
from core.autoProcess.autoProcessMovie import Movie
from core.autoProcess.autoProcessMusic import Music
from core.autoProcess.autoProcessTV import TV
from core.nzbToMediaUserScript import external_script
from core.nzbToMediaUtil import CharReplace, cleanDir, convert_to_ascii, extractFiles, getDirs, get_downloadInfo, get_nzoid, plex_update, update_downloadInfoStatus
from core.nzbToMediaUtil import char_replace, clean_dir, convert_to_ascii, extract_files, get_dirs, get_download_info, get_nzoid, plex_update, update_download_info_status
try:
text_type = unicode
@ -644,51 +644,51 @@ except NameError:
# post-processing
def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None, failureLink=None):
if core.SAFE_MODE and inputDirectory == core.NZB_DEFAULTDIR:
def process(input_directory, input_name=None, status=0, client_agent='manual', download_id=None, input_category=None, failure_link=None):
if core.SAFE_MODE and input_directory == core.NZB_DEFAULTDIR:
logger.error(
'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format(
inputDirectory))
input_directory))
return [-1, ""]
if not download_id and clientAgent == 'sabnzbd':
download_id = get_nzoid(inputName)
if not download_id and client_agent == 'sabnzbd':
download_id = get_nzoid(input_name)
if clientAgent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding NZB download info for directory {0} to database'.format(inputDirectory))
if client_agent != 'manual' and not core.DOWNLOADINFO:
logger.debug('Adding NZB download info for directory {0} to database'.format(input_directory))
myDB = nzbToMediaDB.DBConnection()
my_db = nzbToMediaDB.DBConnection()
inputDirectory1 = inputDirectory
inputName1 = inputName
input_directory1 = input_directory
input_name1 = input_name
try:
encoded, inputDirectory1 = CharReplace(inputDirectory)
encoded, inputName1 = CharReplace(inputName)
encoded, input_directory1 = char_replace(input_directory)
encoded, input_name1 = char_replace(input_name)
except:
pass
controlValueDict = {"input_directory": text_type(inputDirectory1)}
newValueDict = {"input_name": text_type(inputName1),
control_value_dict = {"input_directory": text_type(input_directory1)}
new_value_dict = {"input_name": text_type(input_name1),
"input_hash": text_type(download_id),
"input_id": text_type(download_id),
"client_agent": text_type(clientAgent),
"client_agent": text_type(client_agent),
"status": 0,
"last_update": datetime.date.today().toordinal()
}
myDB.upsert("downloads", newValueDict, controlValueDict)
my_db.upsert("downloads", new_value_dict, control_value_dict)
# auto-detect section
if inputCategory is None:
inputCategory = 'UNCAT'
usercat = inputCategory
section = core.CFG.findsection(inputCategory).isenabled()
if input_category is None:
input_category = 'UNCAT'
usercat = input_category
section = core.CFG.findsection(input_category).isenabled()
if section is None:
section = core.CFG.findsection("ALL").isenabled()
if section is None:
logger.error(
'Category:[{0}] is not defined or is not enabled. Please rename it or ensure it is enabled for the appropriate section in your autoProcessMedia.cfg and try again.'.format(
inputCategory))
input_category))
return [-1, ""]
else:
usercat = "ALL"
@ -696,65 +696,65 @@ def process(inputDirectory, inputName=None, status=0, clientAgent='manual', down
if len(section) > 1:
logger.error(
'Category:[{0}] is not unique, {1} are using it. Please rename it or disable all other sections using the same category name in your autoProcessMedia.cfg and try again.'.format(
inputCategory, section.keys()))
input_category, section.keys()))
return [-1, ""]
if section:
sectionName = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(sectionName))
section_name = section.keys()[0]
logger.info('Auto-detected SECTION:{0}'.format(section_name))
else:
logger.error("Unable to locate a section with subsection:{0} enabled in your autoProcessMedia.cfg, exiting!".format(
inputCategory))
input_category))
return [-1, ""]
cfg = dict(core.CFG[sectionName][usercat])
cfg = dict(core.CFG[section_name][usercat])
extract = int(cfg.get("extract", 0))
try:
if int(cfg.get("remote_path")) and not core.REMOTEPATHS:
logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format(
sectionName, inputCategory))
section_name, input_category))
return [-1, ""]
except:
logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format(
core.get("remote_path"), sectionName, inputCategory))
core.get("remote_path"), section_name, input_category))
inputName, inputDirectory = convert_to_ascii(inputName, inputDirectory)
input_name, input_directory = convert_to_ascii(input_name, input_directory)
if extract == 1:
logger.debug('Checking for archives to extract in directory: {0}'.format(inputDirectory))
extractFiles(inputDirectory)
logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
extract_files(input_directory)
logger.info("Calling {0}:{1} to post-process:{2}".format(sectionName, inputCategory, inputName))
logger.info("Calling {0}:{1} to post-process:{2}".format(section_name, input_category, input_name))
if sectionName in ["CouchPotato", "Radarr"]:
result = autoProcessMovie().process(sectionName, inputDirectory, inputName, status, clientAgent, download_id,
inputCategory, failureLink)
elif sectionName in ["SickBeard", "NzbDrone", "Sonarr"]:
result = autoProcessTV().processEpisode(sectionName, inputDirectory, inputName, status, clientAgent,
download_id, inputCategory, failureLink)
elif sectionName in ["HeadPhones", "Lidarr"]:
result = autoProcessMusic().process(sectionName, inputDirectory, inputName, status, clientAgent, inputCategory)
elif sectionName == "Mylar":
result = autoProcessComics().processEpisode(sectionName, inputDirectory, inputName, status, clientAgent,
inputCategory)
elif sectionName == "Gamez":
result = autoProcessGames().process(sectionName, inputDirectory, inputName, status, clientAgent, inputCategory)
elif sectionName == 'UserScript':
result = external_script(inputDirectory, inputName, inputCategory, section[usercat])
if section_name in ["CouchPotato", "Radarr"]:
result = Movie().process(section_name, input_directory, input_name, status, client_agent, download_id,
input_category, failure_link)
elif section_name in ["SickBeard", "NzbDrone", "Sonarr"]:
result = TV().process_episode(section_name, input_directory, input_name, status, client_agent,
download_id, input_category, failure_link)
elif section_name in ["HeadPhones", "Lidarr"]:
result = Music().process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == "Mylar":
result = Comic().process_episode(section_name, input_directory, input_name, status, client_agent,
input_category)
elif section_name == "Gamez":
result = Game().process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == 'UserScript':
result = external_script(input_directory, input_name, input_category, section[usercat])
else:
result = [-1, ""]
plex_update(inputCategory)
plex_update(input_category)
if result[0] == 0:
if clientAgent != 'manual':
if client_agent != 'manual':
# update download status in our DB
update_downloadInfoStatus(inputName, 1)
if sectionName not in ['UserScript', 'NzbDrone', 'Sonarr', 'Radarr', 'Lidarr']:
update_download_info_status(input_name, 1)
if section_name not in ['UserScript', 'NzbDrone', 'Sonarr', 'Radarr', 'Lidarr']:
# cleanup our processing folders of any misc unwanted files and empty directories
cleanDir(inputDirectory, sectionName, inputCategory)
clean_dir(input_directory, section_name, input_category)
return result
@ -816,7 +816,7 @@ def main(args, section=None):
# Check for download_id to pass to CouchPotato
download_id = ""
failureLink = None
failure_link = None
if 'NZBPR_COUCHPOTATO' in os.environ:
download_id = os.environ['NZBPR_COUCHPOTATO']
elif 'NZBPR_DRONE' in os.environ:
@ -828,13 +828,13 @@ def main(args, section=None):
elif 'NZBPR_LIDARR' in os.environ:
download_id = os.environ['NZBPR_LIDARR']
if 'NZBPR__DNZB_FAILURE' in os.environ:
failureLink = os.environ['NZBPR__DNZB_FAILURE']
failure_link = os.environ['NZBPR__DNZB_FAILURE']
# All checks done, now launching the script.
clientAgent = 'nzbget'
result = process(os.environ['NZBPP_DIRECTORY'], inputName=os.environ['NZBPP_NZBNAME'], status=status,
clientAgent=clientAgent, download_id=download_id, inputCategory=os.environ['NZBPP_CATEGORY'],
failureLink=failureLink)
client_agent = 'nzbget'
result = process(os.environ['NZBPP_DIRECTORY'], input_name=os.environ['NZBPP_NZBNAME'], status=status,
client_agent=client_agent, download_id=download_id, input_category=os.environ['NZBPP_CATEGORY'],
failure_link=failure_link)
# SABnzbd Pre 0.7.17
elif len(args) == core.SABNZB_NO_OF_ARGUMENTS:
# SABnzbd argv:
@ -845,9 +845,9 @@ def main(args, section=None):
# 5 User-defined category
# 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
clientAgent = 'sabnzbd'
client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd")
result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent,
result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent,
download_id='')
# SABnzbd 0.7.17+
elif len(args) >= core.SABNZB_0717_NO_OF_ARGUMENTS:
@ -860,14 +860,14 @@ def main(args, section=None):
# 6 Group that the NZB was posted in e.g. alt.binaries.x
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
# 8 Failure URL
clientAgent = 'sabnzbd'
client_agent = 'sabnzbd'
logger.info("Script triggered from SABnzbd 0.7.17+")
result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent,
download_id='', failureLink=''.join(args[8:]))
result = process(args[1], input_name=args[2], status=args[7], input_category=args[5], client_agent=client_agent,
download_id='', failure_link=''.join(args[8:]))
# Generic program
elif len(args) > 5 and args[5] == 'generic':
logger.info("Script triggered from generic program")
result = process(args[1], inputName=args[2], inputCategory=args[3], download_id=args[4])
result = process(args[1], input_name=args[2], input_category=args[3], download_id=args[4])
else:
# Perform Manual Post-Processing
logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")
@ -876,39 +876,39 @@ def main(args, section=None):
for subsection in subsections:
if not core.CFG[section][subsection].isenabled():
continue
for dirName in getDirs(section, subsection, link='move'):
logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dirName))
logger.info("Checking database for download info for {0} ...".format(os.path.basename(dirName)))
for dir_name in get_dirs(section, subsection, link='move'):
logger.info("Starting manual run for {0}:{1} - Folder: {2}".format(section, subsection, dir_name))
logger.info("Checking database for download info for {0} ...".format(os.path.basename(dir_name)))
core.DOWNLOADINFO = get_downloadInfo(os.path.basename(dirName), 0)
core.DOWNLOADINFO = get_download_info(os.path.basename(dir_name), 0)
if core.DOWNLOADINFO:
logger.info("Found download info for {0}, "
"setting variables now ...".format
(os.path.basename(dirName)))
clientAgent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
(os.path.basename(dir_name)))
client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
download_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
else:
logger.info('Unable to locate download info for {0}, '
'continuing to try and process this release ...'.format
(os.path.basename(dirName)))
clientAgent = 'manual'
(os.path.basename(dir_name)))
client_agent = 'manual'
download_id = ''
if clientAgent and clientAgent.lower() not in core.NZB_CLIENTS:
if client_agent and client_agent.lower() not in core.NZB_CLIENTS:
continue
try:
dirName = dirName.encode(core.SYS_ENCODING)
dir_name = dir_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
inputName = os.path.basename(dirName)
input_name = os.path.basename(dir_name)
try:
inputName = inputName.encode(core.SYS_ENCODING)
input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
results = process(dirName, inputName, 0, clientAgent=clientAgent,
download_id=download_id or None, inputCategory=subsection)
results = process(dir_name, input_name, 0, client_agent=client_agent,
download_id=download_id or None, input_category=subsection)
if results[0] != 0:
logger.error("A problem was reported when trying to perform a manual run for {0}:{1}.".format
(section, subsection))
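
To make the SABnzbd argument mapping in main() easier to follow, a hedged illustration with invented values (assumes core.initialize() has run and the nzbToMedia module is importable):

from nzbToMedia import process  # assumption: run from the script's own directory

# Hypothetical SABnzbd 0.7.17+ argv: indices 1, 2, 5, 7 and 8 feed process().
args = ['nzbToMedia.py',
        '/downloads/complete/Example.NZB',   # 1: final job directory
        'Example.NZB.nzb',                   # 2: original NZB name
        '', '',                              # 3-4: clean job name, report number (unused here)
        'movies',                            # 5: user-defined category
        'alt.binaries.example',              # 6: group
        '0',                                 # 7: post-processing status
        'https://indexer.example/failure']   # 8: failure URL
result = process(args[1], input_name=args[2], status=args[7],
                 input_category=args[5], client_agent='sabnzbd',
                 download_id='', failure_link=''.join(args[8:]))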


@ -5,7 +5,7 @@ import guessit
import requests
import core
from core.nzbToMediaAutoFork import autoFork
from core.nzbToMediaAutoFork import auto_fork
from core.nzbToMediaUtil import server_responding
from core.transcoder import transcoder
@ -15,7 +15,7 @@ core.initialize()
#label = core.TORRENT_CLASS.core.get_torrent_status("f33a9c4b15cbd9170722d700069af86746817ade", ["label"]).get()['label']
#print label
if transcoder.isVideoGood(core.TEST_FILE, 0):
if transcoder.is_video_good(core.TEST_FILE, 0):
print("FFPROBE Works")
else:
print("FFPROBE FAILED")
@ -25,7 +25,7 @@ print(test)
section = core.CFG.findsection('tv').isenabled()
print(section)
print(len(section))
fork, fork_params = autoFork('SickBeard', 'tv')
fork, fork_params = auto_fork('SickBeard', 'tv')
if server_responding("http://127.0.0.1:5050"):
print("CouchPotato Running")