Mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-21 05:43:16 -07:00)

Commit cedd0c1a20: Merge branch 'dev'
24 changed files with 767 additions and 259 deletions
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 12.0.7
+current_version = 12.0.8
 commit = True
 tag = False
@@ -1,4 +1,4 @@
-nzbToMedia v12.0.7
+nzbToMedia v12.0.8
 ==================

 Provides an [efficient](https://github.com/clinton-hall/nzbToMedia/wiki/Efficient-on-demand-post-processing) way to handle postprocessing for [CouchPotatoServer](https://couchpota.to/ "CouchPotatoServer") and [SickBeard](http://sickbeard.com/ "SickBeard") (and its [forks](https://github.com/clinton-hall/nzbToMedia/wiki/Failed-Download-Handling-%28FDH%29#sick-beard-and-its-forks))
@@ -1,6 +1,9 @@
 #!/usr/bin/env python
 # coding=utf-8

+import eol
+eol.check()
+
 import cleanup
 cleanup.clean(cleanup.FOLDER_STRUCTURE)
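The entry script now runs eol.check() before importing anything heavy, so an interpreter that has reached end of life aborts immediately with a clear message. A minimal sketch of that kind of gate, assuming a hypothetical table of end-of-life dates (the project's eol module keeps its own data and API):

import datetime
import sys

# Hypothetical end-of-life dates; the real eol module maintains its own table.
PYTHON_EOL = {
    (2, 7): datetime.date(2020, 1, 1),
    (3, 4): datetime.date(2019, 3, 18),
}


def check(version=None, today=None):
    """Exit if the running Python version is past its end-of-life date."""
    version = tuple(version or sys.version_info[:2])
    today = today or datetime.date.today()
    eol_date = PYTHON_EOL.get(version)
    if eol_date is not None and today >= eol_date:
        sys.exit(
            'Python {0}.{1} reached end of life on {2}; please upgrade.'.format(
                version[0], version[1], eol_date,
            )
        )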
@@ -22,7 +25,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     root = 0
     found_file = 0

-    if client_agent != 'manual' and not core.DOWNLOADINFO:
+    if client_agent != 'manual' and not core.DOWNLOAD_INFO:
         logger.debug('Adding TORRENT download info for directory {0} to database'.format(input_directory))

         my_db = main_db.DBConnection()
@@ -114,13 +117,13 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
         basename = os.path.basename(input_directory)
         basename = core.sanitize_name(input_name) \
             if input_name == basename else os.path.splitext(core.sanitize_name(input_name))[0]
-        output_destination = os.path.join(core.OUTPUTDIRECTORY, input_category, basename)
+        output_destination = os.path.join(core.OUTPUT_DIRECTORY, input_category, basename)
     elif unique_path:
         output_destination = os.path.normpath(
-            core.os.path.join(core.OUTPUTDIRECTORY, input_category, core.sanitize_name(input_name).replace(' ', '.')))
+            core.os.path.join(core.OUTPUT_DIRECTORY, input_category, core.sanitize_name(input_name).replace(' ', '.')))
     else:
         output_destination = os.path.normpath(
-            core.os.path.join(core.OUTPUTDIRECTORY, input_category))
+            core.os.path.join(core.OUTPUT_DIRECTORY, input_category))
     try:
         output_destination = output_destination.encode(core.SYS_ENCODING)
     except UnicodeError:
@@ -131,7 +134,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp

     logger.info('Output directory set to: {0}'.format(output_destination))

-    if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULTDIR:
+    if core.SAFE_MODE and output_destination == core.TORRENT_DEFAULT_DIRECTORY:
         logger.error('The output directory:[{0}] is the Download Directory. '
                      'Edit outputDirectory in autoProcessMedia.cfg. Exiting'.format
                      (input_directory))
@@ -196,7 +199,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp

             if torrent_no_link == 0:
                 try:
-                    core.copy_link(inputFile, target_file, core.USELINK)
+                    core.copy_link(inputFile, target_file, core.USE_LINK)
                     core.remove_read_only(target_file)
                 except Exception:
                     logger.error('Failed to link: {0} to {1}'.format(inputFile, target_file))
@@ -270,7 +273,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
            core.update_download_info_status(input_name, 1)

    # remove torrent
-   if core.USELINK == 'move-sym' and not core.DELETE_ORIGINAL == 1:
+   if core.USE_LINK == 'move-sym' and not core.DELETE_ORIGINAL == 1:
        logger.debug('Checking for sym-links to re-direct in: {0}'.format(input_directory))
        for dirpath, dirs, files in os.walk(input_directory):
            for file in files:
@@ -291,7 +294,7 @@ def main(args):
    core.initialize()

    # clientAgent for Torrents
-   client_agent = core.TORRENT_CLIENTAGENT
+   client_agent = core.TORRENT_CLIENT_AGENT

    logger.info('#########################################################')
    logger.info('## ..::[{0}]::.. ##'.format(os.path.basename(__file__)))
@@ -328,11 +331,11 @@ def main(args):

            logger.info('Checking database for download info for {0} ...'.format
                        (os.path.basename(dir_name)))
-           core.DOWNLOADINFO = core.get_download_info(os.path.basename(dir_name), 0)
-           if core.DOWNLOADINFO:
-               client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
-               input_hash = text_type(core.DOWNLOADINFO[0].get('input_hash', ''))
-               input_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
+           core.DOWNLOAD_INFO = core.get_download_info(os.path.basename(dir_name), 0)
+           if core.DOWNLOAD_INFO:
+               client_agent = text_type(core.DOWNLOAD_INFO[0].get('client_agent', 'manual'))
+               input_hash = text_type(core.DOWNLOAD_INFO[0].get('input_hash', ''))
+               input_id = text_type(core.DOWNLOAD_INFO[0].get('input_id', ''))
                logger.info('Found download info for {0}, '
                            'setting variables now ...'.format(os.path.basename(dir_name)))
            else:
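core.DOWNLOAD_INFO (renamed from DOWNLOADINFO) holds the rows previously stored for a download: a list of dicts with client_agent, input_hash and input_id keys. A small sketch of the same lookup-with-fallback pattern, using a stand-in helper instead of core.get_download_info:

def resolve_download_info(download_info):
    """Pick client agent, hash and id from stored rows, defaulting to 'manual'."""
    # download_info is a list of dict rows, e.g. as returned from the database.
    if download_info:
        row = download_info[0]
        return (
            row.get('client_agent', 'manual'),
            row.get('input_hash', ''),
            row.get('input_id', ''),
        )
    return 'manual', '', ''


# Example usage with a stand-in row:
client_agent, input_hash, input_id = resolve_download_info(
    [{'client_agent': 'transmission', 'input_hash': 'abc123', 'input_id': '42'}]
)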
@@ -22,6 +22,8 @@
 log_env = 0
 # Enable/Disable logging git output to debug nzbtomedia.log (helpful to track down update failures.)
 log_git = 0
+# Set to the directory to search for executables if not in default system path
+sys_path =
 # Set to the directory where your ffmpeg/ffprobe executables are located
 ffmpeg_path =
 # Enable/Disable media file checking using ffprobe.
@@ -1,5 +1,18 @@
 Change_LOG / History

+V12.0.8
+
+Refactor and Rename Modules
+Add Medusa API
+Fix return parsing from HeadPhones
+Add Python end of life detection and reporting
+Fix Py3 return from Popen (Transcoder and executable path detection)
+Add variable sys_path to config (allows user to specify separate path for binary detection)
+Various Py3 compatibility fixes
+Log successful when returning to Radarr CDH
+Add exception handling when failing to return to original directory (due to permissions)
+Don't load Torrent Clients when calling NZB processing
+
 V12.0.7

 Refactor utils
cleanup.py (12 changed lines)
@@ -33,7 +33,17 @@ class WorkingDirectory(object):
         return self

     def __exit__(self, exc_type, exc_val, exc_tb):
-        os.chdir(self.original_directory)
+        try:
+            os.chdir(self.original_directory)
+        except OSError as error:
+            print(
+                'Unable to return to {original_directory}: {error}\n'
+                'Continuing in {working_directory}'.format(
+                    original_directory=self.original_directory,
+                    error=error,
+                    working_directory=self.working_directory,
+                )
+            )


 def module_path(module=__file__, parent=False):
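The cleanup change wraps the chdir back to the original directory in a try/except so that a permissions error or a deleted directory no longer aborts cleanup. A self-contained sketch of the same context-manager pattern (class name and message wording are illustrative, not the project's exact code):

import os


class ReturningDirectory(object):
    """Change into a working directory and try to return to the old one on exit."""

    def __init__(self, working_directory):
        self.working_directory = working_directory
        self.original_directory = os.getcwd()

    def __enter__(self):
        os.chdir(self.working_directory)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            os.chdir(self.original_directory)
        except OSError as error:
            # The original directory may have been removed or made unreadable;
            # report it and keep going instead of raising during cleanup.
            print('Unable to return to {0}: {1}'.format(self.original_directory, error))


# Usage: run a block of work inside a temporary location.
# with ReturningDirectory('/tmp'):
#     do_work()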
core/__init__.py (638 changed lines)
@@ -13,6 +13,7 @@ import time

 import libs.autoload
 import libs.util
+import eol

 if not libs.autoload.completed:
     sys.exit('Could not load vendored libraries.')
@@ -70,7 +71,7 @@ from core.utils import (
     wake_up,
 )

-__version__ = '12.0.7'
+__version__ = '12.0.8'

 # Client Agents
 NZB_CLIENTS = ['sabnzbd', 'nzbget', 'manual']
@@ -88,6 +89,7 @@ FORK_SICKRAGE = 'SickRage'
 FORK_SICKCHILL = 'SickChill'
 FORK_SICKBEARD_API = 'SickBeard-api'
 FORK_MEDUSA = 'Medusa'
+FORK_MEDUSA_API = 'Medusa-api'
 FORK_SICKGEAR = 'SickGear'
 FORK_STHENO = 'Stheno'
@@ -99,13 +101,14 @@ FORKS = {
     FORK_SICKCHILL: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'force_next': None},
     FORK_SICKBEARD_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete': None, 'force_next': None},
     FORK_MEDUSA: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'ignore_subs': None},
+    FORK_MEDUSA_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete_files': None, 'is_priority': None},
     FORK_SICKGEAR: {'dir': None, 'failed': None, 'process_method': None, 'force': None},
     FORK_STHENO: {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "ignore_subs": None}
 }
 ALL_FORKS = {k: None for k in set(list(itertools.chain.from_iterable([FORKS[x].keys() for x in FORKS.keys()])))}

 # NZBGet Exit Codes
-NZBGET_POSTPROCESS_PARCHECK = 92
+NZBGET_POSTPROCESS_PAR_CHECK = 92
 NZBGET_POSTPROCESS_SUCCESS = 93
 NZBGET_POSTPROCESS_ERROR = 94
 NZBGET_POSTPROCESS_NONE = 95
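ALL_FORKS, unchanged above, is the union of every parameter any supported fork understands; adding FORK_MEDUSA_API to FORKS therefore feeds its parameters (delete_files, is_priority) into the probe set automatically. A trimmed-down sketch of that union, with a stand-in FORKS table:

import itertools

# A reduced stand-in for the FORKS table; the real table lists every supported
# SickBeard fork and the parameters its post-processing API accepts.
FORKS = {
    'Medusa': {'proc_dir': None, 'failed': None, 'process_method': None},
    'Medusa-api': {'path': None, 'failed': None, 'process_method': None, 'delete_files': None},
    'SickGear': {'dir': None, 'failed': None, 'process_method': None, 'force': None},
}

# Union of every parameter any fork understands, used when the fork is unknown
# and the script has to probe with a superset of parameters.
ALL_FORKS = {
    key: None
    for key in set(itertools.chain.from_iterable(params.keys() for params in FORKS.values()))
}

print(sorted(ALL_FORKS))  # ['delete_files', 'dir', 'failed', 'force', 'path', 'proc_dir', 'process_method']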
@@ -131,55 +134,55 @@ FORCE_CLEAN = None
 SAFE_MODE = None
 NOEXTRACTFAILED = None

-NZB_CLIENTAGENT = None
-SABNZBDHOST = None
-SABNZBDPORT = None
-SABNZBDAPIKEY = None
-NZB_DEFAULTDIR = None
+NZB_CLIENT_AGENT = None
+SABNZBD_HOST = None
+SABNZBD_PORT = None
+SABNZBD_APIKEY = None
+NZB_DEFAULT_DIRECTORY = None

-TORRENT_CLIENTAGENT = None
+TORRENT_CLIENT_AGENT = None
 TORRENT_CLASS = None
-USELINK = None
-OUTPUTDIRECTORY = None
+USE_LINK = None
+OUTPUT_DIRECTORY = None
 NOFLATTEN = []
 DELETE_ORIGINAL = None
 TORRENT_CHMOD_DIRECTORY = None
-TORRENT_DEFAULTDIR = None
+TORRENT_DEFAULT_DIRECTORY = None
 TORRENT_RESUME = None
 TORRENT_RESUME_ON_FAILURE = None

-REMOTEPATHS = []
+REMOTE_PATHS = []

-UTORRENTWEBUI = None
-UTORRENTUSR = None
-UTORRENTPWD = None
+UTORRENT_WEB_UI = None
+UTORRENT_USER = None
+UTORRENT_PASSWORD = None

-TRANSMISSIONHOST = None
-TRANSMISSIONPORT = None
-TRANSMISSIONUSR = None
-TRANSMISSIONPWD = None
+TRANSMISSION_HOST = None
+TRANSMISSION_PORT = None
+TRANSMISSION_USER = None
+TRANSMISSION_PASSWORD = None

-DELUGEHOST = None
-DELUGEPORT = None
-DELUGEUSR = None
-DELUGEPWD = None
+DELUGE_HOST = None
+DELUGE_PORT = None
+DELUGE_USER = None
+DELUGE_PASSWORD = None

-QBITTORRENTHOST = None
-QBITTORRENTPORT = None
-QBITTORRENTUSR = None
-QBITTORRENTPWD = None
+QBITTORRENT_HOST = None
+QBITTORRENT_PORT = None
+QBITTORRENT_USER = None
+QBITTORRENT_PASSWORD = None

-PLEXSSL = None
-PLEXHOST = None
-PLEXPORT = None
-PLEXTOKEN = None
-PLEXSEC = []
+PLEX_SSL = None
+PLEX_HOST = None
+PLEX_PORT = None
+PLEX_TOKEN = None
+PLEX_SECTION = []

-EXTCONTAINER = []
-COMPRESSEDCONTAINER = []
-MEDIACONTAINER = []
-AUDIOCONTAINER = []
-METACONTAINER = []
+EXT_CONTAINER = []
+COMPRESSED_CONTAINER = []
+MEDIA_CONTAINER = []
+AUDIO_CONTAINER = []
+META_CONTAINER = []

 SECTIONS = []
 CATEGORIES = []
@@ -188,6 +191,7 @@ GETSUBS = False
 TRANSCODE = None
 CONCAT = None
 FFMPEG_PATH = None
+SYS_PATH = None
 DUPLICATE = None
 IGNOREEXTENSIONS = []
 VEXTENSION = None
@@ -236,8 +240,8 @@ CHECK_MEDIA = None
 NICENESS = []
 HWACCEL = False

-PASSWORDSFILE = None
-DOWNLOADINFO = None
+PASSWORDS_FILE = None
+DOWNLOAD_INFO = None
 GROUPS = None

 USER_SCRIPT_MEDIAEXTENSIONS = None
@@ -251,28 +255,9 @@ USER_SCRIPT_RUNONCE = None
 __INITIALIZED__ = False


-def initialize(section=None):
-    global NZBGET_POSTPROCESS_ERROR, NZBGET_POSTPROCESS_NONE, NZBGET_POSTPROCESS_PARCHECK, NZBGET_POSTPROCESS_SUCCESS, \
-        NZBTOMEDIA_TIMEOUT, FORKS, FORK_DEFAULT, FORK_FAILED_TORRENT, FORK_FAILED, NOEXTRACTFAILED, SHOWEXTRACT, \
-        NZBTOMEDIA_BRANCH, NZBTOMEDIA_VERSION, NEWEST_VERSION, NEWEST_VERSION_STRING, VERSION_NOTIFY, SYS_ARGV, CFG, \
-        SABNZB_NO_OF_ARGUMENTS, SABNZB_0717_NO_OF_ARGUMENTS, CATEGORIES, TORRENT_CLIENTAGENT, USELINK, OUTPUTDIRECTORY, \
-        NOFLATTEN, UTORRENTPWD, UTORRENTUSR, UTORRENTWEBUI, DELUGEHOST, DELUGEPORT, DELUGEUSR, DELUGEPWD, VLEVEL, \
-        TRANSMISSIONHOST, TRANSMISSIONPORT, TRANSMISSIONPWD, TRANSMISSIONUSR, COMPRESSEDCONTAINER, MEDIACONTAINER, \
-        METACONTAINER, SECTIONS, ALL_FORKS, TEST_FILE, GENERALOPTS, LOG_GIT, GROUPS, SEVENZIP, CONCAT, VCRF, \
-        __INITIALIZED__, AUTO_UPDATE, APP_FILENAME, USER_DELAY, APP_NAME, TRANSCODE, DEFAULTS, GIT_PATH, GIT_USER, \
-        GIT_BRANCH, GIT_REPO, SYS_ENCODING, NZB_CLIENTAGENT, SABNZBDHOST, SABNZBDPORT, SABNZBDAPIKEY, \
-        DUPLICATE, IGNOREEXTENSIONS, VEXTENSION, OUTPUTVIDEOPATH, PROCESSOUTPUT, VCODEC, VCODEC_ALLOW, VPRESET, \
-        VFRAMERATE, LOG_DB, VBITRATE, VRESOLUTION, ALANGUAGE, AINCLUDE, ACODEC, ACODEC_ALLOW, ABITRATE, FAILED, \
-        ACODEC2, ACODEC2_ALLOW, ABITRATE2, ACODEC3, ACODEC3_ALLOW, ABITRATE3, ALLOWSUBS, SEXTRACT, SEMBED, SLANGUAGES, \
-        SINCLUDE, SUBSDIR, SCODEC, OUTPUTFASTSTART, OUTPUTQUALITYPERCENT, BURN, GETSUBS, HWACCEL, LOG_DIR, LOG_FILE, \
-        NICENESS, LOG_DEBUG, FORCE_CLEAN, FFMPEG_PATH, FFMPEG, FFPROBE, AUDIOCONTAINER, EXTCONTAINER, TORRENT_CLASS, \
-        DELETE_ORIGINAL, TORRENT_CHMOD_DIRECTORY, PASSWORDSFILE, USER_DELAY, USER_SCRIPT, USER_SCRIPT_CLEAN, USER_SCRIPT_MEDIAEXTENSIONS, \
-        USER_SCRIPT_PARAM, USER_SCRIPT_RUNONCE, USER_SCRIPT_SUCCESSCODES, DOWNLOADINFO, CHECK_MEDIA, SAFE_MODE, \
-        TORRENT_DEFAULTDIR, TORRENT_RESUME_ON_FAILURE, NZB_DEFAULTDIR, REMOTEPATHS, LOG_ENV, PID_FILE, MYAPP, ACHANNELS, ACHANNELS2, ACHANNELS3, \
-        PLEXSSL, PLEXHOST, PLEXPORT, PLEXTOKEN, PLEXSEC, TORRENT_RESUME, PAR2CMD, QBITTORRENTHOST, QBITTORRENTPORT, QBITTORRENTUSR, QBITTORRENTPWD
-
-    if __INITIALIZED__:
-        return False
+def configure_logging():
+    global LOG_FILE
+    global LOG_DIR

     if 'NTM_LOGFILE' in os.environ:
         LOG_FILE = os.environ['NTM_LOGFILE']
@@ -281,11 +266,19 @@ def initialize(section=None):
     if not make_dir(LOG_DIR):
         print('No log folder, logging to screen only')

+
+def configure_process():
+    global MYAPP
+
     MYAPP = RunningProcess()
     while MYAPP.alreadyrunning():
         print('Waiting for existing session to end')
         time.sleep(30)


+def configure_locale():
+    global SYS_ENCODING
+
     try:
         locale.setlocale(locale.LC_ALL, '')
         SYS_ENCODING = locale.getpreferredencoding()
@@ -313,8 +306,10 @@ def initialize(section=None):
     else:
         sys.exit(1)

-    # init logging
-    logger.ntm_log_instance.init_logging()
+
+def configure_migration():
+    global CONFIG_FILE
+    global CFG

     # run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options.
     if not config.migrate():
@@ -332,9 +327,16 @@ def initialize(section=None):
     logger.info('Loading config from [{0}]'.format(CONFIG_FILE))
     CFG = config()


+def configure_logging_part_2():
+    global LOG_DB
+    global LOG_DEBUG
+    global LOG_ENV
+    global LOG_GIT
+
     # Enable/Disable DEBUG Logging
-    LOG_DEBUG = int(CFG['General']['log_debug'])
     LOG_DB = int(CFG['General']['log_db'])
+    LOG_DEBUG = int(CFG['General']['log_debug'])
     LOG_ENV = int(CFG['General']['log_env'])
     LOG_GIT = int(CFG['General']['log_git'])
@@ -342,146 +344,347 @@
         for item in os.environ:
             logger.info('{0}: {1}'.format(item, os.environ[item]), 'ENVIRONMENT')

-    # initialize the main SB database
-    main_db.upgrade_database(main_db.DBConnection(), databases.InitialSchema)
+
+def configure_general():
+    global VERSION_NOTIFY
+    global GIT_REPO
+    global GIT_PATH
+    global GIT_USER
+    global GIT_BRANCH
+    global FORCE_CLEAN
+    global FFMPEG_PATH
+    global SYS_PATH
+    global CHECK_MEDIA
+    global SAFE_MODE
+    global NOEXTRACTFAILED

     # Set Version and GIT variables
     VERSION_NOTIFY = int(CFG['General']['version_notify'])
-    AUTO_UPDATE = int(CFG['General']['auto_update'])
     GIT_REPO = 'nzbToMedia'
     GIT_PATH = CFG['General']['git_path']
     GIT_USER = CFG['General']['git_user'] or 'clinton-hall'
     GIT_BRANCH = CFG['General']['git_branch'] or 'master'
     FORCE_CLEAN = int(CFG['General']['force_clean'])
     FFMPEG_PATH = CFG['General']['ffmpeg_path']
+    SYS_PATH = CFG['General']['sys_path']
     CHECK_MEDIA = int(CFG['General']['check_media'])
     SAFE_MODE = int(CFG['General']['safe_mode'])
     NOEXTRACTFAILED = int(CFG['General']['no_extract_failed'])

+
+def configure_updates():
+    global AUTO_UPDATE
+    global MYAPP
+
+    AUTO_UPDATE = int(CFG['General']['auto_update'])
+    version_checker = version_check.CheckVersion()
+
     # Check for updates via GitHUB
-    if version_check.CheckVersion().check_for_new_version():
-        if AUTO_UPDATE == 1:
-            logger.info('Auto-Updating nzbToMedia, Please wait ...')
-            updated = version_check.CheckVersion().update()
-            if updated:
-                # restart nzbToMedia
-                try:
-                    del MYAPP
-                except Exception:
-                    pass
-                restart()
-            else:
-                logger.error('Update wasn\'t successful, not restarting. Check your log for more information.')
+    if version_checker.check_for_new_version() and AUTO_UPDATE:
+        logger.info('Auto-Updating nzbToMedia, Please wait ...')
+        if version_checker.update():
+            # restart nzbToMedia
+            try:
+                del MYAPP
+            except Exception:
+                pass
+            restart()
+        else:
+            logger.error('Update failed, not restarting. Check your log for more information.')

     # Set Current Version
     logger.info('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format
                 (version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH,
                  system=platform.system(), release=platform.release()))

-    if int(CFG['WakeOnLan']['wake']) == 1:
+
+def configure_wake_on_lan():
+    if int(CFG['WakeOnLan']['wake']):
         wake_up()
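configure_updates() now gates the whole auto-update on both a newer version being available and auto_update being enabled, and it logs an explicit error when the update itself fails instead of silently skipping the restart. A hedged sketch of that control flow with a stand-in checker object and callables:

def run_auto_update(version_checker, auto_update, logger, restart):
    """Update and restart only when a new version exists and auto-update is on."""
    if version_checker.check_for_new_version() and auto_update:
        logger.info('Auto-Updating nzbToMedia, Please wait ...')
        if version_checker.update():
            restart()  # relaunch on the freshly updated code
        else:
            logger.error('Update failed, not restarting. Check your log for more information.')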
-    NZB_CLIENTAGENT = CFG['Nzb']['clientAgent']  # sabnzbd
-    SABNZBDHOST = CFG['Nzb']['sabnzbd_host']
-    SABNZBDPORT = int(CFG['Nzb']['sabnzbd_port'] or 8080)  # defaults to accomodate NzbGet
-    SABNZBDAPIKEY = CFG['Nzb']['sabnzbd_apikey']
-    NZB_DEFAULTDIR = CFG['Nzb']['default_downloadDirectory']
+
+def configure_sabnzbd():
+    global SABNZBD_HOST
+    global SABNZBD_PORT
+    global SABNZBD_APIKEY
+
+    SABNZBD_HOST = CFG['Nzb']['sabnzbd_host']
+    SABNZBD_PORT = int(CFG['Nzb']['sabnzbd_port'] or 8080)  # defaults to accommodate NzbGet
+    SABNZBD_APIKEY = CFG['Nzb']['sabnzbd_apikey']
+
+
+def configure_nzbs():
+    global NZB_CLIENT_AGENT
+    global NZB_DEFAULT_DIRECTORY
+
+    NZB_CLIENT_AGENT = CFG['Nzb']['clientAgent']  # sabnzbd
+    NZB_DEFAULT_DIRECTORY = CFG['Nzb']['default_downloadDirectory']
+
+    configure_sabnzbd()
+
+
+def configure_groups():
+    global GROUPS
+
     GROUPS = CFG['Custom']['remove_group']

     if isinstance(GROUPS, str):
         GROUPS = GROUPS.split(',')

     if GROUPS == ['']:
         GROUPS = None

-    TORRENT_CLIENTAGENT = CFG['Torrent']['clientAgent']  # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent |other
-    USELINK = CFG['Torrent']['useLink']  # no | hard | sym
-    OUTPUTDIRECTORY = CFG['Torrent']['outputDirectory']  # /abs/path/to/complete/
-    TORRENT_DEFAULTDIR = CFG['Torrent']['default_downloadDirectory']
-    CATEGORIES = (CFG['Torrent']['categories'])  # music,music_videos,pictures,software
+
+def configure_utorrent():
+    global UTORRENT_WEB_UI
+    global UTORRENT_USER
+    global UTORRENT_PASSWORD
+
+    UTORRENT_WEB_UI = CFG['Torrent']['uTorrentWEBui']  # http://localhost:8090/gui/
+    UTORRENT_USER = CFG['Torrent']['uTorrentUSR']  # mysecretusr
+    UTORRENT_PASSWORD = CFG['Torrent']['uTorrentPWD']  # mysecretpwr
+
+
+def configure_transmission():
+    global TRANSMISSION_HOST
+    global TRANSMISSION_PORT
+    global TRANSMISSION_USER
+    global TRANSMISSION_PASSWORD
+
+    TRANSMISSION_HOST = CFG['Torrent']['TransmissionHost']  # localhost
+    TRANSMISSION_PORT = int(CFG['Torrent']['TransmissionPort'])
+    TRANSMISSION_USER = CFG['Torrent']['TransmissionUSR']  # mysecretusr
+    TRANSMISSION_PASSWORD = CFG['Torrent']['TransmissionPWD']  # mysecretpwr
+
+
+def configure_deluge():
+    global DELUGE_HOST
+    global DELUGE_PORT
+    global DELUGE_USER
+    global DELUGE_PASSWORD
+
+    DELUGE_HOST = CFG['Torrent']['DelugeHost']  # localhost
+    DELUGE_PORT = int(CFG['Torrent']['DelugePort'])  # 8084
+    DELUGE_USER = CFG['Torrent']['DelugeUSR']  # mysecretusr
+    DELUGE_PASSWORD = CFG['Torrent']['DelugePWD']  # mysecretpwr
+
+
+def configure_qbittorrent():
+    global QBITTORRENT_HOST
+    global QBITTORRENT_PORT
+    global QBITTORRENT_USER
+    global QBITTORRENT_PASSWORD
+
+    QBITTORRENT_HOST = CFG['Torrent']['qBittorrenHost']  # localhost
+    QBITTORRENT_PORT = int(CFG['Torrent']['qBittorrentPort'])  # 8080
+    QBITTORRENT_USER = CFG['Torrent']['qBittorrentUSR']  # mysecretusr
+    QBITTORRENT_PASSWORD = CFG['Torrent']['qBittorrentPWD']  # mysecretpwr
+
+
+def configure_flattening():
+    global NOFLATTEN
+
     NOFLATTEN = (CFG['Torrent']['noFlatten'])
     if isinstance(NOFLATTEN, str):
         NOFLATTEN = NOFLATTEN.split(',')

+
+def configure_torrent_categories():
+    global CATEGORIES
+
+    CATEGORIES = (CFG['Torrent']['categories'])  # music,music_videos,pictures,software
     if isinstance(CATEGORIES, str):
         CATEGORIES = CATEGORIES.split(',')
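Several of these helpers share one normalisation idiom: a config option may arrive as a comma-separated string or as an already-split list, and an empty option shows up as ['']. A small sketch of that idiom as a standalone helper (the project applies it inline per option):

def normalize_csv_option(value):
    """Turn a config option into a list of values, or None when empty.

    configobj may hand back either a plain string ('tv,movies') or an
    already-split list, and an empty option comes through as [''].
    """
    if isinstance(value, str):
        value = value.split(',')
    if value == ['']:
        return None
    return value


# normalize_csv_option('music,music_videos') -> ['music', 'music_videos']
# normalize_csv_option('') -> None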
-    DELETE_ORIGINAL = int(CFG['Torrent']['deleteOriginal'])
-    TORRENT_CHMOD_DIRECTORY = int(str(CFG['Torrent']['chmodDirectory']), 8)
+
+def configure_torrent_resuming():
+    global TORRENT_RESUME
+    global TORRENT_RESUME_ON_FAILURE
+
     TORRENT_RESUME_ON_FAILURE = int(CFG['Torrent']['resumeOnFailure'])
     TORRENT_RESUME = int(CFG['Torrent']['resume'])
-    UTORRENTWEBUI = CFG['Torrent']['uTorrentWEBui']  # http://localhost:8090/gui/
-    UTORRENTUSR = CFG['Torrent']['uTorrentUSR']  # mysecretusr
-    UTORRENTPWD = CFG['Torrent']['uTorrentPWD']  # mysecretpwr

-    TRANSMISSIONHOST = CFG['Torrent']['TransmissionHost']  # localhost
-    TRANSMISSIONPORT = int(CFG['Torrent']['TransmissionPort'])
-    TRANSMISSIONUSR = CFG['Torrent']['TransmissionUSR']  # mysecretusr
-    TRANSMISSIONPWD = CFG['Torrent']['TransmissionPWD']  # mysecretpwr

-    DELUGEHOST = CFG['Torrent']['DelugeHost']  # localhost
-    DELUGEPORT = int(CFG['Torrent']['DelugePort'])  # 8084
-    DELUGEUSR = CFG['Torrent']['DelugeUSR']  # mysecretusr
-    DELUGEPWD = CFG['Torrent']['DelugePWD']  # mysecretpwr
+
+def configure_torrent_permissions():
+    global TORRENT_CHMOD_DIRECTORY

-    QBITTORRENTHOST = CFG['Torrent']['qBittorrenHost']  # localhost
-    QBITTORRENTPORT = int(CFG['Torrent']['qBittorrentPort'])  # 8080
-    QBITTORRENTUSR = CFG['Torrent']['qBittorrentUSR']  # mysecretusr
-    QBITTORRENTPWD = CFG['Torrent']['qBittorrentPWD']  # mysecretpwr
+    TORRENT_CHMOD_DIRECTORY = int(str(CFG['Torrent']['chmodDirectory']), 8)

-    REMOTEPATHS = CFG['Network']['mount_points'] or []
-    if REMOTEPATHS:
-        if isinstance(REMOTEPATHS, list):
-            REMOTEPATHS = ','.join(REMOTEPATHS)  # fix in case this imported as list.
-        REMOTEPATHS = [tuple(item.split(',')) for item in
-                       REMOTEPATHS.split('|')]  # /volume1/Public/,E:\|/volume2/share/,\\NAS\
-        REMOTEPATHS = [(local.strip(), remote.strip()) for local, remote in
-                       REMOTEPATHS]  # strip trailing and leading whitespaces

-    PLEXSSL = int(CFG['Plex']['plex_ssl'])
-    PLEXHOST = CFG['Plex']['plex_host']
-    PLEXPORT = CFG['Plex']['plex_port']
-    PLEXTOKEN = CFG['Plex']['plex_token']
-    PLEXSEC = CFG['Plex']['plex_sections'] or []
-    if PLEXSEC:
-        if isinstance(PLEXSEC, list):
-            PLEXSEC = ','.join(PLEXSEC)  # fix in case this imported as list.
-        PLEXSEC = [tuple(item.split(',')) for item in PLEXSEC.split('|')]
+
+def configure_torrent_deltetion():
+    global DELETE_ORIGINAL

-    devnull = open(os.devnull, 'w')
-    try:
-        subprocess.Popen(['nice'], stdout=devnull, stderr=devnull).communicate()
-        NICENESS.extend(['nice', '-n{0}'.format(int(CFG['Posix']['niceness']))])
-    except Exception:
-        pass
-    try:
-        subprocess.Popen(['ionice'], stdout=devnull, stderr=devnull).communicate()
+    DELETE_ORIGINAL = int(CFG['Torrent']['deleteOriginal'])
+
+
+def configure_torrent_linking():
+    global USE_LINK
+
+    USE_LINK = CFG['Torrent']['useLink']  # no | hard | sym
+
+
+def configure_torrents():
+    global TORRENT_CLIENT_AGENT
+    global OUTPUT_DIRECTORY
+    global TORRENT_DEFAULT_DIRECTORY
+
+    TORRENT_CLIENT_AGENT = CFG['Torrent']['clientAgent']  # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent |other
+    OUTPUT_DIRECTORY = CFG['Torrent']['outputDirectory']  # /abs/path/to/complete/
+    TORRENT_DEFAULT_DIRECTORY = CFG['Torrent']['default_downloadDirectory']
+    configure_torrent_linking()
+    configure_flattening()
+    configure_torrent_deltetion()
+    configure_torrent_categories()
+    configure_torrent_permissions()
+    configure_torrent_resuming()
+    configure_utorrent()
+    configure_transmission()
+    configure_deluge()
+    configure_qbittorrent()
+
+
+def configure_remote_paths():
+    global REMOTE_PATHS
+
+    REMOTE_PATHS = CFG['Network']['mount_points'] or []
+
+    if REMOTE_PATHS:
+        if isinstance(REMOTE_PATHS, list):
+            REMOTE_PATHS = ','.join(REMOTE_PATHS)  # fix in case this imported as list.
+
+        REMOTE_PATHS = (
+            # /volume1/Public/,E:\|/volume2/share/,\\NAS\
+            tuple(item.split(','))
+            for item in REMOTE_PATHS.split('|')
+        )
+
+        REMOTE_PATHS = [
+            # strip trailing and leading whitespaces
+            (local.strip(), remote.strip())
+            for local, remote in REMOTE_PATHS
+        ]
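configure_remote_paths() keeps the existing mount_points format: pipe-separated pairs, each pair being 'local,remote', with surrounding whitespace stripped. A standalone sketch of the same parsing (function name is illustrative):

def parse_mount_points(mount_points):
    """Parse 'local,remote|local,remote' pairs into a list of (local, remote) tuples."""
    if not mount_points:
        return []
    if isinstance(mount_points, list):
        mount_points = ','.join(mount_points)  # in case configobj already split it
    pairs = (tuple(item.split(',')) for item in mount_points.split('|'))
    return [(local.strip(), remote.strip()) for local, remote in pairs]


print(parse_mount_points('/volume1/Public/,E:\\|/volume2/share/,\\\\NAS\\'))
# [('/volume1/Public/', 'E:\\'), ('/volume2/share/', '\\\\NAS\\')]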
+
+def configure_plex():
+    global PLEX_SSL
+    global PLEX_HOST
+    global PLEX_PORT
+    global PLEX_TOKEN
+    global PLEX_SECTION
+
+    PLEX_SSL = int(CFG['Plex']['plex_ssl'])
+    PLEX_HOST = CFG['Plex']['plex_host']
+    PLEX_PORT = CFG['Plex']['plex_port']
+    PLEX_TOKEN = CFG['Plex']['plex_token']
+    PLEX_SECTION = CFG['Plex']['plex_sections'] or []
+
+    if PLEX_SECTION:
+        if isinstance(PLEX_SECTION, list):
+            PLEX_SECTION = ','.join(PLEX_SECTION)  # fix in case this imported as list.
+        PLEX_SECTION = [
+            tuple(item.split(','))
+            for item in PLEX_SECTION.split('|')
+        ]
+
+
+def configure_niceness():
+    global NICENESS
+
+    with open(os.devnull, 'w') as devnull:
+        try:
+            subprocess.Popen(['nice'], stdout=devnull, stderr=devnull).communicate()
+            NICENESS.extend(['nice', '-n{0}'.format(int(CFG['Posix']['niceness']))])
+        except Exception:
+            pass
+        try:
+            subprocess.Popen(['ionice'], stdout=devnull, stderr=devnull).communicate()
+            try:
+                NICENESS.extend(['ionice', '-c{0}'.format(int(CFG['Posix']['ionice_class']))])
+            except Exception:
+                pass
+            try:
+                if 'ionice' in NICENESS:
+                    NICENESS.extend(['-n{0}'.format(int(CFG['Posix']['ionice_classdata']))])
+                else:
+                    NICENESS.extend(['ionice', '-n{0}'.format(int(CFG['Posix']['ionice_classdata']))])
+            except Exception:
+                pass
+        except Exception:
+            pass
-        try:
-            NICENESS.extend(['ionice', '-c{0}'.format(int(CFG['Posix']['ionice_class']))])
-        except Exception:
-            pass
-        try:
-            if 'ionice' in NICENESS:
-                NICENESS.extend(['-n{0}'.format(int(CFG['Posix']['ionice_classdata']))])
-            else:
-                NICENESS.extend(['ionice', '-n{0}'.format(int(CFG['Posix']['ionice_classdata']))])
-        except Exception:
-            pass
-    except Exception:
-        pass
-    devnull.close()

-    COMPRESSEDCONTAINER = [re.compile(r'.r\d{2}$', re.I),
-                           re.compile(r'.part\d+.rar$', re.I),
-                           re.compile('.rar$', re.I)]
-    COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG['Extensions']['compressedExtensions']]
-    MEDIACONTAINER = CFG['Extensions']['mediaExtensions']
-    AUDIOCONTAINER = CFG['Extensions']['audioExtensions']
-    METACONTAINER = CFG['Extensions']['metaExtensions']  # .nfo,.sub,.srt
-    if isinstance(COMPRESSEDCONTAINER, str):
-        COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',')
-    if isinstance(MEDIACONTAINER, str):
-        MEDIACONTAINER = MEDIACONTAINER.split(',')
-    if isinstance(AUDIOCONTAINER, str):
-        AUDIOCONTAINER = AUDIOCONTAINER.split(',')
-    if isinstance(METACONTAINER, str):
-        METACONTAINER = METACONTAINER.split(',')
+
+def configure_containers():
+    global COMPRESSED_CONTAINER
+    global MEDIA_CONTAINER
+    global AUDIO_CONTAINER
+    global META_CONTAINER
+
+    COMPRESSED_CONTAINER = [re.compile(r'.r\d{2}$', re.I),
+                            re.compile(r'.part\d+.rar$', re.I),
+                            re.compile('.rar$', re.I)]
+    COMPRESSED_CONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in
+                             CFG['Extensions']['compressedExtensions']]
+    MEDIA_CONTAINER = CFG['Extensions']['mediaExtensions']
+    AUDIO_CONTAINER = CFG['Extensions']['audioExtensions']
+    META_CONTAINER = CFG['Extensions']['metaExtensions']  # .nfo,.sub,.srt
+
+    if isinstance(COMPRESSED_CONTAINER, str):
+        COMPRESSED_CONTAINER = COMPRESSED_CONTAINER.split(',')
+
+    if isinstance(MEDIA_CONTAINER, str):
+        MEDIA_CONTAINER = MEDIA_CONTAINER.split(',')
+
+    if isinstance(AUDIO_CONTAINER, str):
+        AUDIO_CONTAINER = AUDIO_CONTAINER.split(',')
+
+    if isinstance(META_CONTAINER, str):
+        META_CONTAINER = META_CONTAINER.split(',')
+
+
+def configure_transcoder():
+    global GETSUBS
+    global TRANSCODE
+    global DUPLICATE
+    global CONCAT
+    global IGNOREEXTENSIONS
+    global OUTPUTFASTSTART
+    global GENERALOPTS
+    global OUTPUTQUALITYPERCENT
+    global OUTPUTVIDEOPATH
+    global PROCESSOUTPUT
+    global ALANGUAGE
+    global AINCLUDE
+    global SLANGUAGES
+    global SINCLUDE
+    global SEXTRACT
+    global SEMBED
+    global SUBSDIR
+    global VEXTENSION
+    global VCODEC
+    global VPRESET
+    global VFRAMERATE
+    global VBITRATE
+    global VRESOLUTION
+    global VCRF
+    global VLEVEL
+    global VCODEC_ALLOW
+    global ACODEC
+    global ACODEC_ALLOW
+    global ACHANNELS
+    global ABITRATE
+    global ACODEC2
+    global ACODEC2_ALLOW
+    global ACHANNELS2
+    global ABITRATE2
+    global ACODEC3
+    global ACODEC3_ALLOW
+    global ACHANNELS3
+    global ABITRATE3
+    global SCODEC
+    global BURN
+    global HWACCEL
+    global ALLOWSUBS
+    global DEFAULTS
+
     GETSUBS = int(CFG['Transcoder']['getSubs'])
     TRANSCODE = int(CFG['Transcoder']['transcode'])
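configure_niceness() builds a command prefix by probing whether nice and ionice can actually be executed, extending NICENESS only when the probe succeeds; the refactor just moves the same probing into a with-block so the devnull handle is always closed. A self-contained sketch of that probing approach, with hard-coded priorities standing in for the Posix config values:

import os
import subprocess


def build_niceness_prefix(niceness=10, ionice_class=2, ionice_classdata=4):
    """Build a ['nice', ...] / ['ionice', ...] prefix, skipping tools that are missing."""
    prefix = []
    with open(os.devnull, 'w') as devnull:
        try:
            # Probe the tool by running it once with all output discarded.
            subprocess.Popen(['nice'], stdout=devnull, stderr=devnull).communicate()
            prefix.extend(['nice', '-n{0}'.format(niceness)])
        except Exception:
            pass
        try:
            subprocess.Popen(['ionice'], stdout=devnull, stderr=devnull).communicate()
            prefix.extend(['ionice', '-c{0}'.format(ionice_class), '-n{0}'.format(ionice_classdata)])
        except Exception:
            pass
    return prefix


# A transcode invocation would then be: build_niceness_prefix() + ['ffmpeg', ...]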
@@ -763,7 +966,39 @@
             extra = [item for item in codec_alias[codec] if item not in ACODEC3_ALLOW]
             ACODEC3_ALLOW.extend(extra)

-    PASSWORDSFILE = CFG['passwords']['PassWordFile']
+
+def configure_passwords_file():
+    global PASSWORDS_FILE
+
+    PASSWORDS_FILE = CFG['passwords']['PassWordFile']
+
+
+def configure_torrent_class():
+    global TORRENT_CLASS
+
+    # create torrent class
+    TORRENT_CLASS = create_torrent_class(TORRENT_CLIENT_AGENT)
+
+
+def configure_sections(section):
+    global SECTIONS
+    global CATEGORIES
+    # check for script-defied section and if None set to allow sections
+    SECTIONS = CFG[
+        tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled())
+        if not section else (section,)
+    ]
+    for section, subsections in SECTIONS.items():
+        CATEGORIES.extend([subsection for subsection in subsections if CFG[section][subsection].isenabled()])
+    CATEGORIES = list(set(CATEGORIES))
+
+
+def configure_utility_locations():
+    global SHOWEXTRACT
+    global SEVENZIP
+    global FFMPEG
+    global FFPROBE
+    global PAR2CMD
+
     # Setup FFMPEG, FFPROBE and SEVENZIP locations
     if platform.system() == 'Windows':
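configure_sections() picks the enabled sections from the config (or the single section a calling script passes in) and collects their enabled subsections as categories. The real CFG is a configobj subclass with .sections and .isenabled(); the sketch below only imitates that selection with plain dicts and an 'enabled' flag, so the shapes and names are assumptions:

# Stand-in for the configobj-based CFG: each section maps categories to settings,
# and a category counts as enabled when its 'enabled' flag is 1.
CFG = {
    'SickBeard': {'tv': {'enabled': 1}},
    'CouchPotato': {'movie': {'enabled': 1}, 'movie_uhd': {'enabled': 0}},
    'HeadPhones': {'music': {'enabled': 0}},
}


def enabled_categories(cfg, section=None):
    """Collect enabled categories, optionally limited to one script-defined section."""
    if section:
        names = [section]
    else:
        names = [name for name, subs in cfg.items()
                 if any(sub['enabled'] for sub in subs.values())]
    categories = []
    for name in names:
        categories.extend(cat for cat, sub in cfg[name].items() if sub['enabled'])
    return sorted(set(categories))


print(enabled_categories(CFG))                # ['movie', 'tv']
print(enabled_categories(CFG, 'SickBeard'))   # ['tv']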
@@ -784,18 +1019,20 @@
             logger.warning('Install ffmpeg with x264 support to enable this feature ...')

     else:
+        if SYS_PATH:
+            os.environ['PATH'] += ':'+SYS_PATH
         try:
-            SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip()
+            SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
         except Exception:
             pass
         if not SEVENZIP:
             try:
-                SEVENZIP = subprocess.Popen(['which', '7zr'], stdout=subprocess.PIPE).communicate()[0].strip()
+                SEVENZIP = subprocess.Popen(['which', '7zr'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
             except Exception:
                 pass
         if not SEVENZIP:
             try:
-                SEVENZIP = subprocess.Popen(['which', '7za'], stdout=subprocess.PIPE).communicate()[0].strip()
+                SEVENZIP = subprocess.Popen(['which', '7za'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
             except Exception:
                 pass
         if not SEVENZIP:
@@ -803,7 +1040,7 @@
             logger.warning(
                 'Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!')
         try:
-            PAR2CMD = subprocess.Popen(['which', 'par2'], stdout=subprocess.PIPE).communicate()[0].strip()
+            PAR2CMD = subprocess.Popen(['which', 'par2'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
         except Exception:
             pass
         if not PAR2CMD:
@@ -818,12 +1055,12 @@
             FFMPEG = os.path.join(FFMPEG_PATH, 'avconv')
         else:
             try:
-                FFMPEG = subprocess.Popen(['which', 'ffmpeg'], stdout=subprocess.PIPE).communicate()[0].strip()
+                FFMPEG = subprocess.Popen(['which', 'ffmpeg'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
             except Exception:
                 pass
             if not FFMPEG:
                 try:
-                    FFMPEG = subprocess.Popen(['which', 'avconv'], stdout=subprocess.PIPE).communicate()[0].strip()
+                    FFMPEG = subprocess.Popen(['which', 'avconv'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
                 except Exception:
                     pass
             if not FFMPEG:
@@ -839,12 +1076,12 @@
             FFPROBE = os.path.join(FFMPEG_PATH, 'avprobe')
         else:
             try:
-                FFPROBE = subprocess.Popen(['which', 'ffprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
+                FFPROBE = subprocess.Popen(['which', 'ffprobe'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
             except Exception:
                 pass
             if not FFPROBE:
                 try:
-                    FFPROBE = subprocess.Popen(['which', 'avprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
+                    FFPROBE = subprocess.Popen(['which', 'avprobe'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
                 except Exception:
                     pass
             if not FFPROBE:
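The repeated .decode() additions address the Python 3 behaviour of Popen.communicate(), which returns bytes; without decoding, the located paths cannot be joined or compared with str. A minimal sketch of the lookup-and-decode pattern (helper name is illustrative; it assumes a POSIX which on the PATH):

import subprocess


def find_executable(name):
    """Return the path reported by `which name`, or '' when it cannot be found."""
    try:
        out = subprocess.Popen(
            ['which', name], stdout=subprocess.PIPE
        ).communicate()[0]
    except Exception:
        return ''
    # communicate() returns bytes on Python 3, so decode before using as str.
    return out.strip().decode()


# print(find_executable('ffmpeg'))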
@@ -853,14 +1090,67 @@
             logger.warning('Failed to locate ffprobe. Video corruption detection disabled!')
             logger.warning('Install ffmpeg with x264 support to enable this feature ...')

-    # check for script-defied section and if None set to allow sections
-    SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)]
-    for section, subsections in SECTIONS.items():
-        CATEGORIES.extend([subsection for subsection in subsections if CFG[section][subsection].isenabled()])
-    CATEGORIES = list(set(CATEGORIES))

-    # create torrent class
-    TORRENT_CLASS = create_torrent_class(TORRENT_CLIENTAGENT)
+
+def check_python():
+    """Check End-of-Life status for Python version."""
+    # Raise if end of life
+    eol.check()

-    # finished initalizing
-    return True
+    # Warn if within grace period
+    grace_period = 365  # days
+    eol.warn_for_status(grace_period=-grace_period)
+
+    # Log warning if within grace period
+    days_left = eol.lifetime()
+    logger.info(
+        'Python v{major}.{minor} will reach end of life in {x} days.'.format(
+            major=sys.version_info[0],
+            minor=sys.version_info[1],
+            x=days_left,
+        )
+    )
+    if days_left <= grace_period:
+        logger.warning('Please upgrade to a more recent Python version.')
+
+
+def initialize(section=None):
+    global __INITIALIZED__
+
+    if __INITIALIZED__:
+        return False
+
+    configure_logging()
+    configure_process()
+    configure_locale()
+
+    # init logging
+    logger.ntm_log_instance.init_logging()
+
+    configure_migration()
+    configure_logging_part_2()
+
+    # check python version
+    check_python()
+
+    # initialize the main SB database
+    main_db.upgrade_database(main_db.DBConnection(), databases.InitialSchema)
+
+    configure_general()
+    configure_updates()
+    configure_wake_on_lan()
+    configure_nzbs()
+    configure_torrents()
+    configure_remote_paths()
+    configure_plex()
+    configure_niceness()
+    configure_containers()
+    configure_transcoder()
+    configure_passwords_file()
+    configure_utility_locations()
+    configure_sections(section)
+    configure_torrent_class()
+
+    __INITIALIZED__ = True
+
+    # finished initializing
+    return __INITIALIZED__
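After the refactor, callers keep the same one-call entry point: initialize() runs each configure_* helper once and flips __INITIALIZED__, and scripts read the module-level settings afterwards. A short usage sketch (the printed values are examples, not defaults):

import core

# One call wires up logging, config migration and every configure_* helper;
# afterwards the module-level settings are ready to read.
core.initialize()

print(core.TORRENT_CLIENT_AGENT)   # e.g. 'transmission'
print(core.OUTPUT_DIRECTORY)       # e.g. '/abs/path/to/complete/'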
@@ -152,7 +152,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
         if not release and '.cp(tt' not in video and imdbid:
             video_name, video_ext = os.path.splitext(video)
             video2 = '{0}.cp({1}){2}'.format(video_name, imdbid, video_ext)
-            if not (client_agent in [core.TORRENT_CLIENTAGENT, 'manual'] and core.USELINK == 'move-sym'):
+            if not (client_agent in [core.TORRENT_CLIENT_AGENT, 'manual'] and core.USE_LINK == 'move-sym'):
                 logger.debug('Renaming: {0} to: {1}'.format(video, video2))
                 os.rename(video, video2)
@@ -238,11 +238,11 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
             report_nzb(failure_link, client_agent)

         if section == 'Radarr':
-            logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(section), section)
+            logger.postprocess('SUCCESS: Sending failed download to {0} for CDH processing'.format(section), section)
             return ProcessResult(
-                message='{0}: Download Failed. Sending back to {0}'.format(section),
+                message='{0}: Sending failed download back to {0}'.format(section),
                 status_code=1,  # Return as failed to flag this in the downloader.
-            )
+            )  # Return failed flag, but log the event as successful.

         if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
             logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
@@ -77,7 +77,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
         }

         res = force_process(params, url, apikey, input_name, dir_name, section, wait_for)
-        if res[0] in [0, 1]:
+        if res.status_code in [0, 1]:
             return res

         params = {
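The HeadPhones fix reads the result object's status_code attribute instead of indexing position 0. ProcessResult carries at least message and status_code; a namedtuple stand-in is enough to show the calling pattern:

import collections

# Stand-in for the project's ProcessResult; attribute access replaces res[0].
ProcessResult = collections.namedtuple('ProcessResult', ['message', 'status_code'])

res = ProcessResult(message='Radarr: Sending failed download back to Radarr', status_code=1)

if res.status_code in [0, 1]:
    print(res.message)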
@@ -47,7 +47,7 @@ def process(section, dir_name, input_name=None, failed=False, client_agent='manu
     delete_failed = int(cfg.get('delete_failed', 0))
     nzb_extraction_by = cfg.get('nzbExtractionBy', 'Downloader')
     process_method = cfg.get('process_method')
-    if client_agent == core.TORRENT_CLIENTAGENT and core.USELINK == 'move-sym':
+    if client_agent == core.TORRENT_CLIENT_AGENT and core.USE_LINK == 'move-sym':
         process_method = 'symlink'
     remote_path = int(cfg.get('remote_path', 0))
     wait_for = int(cfg.get('wait_for', 2))
@@ -168,13 +168,15 @@ def process(section, dir_name, input_name=None, failed=False, client_agent='manu
     for param in copy.copy(fork_params):
         if param == 'failed':
             fork_params[param] = failed
-            del fork_params['proc_type']
+            if 'proc_type' in fork_params:
+                del fork_params['proc_type']
             if 'type' in fork_params:
                 del fork_params['type']

         if param == 'return_data':
             fork_params[param] = 0
-            del fork_params['quiet']
+            if 'quiet' in fork_params:
+                del fork_params['quiet']

         if param == 'type':
             fork_params[param] = 'manual'
@@ -214,7 +216,7 @@ def process(section, dir_name, input_name=None, failed=False, client_agent='manu
             fork_params[param] = 1

     # delete any unused params so we don't pass them to SB by mistake
-    [fork_params.pop(k) for k, v in fork_params.items() if v is None]
+    [fork_params.pop(k) for k, v in list(fork_params.items()) if v is None]

     if status == 0:
         if section == 'NzbDrone' and not apikey:
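Wrapping fork_params.items() in list() matters on Python 3, where popping keys while iterating over a live dict view raises RuntimeError (dictionary changed size during iteration). A tiny sketch of the fix:

fork_params = {'failed': 0, 'proc_dir': None, 'process_method': None, 'force': 1}

# Iterate over a snapshot so popping keys does not invalidate the iterator.
for key, value in list(fork_params.items()):
    if value is None:
        fork_params.pop(key)

print(fork_params)  # {'failed': 0, 'force': 1}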
@@ -90,8 +90,8 @@ def extract(file_path, output_destination):
     # Create outputDestination folder
     core.make_dir(output_destination)

-    if core.PASSWORDSFILE and os.path.isfile(os.path.normpath(core.PASSWORDSFILE)):
-        passwords = [line.strip() for line in open(os.path.normpath(core.PASSWORDSFILE))]
+    if core.PASSWORDS_FILE and os.path.isfile(os.path.normpath(core.PASSWORDS_FILE)):
+        passwords = [line.strip() for line in open(os.path.normpath(core.PASSWORDS_FILE))]
     else:
         passwords = []
@@ -22,6 +22,7 @@ def auto_fork(section, input_category):
     web_root = cfg.get('web_root', '')
     replace = {
         'medusa': 'Medusa',
+        'medusa-api': 'Medusa-api',
         'sickbeard-api': 'SickBeard-api',
         'sickgear': 'SickGear',
         'sickchill': 'SickChill',
@@ -22,7 +22,7 @@ def is_video_good(videofile, status):
     file_name_ext = os.path.basename(videofile)
     file_name, file_ext = os.path.splitext(file_name_ext)
     disable = False
-    if file_ext not in core.MEDIACONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or file_ext in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED):
+    if file_ext not in core.MEDIA_CONTAINER or not core.FFPROBE or not core.CHECK_MEDIA or file_ext in ['.iso'] or (status > 0 and core.NOEXTRACTFAILED):
         disable = True
     else:
         test_details, res = get_video_details(core.TEST_FILE)
@@ -95,7 +95,7 @@ def get_video_details(videofile, img=None, bitbucket=None):
         proc = subprocess.Popen(command, stdout=subprocess.PIPE)
         out, err = proc.communicate()
         result = proc.returncode
-        video_details = json.loads(out)
+        video_details = json.loads(out.decode())
     except Exception:
         pass
     if not video_details:
@@ -109,7 +109,7 @@ def get_video_details(videofile, img=None, bitbucket=None):
             proc = subprocess.Popen(command, stdout=subprocess.PIPE)
             out, err = proc.communicate()
             result = proc.returncode
-            video_details = json.loads(out)
+            video_details = json.loads(out.decode())
         except Exception:
             logger.error('Checking [{0}] has failed'.format(file), 'TRANSCODER')
     return video_details, result
@ -646,8 +646,8 @@ def rip_iso(item, new_dir, bitbucket):
|
||||||
print_cmd(cmd)
|
print_cmd(cmd)
|
||||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
|
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
|
||||||
out, err = proc.communicate()
|
out, err = proc.communicate()
|
||||||
file_list = [re.match(r'.+(VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])', line).groups()[0] for line in
|
file_list = [re.match(r'.+(VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb])', line.decode()).groups()[0] for line in
|
||||||
out.splitlines() if re.match(r'.+VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', line)]
|
out.splitlines() if re.match(r'.+VIDEO_TS[/\\]VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', line.decode())]
|
||||||
combined = []
|
combined = []
|
||||||
for n in range(99):
|
for n in range(99):
|
||||||
concat = []
|
concat = []
|
||||||
|
|
|
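The .decode() calls added in the get_video_details and rip_iso hunks above address the same Python 3 change: Popen(...).communicate() returns bytes, so the output has to be decoded before json.loads() (required on interpreters older than 3.6) and before matching it against a str regex pattern. A small illustration of both patterns, using literal bytes in place of the repo's actual ffprobe output:

    import json
    import re

    # communicate() returns bytes under Python 3; decode before parsing
    ffprobe_out = b'{"streams": [], "format": {}}'
    details = json.loads(ffprobe_out.decode())

    # likewise, a str regex pattern cannot be matched against bytes lines
    ls_out = b'title 1\nVIDEO_TS/VTS_01_1.VOB\n'
    vobs = [line.decode() for line in ls_out.splitlines()
            if re.match(r'.+VTS_[0-9][0-9]_[0-9]', line.decode())]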
@ -100,13 +100,13 @@ def get_dirs(section, subsection, link='hard'):
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error('Failed to add directories from {0} for post-processing: {1}'.format(watch_directory, e))
|
logger.error('Failed to add directories from {0} for post-processing: {1}'.format(watch_directory, e))
|
||||||
|
|
||||||
if core.USELINK == 'move':
|
if core.USE_LINK == 'move':
|
||||||
try:
|
try:
|
||||||
output_directory = os.path.join(core.OUTPUTDIRECTORY, subsection)
|
output_directory = os.path.join(core.OUTPUT_DIRECTORY, subsection)
|
||||||
if os.path.exists(output_directory):
|
if os.path.exists(output_directory):
|
||||||
to_return.extend(process_dir(output_directory, link))
|
to_return.extend(process_dir(output_directory, link))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error('Failed to add directories from {0} for post-processing: {1}'.format(core.OUTPUTDIRECTORY, e))
|
logger.error('Failed to add directories from {0} for post-processing: {1}'.format(core.OUTPUT_DIRECTORY, e))
|
||||||
|
|
||||||
if not to_return:
|
if not to_return:
|
||||||
logger.debug('No directories identified in {0}:{1} for post-processing'.format(section, subsection))
|
logger.debug('No directories identified in {0}:{1} for post-processing'.format(section, subsection))
|
||||||
|
|
|
@ -20,7 +20,7 @@ def move_file(mediafile, path, link):
|
||||||
new_path = None
|
new_path = None
|
||||||
file_ext = os.path.splitext(mediafile)[1]
|
file_ext = os.path.splitext(mediafile)[1]
|
||||||
try:
|
try:
|
||||||
if file_ext in core.AUDIOCONTAINER:
|
if file_ext in core.AUDIO_CONTAINER:
|
||||||
f = beets.mediafile.MediaFile(mediafile)
|
f = beets.mediafile.MediaFile(mediafile)
|
||||||
|
|
||||||
# get artist and album info
|
# get artist and album info
|
||||||
|
@ -29,7 +29,7 @@ def move_file(mediafile, path, link):
|
||||||
|
|
||||||
# create new path
|
# create new path
|
||||||
new_path = os.path.join(path, '{0} - {1}'.format(sanitize_name(artist), sanitize_name(album)))
|
new_path = os.path.join(path, '{0} - {1}'.format(sanitize_name(artist), sanitize_name(album)))
|
||||||
elif file_ext in core.MEDIACONTAINER:
|
elif file_ext in core.MEDIA_CONTAINER:
|
||||||
f = guessit.guessit(mediafile)
|
f = guessit.guessit(mediafile)
|
||||||
|
|
||||||
# get title
|
# get title
|
||||||
|
@ -75,7 +75,7 @@ def is_min_size(input_name, min_size):
|
||||||
|
|
||||||
# audio files we need to check directory size not file size
|
# audio files we need to check directory size not file size
|
||||||
input_size = os.path.getsize(input_name)
|
input_size = os.path.getsize(input_name)
|
||||||
if file_ext in core.AUDIOCONTAINER:
|
if file_ext in core.AUDIO_CONTAINER:
|
||||||
try:
|
try:
|
||||||
input_size = get_dir_size(os.path.dirname(input_name))
|
input_size = get_dir_size(os.path.dirname(input_name))
|
||||||
except Exception:
|
except Exception:
|
||||||
|
@ -89,7 +89,7 @@ def is_min_size(input_name, min_size):
|
||||||
|
|
||||||
def is_archive_file(filename):
|
def is_archive_file(filename):
|
||||||
"""Check if the filename is allowed for the Archive"""
|
"""Check if the filename is allowed for the Archive"""
|
||||||
for regext in core.COMPRESSEDCONTAINER:
|
for regext in core.COMPRESSED_CONTAINER:
|
||||||
if regext.search(filename):
|
if regext.search(filename):
|
||||||
return regext.split(filename)[0]
|
return regext.split(filename)[0]
|
||||||
return False
|
return False
|
||||||
|
@ -109,9 +109,9 @@ def is_media_file(mediafile, media=True, audio=True, meta=True, archives=True, o
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return any([
|
return any([
|
||||||
(media and file_ext.lower() in core.MEDIACONTAINER),
|
(media and file_ext.lower() in core.MEDIA_CONTAINER),
|
||||||
(audio and file_ext.lower() in core.AUDIOCONTAINER),
|
(audio and file_ext.lower() in core.AUDIO_CONTAINER),
|
||||||
(meta and file_ext.lower() in core.METACONTAINER),
|
(meta and file_ext.lower() in core.META_CONTAINER),
|
||||||
(archives and is_archive_file(mediafile)),
|
(archives and is_archive_file(mediafile)),
|
||||||
(other and (file_ext.lower() in otherext or 'all' in otherext)),
|
(other and (file_ext.lower() in otherext or 'all' in otherext)),
|
||||||
])
|
])
|
||||||
|
|
|
@ -101,13 +101,13 @@ def find_download(client_agent, download_id):
|
||||||
if torrent['hash'] == download_id:
|
if torrent['hash'] == download_id:
|
||||||
return True
|
return True
|
||||||
if client_agent == 'sabnzbd':
|
if client_agent == 'sabnzbd':
|
||||||
if 'http' in core.SABNZBDHOST:
|
if 'http' in core.SABNZBD_HOST:
|
||||||
base_url = '{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT)
|
base_url = '{0}:{1}/api'.format(core.SABNZBD_HOST, core.SABNZBD_PORT)
|
||||||
else:
|
else:
|
||||||
base_url = 'http://{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT)
|
base_url = 'http://{0}:{1}/api'.format(core.SABNZBD_HOST, core.SABNZBD_PORT)
|
||||||
url = base_url
|
url = base_url
|
||||||
params = {
|
params = {
|
||||||
'apikey': core.SABNZBDAPIKEY,
|
'apikey': core.SABNZBD_APIKEY,
|
||||||
'mode': 'get_files',
|
'mode': 'get_files',
|
||||||
'output': 'json',
|
'output': 'json',
|
||||||
'value': download_id,
|
'value': download_id,
|
||||||
|
|
|
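The find_download hunk above only renames the SABnzbd settings to their snake_case forms (SABNZBD_HOST, SABNZBD_PORT, SABNZBD_APIKEY); the surrounding code builds the api endpoint and a params dict for a get_files lookup. For orientation, the resulting request is roughly of this shape — host, port, API key and nzo id below are placeholders, and requests is assumed as elsewhere in the codebase:

    import requests

    base_url = 'http://127.0.0.1:8080/api'        # from SABNZBD_HOST / SABNZBD_PORT
    params = {
        'apikey': 'SABNZBD_API_KEY_HERE',         # placeholder
        'mode': 'get_files',
        'output': 'json',
        'value': 'SABnzbd_nzo_xxxxxx',            # the download_id being looked up
    }
    response = requests.get(base_url, params=params, verify=False, timeout=(30, 120))
    data = response.json()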
@ -8,20 +8,20 @@ def plex_update(category):
|
||||||
if core.FAILED:
|
if core.FAILED:
|
||||||
return
|
return
|
||||||
url = '{scheme}://{host}:{port}/library/sections/'.format(
|
url = '{scheme}://{host}:{port}/library/sections/'.format(
|
||||||
scheme='https' if core.PLEXSSL else 'http',
|
scheme='https' if core.PLEX_SSL else 'http',
|
||||||
host=core.PLEXHOST,
|
host=core.PLEX_HOST,
|
||||||
port=core.PLEXPORT,
|
port=core.PLEX_PORT,
|
||||||
)
|
)
|
||||||
section = None
|
section = None
|
||||||
if not core.PLEXSEC:
|
if not core.PLEX_SECTION:
|
||||||
return
|
return
|
||||||
logger.debug('Attempting to update Plex Library for category {0}.'.format(category), 'PLEX')
|
logger.debug('Attempting to update Plex Library for category {0}.'.format(category), 'PLEX')
|
||||||
for item in core.PLEXSEC:
|
for item in core.PLEX_SECTION:
|
||||||
if item[0] == category:
|
if item[0] == category:
|
||||||
section = item[1]
|
section = item[1]
|
||||||
|
|
||||||
if section:
|
if section:
|
||||||
url = '{url}{section}/refresh?X-Plex-Token={token}'.format(url=url, section=section, token=core.PLEXTOKEN)
|
url = '{url}{section}/refresh?X-Plex-Token={token}'.format(url=url, section=section, token=core.PLEX_TOKEN)
|
||||||
requests.get(url, timeout=(60, 120), verify=False)
|
requests.get(url, timeout=(60, 120), verify=False)
|
||||||
logger.debug('Plex Library has been refreshed.', 'PLEX')
|
logger.debug('Plex Library has been refreshed.', 'PLEX')
|
||||||
else:
|
else:
|
||||||
|
|
|
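For reference, with the renamed Plex settings above (PLEX_SSL, PLEX_HOST, PLEX_PORT, PLEX_SECTION, PLEX_TOKEN) the refresh call ends up requesting a URL of the following shape; the values here are placeholders only:

    # Placeholder values; shape of the final section-refresh request
    url = 'https://192.168.1.50:32400/library/sections/2/refresh?X-Plex-Token=PLEX_TOKEN_HERE'
    # requests.get(url, timeout=(60, 120), verify=False)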
@ -10,13 +10,13 @@ def get_nzoid(input_name):
|
||||||
nzoid = None
|
nzoid = None
|
||||||
slots = []
|
slots = []
|
||||||
logger.debug('Searching for nzoid from SAbnzbd ...')
|
logger.debug('Searching for nzoid from SAbnzbd ...')
|
||||||
if 'http' in core.SABNZBDHOST:
|
if 'http' in core.SABNZBD_HOST:
|
||||||
base_url = '{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT)
|
base_url = '{0}:{1}/api'.format(core.SABNZBD_HOST, core.SABNZBD_PORT)
|
||||||
else:
|
else:
|
||||||
base_url = 'http://{0}:{1}/api'.format(core.SABNZBDHOST, core.SABNZBDPORT)
|
base_url = 'http://{0}:{1}/api'.format(core.SABNZBD_HOST, core.SABNZBD_PORT)
|
||||||
url = base_url
|
url = base_url
|
||||||
params = {
|
params = {
|
||||||
'apikey': core.SABNZBDAPIKEY,
|
'apikey': core.SABNZBD_APIKEY,
|
||||||
'mode': 'queue',
|
'mode': 'queue',
|
||||||
'output': 'json',
|
'output': 'json',
|
||||||
}
|
}
|
||||||
|
|
|
@ -48,9 +48,9 @@ def make_dir(path):
|
||||||
|
|
||||||
|
|
||||||
def remote_dir(path):
|
def remote_dir(path):
|
||||||
if not core.REMOTEPATHS:
|
if not core.REMOTE_PATHS:
|
||||||
return path
|
return path
|
||||||
for local, remote in core.REMOTEPATHS:
|
for local, remote in core.REMOTE_PATHS:
|
||||||
if local in path:
|
if local in path:
|
||||||
base_dirs = path.replace(local, '').split(os.sep)
|
base_dirs = path.replace(local, '').split(os.sep)
|
||||||
if '/' in remote:
|
if '/' in remote:
|
||||||
|
|
|
@ -12,38 +12,40 @@ from core import logger
|
||||||
def create_torrent_class(client_agent):
|
def create_torrent_class(client_agent):
|
||||||
# Hardlink solution for Torrents
|
# Hardlink solution for Torrents
|
||||||
tc = None
|
tc = None
|
||||||
|
if not core.APP_NAME == 'TorrentToMedia.py': #Skip loading Torrent for NZBs.
|
||||||
|
return tc
|
||||||
|
|
||||||
if client_agent == 'utorrent':
|
if client_agent == 'utorrent':
|
||||||
try:
|
try:
|
||||||
logger.debug('Connecting to {0}: {1}'.format(client_agent, core.UTORRENTWEBUI))
|
logger.debug('Connecting to {0}: {1}'.format(client_agent, core.UTORRENT_WEB_UI))
|
||||||
tc = UTorrentClient(core.UTORRENTWEBUI, core.UTORRENTUSR, core.UTORRENTPWD)
|
tc = UTorrentClient(core.UTORRENT_WEB_UI, core.UTORRENT_USER, core.UTORRENT_PASSWORD)
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.error('Failed to connect to uTorrent')
|
logger.error('Failed to connect to uTorrent')
|
||||||
|
|
||||||
if client_agent == 'transmission':
|
if client_agent == 'transmission':
|
||||||
try:
|
try:
|
||||||
logger.debug('Connecting to {0}: http://{1}:{2}'.format(
|
logger.debug('Connecting to {0}: http://{1}:{2}'.format(
|
||||||
client_agent, core.TRANSMISSIONHOST, core.TRANSMISSIONPORT))
|
client_agent, core.TRANSMISSION_HOST, core.TRANSMISSION_PORT))
|
||||||
tc = TransmissionClient(core.TRANSMISSIONHOST, core.TRANSMISSIONPORT,
|
tc = TransmissionClient(core.TRANSMISSION_HOST, core.TRANSMISSION_PORT,
|
||||||
core.TRANSMISSIONUSR,
|
core.TRANSMISSION_USER,
|
||||||
core.TRANSMISSIONPWD)
|
core.TRANSMISSION_PASSWORD)
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.error('Failed to connect to Transmission')
|
logger.error('Failed to connect to Transmission')
|
||||||
|
|
||||||
if client_agent == 'deluge':
|
if client_agent == 'deluge':
|
||||||
try:
|
try:
|
||||||
logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.DELUGEHOST, core.DELUGEPORT))
|
logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.DELUGE_HOST, core.DELUGE_PORT))
|
||||||
tc = DelugeClient()
|
tc = DelugeClient()
|
||||||
tc.connect(host=core.DELUGEHOST, port=core.DELUGEPORT, username=core.DELUGEUSR,
|
tc.connect(host=core.DELUGE_HOST, port=core.DELUGE_PORT, username=core.DELUGE_USER,
|
||||||
password=core.DELUGEPWD)
|
password=core.DELUGE_PASSWORD)
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.error('Failed to connect to Deluge')
|
logger.error('Failed to connect to Deluge')
|
||||||
|
|
||||||
if client_agent == 'qbittorrent':
|
if client_agent == 'qbittorrent':
|
||||||
try:
|
try:
|
||||||
logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.QBITTORRENTHOST, core.QBITTORRENTPORT))
|
logger.debug('Connecting to {0}: http://{1}:{2}'.format(client_agent, core.QBITTORRENT_HOST, core.QBITTORRENT_PORT))
|
||||||
tc = qBittorrentClient('http://{0}:{1}/'.format(core.QBITTORRENTHOST, core.QBITTORRENTPORT))
|
tc = qBittorrentClient('http://{0}:{1}/'.format(core.QBITTORRENT_HOST, core.QBITTORRENT_PORT))
|
||||||
tc.login(core.QBITTORRENTUSR, core.QBITTORRENTPWD)
|
tc.login(core.QBITTORRENT_USER, core.QBITTORRENT_PASSWORD)
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.error('Failed to connect to qBittorrent')
|
logger.error('Failed to connect to qBittorrent')
|
||||||
|
|
||||||
|
@ -85,7 +87,7 @@ def resume_torrent(client_agent, input_hash, input_id, input_name):
|
||||||
|
|
||||||
|
|
||||||
def remove_torrent(client_agent, input_hash, input_id, input_name):
|
def remove_torrent(client_agent, input_hash, input_id, input_name):
|
||||||
if core.DELETE_ORIGINAL == 1 or core.USELINK == 'move':
|
if core.DELETE_ORIGINAL == 1 or core.USE_LINK == 'move':
|
||||||
logger.debug('Deleting torrent {0} from {1}'.format(input_name, client_agent))
|
logger.debug('Deleting torrent {0} from {1}'.format(input_name, client_agent))
|
||||||
try:
|
try:
|
||||||
if client_agent == 'utorrent' and core.TORRENT_CLASS != '':
|
if client_agent == 'utorrent' and core.TORRENT_CLASS != '':
|
||||||
|
|
179
eol.py
Normal file

|
@ -0,0 +1,179 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import sys
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
__version__ = '1.0.0'
|
||||||
|
|
||||||
|
|
||||||
|
def date(string, fmt='%Y-%m-%d'):
|
||||||
|
"""
|
||||||
|
Convert date string to date.
|
||||||
|
|
||||||
|
:param string: A date string
|
||||||
|
:param fmt: Format to use when parsing the date string
|
||||||
|
:return: A datetime.date
|
||||||
|
"""
|
||||||
|
return datetime.datetime.strptime(string, fmt).date()
|
||||||
|
|
||||||
|
|
||||||
|
# https://devguide.python.org/
|
||||||
|
# https://devguide.python.org/devcycle/#devcycle
|
||||||
|
PYTHON_EOL = {
|
||||||
|
(3, 7): date('2023-06-27'),
|
||||||
|
(3, 6): date('2021-12-23'),
|
||||||
|
(3, 5): date('2020-09-13'),
|
||||||
|
(3, 4): date('2019-03-16'),
|
||||||
|
(3, 3): date('2017-09-29'),
|
||||||
|
(3, 2): date('2016-02-20'),
|
||||||
|
(3, 1): date('2012-04-09'),
|
||||||
|
(3, 0): date('2009-01-13'),
|
||||||
|
(2, 7): date('2020-01-01'),
|
||||||
|
(2, 6): date('2013-10-29'),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class Error(Exception):
|
||||||
|
"""An error has occurred."""
|
||||||
|
|
||||||
|
|
||||||
|
class LifetimeError(Error):
|
||||||
|
"""Lifetime has been exceeded and upgrade is required."""
|
||||||
|
|
||||||
|
|
||||||
|
class LifetimeWarning(Warning):
|
||||||
|
"""Lifetime has been exceeded and is no longer supported."""
|
||||||
|
|
||||||
|
|
||||||
|
def lifetime(version=None):
|
||||||
|
"""
|
||||||
|
Calculate days left till End-of-Life for a version.
|
||||||
|
|
||||||
|
:param version: An optional tuple with version information
|
||||||
|
If a version is not provided, the current system version will be used.
|
||||||
|
:return: Days left until End-of-Life
|
||||||
|
"""
|
||||||
|
if version is None:
|
||||||
|
version = sys.version_info
|
||||||
|
major = version[0]
|
||||||
|
minor = version[1]
|
||||||
|
now = datetime.datetime.now().date()
|
||||||
|
time_left = PYTHON_EOL[(major, minor)] - now
|
||||||
|
return time_left.days
|
||||||
|
|
||||||
|
|
||||||
|
def expiration(version=None, grace_period=0):
|
||||||
|
"""
|
||||||
|
Calculate expiration date for a version given a grace period.
|
||||||
|
|
||||||
|
:param version: An optional tuple with version information
|
||||||
|
If a version is not provided, the current system version will be used.
|
||||||
|
:param grace_period: An optional number of days grace period
|
||||||
|
:return: Total days till expiration
|
||||||
|
"""
|
||||||
|
days_left = lifetime(version)
|
||||||
|
return days_left + grace_period
|
||||||
|
|
||||||
|
|
||||||
|
def check(version=None, grace_period=0):
|
||||||
|
"""
|
||||||
|
Raise an exception if end of life has been reached and recommend upgrade.
|
||||||
|
|
||||||
|
:param version: An optional tuple with version information
|
||||||
|
If a version is not provided, the current system version will be used.
|
||||||
|
:param grace_period: An optional number of days grace period
|
||||||
|
If a grace period is not provided, a default 60 days grace period will
|
||||||
|
be used.
|
||||||
|
:return: None
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
raise_for_status(version, grace_period)
|
||||||
|
except LifetimeError as error:
|
||||||
|
print('Please use a newer version of Python.')
|
||||||
|
print_statuses()
|
||||||
|
sys.exit(error)
|
||||||
|
|
||||||
|
|
||||||
|
def raise_for_status(version=None, grace_period=0):
|
||||||
|
"""
|
||||||
|
Raise an exception if end of life has been reached.
|
||||||
|
|
||||||
|
:param version: An optional tuple with version information
|
||||||
|
If a version is not provided, the current system version will be used.
|
||||||
|
:param grace_period: An optional number of days grace period
|
||||||
|
If a grace period is not provided, a default 60 days grace period will
|
||||||
|
be used.
|
||||||
|
:return: None
|
||||||
|
"""
|
||||||
|
if version is None:
|
||||||
|
version = sys.version_info
|
||||||
|
days_left = lifetime(version)
|
||||||
|
expires = days_left + grace_period
|
||||||
|
if expires <= 0:
|
||||||
|
msg = 'Python {major}.{minor} is no longer supported.'.format(
|
||||||
|
major=version[0],
|
||||||
|
minor=version[1],
|
||||||
|
)
|
||||||
|
raise LifetimeError(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def warn_for_status(version=None, grace_period=0):
|
||||||
|
"""
|
||||||
|
Warn if end of life has been reached.
|
||||||
|
|
||||||
|
:param version: An optional tuple with version information
|
||||||
|
If a version is not provided, the current system version will be used.
|
||||||
|
:param grace_period: An optional number of days grace period
|
||||||
|
:return: None
|
||||||
|
"""
|
||||||
|
if version is None:
|
||||||
|
version = sys.version_info
|
||||||
|
days_left = lifetime(version)
|
||||||
|
expires = days_left + grace_period
|
||||||
|
if expires <= 0:
|
||||||
|
msg = 'Python {major}.{minor} is no longer supported.'.format(
|
||||||
|
major=version[0],
|
||||||
|
minor=version[1],
|
||||||
|
)
|
||||||
|
warnings.warn(msg, LifetimeWarning)
|
||||||
|
|
||||||
|
|
||||||
|
def print_statuses(show_expired=False):
|
||||||
|
"""
|
||||||
|
Print end-of-life statuses of known python versions.
|
||||||
|
|
||||||
|
:param show_expired: If true also print expired python version statuses
|
||||||
|
"""
|
||||||
|
lifetimes = sorted(
|
||||||
|
(lifetime(python_version), python_version)
|
||||||
|
for python_version in PYTHON_EOL
|
||||||
|
)
|
||||||
|
print('Python End-of-Life for current versions:')
|
||||||
|
for days_left, python_version in lifetimes:
|
||||||
|
if days_left >= 0:
|
||||||
|
print(
|
||||||
|
'v{major}.{minor} in {remaining:>4} days'.format(
|
||||||
|
major=python_version[0],
|
||||||
|
minor=python_version[1],
|
||||||
|
remaining=days_left,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if not show_expired:
|
||||||
|
return
|
||||||
|
|
||||||
|
print()
|
||||||
|
print('Python End-of-Life for expired versions:')
|
||||||
|
for days_left, python_version in lifetimes:
|
||||||
|
if days_left < 0:
|
||||||
|
print(
|
||||||
|
'v{major}.{minor} {remaining:>4} days ago'.format(
|
||||||
|
major=python_version[0],
|
||||||
|
minor=python_version[1],
|
||||||
|
remaining=-days_left,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
print_statuses(show_expired=True)
|
|
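The new eol module above is self-contained: lifetime() looks a (major, minor) version up in the PYTHON_EOL table and returns the days remaining, raise_for_status()/check() fail once that date (plus any grace period) has passed, and warn_for_status() only emits a warning. The entrypoint hunks elsewhere in this merge simply do import eol followed by eol.check(). A short example of the other entry points, using the functions exactly as defined above:

    import eol

    # Days left for a specific version tuple, e.g. Python 2.7 (negative once expired)
    print(eol.lifetime(version=(2, 7)))

    # Warn (instead of exiting) for a version that is past end-of-life
    eol.warn_for_status(version=(3, 4), grace_period=60)

    # Print the end-of-life table, including versions that have already expired
    eol.print_statuses(show_expired=True)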
@ -623,6 +623,9 @@
|
||||||
|
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import eol
|
||||||
|
eol.check()
|
||||||
|
|
||||||
import cleanup
|
import cleanup
|
||||||
cleanup.clean(cleanup.FOLDER_STRUCTURE)
|
cleanup.clean(cleanup.FOLDER_STRUCTURE)
|
||||||
|
|
||||||
|
@ -645,7 +648,7 @@ except NameError:
|
||||||
|
|
||||||
# post-processing
|
# post-processing
|
||||||
def process(input_directory, input_name=None, status=0, client_agent='manual', download_id=None, input_category=None, failure_link=None):
|
def process(input_directory, input_name=None, status=0, client_agent='manual', download_id=None, input_category=None, failure_link=None):
|
||||||
if core.SAFE_MODE and input_directory == core.NZB_DEFAULTDIR:
|
if core.SAFE_MODE and input_directory == core.NZB_DEFAULT_DIRECTORY:
|
||||||
logger.error(
|
logger.error(
|
||||||
'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format(
|
'The input directory:[{0}] is the Default Download Directory. Please configure category directories to prevent processing of other media.'.format(
|
||||||
input_directory))
|
input_directory))
|
||||||
|
@ -657,7 +660,7 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
|
||||||
if not download_id and client_agent == 'sabnzbd':
|
if not download_id and client_agent == 'sabnzbd':
|
||||||
download_id = get_nzoid(input_name)
|
download_id = get_nzoid(input_name)
|
||||||
|
|
||||||
if client_agent != 'manual' and not core.DOWNLOADINFO:
|
if client_agent != 'manual' and not core.DOWNLOAD_INFO:
|
||||||
logger.debug('Adding NZB download info for directory {0} to database'.format(input_directory))
|
logger.debug('Adding NZB download info for directory {0} to database'.format(input_directory))
|
||||||
|
|
||||||
my_db = main_db.DBConnection()
|
my_db = main_db.DBConnection()
|
||||||
|
@ -725,7 +728,7 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
|
||||||
extract = int(cfg.get('extract', 0))
|
extract = int(cfg.get('extract', 0))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if int(cfg.get('remote_path')) and not core.REMOTEPATHS:
|
if int(cfg.get('remote_path')) and not core.REMOTE_PATHS:
|
||||||
logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format(
|
logger.error('Remote Path is enabled for {0}:{1} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!'.format(
|
||||||
section_name, input_category))
|
section_name, input_category))
|
||||||
return ProcessResult(
|
return ProcessResult(
|
||||||
|
@ -899,13 +902,13 @@ def main(args, section=None):
|
||||||
logger.info('Starting manual run for {0}:{1} - Folder: {2}'.format(section, subsection, dir_name))
|
logger.info('Starting manual run for {0}:{1} - Folder: {2}'.format(section, subsection, dir_name))
|
||||||
logger.info('Checking database for download info for {0} ...'.format(os.path.basename(dir_name)))
|
logger.info('Checking database for download info for {0} ...'.format(os.path.basename(dir_name)))
|
||||||
|
|
||||||
core.DOWNLOADINFO = get_download_info(os.path.basename(dir_name), 0)
|
core.DOWNLOAD_INFO = get_download_info(os.path.basename(dir_name), 0)
|
||||||
if core.DOWNLOADINFO:
|
if core.DOWNLOAD_INFO:
|
||||||
logger.info('Found download info for {0}, '
|
logger.info('Found download info for {0}, '
|
||||||
'setting variables now ...'.format
|
'setting variables now ...'.format
|
||||||
(os.path.basename(dir_name)))
|
(os.path.basename(dir_name)))
|
||||||
client_agent = text_type(core.DOWNLOADINFO[0].get('client_agent', 'manual'))
|
client_agent = text_type(core.DOWNLOAD_INFO[0].get('client_agent', 'manual'))
|
||||||
download_id = text_type(core.DOWNLOADINFO[0].get('input_id', ''))
|
download_id = text_type(core.DOWNLOAD_INFO[0].get('input_id', ''))
|
||||||
else:
|
else:
|
||||||
logger.info('Unable to locate download info for {0}, '
|
logger.info('Unable to locate download info for {0}, '
|
||||||
'continuing to try and process this release ...'.format
|
'continuing to try and process this release ...'.format
|
||||||
|
|
2
setup.py
|
@ -18,7 +18,7 @@ def read(*names, **kwargs):
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
name='nzbToMedia',
|
name='nzbToMedia',
|
||||||
version='12.0.7',
|
version='12.0.8',
|
||||||
license='GPLv3',
|
license='GPLv3',
|
||||||
description='Efficient on demand post processing',
|
description='Efficient on demand post processing',
|
||||||
long_description="""
|
long_description="""
|
||||||
|
|
|
@ -1,15 +1,18 @@
|
||||||
#! /usr/bin/env python2
|
#! /usr/bin/env python2
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
from babelfish import Language
|
import datetime
|
||||||
import guessit
|
import os
|
||||||
import requests
|
import sys
|
||||||
import subliminal
|
|
||||||
|
|
||||||
import core
|
import core
|
||||||
from core import transcoder
|
from core import logger, main_db, transcoder
|
||||||
|
from core.auto_process import comics, games, movies, music, tv
|
||||||
|
from core.auto_process.common import ProcessResult
|
||||||
|
from core.user_scripts import external_script
|
||||||
from core.forks import auto_fork
|
from core.forks import auto_fork
|
||||||
from core.utils import server_responding
|
from core.utils import char_replace, clean_dir, convert_to_ascii, extract_files, get_dirs, get_download_info, get_nzoid, plex_update, update_download_info_status, server_responding
|
||||||
|
|
||||||
|
|
||||||
# Initialize the config
|
# Initialize the config
|
||||||
core.initialize()
|
core.initialize()
|
||||||
|
|