Commit 5f54513b75: Add peppiness
Parent: b7156495a5
Mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-21 13:53:15 -07:00)
10 changed files with 401 additions and 254 deletions
@@ -68,8 +68,10 @@ def module_path(module=__file__, parent=False):
 return normalized
-def git_clean(remove_directories=False, force=False, dry_run=False, interactive=False, quiet=False, exclude=None,
-ignore_rules=False, clean_ignored=False, paths=None):
+def git_clean(
+remove_directories=False, force=False, dry_run=False, interactive=False, quiet=False, exclude=None,
+ignore_rules=False, clean_ignored=False, paths=None,
+):
 """Execute git clean commands."""
 command = ['git', 'clean']
 if remove_directories:
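For context, git_clean() above only assembles a list of command arguments; a minimal standalone sketch of how such a list is typically executed (the subprocess call and the flags shown are assumptions for illustration, not part of this diff):

    import subprocess

    # Hypothetical invocation: remove untracked directories, forced, dry run only.
    command = ['git', 'clean', '-d', '--force', '--dry-run']
    result = subprocess.run(command, capture_output=True, text=True, check=False)
    print(result.stdout)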
@@ -391,9 +391,9 @@ def configure_updates():
 logger.error('Update failed, not restarting. Check your log for more information.')
 # Set Current Version
-logger.info('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format
-(version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH,
-system=platform.system(), release=platform.release()))
+logger.info(
+'nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format(version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH, system=platform.system(), release=platform.release()),
+)
 def configure_wake_on_lan():
@@ -473,11 +473,15 @@ def configure_containers():
 global AUDIO_CONTAINER
 global META_CONTAINER
-COMPRESSED_CONTAINER = [re.compile(r'.r\d{2}$', re.I),
+COMPRESSED_CONTAINER = [
+re.compile(r'.r\d{2}$', re.I),
 re.compile(r'.part\d+.rar$', re.I),
-re.compile('.rar$', re.I)]
-COMPRESSED_CONTAINER += [re.compile(f'{ext}$', re.I) for ext in
-CFG['Extensions']['compressedExtensions']]
+re.compile('.rar$', re.I),
+]
+COMPRESSED_CONTAINER += [
+re.compile(f'{ext}$', re.I) for ext in
+CFG['Extensions']['compressedExtensions']
+]
 MEDIA_CONTAINER = CFG['Extensions']['mediaExtensions']
 AUDIO_CONTAINER = CFG['Extensions']['audioExtensions']
 META_CONTAINER = CFG['Extensions']['metaExtensions'] # .nfo,.sub,.srt
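The COMPRESSED_CONTAINER entries above are compiled regular expressions; a minimal standalone sketch (assumed usage, not taken from this commit) of matching a file name against such a list:

    import re

    compressed_container = [
        re.compile(r'.r\d{2}$', re.I),
        re.compile(r'.part\d+.rar$', re.I),
        re.compile('.rar$', re.I),
    ]

    def looks_compressed(filename):
        # True if any compressed-archive pattern matches the file name.
        return any(pattern.search(filename) for pattern in compressed_container)

    print(looks_compressed('Some.Movie.2019.part01.rar'))  # True
    print(looks_compressed('Some.Movie.2019.mkv'))         # False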
@@ -755,7 +759,7 @@ def configure_transcoder():
 'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8,
 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
 'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, 'ACHANNELS3': 8,
-'SCODEC': 'mov_text'
+'SCODEC': 'mov_text',
 },
 'mkv-bluray': {
 'VEXTENSION': '.mkv', 'VCODEC': 'libx265', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@@ -901,7 +905,8 @@ def configure_utility_locations():
 if not SEVENZIP:
 SEVENZIP = None
 logger.warning(
-'Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!')
+'Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!',
+)
 try:
 PAR2CMD = subprocess.Popen(['which', 'par2'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
 except Exception:
@@ -909,12 +914,17 @@ def configure_utility_locations():
 if not PAR2CMD:
 PAR2CMD = None
 logger.warning(
-'Failed to locate par2. Repair and rename using par files will not be possible!')
-if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'),
-os.X_OK):
+'Failed to locate par2. Repair and rename using par files will not be possible!',
+)
+if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(
+os.path.join(FFMPEG_PATH, 'ffmpeg'),
+os.X_OK,
+):
 FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg')
-elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(os.path.join(FFMPEG_PATH, 'avconv'),
-os.X_OK):
+elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(
+os.path.join(FFMPEG_PATH, 'avconv'),
+os.X_OK,
+):
 FFMPEG = os.path.join(FFMPEG_PATH, 'avconv')
 else:
 try:
@@ -931,11 +941,15 @@ def configure_utility_locations():
 logger.warning('Failed to locate ffmpeg. Transcoding disabled!')
 logger.warning('Install ffmpeg with x264 support to enable this feature ...')
-if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'),
-os.X_OK):
+if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(
+os.path.join(FFMPEG_PATH, 'ffprobe'),
+os.X_OK,
+):
 FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe')
-elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(os.path.join(FFMPEG_PATH, 'avprobe'),
-os.X_OK):
+elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(
+os.path.join(FFMPEG_PATH, 'avprobe'),
+os.X_OK,
+):
 FFPROBE = os.path.join(FFMPEG_PATH, 'avprobe')
 else:
 try:
@@ -139,7 +139,8 @@ class ConfigObj(configobj.ConfigObj, Section):
 if CFG_OLD[section].sections:
 subsections.update({section: CFG_OLD[section].sections})
 for option, value in CFG_OLD[section].items():
-if option in ['category',
+if option in [
+'category',
 'cpsCategory',
 'sbCategory',
 'srCategory',
@@ -148,7 +149,8 @@ class ConfigObj(configobj.ConfigObj, Section):
 'gzCategory',
 'raCategory',
 'ndCategory',
-'W3Category']:
+'W3Category',
+]:
 if not isinstance(value, list):
 value = [value]
@@ -279,29 +281,29 @@ class ConfigObj(configobj.ConfigObj, Section):
 try:
 if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
 if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']:
-logger.warning('{x} category is set for SickBeard and Sonarr. '
-'Please check your config in NZBGet'.format
-(x=os.environ['NZBPO_NDCATEGORY']))
+logger.warning(
+'{x} category is set for SickBeard and Sonarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_NDCATEGORY']),
+)
 if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
 if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
-logger.warning('{x} category is set for CouchPotato and Radarr. '
-'Please check your config in NZBGet'.format
-(x=os.environ['NZBPO_RACATEGORY']))
+logger.warning(
+'{x} category is set for CouchPotato and Radarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_RACATEGORY']),
+)
 if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_W3CATEGORY' in os.environ:
 if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_W3CATEGORY']:
-logger.warning('{x} category is set for Watcher3 and Radarr. '
-'Please check your config in NZBGet'.format
-(x=os.environ['NZBPO_RACATEGORY']))
+logger.warning(
+'{x} category is set for Watcher3 and Radarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_RACATEGORY']),
+)
 if 'NZBPO_W3CATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
 if os.environ['NZBPO_W3CATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
-logger.warning('{x} category is set for CouchPotato and Watcher3. '
-'Please check your config in NZBGet'.format
-(x=os.environ['NZBPO_W3CATEGORY']))
+logger.warning(
+'{x} category is set for CouchPotato and Watcher3. Please check your config in NZBGet'.format(x=os.environ['NZBPO_W3CATEGORY']),
+)
 if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ:
 if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']:
-logger.warning('{x} category is set for HeadPhones and Lidarr. '
-'Please check your config in NZBGet'.format
-(x=os.environ['NZBPO_LICATEGORY']))
+logger.warning(
+'{x} category is set for HeadPhones and Lidarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_LICATEGORY']),
+)
 section = 'Nzb'
 key = 'NZBOP_DESTDIR'
 if key in os.environ:
@@ -331,10 +333,14 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'CouchPotato'
 env_cat_key = 'NZBPO_CPSCATEGORY'
-env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
-'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
-cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
-'wait_for', 'watch_dir', 'omdbapikey']
+env_keys = [
+'ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
+'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY',
+]
+cfg_keys = [
+'enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
+'wait_for', 'watch_dir', 'omdbapikey',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_CPS{env_keys[index]}'
@@ -352,10 +358,14 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'Watcher3'
 env_cat_key = 'NZBPO_W3CATEGORY'
-env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
-'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
-cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
-'wait_for', 'watch_dir', 'omdbapikey']
+env_keys = [
+'ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
+'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY',
+]
+cfg_keys = [
+'enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
+'wait_for', 'watch_dir', 'omdbapikey',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_W3{env_keys[index]}'
@@ -373,10 +383,14 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'SickBeard'
 env_cat_key = 'NZBPO_SBCATEGORY'
-env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK',
-'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
-cfg_keys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink',
-'nzbExtractionBy', 'remote_path', 'process_method']
+env_keys = [
+'ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK',
+'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD',
+]
+cfg_keys = [
+'enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink',
+'nzbExtractionBy', 'remote_path', 'process_method',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_SB{env_keys[index]}'
@@ -394,10 +408,14 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'SiCKRAGE'
 env_cat_key = 'NZBPO_SRCATEGORY'
-env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'API_VERSION', 'SSO_USERNAME', 'SSO_PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
-'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
-cfg_keys = ['enabled', 'host', 'port', 'apikey', 'api_version', 'sso_username', 'sso_password', 'ssl', 'web_root', 'watch_dir', 'fork',
-'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
+env_keys = [
+'ENABLED', 'HOST', 'PORT', 'APIKEY', 'API_VERSION', 'SSO_USERNAME', 'SSO_PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
+'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD',
+]
+cfg_keys = [
+'enabled', 'host', 'port', 'apikey', 'api_version', 'sso_username', 'sso_password', 'ssl', 'web_root', 'watch_dir', 'fork',
+'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_SR{env_keys[index]}'
@@ -432,10 +450,14 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'Mylar'
 env_cat_key = 'NZBPO_MYCATEGORY'
-env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
-'REMOTE_PATH']
-cfg_keys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
-'remote_path']
+env_keys = [
+'ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
+'REMOTE_PATH',
+]
+cfg_keys = [
+'enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
+'remote_path',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_MY{env_keys[index]}'
@@ -479,11 +501,15 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'NzbDrone'
 env_cat_key = 'NZBPO_NDCATEGORY'
-env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
-'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE']
+env_keys = [
+'ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
+'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE',
+]
 # new cfgKey added for importMode
-cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
-'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'importMode']
+cfg_keys = [
+'enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
+'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'importMode',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_ND{env_keys[index]}'
@@ -501,11 +527,15 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'Radarr'
 env_cat_key = 'NZBPO_RACATEGORY'
-env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
-'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE']
+env_keys = [
+'ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
+'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE',
+]
 # new cfgKey added for importMode
-cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
-'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey', 'importMode']
+cfg_keys = [
+'enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
+'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey', 'importMode',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_RA{env_keys[index]}'
@@ -523,10 +553,14 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'Lidarr'
 env_cat_key = 'NZBPO_LICATEGORY'
-env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
-'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
-cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
-'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
+env_keys = [
+'ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
+'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH',
+]
+cfg_keys = [
+'enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
+'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_LI{env_keys[index]}'
@@ -561,7 +595,8 @@ class ConfigObj(configobj.ConfigObj, Section):
 cfg_new[section][option] = value
 section = 'Transcoder'
-env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
+env_keys = [
+'TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
 'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
 'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
@@ -570,8 +605,10 @@ class ConfigObj(configobj.ConfigObj, Section):
 'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE',
 'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
 'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS',
-'OUTPUTAUDIOOTHERCHANNELS', 'OUTPUTVIDEORESOLUTION']
-cfg_keys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
+'OUTPUTAUDIOOTHERCHANNELS', 'OUTPUTVIDEORESOLUTION',
+]
+cfg_keys = [
+'transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
 'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
 'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir',
 'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
@@ -580,7 +617,8 @@ class ConfigObj(configobj.ConfigObj, Section):
 'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate',
 'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
 'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
-'outputAudioOtherChannels', 'outputVideoResolution']
+'outputAudioOtherChannels', 'outputVideoResolution',
+]
 for index in range(len(env_keys)):
 key = f'NZBPO_{env_keys[index]}'
 if key in os.environ:
@@ -600,10 +638,14 @@ class ConfigObj(configobj.ConfigObj, Section):
 section = 'UserScript'
 env_cat_key = 'NZBPO_USCATEGORY'
-env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
-'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
-cfg_keys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
-'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
+env_keys = [
+'USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
+'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH',
+]
+cfg_keys = [
+'user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
+'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path',
+]
 if env_cat_key in os.environ:
 for index in range(len(env_keys)):
 key = f'NZBPO_{env_keys[index]}'
@@ -40,17 +40,14 @@ class InitialSchema(main_db.SchemaUpgrade):
 cur_db_version = self.check_db_version()
 if cur_db_version < MIN_DB_VERSION:
-logger.log_error_and_exit('Your database version ({current}) is too old to migrate '
-'from what this version of nzbToMedia supports ({min}).'
-'\nPlease remove nzbtomedia.db file to begin fresh.'.format
-(current=cur_db_version, min=MIN_DB_VERSION))
+logger.log_error_and_exit(
+'Your database version ({current}) is too old to migrate from what this version of nzbToMedia supports ({min}).\nPlease remove nzbtomedia.db file to begin fresh.'.format(current=cur_db_version, min=MIN_DB_VERSION),
+)
 if cur_db_version > MAX_DB_VERSION:
-logger.log_error_and_exit('Your database version ({current}) has been incremented '
-'past what this version of nzbToMedia supports ({max}).'
-'\nIf you have used other forks of nzbToMedia, your database '
-'may be unusable due to their modifications.'.format
-(current=cur_db_version, max=MAX_DB_VERSION))
+logger.log_error_and_exit(
+'Your database version ({current}) has been incremented past what this version of nzbToMedia supports ({max}).\nIf you have used other forks of nzbToMedia, your database may be unusable due to their modifications.'.format(current=cur_db_version, max=MAX_DB_VERSION),
+)
 if cur_db_version < MAX_DB_VERSION: # We need to upgrade.
 queries = [
 'CREATE TABLE downloads2 (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));',
@@ -44,7 +44,9 @@ class GitHub:
 Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/
 """
 return self._access_api(
-['repos', self.github_repo_user, self.github_repo, 'compare',
-f'{base}...{head}'],
+[
+'repos', self.github_repo_user, self.github_repo, 'compare',
+f'{base}...{head}',
+],
 params={'per_page': per_page},
 )
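The _access_api call above assembles GitHub's compare endpoint (GET /repos/{owner}/{repo}/compare/{base}...{head}); a minimal standalone sketch of the same request using requests, independent of how this class performs it internally:

    import requests

    def compare_commits(user, repo, base, head, per_page=1):
        # GitHub REST API: compare two commits.
        url = f'https://api.github.com/repos/{user}/{repo}/compare/{base}...{head}'
        response = requests.get(url, params={'per_page': per_page}, timeout=30)
        response.raise_for_status()
        return response.json()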
@@ -19,12 +19,14 @@ DEBUG = logging.DEBUG
 POSTPROCESS = 21
 DB = 5
-reverseNames = {'ERROR': ERROR,
+reverseNames = {
+'ERROR': ERROR,
 'WARNING': WARNING,
 'INFO': MESSAGE,
 'DEBUG': DEBUG,
 'POSTPROCESS': POSTPROCESS,
-'DB': DB}
+'DB': DB,
+}
 class NTMRotatingLogHandler:
@@ -81,12 +83,16 @@ class NTMRotatingLogHandler:
 console.setLevel(DB)
 # set a format which is simpler for console use
-console.setFormatter(DispatchingFormatter(
-{'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
+console.setFormatter(
+DispatchingFormatter(
+{
+'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
 'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
 'db': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
 },
-logging.Formatter('%(message)s')))
+logging.Formatter('%(message)s'),
+),
+)
 # add the handler to the root logger
 logging.getLogger('nzbtomedia').addHandler(console)
@@ -115,12 +121,16 @@ class NTMRotatingLogHandler:
 file_handler.setLevel(DB)
-file_handler.setFormatter(DispatchingFormatter(
-{'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
+file_handler.setFormatter(
+DispatchingFormatter(
+{
+'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
 'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
 'db': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
 },
-logging.Formatter('%(message)s')))
+logging.Formatter('%(message)s'),
+),
+)
 return file_handler
@@ -56,8 +56,9 @@ class DBConnection:
 cursor.execute(query)
 sql_result = cursor.fetchone()[0]
 else:
-logger.log('{name}: {query} with args {args}'.format
-(name=self.filename, query=query, args=args), logger.DB)
+logger.log(
+'{name}: {query} with args {args}'.format(name=self.filename, query=query, args=args), logger.DB,
+)
 cursor = self.connection.cursor()
 cursor.execute(query, args)
 sql_result = cursor.fetchone()[0]
@@ -131,8 +132,9 @@ class DBConnection:
 logger.log(f'{self.filename}: {query}', logger.DB)
 sql_result = self.connection.execute(query)
 else:
-logger.log('{name}: {query} with args {args}'.format
-(name=self.filename, query=query, args=args), logger.DB)
+logger.log(
+'{name}: {query} with args {args}'.format(name=self.filename, query=query, args=args), logger.DB,
+)
 sql_result = self.connection.execute(query, args)
 self.connection.commit()
 # get out of the connection attempt loop since we were successful
@@ -228,22 +230,27 @@ def pretty_name(class_name):
 def _process_upgrade(connection, upgrade_class):
 instance = upgrade_class(connection)
-logger.log('Checking {name} database upgrade'.format
-(name=pretty_name(upgrade_class.__name__)), logger.DEBUG)
+logger.log(
+'Checking {name} database upgrade'.format(name=pretty_name(upgrade_class.__name__)), logger.DEBUG,
+)
 if not instance.test():
-logger.log('Database upgrade required: {name}'.format
-(name=pretty_name(upgrade_class.__name__)), logger.MESSAGE)
+logger.log(
+'Database upgrade required: {name}'.format(name=pretty_name(upgrade_class.__name__)), logger.MESSAGE,
+)
 try:
 instance.execute()
 except sqlite3.DatabaseError as error:
-print('Error in {name}: {msg}'.format
-(name=upgrade_class.__name__, msg=error))
+print(
+'Error in {name}: {msg}'.format(name=upgrade_class.__name__, msg=error),
+)
 raise
-logger.log('{name} upgrade completed'.format
-(name=upgrade_class.__name__), logger.DEBUG)
+logger.log(
+'{name} upgrade completed'.format(name=upgrade_class.__name__), logger.DEBUG,
+)
 else:
-logger.log('{name} upgrade not required'.format
-(name=upgrade_class.__name__), logger.DEBUG)
+logger.log(
+'{name} upgrade not required'.format(name=upgrade_class.__name__), logger.DEBUG,
+)
 for upgradeSubClass in upgrade_class.__subclasses__():
 _process_upgrade(connection, upgradeSubClass)
@@ -8,22 +8,27 @@ import core
 from core import logger
 from core.utils import list_media_files
-reverse_list = [r'\.\d{2}e\d{2}s\.', r'\.[pi]0801\.', r'\.p027\.', r'\.[pi]675\.', r'\.[pi]084\.', r'\.p063\.',
+reverse_list = [
+r'\.\d{2}e\d{2}s\.', r'\.[pi]0801\.', r'\.p027\.', r'\.[pi]675\.', r'\.[pi]084\.', r'\.p063\.',
 r'\b[45]62[xh]\.', r'\.yarulb\.', r'\.vtd[hp]\.',
 r'\.ld[.-]?bew\.', r'\.pir.?(dov|dvd|bew|db|rb)\.', r'\brdvd\.', r'\.vts\.', r'\.reneercs\.',
 r'\.dcv\.', r'\b(pir|mac)dh\b', r'\.reporp\.', r'\.kcaper\.',
-r'\.lanretni\.', r'\b3ca\b', r'\.cstn\.']
+r'\.lanretni\.', r'\b3ca\b', r'\.cstn\.',
+]
 reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE)
 season_pattern = re.compile(r'(.*\.\d{2}e\d{2}s\.)(.*)', flags=re.IGNORECASE)
 word_pattern = re.compile(r'([^A-Z0-9]*[A-Z0-9]+)')
-media_list = [r'\.s\d{2}e\d{2}\.', r'\.1080[pi]\.', r'\.720p\.', r'\.576[pi]', r'\.480[pi]\.', r'\.360p\.',
+media_list = [
+r'\.s\d{2}e\d{2}\.', r'\.1080[pi]\.', r'\.720p\.', r'\.576[pi]', r'\.480[pi]\.', r'\.360p\.',
 r'\.[xh]26[45]\b', r'\.bluray\.', r'\.[hp]dtv\.',
 r'\.web[.-]?dl\.', r'\.(vod|dvd|web|bd|br).?rip\.', r'\.dvdr\b', r'\.stv\.', r'\.screener\.', r'\.vcd\.',
 r'\bhd(cam|rip)\b', r'\.proper\.', r'\.repack\.',
-r'\.internal\.', r'\bac3\b', r'\.ntsc\.', r'\.pal\.', r'\.secam\.', r'\bdivx\b', r'\bxvid\b']
+r'\.internal\.', r'\bac3\b', r'\.ntsc\.', r'\.pal\.', r'\.secam\.', r'\bdivx\b', r'\bxvid\b',
+]
 media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE)
 garbage_name = re.compile(r'^[a-zA-Z0-9]*$')
-char_replace = [[r'(\w)1\.(\w)', r'\1i\2'],
+char_replace = [
+[r'(\w)1\.(\w)', r'\1i\2'],
 ]
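The reverse_list patterns above describe common release tokens spelled backwards (for example '1080p' appearing as 'p0801'); a quick standalone illustration (assumed usage, not part of the diff) of how reverse_pattern flags such names:

    import re

    reverse_list = [r'\.\d{2}e\d{2}s\.', r'\.[pi]0801\.']  # abbreviated from the full list above
    reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE)

    name = 'E1D.p0801.10E20S.wohS.emoS'   # 'Some.Show.S02E01.1080p.D1E' reversed
    print(bool(reverse_pattern.search(name)))        # True: looks like a reversed name
    print(bool(reverse_pattern.search(name[::-1])))  # False once un-reversed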
@@ -66,8 +71,9 @@ def strip_groups(filename):
 def rename_file(filename, newfile_path):
 if os.path.isfile(newfile_path):
 newfile_path = os.path.splitext(newfile_path)[0] + '.NTM' + os.path.splitext(newfile_path)[1]
-logger.debug('Replacing file name {old} with download name {new}'.format
-(old=filename, new=newfile_path), 'EXCEPTION')
+logger.debug(
+'Replacing file name {old} with download name {new}'.format(old=filename, new=newfile_path), 'EXCEPTION',
+)
 try:
 os.rename(filename, newfile_path)
 except Exception as error:
@@ -81,8 +87,9 @@ def replace_filename(filename, dirname, name):
 logger.debug(f'Replacing file name {head} with directory name {newname}', 'EXCEPTION')
 elif media_pattern.search(name.replace(' ', '.').lower()) is not None:
 newname = name.replace(' ', '.')
-logger.debug('Replacing file name {old} with download name {new}'.format
-(old=head, new=newname), 'EXCEPTION')
+logger.debug(
+'Replacing file name {old} with download name {new}'.format(old=head, new=newname), 'EXCEPTION',
+)
 else:
 logger.warning(f'No name replacement determined for {head}', 'EXCEPTION')
 newname = name
@@ -110,8 +117,9 @@ def reverse_filename(filename, dirname, name):
 else:
 newname = head[::-1].title()
 newname = newname.replace(' ', '.')
-logger.debug('Reversing filename {old} to {new}'.format
-(old=head, new=newname), 'EXCEPTION')
+logger.debug(
+'Reversing filename {old} to {new}'.format(old=head, new=newname), 'EXCEPTION',
+)
 newfile = newname + file_extension
 newfile_path = os.path.join(dirname, newfile)
 return newfile_path
@@ -137,8 +145,9 @@ def rename_script(dirname):
 dest = os.path.join(dirname, cmd[1].split('\\')[-1].split('/')[-1])
 if os.path.isfile(dest):
 continue
-logger.debug('Renaming file {source} to {destination}'.format
-(source=orig, destination=dest), 'EXCEPTION')
+logger.debug(
+'Renaming file {source} to {destination}'.format(source=orig, destination=dest), 'EXCEPTION',
+)
 try:
 os.rename(orig, dest)
 except Exception as error:
@@ -33,8 +33,10 @@ def is_video_good(videofile, status, require_lan=None):
 aud_streams = [item for item in test_details['streams'] if 'codec_type' in item and item['codec_type'] == 'audio']
 if not (len(vid_streams) > 0 and len(aud_streams) > 0):
 disable = True
-logger.info('DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.',
-'TRANSCODER')
+logger.info(
+'DISABLED: ffprobe failed to analyse streams from test file. Stopping corruption check.',
+'TRANSCODER',
+)
 if disable:
 if status: # if the download was 'failed', assume bad. If it was successful, assume good.
 return False
@@ -89,8 +91,10 @@ def get_video_details(videofile, img=None, bitbucket=None):
 try:
 if img:
 videofile = '-'
-command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', '-show_error',
-videofile]
+command = [
+core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', '-show_error',
+videofile,
+]
 print_cmd(command)
 if img:
 procin = zip_out(file, img, bitbucket)
@@ -178,7 +182,8 @@ def build_commands(file, new_dir, movie_name, bitbucket):
 other_cmd = []
 if not video_details or not video_details.get(
-'streams'): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
+'streams',
+): # we couldn't read streams with ffprobe. Set defaults to try transcoding.
 video_streams = []
 audio_streams = []
 sub_streams = []
@@ -205,8 +210,10 @@ def build_commands(file, new_dir, movie_name, bitbucket):
 if core.ACODEC:
 audio_cmd.extend(['-c:a', core.ACODEC])
-if core.ACODEC in ['aac',
-'dts']: # Allow users to use the experimental AAC codec that's built into recent versions of ffmpeg
+if core.ACODEC in [
+'aac',
+'dts',
+]: # Allow users to use the experimental AAC codec that's built into recent versions of ffmpeg
 audio_cmd.extend(['-strict', '-2'])
 else:
 audio_cmd.extend(['-c:a', 'copy'])
@@ -232,9 +239,12 @@ def build_commands(file, new_dir, movie_name, bitbucket):
 audio_streams = [item for item in video_details['streams'] if item['codec_type'] == 'audio']
 sub_streams = [item for item in video_details['streams'] if item['codec_type'] == 'subtitle']
 if core.VEXTENSION not in ['.mkv', '.mpegts']:
-sub_streams = [item for item in video_details['streams'] if
+sub_streams = [
+item for item in video_details['streams'] if
 item['codec_type'] == 'subtitle' and item['codec_name'] != 'hdmv_pgs_subtitle' and item[
-'codec_name'] != 'pgssub']
+'codec_name'
+] != 'pgssub'
+]
 for video in video_streams:
 codec = video['codec_name']
@@ -518,8 +528,10 @@ def build_commands(file, new_dir, movie_name, bitbucket):
 except Exception:
 pass
 if metlan:
-meta_cmd.extend([f'-metadata:s:s:{len(s_mapped) + n}',
-f'language={metlan.alpha3}'])
+meta_cmd.extend([
+f'-metadata:s:s:{len(s_mapped) + n}',
+f'language={metlan.alpha3}',
+])
 n += 1
 map_cmd.extend(['-map', f'{n}:0'])
@@ -567,13 +579,19 @@ def extract_subs(file, newfile_path, bitbucket):
 name = os.path.splitext(os.path.split(newfile_path)[1])[0]
 try:
-sub_streams = [item for item in video_details['streams'] if
+sub_streams = [
+item for item in video_details['streams'] if
 item['codec_type'] == 'subtitle' and item['tags']['language'] in core.SLANGUAGES and item[
-'codec_name'] != 'hdmv_pgs_subtitle' and item['codec_name'] != 'pgssub']
+'codec_name'
+] != 'hdmv_pgs_subtitle' and item['codec_name'] != 'pgssub'
+]
 except Exception:
-sub_streams = [item for item in video_details['streams'] if
+sub_streams = [
+item for item in video_details['streams'] if
 item['codec_type'] == 'subtitle' and item['codec_name'] != 'hdmv_pgs_subtitle' and item[
-'codec_name'] != 'pgssub']
+'codec_name'
+] != 'pgssub'
+]
 num = len(sub_streams)
 for n in range(num):
 sub = sub_streams[n]
@@ -589,8 +607,10 @@ def extract_subs(file, newfile_path, bitbucket):
 if os.path.isfile(output_file):
 output_file = os.path.join(subdir, f'{name}.{lan}.{n}.srt')
-command = [core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an',
-f'-codec:{idx}', 'srt', output_file]
+command = [
+core.FFMPEG, '-loglevel', 'warning', '-i', file, '-vn', '-an',
+f'-codec:{idx}', 'srt', output_file,
+]
 if platform.system() != 'Windows':
 command = core.NICENESS + command
@@ -749,7 +769,7 @@ def rip_iso(item, new_dir, bitbucket):
 combined.extend(concat)
 continue
 name = '{name}.cd{x}'.format(
-name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1
+name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1,
 )
 new_files.append({item: {'name': name, 'files': concat}})
 else: # check BlueRay for BDMV/STREAM/XXXX.MTS
@@ -775,7 +795,7 @@ def rip_iso(item, new_dir, bitbucket):
 combined.extend(concat)
 continue
 name = '{name}.cd{x}'.format(
-name=os.path.splitext(os.path.split(item)[1])[0], x=n
+name=os.path.splitext(os.path.split(item)[1])[0], x=n,
 )
 new_files.append({item: {'name': name, 'files': concat}})
 if core.CONCAT and combined:
@@ -814,7 +834,7 @@ def combine_vts(vts_path):
 combined.extend(concat)
 continue
 name = '{name}.cd{x}'.format(
-name=name, x=n + 1
+name=name, x=n + 1,
 )
 new_files.append({vts_path: {'name': name, 'files': concat}})
 if core.CONCAT:
@@ -836,14 +856,14 @@ def combine_mts(mts_path):
 mts_list.sort(key=lambda f: int(filter(str.isdigit, f)))
 else: # Python3 sorting
 mts_list.sort(key=lambda f: int(''.join(filter(str.isdigit, f))))
-for mts_name in mts_list: ### need to sort all files [1 - 998].mts in order
+for mts_name in mts_list: # need to sort all files [1 - 998].mts in order
 concat = []
 concat.append(os.path.join(mts_path, mts_name))
 if core.CONCAT:
 combined.extend(concat)
 continue
 name = '{name}.cd{x}'.format(
-name=name, x=n + 1
+name=name, x=n + 1,
 )
 new_files.append({mts_path: {'name': name, 'files': concat}})
 n += 1
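The Python 3 sort key kept in the context above orders .mts segments numerically rather than lexicographically; a quick standalone illustration:

    mts_list = ['2.mts', '10.mts', '1.mts']
    mts_list.sort(key=lambda f: int(''.join(filter(str.isdigit, f))))
    print(mts_list)  # ['1.mts', '2.mts', '10.mts']; a plain string sort would give ['1.mts', '10.mts', '2.mts']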
@@ -857,8 +877,10 @@ def combine_cd(combine):
 for item in {re.match('(.+)[cC][dD][0-9].', item).groups()[0] for item in combine}:
 concat = ''
 for n in range(99):
-files = [file for file in combine if
-n + 1 == int(re.match('.+[cC][dD]([0-9]+).', file).groups()[0]) and item in file]
+files = [
+file for file in combine if
+n + 1 == int(re.match('.+[cC][dD]([0-9]+).', file).groups()[0]) and item in file
+]
 if files:
 concat += f'{files[0]}|'
 else:
@@ -105,7 +105,8 @@ class GitUpdateManager(UpdateManager):
 def _git_error(self):
 logger.debug(
-'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')
+'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.',
+)
 def _find_working_git(self):
 test_cmd = 'version'
@@ -115,8 +116,10 @@ class GitUpdateManager(UpdateManager):
 else:
 main_git = 'git'
-logger.log('Checking if we can use git commands: {git} {cmd}'.format
-(git=main_git, cmd=test_cmd), logger.DEBUG)
+logger.log(
+'Checking if we can use git commands: {git} {cmd}'.format
+(git=main_git, cmd=test_cmd), logger.DEBUG,
+)
 output, err, exit_status = self._run_git(main_git, test_cmd)
 if exit_status == 0:
@@ -141,8 +144,10 @@ class GitUpdateManager(UpdateManager):
 logger.log('Trying known alternative git locations', logger.DEBUG)
 for cur_git in alternative_git:
-logger.log('Checking if we can use git commands: {git} {cmd}'.format
-(git=cur_git, cmd=test_cmd), logger.DEBUG)
+logger.log(
+'Checking if we can use git commands: {git} {cmd}'.format
+(git=cur_git, cmd=test_cmd), logger.DEBUG,
+)
 output, err, exit_status = self._run_git(cur_git, test_cmd)
 if exit_status == 0:
@@ -152,9 +157,11 @@ class GitUpdateManager(UpdateManager):
 logger.log(f'Not using: {cur_git}', logger.DEBUG)
 # Still haven't found a working git
-logger.debug('Unable to find your git executable - '
+logger.debug(
+'Unable to find your git executable - '
 'Set git_path in your autoProcessMedia.cfg OR '
-'delete your .git folder and run from source to enable updates.')
+'delete your .git folder and run from source to enable updates.',
+)
 return None
@@ -171,10 +178,14 @@ class GitUpdateManager(UpdateManager):
 cmd = f'{git_path} {args}'
 try:
-logger.log('Executing {cmd} with your shell in {directory}'.format
-(cmd=cmd, directory=core.APP_ROOT), logger.DEBUG)
-p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
-shell=True, cwd=core.APP_ROOT)
+logger.log(
+'Executing {cmd} with your shell in {directory}'.format
+(cmd=cmd, directory=core.APP_ROOT), logger.DEBUG,
+)
+p = subprocess.Popen(
+cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+shell=True, cwd=core.APP_ROOT,
+)
 output, err = p.communicate()
 exit_status = p.returncode
@@ -194,12 +205,16 @@ class GitUpdateManager(UpdateManager):
 logger.log(f'{cmd} : returned successful', logger.DEBUG)
 exit_status = 0
 elif core.LOG_GIT and exit_status in (1, 128):
-logger.log('{cmd} returned : {output}'.format
-(cmd=cmd, output=output), logger.DEBUG)
+logger.log(
+'{cmd} returned : {output}'.format
+(cmd=cmd, output=output), logger.DEBUG,
+)
 else:
 if core.LOG_GIT:
-logger.log('{cmd} returned : {output}, treat as error for now'.format
-(cmd=cmd, output=output), logger.DEBUG)
+logger.log(
+'{cmd} returned : {output}, treat as error for now'.format
+(cmd=cmd, output=output), logger.DEBUG,
+)
 exit_status = 1
 return output, err, exit_status
@@ -284,18 +299,26 @@ class GitUpdateManager(UpdateManager):
 logger.log('git didn\'t return numbers for behind and ahead, not using it', logger.DEBUG)
 return
-logger.log('cur_commit = {current} % (newest_commit)= {new}, '
+logger.log(
+'cur_commit = {current} % (newest_commit)= {new}, '
 'num_commits_behind = {x}, num_commits_ahead = {y}'.format
-(current=self._cur_commit_hash, new=self._newest_commit_hash,
-x=self._num_commits_behind, y=self._num_commits_ahead), logger.DEBUG)
+(
+current=self._cur_commit_hash, new=self._newest_commit_hash,
+x=self._num_commits_behind, y=self._num_commits_ahead,
+), logger.DEBUG,
+)
 def set_newest_text(self):
 if self._num_commits_ahead:
-logger.log('Local branch is ahead of {branch}. Automatic update not possible.'.format
-(branch=self.branch), logger.ERROR)
+logger.log(
+'Local branch is ahead of {branch}. Automatic update not possible.'.format
+(branch=self.branch), logger.ERROR,
+)
 elif self._num_commits_behind:
-logger.log('There is a newer version available (you\'re {x} commit{s} behind)'.format
-(x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''), logger.MESSAGE)
+logger.log(
+'There is a newer version available (you\'re {x} commit{s} behind)'.format
+(x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''), logger.MESSAGE,
+)
 else:
 return
@@ -417,8 +440,10 @@ class SourceUpdateManager(UpdateManager):
 # when _cur_commit_hash doesn't match anything _num_commits_behind == 100
 self._num_commits_behind += 1
-logger.log('cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}'.format
-(current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG)
+logger.log(
+'cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}'.format
+(current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG,
+)
 def set_newest_text(self):
@@ -428,15 +453,18 @@ class SourceUpdateManager(UpdateManager):
 if not self._cur_commit_hash:
 logger.log('Unknown current version number, don\'t know if we should update or not', logger.ERROR)
 elif self._num_commits_behind > 0:
-logger.log('There is a newer version available (you\'re {x} commit{s} behind)'.format
-(x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''), logger.MESSAGE)
+logger.log(
+'There is a newer version available (you\'re {x} commit{s} behind)'.format
+(x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''), logger.MESSAGE,
+)
 else:
 return
 def update(self):
 """Download and install latest source tarball from github."""
 tar_download_url = 'https://github.com/{org}/{repo}/tarball/{branch}'.format(
-org=self.github_repo_user, repo=self.github_repo, branch=self.branch)
+org=self.github_repo_user, repo=self.github_repo, branch=self.branch,
+)
 version_path = os.path.join(core.APP_ROOT, 'version.txt')
 try:
@@ -456,13 +484,17 @@ class SourceUpdateManager(UpdateManager):
 urlretrieve(tar_download_url, tar_download_path)
 if not os.path.isfile(tar_download_path):
-logger.log('Unable to retrieve new version from {url}, can\'t update'.format
-(url=tar_download_url), logger.ERROR)
+logger.log(
+'Unable to retrieve new version from {url}, can\'t update'.format
+(url=tar_download_url), logger.ERROR,
+)
 return False
 if not tarfile.is_tarfile(tar_download_path):
-logger.log('Retrieved version from {url} is corrupt, can\'t update'.format
-(url=tar_download_url), logger.ERROR)
+logger.log(
+'Retrieved version from {url} is corrupt, can\'t update'.format
+(url=tar_download_url), logger.ERROR,
+)
 return False
 # extract to sb-update dir
@@ -476,16 +508,20 @@ class SourceUpdateManager(UpdateManager):
 os.remove(tar_download_path)
 # find update dir name
-update_dir_contents = [x for x in os.listdir(sb_update_dir) if
-os.path.isdir(os.path.join(sb_update_dir, x))]
+update_dir_contents = [
+x for x in os.listdir(sb_update_dir) if
+os.path.isdir(os.path.join(sb_update_dir, x))
+]
 if len(update_dir_contents) != 1:
 logger.log(f'Invalid update data, update failed: {update_dir_contents}', logger.ERROR)
 return False
 content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
 # walk temp folder and move files to main folder
-logger.log('Moving files from {source} to {destination}'.format
-(source=content_dir, destination=core.APP_ROOT))
+logger.log(
+'Moving files from {source} to {destination}'.format
+(source=content_dir, destination=core.APP_ROOT),
+)
 for dirname, _, filenames in os.walk(content_dir): # @UnusedVariable
 dirname = dirname[len(content_dir) + 1:]
 for curfile in filenames:
@@ -501,8 +537,10 @@ class SourceUpdateManager(UpdateManager):
 os.remove(new_path)
 os.renames(old_path, new_path)
 except Exception as error:
-logger.log('Unable to update {path}: {msg}'.format
-(path=new_path, msg=error), logger.DEBUG)
+logger.log(
+'Unable to update {path}: {msg}'.format
+(path=new_path, msg=error), logger.DEBUG,
+)
 os.remove(old_path) # Trash the updated file without moving in new path
 continue
@@ -515,13 +553,17 @@ class SourceUpdateManager(UpdateManager):
 with open(version_path, 'w') as ver_file:
 ver_file.write(self._newest_commit_hash)
 except OSError as error:
-logger.log('Unable to write version file, update not complete: {msg}'.format
-(msg=error), logger.ERROR)
+logger.log(
+'Unable to write version file, update not complete: {msg}'.format
+(msg=error), logger.ERROR,
+)
 return False
 except Exception as error:
-logger.log('Error while trying to update: {msg}'.format
-(msg=error), logger.ERROR)
+logger.log(
+'Error while trying to update: {msg}'.format
+(msg=error), logger.ERROR,
+)
 logger.log(f'Traceback: {traceback.format_exc()}', logger.DEBUG)
 return False