Mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-22 14:13:33 -07:00)

commit 7709c3e514 (parent 08850f2d94)

Lint

- Order requirements.txt
- Add missing commas
- Blacken code

43 changed files with 3475 additions and 903 deletions
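Most of the diff below is mechanical. Black treats a trailing comma inside brackets as a "magic trailing comma": once the missing commas are added, the formatter explodes each long single-line call into one argument per line with the closing bracket dedented onto its own line. A minimal before/after sketch of the transformation, using a call shape repeated throughout this commit:

    # Before: long call with a hanging-indent continuation and no
    # trailing comma.
    logger.error('A problem was reported in the autoProcess* script. '
                 'Torrent won\'t resume seeding (settings)')

    # After: trailing comma added, so Black reflows the call with one
    # argument per line and the closing parenthesis on its own line.
    logger.error(
        'A problem was reported in the autoProcess* script. '
        'Torrent won\'t resume seeding (settings)',
    )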
@@ -57,8 +57,10 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     logger.debug(f'Received Directory: {input_directory} | Name: {input_name} | Category: {input_category}')

     # Confirm the category by parsing directory structure
-    input_directory, input_name, input_category, root = core.category_search(input_directory, input_name, input_category,
-                                                                             root, core.CATEGORIES)
+    input_directory, input_name, input_category, root = core.category_search(
+        input_directory, input_name, input_category,
+        root, core.CATEGORIES,
+    )
     if input_category == '':
         input_category = 'UNCAT'

@@ -110,10 +112,12 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
         output_destination = os.path.join(core.OUTPUT_DIRECTORY, input_category, basename)
     elif unique_path:
         output_destination = os.path.normpath(
-            core.os.path.join(core.OUTPUT_DIRECTORY, input_category, core.sanitize_name(input_name).replace(' ', '.')))
+            core.os.path.join(core.OUTPUT_DIRECTORY, input_category, core.sanitize_name(input_name).replace(' ', '.')),
+        )
     else:
         output_destination = os.path.normpath(
-            core.os.path.join(core.OUTPUT_DIRECTORY, input_category))
+            core.os.path.join(core.OUTPUT_DIRECTORY, input_category),
+        )

     if output_destination in input_directory:
         output_destination = input_directory
@@ -128,7 +132,8 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp

     if section_name in ['HeadPhones', 'Lidarr']:
         core.NOFLATTEN.extend(
-            input_category)  # Make sure we preserve folder structure for HeadPhones.
+            input_category,
+        )  # Make sure we preserve folder structure for HeadPhones.

     now = datetime.datetime.now()

@@ -150,13 +155,16 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
         if input_category in core.NOFLATTEN:
             if not os.path.basename(file_path) in output_destination:
                 target_file = core.os.path.join(
-                    core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name)
+                    core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name,
+                )
                 logger.debug(f'Setting outputDestination to {os.path.dirname(target_file)} to preserve folder structure')
         if root == 1:
             if not found_file:
                 logger.debug(f'Looking for {input_name} in: {inputFile}')
-            if any([core.sanitize_name(input_name) in core.sanitize_name(inputFile),
-                    core.sanitize_name(file_name) in core.sanitize_name(input_name)]):
+            if any([
+                core.sanitize_name(input_name) in core.sanitize_name(inputFile),
+                core.sanitize_name(file_name) in core.sanitize_name(input_name),
+            ]):
                 found_file = True
                 logger.debug(f'Found file {full_file_name} that matches Torrent Name {input_name}')
             else:
@@ -194,7 +202,8 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     # Now check if video files exist in destination:
     if section_name in ['SickBeard', 'SiCKRAGE', 'NzbDrone', 'Sonarr', 'CouchPotato', 'Radarr', 'Watcher3']:
         num_videos = len(
-            core.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False))
+            core.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False),
+        )
         if num_videos > 0:
             logger.info(f'Found {num_videos} media files in {output_destination}')
             status = 0
@@ -248,11 +257,15 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp

     if result.status_code != 0:
         if not core.TORRENT_RESUME_ON_FAILURE:
-            logger.error('A problem was reported in the autoProcess* script. '
-                         'Torrent won\'t resume seeding (settings)')
+            logger.error(
+                'A problem was reported in the autoProcess* script. '
+                'Torrent won\'t resume seeding (settings)',
+            )
         elif client_agent != 'manual':
-            logger.error('A problem was reported in the autoProcess* script. '
-                         'If torrent was paused we will resume seeding')
+            logger.error(
+                'A problem was reported in the autoProcess* script. '
+                'If torrent was paused we will resume seeding',
+            )
             core.resume_torrent(client_agent, input_hash, input_id, input_name)

     else:
@@ -336,8 +349,10 @@ def main(args):

                 input_name = os.path.basename(dir_name)

-                results = process_torrent(dir_name, input_name, subsection, input_hash or None, input_id or None,
-                                          client_agent)
+                results = process_torrent(
+                    dir_name, input_name, subsection, input_hash or None, input_id or None,
+                    client_agent,
+                )
                 if results.status_code != 0:
                     logger.error(f'A problem was reported when trying to perform a manual run for {section}:{subsection}.')
                     result = results
core/__init__.py — 858 changes (file diff suppressed because it is too large)
@@ -94,19 +94,29 @@ def process(
     logger.postprocess(f'{r.text}', section)

-    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+    if r.status_code not in [
+        requests.codes.ok,
+        requests.codes.created,
+        requests.codes.accepted,
+    ]:
         logger.error(f'Server returned status {r.status_code}', section)
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Server returned status '
             f'{r.status_code}',
         )
     elif r.text == 'OK':
-        logger.postprocess(f'SUCCESS: ForceProcess for {dir_name} has been started in LazyLibrarian', section)
+        logger.postprocess(
+            f'SUCCESS: ForceProcess for {dir_name} has been started in LazyLibrarian',
+            section,
+        )
         return ProcessResult.success(
             f'{section}: Successfully post-processed {input_name}',
         )
     else:
-        logger.error(f'FAILED: ForceProcess of {dir_name} has Failed in LazyLibrarian', section)
+        logger.error(
+            f'FAILED: ForceProcess of {dir_name} has Failed in LazyLibrarian',
+            section,
+        )
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Returned log from {section} '
             f'was not as expected.',
@@ -96,14 +96,20 @@ def process(

     logger.debug(f'Opening URL: {url}', section)
     try:
-        r = requests.post(url, params=params, stream=True, verify=False, timeout=(30, 300))
+        r = requests.post(
+            url, params=params, stream=True, verify=False, timeout=(30, 300),
+        )
     except requests.ConnectionError:
         logger.error('Unable to open URL', section)
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Unable to connect to '
             f'{section}',
         )
-    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+    if r.status_code not in [
+        requests.codes.ok,
+        requests.codes.created,
+        requests.codes.accepted,
+    ]:
         logger.error(f'Server returned status {r.status_code}', section)
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Server returned status '
@@ -120,12 +126,17 @@ def process(
             success = True

     if success:
-        logger.postprocess('SUCCESS: This issue has been processed successfully', section)
+        logger.postprocess(
+            'SUCCESS: This issue has been processed successfully', section,
+        )
         return ProcessResult.success(
             f'{section}: Successfully post-processed {input_name}',
         )
     else:
-        logger.warning('The issue does not appear to have successfully processed. Please check your Logs', section)
+        logger.warning(
+            'The issue does not appear to have successfully processed. Please check your Logs',
+            section,
+        )
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Returned log from '
             f'{section} was not as expected.',
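Several of the reformatted calls pass requests a two-element timeout, e.g. timeout=(30, 300). In the requests library this is a (connect, read) pair: the first number bounds establishing the TCP connection, the second bounds each wait for data, so a slow post-process endpoint can stream for minutes without tripping the connect timeout. A hedged sketch of the same call shape (the URL and params below are placeholders, not values from this commit):

    import requests

    try:
        r = requests.post(
            'http://localhost:8090/api',  # placeholder endpoint
            params={'cmd': 'forceProcess'},  # placeholder params
            stream=True,
            verify=False,
            timeout=(30, 300),  # 30 s to connect, 300 s per read
        )
    except requests.ConnectionError:
        print('Unable to open URL')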
@@ -32,11 +32,22 @@ class ProcessResult(typing.NamedTuple):

 def command_complete(url, params, headers, section):
     try:
-        r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60))
+        r = requests.get(
+            url,
+            params=params,
+            headers=headers,
+            stream=True,
+            verify=False,
+            timeout=(30, 60),
+        )
     except requests.ConnectionError:
         logger.error(f'Unable to open URL: {url}', section)
         return None
-    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+    if r.status_code not in [
+        requests.codes.ok,
+        requests.codes.created,
+        requests.codes.accepted,
+    ]:
         logger.error(f'Server returned status {r.status_code}', section)
         return None
     else:
@@ -44,17 +55,30 @@ def command_complete(url, params, headers, section):
             return r.json()['status']
         except (ValueError, KeyError):
             # ValueError catches simplejson's JSONDecodeError and json's ValueError
-            logger.error(f'{section} did not return expected json data.', section)
+            logger.error(
+                f'{section} did not return expected json data.', section,
+            )
             return None


 def completed_download_handling(url2, headers, section='MAIN'):
     try:
-        r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
+        r = requests.get(
+            url2,
+            params={},
+            headers=headers,
+            stream=True,
+            verify=False,
+            timeout=(30, 60),
+        )
     except requests.ConnectionError:
         logger.error(f'Unable to open URL: {url2}', section)
         return False
-    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+    if r.status_code not in [
+        requests.codes.ok,
+        requests.codes.created,
+        requests.codes.accepted,
+    ]:
         logger.error(f'Server returned status {r.status_code}', section)
         return False
     else:
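The membership test reformatted throughout these modules — r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted] — is a 200/201/202 whitelist; requests.codes maps status names to their numeric codes. A small equivalent helper, assuming only the requests library (this helper is illustrative and does not exist in the codebase):

    import requests

    # requests.codes.ok == 200, .created == 201, .accepted == 202
    SUCCESS_CODES = (
        requests.codes.ok,
        requests.codes.created,
        requests.codes.accepted,
    )

    def succeeded(response: requests.Response) -> bool:
        # Same check as in command_complete and completed_download_handling.
        return response.status_code in SUCCESS_CODES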
@@ -69,7 +69,9 @@ def process(
     url = core.utils.common.create_url(scheme, host, port, web_root)
     if not server_responding(url):
         logger.error('Server did not respond. Exiting', section)
-        return ProcessResult.failure(f'{section}: Failed to post-process - {section} did not respond.')
+        return ProcessResult.failure(
+            f'{section}: Failed to post-process - {section} did not respond.',
+        )

     input_name, dir_name = convert_to_ascii(input_name, dir_name)

@@ -104,30 +106,46 @@ def process(
         try:
             shutil.move(dir_name, os.path.join(library, input_name))
         except Exception:
-            logger.error(f'Unable to move {dir_name} to {os.path.join(library, input_name)}', section)
+            logger.error(
+                f'Unable to move {dir_name} to {os.path.join(library, input_name)}',
+                section,
+            )
             return ProcessResult.failure(
                 f'{section}: Failed to post-process - Unable to move files',
             )
     else:
-        logger.error('No library specified to move files to. Please edit your configuration.', section)
+        logger.error(
+            'No library specified to move files to. Please edit your configuration.',
+            section,
+        )
         return ProcessResult.failure(
             f'{section}: Failed to post-process - No library defined in '
             f'{section}',
         )

-    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+    if r.status_code not in [
+        requests.codes.ok,
+        requests.codes.created,
+        requests.codes.accepted,
+    ]:
         logger.error(f'Server returned status {r.status_code}', section)
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Server returned status '
             f'{r.status_code}',
         )
     elif result['success']:
-        logger.postprocess(f'SUCCESS: Status for {gamez_id} has been set to {download_status} in Gamez', section)
+        logger.postprocess(
+            f'SUCCESS: Status for {gamez_id} has been set to {download_status} in Gamez',
+            section,
+        )
         return ProcessResult.success(
             f'{section}: Successfully post-processed {input_name}',
         )
     else:
-        logger.error(f'FAILED: Status for {gamez_id} has NOT been updated in Gamez', section)
+        logger.error(
+            f'FAILED: Status for {gamez_id} has NOT been updated in Gamez',
+            section,
+        )
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Returned log from {section} '
             f'was not as expected.',
@@ -402,7 +402,8 @@ class SickBeard:

         self.delete_failed = int(self.sb_init.config.get('delete_failed', 0))
         self.nzb_extraction_by = self.sb_init.config.get(
-            'nzbExtractionBy', 'Downloader',
+            'nzbExtractionBy',
+            'Downloader',
         )
         self.process_method = self.sb_init.config.get('process_method')
         self.remote_path = int(self.sb_init.config.get('remote_path', 0))
@@ -581,7 +582,8 @@ class SickBeard:
             )
         except requests.ConnectionError:
             logger.error(
-                f'Unable to open URL: {self.url}', self.sb_init.section,
+                f'Unable to open URL: {self.url}',
+                self.sb_init.section,
             )
             result = ProcessResult.failure(
                 f'{self.sb_init.section}: Failed to post-process - Unable to '
@@ -95,7 +95,9 @@ def process(
         route = web_root
     base_url = core.utils.common.create_url(scheme, host, port, route)
     if not apikey:
-        logger.info('No CouchPotato or Radarr apikey entered. Performing transcoder functions only')
+        logger.info(
+            'No CouchPotato or Radarr apikey entered. Performing transcoder functions only',
+        )
         release = None
     elif server_responding(base_url):
         if section == 'CouchPotato':
@@ -104,7 +106,9 @@ def process(
         release = None
     else:
         logger.error('Server did not respond. Exiting', section)
-        return ProcessResult.failure(f'{section}: Failed to post-process - {section} did not respond.')
+        return ProcessResult.failure(
+            f'{section}: Failed to post-process - {section} did not respond.',
+        )

     # pull info from release found if available
     release_id = None
@@ -121,7 +125,9 @@ def process(
         except Exception:
             pass

-    if not os.path.isdir(dir_name) and os.path.isfile(dir_name):  # If the input directory is a file, assume single file download and split dir/name.
+    if not os.path.isdir(dir_name) and os.path.isfile(
+        dir_name,
+    ):  # If the input directory is a file, assume single file download and split dir/name.
         dir_name = os.path.split(os.path.normpath(dir_name))[0]

     specific_path = os.path.join(dir_name, str(input_name))
@@ -134,8 +140,18 @@ def process(
     process_all_exceptions(input_name, dir_name)
     input_name, dir_name = convert_to_ascii(input_name, dir_name)

-    if not list_media_files(dir_name, media=True, audio=False, meta=False, archives=False) and list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
-        logger.debug(f'Checking for archives to extract in directory: {dir_name}')
+    if (
+        not list_media_files(
+            dir_name, media=True, audio=False, meta=False, archives=False,
+        )
+        and list_media_files(
+            dir_name, media=False, audio=False, meta=False, archives=True,
+        )
+        and extract
+    ):
+        logger.debug(
+            f'Checking for archives to extract in directory: {dir_name}',
+        )
         core.extract_files(dir_name)
         input_name, dir_name = convert_to_ascii(input_name, dir_name)

@@ -143,68 +159,115 @@ def process(
     valid_files = 0
     num_files = 0
     # Check video files for corruption
-    for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
+    for video in list_media_files(
+        dir_name, media=True, audio=False, meta=False, archives=False,
+    ):
         num_files += 1
         if transcoder.is_video_good(video, status):
             good_files += 1
-        if not core.REQUIRE_LAN or transcoder.is_video_good(video, status, require_lan=core.REQUIRE_LAN):
+        if not core.REQUIRE_LAN or transcoder.is_video_good(
+            video, status, require_lan=core.REQUIRE_LAN,
+        ):
             valid_files += 1
             import_subs(video)
             rename_subs(dir_name)
     if num_files and valid_files == num_files:
         if status:
-            logger.info(f'Status shown as failed from Downloader, but {good_files} valid video files found. Setting as success.', section)
+            logger.info(
+                f'Status shown as failed from Downloader, but {good_files} valid video files found. Setting as success.',
+                section,
+            )
         status = 0
     elif num_files and valid_files < num_files:
-        logger.info('Status shown as success from Downloader, but corrupt video files found. Setting as failed.', section)
+        logger.info(
+            'Status shown as success from Downloader, but corrupt video files found. Setting as failed.',
+            section,
+        )
         status = 1
-        if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
+        if (
+            'NZBOP_VERSION' in os.environ
+            and os.environ['NZBOP_VERSION'][0:5] >= '14.0'
+        ):
            print('[NZB] MARK=BAD')
         if good_files == num_files:
-            logger.debug(f'Video marked as failed due to missing required language: {core.REQUIRE_LAN}', section)
+            logger.debug(
+                f'Video marked as failed due to missing required language: {core.REQUIRE_LAN}',
+                section,
+            )
         else:
-            logger.debug('Video marked as failed due to missing playable audio or video', section)
-        if good_files < num_files and failure_link:  # only report corrupt files
+            logger.debug(
+                'Video marked as failed due to missing playable audio or video',
+                section,
+            )
+        if (
+            good_files < num_files and failure_link
+        ):  # only report corrupt files
             failure_link += '&corrupt=true'
     elif client_agent == 'manual':
-        logger.warning(f'No media files found in directory {dir_name} to manually process.', section)
+        logger.warning(
+            f'No media files found in directory {dir_name} to manually process.',
+            section,
+        )
         return ProcessResult(
             message='',
             status_code=0,  # Success (as far as this script is concerned)
         )
     else:
-        logger.warning(f'No media files found in directory {dir_name}. Processing this as a failed download', section)
+        logger.warning(
+            f'No media files found in directory {dir_name}. Processing this as a failed download',
+            section,
+        )
         status = 1
-        if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
+        if (
+            'NZBOP_VERSION' in os.environ
+            and os.environ['NZBOP_VERSION'][0:5] >= '14.0'
+        ):
             print('[NZB] MARK=BAD')

     if status == 0:
         if core.TRANSCODE == 1:
             result, new_dir_name = transcoder.transcode_directory(dir_name)
             if result == 0:
-                logger.debug(f'Transcoding succeeded for files in {dir_name}', section)
+                logger.debug(
+                    f'Transcoding succeeded for files in {dir_name}', section,
+                )
                 dir_name = new_dir_name

-                logger.debug(f'Config setting \'chmodDirectory\' currently set to {oct(chmod_directory)}', section)
+                logger.debug(
+                    f'Config setting \'chmodDirectory\' currently set to {oct(chmod_directory)}',
+                    section,
+                )
                 if chmod_directory:
-                    logger.info(f'Attempting to set the octal permission of \'{oct(chmod_directory)}\' on directory \'{dir_name}\'', section)
+                    logger.info(
+                        f'Attempting to set the octal permission of \'{oct(chmod_directory)}\' on directory \'{dir_name}\'',
+                        section,
+                    )
                     core.rchmod(dir_name, chmod_directory)
             else:
-                logger.error(f'Transcoding failed for files in {dir_name}', section)
+                logger.error(
+                    f'Transcoding failed for files in {dir_name}', section,
+                )
                 return ProcessResult(
                     message=f'{section}: Failed to post-process - Transcoding failed',
                     status_code=1,
                 )
-        for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
+        for video in list_media_files(
+            dir_name, media=True, audio=False, meta=False, archives=False,
+        ):
             if not release and '.cp(tt' not in video and imdbid:
                 video_name, video_ext = os.path.splitext(video)
                 video2 = f'{video_name}.cp({imdbid}){video_ext}'
-                if not (client_agent in [core.TORRENT_CLIENT_AGENT, 'manual'] and core.USE_LINK == 'move-sym'):
+                if not (
+                    client_agent in [core.TORRENT_CLIENT_AGENT, 'manual']
+                    and core.USE_LINK == 'move-sym'
+                ):
                     logger.debug(f'Renaming: {video} to: {video2}')
                     os.rename(video, video2)

         if not apikey:  # If only using Transcoder functions, exit here.
-            logger.info('No CouchPotato or Radarr or Watcher3 apikey entered. Processing completed.')
+            logger.info(
+                'No CouchPotato or Radarr or Watcher3 apikey entered. Processing completed.',
+            )
             return ProcessResult(
                 message=f'{section}: Successfully post-processed {input_name}',
                 status_code=0,
@@ -227,47 +290,91 @@ def process(

     url = f'{base_url}{command}'
     logger.debug(f'Opening URL: {url} with PARAMS: {params}', section)
-    logger.postprocess(f'Starting {method} scan for {input_name}', section)
+    logger.postprocess(
+        f'Starting {method} scan for {input_name}', section,
+    )

     if section == 'Radarr':
-        payload = {'name': 'DownloadedMoviesScan', 'path': params['media_folder'], 'downloadClientId': download_id, 'importMode': import_mode}
+        payload = {
+            'name': 'DownloadedMoviesScan',
+            'path': params['media_folder'],
+            'downloadClientId': download_id,
+            'importMode': import_mode,
+        }
         if not download_id:
             payload.pop('downloadClientId')
-        logger.debug(f'Opening URL: {base_url} with PARAMS: {payload}', section)
-        logger.postprocess(f'Starting DownloadedMoviesScan scan for {input_name}', section)
+        logger.debug(
+            f'Opening URL: {base_url} with PARAMS: {payload}', section,
+        )
+        logger.postprocess(
+            f'Starting DownloadedMoviesScan scan for {input_name}', section,
+        )

     if section == 'Watcher3':
-        if input_name and os.path.isfile(os.path.join(dir_name, input_name)):
-            params['media_folder'] = os.path.join(params['media_folder'], input_name)
-        payload = {'apikey': apikey, 'path': params['media_folder'], 'guid': download_id, 'mode': 'complete'}
+        if input_name and os.path.isfile(
+            os.path.join(dir_name, input_name),
+        ):
+            params['media_folder'] = os.path.join(
+                params['media_folder'], input_name,
+            )
+        payload = {
+            'apikey': apikey,
+            'path': params['media_folder'],
+            'guid': download_id,
+            'mode': 'complete',
+        }
         if not download_id:
             payload.pop('guid')
-        logger.debug(f'Opening URL: {base_url} with PARAMS: {payload}', section)
-        logger.postprocess(f'Starting postprocessing scan for {input_name}', section)
+        logger.debug(
+            f'Opening URL: {base_url} with PARAMS: {payload}', section,
+        )
+        logger.postprocess(
+            f'Starting postprocessing scan for {input_name}', section,
+        )

     try:
         if section == 'CouchPotato':
-            r = requests.get(url, params=params, verify=False, timeout=(30, 1800))
+            r = requests.get(
+                url, params=params, verify=False, timeout=(30, 1800),
+            )
         elif section == 'Watcher3':
-            r = requests.post(base_url, data=payload, verify=False, timeout=(30, 1800))
+            r = requests.post(
+                base_url, data=payload, verify=False, timeout=(30, 1800),
+            )
         else:
-            r = requests.post(base_url, data=json.dumps(payload), headers=headers, stream=True, verify=False, timeout=(30, 1800))
+            r = requests.post(
+                base_url,
+                data=json.dumps(payload),
+                headers=headers,
+                stream=True,
+                verify=False,
+                timeout=(30, 1800),
+            )
     except requests.ConnectionError:
         logger.error('Unable to open URL', section)
         return ProcessResult(
-            message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
+            message='{0}: Failed to post-process - Unable to connect to {0}'.format(
+                section,
+            ),
             status_code=1,
         )

     result = r.json()
-    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+    if r.status_code not in [
+        requests.codes.ok,
+        requests.codes.created,
+        requests.codes.accepted,
+    ]:
         logger.error(f'Server returned status {r.status_code}', section)
         return ProcessResult(
             message=f'{section}: Failed to post-process - Server returned status {r.status_code}',
             status_code=1,
         )
     elif section == 'CouchPotato' and result['success']:
-        logger.postprocess(f'SUCCESS: Finished {method} scan for folder {dir_name}', section)
+        logger.postprocess(
+            f'SUCCESS: Finished {method} scan for folder {dir_name}',
+            section,
+        )
         if method == 'manage':
             return ProcessResult(
                 message=f'{section}: Successfully post-processed {input_name}',
@@ -304,33 +411,60 @@ def process(
             )
     else:
         core.FAILED = True
-        logger.postprocess(f'FAILED DOWNLOAD DETECTED FOR {input_name}', section)
+        logger.postprocess(
+            f'FAILED DOWNLOAD DETECTED FOR {input_name}', section,
+        )
         if failure_link:
             report_nzb(failure_link, client_agent)

         if section == 'Radarr':
-            logger.postprocess(f'SUCCESS: Sending failed download to {section} for CDH processing', section)
+            logger.postprocess(
+                f'SUCCESS: Sending failed download to {section} for CDH processing',
+                section,
+            )
             return ProcessResult(
-                message='{0}: Sending failed download back to {0}'.format(section),
+                message='{0}: Sending failed download back to {0}'.format(
+                    section,
+                ),
                 status_code=1,  # Return as failed to flag this in the downloader.
             )  # Return failed flag, but log the event as successful.
         elif section == 'Watcher3':
-            logger.postprocess(f'Sending failed download to {section} for CDH processing', section)
+            logger.postprocess(
+                f'Sending failed download to {section} for CDH processing',
+                section,
+            )
             path = remote_dir(dir_name) if remote_path else dir_name
-            if input_name and os.path.isfile(os.path.join(dir_name, input_name)):
+            if input_name and os.path.isfile(
+                os.path.join(dir_name, input_name),
+            ):
                 path = os.path.join(path, input_name)
-            payload = {'apikey': apikey, 'path': path, 'guid': download_id, 'mode': 'failed'}
-            r = requests.post(base_url, data=payload, verify=False, timeout=(30, 1800))
+            payload = {
+                'apikey': apikey,
+                'path': path,
+                'guid': download_id,
+                'mode': 'failed',
+            }
+            r = requests.post(
+                base_url, data=payload, verify=False, timeout=(30, 1800),
+            )
             result = r.json()
             logger.postprocess(f'Watcher3 response: {result}')
             if result['status'] == 'finished':
                 return ProcessResult(
-                    message='{0}: Sending failed download back to {0}'.format(section),
+                    message='{0}: Sending failed download back to {0}'.format(
+                        section,
+                    ),
                     status_code=1,  # Return as failed to flag this in the downloader.
                 )  # Return failed flag, but log the event as successful.

-        if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
-            logger.postprocess(f'Deleting failed files and folder {dir_name}', section)
+        if (
+            delete_failed
+            and os.path.isdir(dir_name)
+            and not os.path.dirname(dir_name) == dir_name
+        ):
+            logger.postprocess(
+                f'Deleting failed files and folder {dir_name}', section,
+            )
             remove_dir(dir_name)

         if not release_id and not media_id:
@@ -339,12 +473,16 @@ def process(
                 section,
             )
             return ProcessResult(
-                message='{0}: Failed to post-process - Failed download not found in {0}'.format(section),
+                message='{0}: Failed to post-process - Failed download not found in {0}'.format(
+                    section,
+                ),
                 status_code=1,
             )

         if release_id:
-            logger.postprocess(f'Setting failed release {input_name} to ignored ...', section)
+            logger.postprocess(
+                f'Setting failed release {input_name} to ignored ...', section,
+            )

             url = f'{base_url}release.ignore'
             params = {'id': release_id}
@@ -352,37 +490,59 @@ def process(
             logger.debug(f'Opening URL: {url} with PARAMS: {params}', section)

             try:
-                r = requests.get(url, params=params, verify=False, timeout=(30, 120))
+                r = requests.get(
+                    url, params=params, verify=False, timeout=(30, 120),
+                )
             except requests.ConnectionError:
                 logger.error(f'Unable to open URL {url}', section)
                 return ProcessResult(
-                    message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
+                    message='{0}: Failed to post-process - Unable to connect to {0}'.format(
+                        section,
+                    ),
                     status_code=1,
                 )

             result = r.json()
-            if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
-                logger.error(f'Server returned status {r.status_code}', section)
+            if r.status_code not in [
+                requests.codes.ok,
+                requests.codes.created,
+                requests.codes.accepted,
+            ]:
+                logger.error(
+                    f'Server returned status {r.status_code}', section,
+                )
                 return ProcessResult(
                     status_code=1,
                     message=f'{section}: Failed to post-process - Server returned status {r.status_code}',
                 )
             elif result['success']:
-                logger.postprocess(f'SUCCESS: {input_name} has been set to ignored ...', section)
+                logger.postprocess(
+                    f'SUCCESS: {input_name} has been set to ignored ...',
+                    section,
+                )
             else:
-                logger.warning(f'FAILED: Unable to set {input_name} to ignored!', section)
+                logger.warning(
+                    f'FAILED: Unable to set {input_name} to ignored!', section,
+                )
                 return ProcessResult(
                     message=f'{section}: Failed to post-process - Unable to set {input_name} to ignored',
                     status_code=1,
                 )

-        logger.postprocess('Trying to snatch the next highest ranked release.', section)
+        logger.postprocess(
+            'Trying to snatch the next highest ranked release.', section,
+        )

         url = f'{base_url}movie.searcher.try_next'
         logger.debug(f'Opening URL: {url}', section)

         try:
-            r = requests.get(url, params={'media_id': media_id}, verify=False, timeout=(30, 600))
+            r = requests.get(
+                url,
+                params={'media_id': media_id},
+                verify=False,
+                timeout=(30, 600),
+            )
         except requests.ConnectionError:
             logger.error(f'Unable to open URL {url}', section)
             return ProcessResult.failure(
@@ -391,19 +551,28 @@ def process(
             )

         result = r.json()
-        if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+        if r.status_code not in [
+            requests.codes.ok,
+            requests.codes.created,
+            requests.codes.accepted,
+        ]:
             logger.error(f'Server returned status {r.status_code}', section)
             return ProcessResult.failure(
                 f'{section}: Failed to post-process - Server returned status '
                 f'{r.status_code}',
             )
         elif result['success']:
-            logger.postprocess('SUCCESS: Snatched the next highest release ...', section)
+            logger.postprocess(
+                'SUCCESS: Snatched the next highest release ...', section,
+            )
             return ProcessResult.success(
                 f'{section}: Successfully snatched next highest release',
             )
         else:
-            logger.postprocess('SUCCESS: Unable to find a new release to snatch now. CP will keep searching!', section)
+            logger.postprocess(
+                'SUCCESS: Unable to find a new release to snatch now. CP will keep searching!',
+                section,
+            )
             return ProcessResult.success(
                 f'{section}: No new release found now. '
                 f'{section} will keep searching',
@@ -411,7 +580,9 @@ def process(

     # Added a release that was not in the wanted list so confirm rename successful by finding this movie media.list.
     if not release:
-        download_id = None  # we don't want to filter new releases based on this.
+        download_id = (
+            None  # we don't want to filter new releases based on this.
+        )

     if no_status_check:
         return ProcessResult.success(
@@ -422,7 +593,9 @@ def process(
     # we will now check to see if CPS has finished renaming before returning to TorrentToMedia and unpausing.
     timeout = time.time() + 60 * wait_for
     while time.time() < timeout:  # only wait 2 (default) minutes, then return.
-        logger.postprocess('Checking for status change, please stand by ...', section)
+        logger.postprocess(
+            'Checking for status change, please stand by ...', section,
+        )
         if section == 'CouchPotato':
             release = get_release(base_url, imdbid, download_id, release_id)
             scan_id = None
@@ -432,15 +605,23 @@ def process(
             try:
                 release_id = list(release.keys())[0]
                 release_status_new = release[release_id]['status']
-                if release_status_old is None:  # we didn't have a release before, but now we do.
+                if (
+                    release_status_old is None
+                ):  # we didn't have a release before, but now we do.
                     title = release[release_id]['title']
-                    logger.postprocess(f'SUCCESS: Movie {title} has now been added to CouchPotato with release status of [{str(release_status_new).upper()}]', section)
+                    logger.postprocess(
+                        f'SUCCESS: Movie {title} has now been added to CouchPotato with release status of [{str(release_status_new).upper()}]',
+                        section,
+                    )
                     return ProcessResult.success(
                         f'{section}: Successfully post-processed {input_name}',
                     )

                 if release_status_new != release_status_old:
-                    logger.postprocess(f'SUCCESS: Release {release_id} has now been marked with a status of [{str(release_status_new).upper()}]', section)
+                    logger.postprocess(
+                        f'SUCCESS: Release {release_id} has now been marked with a status of [{str(release_status_new).upper()}]',
+                        section,
+                    )
                     return ProcessResult.success(
                         f'{section}: Successfully post-processed {input_name}',
                     )
@@ -450,27 +631,44 @@ def process(
             url = f'{base_url}/{scan_id}'
             command_status = command_complete(url, params, headers, section)
             if command_status:
-                logger.debug(f'The Scan command return status: {command_status}', section)
+                logger.debug(
+                    f'The Scan command return status: {command_status}',
+                    section,
+                )
                 if command_status in ['completed']:
-                    logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
+                    logger.debug(
+                        'The Scan command has completed successfully. Renaming was successful.',
+                        section,
+                    )
                     return ProcessResult.success(
                         f'{section}: Successfully post-processed {input_name}',
                     )
                 elif command_status in ['failed']:
-                    logger.debug('The Scan command has failed. Renaming was not successful.', section)
+                    logger.debug(
+                        'The Scan command has failed. Renaming was not successful.',
+                        section,
+                    )
                     # return ProcessResult(
                     #     message='{0}: Failed to post-process {1}'.format(section, input_name),
                     #     status_code=1,
                     # )

         if not os.path.isdir(dir_name):
-            logger.postprocess(f'SUCCESS: Input Directory [{dir_name}] has been processed and removed', section)
+            logger.postprocess(
+                f'SUCCESS: Input Directory [{dir_name}] has been processed and removed',
+                section,
+            )
             return ProcessResult.success(
                 f'{section}: Successfully post-processed {input_name}',
             )

-        elif not list_media_files(dir_name, media=True, audio=False, meta=False, archives=True):
-            logger.postprocess(f'SUCCESS: Input Directory [{dir_name}] has no remaining media files. This has been fully processed.', section)
+        elif not list_media_files(
+            dir_name, media=True, audio=False, meta=False, archives=True,
+        ):
+            logger.postprocess(
+                f'SUCCESS: Input Directory [{dir_name}] has no remaining media files. This has been fully processed.',
+                section,
+            )
             return ProcessResult.success(
                 f'{section}: Successfully post-processed {input_name}',
             )
@@ -479,8 +677,13 @@ def process(
         time.sleep(10 * wait_for)

     # The status hasn't changed. we have waited wait_for minutes which is more than enough. uTorrent can resume seeding now.
-    if section == 'Radarr' and completed_download_handling(url2, headers, section=section):
-        logger.debug(f'The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {section}.', section)
+    if section == 'Radarr' and completed_download_handling(
+        url2, headers, section=section,
+    ):
+        logger.debug(
+            f'The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {section}.',
+            section,
+        )
         return ProcessResult.success(
             f'{section}: Complete DownLoad Handling is enabled. Passing back '
             f'to {section}',
@@ -562,7 +765,10 @@ def get_release(base_url, imdb_id=None, download_id=None, release_id=None):
             if release['status'] not in ['snatched', 'downloaded', 'done']:
                 continue
             if download_id:
-                if download_id.lower() != release['download_info']['id'].lower():
+                if (
+                    download_id.lower()
+                    != release['download_info']['id'].lower()
+                ):
                     continue

             cur_id = release['_id']
@@ -589,7 +795,10 @@ def get_release(base_url, imdb_id=None, download_id=None, release_id=None):
     rem_id = set()
     for cur_id, x in results.items():
         try:
-            if not find_download(str(x['download_info']['downloader']).lower(), x['download_info']['id']):
+            if not find_download(
+                str(x['download_info']['downloader']).lower(),
+                x['download_info']['id'],
+            ):
                 rem_id.add(cur_id)
         except Exception:
             continue
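The guarded print('[NZB] MARK=BAD') that keeps reappearing above is NZBGet's stdout command protocol: a post-processing script prints that exact line and NZBGet marks the download as bad. The version gate mirrors the code in this commit (NZBGet exports NZBOP_VERSION to its scripts); the lexicographic [0:5] >= '14.0' comparison is the repository's own idiom, reproduced here as-is:

    import os

    def mark_bad_in_nzbget() -> None:
        # Only emit the command when running under a new-enough NZBGet.
        version = os.environ.get('NZBOP_VERSION', '')
        if version[0:5] >= '14.0':
            print('[NZB] MARK=BAD')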
@@ -80,7 +80,9 @@ def process(
             f'{section}: Failed to post-process - {section} did not respond.',
         )

-    if not os.path.isdir(dir_name) and os.path.isfile(dir_name):  # If the input directory is a file, assume single file download and split dir/name.
+    if not os.path.isdir(dir_name) and os.path.isfile(
+        dir_name,
+    ):  # If the input directory is a file, assume single file download and split dir/name.
         dir_name = os.path.split(os.path.normpath(dir_name))[0]

     specific_path = os.path.join(dir_name, str(input_name))
@@ -93,8 +95,18 @@ def process(
     process_all_exceptions(input_name, dir_name)
     input_name, dir_name = convert_to_ascii(input_name, dir_name)

-    if not list_media_files(dir_name, media=False, audio=True, meta=False, archives=False) and list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
-        logger.debug(f'Checking for archives to extract in directory: {dir_name}')
+    if (
+        not list_media_files(
+            dir_name, media=False, audio=True, meta=False, archives=False,
+        )
+        and list_media_files(
+            dir_name, media=False, audio=False, meta=False, archives=True,
+        )
+        and extract
+    ):
+        logger.debug(
+            f'Checking for archives to extract in directory: {dir_name}',
+        )
         core.extract_files(dir_name)
         input_name, dir_name = convert_to_ascii(input_name, dir_name)

@@ -110,22 +122,31 @@ def process(
             'dir': remote_dir(dir_name) if remote_path else dir_name,
         }

-        res = force_process(params, url, apikey, input_name, dir_name, section, wait_for)
+        res = force_process(
+            params, url, apikey, input_name, dir_name, section, wait_for,
+        )
         if res.status_code in [0, 1]:
             return res

         params = {
             'apikey': apikey,
             'cmd': 'forceProcess',
-            'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0],
+            'dir': os.path.split(remote_dir(dir_name))[0]
+            if remote_path
+            else os.path.split(dir_name)[0],
         }

-        res = force_process(params, url, apikey, input_name, dir_name, section, wait_for)
+        res = force_process(
+            params, url, apikey, input_name, dir_name, section, wait_for,
+        )
         if res.status_code in [0, 1]:
             return res

         # The status hasn't changed. uTorrent can resume seeding now.
-        logger.warning(f'The music album does not appear to have changed status after {wait_for} minutes. Please check your Logs', section)
+        logger.warning(
+            f'The music album does not appear to have changed status after {wait_for} minutes. Please check your Logs',
+            section,
+        )
         return ProcessResult.failure(
             f'{section}: Failed to post-process - No change in wanted status',
         )
@@ -143,7 +164,14 @@ def process(
         data = json.dumps(data)
         try:
             logger.debug(f'Opening URL: {url} with data: {data}', section)
-            r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
+            r = requests.post(
+                url,
+                data=data,
+                headers=headers,
+                stream=True,
+                verify=False,
+                timeout=(30, 1800),
+            )
         except requests.ConnectionError:
             logger.error(f'Unable to open URL: {url}', section)
             return ProcessResult.failure(
@@ -171,41 +199,64 @@ def process(
                 break
             n += 1
         if command_status:
-            logger.debug(f'The Scan command return status: {command_status}', section)
+            logger.debug(
+                f'The Scan command return status: {command_status}', section,
+            )
         if not os.path.exists(dir_name):
-            logger.debug(f'The directory {dir_name} has been removed. Renaming was successful.', section)
+            logger.debug(
+                f'The directory {dir_name} has been removed. Renaming was successful.',
+                section,
+            )
             return ProcessResult.success(
                 f'{section}: Successfully post-processed {input_name}',
             )
         elif command_status and command_status in ['completed']:
-            logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
+            logger.debug(
+                'The Scan command has completed successfully. Renaming was successful.',
+                section,
+            )
             return ProcessResult.success(
                 f'{section}: Successfully post-processed {input_name}',
             )
         elif command_status and command_status in ['failed']:
-            logger.debug('The Scan command has failed. Renaming was not successful.', section)
+            logger.debug(
+                'The Scan command has failed. Renaming was not successful.',
+                section,
+            )
             # return ProcessResult.failure(
             #     f'{section}: Failed to post-process {input_name}'
             # )
         else:
-            logger.debug(f'The Scan command did not return status completed. Passing back to {section} to attempt complete download handling.', section)
+            logger.debug(
+                f'The Scan command did not return status completed. Passing back to {section} to attempt complete download handling.',
+                section,
+            )
             return ProcessResult(
                 message=f'{section}: Passing back to {section} to attempt '
-                        f'Complete Download Handling',
+                f'Complete Download Handling',
                 status_code=status,
             )

     else:
         if section == 'Lidarr':
-            logger.postprocess(f'FAILED: The download failed. Sending failed download to {section} for CDH processing', section)
+            logger.postprocess(
+                f'FAILED: The download failed. Sending failed download to {section} for CDH processing',
+                section,
+            )
             # Return as failed to flag this in the downloader.
             return ProcessResult.failure(
                 f'{section}: Download Failed. Sending back to {section}',
             )
         else:
             logger.warning('FAILED DOWNLOAD DETECTED', section)
-            if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
-                logger.postprocess(f'Deleting failed files and folder {dir_name}', section)
+            if (
+                delete_failed
+                and os.path.isdir(dir_name)
+                and not os.path.dirname(dir_name) == dir_name
+            ):
+                logger.postprocess(
+                    f'Deleting failed files and folder {dir_name}', section,
+                )
                 remove_dir(dir_name)
             # Return as failed to flag this in the downloader.
             return ProcessResult.failure(
@@ -215,7 +266,9 @@ def process(


 def get_status(url, apikey, dir_name):
-    logger.debug(f'Attempting to get current status for release:{os.path.basename(dir_name)}')
+    logger.debug(
+        f'Attempting to get current status for release:{os.path.basename(dir_name)}',
+    )

     params = {
         'apikey': apikey,
@@ -241,10 +294,15 @@ def get_status(url, apikey, dir_name):
             return album['Status'].lower()


-def force_process(params, url, apikey, input_name, dir_name, section, wait_for):
+def force_process(
+    params, url, apikey, input_name, dir_name, section, wait_for,
+):
     release_status = get_status(url, apikey, dir_name)
     if not release_status:
-        logger.error(f'Could not find a status for {input_name}, is it in the wanted list ?', section)
+        logger.error(
+            f'Could not find a status for {input_name}, is it in the wanted list ?',
+            section,
+        )

     logger.debug(f'Opening URL: {url} with PARAMS: {params}', section)

@@ -259,15 +317,25 @@ def force_process(params, url, apikey, input_name, dir_name, section, wait_for):

     logger.debug(f'Result: {r.text}', section)

-    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+    if r.status_code not in [
+        requests.codes.ok,
+        requests.codes.created,
+        requests.codes.accepted,
+    ]:
         logger.error(f'Server returned status {r.status_code}', section)
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Server returned status {r.status_code}',
         )
     elif r.text == 'OK':
-        logger.postprocess(f'SUCCESS: Post-Processing started for {input_name} in folder {dir_name} ...', section)
+        logger.postprocess(
+            f'SUCCESS: Post-Processing started for {input_name} in folder {dir_name} ...',
+            section,
+        )
     else:
-        logger.error(f'FAILED: Post-Processing has NOT started for {input_name} in folder {dir_name}. exiting!', section)
+        logger.error(
+            f'FAILED: Post-Processing has NOT started for {input_name} in folder {dir_name}. exiting!',
+            section,
+        )
         return ProcessResult.failure(
             f'{section}: Failed to post-process - Returned log from {section} '
             f'was not as expected.',
@@ -277,13 +345,21 @@ def force_process(params, url, apikey, input_name, dir_name, section, wait_for):
     timeout = time.time() + 60 * wait_for
     while time.time() < timeout:
         current_status = get_status(url, apikey, dir_name)
-        if current_status is not None and current_status != release_status:  # Something has changed. CPS must have processed this movie.
-            logger.postprocess(f'SUCCESS: This release is now marked as status [{current_status}]', section)
+        if (
+            current_status is not None and current_status != release_status
+        ):  # Something has changed. CPS must have processed this movie.
+            logger.postprocess(
+                f'SUCCESS: This release is now marked as status [{current_status}]',
+                section,
+            )
             return ProcessResult.success(
                 f'{section}: Successfully post-processed {input_name}',
             )
         if not os.path.isdir(dir_name):
-            logger.postprocess(f'SUCCESS: The input directory {dir_name} has been removed Processing must have finished.', section)
+            logger.postprocess(
+                f'SUCCESS: The input directory {dir_name} has been removed Processing must have finished.',
+                section,
+            )
             return ProcessResult.success(
                 f'{section}: Successfully post-processed {input_name}',
             )
@@ -97,7 +97,9 @@ def process(
         # Should be changed after refactor.
         fork, fork_params = init_sickbeard.auto_fork()
     elif not username and not apikey and not sso_username:
-        logger.info('No SickBeard / SiCKRAGE username or Sonarr apikey entered. Performing transcoder functions only')
+        logger.info(
+            'No SickBeard / SiCKRAGE username or Sonarr apikey entered. Performing transcoder functions only',
+        )
         fork, fork_params = 'None', {}
     else:
         logger.error('Server did not respond. Exiting', section)
@@ -105,9 +107,14 @@ def process(
             f'{section}: Failed to post-process - {section} did not respond.',
         )

-    if client_agent == core.TORRENT_CLIENT_AGENT and core.USE_LINK == 'move-sym':
+    if (
+        client_agent == core.TORRENT_CLIENT_AGENT
+        and core.USE_LINK == 'move-sym'
+    ):
         process_method = 'symlink'
-    if not os.path.isdir(dir_name) and os.path.isfile(dir_name):  # If the input directory is a file, assume single file download and split dir/name.
+    if not os.path.isdir(dir_name) and os.path.isfile(
+        dir_name,
+    ):  # If the input directory is a file, assume single file download and split dir/name.
         dir_name = os.path.split(os.path.normpath(dir_name))[0]

     specific_path = os.path.join(dir_name, str(input_name))
@@ -128,30 +135,52 @@ def process(
             if e.errno != errno.EEXIST:
                 raise

-    if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != 'Destination'):
+    if 'process_method' not in fork_params or (
+        client_agent in ['nzbget', 'sabnzbd']
+        and nzb_extraction_by != 'Destination'
+    ):
         if input_name:
             process_all_exceptions(input_name, dir_name)
             input_name, dir_name = convert_to_ascii(input_name, dir_name)

         # Now check if tv files exist in destination.
-        if not list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
-            if list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
-                logger.debug(f'Checking for archives to extract in directory: {dir_name}')
+        if not list_media_files(
+            dir_name, media=True, audio=False, meta=False, archives=False,
+        ):
+            if (
+                list_media_files(
+                    dir_name,
+                    media=False,
+                    audio=False,
+                    meta=False,
+                    archives=True,
+                )
+                and extract
+            ):
+                logger.debug(
+                    f'Checking for archives to extract in directory: {dir_name}',
+                )
                 core.extract_files(dir_name)
                 input_name, dir_name = convert_to_ascii(input_name, dir_name)

-        if list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):  # Check that a video exists. if not, assume failed.
+        if list_media_files(
+            dir_name, media=True, audio=False, meta=False, archives=False,
+        ):  # Check that a video exists. if not, assume failed.
             flatten(dir_name)

             # Check video files for corruption
             good_files = 0
             valid_files = 0
             num_files = 0
-            for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
+            for video in list_media_files(
+                dir_name, media=True, audio=False, meta=False, archives=False,
+            ):
                 num_files += 1
                 if transcoder.is_video_good(video, status):
                     good_files += 1
-                if not core.REQUIRE_LAN or transcoder.is_video_good(video, status, require_lan=core.REQUIRE_LAN):
+                if not core.REQUIRE_LAN or transcoder.is_video_good(
+                    video, status, require_lan=core.REQUIRE_LAN,
+                ):
                     valid_files += 1
                     import_subs(video)
                     rename_subs(dir_name)
@@ -164,54 +193,93 @@ def process(
                 logger.info('Found corrupt videos. Setting status Failed')
                 status = 1
                 failed = 1
-                if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
+                if (
+                    'NZBOP_VERSION' in os.environ
+                    and os.environ['NZBOP_VERSION'][0:5] >= '14.0'
+                ):
                     print('[NZB] MARK=BAD')
                 if good_files == num_files:
-                    logger.debug(f'Video marked as failed due to missing required language: {core.REQUIRE_LAN}', section)
+                    logger.debug(
+                        f'Video marked as failed due to missing required language: {core.REQUIRE_LAN}',
+                        section,
+                    )
                 else:
-                    logger.debug('Video marked as failed due to missing playable audio or video', section)
-                if good_files < num_files and failure_link:  # only report corrupt files
+                    logger.debug(
+                        'Video marked as failed due to missing playable audio or video',
+                        section,
+                    )
+                if (
+                    good_files < num_files and failure_link
+                ):  # only report corrupt files
                     failure_link += '&corrupt=true'
         elif client_agent == 'manual':
-            logger.warning(f'No media files found in directory {dir_name} to manually process.', section)
+            logger.warning(
+                f'No media files found in directory {dir_name} to manually process.',
+                section,
+            )
             # Success (as far as this script is concerned)
             return ProcessResult.success()
         elif nzb_extraction_by == 'Destination':
-            logger.info('Check for media files ignored because nzbExtractionBy is set to Destination.')
+            logger.info(
+                'Check for media files ignored because nzbExtractionBy is set to Destination.',
+            )
             if int(failed) == 0:
                 logger.info('Setting Status Success.')
                 status = 0
                 failed = 0
             else:
-                logger.info('Downloader reported an error during download or verification. Processing this as a failed download.')
+                logger.info(
+                    'Downloader reported an error during download or verification. Processing this as a failed download.',
+                )
                 status = 1
                 failed = 1
         else:
-            logger.warning(f'No media files found in directory {dir_name}. Processing this as a failed download', section)
+            logger.warning(
+                f'No media files found in directory {dir_name}. Processing this as a failed download',
+                section,
+            )
             status = 1
             failed = 1
-            if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
+            if (
+                'NZBOP_VERSION' in os.environ
+                and os.environ['NZBOP_VERSION'][0:5] >= '14.0'
+            ):
                 print('[NZB] MARK=BAD')

-    if status == 0 and core.TRANSCODE == 1:  # only transcode successful downloads
+    if (
+        status == 0 and core.TRANSCODE == 1
+    ):  # only transcode successful downloads
         result, new_dir_name = transcoder.transcode_directory(dir_name)
         if result == 0:
-            logger.debug(f'SUCCESS: Transcoding succeeded for files in {dir_name}', section)
+            logger.debug(
+                f'SUCCESS: Transcoding succeeded for files in {dir_name}',
+                section,
+            )
            dir_name = new_dir_name

-            logger.debug(f'Config setting \'chmodDirectory\' currently set to {oct(chmod_directory)}', section)
+            logger.debug(
+                f'Config setting \'chmodDirectory\' currently set to {oct(chmod_directory)}',
+                section,
+            )
            if chmod_directory:
-                logger.info(f'Attempting to set the octal permission of \'{oct(chmod_directory)}\' on directory \'{dir_name}\'', section)
+                logger.info(
+                    f'Attempting to set the octal permission of \'{oct(chmod_directory)}\' on directory \'{dir_name}\'',
+                    section,
+                )
                core.rchmod(dir_name, chmod_directory)
        else:
-            logger.error(f'FAILED: Transcoding failed for files in {dir_name}', section)
+            logger.error(
+                f'FAILED: Transcoding failed for files in {dir_name}', section,
+            )
            return ProcessResult.failure(
                f'{section}: Failed to post-process - Transcoding failed',
            )

    # Part of the refactor
    if init_sickbeard.fork_obj:
-        init_sickbeard.fork_obj.initialize(dir_name, input_name, failed, client_agent='manual')
+        init_sickbeard.fork_obj.initialize(
+            dir_name, input_name, failed, client_agent='manual',
+        )

    # configure SB params to pass
    # We don't want to remove params, for the Forks that have been refactored.
@@ -238,12 +306,20 @@ def process(
            del fork_params['quiet']

        if param == 'type':
-            if 'type' in fork_params:  # only set if we haven't already deleted for 'failed' above.
+            if (
+                'type' in fork_params
+            ):  # only set if we haven't already deleted for 'failed' above.
                fork_params[param] = 'manual'
            if 'proc_type' in fork_params:
                del fork_params['proc_type']

-        if param in ['dir_name', 'dir', 'proc_dir', 'process_directory', 'path']:
+        if param in [
+            'dir_name',
+            'dir',
+            'proc_dir',
+            'process_directory',
+            'path',
+        ]:
            fork_params[param] = dir_name
            if remote_path:
                fork_params[param] = remote_dir(dir_name)
@@ -284,26 +360,46 @@ def process(
                return ProcessResult.success(
                    f'{section}: Successfully post-processed {input_name}',
                )
-        logger.postprocess('SUCCESS: The download succeeded, sending a post-process request', section)
+        logger.postprocess(
+            'SUCCESS: The download succeeded, sending a post-process request',
+            section,
+        )
    else:
        core.FAILED = True
        if failure_link:
            report_nzb(failure_link, client_agent)
        if 'failed' in fork_params:
-            logger.postprocess(f'FAILED: The download failed. Sending \'failed\' process request to {fork} branch', section)
+            logger.postprocess(
+                f'FAILED: The download failed. Sending \'failed\' process request to {fork} branch',
+                section,
+            )
        elif section == 'NzbDrone':
-            logger.postprocess(f'FAILED: The download failed. Sending failed download to {fork} for CDH processing', section)
+            logger.postprocess(
+                f'FAILED: The download failed. Sending failed download to {fork} for CDH processing',
+                section,
+            )
            # Return as failed to flag this in the downloader.
            return ProcessResult.failure(
                f'{section}: Download Failed. Sending back to {section}',
            )
        else:
-            logger.postprocess(f'FAILED: The download failed. {fork} branch does not handle failed downloads. Nothing to process', section)
|
||||
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
|
||||
logger.postprocess(f'Deleting failed files and folder {dir_name}', section)
|
||||
logger.postprocess(
|
||||
f'FAILED: The download failed. {fork} branch does not handle failed downloads. Nothing to process',
|
||||
section,
|
||||
)
|
||||
if (
|
||||
delete_failed
|
||||
and os.path.isdir(dir_name)
|
||||
and not os.path.dirname(dir_name) == dir_name
|
||||
):
|
||||
logger.postprocess(
|
||||
f'Deleting failed files and folder {dir_name}', section,
|
||||
)
|
||||
remove_dir(dir_name)
|
||||
# Return as failed to flag this in the downloader.
|
||||
return ProcessResult.failure(f'{section}: Failed to post-process. {section} does not support failed downloads')
|
||||
return ProcessResult.failure(
|
||||
f'{section}: Failed to post-process. {section} does not support failed downloads',
|
||||
)
|
||||
|
||||
route = ''
|
||||
if section == 'SickBeard':
|
||||
|
@ -328,10 +424,20 @@ def process(
|
|||
# params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
|
||||
if remote_path:
|
||||
logger.debug(f'remote_path: {remote_dir(dir_name)}', section)
|
||||
data = {'name': 'DownloadedEpisodesScan', 'path': remote_dir(dir_name), 'downloadClientId': download_id, 'importMode': import_mode}
|
||||
data = {
|
||||
'name': 'DownloadedEpisodesScan',
|
||||
'path': remote_dir(dir_name),
|
||||
'downloadClientId': download_id,
|
||||
'importMode': import_mode,
|
||||
}
|
||||
else:
|
||||
logger.debug(f'path: {dir_name}', section)
|
||||
data = {'name': 'DownloadedEpisodesScan', 'path': dir_name, 'downloadClientId': download_id, 'importMode': import_mode}
|
||||
data = {
|
||||
'name': 'DownloadedEpisodesScan',
|
||||
'path': dir_name,
|
||||
'downloadClientId': download_id,
|
||||
'importMode': import_mode,
|
||||
}
|
||||
if not download_id:
|
||||
data.pop('downloadClientId')
|
||||
data = json.dumps(data)
|
||||
|
@ -343,34 +449,59 @@ def process(
|
|||
else:
|
||||
s = requests.Session()
|
||||
|
||||
logger.debug(f'Opening URL: {url} with params: {fork_params}', section)
|
||||
logger.debug(
|
||||
f'Opening URL: {url} with params: {fork_params}', section,
|
||||
)
|
||||
if not apikey and username and password:
|
||||
login = f'{web_root}/login'
|
||||
login_params = {'username': username, 'password': password}
|
||||
r = s.get(login, verify=False, timeout=(30, 60))
|
||||
if r.status_code in [401, 403] and r.cookies.get('_xsrf'):
|
||||
login_params['_xsrf'] = r.cookies.get('_xsrf')
|
||||
s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
|
||||
r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800))
|
||||
s.post(
|
||||
login,
|
||||
data=login_params,
|
||||
stream=True,
|
||||
verify=False,
|
||||
timeout=(30, 60),
|
||||
)
|
||||
r = s.get(
|
||||
url,
|
||||
auth=(username, password),
|
||||
params=fork_params,
|
||||
stream=True,
|
||||
verify=False,
|
||||
timeout=(30, 1800),
|
||||
)
|
||||
elif section == 'SiCKRAGE':
|
||||
s = requests.Session()
|
||||
|
||||
if api_version >= 2 and sso_username and sso_password:
|
||||
oauth = OAuth2Session(client=LegacyApplicationClient(client_id=core.SICKRAGE_OAUTH_CLIENT_ID))
|
||||
oauth = OAuth2Session(
|
||||
client=LegacyApplicationClient(
|
||||
client_id=core.SICKRAGE_OAUTH_CLIENT_ID,
|
||||
),
|
||||
)
|
||||
oauth_token = oauth.fetch_token(
|
||||
client_id=core.SICKRAGE_OAUTH_CLIENT_ID,
|
||||
token_url=core.SICKRAGE_OAUTH_TOKEN_URL,
|
||||
username=sso_username,
|
||||
password=sso_password,
|
||||
)
|
||||
s.headers.update({'Authorization': 'Bearer ' + oauth_token['access_token']})
|
||||
s.headers.update(
|
||||
{'Authorization': 'Bearer ' + oauth_token['access_token']},
|
||||
)
|
||||
|
||||
params = {
|
||||
'path': fork_params['path'],
|
||||
'failed': str(bool(fork_params['failed'])).lower(),
|
||||
'processMethod': 'move',
|
||||
'forceReplace': str(bool(fork_params['force_replace'])).lower(),
|
||||
'returnData': str(bool(fork_params['return_data'])).lower(),
|
||||
'forceReplace': str(
|
||||
bool(fork_params['force_replace']),
|
||||
).lower(),
|
||||
'returnData': str(
|
||||
bool(fork_params['return_data']),
|
||||
).lower(),
|
||||
'delete': str(bool(fork_params['delete'])).lower(),
|
||||
'forceNext': str(bool(fork_params['force_next'])).lower(),
|
||||
'nzbName': fork_params['nzbName'],
|
||||
|
@ -378,10 +509,23 @@ def process(
|
|||
else:
|
||||
params = fork_params
|
||||
|
||||
r = s.get(url, params=params, stream=True, verify=False, timeout=(30, 1800))
|
||||
r = s.get(
|
||||
url,
|
||||
params=params,
|
||||
stream=True,
|
||||
verify=False,
|
||||
timeout=(30, 1800),
|
||||
)
|
||||
elif section == 'NzbDrone':
|
||||
logger.debug(f'Opening URL: {url} with data: {data}', section)
|
||||
r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
|
||||
r = requests.post(
|
||||
url,
|
||||
data=data,
|
||||
headers=headers,
|
||||
stream=True,
|
||||
verify=False,
|
||||
timeout=(30, 1800),
|
||||
)
|
||||
except requests.ConnectionError:
|
||||
logger.error(f'Unable to open URL: {url}', section)
|
||||
return ProcessResult.failure(
|
||||
|
@ -389,7 +533,11 @@ def process(
|
|||
f'{section}',
|
||||
)
|
||||
|
||||
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
|
||||
if r.status_code not in [
|
||||
requests.codes.ok,
|
||||
requests.codes.created,
|
||||
requests.codes.accepted,
|
||||
]:
|
||||
logger.error(f'Server returned status {r.status_code}', section)
|
||||
return ProcessResult.failure(
|
||||
f'{section}: Failed to post-process - Server returned status '
|
||||
|
@ -412,7 +560,10 @@ def process(
|
|||
input_name = os.path.split(line)[1]
|
||||
if 'added to the queue' in line:
|
||||
queued = True
|
||||
if 'Processing succeeded' in line or 'Successfully processed' in line:
|
||||
if (
|
||||
'Processing succeeded' in line
|
||||
or 'Successfully processed' in line
|
||||
):
|
||||
success = True
|
||||
|
||||
if queued:
|
||||
|
@ -434,8 +585,14 @@ def process(
|
|||
scan_id = None
|
||||
started = False
|
||||
|
||||
if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name:
|
||||
logger.postprocess(f'Deleting failed files and folder {dir_name}', section)
|
||||
if (
|
||||
status != 0
|
||||
and delete_failed
|
||||
and not os.path.dirname(dir_name) == dir_name
|
||||
):
|
||||
logger.postprocess(
|
||||
f'Deleting failed files and folder {dir_name}', section,
|
||||
)
|
||||
remove_dir(dir_name)
|
||||
|
||||
if success:
|
||||
|
@ -453,19 +610,30 @@ def process(
|
|||
break
|
||||
n += 1
|
||||
if command_status:
|
||||
logger.debug(f'The Scan command return status: {command_status}', section)
|
||||
logger.debug(
|
||||
f'The Scan command return status: {command_status}', section,
|
||||
)
|
||||
if not os.path.exists(dir_name):
|
||||
logger.debug(f'The directory {dir_name} has been removed. Renaming was successful.', section)
|
||||
logger.debug(
|
||||
f'The directory {dir_name} has been removed. Renaming was successful.',
|
||||
section,
|
||||
)
|
||||
return ProcessResult.success(
|
||||
f'{section}: Successfully post-processed {input_name}',
|
||||
)
|
||||
elif command_status and command_status in ['completed']:
|
||||
logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
|
||||
logger.debug(
|
||||
'The Scan command has completed successfully. Renaming was successful.',
|
||||
section,
|
||||
)
|
||||
return ProcessResult.success(
|
||||
f'{section}: Successfully post-processed {input_name}',
|
||||
)
|
||||
elif command_status and command_status in ['failed']:
|
||||
logger.debug('The Scan command has failed. Renaming was not successful.', section)
|
||||
logger.debug(
|
||||
'The Scan command has failed. Renaming was not successful.',
|
||||
section,
|
||||
)
|
||||
# return ProcessResult.failure(
|
||||
# f'{section}: Failed to post-process {input_name}'
|
||||
# )
|
||||
|
@ -478,11 +646,14 @@ def process(
|
|||
)
|
||||
return ProcessResult(
|
||||
message=f'{section}: Complete DownLoad Handling is enabled. '
|
||||
f'Passing back to {section}',
|
||||
f'Passing back to {section}',
|
||||
status_code=status,
|
||||
)
|
||||
else:
|
||||
logger.warning('The Scan command did not return a valid status. Renaming was not successful.', section)
|
||||
logger.warning(
|
||||
'The Scan command did not return a valid status. Renaming was not successful.',
|
||||
section,
|
||||
)
|
||||
return ProcessResult.failure(
|
||||
f'{section}: Failed to post-process {input_name}',
|
||||
)
|
||||
|
|
|
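
Aside: every return path in the hunks above funnels through ProcessResult. As a rough, hypothetical stand-in (not the project's actual class), the convention amounts to a message plus a status code, 0 for success and 1 for failure:

from dataclasses import dataclass

@dataclass
class ProcessResult:
    message: str
    status_code: int

    @classmethod
    def success(cls, message=''):
        # Status code 0 is what the downloader treats as a clean result.
        return cls(message, 0)

    @classmethod
    def failure(cls, message=''):
        # Status code 1 flags the item as failed back in the downloader.
        return cls(message, 1)

# e.g. ProcessResult.failure('SickBeard: Failed to post-process - Transcoding failed')

The status_code is ultimately what gets handed back to the download client, which is why the failed branches above still return failure() even after their cleanup steps succeed.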
@@ -26,7 +26,9 @@ class Section(configobj.Section):
for section_name, subsections in to_return.items():
for subsection in subsections:
try:
value = list(ConfigObj.find_key(subsections, 'enabled'))[0]
value = list(
ConfigObj.find_key(subsections, 'enabled'),
)[0]
except Exception:
value = 0

@@ -165,7 +167,9 @@ class ConfigObj(configobj.ConfigObj, Section):
for option, value in values.items():
if section in ['CouchPotato']:
if option == ['outputDirectory']:
CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]
CFG_NEW['Torrent'][option] = os.path.split(
os.path.normpath(value),
)[0]
values.pop(option)
if section in ['CouchPotato', 'HeadPhones', 'Gamez', 'Mylar']:
if option in ['username', 'password']:
@@ -177,11 +181,20 @@ class ConfigObj(configobj.ConfigObj, Section):
if option == 'failed_fork': # change this old format
values['failed'] = 'auto'
values.pop(option)
if option == 'outputDirectory': # move this to new location format
CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]
if (
option == 'outputDirectory'
): # move this to new location format
CFG_NEW['Torrent'][option] = os.path.split(
os.path.normpath(value),
)[0]
values.pop(option)
if section in ['Torrent']:
if option in ['compressedExtensions', 'mediaExtensions', 'metaExtensions', 'minSampleSize']:
if option in [
'compressedExtensions',
'mediaExtensions',
'metaExtensions',
'minSampleSize',
]:
CFG_NEW['Extensions'][option] = value
values.pop(option)
if option == 'useLink': # Sym links supported now as well.
@@ -193,7 +206,9 @@ class ConfigObj(configobj.ConfigObj, Section):
if option == 'forceClean':
CFG_NEW['General']['force_clean'] = value
values.pop(option)
if option == 'qBittorrenHost': # We had a typo that is now fixed.
if (
option == 'qBittorrenHost'
): # We had a typo that is now fixed.
CFG_NEW['Torrent']['qBittorrentHost'] = value
values.pop(option)
if section in ['Transcoder']:
@@ -226,7 +241,9 @@ class ConfigObj(configobj.ConfigObj, Section):
for option, value in values.items():
CFG_NEW[section][subsection][option] = value
elif subsection in CFG_OLD[section].sections:
values = cleanup_values(CFG_OLD[section][subsection], section)
values = cleanup_values(
CFG_OLD[section][subsection], section,
)
if subsection not in CFG_NEW[section].sections:
CFG_NEW[section][subsection] = {}
for option, value in values.items():
@@ -243,7 +260,9 @@ class ConfigObj(configobj.ConfigObj, Section):
subsection = None
if section in list(chain.from_iterable(subsections.values())):
subsection = section
section = ''.join([k for k, v in subsections.items() if subsection in v])
section = ''.join(
[k for k, v in subsections.items() if subsection in v],
)
process_section(section, subsection)
elif section in subsections.keys():
subsection = subsections[section]
@@ -252,13 +271,19 @@ class ConfigObj(configobj.ConfigObj, Section):
process_section(section, subsection)

# migrate SiCRKAGE settings from SickBeard section to new dedicated SiCRKAGE section
if CFG_OLD['SickBeard']['tv']['enabled'] and CFG_OLD['SickBeard']['tv']['fork'] == 'sickrage-api':
if (
CFG_OLD['SickBeard']['tv']['enabled']
and CFG_OLD['SickBeard']['tv']['fork'] == 'sickrage-api'
):
for option, value in CFG_OLD['SickBeard']['tv'].items():
if option in CFG_NEW['SiCKRAGE']['tv']:
CFG_NEW['SiCKRAGE']['tv'][option] = value

# set API version to 1 if API key detected and no SSO username is set
if CFG_NEW['SiCKRAGE']['tv']['apikey'] and not CFG_NEW['SiCKRAGE']['tv']['sso_username']:
if (
CFG_NEW['SiCKRAGE']['tv']['apikey']
and not CFG_NEW['SiCKRAGE']['tv']['sso_username']
):
CFG_NEW['SiCKRAGE']['tv']['api_version'] = 1

# disable SickBeard section
@@ -281,30 +306,70 @@ class ConfigObj(configobj.ConfigObj, Section):
cfg_new = config()

try:
if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']:
if (
'NZBPO_NDCATEGORY' in os.environ
and 'NZBPO_SBCATEGORY' in os.environ
):
if (
os.environ['NZBPO_NDCATEGORY']
== os.environ['NZBPO_SBCATEGORY']
):
logger.warning(
'{x} category is set for SickBeard and Sonarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_NDCATEGORY']),
'{x} category is set for SickBeard and Sonarr. Please check your config in NZBGet'.format(
x=os.environ['NZBPO_NDCATEGORY'],
),
)
if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
if (
'NZBPO_RACATEGORY' in os.environ
and 'NZBPO_CPSCATEGORY' in os.environ
):
if (
os.environ['NZBPO_RACATEGORY']
== os.environ['NZBPO_CPSCATEGORY']
):
logger.warning(
'{x} category is set for CouchPotato and Radarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_RACATEGORY']),
'{x} category is set for CouchPotato and Radarr. Please check your config in NZBGet'.format(
x=os.environ['NZBPO_RACATEGORY'],
),
)
if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_W3CATEGORY' in os.environ:
if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_W3CATEGORY']:
if (
'NZBPO_RACATEGORY' in os.environ
and 'NZBPO_W3CATEGORY' in os.environ
):
if (
os.environ['NZBPO_RACATEGORY']
== os.environ['NZBPO_W3CATEGORY']
):
logger.warning(
'{x} category is set for Watcher3 and Radarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_RACATEGORY']),
'{x} category is set for Watcher3 and Radarr. Please check your config in NZBGet'.format(
x=os.environ['NZBPO_RACATEGORY'],
),
)
if 'NZBPO_W3CATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
if os.environ['NZBPO_W3CATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
if (
'NZBPO_W3CATEGORY' in os.environ
and 'NZBPO_CPSCATEGORY' in os.environ
):
if (
os.environ['NZBPO_W3CATEGORY']
== os.environ['NZBPO_CPSCATEGORY']
):
logger.warning(
'{x} category is set for CouchPotato and Watcher3. Please check your config in NZBGet'.format(x=os.environ['NZBPO_W3CATEGORY']),
'{x} category is set for CouchPotato and Watcher3. Please check your config in NZBGet'.format(
x=os.environ['NZBPO_W3CATEGORY'],
),
)
if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ:
if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']:
if (
'NZBPO_LICATEGORY' in os.environ
and 'NZBPO_HPCATEGORY' in os.environ
):
if (
os.environ['NZBPO_LICATEGORY']
== os.environ['NZBPO_HPCATEGORY']
):
logger.warning(
'{x} category is set for HeadPhones and Lidarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_LICATEGORY']),
'{x} category is set for HeadPhones and Lidarr. Please check your config in NZBGet'.format(
x=os.environ['NZBPO_LICATEGORY'],
),
)
section = 'Nzb'
key = 'NZBOP_DESTDIR'
@@ -314,8 +379,20 @@ class ConfigObj(configobj.ConfigObj, Section):
cfg_new[section][option] = value

section = 'General'
env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'REQUIRE_LAN', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfg_keys = ['auto_update', 'check_media', 'require_lan', 'safe_mode', 'no_extract_failed']
env_keys = [
'AUTO_UPDATE',
'CHECK_MEDIA',
'REQUIRE_LAN',
'SAFE_MODE',
'NO_EXTRACT_FAILED',
]
cfg_keys = [
'auto_update',
'check_media',
'require_lan',
'safe_mode',
'no_extract_failed',
]
for index in range(len(env_keys)):
key = f'NZBPO_{env_keys[index]}'
if key in os.environ:
@@ -336,12 +413,32 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'CouchPotato'
env_cat_key = 'NZBPO_CPSCATEGORY'
env_keys = [
'ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY',
'ENABLED',
'APIKEY',
'HOST',
'PORT',
'SSL',
'WEB_ROOT',
'METHOD',
'DELETE_FAILED',
'REMOTE_PATH',
'WAIT_FOR',
'WATCH_DIR',
'OMDBAPIKEY',
]
cfg_keys = [
'enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
'wait_for', 'watch_dir', 'omdbapikey',
'enabled',
'apikey',
'host',
'port',
'ssl',
'web_root',
'method',
'delete_failed',
'remote_path',
'wait_for',
'watch_dir',
'omdbapikey',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -349,9 +446,14 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['Radarr'].sections:
cfg_new['Radarr'][env_cat_key]['enabled'] = 0
@@ -361,12 +463,32 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'Watcher3'
env_cat_key = 'NZBPO_W3CATEGORY'
env_keys = [
'ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY',
'ENABLED',
'APIKEY',
'HOST',
'PORT',
'SSL',
'WEB_ROOT',
'METHOD',
'DELETE_FAILED',
'REMOTE_PATH',
'WAIT_FOR',
'WATCH_DIR',
'OMDBAPIKEY',
]
cfg_keys = [
'enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
'wait_for', 'watch_dir', 'omdbapikey',
'enabled',
'apikey',
'host',
'port',
'ssl',
'web_root',
'method',
'delete_failed',
'remote_path',
'wait_for',
'watch_dir',
'omdbapikey',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -374,9 +496,14 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['Radarr'].sections:
cfg_new['Radarr'][env_cat_key]['enabled'] = 0
@@ -386,12 +513,38 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'SickBeard'
env_cat_key = 'NZBPO_SBCATEGORY'
env_keys = [
'ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK',
'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD',
'ENABLED',
'HOST',
'PORT',
'APIKEY',
'USERNAME',
'PASSWORD',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'FORK',
'DELETE_FAILED',
'TORRENT_NOLINK',
'NZBEXTRACTIONBY',
'REMOTE_PATH',
'PROCESS_METHOD',
]
cfg_keys = [
'enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink',
'nzbExtractionBy', 'remote_path', 'process_method',
'enabled',
'host',
'port',
'apikey',
'username',
'password',
'ssl',
'web_root',
'watch_dir',
'fork',
'delete_failed',
'Torrent_NoLink',
'nzbExtractionBy',
'remote_path',
'process_method',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -399,9 +552,14 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['SiCKRAGE'].sections:
cfg_new['SiCKRAGE'][env_cat_key]['enabled'] = 0
@@ -411,12 +569,40 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'SiCKRAGE'
env_cat_key = 'NZBPO_SRCATEGORY'
env_keys = [
'ENABLED', 'HOST', 'PORT', 'APIKEY', 'API_VERSION', 'SSO_USERNAME', 'SSO_PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD',
'ENABLED',
'HOST',
'PORT',
'APIKEY',
'API_VERSION',
'SSO_USERNAME',
'SSO_PASSWORD',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'FORK',
'DELETE_FAILED',
'TORRENT_NOLINK',
'NZBEXTRACTIONBY',
'REMOTE_PATH',
'PROCESS_METHOD',
]
cfg_keys = [
'enabled', 'host', 'port', 'apikey', 'api_version', 'sso_username', 'sso_password', 'ssl', 'web_root', 'watch_dir', 'fork',
'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method',
'enabled',
'host',
'port',
'apikey',
'api_version',
'sso_username',
'sso_password',
'ssl',
'web_root',
'watch_dir',
'fork',
'delete_failed',
'Torrent_NoLink',
'nzbExtractionBy',
'remote_path',
'process_method',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -424,9 +610,14 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['SickBeard'].sections:
cfg_new['SickBeard'][env_cat_key]['enabled'] = 0
@@ -435,17 +626,44 @@ class ConfigObj(configobj.ConfigObj, Section):

section = 'HeadPhones'
env_cat_key = 'NZBPO_HPCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
env_keys = [
'ENABLED',
'APIKEY',
'HOST',
'PORT',
'SSL',
'WEB_ROOT',
'WAIT_FOR',
'WATCH_DIR',
'REMOTE_PATH',
'DELETE_FAILED',
]
cfg_keys = [
'enabled',
'apikey',
'host',
'port',
'ssl',
'web_root',
'wait_for',
'watch_dir',
'remote_path',
'delete_failed',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = f'NZBPO_HP{env_keys[index]}'
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['Lidarr'].sections:
cfg_new['Lidarr'][env_cat_key]['enabled'] = 0
@@ -453,11 +671,27 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'Mylar'
env_cat_key = 'NZBPO_MYCATEGORY'
env_keys = [
'ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
'ENABLED',
'HOST',
'PORT',
'USERNAME',
'PASSWORD',
'APIKEY',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'REMOTE_PATH',
]
cfg_keys = [
'enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
'enabled',
'host',
'port',
'username',
'password',
'apikey',
'ssl',
'web_root',
'watch_dir',
'remote_path',
]
if env_cat_key in os.environ:
@@ -466,51 +700,130 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1

section = 'Gamez'
env_cat_key = 'NZBPO_GZCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
env_keys = [
'ENABLED',
'APIKEY',
'HOST',
'PORT',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'LIBRARY',
'REMOTE_PATH',
]
cfg_keys = [
'enabled',
'apikey',
'host',
'port',
'ssl',
'web_root',
'watch_dir',
'library',
'remote_path',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = f'NZBPO_GZ{env_keys[index]}'
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1

section = 'LazyLibrarian'
env_cat_key = 'NZBPO_LLCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'remote_path']
env_keys = [
'ENABLED',
'APIKEY',
'HOST',
'PORT',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'REMOTE_PATH',
]
cfg_keys = [
'enabled',
'apikey',
'host',
'port',
'ssl',
'web_root',
'watch_dir',
'remote_path',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = f'NZBPO_LL{env_keys[index]}'
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1

section = 'NzbDrone'
env_cat_key = 'NZBPO_NDCATEGORY'
env_keys = [
'ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'IMPORTMODE',
'ENABLED',
'HOST',
'APIKEY',
'PORT',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'FORK',
'DELETE_FAILED',
'TORRENT_NOLINK',
'NZBEXTRACTIONBY',
'WAIT_FOR',
'DELETE_FAILED',
'REMOTE_PATH',
'IMPORTMODE',
]
# new cfgKey added for importMode
cfg_keys = [
'enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'importMode',
'enabled',
'host',
'apikey',
'port',
'ssl',
'web_root',
'watch_dir',
'fork',
'delete_failed',
'Torrent_NoLink',
'nzbExtractionBy',
'wait_for',
'delete_failed',
'remote_path',
'importMode',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -518,9 +831,14 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['SickBeard'].sections:
cfg_new['SickBeard'][env_cat_key]['enabled'] = 0
@@ -530,13 +848,41 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'Radarr'
env_cat_key = 'NZBPO_RACATEGORY'
env_keys = [
'ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY', 'IMPORTMODE',
'ENABLED',
'HOST',
'APIKEY',
'PORT',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'FORK',
'DELETE_FAILED',
'TORRENT_NOLINK',
'NZBEXTRACTIONBY',
'WAIT_FOR',
'DELETE_FAILED',
'REMOTE_PATH',
'OMDBAPIKEY',
'IMPORTMODE',
]
# new cfgKey added for importMode
cfg_keys = [
'enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey', 'importMode',
'enabled',
'host',
'apikey',
'port',
'ssl',
'web_root',
'watch_dir',
'fork',
'delete_failed',
'Torrent_NoLink',
'nzbExtractionBy',
'wait_for',
'delete_failed',
'remote_path',
'omdbapikey',
'importMode',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -544,9 +890,14 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['CouchPotato'].sections:
cfg_new['CouchPotato'][env_cat_key]['enabled'] = 0
@@ -556,12 +907,36 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'Lidarr'
env_cat_key = 'NZBPO_LICATEGORY'
env_keys = [
'ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH',
'ENABLED',
'HOST',
'APIKEY',
'PORT',
'SSL',
'WEB_ROOT',
'WATCH_DIR',
'FORK',
'DELETE_FAILED',
'TORRENT_NOLINK',
'NZBEXTRACTIONBY',
'WAIT_FOR',
'DELETE_FAILED',
'REMOTE_PATH',
]
cfg_keys = [
'enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path',
'enabled',
'host',
'apikey',
'port',
'ssl',
'web_root',
'watch_dir',
'fork',
'delete_failed',
'Torrent_NoLink',
'nzbExtractionBy',
'wait_for',
'delete_failed',
'remote_path',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -569,16 +944,29 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
if os.environ[env_cat_key] in cfg_new['HeadPhones'].sections:
cfg_new['HeadPhones'][env_cat_key]['enabled'] = 0

section = 'Extensions'
env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
env_keys = [
'COMPRESSEDEXTENSIONS',
'MEDIAEXTENSIONS',
'METAEXTENSIONS',
]
cfg_keys = [
'compressedExtensions',
'mediaExtensions',
'metaExtensions',
]
for index in range(len(env_keys)):
key = f'NZBPO_{env_keys[index]}'
if key in os.environ:
@@ -598,28 +986,82 @@ class ConfigObj(configobj.ConfigObj, Section):

section = 'Transcoder'
env_keys = [
'TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
'OUTPUTVIDEOPRESET', 'OUTPUTVIDEOFRAMERATE', 'OUTPUTVIDEOBITRATE', 'OUTPUTAUDIOCODEC',
'AUDIOCODECALLOW', 'OUTPUTAUDIOBITRATE', 'OUTPUTQUALITYPERCENT', 'GETSUBS',
'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE',
'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS',
'OUTPUTAUDIOOTHERCHANNELS', 'OUTPUTVIDEORESOLUTION',
'TRANSCODE',
'DUPLICATE',
'IGNOREEXTENSIONS',
'OUTPUTFASTSTART',
'OUTPUTVIDEOPATH',
'PROCESSOUTPUT',
'AUDIOLANGUAGE',
'ALLAUDIOLANGUAGES',
'SUBLANGUAGES',
'ALLSUBLANGUAGES',
'EMBEDSUBS',
'BURNINSUBTITLE',
'EXTRACTSUBS',
'EXTERNALSUBDIR',
'OUTPUTDEFAULT',
'OUTPUTVIDEOEXTENSION',
'OUTPUTVIDEOCODEC',
'VIDEOCODECALLOW',
'OUTPUTVIDEOPRESET',
'OUTPUTVIDEOFRAMERATE',
'OUTPUTVIDEOBITRATE',
'OUTPUTAUDIOCODEC',
'AUDIOCODECALLOW',
'OUTPUTAUDIOBITRATE',
'OUTPUTQUALITYPERCENT',
'GETSUBS',
'OUTPUTAUDIOTRACK2CODEC',
'AUDIOCODEC2ALLOW',
'OUTPUTAUDIOTRACK2BITRATE',
'OUTPUTAUDIOOTHERCODEC',
'AUDIOOTHERCODECALLOW',
'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC',
'OUTPUTAUDIOCHANNELS',
'OUTPUTAUDIOTRACK2CHANNELS',
'OUTPUTAUDIOOTHERCHANNELS',
'OUTPUTVIDEORESOLUTION',
]
cfg_keys = [
'transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir',
'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
'outputVideoPreset', 'outputVideoFramerate', 'outputVideoBitrate', 'outputAudioCodec',
'AudioCodecAllow', 'outputAudioBitrate', 'outputQualityPercent', 'getSubs',
'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate',
'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
'outputAudioOtherChannels', 'outputVideoResolution',
'transcode',
'duplicate',
'ignoreExtensions',
'outputFastStart',
'outputVideoPath',
'processOutput',
'audioLanguage',
'allAudioLanguages',
'subLanguages',
'allSubLanguages',
'embedSubs',
'burnInSubtitle',
'extractSubs',
'externalSubDir',
'outputDefault',
'outputVideoExtension',
'outputVideoCodec',
'VideoCodecAllow',
'outputVideoPreset',
'outputVideoFramerate',
'outputVideoBitrate',
'outputAudioCodec',
'AudioCodecAllow',
'outputAudioBitrate',
'outputQualityPercent',
'getSubs',
'outputAudioTrack2Codec',
'AudioCodec2Allow',
'outputAudioTrack2Bitrate',
'outputAudioOtherCodec',
'AudioOtherCodecAllow',
'outputAudioOtherBitrate',
'outputSubtitleCodec',
'outputAudioChannels',
'outputAudioTrack2Channels',
'outputAudioOtherChannels',
'outputVideoResolution',
]
for index in range(len(env_keys)):
key = f'NZBPO_{env_keys[index]}'
@@ -641,12 +1083,24 @@ class ConfigObj(configobj.ConfigObj, Section):
section = 'UserScript'
env_cat_key = 'NZBPO_USCATEGORY'
env_keys = [
'USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH',
'USER_SCRIPT_MEDIAEXTENSIONS',
'USER_SCRIPT_PATH',
'USER_SCRIPT_PARAM',
'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES',
'USER_SCRIPT_CLEAN',
'USDELAY',
'USREMOTE_PATH',
]
cfg_keys = [
'user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path',
'user_script_mediaExtensions',
'user_script_path',
'user_script_param',
'user_script_runOnce',
'user_script_successCodes',
'user_script_clean',
'delay',
'remote_path',
]
if env_cat_key in os.environ:
for index in range(len(env_keys)):
@@ -654,9 +1108,14 @@ class ConfigObj(configobj.ConfigObj, Section):
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
if (
os.environ[env_cat_key]
not in cfg_new[section].sections
):
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]][
option
] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1

except Exception as error:
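
All of the per-service blocks above repeat one mapping: each NZBPO_* variable that NZBGet exports is copied into the per-category config section named by the matching *CATEGORY variable. A minimal sketch of that pattern using plain dicts (a hypothetical helper; the real code writes into configobj sections and consults their .sections attribute):

import os

def map_env_to_cfg(cfg, section, env_cat_key, prefix, env_keys, cfg_keys):
    # Skip services whose category variable is not set in the environment.
    if env_cat_key not in os.environ:
        return
    category = os.environ[env_cat_key]
    target = cfg.setdefault(section, {}).setdefault(category, {})
    for env_key, cfg_key in zip(env_keys, cfg_keys):
        key = f'NZBPO_{prefix}{env_key}'
        if key in os.environ:
            target[cfg_key] = os.environ[key]
    target['enabled'] = 1

cfg = {}
os.environ['NZBPO_CPSCATEGORY'] = 'movie'
os.environ['NZBPO_CPSAPIKEY'] = 'abc123'
map_env_to_cfg(cfg, 'CouchPotato', 'NZBPO_CPSCATEGORY', 'CPS', ['ENABLED', 'APIKEY'], ['enabled', 'apikey'])
print(cfg)  # {'CouchPotato': {'movie': {'apikey': 'abc123', 'enabled': 1}}}

Iterating with zip(env_keys, cfg_keys) rather than range(len(env_keys)) keeps the two lists paired without index bookkeeping, which is one way the repeated blocks above could be condensed further.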
@@ -11,7 +11,9 @@ MAX_DB_VERSION = 2
def backup_database(version):
logger.info('Backing up database before upgrade')
if not backup_versioned_file(main_db.db_filename(), version):
logger.log_error_and_exit('Database backup failed, abort upgrading database')
logger.log_error_and_exit(
'Database backup failed, abort upgrading database',
)
else:
logger.info('Proceeding with upgrade')

@@ -21,6 +23,7 @@ def backup_database(version):
# ======================
# Add new migrations at the bottom of the list; subclass the previous migration.


class InitialSchema(main_db.SchemaUpgrade):
def test(self):
no_update = False
@@ -30,7 +33,9 @@ class InitialSchema(main_db.SchemaUpgrade):
return no_update

def execute(self):
if not self.has_table('downloads') and not self.has_table('db_version'):
if not self.has_table('downloads') and not self.has_table(
'db_version',
):
queries = [
'CREATE TABLE db_version (db_version INTEGER);',
'CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));',
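
The InitialSchema hunk reduces to: if neither table exists yet, create the whole schema. A self-contained sqlite3 sketch of the same idea (illustrative only; the project routes this through its main_db helpers rather than raw sqlite3 calls):

import sqlite3

def initial_schema(con):
    cur = con.cursor()
    # Collect the names of all existing tables.
    tables = {row[0] for row in cur.execute(
        "SELECT name FROM sqlite_master WHERE type='table'")}
    if 'downloads' not in tables and 'db_version' not in tables:
        cur.execute('CREATE TABLE db_version (db_version INTEGER);')
        cur.execute(
            'CREATE TABLE downloads (input_directory TEXT, input_name TEXT, '
            'input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, '
            'last_update NUMERIC, CONSTRAINT pk_downloadID '
            'PRIMARY KEY (input_directory, input_name));')
    con.commit()

initial_schema(sqlite3.connect(':memory:'))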
@@ -19,14 +19,46 @@ def extract(file_path, output_destination):
if not os.path.exists(core.SEVENZIP):
core.logger.error('EXTRACTOR: Could not find 7-zip, Exiting')
return False
wscriptlocation = os.path.join(os.environ['WINDIR'], 'system32', 'wscript.exe')
invislocation = os.path.join(core.APP_ROOT, 'core', 'extractor', 'bin', 'invisible.vbs')
cmd_7zip = [wscriptlocation, invislocation, str(core.SHOWEXTRACT), core.SEVENZIP, 'x', '-y']
ext_7zip = ['.rar', '.zip', '.tar.gz', 'tgz', '.tar.bz2', '.tbz', '.tar.lzma', '.tlz', '.7z', '.xz', '.gz']
wscriptlocation = os.path.join(
os.environ['WINDIR'], 'system32', 'wscript.exe',
)
invislocation = os.path.join(
core.APP_ROOT, 'core', 'extractor', 'bin', 'invisible.vbs',
)
cmd_7zip = [
wscriptlocation,
invislocation,
str(core.SHOWEXTRACT),
core.SEVENZIP,
'x',
'-y',
]
ext_7zip = [
'.rar',
'.zip',
'.tar.gz',
'tgz',
'.tar.bz2',
'.tbz',
'.tar.lzma',
'.tlz',
'.7z',
'.xz',
'.gz',
]
extract_commands = dict.fromkeys(ext_7zip, cmd_7zip)
# Using unix
else:
required_cmds = ['unrar', 'unzip', 'tar', 'unxz', 'unlzma', '7zr', 'bunzip2', 'gunzip']
required_cmds = [
'unrar',
'unzip',
'tar',
'unxz',
'unlzma',
'7zr',
'bunzip2',
'gunzip',
]
# ## Possible future suport:
# gunzip: gz (cmd will delete original archive)
# ## the following do not extract to dest dir
@@ -38,10 +70,14 @@ def extract(file_path, output_destination):
'.rar': ['unrar', 'x', '-o+', '-y'],
'.tar': ['tar', '-xf'],
'.zip': ['unzip'],
'.tar.gz': ['tar', '-xzf'], '.tgz': ['tar', '-xzf'],
'.tar.bz2': ['tar', '-xjf'], '.tbz': ['tar', '-xjf'],
'.tar.lzma': ['tar', '--lzma', '-xf'], '.tlz': ['tar', '--lzma', '-xf'],
'.tar.xz': ['tar', '--xz', '-xf'], '.txz': ['tar', '--xz', '-xf'],
'.tar.gz': ['tar', '-xzf'],
'.tgz': ['tar', '-xzf'],
'.tar.bz2': ['tar', '-xjf'],
'.tbz': ['tar', '-xjf'],
'.tar.lzma': ['tar', '--lzma', '-xf'],
'.tlz': ['tar', '--lzma', '-xf'],
'.tar.xz': ['tar', '--xz', '-xf'],
'.txz': ['tar', '--xz', '-xf'],
'.7z': ['7zr', 'x'],
'.gz': ['gunzip'],
}
@@ -50,26 +86,43 @@ def extract(file_path, output_destination):
devnull = open(os.devnull, 'w')
for cmd in required_cmds:
if call(
['which', cmd], stdout=devnull,
['which', cmd],
stdout=devnull,
stderr=devnull,
): # note, returns 0 if exists, or 1 if doesn't exist.
for k, v in extract_commands.items():
if cmd in v[0]:
if not call(['which', '7zr'], stdout=devnull, stderr=devnull): # we do have '7zr'
if not call(
['which', '7zr'],
stdout=devnull,
stderr=devnull,
): # we do have '7zr'
extract_commands[k] = ['7zr', 'x', '-y']
elif not call(['which', '7z'], stdout=devnull, stderr=devnull): # we do have '7z'
elif not call(
['which', '7z'], stdout=devnull, stderr=devnull,
): # we do have '7z'
extract_commands[k] = ['7z', 'x', '-y']
elif not call(['which', '7za'], stdout=devnull, stderr=devnull): # we do have '7za'
elif not call(
['which', '7za'],
stdout=devnull,
stderr=devnull,
): # we do have '7za'
extract_commands[k] = ['7za', 'x', '-y']
else:
core.logger.error(f'EXTRACTOR: {cmd} not found, disabling support for {k}')
core.logger.error(
f'EXTRACTOR: {cmd} not found, disabling support for {k}',
)
del extract_commands[k]
devnull.close()
else:
core.logger.warning('EXTRACTOR: Cannot determine which tool to use when called from Transmission')
core.logger.warning(
'EXTRACTOR: Cannot determine which tool to use when called from Transmission',
)

if not extract_commands:
core.logger.warning('EXTRACTOR: No archive extracting programs found, plugin will be disabled')
core.logger.warning(
'EXTRACTOR: No archive extracting programs found, plugin will be disabled',
)

ext = os.path.splitext(file_path)
cmd = []
@@ -79,9 +132,19 @@ def extract(file_path, output_destination):
cmd = extract_commands[f'.tar{ext[1]}']
else: # Try gunzip
cmd = extract_commands[ext[1]]
elif ext[1] in ('.1', '.01', '.001') and os.path.splitext(ext[0])[1] in ('.rar', '.zip', '.7z'):
elif ext[1] in ('.1', '.01', '.001') and os.path.splitext(ext[0])[1] in (
'.rar',
'.zip',
'.7z',
):
cmd = extract_commands[os.path.splitext(ext[0])[1]]
elif ext[1] in ('.cb7', '.cba', '.cbr', '.cbt', '.cbz'): # don't extract these comic book archives.
elif ext[1] in (
'.cb7',
'.cba',
'.cbr',
'.cbt',
'.cbz',
): # don't extract these comic book archives.
return False
else:
if ext[1] in extract_commands:
@@ -93,8 +156,13 @@ def extract(file_path, output_destination):
# Create outputDestination folder
core.make_dir(output_destination)

if core.PASSWORDS_FILE and os.path.isfile(os.path.normpath(core.PASSWORDS_FILE)):
passwords = [line.strip() for line in open(os.path.normpath(core.PASSWORDS_FILE))]
if core.PASSWORDS_FILE and os.path.isfile(
os.path.normpath(core.PASSWORDS_FILE),
):
passwords = [
line.strip()
for line in open(os.path.normpath(core.PASSWORDS_FILE))
]
else:
passwords = []

@@ -110,7 +178,9 @@ def extract(file_path, output_destination):
orig_files.append(os.path.join(directory, file))

pwd = os.getcwd() # Get our Present Working Directory
os.chdir(output_destination) # Not all unpack commands accept full paths, so just extract into this directory
os.chdir(
output_destination,
) # Not all unpack commands accept full paths, so just extract into this directory
devnull = open(os.devnull, 'w')

try: # now works same for nt and *nix
@@ -124,30 +194,42 @@ def extract(file_path, output_destination):
cmd2 = cmd
if not 'gunzip' in cmd: # gunzip doesn't support password
cmd2.append('-p-') # don't prompt for password.
p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine.
p = Popen(
cmd2, stdout=devnull, stderr=devnull, startupinfo=info,
) # should extract files fine.
res = p.wait()
if res == 0: # Both Linux and Windows return 0 for successful.
core.logger.info(f'EXTRACTOR: Extraction was successful for {file_path} to {output_destination}')
core.logger.info(
f'EXTRACTOR: Extraction was successful for {file_path} to {output_destination}',
)
success = 1
elif len(passwords) > 0 and not 'gunzip' in cmd:
core.logger.info('EXTRACTOR: Attempting to extract with passwords')
for password in passwords:
if password == '': # if edited in windows or otherwise if blank lines.
if (
password == ''
): # if edited in windows or otherwise if blank lines.
continue
cmd2 = cmd
# append password here.
passcmd = f'-p{password}'
cmd2.append(passcmd)
p = Popen(cmd2, stdout=devnull, stderr=devnull, startupinfo=info) # should extract files fine.
p = Popen(
cmd2, stdout=devnull, stderr=devnull, startupinfo=info,
) # should extract files fine.
res = p.wait()
if (res >= 0 and platform == 'Windows') or res == 0:
core.logger.info(f'EXTRACTOR: Extraction was successful for {file_path} to {output_destination} using password: {password}')
core.logger.info(
f'EXTRACTOR: Extraction was successful for {file_path} to {output_destination} using password: {password}',
)
success = 1
break
else:
continue
except Exception:
core.logger.error(f'EXTRACTOR: Extraction failed for {file_path}. Could not call command {cmd}')
core.logger.error(
f'EXTRACTOR: Extraction failed for {file_path}. Could not call command {cmd}',
)
os.chdir(pwd)
return False

@@ -167,10 +249,14 @@ def extract(file_path, output_destination):
for file in files:
if not os.path.join(directory, file) in orig_files:
try:
shutil.copymode(file_path, os.path.join(directory, file))
shutil.copymode(
file_path, os.path.join(directory, file),
)
except Exception:
pass
return True
else:
core.logger.error(f'EXTRACTOR: Extraction failed for {file_path}. Result was {res}')
core.logger.error(
f'EXTRACTOR: Extraction failed for {file_path}. Result was {res}',
)
return False
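
The availability probes above shell out to `which` via call() and fall through 7zr, then 7z, then 7za. On Python 3.3+ the same fallback chain can be sketched with shutil.which (an illustration only, not the project's code, which also has the separate Windows/wscript path shown above):

import shutil

def pick_7zip_command():
    # Return the first available 7-zip variant, mirroring the fallback order above.
    for tool in ('7zr', '7z', '7za'):
        if shutil.which(tool):
            return [tool, 'x', '-y']
    return None

print(pick_7zip_command())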
@@ -47,7 +47,10 @@ class GitHub:
"""
return self._access_api(
[
'repos', self.github_repo_user, self.github_repo, 'compare',
'repos',
self.github_repo_user,
self.github_repo,
'compare',
f'{base}...{head}',
],
params={'per_page': per_page},
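For context, those path segments end up at GitHub's /repos/{owner}/{repo}/compare/{base}...{head} endpoint. A rough sketch of what _access_api is doing with them; the direct requests call is an assumption standing in for the project's own HTTP helper:

import requests

def compare_commits(owner, repo, base, head, per_page=1):
    """Build the GitHub compare URL from path segments and fetch it."""
    path = '/'.join(['repos', owner, repo, 'compare', f'{base}...{head}'])
    url = f'https://api.github.com/{path}'
    response = requests.get(url, params={'per_page': per_page}, timeout=30)
    response.raise_for_status()
    # The compare payload includes 'ahead_by', 'behind_by' and 'commits'.
    return response.json()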
@@ -88,9 +88,18 @@ class NTMRotatingLogHandler:
console.setFormatter(
DispatchingFormatter(
{
'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
'db': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
'nzbtomedia': logging.Formatter(
'[%(asctime)s] [%(levelname)s]::%(message)s',
'%H:%M:%S',
),
'postprocess': logging.Formatter(
'[%(asctime)s] [%(levelname)s]::%(message)s',
'%H:%M:%S',
),
'db': logging.Formatter(
'[%(asctime)s] [%(levelname)s]::%(message)s',
'%H:%M:%S',
),
},
logging.Formatter('%(message)s'),
),

@@ -119,16 +128,27 @@ class NTMRotatingLogHandler:

def _config_handler(self):
"""Configure a file handler to log at file_name and return it."""
file_handler = logging.FileHandler(self.log_file_path, encoding='utf-8')
file_handler = logging.FileHandler(
self.log_file_path, encoding='utf-8',
)

file_handler.setLevel(DB)

file_handler.setFormatter(
DispatchingFormatter(
{
'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
'db': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
'nzbtomedia': logging.Formatter(
'%(asctime)s %(levelname)-8s::%(message)s',
'%Y-%m-%d %H:%M:%S',
),
'postprocess': logging.Formatter(
'%(asctime)s %(levelname)-8s::%(message)s',
'%Y-%m-%d %H:%M:%S',
),
'db': logging.Formatter(
'%(asctime)s %(levelname)-8s::%(message)s',
'%Y-%m-%d %H:%M:%S',
),
},
logging.Formatter('%(message)s'),
),

@@ -194,7 +214,10 @@ class NTMRotatingLogHandler:

# check the size and see if we need to rotate
if self.writes_since_check >= 10:
if os.path.isfile(self.log_file_path) and os.path.getsize(self.log_file_path) >= LOG_SIZE:
if (
os.path.isfile(self.log_file_path)
and os.path.getsize(self.log_file_path) >= LOG_SIZE
):
self._rotate_logs()
self.writes_since_check = 0
else:

@@ -203,14 +226,18 @@ class NTMRotatingLogHandler:
try:
message = f'{section.upper()}: {to_log}'
except UnicodeError:
message = f'{section.upper()}: Message contains non-utf-8 string'
message = (
f'{section.upper()}: Message contains non-utf-8 string'
)

out_line = message

ntm_logger = logging.getLogger('nzbtomedia')
pp_logger = logging.getLogger('postprocess')
db_logger = logging.getLogger('db')
pp_logger.postprocess = functools.partial(pp_logger.log, POSTPROCESS)
pp_logger.postprocess = functools.partial(
pp_logger.log, POSTPROCESS,
)
db_logger.db = functools.partial(db_logger.log, DB)
try:
if log_level == DEBUG:
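The DispatchingFormatter used by both handlers is not part of the standard logging module. A minimal sketch of the idea, assuming it follows the common recipe: pick a Formatter by logger name, fall back to a default:

import logging

class DispatchingFormatter(logging.Formatter):
    """Delegate formatting to a per-logger-name Formatter, with a fallback."""

    def __init__(self, formatters, default_formatter):
        super().__init__()
        self._formatters = formatters
        self._default = default_formatter

    def format(self, record):
        # record.name is the logger name, e.g. 'postprocess' or 'db'.
        formatter = self._formatters.get(record.name, self._default)
        return formatter.format(record)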
@@ -59,7 +59,8 @@ class DBConnection:
sql_result = cursor.fetchone()[0]
else:
logger.log(
f'{self.filename}: {query} with args {args}', logger.DB,
f'{self.filename}: {query} with args {args}',
logger.DB,
)
cursor = self.connection.cursor()
cursor.execute(query, args)

@@ -68,7 +69,10 @@ class DBConnection:
# get out of the connection attempt loop since we were successful
break
except sqlite3.OperationalError as error:
if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]:
if (
'unable to open database file' in error.args[0]
or 'database is locked' in error.args[0]
):
logger.log(f'DB error: {error}', logger.WARNING)
attempt += 1
time.sleep(1)

@@ -76,7 +80,9 @@ class DBConnection:
logger.log(f'DB error: {error}', logger.ERROR)
raise
except sqlite3.DatabaseError as error:
logger.log(f'Fatal error executing query: {error}', logger.ERROR)
logger.log(
f'Fatal error executing query: {error}', logger.ERROR,
)
raise

return sql_result

@@ -97,16 +103,26 @@ class DBConnection:
sql_result.append(self.connection.execute(qu[0]))
elif len(qu) > 1:
if log_transaction:
logger.log(f'{qu[0]} with args {qu[1]}', logger.DEBUG)
sql_result.append(self.connection.execute(qu[0], qu[1]))
logger.log(
f'{qu[0]} with args {qu[1]}', logger.DEBUG,
)
sql_result.append(
self.connection.execute(qu[0], qu[1]),
)
self.connection.commit()
logger.log(f'Transaction with {len(querylist)} query\'s executed', logger.DEBUG)
logger.log(
f'Transaction with {len(querylist)} query\'s executed',
logger.DEBUG,
)
return sql_result
except sqlite3.OperationalError as error:
sql_result = []
if self.connection:
self.connection.rollback()
if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]:
if (
'unable to open database file' in error.args[0]
or 'database is locked' in error.args[0]
):
logger.log(f'DB error: {error}', logger.WARNING)
attempt += 1
time.sleep(1)

@@ -116,7 +132,9 @@ class DBConnection:
except sqlite3.DatabaseError as error:
if self.connection:
self.connection.rollback()
logger.log(f'Fatal error executing query: {error}', logger.ERROR)
logger.log(
f'Fatal error executing query: {error}', logger.ERROR,
)
raise

return sql_result

@@ -135,14 +153,18 @@ class DBConnection:
sql_result = self.connection.execute(query)
else:
logger.log(
f'{self.filename}: {query} with args {args}', logger.DB,
f'{self.filename}: {query} with args {args}',
logger.DB,
)
sql_result = self.connection.execute(query, args)
self.connection.commit()
# get out of the connection attempt loop since we were successful
break
except sqlite3.OperationalError as error:
if 'unable to open database file' in error.args[0] or 'database is locked' in error.args[0]:
if (
'unable to open database file' in error.args[0]
or 'database is locked' in error.args[0]
):
logger.log(f'DB error: {error}', logger.WARNING)
attempt += 1
time.sleep(1)

@@ -150,7 +172,9 @@ class DBConnection:
logger.log(f'DB error: {error}', logger.ERROR)
raise
except sqlite3.DatabaseError as error:
logger.log(f'Fatal error executing query: {error}', logger.ERROR)
logger.log(
f'Fatal error executing query: {error}', logger.ERROR,
)
raise

return sql_result
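Every query path above shares the same shape: retry when SQLite reports a locked or unopenable database, re-raise anything else. A condensed sketch of that loop with connection handling simplified; the five-attempt cap is an assumption, not a value from the source:

import sqlite3
import time

def execute_with_retry(connection, query, args=(), attempts=5):
    """Run a query, sleeping and retrying on transient SQLite lock errors."""
    for attempt in range(attempts):
        try:
            result = connection.execute(query, args)
            connection.commit()
            return result
        except sqlite3.OperationalError as error:
            transient = ('unable to open database file' in error.args[0]
                         or 'database is locked' in error.args[0])
            if not transient or attempt == attempts - 1:
                raise  # permanent failure, or out of retries
            time.sleep(1)  # give the writer holding the lock time to finish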
@@ -165,12 +189,8 @@ class DBConnection:
return sql_results

def upsert(self, table_name, value_dict, key_dict):

def gen_params(my_dict):
return [
f'{k} = ?'
for k in my_dict.keys()
]
return [f'{k} = ?' for k in my_dict.keys()]

changes_before = self.connection.total_changes
items = list(value_dict.values()) + list(key_dict.values())

@@ -199,10 +219,7 @@ class DBConnection:
def table_info(self, table_name):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
cursor = self.connection.execute(f'PRAGMA table_info({table_name})')
return {
column['name']: {'type': column['type']}
for column in cursor
}
return {column['name']: {'type': column['type']} for column in cursor}


def sanity_check_database(connection, sanity_check):

@@ -221,23 +238,28 @@ class DBSanityCheck:
# = Upgrade API =
# ===============


def upgrade_database(connection, schema):
logger.log('Checking database structure...', logger.MESSAGE)
_process_upgrade(connection, schema)


def pretty_name(class_name):
return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)])
return ' '.join(
[x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)],
)


def _process_upgrade(connection, upgrade_class):
instance = upgrade_class(connection)
logger.log(
f'Checking {pretty_name(upgrade_class.__name__)} database upgrade', logger.DEBUG,
f'Checking {pretty_name(upgrade_class.__name__)} database upgrade',
logger.DEBUG,
)
if not instance.test():
logger.log(
f'Database upgrade required: {pretty_name(upgrade_class.__name__)}', logger.MESSAGE,
f'Database upgrade required: {pretty_name(upgrade_class.__name__)}',
logger.MESSAGE,
)
try:
instance.execute()

@@ -247,11 +269,13 @@ def _process_upgrade(connection, upgrade_class):
)
raise
logger.log(
f'{upgrade_class.__name__} upgrade completed', logger.DEBUG,
f'{upgrade_class.__name__} upgrade completed',
logger.DEBUG,
)
else:
logger.log(
f'{upgrade_class.__name__} upgrade not required', logger.DEBUG,
f'{upgrade_class.__name__} upgrade not required',
logger.DEBUG,
)

for upgradeSubClass in upgrade_class.__subclasses__():

@@ -264,7 +288,15 @@ class SchemaUpgrade:
self.connection = connection

def has_table(self, table_name):
return len(self.connection.action('SELECT 1 FROM sqlite_master WHERE name = ?;', (table_name,)).fetchall()) > 0
return (
len(
self.connection.action(
'SELECT 1 FROM sqlite_master WHERE name = ?;',
(table_name,),
).fetchall(),
)
> 0
)

def has_column(self, table_name, column):
return column in self.connection.table_info(table_name)

@@ -282,5 +314,7 @@ class SchemaUpgrade:

def inc_db_version(self):
new_version = self.check_db_version() + 1
self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
self.connection.action(
'UPDATE db_version SET db_version = ?', [new_version],
)
return new_version
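The upsert above relies on total_changes to detect whether its UPDATE hit a row. A self-contained sketch of that UPDATE-then-INSERT pattern; the table and column handling is illustrative only:

import sqlite3

def upsert(connection, table_name, value_dict, key_dict):
    """UPDATE matching rows; INSERT only if nothing was updated."""
    def gen_params(my_dict):
        return [f'{k} = ?' for k in my_dict]

    changes_before = connection.total_changes
    connection.execute(
        f'UPDATE {table_name} '
        f'SET {", ".join(gen_params(value_dict))} '
        f'WHERE {" AND ".join(gen_params(key_dict))}',
        list(value_dict.values()) + list(key_dict.values()),
    )
    if connection.total_changes == changes_before:  # no row matched the keys
        columns = list(value_dict) + list(key_dict)
        placeholders = ', '.join('?' * len(columns))
        connection.execute(
            f'INSERT INTO {table_name} ({", ".join(columns)}) VALUES ({placeholders})',
            list(value_dict.values()) + list(key_dict.values()),
        )
    connection.commit()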
@@ -1,5 +1,7 @@
from __future__ import annotations

from core.plugins.downloaders.nzb.configuration import configure_nzbs
from core.plugins.downloaders.torrent.configuration import configure_torrent_class
from core.plugins.downloaders.torrent.configuration import (
configure_torrent_class,
)
from core.plugins.downloaders.torrent.configuration import configure_torrents
@@ -14,5 +14,7 @@ def configure_nzbs(config):

def configure_sabnzbd(config):
core.SABNZBD_HOST = config['sabnzbd_host']
core.SABNZBD_PORT = int(config['sabnzbd_port'] or 8080)  # defaults to accommodate NzbGet
core.SABNZBD_PORT = int(
config['sabnzbd_port'] or 8080,
)  # defaults to accommodate NzbGet
core.SABNZBD_APIKEY = config['sabnzbd_apikey']
@@ -30,7 +30,12 @@ def get_nzoid(input_name):
try:
result = r.json()
clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
slots.extend([(slot['nzo_id'], slot['filename']) for slot in result['queue']['slots']])
slots.extend(
[
(slot['nzo_id'], slot['filename'])
for slot in result['queue']['slots']
],
)
except Exception:
logger.warning('Data from SABnzbd queue could not be parsed')
params['mode'] = 'history'

@@ -42,7 +47,12 @@ def get_nzoid(input_name):
try:
result = r.json()
clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
slots.extend([(slot['nzo_id'], slot['name']) for slot in result['history']['slots']])
slots.extend(
[
(slot['nzo_id'], slot['name'])
for slot in result['history']['slots']
],
)
except Exception:
logger.warning('Data from SABnzbd history could not be parsed')
try:
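get_nzoid pairs each slot's nzo_id with its name, then matches against the cleaned input name. A sketch of the queue half of that flow against SABnzbd's JSON API (mode=queue); the host, port and API key are placeholders:

import os
import requests

def find_nzo_id(input_name, base_url='http://localhost:8080/api', apikey='APIKEY'):
    """Look up the SABnzbd queue for a job whose filename matches input_name."""
    params = {'apikey': apikey, 'mode': 'queue', 'output': 'json'}
    result = requests.get(base_url, params=params, timeout=30).json()
    slots = [
        (slot['nzo_id'], slot['filename'])
        for slot in result['queue']['slots']
    ]
    # Strip directory and extension, as the hunk above does.
    clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
    for nzo_id, name in slots:
        if name in (input_name, clean_name):
            return nzo_id
    return None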
@@ -6,9 +6,15 @@ from core.plugins.downloaders.torrent.utils import create_torrent_class

def configure_torrents(config):
torrent_config = config['Torrent']
core.TORRENT_CLIENT_AGENT = torrent_config['clientAgent']  # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent | synods | other
core.OUTPUT_DIRECTORY = torrent_config['outputDirectory']  # /abs/path/to/complete/
core.TORRENT_DEFAULT_DIRECTORY = torrent_config['default_downloadDirectory']
core.TORRENT_CLIENT_AGENT = torrent_config[
'clientAgent'
]  # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent | synods | other
core.OUTPUT_DIRECTORY = torrent_config[
'outputDirectory'
]  # /abs/path/to/complete/
core.TORRENT_DEFAULT_DIRECTORY = torrent_config[
'default_downloadDirectory'
]
core.TORRENT_NO_MANUAL = int(torrent_config['no_manual'], 0)

configure_torrent_linking(torrent_config)

@@ -29,13 +35,15 @@ def configure_torrent_linking(config):


def configure_flattening(config):
core.NOFLATTEN = (config['noFlatten'])
core.NOFLATTEN = config['noFlatten']
if isinstance(core.NOFLATTEN, str):
core.NOFLATTEN = core.NOFLATTEN.split(',')


def configure_torrent_categories(config):
core.CATEGORIES = (config['categories'])  # music,music_videos,pictures,software
core.CATEGORIES = config[
'categories'
]  # music,music_videos,pictures,software
if isinstance(core.CATEGORIES, str):
core.CATEGORIES = core.CATEGORIES.split(',')

@@ -54,7 +62,9 @@ def configure_torrent_deletion(config):


def configure_utorrent(config):
core.UTORRENT_WEB_UI = config['uTorrentWEBui']  # http://localhost:8090/gui/
core.UTORRENT_WEB_UI = config[
'uTorrentWEBui'
]  # http://localhost:8090/gui/
core.UTORRENT_USER = config['uTorrentUSR']  # mysecretusr
core.UTORRENT_PASSWORD = config['uTorrentPWD']  # mysecretpwr
@@ -30,7 +30,9 @@ def create_torrent_class(client_agent):


def pause_torrent(client_agent, input_hash, input_id, input_name):
logger.debug(f'Stopping torrent {input_name} in {client_agent} while processing')
logger.debug(
f'Stopping torrent {input_name} in {client_agent} while processing',
)
try:
if client_agent == 'utorrent' and core.TORRENT_CLASS != '':
core.TORRENT_CLASS.stop(input_hash)

@@ -44,7 +46,9 @@ def pause_torrent(client_agent, input_hash, input_id, input_name):
core.TORRENT_CLASS.pause(input_hash)
time.sleep(5)
except Exception:
logger.warning(f'Failed to stop torrent {input_name} in {client_agent}')
logger.warning(
f'Failed to stop torrent {input_name} in {client_agent}',
)


def resume_torrent(client_agent, input_hash, input_id, input_name):

@@ -64,7 +68,9 @@ def resume_torrent(client_agent, input_hash, input_id, input_name):
core.TORRENT_CLASS.resume(input_hash)
time.sleep(5)
except Exception:
logger.warning(f'Failed to start torrent {input_name} in {client_agent}')
logger.warning(
f'Failed to start torrent {input_name} in {client_agent}',
)


def remove_torrent(client_agent, input_hash, input_id, input_name):

@@ -84,6 +90,8 @@ def remove_torrent(client_agent, input_hash, input_id, input_name):
core.TORRENT_CLASS.delete_permanently(input_hash)
time.sleep(5)
except Exception:
logger.warning(f'Failed to delete torrent {input_name} in {client_agent}')
logger.warning(
f'Failed to delete torrent {input_name} in {client_agent}',
)
else:
resume_torrent(client_agent, input_hash, input_id, input_name)
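pause_torrent, resume_torrent and remove_torrent all branch on client_agent the same way. A compact sketch of that dispatch; only the stop and pause verbs are visible in the hunks above, so the mapping here is hypothetical and torrent_class stands in for whichever client object create_torrent_class produced:

import time

# Verb each client uses for 'pause'; entries beyond 'stop'/'pause' are assumptions.
PAUSE_VERBS = {'utorrent': 'stop', 'qbittorrent': 'pause'}

def pause_torrent(client_agent, torrent_class, input_hash):
    """Dispatch to whichever pause verb the connected client exposes."""
    verb = PAUSE_VERBS.get(client_agent)
    if verb is None or torrent_class == '':
        return False
    try:
        getattr(torrent_class, verb)(input_hash)
        time.sleep(5)  # let the client settle before processing begins
        return True
    except Exception:
        return False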
@@ -15,10 +15,11 @@ def configure_plex(config):

if plex_section:
if isinstance(plex_section, list):
plex_section = ','.join(plex_section)  # fix in case this imported as list.
plex_section = ','.join(
plex_section,
)  # fix in case this imported as list.
plex_section = [
tuple(item.split(','))
for item in plex_section.split('|')
tuple(item.split(',')) for item in plex_section.split('|')
]

core.PLEX_SECTION = plex_section

@@ -35,7 +36,9 @@ def plex_update(category):
section = None
if not core.PLEX_SECTION:
return
logger.debug(f'Attempting to update Plex Library for category {category}.', 'PLEX')
logger.debug(
f'Attempting to update Plex Library for category {category}.', 'PLEX',
)
for item in core.PLEX_SECTION:
if item[0] == category:
section = item[1]
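plex_update resolves the category to a library section and then asks Plex to rescan it. A sketch of the refresh request, assuming Plex's standard /library/sections/<id>/refresh endpoint with an X-Plex-Token; host, port and token are placeholders:

import requests

def plex_refresh(section_id, host='localhost', port=32400, token='PLEX_TOKEN'):
    """Ask the Plex Media Server to rescan one library section."""
    url = f'http://{host}:{port}/library/sections/{section_id}/refresh'
    response = requests.get(url, params={'X-Plex-Token': token}, timeout=30)
    return response.status_code == 200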
@@ -14,7 +14,9 @@ def import_subs(filename):
if not core.GETSUBS:
return
try:
subliminal.region.configure('dogpile.cache.dbm', arguments={'filename': 'cachefile.dbm'})
subliminal.region.configure(
'dogpile.cache.dbm', arguments={'filename': 'cachefile.dbm'},
)
except Exception:
pass

@@ -27,37 +29,54 @@ def import_subs(filename):
if not languages:
return

logger.info(f'Attempting to download subtitles for {filename}', 'SUBTITLES')
logger.info(
f'Attempting to download subtitles for {filename}', 'SUBTITLES',
)
try:
video = subliminal.scan_video(filename)
subtitles = subliminal.download_best_subtitles({video}, languages)
subliminal.save_subtitles(video, subtitles[video])

for subtitle in subtitles[video]:
subtitle_path = subliminal.subtitle.get_subtitle_path(video.name, subtitle.language)
subtitle_path = subliminal.subtitle.get_subtitle_path(
video.name, subtitle.language,
)
os.chmod(subtitle_path, 0o644)
except Exception as e:
logger.error(f'Failed to download subtitles for {filename} due to: {e}', 'SUBTITLES')
logger.error(
f'Failed to download subtitles for {filename} due to: {e}',
'SUBTITLES',
)


def rename_subs(path):
filepaths = []
sub_ext = ['.srt', '.sub', '.idx']
vidfiles = core.list_media_files(path, media=True, audio=False, meta=False, archives=False)
if not vidfiles or len(vidfiles) > 1:  # If there is more than 1 video file, or no video files, we can't rename subs.
vidfiles = core.list_media_files(
path, media=True, audio=False, meta=False, archives=False,
)
if (
not vidfiles or len(vidfiles) > 1
):  # If there is more than 1 video file, or no video files, we can't rename subs.
return
name = os.path.splitext(os.path.split(vidfiles[0])[1])[0]
for directory, _, filenames in os.walk(path):
for filename in filenames:
filepaths.extend([os.path.join(directory, filename)])
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in sub_ext]
subfiles = [
item for item in filepaths if os.path.splitext(item)[1] in sub_ext
]
subfiles.sort()  # This should sort subtitle names by language (alpha) and Number (where multiple)
renamed = []
for sub in subfiles:
subname, ext = os.path.splitext(os.path.basename(sub))
if name in subname:  # The sub file name already includes the video name.
if (
name in subname
):  # The sub file name already includes the video name.
continue
words = re.findall('[a-zA-Z]+', str(subname))  # find whole words in string
words = re.findall(
'[a-zA-Z]+', str(subname),
)  # find whole words in string
# parse the words for language descriptors.
lan = None
for word in words:

@@ -78,8 +97,12 @@ def rename_subs(path):
new_sub_name = name
else:
new_sub_name = f'{name}.{str(lan)}'
new_sub = os.path.join(directory, new_sub_name)  # full path and name less ext
if f'{new_sub}{ext}' in renamed:  # If duplicate names, add unique number before ext.
new_sub = os.path.join(
directory, new_sub_name,
)  # full path and name less ext
if (
f'{new_sub}{ext}' in renamed
):  # If duplicate names, add unique number before ext.
for i in range(1, len(renamed) + 1):
if f'{new_sub}.{i}{ext}' in renamed:
continue

@@ -87,7 +110,9 @@ def rename_subs(path):
break
new_sub = f'{new_sub}{ext}'  # add extension now
if os.path.isfile(new_sub):  # Don't copy over existing - final check.
logger.debug(f'Unable to rename sub file {sub} as destination {new_sub} already exists')
logger.debug(
f'Unable to rename sub file {sub} as destination {new_sub} already exists',
)
continue
logger.debug(
f'Renaming sub file from {sub} to {new_sub}',
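rename_subs boils down to: exactly one video in the folder, rename every .srt/.sub/.idx beside it to the video's name (optionally with a language tag), numbering duplicates and never clobbering existing files. A trimmed sketch of that core with the language parsing omitted; see the word-scanning loop above for that part:

import os

def rename_subs_basic(directory, video_name, sub_ext=('.srt', '.sub', '.idx')):
    """Rename subtitle files in directory to match a single video's name."""
    renamed = []
    for entry in sorted(os.listdir(directory)):
        base, ext = os.path.splitext(entry)
        if ext not in sub_ext or video_name in base:
            continue  # not a subtitle, or already carries the video name
        new_sub = os.path.join(directory, video_name)
        if f'{new_sub}{ext}' in renamed:  # duplicate name: add a unique number
            suffix = 1
            while f'{new_sub}.{suffix}{ext}' in renamed:
                suffix += 1
            new_sub = f'{new_sub}.{suffix}'
        new_sub = f'{new_sub}{ext}'
        if not os.path.isfile(new_sub):  # never copy over an existing file
            os.rename(os.path.join(directory, entry), new_sub)
            renamed.append(new_sub)
    return renamed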
@@ -35,24 +35,36 @@ def process():
)

core.DOWNLOAD_INFO = get_download_info(
os.path.basename(dir_name), 0,
os.path.basename(dir_name),
0,
)
if core.DOWNLOAD_INFO:
logger.info(f'Found download info for {os.path.basename(dir_name)}, setting variables now ...')
client_agent = core.DOWNLOAD_INFO[0]['client_agent'] or 'manual'
logger.info(
f'Found download info for {os.path.basename(dir_name)}, setting variables now ...',
)
client_agent = (
core.DOWNLOAD_INFO[0]['client_agent'] or 'manual'
)
download_id = core.DOWNLOAD_INFO[0]['input_id'] or ''
else:
logger.info(f'Unable to locate download info for {os.path.basename(dir_name)}, continuing to try and process this release ...')
logger.info(
f'Unable to locate download info for {os.path.basename(dir_name)}, continuing to try and process this release ...',
)
client_agent = 'manual'
download_id = ''

if client_agent and client_agent.lower() not in core.NZB_CLIENTS:
if (
client_agent
and client_agent.lower() not in core.NZB_CLIENTS
):
continue

input_name = os.path.basename(dir_name)

results = nzb.process(
dir_name, input_name, 0,
dir_name,
input_name,
0,
client_agent=client_agent,
download_id=download_id or None,
input_category=subsection,
@@ -22,7 +22,15 @@ from core.utils.files import extract_files
from core.utils.download_info import update_download_info_status


def process(input_directory, input_name=None, status=0, client_agent='manual', download_id=None, input_category=None, failure_link=None):
def process(
input_directory,
input_name=None,
status=0,
client_agent='manual',
download_id=None,
input_category=None,
failure_link=None,
):
if core.SAFE_MODE and input_directory == core.NZB_DEFAULT_DIRECTORY:
logger.error(
f'The input directory:[{input_directory}] is the Default Download Directory. Please configure category directories to prevent processing of other media.',

@@ -36,7 +44,9 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
download_id = get_nzoid(input_name)

if client_agent != 'manual' and not core.DOWNLOAD_INFO:
logger.debug(f'Adding NZB download info for directory {input_directory} to database')
logger.debug(
f'Adding NZB download info for directory {input_directory} to database',
)

my_db = main_db.DBConnection()

@@ -91,7 +101,9 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
section_name = section.keys()[0]
logger.info(f'Auto-detected SECTION:{section_name}')
else:
logger.error(f'Unable to locate a section with subsection:{input_category} enabled in your autoProcessMedia.cfg, exiting!')
logger.error(
f'Unable to locate a section with subsection:{input_category} enabled in your autoProcessMedia.cfg, exiting!',
)
return ProcessResult(
status_code=-1,
message='',

@@ -103,25 +115,35 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d

try:
if int(cfg.get('remote_path')) and not core.REMOTE_PATHS:
logger.error(f'Remote Path is enabled for {section_name}:{input_category} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!')
logger.error(
f'Remote Path is enabled for {section_name}:{input_category} but no Network mount points are defined. Please check your autoProcessMedia.cfg, exiting!',
)
return ProcessResult(
status_code=-1,
message='',
)
except Exception:
remote_path = cfg.get('remote_path')
logger.error(f'Remote Path {remote_path} is not valid for {section_name}:{input_category} Please set this to either 0 to disable or 1 to enable!')
logger.error(
f'Remote Path {remote_path} is not valid for {section_name}:{input_category} Please set this to either 0 to disable or 1 to enable!',
)

input_name, input_directory = convert_to_ascii(input_name, input_directory)

if extract == 1 and not (status > 0 and core.NOEXTRACTFAILED):
logger.debug(f'Checking for archives to extract in directory: {input_directory}')
logger.debug(
f'Checking for archives to extract in directory: {input_directory}',
)
extract_files(input_directory)

logger.info(f'Calling {section_name}:{input_category} to post-process:{input_name}')
logger.info(
f'Calling {section_name}:{input_category} to post-process:{input_name}',
)

if section_name == 'UserScript':
result = external_script(input_directory, input_name, input_category, section[usercat])
result = external_script(
input_directory, input_name, input_category, section[usercat],
)
else:
process_map = {
'CouchPotato': movies.process,

@@ -155,7 +177,13 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
if client_agent != 'manual':
# update download status in our DB
update_download_info_status(input_name, 1)
if section_name not in ['UserScript', 'NzbDrone', 'Sonarr', 'Radarr', 'Lidarr']:
if section_name not in [
'UserScript',
'NzbDrone',
'Sonarr',
'Radarr',
'Lidarr',
]:
# cleanup our processing folders of any misc unwanted files and empty directories
clean_dir(input_directory, section_name, input_category)
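The section auto-detection glimpsed above picks the processor by finding which config section has the input category enabled. A sketch of that lookup; the config shape shown is a hypothetical stand-in for autoProcessMedia.cfg:

def find_section(config, input_category):
    """Return the single section that has input_category enabled, else None."""
    matches = [
        section_name
        for section_name, subsections in config.items()
        if input_category in subsections
        and int(subsections[input_category].get('enabled', 0))
    ]
    # Exactly one match is required; zero or several means we cannot route.
    return matches[0] if len(matches) == 1 else None

# Hypothetical config shape mirroring autoProcessMedia.cfg:
cfg = {'CouchPotato': {'movie': {'enabled': 1}}, 'SickBeard': {'tv': {'enabled': 1}}}
assert find_section(cfg, 'movie') == 'CouchPotato'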
@@ -65,11 +65,17 @@ def _parse_health_status():
# Unpack was skipped due to nzb-file properties
# or due to errors during par-check
if int(os.environ['NZBPP_HEALTH']) < 1000:
logger.warning('Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \'failed\'')
logger.warning(
'Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \'failed\'',
)
status = 1
else:
logger.info('Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful')
logger.info('Please check your Par-check/repair settings for future downloads.')
logger.info(
'Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful',
)
logger.info(
'Please check your Par-check/repair settings for future downloads.',
)
return status

@@ -89,7 +95,9 @@ def check_version():
version = os.environ['NZBOP_VERSION']
# Check if the script is called from nzbget 11.0 or later
if version[0:5] < '11.0':
logger.error(f'NZBGet Version {version} is not supported. Please update NZBGet.')
logger.error(
f'NZBGet Version {version} is not supported. Please update NZBGet.',
)
sys.exit(core.NZBGET_POSTPROCESS_ERROR)
logger.info(f'Script triggered from NZBGet Version {version}.')
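One caveat in check_version above: version[0:5] < '11.0' compares strings lexicographically, so a single-digit major like '9.0' is not flagged as unsupported ('9' sorts above '1'). A sketch of a numeric comparison that avoids the pitfall; the '-testing' suffix handling is an assumption about NZBGet's version strings:

def version_tuple(version):
    """Turn '21.1-testing' into (21, 1) for safe numeric comparison."""
    numbers = version.split('-')[0]
    return tuple(int(part) for part in numbers.split('.') if part.isdigit())

def is_supported(version, minimum='11.0'):
    return version_tuple(version) >= version_tuple(minimum)

assert is_supported('21.0')
assert not is_supported('9.0')  # the string comparison would get this wrong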
@@ -11,21 +11,57 @@ from core import logger
from core.utils.files import list_media_files

reverse_list = [
r'\.\d{2}e\d{2}s\.', r'\.[pi]0801\.', r'\.p027\.', r'\.[pi]675\.', r'\.[pi]084\.', r'\.p063\.',
r'\b[45]62[xh]\.', r'\.yarulb\.', r'\.vtd[hp]\.',
r'\.ld[.-]?bew\.', r'\.pir.?(dov|dvd|bew|db|rb)\.', r'\brdvd\.', r'\.vts\.', r'\.reneercs\.',
r'\.dcv\.', r'\b(pir|mac)dh\b', r'\.reporp\.', r'\.kcaper\.',
r'\.lanretni\.', r'\b3ca\b', r'\.cstn\.',
r'\.\d{2}e\d{2}s\.',
r'\.[pi]0801\.',
r'\.p027\.',
r'\.[pi]675\.',
r'\.[pi]084\.',
r'\.p063\.',
r'\b[45]62[xh]\.',
r'\.yarulb\.',
r'\.vtd[hp]\.',
r'\.ld[.-]?bew\.',
r'\.pir.?(dov|dvd|bew|db|rb)\.',
r'\brdvd\.',
r'\.vts\.',
r'\.reneercs\.',
r'\.dcv\.',
r'\b(pir|mac)dh\b',
r'\.reporp\.',
r'\.kcaper\.',
r'\.lanretni\.',
r'\b3ca\b',
r'\.cstn\.',
]
reverse_pattern = re.compile('|'.join(reverse_list), flags=re.IGNORECASE)
season_pattern = re.compile(r'(.*\.\d{2}e\d{2}s\.)(.*)', flags=re.IGNORECASE)
word_pattern = re.compile(r'([^A-Z0-9]*[A-Z0-9]+)')
media_list = [
r'\.s\d{2}e\d{2}\.', r'\.1080[pi]\.', r'\.720p\.', r'\.576[pi]', r'\.480[pi]\.', r'\.360p\.',
r'\.[xh]26[45]\b', r'\.bluray\.', r'\.[hp]dtv\.',
r'\.web[.-]?dl\.', r'\.(vod|dvd|web|bd|br).?rip\.', r'\.dvdr\b', r'\.stv\.', r'\.screener\.', r'\.vcd\.',
r'\bhd(cam|rip)\b', r'\.proper\.', r'\.repack\.',
r'\.internal\.', r'\bac3\b', r'\.ntsc\.', r'\.pal\.', r'\.secam\.', r'\bdivx\b', r'\bxvid\b',
r'\.s\d{2}e\d{2}\.',
r'\.1080[pi]\.',
r'\.720p\.',
r'\.576[pi]',
r'\.480[pi]\.',
r'\.360p\.',
r'\.[xh]26[45]\b',
r'\.bluray\.',
r'\.[hp]dtv\.',
r'\.web[.-]?dl\.',
r'\.(vod|dvd|web|bd|br).?rip\.',
r'\.dvdr\b',
r'\.stv\.',
r'\.screener\.',
r'\.vcd\.',
r'\bhd(cam|rip)\b',
r'\.proper\.',
r'\.repack\.',
r'\.internal\.',
r'\bac3\b',
r'\.ntsc\.',
r'\.pal\.',
r'\.secam\.',
r'\bdivx\b',
r'\bxvid\b',
]
media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE)
garbage_name = re.compile(r'^[a-zA-Z0-9]*$')

@@ -72,9 +108,14 @@ def strip_groups(filename):

def rename_file(filename, newfile_path):
if os.path.isfile(newfile_path):
newfile_path = os.path.splitext(newfile_path)[0] + '.NTM' + os.path.splitext(newfile_path)[1]
newfile_path = (
os.path.splitext(newfile_path)[0]
+ '.NTM'
+ os.path.splitext(newfile_path)[1]
)
logger.debug(
f'Replacing file name {filename} with download name {newfile_path}', 'EXCEPTION',
f'Replacing file name {filename} with download name {newfile_path}',
'EXCEPTION',
)
try:
os.rename(filename, newfile_path)

@@ -84,16 +125,25 @@ def rename_file(filename, newfile_path):

def replace_filename(filename, dirname, name):
head, file_extension = os.path.splitext(os.path.basename(filename))
if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
if (
media_pattern.search(os.path.basename(dirname).replace(' ', '.'))
is not None
):
newname = os.path.basename(dirname).replace(' ', '.')
logger.debug(f'Replacing file name {head} with directory name {newname}', 'EXCEPTION')
logger.debug(
f'Replacing file name {head} with directory name {newname}',
'EXCEPTION',
)
elif media_pattern.search(name.replace(' ', '.').lower()) is not None:
newname = name.replace(' ', '.')
logger.debug(
f'Replacing file name {head} with download name {newname}', 'EXCEPTION',
f'Replacing file name {head} with download name {newname}',
'EXCEPTION',
)
else:
logger.warning(f'No name replacement determined for {head}', 'EXCEPTION')
logger.warning(
f'No name replacement determined for {head}', 'EXCEPTION',
)
newname = name
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)

@@ -120,7 +170,8 @@ def reverse_filename(filename, dirname, name):
newname = head[::-1].title()
newname = newname.replace(' ', '.')
logger.debug(
f'Reversing filename {head} to {newname}', 'EXCEPTION',
f'Reversing filename {head} to {newname}',
'EXCEPTION',
)
newfile = newname + file_extension
newfile_path = os.path.join(dirname, newfile)

@@ -144,16 +195,21 @@ def rename_script(dirname):
continue
if len(cmd) == 2 and os.path.isfile(os.path.join(dirname, cmd[0])):
orig = os.path.join(dirname, cmd[0])
dest = os.path.join(dirname, cmd[1].split('\\')[-1].split('/')[-1])
dest = os.path.join(
dirname, cmd[1].split('\\')[-1].split('/')[-1],
)
if os.path.isfile(dest):
continue
logger.debug(
f'Renaming file {orig} to {dest}', 'EXCEPTION',
f'Renaming file {orig} to {dest}',
'EXCEPTION',
)
try:
os.rename(orig, dest)
except Exception as error:
logger.error(f'Unable to rename file due to: {error}', 'EXCEPTION')
logger.error(
f'Unable to rename file due to: {error}', 'EXCEPTION',
)


def par2(dirname):

@@ -182,16 +238,21 @@ def par2(dirname):
cmd = f'{cmd} {item}'
logger.debug(f'calling command:{cmd}', 'PAR2')
try:
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket)
proc = subprocess.Popen(
command, stdout=bitbucket, stderr=bitbucket,
)
proc.communicate()
result = proc.returncode
except Exception:
logger.error(f'par2 file processing for {parfile} has failed', 'PAR2')
logger.error(
f'par2 file processing for {parfile} has failed', 'PAR2',
)
if result == 0:
logger.info('par2 file processing succeeded', 'PAR2')
os.chdir(pwd)
bitbucket.close()


# dict for custom groups
# we can add more to this list
# _customgroups = {'Q o Q': process_qoq, '-ECI': process_eci}
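The reverse_list patterns are the media_list tokens spelled backwards ('p027' is '720p', 'yarulb' is 'bluray'), which is how reversed release names are spotted before reverse_filename flips them back. A tiny sketch of the detect-and-flip step using two of the patterns above:

import re

reverse_pattern = re.compile(r'\.p027\.|\.yarulb\.', flags=re.IGNORECASE)

def maybe_unreverse(filename):
    """If a name contains reversed media tokens, return it reversed back."""
    head, ext = filename.rsplit('.', 1)
    if reverse_pattern.search(f'.{head}.'):
        head = head[::-1]  # 'p027.eivom' -> 'movie.720p'
    return f'{head}.{ext}'

assert maybe_unreverse('p027.eivom.mkv') == 'movie.720p.mkv'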
File diff suppressed because it is too large
@@ -14,12 +14,18 @@ from core.auto_process.common import ProcessResult

def external_script(output_destination, torrent_name, torrent_label, settings):
final_result = 0  # start at 0.
num_files = 0
core.USER_SCRIPT_MEDIAEXTENSIONS = settings.get('user_script_mediaExtensions', '')
core.USER_SCRIPT_MEDIAEXTENSIONS = settings.get(
'user_script_mediaExtensions', '',
)
try:
if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str):
core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.lower().split(',')
core.USER_SCRIPT_MEDIAEXTENSIONS = (
core.USER_SCRIPT_MEDIAEXTENSIONS.lower().split(',')
)
except Exception:
logger.error('user_script_mediaExtensions could not be set', 'USERSCRIPT')
logger.error(
'user_script_mediaExtensions could not be set', 'USERSCRIPT',
)
core.USER_SCRIPT_MEDIAEXTENSIONS = []

core.USER_SCRIPT = settings.get('user_script_path', '')

@@ -42,7 +48,9 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
core.USER_SCRIPT_SUCCESSCODES = settings.get('user_script_successCodes', 0)
try:
if isinstance(core.USER_SCRIPT_SUCCESSCODES, str):
core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',')
core.USER_SCRIPT_SUCCESSCODES = (
core.USER_SCRIPT_SUCCESSCODES.split(',')
)
except Exception:
logger.error('user_script_successCodes could not be set', 'USERSCRIPT')
core.USER_SCRIPT_SUCCESSCODES = 0

@@ -51,11 +59,20 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
core.USER_SCRIPT_RUNONCE = int(settings.get('user_script_runOnce', 1))

if core.CHECK_MEDIA:
for video in list_media_files(output_destination, media=True, audio=False, meta=False, archives=False):
for video in list_media_files(
output_destination,
media=True,
audio=False,
meta=False,
archives=False,
):
if transcoder.is_video_good(video, 0):
import_subs(video)
else:
logger.info(f'Corrupt video file found {video}. Deleting.', 'USERSCRIPT')
logger.info(
f'Corrupt video file found {video}. Deleting.',
'USERSCRIPT',
)
os.unlink(video)

for dirpath, _, filenames in os.walk(output_destination):

@@ -63,11 +80,19 @@ def external_script(output_destination, torrent_name, torrent_label, settings):

file_path = core.os.path.join(dirpath, file)
file_name, file_extension = os.path.splitext(file)
logger.debug(f'Checking file {file} to see if this should be processed.', 'USERSCRIPT')
logger.debug(
f'Checking file {file} to see if this should be processed.',
'USERSCRIPT',
)

if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or 'all' in core.USER_SCRIPT_MEDIAEXTENSIONS:
if (
file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS
or 'all' in core.USER_SCRIPT_MEDIAEXTENSIONS
):
num_files += 1
if core.USER_SCRIPT_RUNONCE == 1 and num_files > 1:  # we have already run once, so just continue to get number of files.
if (
core.USER_SCRIPT_RUNONCE == 1 and num_files > 1
):  # we have already run once, so just continue to get number of files.
continue
command = [core.USER_SCRIPT]
for param in core.USER_SCRIPT_PARAM:

@@ -95,19 +120,31 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
cmd = ''
for item in command:
cmd = f'{cmd} {item}'
logger.info(f'Running script {cmd} on file {file_path}.', 'USERSCRIPT')
logger.info(
f'Running script {cmd} on file {file_path}.', 'USERSCRIPT',
)
try:
p = Popen(command)
res = p.wait()
if str(res) in core.USER_SCRIPT_SUCCESSCODES:  # Linux returns 0 for successful.
if (
str(res) in core.USER_SCRIPT_SUCCESSCODES
):  # Linux returns 0 for successful.
logger.info(f'UserScript {command[0]} was successfull')
result = 0
else:
logger.error(f'UserScript {command[0]} has failed with return code: {res}', 'USERSCRIPT')
logger.info(f'If the UserScript completed successfully you should add {res} to the user_script_successCodes', 'USERSCRIPT')
logger.error(
f'UserScript {command[0]} has failed with return code: {res}',
'USERSCRIPT',
)
logger.info(
f'If the UserScript completed successfully you should add {res} to the user_script_successCodes',
'USERSCRIPT',
)
result = int(1)
except Exception:
logger.error(f'UserScript {command[0]} has failed', 'USERSCRIPT')
logger.error(
f'UserScript {command[0]} has failed', 'USERSCRIPT',
)
result = int(1)
final_result += result

@@ -116,14 +153,25 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
for file in filenames:
file_name, file_extension = os.path.splitext(file)

if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or core.USER_SCRIPT_MEDIAEXTENSIONS == 'ALL':
if (
file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS
or core.USER_SCRIPT_MEDIAEXTENSIONS == 'ALL'
):
num_files_new += 1

if core.USER_SCRIPT_CLEAN == int(1) and num_files_new == 0 and final_result == 0:
logger.info(f'All files have been processed. Cleaning outputDirectory {output_destination}')
if (
core.USER_SCRIPT_CLEAN == int(1)
and num_files_new == 0
and final_result == 0
):
logger.info(
f'All files have been processed. Cleaning outputDirectory {output_destination}',
)
remove_dir(output_destination)
elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0:
logger.info(f'{num_files} files were processed, but {num_files_new} still remain. outputDirectory will not be cleaned.')
logger.info(
f'{num_files} files were processed, but {num_files_new} still remain. outputDirectory will not be cleaned.',
)
return ProcessResult(
status_code=final_result,
message='User Script Completed',
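external_script's inner loop is: build the command, run it, compare the return code against the configured success codes. A minimal sketch of that contract; the success-code list arrives as strings from the config, hence the str(res) comparison:

from subprocess import Popen

def run_user_script(command, success_codes=('0',)):
    """Run one user script invocation; True if its exit code is whitelisted."""
    try:
        res = Popen(command).wait()
    except Exception:
        return False  # script missing or not executable
    return str(res) in success_codes

# e.g. run_user_script(['/path/to/script.sh', '/downloads/file.mkv'])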
@@ -13,7 +13,9 @@ from core.utils.paths import flatten_dir


def flatten(output_destination):
return flatten_dir(output_destination, list_media_files(output_destination))
return flatten_dir(
output_destination, list_media_files(output_destination),
)


def clean_dir(path, section, subsection):

@@ -21,7 +23,9 @@ def clean_dir(path, section, subsection):
min_size = int(cfg.get('minSize', 0))
delete_ignored = int(cfg.get('delete_ignored', 0))
try:
files = list_media_files(path, min_size=min_size, delete_ignored=delete_ignored)
files = list_media_files(
path, min_size=min_size, delete_ignored=delete_ignored,
)
except Exception:
files = []
return clean_directory(path, files)

@@ -37,21 +41,20 @@ def process_dir(path, link):

# Generate list of sync files
sync_files = (
item for item in dir_contents
item
for item in dir_contents
if os.path.splitext(item)[1] in ['.!sync', '.bts']
)

# Generate a list of file paths
filepaths = (
os.path.join(path, item) for item in dir_contents
os.path.join(path, item)
for item in dir_contents
if item not in ['Thumbs.db', 'thumbs.db']
)

# Generate a list of media files
mediafiles = (
item for item in filepaths
if os.path.isfile(item)
)
mediafiles = (item for item in filepaths if os.path.isfile(item))

if any(sync_files):
logger.info('')

@@ -60,26 +63,23 @@ def process_dir(path, link):
try:
move_file(mediafile, path, link)
except Exception as e:
logger.error(f'Failed to move {os.path.split(mediafile)[1]} to its own directory: {e}')
logger.error(
f'Failed to move {os.path.split(mediafile)[1]} to its own directory: {e}',
)

# removeEmptyFolders(path, removeRoot=False)

# Generate all path contents
path_contents = (
os.path.join(path, item)
for item in os.listdir(path)
)
path_contents = (os.path.join(path, item) for item in os.listdir(path))

# Generate all directories from path contents
directories = (
path for path in path_contents
if os.path.isdir(path)
)
directories = (path for path in path_contents if os.path.isdir(path))

for directory in directories:
dir_contents = os.listdir(directory)
sync_files = (
item for item in dir_contents
item
for item in dir_contents
if os.path.splitext(item)[1] in ['.!sync', '.bts']
)
if not any(dir_contents) or any(sync_files):

@@ -101,7 +101,9 @@ def get_dirs(section, subsection, link='hard'):
try:
to_return.extend(process_dir(directory, link))
except Exception as e:
logger.error(f'Failed to add directories from {watch_directory} for post-processing: {e}')
logger.error(
f'Failed to add directories from {watch_directory} for post-processing: {e}',
)

if core.USE_LINK == 'move':
try:

@@ -109,10 +111,14 @@ def get_dirs(section, subsection, link='hard'):
if os.path.exists(output_directory):
to_return.extend(process_dir(output_directory, link))
except Exception as e:
logger.error(f'Failed to add directories from {core.OUTPUT_DIRECTORY} for post-processing: {e}')
logger.error(
f'Failed to add directories from {core.OUTPUT_DIRECTORY} for post-processing: {e}',
)

if not to_return:
logger.debug(f'No directories identified in {section}:{subsection} for post-processing')
logger.debug(
f'No directories identified in {section}:{subsection} for post-processing',
)

return list(set(to_return))
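process_dir builds its work lists lazily with generator expressions, so nothing is moved while sync files are present. A compact sketch of the same filtering chain over a directory listing:

import os

def scan_dir(path):
    """Split a directory into sync markers and movable media candidates."""
    dir_contents = os.listdir(path)
    sync_files = (
        item
        for item in dir_contents
        if os.path.splitext(item)[1] in ['.!sync', '.bts']
    )
    filepaths = (
        os.path.join(path, item)
        for item in dir_contents
        if item not in ['Thumbs.db', 'thumbs.db']
    )
    mediafiles = [item for item in filepaths if os.path.isfile(item)]
    # any() consumes the generator: a single sync marker vetoes processing.
    return [] if any(sync_files) else mediafiles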
@@ -24,7 +24,7 @@ def char_replace(name_in):
if (len(name) != 1) & (Idx < (len(name) - 1)):
# Detect UTF-8
if ((name[Idx] == 0xC2) | (name[Idx] == 0xC3)) & (
(name[Idx + 1] >= 0xA0) & (name[Idx + 1] <= 0xFF)
(name[Idx + 1] >= 0xA0) & (name[Idx + 1] <= 0xFF)
):
encoding = 'utf-8'
break

@@ -56,7 +56,9 @@ def char_replace(name_in):
def convert_to_ascii(input_name, dir_name):

ascii_convert = int(core.CFG['ASCII']['convert'])
if ascii_convert == 0 or os.name == 'nt':  # just return if we don't want to convert or on windows os and '\' is replaced!.
if (
ascii_convert == 0 or os.name == 'nt'
):  # just return if we don't want to convert or on windows os and '\' is replaced!.
return input_name, dir_name

encoded, input_name = char_replace(input_name)

@@ -77,14 +79,22 @@ def char_replace(name_in):
for subdirname in dirnames:
encoded, subdirname2 = char_replace(subdirname)
if encoded:
logger.info(f'Renaming directory to: {subdirname2}.', 'ENCODER')
os.rename(os.path.join(dirname, subdirname), os.path.join(dirname, subdirname2))
logger.info(
f'Renaming directory to: {subdirname2}.', 'ENCODER',
)
os.rename(
os.path.join(dirname, subdirname),
os.path.join(dirname, subdirname2),
)

for dirname, _, filenames in os.walk(dir_name):
for filename in filenames:
encoded, filename2 = char_replace(filename)
if encoded:
logger.info(f'Renaming file to: {filename2}.', 'ENCODER')
os.rename(os.path.join(dirname, filename), os.path.join(dirname, filename2))
os.rename(
os.path.join(dirname, filename),
os.path.join(dirname, filename2),
)

return input_name, dir_name
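char_replace's detection reads byte pairs: a 0xC2/0xC3 lead byte followed by a continuation byte in the 0xA0-0xFF range marks two-byte UTF-8 for the Latin-1 repertoire. A sketch of that check on its own, covering only the two lead bytes the original tests for:

def looks_like_utf8(name: bytes) -> bool:
    """Detect the two-byte UTF-8 sequences used for Latin-1 characters."""
    for idx in range(len(name) - 1):
        if name[idx] in (0xC2, 0xC3) and 0xA0 <= name[idx + 1] <= 0xFF:
            return True
    return False

assert looks_like_utf8('café'.encode('utf-8'))        # é -> 0xC3 0xA9
assert not looks_like_utf8('café'.encode('latin-1'))  # é -> single byte 0xE9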
@@ -20,7 +20,9 @@ from core.utils.paths import make_dir


def move_file(mediafile, path, link):
logger.debug(f'Found file {os.path.split(mediafile)[1]} in root directory {path}.')
logger.debug(
f'Found file {os.path.split(mediafile)[1]} in root directory {path}.',
)
new_path = None
file_ext = os.path.splitext(mediafile)[1]
try:

@@ -32,7 +34,9 @@ def move_file(mediafile, path, link):
album = f.album

# create new path
new_path = os.path.join(path, f'{sanitize_name(artist)} - {sanitize_name(album)}')
new_path = os.path.join(
path, f'{sanitize_name(artist)} - {sanitize_name(album)}',
)
elif file_ext in core.MEDIA_CONTAINER:
f = guessit.guessit(mediafile)

@@ -44,7 +48,9 @@ def move_file(mediafile, path, link):

new_path = os.path.join(path, sanitize_name(title))
except Exception as e:
logger.error(f'Exception parsing name for media file: {os.path.split(mediafile)[1]}: {e}')
logger.error(
f'Exception parsing name for media file: {os.path.split(mediafile)[1]}: {e}',
)

if not new_path:
title = os.path.splitext(os.path.basename(mediafile))[0]

@@ -58,14 +64,19 @@ def move_file(mediafile, path, link):

# Just fail-safe incase we already have afile with this clean-name (was actually a bug from earlier code, but let's be safe).
if os.path.isfile(new_path):
new_path2 = os.path.join(os.path.join(os.path.split(new_path)[0], 'new'), os.path.split(new_path)[1])
new_path2 = os.path.join(
os.path.join(os.path.split(new_path)[0], 'new'),
os.path.split(new_path)[1],
)
new_path = new_path2

# create new path if it does not exist
if not os.path.exists(new_path):
make_dir(new_path)

newfile = os.path.join(new_path, sanitize_name(os.path.split(mediafile)[1]))
newfile = os.path.join(
new_path, sanitize_name(os.path.split(mediafile)[1]),
)
try:
newfile = newfile.encode(core.SYS_ENCODING)
except Exception:

@@ -84,7 +95,9 @@ def is_min_size(input_name, min_size):
try:
input_size = get_dir_size(os.path.dirname(input_name))
except Exception:
logger.error(f'Failed to get file size for {input_name}', 'MINSIZE')
logger.error(
f'Failed to get file size for {input_name}', 'MINSIZE',
)
return True

# Ignore files under a certain size

@@ -100,7 +113,15 @@ def is_archive_file(filename):
return False


def is_media_file(mediafile, media=True, audio=True, meta=True, archives=True, other=False, otherext=None):
def is_media_file(
mediafile,
media=True,
audio=True,
meta=True,
archives=True,
other=False,
otherext=None,
):
if otherext is None:
otherext = []

@@ -113,16 +134,28 @@ def is_media_file(mediafile, media=True, audio=True, meta=True, archives=True, o
except Exception:
pass

return any([
(media and file_ext.lower() in core.MEDIA_CONTAINER),
(audio and file_ext.lower() in core.AUDIO_CONTAINER),
(meta and file_ext.lower() in core.META_CONTAINER),
(archives and is_archive_file(mediafile)),
(other and (file_ext.lower() in otherext or 'all' in otherext)),
])
return any(
[
(media and file_ext.lower() in core.MEDIA_CONTAINER),
(audio and file_ext.lower() in core.AUDIO_CONTAINER),
(meta and file_ext.lower() in core.META_CONTAINER),
(archives and is_archive_file(mediafile)),
(other and (file_ext.lower() in otherext or 'all' in otherext)),
],
)


def list_media_files(path, min_size=0, delete_ignored=0, media=True, audio=True, meta=True, archives=True, other=False, otherext=None):
def list_media_files(
path,
min_size=0,
delete_ignored=0,
media=True,
audio=True,
meta=True,
archives=True,
other=False,
otherext=None,
):
if otherext is None:
otherext = []

@@ -130,13 +163,17 @@ def list_media_files(path, min_size=0, delete_ignored=0, media=True, audio=True,
if not os.path.isdir(path):
if os.path.isfile(path):  # Single file downloads.
cur_file = os.path.split(path)[1]
if is_media_file(cur_file, media, audio, meta, archives, other, otherext):
if is_media_file(
cur_file, media, audio, meta, archives, other, otherext,
):
# Optionally ignore sample files
if is_sample(path) or not is_min_size(path, min_size):
if delete_ignored == 1:
try:
os.unlink(path)
logger.debug(f'Ignored file {cur_file} has been removed ...')
logger.debug(
f'Ignored file {cur_file} has been removed ...',
)
except Exception:
pass
else:

@@ -149,15 +186,31 @@ def list_media_files(path, min_size=0, delete_ignored=0, media=True, audio=True,

# if it's a folder do it recursively
if os.path.isdir(full_cur_file) and not cur_file.startswith('.'):
files += list_media_files(full_cur_file, min_size, delete_ignored, media, audio, meta, archives, other, otherext)
files += list_media_files(
full_cur_file,
min_size,
delete_ignored,
media,
audio,
meta,
archives,
other,
otherext,
)

elif is_media_file(cur_file, media, audio, meta, archives, other, otherext):
elif is_media_file(
cur_file, media, audio, meta, archives, other, otherext,
):
# Optionally ignore sample files
if is_sample(full_cur_file) or not is_min_size(full_cur_file, min_size):
if is_sample(full_cur_file) or not is_min_size(
full_cur_file, min_size,
):
if delete_ignored == 1:
try:
os.unlink(full_cur_file)
logger.debug(f'Ignored file {cur_file} has been removed ...')
logger.debug(
f'Ignored file {cur_file} has been removed ...',
)
except Exception:
pass
continue

@@ -171,7 +224,9 @@ def extract_files(src, dst=None, keep_archive=None):
extracted_folder = []
extracted_archive = []

for inputFile in list_media_files(src, media=False, audio=False, meta=False, archives=True):
for inputFile in list_media_files(
src, media=False, audio=False, meta=False, archives=True,
):
dir_path = os.path.dirname(inputFile)
full_file_name = os.path.basename(inputFile)
archive_name = os.path.splitext(full_file_name)[0]

@@ -188,13 +243,17 @@ def extract_files(src, dst=None, keep_archive=None):
logger.error(f'Extraction failed for: {full_file_name}')

for folder in extracted_folder:
for inputFile in list_media_files(folder, media=False, audio=False, meta=False, archives=True):
for inputFile in list_media_files(
folder, media=False, audio=False, meta=False, archives=True,
):
full_file_name = os.path.basename(inputFile)
archive_name = os.path.splitext(full_file_name)[0]
archive_name = re.sub(r'part[0-9]+', '', archive_name)
if archive_name not in extracted_archive or keep_archive:
continue  # don't remove if we haven't extracted this archive, or if we want to preserve them.
logger.info(f'Removing extracted archive {full_file_name} from folder {folder} ...')
logger.info(
f'Removing extracted archive {full_file_name} from folder {folder} ...',
)
try:
if not os.access(inputFile, os.W_OK):
os.chmod(inputFile, stat.S_IWUSR)
@@ -211,24 +270,35 @@ def backup_versioned_file(old_file, version):

    while not os.path.isfile(new_file):
        if not os.path.isfile(old_file):
            logger.log(f'Not creating backup, {old_file} doesn\'t exist', logger.DEBUG)
            logger.log(
                f'Not creating backup, {old_file} doesn\'t exist', logger.DEBUG,
            )
            break

        try:
            logger.log('Trying to back up {old} to {new}'.format(old=old_file, new=new_file), logger.DEBUG)
            logger.log(
                'Trying to back up {old} to {new}'.format(
                    old=old_file, new=new_file,
                ),
                logger.DEBUG,
            )
            shutil.copy(old_file, new_file)
            logger.log('Backup done', logger.DEBUG)
            break
        except Exception as error:
            logger.log(
                f'Error while trying to back up {old_file} to {new_file} : {error}', logger.WARNING,
                f'Error while trying to back up {old_file} to {new_file} : {error}',
                logger.WARNING,
            )
            num_tries += 1
            time.sleep(1)
            logger.log('Trying again.', logger.DEBUG)

    if num_tries >= 10:
        logger.log(f'Unable to back up {old_file} to {new_file} please do it manually.', logger.ERROR)
        logger.log(
            f'Unable to back up {old_file} to {new_file} please do it manually.',
            logger.ERROR,
        )
        return False

    return True
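The loop above amounts to a bounded retry: attempt the copy, wait a second on failure, and give up after ten tries. A self-contained sketch of the same pattern (names are illustrative, not the module's API):

import shutil
import time

def copy_with_retries(src, dst, max_tries=10):
    for attempt in range(1, max_tries + 1):
        try:
            shutil.copy(src, dst)
            return True  # mirrors the 'Backup done' branch
        except OSError as error:
            print(f'copy failed ({attempt}/{max_tries}): {error}')
            time.sleep(1)
    return False  # mirrors the num_tries >= 10 branch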
@@ -32,7 +32,9 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
    if 'NZBPR__DNZB_MOREINFO' in os.environ:
        dnzb_more_info = os.environ.get('NZBPR__DNZB_MOREINFO', '')
        if dnzb_more_info != '':
            regex = re.compile(r'^http://www.imdb.com/title/(tt[0-9]+)/$', re.IGNORECASE)
            regex = re.compile(
                r'^http://www.imdb.com/title/(tt[0-9]+)/$', re.IGNORECASE,
            )
            m = regex.match(dnzb_more_info)
            if m:
                imdbid = m.group(1)
@@ -57,15 +59,19 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
    url = 'http://www.omdbapi.com'

    if not omdb_api_key:
        logger.info('Unable to determine imdbID: No api key provided for omdbapi.com.')
        logger.info(
            'Unable to determine imdbID: No api key provided for omdbapi.com.',
        )
        return

    logger.debug(f'Opening URL: {url}')

    try:
        r = requests.get(
            url, params={'apikey': omdb_api_key, 'y': year, 't': title},
            verify=False, timeout=(60, 300),
            url,
            params={'apikey': omdb_api_key, 'y': year, 't': title},
            verify=False,
            timeout=(60, 300),
        )
    except requests.ConnectionError:
        logger.error(f'Unable to open URL {url}')
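A hedged sketch of the lookup this hunk wraps: OMDb resolves a title/year pair to an imdbID. The api_key argument is a placeholder (omdbapi.com requires a real key), and the project-specific verify=False is omitted here:

import requests

def lookup_imdbid(title, year, api_key):
    r = requests.get(
        'http://www.omdbapi.com',
        params={'apikey': api_key, 't': title, 'y': year},
        timeout=(60, 300),
    )
    return r.json().get('imdbID')

# lookup_imdbid('The Matrix', '1999', api_key) -> 'tt0133093'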
@@ -89,7 +95,9 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
    return imdbid


def category_search(input_directory, input_name, input_category, root, categories):
def category_search(
    input_directory, input_name, input_category, root, categories,
):
    tordir = False

    if input_directory is None:  # =Nothing to process here.
@@ -98,64 +106,103 @@ def category_search(input_directory, input_name, input_category, root, categories):
    pathlist = os.path.normpath(input_directory).split(os.sep)

    if input_category and input_category in pathlist:
        logger.debug(f'SEARCH: Found the Category: {input_category} in directory structure')
        logger.debug(
            f'SEARCH: Found the Category: {input_category} in directory structure',
        )
    elif input_category:
        logger.debug(f'SEARCH: Could not find the category: {input_category} in the directory structure')
        logger.debug(
            f'SEARCH: Could not find the category: {input_category} in the directory structure',
        )
    else:
        try:
            input_category = list(set(pathlist) & set(categories))[-1]  # assume last match is most relevant category.
            logger.debug(f'SEARCH: Found Category: {input_category} in directory structure')
            input_category = list(set(pathlist) & set(categories))[
                -1
            ]  # assume last match is most relevant category.
            logger.debug(
                f'SEARCH: Found Category: {input_category} in directory structure',
            )
        except IndexError:
            input_category = ''
            logger.debug('SEARCH: Could not find a category in the directory structure')
    if not os.path.isdir(input_directory) and os.path.isfile(input_directory):  # If the input directory is a file
            logger.debug(
                'SEARCH: Could not find a category in the directory structure',
            )
    if not os.path.isdir(input_directory) and os.path.isfile(
        input_directory,
    ):  # If the input directory is a file
        if not input_name:
            input_name = os.path.split(os.path.normpath(input_directory))[1]
        return input_directory, input_name, input_category, root

    if input_category and os.path.isdir(os.path.join(input_directory, input_category)):
    if input_category and os.path.isdir(
        os.path.join(input_directory, input_category),
    ):
        logger.info(
            f'SEARCH: Found category directory {input_category} in input directory {input_directory}',
        )
        input_directory = os.path.join(input_directory, input_category)
        logger.info(f'SEARCH: Setting input_directory to {input_directory}')
    if input_name and os.path.isdir(os.path.join(input_directory, input_name)):
        logger.info(f'SEARCH: Found torrent directory {input_name} in input directory {input_directory}')
        logger.info(
            f'SEARCH: Found torrent directory {input_name} in input directory {input_directory}',
        )
        input_directory = os.path.join(input_directory, input_name)
        logger.info(f'SEARCH: Setting input_directory to {input_directory}')
        tordir = True
    elif input_name and os.path.isdir(os.path.join(input_directory, sanitize_name(input_name))):
    elif input_name and os.path.isdir(
        os.path.join(input_directory, sanitize_name(input_name)),
    ):
        logger.info(
            f'SEARCH: Found torrent directory {sanitize_name(input_name)} in input directory {input_directory}',
        )
        input_directory = os.path.join(input_directory, sanitize_name(input_name))
        input_directory = os.path.join(
            input_directory, sanitize_name(input_name),
        )
        logger.info(f'SEARCH: Setting input_directory to {input_directory}')
        tordir = True
    elif input_name and os.path.isfile(os.path.join(input_directory, input_name)):
        logger.info(f'SEARCH: Found torrent file {input_name} in input directory {input_directory}')
    elif input_name and os.path.isfile(
        os.path.join(input_directory, input_name),
    ):
        logger.info(
            f'SEARCH: Found torrent file {input_name} in input directory {input_directory}',
        )
        input_directory = os.path.join(input_directory, input_name)
        logger.info(f'SEARCH: Setting input_directory to {input_directory}')
        tordir = True
    elif input_name and os.path.isfile(os.path.join(input_directory, sanitize_name(input_name))):
    elif input_name and os.path.isfile(
        os.path.join(input_directory, sanitize_name(input_name)),
    ):
        logger.info(
            f'SEARCH: Found torrent file {sanitize_name(input_name)} in input directory {input_directory}',
        )
        input_directory = os.path.join(input_directory, sanitize_name(input_name))
        input_directory = os.path.join(
            input_directory, sanitize_name(input_name),
        )
        logger.info(f'SEARCH: Setting input_directory to {input_directory}')
        tordir = True
    elif input_name and os.path.isdir(input_directory):
        for file in os.listdir(input_directory):
            if os.path.splitext(file)[0] in [input_name, sanitize_name(input_name)]:
                logger.info(f'SEARCH: Found torrent file {file} in input directory {input_directory}')
            if os.path.splitext(file)[0] in [
                input_name,
                sanitize_name(input_name),
            ]:
                logger.info(
                    f'SEARCH: Found torrent file {file} in input directory {input_directory}',
                )
                input_directory = os.path.join(input_directory, file)
                logger.info(f'SEARCH: Setting input_directory to {input_directory}')
                logger.info(
                    f'SEARCH: Setting input_directory to {input_directory}',
                )
                input_name = file
                tordir = True
                break

    imdbid = [item for item in pathlist if '.cp(tt' in item]  # This looks for the .cp(tt imdb id in the path.
    imdbid = [
        item for item in pathlist if '.cp(tt' in item
    ]  # This looks for the .cp(tt imdb id in the path.
    if imdbid and '.cp(tt' not in input_name:
        input_name = imdbid[0]  # This ensures the imdb id is preserved and passed to CP
        input_name = imdbid[
            0
        ]  # This ensures the imdb id is preserved and passed to CP
        tordir = True

    if input_category and not tordir:
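One thing worth noting about the category detection above: list(set(pathlist) & set(categories))[-1] intersects unordered sets, so "last match" is last in arbitrary set order rather than last in the path. An order-preserving sketch of the same idea, with illustrative paths and categories:

import os

pathlist = os.path.normpath('/downloads/tv/anime/Show.S01E01').split(os.sep)
categories = {'tv', 'anime', 'movies'}

matches = [part for part in pathlist if part in categories]
print(matches[-1] if matches else '')  # -> 'anime', the deepest category in the path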
@@ -163,7 +210,9 @@ def category_search(input_directory, input_name, input_category, root, categories):
        index = pathlist.index(input_category)
        if index + 1 < len(pathlist):
            tordir = True
            logger.info(f'SEARCH: Found a unique directory {pathlist[index + 1]} in the category directory')
            logger.info(
                f'SEARCH: Found a unique directory {pathlist[index + 1]} in the category directory',
            )
            if not input_name:
                input_name = pathlist[index + 1]
    except ValueError:
@@ -171,7 +220,9 @@ def category_search(input_directory, input_name, input_category, root, categories):

    if input_name and not tordir:
        if input_name in pathlist or sanitize_name(input_name) in pathlist:
            logger.info(f'SEARCH: Found torrent directory {input_name} in the directory structure')
            logger.info(
                f'SEARCH: Found torrent directory {input_name} in the directory structure',
            )
            tordir = True
        else:
            root = 1
@@ -179,7 +230,11 @@ def category_search(input_directory, input_name, input_category, root, categories):
            root = 2

    if root > 0:
        logger.info('SEARCH: Could not find a unique directory for this download. Assume a common directory.')
        logger.info('SEARCH: We will try and determine which files to process, individually')
        logger.info(
            'SEARCH: Could not find a unique directory for this download. Assume a common directory.',
        )
        logger.info(
            'SEARCH: We will try and determine which files to process, individually',
        )

    return input_directory, input_name, input_category, root
@@ -24,35 +24,57 @@ def copy_link(src, target_link, use_link):
    logger.info(f'TARGET FOLDER: [{os.path.dirname(target_link)}]', 'COPYLINK')

    if src != target_link and os.path.exists(target_link):
        logger.info('MEDIAFILE already exists in the TARGET folder, skipping ...', 'COPYLINK')
        logger.info(
            'MEDIAFILE already exists in the TARGET folder, skipping ...',
            'COPYLINK',
        )
        return True
    elif src == target_link and os.path.isfile(target_link) and os.path.isfile(src):
        logger.info('SOURCE AND TARGET files are the same, skipping ...', 'COPYLINK')
    elif (
        src == target_link
        and os.path.isfile(target_link)
        and os.path.isfile(src)
    ):
        logger.info(
            'SOURCE AND TARGET files are the same, skipping ...', 'COPYLINK',
        )
        return True
    elif src == os.path.dirname(target_link):
        logger.info('SOURCE AND TARGET folders are the same, skipping ...', 'COPYLINK')
        logger.info(
            'SOURCE AND TARGET folders are the same, skipping ...', 'COPYLINK',
        )
        return True

    make_dir(os.path.dirname(target_link))
    try:
        if use_link == 'dir':
            logger.info('Directory linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK')
            logger.info(
                'Directory linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK',
            )
            linktastic.dirlink(src, target_link)
            return True
        if use_link == 'junction':
            logger.info('Directory junction linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK')
            logger.info(
                'Directory junction linking SOURCE FOLDER -> TARGET FOLDER',
                'COPYLINK',
            )
            linktastic.dirlink(src, target_link)
            return True
        elif use_link == 'hard':
            logger.info('Hard linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
            logger.info(
                'Hard linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK',
            )
            linktastic.link(src, target_link)
            return True
        elif use_link == 'sym':
            logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
            logger.info(
                'Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK',
            )
            linktastic.symlink(src, target_link)
            return True
        elif use_link == 'move-sym':
            logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
            logger.info(
                'Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK',
            )
            shutil.move(src, target_link)
            linktastic.symlink(target_link, src)
            return True
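For readers unfamiliar with linktastic, the modes dispatched above map roughly onto standard-library primitives as follows; this is a hedged sketch, not the project's implementation ('move-sym' moves the file, then leaves a symlink behind at the original location):

import os
import shutil

def make_link(src, dst, mode):
    if mode == 'hard':
        os.link(src, dst)      # second directory entry for the same inode
    elif mode == 'sym':
        os.symlink(src, dst)   # dst points at src
    elif mode == 'move-sym':
        shutil.move(src, dst)
        os.symlink(dst, src)   # src now points at the moved file
    else:
        shutil.copy(src, dst)  # fall back to a plain copy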
@@ -81,9 +103,16 @@ def replace_links(link, max_depth=10):

    if not link_depth:
        logger.debug(f'{link} is not a link')
    elif link_depth > max_depth or (link_depth == max_depth and islink(target)):
        logger.warning(f'Exceeded maximum depth {max_depth} while following link {link}')
    elif link_depth > max_depth or (
        link_depth == max_depth and islink(target)
    ):
        logger.warning(
            f'Exceeded maximum depth {max_depth} while following link {link}',
        )
    else:
        logger.info(f'Changing sym-link: {link} to point directly to file: {target}', 'COPYLINK')
        logger.info(
            f'Changing sym-link: {link} to point directly to file: {target}',
            'COPYLINK',
        )
        os.unlink(link)
        linktastic.symlink(target, link)
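A minimal sketch of the link-following that replace_links performs: walk the chain until a non-link target or the depth limit, then repoint the original symlink (helper name is illustrative):

import os

def resolve_chain(link, max_depth=10):
    target, depth = link, 0
    while os.path.islink(target) and depth < max_depth:
        # readlink may return a relative path; resolve it against the link's directory
        target = os.path.join(os.path.dirname(target), os.readlink(target))
        depth += 1
    return target, depth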
@@ -33,7 +33,9 @@ def clean_file_name(filename):
    space, but handles decimal numbers in string, for example:
    """
    filename = re.sub(r'(\D)\.(?!\s)(\D)', r'\1 \2', filename)
    filename = re.sub(r'(\d)\.(\d{4})', r'\1 \2', filename)  # if it ends in a year then don't keep the dot
    filename = re.sub(
        r'(\d)\.(\d{4})', r'\1 \2', filename,
    )  # if it ends in a year then don't keep the dot
    filename = re.sub(r'(\D)\.(?!\s)', r'\1 ', filename)
    filename = re.sub(r'\.(?!\s)(\D)', r' \1', filename)
    filename = filename.replace('_', ' ')
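The four substitutions above are easiest to follow on a concrete release name; tracing them by hand:

import re

name = 'Some.Movie.2019.1080p_x264'
name = re.sub(r'(\D)\.(?!\s)(\D)', r'\1 \2', name)  # dot between two non-digits
name = re.sub(r'(\d)\.(\d{4})', r'\1 \2', name)     # dot before a four-digit run
name = re.sub(r'(\D)\.(?!\s)', r'\1 ', name)        # remaining dot after a non-digit
name = re.sub(r'\.(?!\s)(\D)', r' \1', name)        # remaining dot before a non-digit
print(name.replace('_', ' '))  # -> 'Some Movie 2019 1080p x264'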
@@ -12,10 +12,7 @@ from core import logger

def make_wake_on_lan_packet(mac_address):
    """Build the Wake-On-LAN 'Magic Packet'."""
    address = (
        int(value, 16)
        for value in mac_address.split(':')
    )
    address = (int(value, 16) for value in mac_address.split(':'))
    fmt = 'BBBBBB'
    hardware_address = struct.pack(fmt, *address)
    broadcast_address = b'\xFF' * 6  # FF:FF:FF:FF:FF:FF
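The hunk cuts off before the packet is assembled; by the Wake-On-LAN convention the magic packet is six 0xFF bytes followed by the hardware address repeated sixteen times. A self-contained sketch of the whole function:

import struct

def magic_packet(mac_address):
    hardware_address = struct.pack(
        'BBBBBB', *(int(value, 16) for value in mac_address.split(':')),
    )
    return b'\xFF' * 6 + hardware_address * 16

print(len(magic_packet('00:11:22:33:44:55')))  # -> 102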
@@ -115,7 +112,9 @@ def find_download(client_agent, download_id):
        'value': download_id,
    }
    try:
        r = requests.get(url, params=params, verify=False, timeout=(30, 120))
        r = requests.get(
            url, params=params, verify=False, timeout=(30, 120),
        )
    except requests.ConnectionError:
        logger.error('Unable to open URL')
        return False  # failure
@@ -61,7 +61,11 @@ def parse_deluge(args):
    input_hash = args[1]
    input_id = args[1]
    try:
        input_category = core.TORRENT_CLASS.core.get_torrent_status(input_id, ['label']).get(b'label').decode()
        input_category = (
            core.TORRENT_CLASS.core.get_torrent_status(input_id, ['label'])
            .get(b'label')
            .decode()
        )
    except Exception:
        input_category = ''
    return input_directory, input_name, input_category, input_hash, input_id
@@ -85,10 +89,16 @@ def parse_synods(args):
    input_name = os.getenv('TR_TORRENT_NAME')
    input_hash = os.getenv('TR_TORRENT_HASH')
    if not input_name:  # No info passed. Assume manual download.
        return input_directory, input_name, input_category, input_hash, input_id
        return (
            input_directory,
            input_name,
            input_category,
            input_hash,
            input_id,
        )
    torrent_id = os.getenv('TR_TORRENT_ID')
    input_id = f'dbid_{torrent_id}'
    #res = core.TORRENT_CLASS.tasks_list(additional_param='detail')
    # res = core.TORRENT_CLASS.tasks_list(additional_param='detail')
    res = core.TORRENT_CLASS.tasks_info(input_id, additional_param='detail')
    logger.debug(f'result from syno {res}')
    if res['success']:
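The fluent chain in the parse_deluge hunk reflects Deluge returning bytes keys and values over its RPC; a stubbed illustration of why the .get(b'label').decode() dance (and the enclosing try/except) is needed:

status = {b'label': b'tv'}  # stand-in for get_torrent_status(input_id, ['label'])

label = status.get(b'label')
input_category = label.decode() if label else ''  # .decode() on None would raise
print(input_category)  # -> 'tv'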
@@ -105,7 +105,9 @@ def remove_read_only(filename):
    try:
        os.chmod(filename, stat.S_IWRITE)
    except Exception:
        logger.warning(f'Cannot change permissions of {filename}', logger.WARNING)
        logger.warning(
            f'Cannot change permissions of {filename}', logger.WARNING,
        )


def flatten_dir(destination, files):
@@ -129,7 +131,9 @@ def flatten_dir(destination, files):

def clean_directory(path, files):
    if not os.path.exists(path):
        logger.info(f'Directory {path} has been processed and removed ...', 'CLEANDIR')
        logger.info(
            f'Directory {path} has been processed and removed ...', 'CLEANDIR',
        )
        return

    if core.FORCE_CLEAN and not core.FAILED:
@@ -144,7 +148,9 @@ def clean_directory(path, files):
        )
        return

    logger.info(f'Directory {path} has been processed, removing ...', 'CLEANDIRS')
    logger.info(
        f'Directory {path} has been processed, removing ...', 'CLEANDIRS',
    )
    try:
        shutil.rmtree(path, onerror=onerror)
    except Exception:
@@ -20,7 +20,9 @@ if os.name == 'nt':
    class WindowsProcess:
        def __init__(self):
            self.mutex = None
            self.mutexname = 'nzbtomedia_{pid}'.format(pid=core.PID_FILE.replace('\\', '/'))  # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}'
            self.mutexname = 'nzbtomedia_{pid}'.format(
                pid=core.PID_FILE.replace('\\', '/'),
            )  # {D0E858DF-985E-4907-B7FB-8D732C3FC3B9}'
            self.CreateMutex = CreateMutex
            self.CloseHandle = CloseHandle
            self.GetLastError = GetLastError
@@ -62,7 +62,9 @@ class CheckVersion:
        force: if true the VERSION_NOTIFY setting will be ignored and a check will be forced
        """
        if not core.VERSION_NOTIFY and not force:
            logger.log('Version checking is disabled, not checking for the newest version')
            logger.log(
                'Version checking is disabled, not checking for the newest version',
            )
            return False

        logger.log(f'Checking if {self.install_type} needs an update')
@@ -118,8 +120,10 @@ class GitUpdateManager(UpdateManager):
        main_git = 'git'

        logger.log(
            'Checking if we can use git commands: {git} {cmd}'.format
            (git=main_git, cmd=test_cmd), logger.DEBUG,
            'Checking if we can use git commands: {git} {cmd}'.format(
                git=main_git, cmd=test_cmd,
            ),
            logger.DEBUG,
        )
        output, err, exit_status = self._run_git(main_git, test_cmd)

@@ -146,8 +150,10 @@ class GitUpdateManager(UpdateManager):

        for cur_git in alternative_git:
            logger.log(
                'Checking if we can use git commands: {git} {cmd}'.format
                (git=cur_git, cmd=test_cmd), logger.DEBUG,
                'Checking if we can use git commands: {git} {cmd}'.format(
                    git=cur_git, cmd=test_cmd,
                ),
                logger.DEBUG,
            )
            output, err, exit_status = self._run_git(cur_git, test_cmd)

@@ -172,7 +178,9 @@ class GitUpdateManager(UpdateManager):
        err = None

        if not git_path:
            logger.log('No git specified, can\'t use git commands', logger.DEBUG)
            logger.log(
                'No git specified, can\'t use git commands', logger.DEBUG,
            )
            exit_status = 1
            return output, err, exit_status

@@ -180,12 +188,18 @@ class GitUpdateManager(UpdateManager):

        try:
            logger.log(
                'Executing {cmd} with your shell in {directory}'.format
                (cmd=cmd, directory=core.APP_ROOT), logger.DEBUG,
                'Executing {cmd} with your shell in {directory}'.format(
                    cmd=cmd, directory=core.APP_ROOT,
                ),
                logger.DEBUG,
            )
            p = subprocess.Popen(
                cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                shell=True, cwd=core.APP_ROOT,
                cmd,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                shell=True,
                cwd=core.APP_ROOT,
            )
            output, err = p.communicate()
            exit_status = p.returncode
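A reduced sketch of the _run_git pattern shown above: run the command through the shell, merge stderr into stdout, and hand back output plus exit status (the function name and decoding are illustrative, not the class's API):

import subprocess

def run_git(args, cwd='.'):
    p = subprocess.Popen(
        f'git {args}',
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        shell=True,
        cwd=cwd,
    )
    output, _ = p.communicate()
    return output.decode(errors='replace').strip(), p.returncode

output, exit_status = run_git('rev-parse HEAD')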
@@ -207,14 +221,16 @@ class GitUpdateManager(UpdateManager):
            exit_status = 0
        elif core.LOG_GIT and exit_status in (1, 128):
            logger.log(
                '{cmd} returned : {output}'.format
                (cmd=cmd, output=output), logger.DEBUG,
                f'{cmd} returned : {output}',
                logger.DEBUG,
            )
        else:
            if core.LOG_GIT:
                logger.log(
                    '{cmd} returned : {output}, treat as error for now'.format
                    (cmd=cmd, output=output), logger.DEBUG,
                    '{cmd} returned : {output}, treat as error for now'.format(
                        cmd=cmd, output=output,
                    ),
                    logger.DEBUG,
                )
            exit_status = 1

@@ -228,12 +244,17 @@ class GitUpdateManager(UpdateManager):

        Returns: True for success or False for failure
        """
        output, err, exit_status = self._run_git(self._git_path, 'rev-parse HEAD')  # @UnusedVariable
        output, err, exit_status = self._run_git(
            self._git_path, 'rev-parse HEAD',
        )  # @UnusedVariable

        if exit_status == 0 and output:
            cur_commit_hash = output.strip()
            if not re.match('^[a-z0-9]+$', cur_commit_hash):
                logger.log('Output doesn\'t look like a hash, not using it', logger.ERROR)
                logger.log(
                    'Output doesn\'t look like a hash, not using it',
                    logger.ERROR,
                )
                return False
            self._cur_commit_hash = cur_commit_hash
        if self._cur_commit_hash:
@@ -244,7 +265,9 @@ class GitUpdateManager(UpdateManager):

    def _find_git_branch(self):
        core.NZBTOMEDIA_BRANCH = self.get_github_branch()
        branch_info, err, exit_status = self._run_git(self._git_path, 'symbolic-ref -q HEAD')  # @UnusedVariable
        branch_info, err, exit_status = self._run_git(
            self._git_path, 'symbolic-ref -q HEAD',
        )  # @UnusedVariable
        if exit_status == 0 and branch_info:
            branch = branch_info.strip().replace('refs/heads/', '', 1)
            if branch:
@@ -265,20 +288,30 @@ class GitUpdateManager(UpdateManager):
        self._num_commits_ahead = 0

        # get all new info from github
        output, err, exit_status = self._run_git(self._git_path, 'fetch origin')
        output, err, exit_status = self._run_git(
            self._git_path, 'fetch origin',
        )

        if not exit_status == 0:
            logger.log('Unable to contact github, can\'t check for update', logger.ERROR)
            logger.log(
                'Unable to contact github, can\'t check for update',
                logger.ERROR,
            )
            return

        # get latest commit_hash from remote
        output, err, exit_status = self._run_git(self._git_path, 'rev-parse --verify --quiet \'@{upstream}\'')
        output, err, exit_status = self._run_git(
            self._git_path, 'rev-parse --verify --quiet \'@{upstream}\'',
        )

        if exit_status == 0 and output:
            cur_commit_hash = output.strip()

            if not re.match('^[a-z0-9]+$', cur_commit_hash):
                logger.log('Output doesn\'t look like a hash, not using it', logger.DEBUG)
                logger.log(
                    'Output doesn\'t look like a hash, not using it',
                    logger.DEBUG,
                )
                return

        else:
@@ -288,7 +321,9 @@ class GitUpdateManager(UpdateManager):
            return

        # get number of commits behind and ahead (option --count not supported git < 1.7.2)
        output, err, exit_status = self._run_git(self._git_path, 'rev-list --left-right \'@{upstream}\'...HEAD')
        output, err, exit_status = self._run_git(
            self._git_path, 'rev-list --left-right \'@{upstream}\'...HEAD',
        )

        if exit_status == 0 and output:

@@ -297,35 +332,47 @@ class GitUpdateManager(UpdateManager):
            self._num_commits_ahead = int(output.count('>'))

        except Exception:
            logger.log('git didn\'t return numbers for behind and ahead, not using it', logger.DEBUG)
            logger.log(
                'git didn\'t return numbers for behind and ahead, not using it',
                logger.DEBUG,
            )
            return

        logger.log(
            'cur_commit = {current} % (newest_commit)= {new}, '
            'num_commits_behind = {x}, num_commits_ahead = {y}'.format
            (
                current=self._cur_commit_hash, new=self._newest_commit_hash,
                x=self._num_commits_behind, y=self._num_commits_ahead,
            ), logger.DEBUG,
            'num_commits_behind = {x}, num_commits_ahead = {y}'.format(
                current=self._cur_commit_hash,
                new=self._newest_commit_hash,
                x=self._num_commits_behind,
                y=self._num_commits_ahead,
            ),
            logger.DEBUG,
        )

    def set_newest_text(self):
        if self._num_commits_ahead:
            logger.log(
                'Local branch is ahead of {branch}. Automatic update not possible.'.format
                (branch=self.branch), logger.ERROR,
                'Local branch is ahead of {branch}. Automatic update not possible.'.format(
                    branch=self.branch,
                ),
                logger.ERROR,
            )
        elif self._num_commits_behind:
            logger.log(
                'There is a newer version available (you\'re {x} commit{s} behind)'.format
                (x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''), logger.MESSAGE,
                'There is a newer version available (you\'re {x} commit{s} behind)'.format(
                    x=self._num_commits_behind,
                    s='s' if self._num_commits_behind > 1 else '',
                ),
                logger.MESSAGE,
            )
        else:
            return

    def need_update(self):
        if not self._find_installed_version():
            logger.error('Unable to determine installed version via git, please check your logs!')
            logger.error(
                'Unable to determine installed version via git, please check your logs!',
            )
            return False

        if not self._cur_commit_hash:
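How the behind/ahead counters in this hunk are derived: git rev-list --left-right '@{upstream}'...HEAD prefixes upstream-only commits with '<' and local-only commits with '>', so counting the two characters gives the two numbers. With a canned output string:

output = '<aaa111\n<bbb222\n>ccc333\n'

num_commits_behind = int(output.count('<'))  # -> 2
num_commits_ahead = int(output.count('>'))   # -> 1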
@@ -334,7 +381,10 @@ class GitUpdateManager(UpdateManager):
        try:
            self._check_github_for_update()
        except Exception as error:
            logger.log(f'Unable to contact github, can\'t check for update: {error!r}', logger.ERROR)
            logger.log(
                f'Unable to contact github, can\'t check for update: {error!r}',
                logger.ERROR,
            )
            return False

        if self._num_commits_behind > 0:
@@ -349,7 +399,9 @@ class GitUpdateManager(UpdateManager):
        Calls git pull origin <branch> in order to update Sick Beard.
        Returns a bool depending on the call's success.
        """
        output, err, exit_status = self._run_git(self._git_path, f'pull origin {self.branch}')  # @UnusedVariable
        output, err, exit_status = self._run_git(
            self._git_path, f'pull origin {self.branch}',
        )  # @UnusedVariable

        if exit_status == 0:
            return True
@@ -379,7 +431,9 @@ class SourceUpdateManager(UpdateManager):
            with open(version_file) as fp:
                self._cur_commit_hash = fp.read().strip(' \n\r')
        except OSError as error:
            logger.log(f'Unable to open \'version.txt\': {error}', logger.DEBUG)
            logger.log(
                f'Unable to open \'version.txt\': {error}', logger.DEBUG,
            )

        if not self._cur_commit_hash:
            self._cur_commit_hash = None
@@ -393,7 +447,10 @@ class SourceUpdateManager(UpdateManager):
        try:
            self._check_github_for_update()
        except Exception as error:
            logger.log(f'Unable to contact github, can\'t check for update: {error!r}', logger.ERROR)
            logger.log(
                f'Unable to contact github, can\'t check for update: {error!r}',
                logger.ERROR,
            )
            return False

        if not self._cur_commit_hash or self._num_commits_behind > 0:
@@ -414,14 +471,20 @@ class SourceUpdateManager(UpdateManager):
        self._num_commits_behind = 0
        self._newest_commit_hash = None

        gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
        gh = github.GitHub(
            self.github_repo_user, self.github_repo, self.branch,
        )

        # try to get newest commit hash and commits behind directly by comparing branch and current commit
        if self._cur_commit_hash:
            branch_compared = gh.compare(base=self.branch, head=self._cur_commit_hash)
            branch_compared = gh.compare(
                base=self.branch, head=self._cur_commit_hash,
            )

            if 'base_commit' in branch_compared:
                self._newest_commit_hash = branch_compared['base_commit']['sha']
                self._newest_commit_hash = branch_compared['base_commit'][
                    'sha'
                ]

            if 'behind_by' in branch_compared:
                self._num_commits_behind = int(branch_compared['behind_by'])
@@ -442,8 +505,12 @@ class SourceUpdateManager(UpdateManager):
            self._num_commits_behind += 1

        logger.log(
            'cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}'.format
            (current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG,
            'cur_commit = {current} % (newest_commit)= {new}, num_commits_behind = {x}'.format(
                current=self._cur_commit_hash,
                new=self._newest_commit_hash,
                x=self._num_commits_behind,
            ),
            logger.DEBUG,
        )

    def set_newest_text(self):
@@ -452,19 +519,29 @@ class SourceUpdateManager(UpdateManager):
        core.NEWEST_VERSION_STRING = None

        if not self._cur_commit_hash:
            logger.log('Unknown current version number, don\'t know if we should update or not', logger.ERROR)
            logger.log(
                'Unknown current version number, don\'t know if we should update or not',
                logger.ERROR,
            )
        elif self._num_commits_behind > 0:
            logger.log(
                'There is a newer version available (you\'re {x} commit{s} behind)'.format
                (x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''), logger.MESSAGE,
                'There is a newer version available (you\'re {x} commit{s} behind)'.format(
                    x=self._num_commits_behind,
                    s='s' if self._num_commits_behind > 1 else '',
                ),
                logger.MESSAGE,
            )
        else:
            return

    def update(self):
        """Download and install latest source tarball from github."""
        tar_download_url = 'https://github.com/{org}/{repo}/tarball/{branch}'.format(
            org=self.github_repo_user, repo=self.github_repo, branch=self.branch,
        tar_download_url = (
            'https://github.com/{org}/{repo}/tarball/{branch}'.format(
                org=self.github_repo_user,
                repo=self.github_repo,
                branch=self.branch,
            )
        )
        version_path = os.path.join(core.APP_ROOT, 'version.txt')

@@ -473,28 +550,38 @@ class SourceUpdateManager(UpdateManager):
            sb_update_dir = os.path.join(core.APP_ROOT, 'sb-update')

            if os.path.isdir(sb_update_dir):
                logger.log(f'Clearing out update folder {sb_update_dir} before extracting')
                logger.log(
                    f'Clearing out update folder {sb_update_dir} before extracting',
                )
                shutil.rmtree(sb_update_dir)

            logger.log(f'Creating update folder {sb_update_dir} before extracting')
            logger.log(
                f'Creating update folder {sb_update_dir} before extracting',
            )
            os.makedirs(sb_update_dir)

            # retrieve file
            logger.log(f'Downloading update from {tar_download_url!r}')
            tar_download_path = os.path.join(sb_update_dir, 'nzbtomedia-update.tar')
            tar_download_path = os.path.join(
                sb_update_dir, 'nzbtomedia-update.tar',
            )
            urlretrieve(tar_download_url, tar_download_path)

            if not os.path.isfile(tar_download_path):
                logger.log(
                    'Unable to retrieve new version from {url}, can\'t update'.format
                    (url=tar_download_url), logger.ERROR,
                    'Unable to retrieve new version from {url}, can\'t update'.format(
                        url=tar_download_url,
                    ),
                    logger.ERROR,
                )
                return False

            if not tarfile.is_tarfile(tar_download_path):
                logger.log(
                    'Retrieved version from {url} is corrupt, can\'t update'.format
                    (url=tar_download_url), logger.ERROR,
                    'Retrieved version from {url} is corrupt, can\'t update'.format(
                        url=tar_download_url,
                    ),
                    logger.ERROR,
                )
                return False
@@ -510,21 +597,28 @@ class SourceUpdateManager(UpdateManager):

            # find update dir name
            update_dir_contents = [
                x for x in os.listdir(sb_update_dir) if
                os.path.isdir(os.path.join(sb_update_dir, x))
                x
                for x in os.listdir(sb_update_dir)
                if os.path.isdir(os.path.join(sb_update_dir, x))
            ]
            if len(update_dir_contents) != 1:
                logger.log(f'Invalid update data, update failed: {update_dir_contents}', logger.ERROR)
                logger.log(
                    f'Invalid update data, update failed: {update_dir_contents}',
                    logger.ERROR,
                )
                return False
            content_dir = os.path.join(sb_update_dir, update_dir_contents[0])

            # walk temp folder and move files to main folder
            logger.log(
                'Moving files from {source} to {destination}'.format
                (source=content_dir, destination=core.APP_ROOT),
                'Moving files from {source} to {destination}'.format(
                    source=content_dir, destination=core.APP_ROOT,
                ),
            )
            for dirname, _, filenames in os.walk(content_dir):  # @UnusedVariable
                dirname = dirname[len(content_dir) + 1:]
            for dirname, _, filenames in os.walk(
                content_dir,
            ):  # @UnusedVariable
                dirname = dirname[len(content_dir) + 1 :]
                for curfile in filenames:
                    old_path = os.path.join(content_dir, dirname, curfile)
                    new_path = os.path.join(core.APP_ROOT, dirname, curfile)
@@ -539,10 +633,14 @@ class SourceUpdateManager(UpdateManager):
                    os.renames(old_path, new_path)
                except Exception as error:
                    logger.log(
                        'Unable to update {path}: {msg}'.format
                        (path=new_path, msg=error), logger.DEBUG,
                        'Unable to update {path}: {msg}'.format(
                            path=new_path, msg=error,
                        ),
                        logger.DEBUG,
                    )
                    os.remove(old_path)  # Trash the updated file without moving in new path
                    os.remove(
                        old_path,
                    )  # Trash the updated file without moving in new path
                    continue

                if os.path.isfile(new_path):
@@ -555,15 +653,17 @@ class SourceUpdateManager(UpdateManager):
                        ver_file.write(self._newest_commit_hash)
                except OSError as error:
                    logger.log(
                        'Unable to write version file, update not complete: {msg}'.format
                        (msg=error), logger.ERROR,
                        'Unable to write version file, update not complete: {msg}'.format(
                            msg=error,
                        ),
                        logger.ERROR,
                    )
                    return False

        except Exception as error:
            logger.log(
                'Error while trying to update: {msg}'.format
                (msg=error), logger.ERROR,
                f'Error while trying to update: {error}',
                logger.ERROR,
            )
            logger.log(f'Traceback: {traceback.format_exc()}', logger.DEBUG)
            return False
@@ -1,9 +1,12 @@
babelfish
beets
configobj
deluge-client@git+https://github.com/labrys/deluge.git@master
guessit
jaraco-windows ; sys.platform == 'win32'
linktastic
python-qbittorrent
pywin32 ; sys.platform == 'win32'
pyxdg
rencode
requests
@@ -11,9 +14,6 @@ requests_oauthlib
setuptools
six
subliminal != 2.1.0
transmissionrpc
deluge-client@git+https://github.com/labrys/deluge.git@master
syno@git+https://github.com/labrys/syno.git@master
transmissionrpc
utorrent@git+https://github.com/labrys/utorrent.git@master
jaraco-windows ; sys.platform == 'win32'
pywin32 ; sys.platform == 'win32'
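The `; sys.platform == 'win32'` suffixes kept in the reordered file are environment markers: pip evaluates them at install time and skips the requirement when they are false. PEP 508's canonical spelling is sys_platform (the dotted form used here is the older alias). A quick check of such a marker on the current interpreter, assuming the packaging library is installed:

from packaging.markers import Marker

marker = Marker("sys_platform == 'win32'")
print(marker.evaluate())  # False on Linux/macOS, True on Windows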
tests/test_initialize.py → tests/initialize_test.py (Executable file → Normal file)
tests/test_transcoder.py → tests/transcoder_test.py (Executable file → Normal file)