Merge pull request #1526 from clinton-hall/dev

Merge dev to master
This commit is contained in:
Labrys of Knossos 2019-01-15 18:44:43 -05:00 committed by GitHub
commit 4896848099
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 210 additions and 166 deletions

View file

@ -1,5 +1,5 @@
[bumpversion] [bumpversion]
current_version = 12.0.6 current_version = 12.0.7
commit = True commit = True
tag = False tag = False

View file

@ -1,4 +1,4 @@
nzbToMedia v12.0.6 nzbToMedia v12.0.7
================== ==================
Provides an [efficient](https://github.com/clinton-hall/nzbToMedia/wiki/Efficient-on-demand-post-processing) way to handle postprocessing for [CouchPotatoServer](https://couchpota.to/ "CouchPotatoServer") and [SickBeard](http://sickbeard.com/ "SickBeard") (and its [forks](https://github.com/clinton-hall/nzbToMedia/wiki/Failed-Download-Handling-%28FDH%29#sick-beard-and-its-forks)) Provides an [efficient](https://github.com/clinton-hall/nzbToMedia/wiki/Efficient-on-demand-post-processing) way to handle postprocessing for [CouchPotatoServer](https://couchpota.to/ "CouchPotatoServer") and [SickBeard](http://sickbeard.com/ "SickBeard") (and its [forks](https://github.com/clinton-hall/nzbToMedia/wiki/Failed-Download-Handling-%28FDH%29#sick-beard-and-its-forks))

View file

@ -1,5 +1,13 @@
Change_LOG / History Change_LOG / History
V12.0.7
Refactor utils
Fix git subprocess
Fix cleanup script output
Add extra logging for fork detection
Additional code clean up
V12.0.6 V12.0.6
Hotfix for Manual Torrent run results. Hotfix for Manual Torrent run results.

View file

@ -37,6 +37,13 @@ class WorkingDirectory(object):
def module_path(module=__file__, parent=False): def module_path(module=__file__, parent=False):
"""
Detect path for a module.
:param module: The module whose path is being detected. Defaults to current module.
:param parent: True to return the parent folder of the current module.
:return: The absolute normalized path to the module or its parent.
"""
try: try:
path = module.__file__ path = module.__file__
except AttributeError: except AttributeError:
@ -122,6 +129,12 @@ def clean_folders(*paths):
def force_clean_folder(path, required): def force_clean_folder(path, required):
"""
Force clean a folder and exclude any required subfolders.
:param path: Target folder to remove subfolders
:param required: Keep only the required subfolders
"""
root, dirs, files = next(os.walk(path)) root, dirs, files = next(os.walk(path))
required = sorted(required) required = sorted(required)
if required: if required:
@ -138,6 +151,11 @@ def force_clean_folder(path, required):
def clean(paths): def clean(paths):
"""Clean up bytecode and obsolete folders.""" """Clean up bytecode and obsolete folders."""
def _report_error(msg):
print('WARNING: Automatic cleanup could not be executed.')
print(' If errors occur, manual cleanup may be required.')
print('REASON : {}'.format(msg))
with WorkingDirectory(module_path()) as cwd: with WorkingDirectory(module_path()) as cwd:
if cwd.working_directory != cwd.original_directory: if cwd.working_directory != cwd.original_directory:
print('Changing to directory:', cwd.working_directory) print('Changing to directory:', cwd.working_directory)
@ -146,7 +164,7 @@ def clean(paths):
try: try:
result = clean_bytecode() result = clean_bytecode()
except SystemExit as error: except SystemExit as error:
print(error) _report_error(error)
else: else:
print(result or 'No bytecode to clean') print(result or 'No bytecode to clean')
@ -155,7 +173,7 @@ def clean(paths):
try: try:
result = clean_folders(*paths) result = clean_folders(*paths)
except SystemExit as error: except SystemExit as error:
print(error) _report_error(error)
else: else:
print(result or 'No folders to clean\n') print(result or 'No folders to clean\n')
else: else:
@ -163,7 +181,7 @@ def clean(paths):
try: try:
items = paths.items() items = paths.items()
except AttributeError: except AttributeError:
print('Failed to clean, no subfolder structure given') _report_error('Failed to clean, no subfolder structure given')
else: else:
for folder, subfolders in items: for folder, subfolders in items:
print('\nForce cleaning folder:', folder) print('\nForce cleaning folder:', folder)

View file

@ -46,13 +46,31 @@ from six.moves import reload_module
from core import logger, main_db, version_check, databases, transcoder from core import logger, main_db, version_check, databases, transcoder
from core.configuration import config from core.configuration import config
from core.utils import ( from core.utils import (
RunningProcess, wake_up, category_search, clean_dir, clean_dir, copy_link, RunningProcess,
create_torrent_class, extract_files, flatten, get_dirs, get_download_info, category_search,
list_media_files, make_dir, parse_args, pause_torrent, remove_torrent, clean_dir,
resume_torrent, remove_dir, remove_read_only, sanitize_name, update_download_info_status, copy_link,
create_torrent_class,
extract_files,
flatten,
get_dirs,
get_download_info,
list_media_files,
make_dir,
parse_args,
pause_torrent,
rchmod,
remove_dir,
remove_read_only,
remove_torrent,
restart,
resume_torrent,
sanitize_name,
update_download_info_status,
wake_up,
) )
__version__ = '12.0.6' __version__ = '12.0.7'
# Client Agents # Client Agents
NZB_CLIENTS = ['sabnzbd', 'nzbget', 'manual'] NZB_CLIENTS = ['sabnzbd', 'nzbget', 'manual']
@ -846,36 +864,3 @@ def initialize(section=None):
# finished initalizing # finished initalizing
return True return True
def restart():
    """Re-launch nzbToMedia as a child process, wait for it, then exit with its return code.

    Only 'git' and 'source' install types are re-launched; any other install
    type falls through and exits with status 0.
    """
    install_type = version_check.CheckVersion().install_type
    status = 0
    popen_list = []
    if install_type in ('git', 'source'):
        popen_list = [sys.executable, APP_FILENAME]
    if popen_list:
        # Re-run with the same command-line arguments the current process received.
        popen_list += SYS_ARGV
        logger.log(u'Restarting nzbToMedia with {args}'.format(args=popen_list))
        # Close log handles before spawning so the child can reopen them cleanly.
        logger.close()
        p = subprocess.Popen(popen_list, cwd=os.getcwd())
        p.wait()
        status = p.returncode
    # os._exit skips atexit/cleanup handlers; the child is now the live instance.
    os._exit(status)
def rchmod(path, mod):
    """Recursively change the mode of *path* and, if it is a directory, everything beneath it.

    :param path: File or directory whose mode is changed.
    :param mod: Numeric mode passed straight to ``os.chmod`` (logged in octal).
    """
    logger.log('Changing file mode of {0} to {1}'.format(path, oct(mod)))
    os.chmod(path, mod)
    if not os.path.isdir(path):
        return  # Skip files
    # Walk the tree and apply the same mode to every subdirectory and file.
    for root, dirs, files in os.walk(path):
        for d in dirs:
            os.chmod(os.path.join(root, d), mod)
        for f in files:
            os.chmod(os.path.join(root, f), mod)

View file

@ -1,15 +1,9 @@
# coding=utf-8 # coding=utf-8
from __future__ import print_function, unicode_literals
import os
import requests import requests
from six import text_type
import core
from core import logger
from core.utils import shutil_custom from core.utils import shutil_custom
from core.utils.common import clean_dir, flatten, get_dirs, process_dir
from core.utils.download_info import get_download_info, update_download_info_status from core.utils.download_info import get_download_info, update_download_info_status
from core.utils.encoding import char_replace, convert_to_ascii from core.utils.encoding import char_replace, convert_to_ascii
from core.utils.files import ( from core.utils.files import (
@ -24,7 +18,7 @@ from core.utils.files import (
from core.utils.identification import category_search, find_imdbid from core.utils.identification import category_search, find_imdbid
from core.utils.links import copy_link, replace_links from core.utils.links import copy_link, replace_links
from core.utils.naming import clean_file_name, is_sample, sanitize_name from core.utils.naming import clean_file_name, is_sample, sanitize_name
from core.utils.network import find_download, test_connection, wake_on_lan, wake_up, server_responding from core.utils.network import find_download, server_responding, test_connection, wake_on_lan, wake_up
from core.utils.notifications import plex_update from core.utils.notifications import plex_update
from core.utils.nzbs import get_nzoid, report_nzb from core.utils.nzbs import get_nzoid, report_nzb
from core.utils.parsers import ( from core.utils.parsers import (
@ -43,126 +37,15 @@ from core.utils.paths import (
get_dir_size, get_dir_size,
make_dir, make_dir,
onerror, onerror,
rchmod,
remote_dir, remote_dir,
remove_dir, remove_dir,
remove_empty_folders, remove_empty_folders,
remove_read_only, remove_read_only,
) )
from core.utils.processes import RunningProcess from core.utils.processes import RunningProcess, restart
from core.utils.subtitles import import_subs from core.utils.subtitles import import_subs
from core.utils.torrents import create_torrent_class, pause_torrent, remove_torrent, resume_torrent from core.utils.torrents import create_torrent_class, pause_torrent, remove_torrent, resume_torrent
try:
import jaraco
except ImportError:
if os.name == 'nt':
raise
requests.packages.urllib3.disable_warnings() requests.packages.urllib3.disable_warnings()
shutil_custom.monkey_patch() shutil_custom.monkey_patch()
def flatten(output_destination):
    """Flatten the media files found under *output_destination* into its top level."""
    media = list_media_files(output_destination)
    return flatten_dir(output_destination, media)
def clean_dir(path, section, subsection):
    """Clean *path* of leftovers, honouring the section/subsection size and ignore settings."""
    settings = dict(core.CFG[section][subsection])
    minimum_size = int(settings.get('minSize', 0))
    ignored = int(settings.get('delete_ignored', 0))
    try:
        files = list_media_files(path, min_size=minimum_size, delete_ignored=ignored)
    except Exception:
        # Best effort: an unreadable folder is treated as holding no media files.
        files = []
    return clean_directory(path, files)
def process_dir(path, link):
    """Return subfolders of *path* that are ready for post-processing.

    Loose media files directly under *path* are first moved into their own
    folders (via ``move_file``); then every non-empty subfolder without active
    sync files is collected.

    :param path: Directory to scan for media to post-process.
    :param link: Link mode forwarded to ``move_file`` — presumably 'hard'/'move'
        style, matching ``get_dirs``'s default; confirm against move_file.
    :return: List of subfolder paths to process.
    """
    folders = []
    logger.info('Searching {0} for mediafiles to post-process ...'.format(path))
    dir_contents = os.listdir(text_type(path))
    # search for single files and move them into their own folder for post-processing
    # Generate list of sync files (partial downloads still being synced)
    sync_files = (
        item for item in dir_contents
        if os.path.splitext(item)[1] in ['.!sync', '.bts']
    )
    # Generate a list of file paths, skipping Windows thumbnail caches
    filepaths = (
        os.path.join(path, item) for item in dir_contents
        if item not in ['Thumbs.db', 'thumbs.db']
    )
    # Generate a list of media files (lazy; consumes filepaths when iterated)
    mediafiles = (
        item for item in filepaths
        if os.path.isfile(item)
    )
    if any(sync_files):
        # Sync in progress: leave loose files alone. NOTE(review): logs an
        # empty line only — possibly a placeholder message; confirm intent.
        logger.info('')
    else:
        for mediafile in mediafiles:
            try:
                move_file(mediafile, path, link)
            except Exception as e:
                logger.error('Failed to move {0} to its own directory: {1}'.format(os.path.split(mediafile)[1], e))
    # removeEmptyFolders(path, removeRoot=False)
    # Generate all path contents (re-listed, so folders created above are seen)
    path_contents = (
        os.path.join(path, item)
        for item in os.listdir(text_type(path))
    )
    # Generate all directories from path contents
    # NOTE(review): the loop variable shadows the *path* parameter; harmless
    # here since the parameter is not used afterwards, but fragile.
    directories = (
        path for path in path_contents
        if os.path.isdir(path)
    )
    for directory in directories:
        dir_contents = os.listdir(directory)
        sync_files = (
            item for item in dir_contents
            if os.path.splitext(item)[1] in ['.!sync', '.bts']
        )
        # Skip empty folders and folders that are still syncing.
        if not any(dir_contents) or any(sync_files):
            continue
        folders.append(directory)
    return folders
def get_dirs(section, subsection, link='hard'):
    """Collect unique folders to post-process from the watch (and optionally output) directories."""
    found = []
    watch_directory = core.CFG[section][subsection]['watch_dir']
    candidate = os.path.join(watch_directory, subsection)
    if not os.path.exists(candidate):
        # Fall back to the watch dir itself when no per-category subfolder exists.
        candidate = watch_directory
    try:
        found.extend(process_dir(candidate, link))
    except Exception as e:
        logger.error('Failed to add directories from {0} for post-processing: {1}'.format(watch_directory, e))
    if core.USELINK == 'move':
        try:
            output_directory = os.path.join(core.OUTPUTDIRECTORY, subsection)
            if os.path.exists(output_directory):
                found.extend(process_dir(output_directory, link))
        except Exception as e:
            logger.error('Failed to add directories from {0} for post-processing: {1}'.format(core.OUTPUTDIRECTORY, e))
    if not found:
        logger.debug('No directories identified in {0}:{1} for post-processing'.format(section, subsection))
    return list(set(found))

114
core/utils/common.py Normal file
View file

@ -0,0 +1,114 @@
import os.path
from six import text_type
import core
from core import logger
from core.utils.files import list_media_files, move_file
from core.utils.paths import clean_directory, flatten_dir
def flatten(output_destination):
    """Flatten the media files found under *output_destination* into its top level."""
    media = list_media_files(output_destination)
    return flatten_dir(output_destination, media)
def clean_dir(path, section, subsection):
    """Clean *path* of leftovers, honouring the section/subsection size and ignore settings."""
    settings = dict(core.CFG[section][subsection])
    minimum_size = int(settings.get('minSize', 0))
    ignored = int(settings.get('delete_ignored', 0))
    try:
        files = list_media_files(path, min_size=minimum_size, delete_ignored=ignored)
    except Exception:
        # Best effort: an unreadable folder is treated as holding no media files.
        files = []
    return clean_directory(path, files)
def process_dir(path, link):
    """Return subfolders of *path* that are ready for post-processing.

    Loose media files directly under *path* are first moved into their own
    folders (via ``move_file``); then every non-empty subfolder without active
    sync files is collected.

    :param path: Directory to scan for media to post-process.
    :param link: Link mode forwarded to ``move_file`` — presumably 'hard'/'move'
        style, matching ``get_dirs``'s default; confirm against move_file.
    :return: List of subfolder paths to process.
    """
    folders = []
    logger.info('Searching {0} for mediafiles to post-process ...'.format(path))
    dir_contents = os.listdir(text_type(path))
    # search for single files and move them into their own folder for post-processing
    # Generate list of sync files (partial downloads still being synced)
    sync_files = (
        item for item in dir_contents
        if os.path.splitext(item)[1] in ['.!sync', '.bts']
    )
    # Generate a list of file paths, skipping Windows thumbnail caches
    filepaths = (
        os.path.join(path, item) for item in dir_contents
        if item not in ['Thumbs.db', 'thumbs.db']
    )
    # Generate a list of media files (lazy; consumes filepaths when iterated)
    mediafiles = (
        item for item in filepaths
        if os.path.isfile(item)
    )
    if any(sync_files):
        # Sync in progress: leave loose files alone. NOTE(review): logs an
        # empty line only — possibly a placeholder message; confirm intent.
        logger.info('')
    else:
        for mediafile in mediafiles:
            try:
                move_file(mediafile, path, link)
            except Exception as e:
                logger.error('Failed to move {0} to its own directory: {1}'.format(os.path.split(mediafile)[1], e))
    # removeEmptyFolders(path, removeRoot=False)
    # Generate all path contents (re-listed, so folders created above are seen)
    path_contents = (
        os.path.join(path, item)
        for item in os.listdir(text_type(path))
    )
    # Generate all directories from path contents
    # NOTE(review): the loop variable shadows the *path* parameter; harmless
    # here since the parameter is not used afterwards, but fragile.
    directories = (
        path for path in path_contents
        if os.path.isdir(path)
    )
    for directory in directories:
        dir_contents = os.listdir(directory)
        sync_files = (
            item for item in dir_contents
            if os.path.splitext(item)[1] in ['.!sync', '.bts']
        )
        # Skip empty folders and folders that are still syncing.
        if not any(dir_contents) or any(sync_files):
            continue
        folders.append(directory)
    return folders
def get_dirs(section, subsection, link='hard'):
    """Collect unique folders to post-process from the watch (and optionally output) directories."""
    found = []
    watch_directory = core.CFG[section][subsection]['watch_dir']
    candidate = os.path.join(watch_directory, subsection)
    if not os.path.exists(candidate):
        # Fall back to the watch dir itself when no per-category subfolder exists.
        candidate = watch_directory
    try:
        found.extend(process_dir(candidate, link))
    except Exception as e:
        logger.error('Failed to add directories from {0} for post-processing: {1}'.format(watch_directory, e))
    if core.USELINK == 'move':
        try:
            output_directory = os.path.join(core.OUTPUTDIRECTORY, subsection)
            if os.path.exists(output_directory):
                found.extend(process_dir(output_directory, link))
        except Exception as e:
            logger.error('Failed to add directories from {0} for post-processing: {1}'.format(core.OUTPUTDIRECTORY, e))
    if not found:
        logger.debug('No directories identified in {0}:{1} for post-processing'.format(section, subsection))
    return list(set(found))

View file

@ -149,3 +149,16 @@ def clean_directory(path, files):
shutil.rmtree(path, onerror=onerror) shutil.rmtree(path, onerror=onerror)
except Exception: except Exception:
logger.error('Unable to delete directory {0}'.format(path)) logger.error('Unable to delete directory {0}'.format(path))
def rchmod(path, mod):
    """Recursively change the mode of *path* and, if it is a directory, everything beneath it.

    :param path: File or directory whose mode is changed.
    :param mod: Numeric mode passed straight to ``os.chmod`` (logged in octal).
    """
    logger.log('Changing file mode of {0} to {1}'.format(path, oct(mod)))
    os.chmod(path, mod)
    if not os.path.isdir(path):
        return  # Nothing more to do for a plain file.
    # Apply the same mode to every entry in the tree, directories first per level.
    for root, dirs, files in os.walk(path):
        for entry in dirs + files:
            os.chmod(os.path.join(root, entry), mod)

View file

@ -1,7 +1,10 @@
import os import os
import socket import socket
import subprocess
import sys
import core import core
from core import logger, version_check, APP_FILENAME, SYS_ARGV
if os.name == 'nt': if os.name == 'nt':
from win32event import CreateMutex from win32event import CreateMutex
@ -90,3 +93,23 @@ if os.name == 'nt':
RunningProcess = WindowsProcess RunningProcess = WindowsProcess
else: else:
RunningProcess = PosixProcess RunningProcess = PosixProcess
def restart():
    """Re-launch nzbToMedia as a child process, wait for it, then exit with its return code.

    Only 'git' and 'source' install types are re-launched; any other install
    type falls through and exits with status 0.
    """
    install_type = version_check.CheckVersion().install_type
    status = 0
    popen_list = []
    if install_type in ('git', 'source'):
        popen_list = [sys.executable, APP_FILENAME]
    if popen_list:
        # Re-run with the same command-line arguments the current process received.
        popen_list += SYS_ARGV
        logger.log(u'Restarting nzbToMedia with {args}'.format(args=popen_list))
        # Close log handles before spawning so the child can reopen them cleanly.
        logger.close()
        p = subprocess.Popen(popen_list, cwd=os.getcwd())
        p.wait()
        status = p.returncode
    # os._exit skips atexit/cleanup handlers; the child is now the live instance.
    os._exit(status)

View file

@ -116,7 +116,7 @@ class GitUpdateManager(UpdateManager):
test_cmd = 'version' test_cmd = 'version'
if core.GIT_PATH: if core.GIT_PATH:
main_git = '\'{git}\''.format(git=core.GIT_PATH) main_git = '"{git}"'.format(git=core.GIT_PATH)
else: else:
main_git = 'git' main_git = 'git'

View file

@ -18,7 +18,7 @@ def read(*names, **kwargs):
setup( setup(
name='nzbToMedia', name='nzbToMedia',
version='12.0.6', version='12.0.7',
license='GPLv3', license='GPLv3',
description='Efficient on demand post processing', description='Efficient on demand post processing',
long_description=""" long_description="""