Merge pull request #1594 from clinton-hall/quality/flake8

Quality/flake8
This commit is contained in:
Labrys of Knossos 2019-04-05 17:52:56 -04:00 committed by GitHub
commit 627b453d3b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 89 additions and 73 deletions

View file

@ -108,7 +108,7 @@ FORKS = {
FORK_SICKCHILL: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'force_next': None}, FORK_SICKCHILL: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'force_next': None},
FORK_SICKBEARD_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete': None, 'force_next': None}, FORK_SICKBEARD_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete': None, 'force_next': None},
FORK_MEDUSA: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'ignore_subs': None}, FORK_MEDUSA: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'ignore_subs': None},
FORK_MEDUSA_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete_files': None, 'is_priority': None}, FORK_MEDUSA_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete_files': None, 'is_priority': None},
FORK_SICKGEAR: {'dir': None, 'failed': None, 'process_method': None, 'force': None}, FORK_SICKGEAR: {'dir': None, 'failed': None, 'process_method': None, 'force': None},
FORK_STHENO: {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "ignore_subs": None} FORK_STHENO: {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "ignore_subs": None}
} }
@ -870,7 +870,7 @@ def configure_utility_locations():
else: else:
if SYS_PATH: if SYS_PATH:
os.environ['PATH'] += ':'+SYS_PATH os.environ['PATH'] += ':' + SYS_PATH
try: try:
SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip().decode() SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip().decode()
except Exception: except Exception:

View file

@ -1,8 +1,5 @@
# coding=utf-8 # coding=utf-8
import os
import shutil
import requests import requests
import core import core
@ -52,7 +49,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
) )
logger.postprocess('{0}'.format(r.text), section) logger.postprocess('{0}'.format(r.text), section)
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error('Server returned status {0}'.format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult( return ProcessResult(

View file

@ -256,7 +256,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
return ProcessResult( return ProcessResult(
message='{0}: Sending failed download back to {0}'.format(section), message='{0}: Sending failed download back to {0}'.format(section),
status_code=1, # Return as failed to flag this in the downloader. status_code=1, # Return as failed to flag this in the downloader.
) # Return failed flag, but log the event as successful. ) # Return failed flag, but log the event as successful.
if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name: if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section) logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
@ -373,22 +373,22 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
except Exception: except Exception:
pass pass
elif scan_id: elif scan_id:
url = '{0}/{1}'.format(base_url, scan_id) url = '{0}/{1}'.format(base_url, scan_id)
command_status = command_complete(url, params, headers, section) command_status = command_complete(url, params, headers, section)
if command_status: if command_status:
logger.debug('The Scan command return status: {0}'.format(command_status), section) logger.debug('The Scan command return status: {0}'.format(command_status), section)
if command_status in ['completed']: if command_status in ['completed']:
logger.debug('The Scan command has completed successfully. Renaming was successful.', section) logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
return ProcessResult( return ProcessResult(
message='{0}: Successfully post-processed {1}'.format(section, input_name), message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0, status_code=0,
) )
elif command_status in ['failed']: elif command_status in ['failed']:
logger.debug('The Scan command has failed. Renaming was not successful.', section) logger.debug('The Scan command has failed. Renaming was not successful.', section)
# return ProcessResult( # return ProcessResult(
# message='{0}: Failed to post-process {1}'.format(section, input_name), # message='{0}: Failed to post-process {1}'.format(section, input_name),
# status_code=1, # status_code=1,
# ) # )
if not os.path.isdir(dir_name): if not os.path.isdir(dir_name):
logger.postprocess('SUCCESS: Input Directory [{0}] has been processed and removed'.format( logger.postprocess('SUCCESS: Input Directory [{0}] has been processed and removed'.format(

View file

@ -42,7 +42,8 @@ def auto_fork(section, input_category):
logger.info('Attempting to verify {category} fork'.format logger.info('Attempting to verify {category} fork'.format
(category=input_category)) (category=input_category))
url = '{protocol}{host}:{port}{root}/api/rootfolder'.format( url = '{protocol}{host}:{port}{root}/api/rootfolder'.format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root
)
headers = {'X-Api-Key': apikey} headers = {'X-Api-Key': apikey}
try: try:
r = requests.get(url, headers=headers, stream=True, verify=False) r = requests.get(url, headers=headers, stream=True, verify=False)
@ -65,10 +66,12 @@ def auto_fork(section, input_category):
if apikey: if apikey:
url = '{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess'.format( url = '{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess'.format(
protocol=protocol, host=host, port=port, root=web_root, apikey=apikey) protocol=protocol, host=host, port=port, root=web_root, apikey=apikey
)
else: else:
url = '{protocol}{host}:{port}{root}/home/postprocess/'.format( url = '{protocol}{host}:{port}{root}/home/postprocess/'.format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root
)
# attempting to auto-detect fork # attempting to auto-detect fork
try: try:

View file

@ -132,7 +132,7 @@ class NTMRotatingLogHandler(object):
""" """
Returns a numbered log file name depending on i. If i==0 it just uses logName, if not it appends Returns a numbered log file name depending on i. If i==0 it just uses logName, if not it appends
it to the extension (blah.log.3 for i == 3) it to the extension (blah.log.3 for i == 3)
i: Log number to use i: Log number to use
""" """

View file

@ -171,7 +171,7 @@ def par2(dirname):
cmd = '' cmd = ''
for item in command: for item in command:
cmd = '{cmd} {item}'.format(cmd=cmd, item=item) cmd = '{cmd} {item}'.format(cmd=cmd, item=item)
logger.debug('calling command:{0}'.format(cmd), 'PAR2') logger.debug('calling command:{0}'.format(cmd), 'PAR2')
try: try:
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket)
proc.communicate() proc.communicate()

View file

@ -828,7 +828,7 @@ def transcode_directory(dir_name):
if not os.listdir(text_type(new_dir)): # this is an empty directory and we didn't transcode into it. if not os.listdir(text_type(new_dir)): # this is an empty directory and we didn't transcode into it.
os.rmdir(new_dir) os.rmdir(new_dir)
new_dir = dir_name new_dir = dir_name
if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB
new_dir = dir_name new_dir = dir_name
bitbucket.close() bitbucket.close()
return final_result, new_dir return final_result, new_dir

View file

@ -5,7 +5,6 @@ import guessit
import requests import requests
from six import text_type from six import text_type
import core
from core import logger from core import logger
from core.utils.naming import sanitize_name from core.utils.naming import sanitize_name
@ -90,13 +89,13 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
def category_search(input_directory, input_name, input_category, root, categories): def category_search(input_directory, input_name, input_category, root, categories):
tordir = False tordir = False
#try: # try:
# input_name = input_name.encode(core.SYS_ENCODING) # input_name = input_name.encode(core.SYS_ENCODING)
#except Exception: # except Exception:
# pass # pass
#try: # try:
# input_directory = input_directory.encode(core.SYS_ENCODING) # input_directory = input_directory.encode(core.SYS_ENCODING)
#except Exception: # except Exception:
# pass # pass
if input_directory is None: # =Nothing to process here. if input_directory is None: # =Nothing to process here.

View file

@ -1,5 +1,4 @@
import re import re
import core
def sanitize_name(name): def sanitize_name(name):
@ -20,9 +19,9 @@ def sanitize_name(name):
# remove leading/trailing periods and spaces # remove leading/trailing periods and spaces
name = name.strip(' .') name = name.strip(' .')
#try: # try:
# name = name.encode(core.SYS_ENCODING) # name = name.encode(core.SYS_ENCODING)
#except Exception: # except Exception:
# pass # pass
return name return name

View file

@ -199,8 +199,8 @@ class GitUpdateManager(UpdateManager):
logger.log(u'{cmd} : returned successful'.format(cmd=cmd), logger.DEBUG) logger.log(u'{cmd} : returned successful'.format(cmd=cmd), logger.DEBUG)
exit_status = 0 exit_status = 0
elif core.LOG_GIT and exit_status in (1, 128): elif core.LOG_GIT and exit_status in (1, 128):
logger.log(u'{cmd} returned : {output}'.format logger.log(u'{cmd} returned : {output}'.format
(cmd=cmd, output=output), logger.DEBUG) (cmd=cmd, output=output), logger.DEBUG)
else: else:
if core.LOG_GIT: if core.LOG_GIT:
logger.log(u'{cmd} returned : {output}, treat as error for now'.format logger.log(u'{cmd} returned : {output}, treat as error for now'.format

View file

@ -21,21 +21,21 @@ setup(
version='12.0.10', version='12.0.10',
license='GPLv3', license='GPLv3',
description='Efficient on demand post processing', description='Efficient on demand post processing',
long_description=""" long_description="""
nzbToMedia nzbToMedia
========== ==========
Efficient on demand post processing Efficient on demand post processing
----------------------------------- -----------------------------------
A PVR app needs to know when a download is ready for post-processing. There are two methods: A PVR app needs to know when a download is ready for post-processing. There are two methods:
1. On-demand post-processing script (e.g. sabToSickBeard.py or nzbToMedia.py): A script in the downloader runs once at the end of the download job and notifies the PVR app that the download is complete. 1. On-demand post-processing script (e.g. sabToSickBeard.py or nzbToMedia.py): A script in the downloader runs once at the end of the download job and notifies the PVR app that the download is complete.
2. Continuous folder scanning: The PVR app frequently polls download folder(s) for completed downloads. 2. Continuous folder scanning: The PVR app frequently polls download folder(s) for completed downloads.
On-demand is superior, for several reasons: On-demand is superior, for several reasons:
1. The PVR app is notified only once, exactly when the download is ready for post-processing 1. The PVR app is notified only once, exactly when the download is ready for post-processing
2. The PVR app does not have to wait for the next poll interval before it starts processing 2. The PVR app does not have to wait for the next poll interval before it starts processing
3. Continuous polling is not as efficient and is more stressful on low performance hardware 3. Continuous polling is not as efficient and is more stressful on low performance hardware
@ -46,7 +46,7 @@ setup(
8. On-demand scripts can be tweaked to allow for delays with slow hardware 8. On-demand scripts can be tweaked to allow for delays with slow hardware
nzbToMedia is an on-demand post-processing script and was created out of a demand for more efficient post-processing on low-performance hardware. Many features have been added so higher performance hardware can benefit too. nzbToMedia is an on-demand post-processing script and was created out of a demand for more efficient post-processing on low-performance hardware. Many features have been added so higher performance hardware can benefit too.
Many issues that users have with folder scanning can be fixed by switching to on-demand. A whole class of support issues can be eliminated by using nzbToMedia. Many issues that users have with folder scanning can be fixed by switching to on-demand. A whole class of support issues can be eliminated by using nzbToMedia.
""", """,
author='Clinton Hall', author='Clinton Hall',

View file

@ -1,48 +1,47 @@
#! /usr/bin/env python #! /usr/bin/env python
from __future__ import print_function from __future__ import print_function
import datetime
import os import core
import sys
def test_eol(): def test_eol():
import eol import eol
eol.check() eol.check()
def test_cleanup(): def test_cleanup():
import cleanup import cleanup
cleanup.clean(cleanup.FOLDER_STRUCTURE) cleanup.clean(cleanup.FOLDER_STRUCTURE)
def test_import_core(): def test_import_core():
import core pass
from core import logger, main_db
def test_import_core_auto_process(): def test_import_core_auto_process():
from core.auto_process import comics, games, movies, music, tv pass
from core.auto_process.common import ProcessResult
def test_import_core_plugins(): def test_import_core_plugins():
from core.plugins.downloaders.nzb.utils import get_nzoid pass
from core.plugins.plex import plex_update
def test_import_core_user_scripts(): def test_import_core_user_scripts():
from core.user_scripts import external_script pass
def test_import_six(): def test_import_six():
from six import text_type pass
def test_import_core_utils(): def test_import_core_utils():
from core.utils import ( pass
char_replace, clean_dir, convert_to_ascii,
extract_files, get_dirs, get_download_info,
update_download_info_status, replace_links,
)
import core
from core import logger, main_db
def test_initial(): def test_initial():
core.initialize() core.initialize()
del core.MYAPP del core.MYAPP
def test_core_parameters(): def test_core_parameters():
assert core.CHECK_MEDIA == 1 assert core.CHECK_MEDIA == 1

View file

@ -1,14 +1,9 @@
#! /usr/bin/env python #! /usr/bin/env python
from __future__ import print_function from __future__ import print_function
import datetime
import os
import sys
import json
import time
import requests
import core import core
from core import logger, transcoder from core import transcoder
def test_transcoder_check(): def test_transcoder_check():
assert transcoder.is_video_good(core.TEST_FILE, 0) == True assert transcoder.is_video_good(core.TEST_FILE, 0) is True

24
tox.ini
View file

@ -29,6 +29,30 @@ deps =
commands = commands =
{posargs:pytest --cov --cov-report=term-missing tests} {posargs:pytest --cov --cov-report=term-missing tests}
[flake8]
max-line-length = 79
verbose = 2
statistics = True
ignore =
; -- flake8 --
; E501 line too long
E501
per-file-ignores =
; F401 imported but unused
; E402 module level import not at top of file
core/__init__.py: E402, F401
core/utils/__init__.py: F401
core/plugins/downloaders/configuration.py: F401
core/plugins/downloaders/utils.py: F401
[testenv:check]
deps =
flake8
skip_install = true
commands =
flake8 core tests setup.py
[coverage:run] [coverage:run]
omit = omit =
libs/* libs/*