Merge v12.1.00

clinton-hall 2019-08-06 13:19:19 +12:00
commit e165bbcefc
74 changed files with 1615 additions and 443 deletions

View file

@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 12.0.10
+current_version = 12.1.00
 commit = True
 tag = False

View file

@ -1,24 +1,34 @@
 #!/usr/bin/env python
 # coding=utf-8
-import eol
-eol.check()
-
-import cleanup
-cleanup.clean(cleanup.FOLDER_STRUCTURE)
+from __future__ import (
+    absolute_import,
+    division,
+    print_function,
+    unicode_literals,
+)
 
 import datetime
 import os
 import sys
 
+import eol
+import cleanup
+eol.check()
+cleanup.clean(cleanup.FOLDER_STRUCTURE)
 
 import core
 from core import logger, main_db
-from core.auto_process import comics, games, movies, music, tv
+from core.auto_process import comics, games, movies, music, tv, books
 from core.auto_process.common import ProcessResult
 from core.plugins.plex import plex_update
 from core.user_scripts import external_script
 from core.utils import char_replace, convert_to_ascii, replace_links
-
-try:
-    text_type = unicode
-except NameError:
-    text_type = str
+from six import text_type
 
 
 def process_torrent(input_directory, input_name, input_category, input_hash, input_id, client_agent):
@ -60,30 +70,25 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
         input_category = 'UNCAT'
     usercat = input_category
-    try:
-        input_name = input_name.encode(core.SYS_ENCODING)
-    except UnicodeError:
-        pass
-    try:
-        input_directory = input_directory.encode(core.SYS_ENCODING)
-    except UnicodeError:
-        pass
 
     logger.debug('Determined Directory: {0} | Name: {1} | Category: {2}'.format
                  (input_directory, input_name, input_category))
 
     # auto-detect section
     section = core.CFG.findsection(input_category).isenabled()
-    if section is None:
-        section = core.CFG.findsection('ALL').isenabled()
-        if section is None:
+    if section is None: #Check for user_scripts for 'ALL' and 'UNCAT'
+        if usercat in core.CATEGORIES:
+            section = core.CFG.findsection('ALL').isenabled()
+            usercat = 'ALL'
+        else:
+            section = core.CFG.findsection('UNCAT').isenabled()
+            usercat = 'UNCAT'
+    if section is None: # We haven't found any categories to process.
         logger.error('Category:[{0}] is not defined or is not enabled. '
                      'Please rename it or ensure it is enabled for the appropriate section '
                      'in your autoProcessMedia.cfg and try again.'.format
                      (input_category))
         return [-1, '']
-        else:
-            usercat = 'ALL'
 
     if len(section) > 1:
         logger.error('Category:[{0}] is not unique, {1} are using it. '
@ -106,7 +111,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
torrent_no_link = int(section.get('Torrent_NoLink', 0)) torrent_no_link = int(section.get('Torrent_NoLink', 0))
keep_archive = int(section.get('keep_archive', 0)) keep_archive = int(section.get('keep_archive', 0))
extract = int(section.get('extract', 0)) extract = int(section.get('extract', 0))
-extensions = section.get('user_script_mediaExtensions', '').lower().split(',')
+extensions = section.get('user_script_mediaExtensions', '')
unique_path = int(section.get('unique_path', 1)) unique_path = int(section.get('unique_path', 1))
if client_agent != 'manual': if client_agent != 'manual':
@ -125,10 +130,6 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     else:
         output_destination = os.path.normpath(
             core.os.path.join(core.OUTPUT_DIRECTORY, input_category))
-    try:
-        output_destination = output_destination.encode(core.SYS_ENCODING)
-    except UnicodeError:
-        pass
 
     if output_destination in input_directory:
         output_destination = input_directory
@ -170,10 +171,6 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
                 core.os.path.join(output_destination, os.path.basename(file_path)), full_file_name)
             logger.debug('Setting outputDestination to {0} to preserve folder structure'.format
                 (os.path.dirname(target_file)))
-            try:
-                target_file = target_file.encode(core.SYS_ENCODING)
-            except UnicodeError:
-                pass
         if root == 1:
if not found_file: if not found_file:
logger.debug('Looking for {0} in: {1}'.format(input_name, inputFile)) logger.debug('Looking for {0} in: {1}'.format(input_name, inputFile))
@ -256,6 +253,8 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
         result = comics.process(section_name, output_destination, input_name, status, client_agent, input_category)
     elif section_name == 'Gamez':
         result = games.process(section_name, output_destination, input_name, status, client_agent, input_category)
+    elif section_name == 'LazyLibrarian':
+        result = books.process(section_name, output_destination, input_name, status, client_agent, input_category)
 
     plex_update(input_category)
@ -276,13 +275,13 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     # remove torrent
     if core.USE_LINK == 'move-sym' and not core.DELETE_ORIGINAL == 1:
         logger.debug('Checking for sym-links to re-direct in: {0}'.format(input_directory))
-        for dirpath, dirs, files in os.walk(input_directory):
+        for dirpath, _, files in os.walk(input_directory):
             for file in files:
                 logger.debug('Checking symlink: {0}'.format(os.path.join(dirpath, file)))
                 replace_links(os.path.join(dirpath, file))
     core.remove_torrent(client_agent, input_hash, input_id, input_name)
 
-    if not section_name == 'UserScript':
+    if section_name != 'UserScript':
         # for user script, we assume this is cleaned by the script or option USER_SCRIPT_CLEAN
         # cleanup our processing folders of any misc unwanted files and empty directories
         core.clean_dir(output_destination, section_name, input_category)
@ -350,15 +349,7 @@ def main(args):
             if client_agent.lower() not in core.TORRENT_CLIENTS:
                 continue
 
-            try:
-                dir_name = dir_name.encode(core.SYS_ENCODING)
-            except UnicodeError:
-                pass
             input_name = os.path.basename(dir_name)
-            try:
-                input_name = input_name.encode(core.SYS_ENCODING)
-            except UnicodeError:
-                pass
 
             results = process_torrent(dir_name, input_name, subsection, input_hash or None, input_id or None,
                                       client_agent)

View file

@ -36,7 +36,9 @@
[Posix] [Posix]
### Process priority setting for External commands (Extractor and Transcoder) on Posix (Unix/Linux/OSX) systems. ### Process priority setting for External commands (Extractor and Transcoder) on Posix (Unix/Linux/OSX) systems.
 # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
+# If entering an integer e.g 'niceness = 4', this is added to the nice command and passed as 'nice -n4' (Default).
+# If entering a comma separated list e.g. 'niceness = nice,4' this will be passed as 'nice 4' (Safer).
-niceness = 0
+niceness = nice,-n0
# Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle. # Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle.
ionice_class = 0 ionice_class = 0
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data. # Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
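
The niceness comments above describe two accepted forms. A minimal sketch of how they translate into the command prefix used for external tools (build_nice_prefix is a hypothetical helper name; the real parsing lives in configure_niceness() further down in this commit):

def build_nice_prefix(niceness):
    # Hypothetical helper illustrating the comma-list vs. integer forms described above.
    parts = str(niceness).split(',')
    if len(parts) > 1:
        # 'nice,4' is passed through as-is -> ['nice', '4'] (the "Safer" form)
        return parts
    # '4' is added to the nice command -> ['nice', '-n4'] (the default form)
    return ['nice', '-n{0}'.format(int(niceness))]

print(build_nice_prefix('4'))         # ['nice', '-n4']
print(build_nice_prefix('nice,-n0'))  # ['nice', '-n0']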
@ -282,6 +284,31 @@
##### Set to path where download client places completed downloads locally for this category ##### Set to path where download client places completed downloads locally for this category
watch_dir = watch_dir =
[LazyLibrarian]
#### autoProcessing for LazyLibrarian
#### books - category that gets called for post-processing with LazyLibrarian
[[books]]
enabled = 0
apikey =
host = localhost
port = 5299
###### ADVANCED USE - ONLY EDIT IF YOU KNOW WHAT YOU'RE DOING ######
ssl = 0
web_root =
# Enable/Disable linking for Torrents
Torrent_NoLink = 0
keep_archive = 1
extract = 1
# Set this to minimum required size to consider a media file valid (in MB)
minSize = 0
# Enable/Disable deleting ignored files (samples and invalid media files)
delete_ignored = 0
##### Enable if LazyLibrarian is on a remote server for this category
remote_path = 0
##### Set to path where download client places completed downloads locally for this category
watch_dir =
[Network] [Network]
# Enter Mount points as LocalPath,RemotePath and separate each pair with '|' # Enter Mount points as LocalPath,RemotePath and separate each pair with '|'
# e.g. MountPoints = /volume1/Public/,E:\|/volume2/share/,\\NAS\ # e.g. MountPoints = /volume1/Public/,E:\|/volume2/share/,\\NAS\
@ -389,11 +416,13 @@
 externalSubDir =
 # hwAccel. 1 will set ffmpeg to enable hardware acceleration (this requires a recent ffmpeg)
 hwAccel = 0
-# generalOptions. Enter your additional ffmpeg options here with commas to separate each option/value (i.e replace spaces with commas).
+# generalOptions. Enter your additional ffmpeg options (these insert before the '-i' input files) here with commas to separate each option/value (i.e replace spaces with commas).
 generalOptions =
+# otherOptions. Enter your additional ffmpeg options (these insert after the '-i' input files and before the output file) here with commas to separate each option/value (i.e replace spaces with commas).
+otherOptions =
 # outputDefault. Loads default configs for the selected device. The remaining options below are ignored.
 # If you want to use your own profile, leave this blank and set the remaining options below.
-# outputDefault profiles allowed: iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mp4-scene-release
+# outputDefault profiles allowed: iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mkv-bluray, mp4-scene-release
 outputDefault =
 #### Define custom settings below.
 outputVideoExtension = .mp4
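
A rough sketch of where the two option groups above end up in the assembled ffmpeg command; the option values are purely illustrative, not project defaults:

general_options = ['-fflags', '+genpts']     # generalOptions: inserted before the '-i' input file(s)
other_options = ['-movflags', '+faststart']  # otherOptions: inserted after the inputs, before the output file

command = (
    ['ffmpeg']
    + general_options
    + ['-i', 'input.mkv']
    + other_options
    + ['output.mp4']
)
print(' '.join(command))
# ffmpeg -fflags +genpts -i input.mkv -movflags +faststart output.mp4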

View file

@ -24,6 +24,42 @@ jobs:
maxParallel: 4 maxParallel: 4
steps: steps:
#- script: |
# Make sure all packages are pulled from latest
#sudo apt-get update
# Fail out if any setups fail
#set -e
# Delete old Pythons
#rm -rf $AGENT_TOOLSDIRECTORY/Python/2.7.16
#rm -rf $AGENT_TOOLSDIRECTORY/Python/3.5.7
#rm -rf $AGENT_TOOLSDIRECTORY/Python/3.7.3
# Download new Pythons
#azcopy --recursive \
#--source https://vstsagenttools.blob.core.windows.net/tools/hostedtoolcache/linux/Python/2.7.15 \
#--destination $AGENT_TOOLSDIRECTORY/Python/2.7.15
#azcopy --recursive \
#--source https://vstsagenttools.blob.core.windows.net/tools/hostedtoolcache/linux/Python/3.5.5 \
#--destination $AGENT_TOOLSDIRECTORY/Python/3.5.5
#azcopy --recursive \
#--source https://vstsagenttools.blob.core.windows.net/tools/hostedtoolcache/linux/Python/3.7.2 \
#--destination $AGENT_TOOLSDIRECTORY/Python/3.7.2
# Install new Pythons
#original_directory=$PWD
#setups=$(find $AGENT_TOOLSDIRECTORY/Python -name setup.sh)
#for setup in $setups; do
#chmod +x $setup;
#cd $(dirname $setup);
#./$(basename $setup);
#cd $original_directory;
#done;
#displayName: 'Workaround: update apt and roll back Python versions'
- task: UsePythonVersion@0 - task: UsePythonVersion@0
inputs: inputs:
versionSpec: '$(python.version)' versionSpec: '$(python.version)'
@ -32,11 +68,21 @@ jobs:
   - script: python -m pip install --upgrade pip
     displayName: 'Install dependencies'
 
+  - script: sudo apt-get install ffmpeg
+    displayName: 'Install ffmpeg'
+
   - script: |
       pip install pytest
       pytest tests --doctest-modules --junitxml=junit/test-results.xml
     displayName: 'pytest'
 
+  - script: |
+      rm -rf .git
+      python cleanup.py
+      python TorrentToMedia.py
+      python nzbToMedia.py
+    displayName: 'Test source install cleanup'
+
   - task: PublishTestResults@2
     inputs:
       testResultsFiles: '**/test-results.xml'

View file

@ -1,6 +1,11 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import subprocess import subprocess
@ -25,6 +30,7 @@ FOLDER_STRUCTURE = {
class WorkingDirectory(object): class WorkingDirectory(object):
"""Context manager for changing current working directory.""" """Context manager for changing current working directory."""
def __init__(self, new, original=None): def __init__(self, new, original=None):
self.working_directory = new self.working_directory = new
self.original_directory = os.getcwd() if original is None else original self.original_directory = os.getcwd() if original is None else original
@ -43,7 +49,7 @@ class WorkingDirectory(object):
original_directory=self.original_directory, original_directory=self.original_directory,
error=error, error=error,
working_directory=self.working_directory, working_directory=self.working_directory,
) ),
) )

View file

@ -1,6 +1,11 @@
# coding=utf-8 # coding=utf-8
from __future__ import print_function from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import itertools import itertools
import locale import locale
@ -78,7 +83,7 @@ from core.utils import (
wake_up, wake_up,
) )
-__version__ = '12.0.10'
+__version__ = '12.1.00'
# Client Agents # Client Agents
NZB_CLIENTS = ['sabnzbd', 'nzbget', 'manual'] NZB_CLIENTS = ['sabnzbd', 'nzbget', 'manual']
@ -94,10 +99,12 @@ FORK_FAILED = 'failed'
 FORK_FAILED_TORRENT = 'failed-torrent'
 FORK_SICKRAGE = 'SickRage'
 FORK_SICKCHILL = 'SickChill'
+FORK_SICKCHILL_API = 'SickChill-api'
 FORK_SICKBEARD_API = 'SickBeard-api'
 FORK_MEDUSA = 'Medusa'
 FORK_MEDUSA_API = 'Medusa-api'
 FORK_SICKGEAR = 'SickGear'
+FORK_SICKGEAR_API = 'SickGear-api'
 FORK_STHENO = 'Stheno'
FORKS = { FORKS = {
@ -106,11 +113,13 @@ FORKS = {
     FORK_FAILED_TORRENT: {'dir': None, 'failed': None, 'process_method': None},
     FORK_SICKRAGE: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None},
     FORK_SICKCHILL: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'force_next': None},
+    FORK_SICKCHILL_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete': None, 'force_next': None, 'is_priority': None},
     FORK_SICKBEARD_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete': None, 'force_next': None},
     FORK_MEDUSA: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'ignore_subs': None},
     FORK_MEDUSA_API: {'path': None, 'failed': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'delete_files': None, 'is_priority': None},
     FORK_SICKGEAR: {'dir': None, 'failed': None, 'process_method': None, 'force': None},
+    FORK_SICKGEAR_API: {'path': None, 'process_method': None, 'force_replace': None, 'return_data': None, 'type': None, 'is priority': None},
-    FORK_STHENO: {"proc_dir": None, "failed": None, "process_method": None, "force": None, "delete_on": None, "ignore_subs": None}
+    FORK_STHENO: {'proc_dir': None, 'failed': None, 'process_method': None, 'force': None, 'delete_on': None, 'ignore_subs': None},
 }
 
 ALL_FORKS = {k: None for k in set(list(itertools.chain.from_iterable([FORKS[x].keys() for x in FORKS.keys()])))}
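
The ALL_FORKS line above is just the union of every fork's parameter names; a toy version with two forks shows the idea:

import itertools

forks = {
    'SickChill-api': {'path': None, 'failed': None, 'force_next': None},
    'SickGear-api': {'path': None, 'process_method': None},
}
all_forks = {k: None for k in set(itertools.chain.from_iterable(f.keys() for f in forks.values()))}
print(sorted(all_forks))  # ['failed', 'force_next', 'path', 'process_method']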
@ -193,7 +202,9 @@ META_CONTAINER = []
SECTIONS = [] SECTIONS = []
CATEGORIES = [] CATEGORIES = []
FORK_SET = []
MOUNTED = None
GETSUBS = False GETSUBS = False
TRANSCODE = None TRANSCODE = None
CONCAT = None CONCAT = None
@ -205,6 +216,7 @@ VEXTENSION = None
OUTPUTVIDEOPATH = None OUTPUTVIDEOPATH = None
PROCESSOUTPUT = False PROCESSOUTPUT = False
GENERALOPTS = [] GENERALOPTS = []
OTHEROPTS = []
ALANGUAGE = None ALANGUAGE = None
AINCLUDE = False AINCLUDE = False
SLANGUAGES = [] SLANGUAGES = []
@ -450,6 +462,9 @@ def configure_niceness():
with open(os.devnull, 'w') as devnull: with open(os.devnull, 'w') as devnull:
try: try:
subprocess.Popen(['nice'], stdout=devnull, stderr=devnull).communicate() subprocess.Popen(['nice'], stdout=devnull, stderr=devnull).communicate()
if len(CFG['Posix']['niceness'].split(',')) > 1: #Allow passing of absolute command, not just value.
NICENESS.extend(CFG['Posix']['niceness'].split(','))
else:
NICENESS.extend(['nice', '-n{0}'.format(int(CFG['Posix']['niceness']))]) NICENESS.extend(['nice', '-n{0}'.format(int(CFG['Posix']['niceness']))])
except Exception: except Exception:
pass pass
@ -499,6 +514,7 @@ def configure_containers():
def configure_transcoder(): def configure_transcoder():
global MOUNTED
global GETSUBS global GETSUBS
global TRANSCODE global TRANSCODE
global DUPLICATE global DUPLICATE
@ -506,6 +522,7 @@ def configure_transcoder():
global IGNOREEXTENSIONS global IGNOREEXTENSIONS
global OUTPUTFASTSTART global OUTPUTFASTSTART
global GENERALOPTS global GENERALOPTS
global OTHEROPTS
global OUTPUTQUALITYPERCENT global OUTPUTQUALITYPERCENT
global OUTPUTVIDEOPATH global OUTPUTVIDEOPATH
global PROCESSOUTPUT global PROCESSOUTPUT
@ -543,6 +560,7 @@ def configure_transcoder():
global ALLOWSUBS global ALLOWSUBS
global DEFAULTS global DEFAULTS
MOUNTED = None
GETSUBS = int(CFG['Transcoder']['getSubs']) GETSUBS = int(CFG['Transcoder']['getSubs'])
TRANSCODE = int(CFG['Transcoder']['transcode']) TRANSCODE = int(CFG['Transcoder']['transcode'])
DUPLICATE = int(CFG['Transcoder']['duplicate']) DUPLICATE = int(CFG['Transcoder']['duplicate'])
@ -560,6 +578,11 @@ def configure_transcoder():
GENERALOPTS.append('-fflags') GENERALOPTS.append('-fflags')
if '+genpts' not in GENERALOPTS: if '+genpts' not in GENERALOPTS:
GENERALOPTS.append('+genpts') GENERALOPTS.append('+genpts')
OTHEROPTS = (CFG['Transcoder']['otherOptions'])
if isinstance(OTHEROPTS, str):
OTHEROPTS = OTHEROPTS.split(',')
if OTHEROPTS == ['']:
OTHEROPTS = []
try: try:
OUTPUTQUALITYPERCENT = int(CFG['Transcoder']['outputQualityPercent']) OUTPUTQUALITYPERCENT = int(CFG['Transcoder']['outputQualityPercent'])
except Exception: except Exception:
@ -653,7 +676,7 @@ def configure_transcoder():
codec_alias = { codec_alias = {
'libx264': ['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4'], 'libx264': ['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4'],
'libmp3lame': ['libmp3lame', 'mp3'], 'libmp3lame': ['libmp3lame', 'mp3'],
'libfaac': ['libfaac', 'aac', 'faac'] 'libfaac': ['libfaac', 'aac', 'faac'],
} }
transcode_defaults = { transcode_defaults = {
'iPad': { 'iPad': {
@ -662,7 +685,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2,
'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'iPad-1080p': { 'iPad-1080p': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -670,7 +693,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2,
'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'iPad-720p': { 'iPad-720p': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -678,7 +701,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': None, 'ACHANNELS': 2,
'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'Apple-TV': { 'Apple-TV': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -686,7 +709,7 @@ def configure_transcoder():
'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6, 'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6,
'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2, 'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'iPod': { 'iPod': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -694,7 +717,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'iPhone': { 'iPhone': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -702,7 +725,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'PS3': { 'PS3': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -710,7 +733,7 @@ def configure_transcoder():
'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6, 'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6,
'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2, 'ACODEC2': 'aac', 'ACODEC2_ALLOW': ['libfaac'], 'ABITRATE2': None, 'ACHANNELS2': 2,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'xbox': { 'xbox': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -718,7 +741,7 @@ def configure_transcoder():
'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6, 'ACODEC': 'ac3', 'ACODEC_ALLOW': ['ac3'], 'ABITRATE': None, 'ACHANNELS': 6,
'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'Roku-480p': { 'Roku-480p': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -726,7 +749,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'Roku-720p': { 'Roku-720p': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -734,7 +757,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'Roku-1080p': { 'Roku-1080p': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -742,7 +765,7 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 160000, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 160000, 'ACHANNELS': 2,
'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'mkv': { 'mkv': {
'VEXTENSION': '.mkv', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mkv', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
@ -752,13 +775,21 @@ def configure_transcoder():
'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, 'ACHANNELS3': 8, 'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, 'ACHANNELS3': 8,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text'
}, },
'mkv-bluray': {
'VEXTENSION': '.mkv', 'VCODEC': 'libx265', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': None, 'VLEVEL': None,
'VRESOLUTION': None, 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'hevc', 'h265', 'libx265', 'h.265', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8,
'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, 'ACHANNELS3': 8,
'SCODEC': 'mov_text',
},
'mp4-scene-release': { 'mp4-scene-release': {
'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': 19, 'VLEVEL': '3.1', 'VEXTENSION': '.mp4', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': None, 'VCRF': 19, 'VLEVEL': '3.1',
'VRESOLUTION': None, 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'], 'VRESOLUTION': None, 'VCODEC_ALLOW': ['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8, 'ACODEC': 'dts', 'ACODEC_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE': None, 'ACHANNELS': 8,
'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None, 'ACODEC2': None, 'ACODEC2_ALLOW': [], 'ABITRATE2': None, 'ACHANNELS2': None,
'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, 'ACHANNELS3': 8, 'ACODEC3': 'ac3', 'ACODEC3_ALLOW': ['libfaac', 'dts', 'ac3', 'mp2', 'mp3'], 'ABITRATE3': None, 'ACHANNELS3': 8,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
}, },
'MKV-SD': { 'MKV-SD': {
'VEXTENSION': '.mkv', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': '1200k', 'VCRF': None, 'VLEVEL': None, 'VEXTENSION': '.mkv', 'VCODEC': 'libx264', 'VPRESET': None, 'VFRAMERATE': None, 'VBITRATE': '1200k', 'VCRF': None, 'VLEVEL': None,
@ -766,8 +797,8 @@ def configure_transcoder():
'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2, 'ACODEC': 'aac', 'ACODEC_ALLOW': ['libfaac'], 'ABITRATE': 128000, 'ACHANNELS': 2,
'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6, 'ACODEC2': 'ac3', 'ACODEC2_ALLOW': ['ac3'], 'ABITRATE2': None, 'ACHANNELS2': 6,
'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None, 'ACODEC3': None, 'ACODEC3_ALLOW': [], 'ABITRATE3': None, 'ACHANNELS3': None,
'SCODEC': 'mov_text' 'SCODEC': 'mov_text',
} },
} }
if DEFAULTS and DEFAULTS in transcode_defaults: if DEFAULTS and DEFAULTS in transcode_defaults:
VEXTENSION = transcode_defaults[DEFAULTS]['VEXTENSION'] VEXTENSION = transcode_defaults[DEFAULTS]['VEXTENSION']
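
As the two lines above suggest, choosing an outputDefault copies that profile's values over the transcoder settings. A hedged sketch using a trimmed copy of the new mkv-bluray profile (the real code assigns each setting, such as VEXTENSION, to its own module global):

transcode_defaults = {
    'mkv-bluray': {'VEXTENSION': '.mkv', 'VCODEC': 'libx265', 'ACODEC': 'dts', 'SCODEC': 'mov_text'},
}
defaults = 'mkv-bluray'   # would come from the outputDefault config option

settings = {}
if defaults and defaults in transcode_defaults:
    settings.update(transcode_defaults[defaults])
print(settings['VCODEC'], settings['VEXTENSION'])  # libx265 .mkv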
@ -957,7 +988,7 @@ def check_python():
major=sys.version_info[0], major=sys.version_info[0],
minor=sys.version_info[1], minor=sys.version_info[1],
x=days_left, x=days_left,
) ),
) )
if days_left <= grace_period: if days_left <= grace_period:
logger.warning('Please upgrade to a more recent Python version.') logger.warning('Please upgrade to a more recent Python version.')

View file

@ -0,0 +1,83 @@
# coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import requests
import core
from core import logger
from core.auto_process.common import ProcessResult
from core.utils import (
convert_to_ascii,
remote_dir,
server_responding,
)
requests.packages.urllib3.disable_warnings()
def process(section, dir_name, input_name=None, status=0, client_agent='manual', input_category=None):
status = int(status)
cfg = dict(core.CFG[section][input_category])
host = cfg['host']
port = cfg['port']
apikey = cfg['apikey']
ssl = int(cfg.get('ssl', 0))
web_root = cfg.get('web_root', '')
protocol = 'https://' if ssl else 'http://'
remote_path = int(cfg.get('remote_path', 0))
url = '{0}{1}:{2}{3}/api'.format(protocol, host, port, web_root)
if not server_responding(url):
logger.error('Server did not respond. Exiting', section)
return ProcessResult(
message='{0}: Failed to post-process - {0} did not respond.'.format(section),
status_code=1,
)
input_name, dir_name = convert_to_ascii(input_name, dir_name)
params = {
'apikey': apikey,
'cmd': 'forceProcess',
'dir': remote_dir(dir_name) if remote_path else dir_name,
}
logger.debug('Opening URL: {0} with params: {1}'.format(url, params), section)
try:
r = requests.get(url, params=params, verify=False, timeout=(30, 300))
except requests.ConnectionError:
logger.error('Unable to open URL')
return ProcessResult(
message='{0}: Failed to post-process - Unable to connect to {1}'.format(section, section),
status_code=1,
)
logger.postprocess('{0}'.format(r.text), section)
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error('Server returned status {0}'.format(r.status_code), section)
return ProcessResult(
message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
status_code=1,
)
elif r.text == 'OK':
logger.postprocess('SUCCESS: ForceProcess for {0} has been started in LazyLibrarian'.format(dir_name), section)
return ProcessResult(
message='{0}: Successfully post-processed {1}'.format(section, input_name),
status_code=0,
)
else:
logger.error('FAILED: ForceProcess of {0} has Failed in LazyLibrarian'.format(dir_name), section)
return ProcessResult(
message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section),
status_code=1,
)
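
A hedged usage sketch of how this new module is reached from the torrent path, mirroring the LazyLibrarian branch added to TorrentToMedia.py earlier in this commit; the directory and names below are made up, and a configured core.CFG is assumed:

from core.auto_process import books

result = books.process(
    section='LazyLibrarian',
    dir_name='/downloads/books/Example.Author.Book.Title',  # hypothetical completed download
    input_name='Example.Author.Book.Title',
    status=0,                 # 0 = download succeeded
    client_agent='manual',
    input_category='books',   # the [[books]] subsection added to autoProcessMedia.cfg
)
print(result.message)         # e.g. 'LazyLibrarian: Successfully post-processed ...'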

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import requests import requests
@ -60,7 +67,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
logger.error('Unable to open URL', section) logger.error('Unable to open URL', section)
return ProcessResult( return ProcessResult(
message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
status_code=1 status_code=1,
) )
if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]: if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error('Server returned status {0}'.format(r.status_code), section) logger.error('Server returned status {0}'.format(r.status_code), section)

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import requests import requests
from core import logger from core import logger
@ -17,7 +24,7 @@ class ProcessResult(object):
def __str__(self): def __str__(self):
return 'Processing {0}: {1}'.format( return 'Processing {0}: {1}'.format(
'succeeded' if bool(self) else 'failed', 'succeeded' if bool(self) else 'failed',
self.message self.message,
) )
def __repr__(self): def __repr__(self):

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import shutil import shutil
@ -46,7 +53,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
'api_key': apikey, 'api_key': apikey,
'mode': 'UPDATEREQUESTEDSTATUS', 'mode': 'UPDATEREQUESTEDSTATUS',
'db_id': gamez_id, 'db_id': gamez_id,
'status': download_status 'status': download_status,
} }
logger.debug('Opening URL: {0}'.format(url), section) logger.debug('Opening URL: {0}'.format(url), section)

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import json import json
import os import os
import time import time
@ -504,7 +511,7 @@ def get_release(base_url, imdb_id=None, download_id=None, release_id=None):
# Narrow results by removing old releases by comparing their last_edit field # Narrow results by removing old releases by comparing their last_edit field
if len(results) > 1: if len(results) > 1:
for id1, x1 in results.items(): for id1, x1 in results.items():
for id2, x2 in results.items(): for x2 in results.values():
try: try:
if x2['last_edit'] > x1['last_edit']: if x2['last_edit'] > x1['last_edit']:
results.pop(id1) results.pop(id1)

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import json import json
import os import os
import time import time
@ -73,7 +80,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': 'forceProcess', 'cmd': 'forceProcess',
'dir': remote_dir(dir_name) if remote_path else dir_name 'dir': remote_dir(dir_name) if remote_path else dir_name,
} }
res = force_process(params, url, apikey, input_name, dir_name, section, wait_for) res = force_process(params, url, apikey, input_name, dir_name, section, wait_for)
@ -83,7 +90,7 @@ def process(section, dir_name, input_name=None, status=0, client_agent='manual',
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': 'forceProcess', 'cmd': 'forceProcess',
'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0] 'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0],
} }
res = force_process(params, url, apikey, input_name, dir_name, section, wait_for) res = force_process(params, url, apikey, input_name, dir_name, section, wait_for)
@ -187,7 +194,7 @@ def get_status(url, apikey, dir_name):
params = { params = {
'apikey': apikey, 'apikey': apikey,
'cmd': 'getHistory' 'cmd': 'getHistory',
} }
logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params)) logger.debug('Opening URL: {0} with PARAMS: {1}'.format(url, params))

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import copy import copy
import errno import errno
import json import json
@ -266,7 +273,7 @@ def process(section, dir_name, input_name=None, failed=False, client_agent='manu
if apikey: if apikey:
url = '{0}{1}:{2}{3}/api/{4}/?cmd=postprocess'.format(protocol, host, port, web_root, apikey) url = '{0}{1}:{2}{3}/api/{4}/?cmd=postprocess'.format(protocol, host, port, web_root, apikey)
elif fork == 'Stheno': elif fork == 'Stheno':
url = "{0}{1}:{2}{3}/home/postprocess/process_episode".format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/home/postprocess/process_episode'.format(protocol, host, port, web_root)
else: else:
url = '{0}{1}:{2}{3}/home/postprocess/processEpisode'.format(protocol, host, port, web_root) url = '{0}{1}:{2}{3}/home/postprocess/processEpisode'.format(protocol, host, port, web_root)
elif section == 'NzbDrone': elif section == 'NzbDrone':

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import copy import copy
import os import os
import shutil import shutil
@ -13,17 +20,17 @@ from core import logger
class Section(configobj.Section, object): class Section(configobj.Section, object):
def isenabled(section): def isenabled(self):
# checks if subsection enabled, returns true/false if subsection specified otherwise returns true/false in {} # checks if subsection enabled, returns true/false if subsection specified otherwise returns true/false in {}
if not section.sections: if not self.sections:
try: try:
value = list(ConfigObj.find_key(section, 'enabled'))[0] value = list(ConfigObj.find_key(self, 'enabled'))[0]
except Exception: except Exception:
value = 0 value = 0
if int(value) == 1: if int(value) == 1:
return section return self
else: else:
to_return = copy.deepcopy(section) to_return = copy.deepcopy(self)
for section_name, subsections in to_return.items(): for section_name, subsections in to_return.items():
for subsection in subsections: for subsection in subsections:
try: try:
@ -40,8 +47,8 @@ class Section(configobj.Section, object):
return to_return return to_return
def findsection(section, key): def findsection(self, key):
to_return = copy.deepcopy(section) to_return = copy.deepcopy(self)
for subsection in to_return: for subsection in to_return:
try: try:
value = list(ConfigObj.find_key(to_return[subsection], key))[0] value = list(ConfigObj.find_key(to_return[subsection], key))[0]
@ -136,10 +143,10 @@ class ConfigObj(configobj.ConfigObj, Section):
subsections = {} subsections = {}
# gather all new-style and old-style sub-sections # gather all new-style and old-style sub-sections
for newsection, newitems in CFG_NEW.items(): for newsection in CFG_NEW:
if CFG_NEW[newsection].sections: if CFG_NEW[newsection].sections:
subsections.update({newsection: CFG_NEW[newsection].sections}) subsections.update({newsection: CFG_NEW[newsection].sections})
for section, items in CFG_OLD.items(): for section in CFG_OLD:
if CFG_OLD[section].sections: if CFG_OLD[section].sections:
subsections.update({section: CFG_OLD[section].sections}) subsections.update({section: CFG_OLD[section].sections})
for option, value in CFG_OLD[section].items(): for option, value in CFG_OLD[section].items():
@ -383,6 +390,21 @@ class ConfigObj(configobj.ConfigObj, Section):
cfg_new[section][os.environ[env_cat_key]][option] = value cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1 cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = 'LazyLibrarian'
env_cat_key = 'NZBPO_LLCATEGORY'
env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'remote_path']
if env_cat_key in os.environ:
for index in range(len(env_keys)):
key = 'NZBPO_LL{index}'.format(index=env_keys[index])
if key in os.environ:
option = cfg_keys[index]
value = os.environ[key]
if os.environ[env_cat_key] not in cfg_new[section].sections:
cfg_new[section][os.environ[env_cat_key]] = {}
cfg_new[section][os.environ[env_cat_key]][option] = value
cfg_new[section][os.environ[env_cat_key]]['enabled'] = 1
section = 'NzbDrone' section = 'NzbDrone'
env_cat_key = 'NZBPO_NDCATEGORY' env_cat_key = 'NZBPO_NDCATEGORY'
env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
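
A small sketch of what the LazyLibrarian block added above does with the NZBGet option variables; the environment values are made up, and the real code writes into the loaded ConfigObj rather than a plain dict:

import os

os.environ.update({        # hypothetical NZBGet post-processing environment
    'NZBPO_LLCATEGORY': 'books',
    'NZBPO_LLENABLED': '1',
    'NZBPO_LLHOST': 'localhost',
    'NZBPO_LLPORT': '5299',
})

env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH']
cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'remote_path']

category = os.environ['NZBPO_LLCATEGORY']
books_cfg = {}
for env_key, cfg_key in zip(env_keys, cfg_keys):
    value = os.environ.get('NZBPO_LL{0}'.format(env_key))
    if value is not None:
        books_cfg[cfg_key] = value
books_cfg['enabled'] = 1

print({'LazyLibrarian': {category: books_cfg}})
# {'LazyLibrarian': {'books': {'enabled': 1, 'host': 'localhost', 'port': '5299'}}}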

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from core import logger, main_db from core import logger, main_db
from core.utils import backup_versioned_file from core.utils import backup_versioned_file
@ -33,7 +40,7 @@ class InitialSchema(main_db.SchemaUpgrade):
queries = [ queries = [
'CREATE TABLE db_version (db_version INTEGER);', 'CREATE TABLE db_version (db_version INTEGER);',
'CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));', 'CREATE TABLE downloads (input_directory TEXT, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC, CONSTRAINT pk_downloadID PRIMARY KEY (input_directory, input_name));',
'INSERT INTO db_version (db_version) VALUES (2);' 'INSERT INTO db_version (db_version) VALUES (2);',
] ]
for query in queries: for query in queries:
self.connection.action(query) self.connection.action(query)
@ -59,7 +66,7 @@ class InitialSchema(main_db.SchemaUpgrade):
'INSERT INTO downloads2 SELECT * FROM downloads;', 'INSERT INTO downloads2 SELECT * FROM downloads;',
'DROP TABLE IF EXISTS downloads;', 'DROP TABLE IF EXISTS downloads;',
'ALTER TABLE downloads2 RENAME TO downloads;', 'ALTER TABLE downloads2 RENAME TO downloads;',
'INSERT INTO db_version (db_version) VALUES (2);' 'INSERT INTO db_version (db_version) VALUES (2);',
] ]
for query in queries: for query in queries:
self.connection.action(query) self.connection.action(query)

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import platform import platform
import shutil import shutil

View file

@ -1,15 +1,54 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import requests import requests
from six import iteritems from six import iteritems
import core import core
from core import logger from core import logger
def api_check(r, params, rem_params):
try:
json_data = r.json()
except ValueError:
logger.error('Failed to get JSON data from response')
logger.debug('Response received')
raise
try:
json_data = json_data['data']
except KeyError:
logger.error('Failed to get data from JSON')
logger.debug('Response received: {}'.format(json_data))
raise
else:
json_data = json_data.get('data', json_data)
try:
optional_parameters = json_data['optionalParameters'].keys()
# Find excess parameters
excess_parameters = set(params).difference(optional_parameters)
logger.debug('Removing excess parameters: {}'.format(sorted(excess_parameters)))
rem_params.extend(excess_parameters)
return rem_params, True
except:
logger.error('Failed to identify optionalParameters')
return rem_params, False
def auto_fork(section, input_category): def auto_fork(section, input_category):
# auto-detect correct section # auto-detect correct section
# config settings # config settings
if core.FORK_SET: # keep using determined fork for multiple (manual) post-processing
logger.info('{section}:{category} fork already set to {fork}'.format
(section=section, category=input_category, fork=core.FORK_SET[0]))
return core.FORK_SET[0], core.FORK_SET[1]
cfg = dict(core.CFG[section][input_category]) cfg = dict(core.CFG[section][input_category])
@ -42,7 +81,8 @@ def auto_fork(section, input_category):
logger.info('Attempting to verify {category} fork'.format logger.info('Attempting to verify {category} fork'.format
(category=input_category)) (category=input_category))
url = '{protocol}{host}:{port}{root}/api/rootfolder'.format( url = '{protocol}{host}:{port}{root}/api/rootfolder'.format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root,
)
headers = {'X-Api-Key': apikey} headers = {'X-Api-Key': apikey}
try: try:
r = requests.get(url, headers=headers, stream=True, verify=False) r = requests.get(url, headers=headers, stream=True, verify=False)
@ -65,10 +105,12 @@ def auto_fork(section, input_category):
if apikey: if apikey:
url = '{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess'.format( url = '{protocol}{host}:{port}{root}/api/{apikey}/?cmd=help&subject=postprocess'.format(
protocol=protocol, host=host, port=port, root=web_root, apikey=apikey) protocol=protocol, host=host, port=port, root=web_root, apikey=apikey,
)
else: else:
url = '{protocol}{host}:{port}{root}/home/postprocess/'.format( url = '{protocol}{host}:{port}{root}/home/postprocess/'.format(
protocol=protocol, host=host, port=port, root=web_root) protocol=protocol, host=host, port=port, root=web_root,
)
# attempting to auto-detect fork # attempting to auto-detect fork
try: try:
@ -88,27 +130,17 @@ def auto_fork(section, input_category):
             r = []
 
         if r and r.ok:
             if apikey:
-                try:
-                    json_data = r.json()
-                except ValueError:
-                    logger.error('Failed to get JSON data from response')
-                    logger.debug('Response received')
-                    raise
-
-                try:
-                    json_data = json_data['data']
-                except KeyError:
-                    logger.error('Failed to get data from JSON')
-                    logger.debug('Response received: {}'.format(json_data))
-                    raise
-                else:
-                    json_data = json_data.get('data', json_data)
-
-                optional_parameters = json_data['optionalParameters'].keys()
-                # Find excess parameters
-                excess_parameters = set(params).difference(optional_parameters)
-                logger.debug('Removing excess parameters: {}'.format(sorted(excess_parameters)))
-                rem_params.extend(excess_parameters)
+                rem_params, found = api_check(r, params, rem_params)
+                if not found:  # try different api set for SickGear.
+                    url = '{protocol}{host}:{port}{root}/api/{apikey}/?cmd=postprocess&help=1'.format(
+                        protocol=protocol, host=host, port=port, root=web_root, apikey=apikey,
+                    )
+                    try:
+                        r = s.get(url, auth=(username, password), verify=False)
+                    except requests.ConnectionError:
+                        logger.info('Could not connect to {section}:{category} to perform auto-fork detection!'.format
+                                    (section=section, category=input_category))
+                    rem_params, found = api_check(r, params, rem_params)
             else:
                 # Find excess parameters
                 rem_params.extend(
@ -140,4 +172,5 @@ def auto_fork(section, input_category):
logger.info('{section}:{category} fork set to {fork}'.format logger.info('{section}:{category} fork set to {fork}'.format
(section=section, category=input_category, fork=fork[0])) (section=section, category=input_category, fork=fork[0]))
core.FORK_SET = fork
return fork[0], fork[1] return fork[0], fork[1]
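
A quick sketch of what the extracted api_check() helper above returns for a fork that only advertises some of the probed parameters; FakeResponse stands in for the requests response (only .json() is exercised), and api_check is assumed to be imported from this forks module:

class FakeResponse(object):
    def json(self):
        return {'data': {'optionalParameters': {'force': None, 'delete_on': None}}}

params = {'path': None, 'force': None, 'delete_on': None, 'is_priority': None}
rem_params, found = api_check(FakeResponse(), params, rem_params=[])
print(found)               # True
print(sorted(rem_params))  # ['is_priority', 'path'] - parameters the fork does not advertise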

View file

@ -1,12 +1,17 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import requests import requests
class GitHub(object): class GitHub(object):
""" """Simple api wrapper for the Github API v3."""
Simple api wrapper for the Github API v3.
"""
def __init__(self, github_repo_user, github_repo, branch='master'): def __init__(self, github_repo_user, github_repo, branch='master'):
@ -15,16 +20,14 @@ class GitHub(object):
self.branch = branch self.branch = branch
def _access_api(self, path, params=None): def _access_api(self, path, params=None):
""" """Access API at given an API path and optional parameters."""
Access the API at the path given and with the optional params given.
"""
url = 'https://api.github.com/{path}'.format(path='/'.join(path)) url = 'https://api.github.com/{path}'.format(path='/'.join(path))
data = requests.get(url, params=params, verify=False) data = requests.get(url, params=params, verify=False)
return data.json() if data.ok else [] return data.json() if data.ok else []
def commits(self): def commits(self):
""" """
Uses the API to get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD. Get the 100 most recent commits from the specified user/repo/branch, starting from HEAD.
user: The github username of the person whose repo you're querying user: The github username of the person whose repo you're querying
repo: The repo name to query repo: The repo name to query
@ -39,7 +42,7 @@ class GitHub(object):
def compare(self, base, head, per_page=1): def compare(self, base, head, per_page=1):
""" """
Uses the API to get a list of compares between base and head. Get compares between base and head.
user: The github username of the person whose repo you're querying user: The github username of the person whose repo you're querying
repo: The repo name to query repo: The repo name to query

View file

@ -1,11 +1,19 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import logging import logging
import os import os
import sys import sys
import threading import threading
import core import core
import functools
# number of log files to keep # number of log files to keep
NUM_LOGS = 3 NUM_LOGS = 3
@ -85,9 +93,9 @@ class NTMRotatingLogHandler(object):
console.setFormatter(DispatchingFormatter( console.setFormatter(DispatchingFormatter(
{'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'), {'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'), 'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
'db': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S') 'db': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
}, },
logging.Formatter('%(message)s'), )) logging.Formatter('%(message)s')))
# add the handler to the root logger # add the handler to the root logger
logging.getLogger('nzbtomedia').addHandler(console) logging.getLogger('nzbtomedia').addHandler(console)
@ -111,10 +119,7 @@ class NTMRotatingLogHandler(object):
self.close_log(old_handler) self.close_log(old_handler)
def _config_handler(self): def _config_handler(self):
""" """Configure a file handler to log at file_name and return it."""
Configure a file handler to log at file_name and return it.
"""
file_handler = logging.FileHandler(self.log_file_path, encoding='utf-8') file_handler = logging.FileHandler(self.log_file_path, encoding='utf-8')
file_handler.setLevel(DB) file_handler.setLevel(DB)
@ -122,29 +127,29 @@ class NTMRotatingLogHandler(object):
file_handler.setFormatter(DispatchingFormatter( file_handler.setFormatter(DispatchingFormatter(
{'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'), {'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'), 'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
'db': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S') 'db': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
}, },
logging.Formatter('%(message)s'), )) logging.Formatter('%(message)s')))
return file_handler return file_handler
def _log_file_name(self, i): def _log_file_name(self, i):
""" """
Returns a numbered log file name depending on i. If i==0 it just uses logName, if not it appends Return a numbered log file name depending on i.
it to the extension (blah.log.3 for i == 3)
If i==0 it just uses logName, if not it appends it to the extension
e.g. (blah.log.3 for i == 3)
i: Log number to use i: Log number to use

""" """
return self.log_file_path + ('.{0}'.format(i) if i else '') return self.log_file_path + ('.{0}'.format(i) if i else '')
def _num_logs(self): def _num_logs(self):
""" """
Scans the log folder and figures out how many log files there are already on disk Scan the log folder and figure out how many log files there are already on disk.
Returns: The number of the last used file (eg. mylog.log.3 would return 3). If there are no logs it returns -1 Returns: The number of the last used file (eg. mylog.log.3 would return 3). If there are no logs it returns -1
""" """
cur_log = 0 cur_log = 0
while os.path.isfile(self._log_file_name(cur_log)): while os.path.isfile(self._log_file_name(cur_log)):
cur_log += 1 cur_log += 1
@ -202,9 +207,8 @@ class NTMRotatingLogHandler(object):
ntm_logger = logging.getLogger('nzbtomedia') ntm_logger = logging.getLogger('nzbtomedia')
pp_logger = logging.getLogger('postprocess') pp_logger = logging.getLogger('postprocess')
db_logger = logging.getLogger('db') db_logger = logging.getLogger('db')
setattr(pp_logger, 'postprocess', lambda *args: pp_logger.log(POSTPROCESS, *args)) pp_logger.postprocess = functools.partial(pp_logger.log, POSTPROCESS)
setattr(db_logger, 'db', lambda *args: db_logger.log(DB, *args)) db_logger.db = functools.partial(db_logger.log, DB)
try: try:
if log_level == DEBUG: if log_level == DEBUG:
if core.LOG_DEBUG == 1: if core.LOG_DEBUG == 1:
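The setattr/lambda pair above is replaced with functools.partial, which pre-binds the custom level to Logger.log. A standalone sketch of the same pattern; the level number here is illustrative, not the value nzbToMedia actually uses.

import functools
import logging

POSTPROCESS = 21  # example custom level, for illustration only
logging.addLevelName(POSTPROCESS, 'POSTPROCESS')

pp_logger = logging.getLogger('postprocess')
# partial() fixes the first argument of Logger.log, so the bound callable
# behaves like pp_logger.log(POSTPROCESS, msg, *args) on every call.
pp_logger.postprocess = functools.partial(pp_logger.log, POSTPROCESS)

logging.basicConfig(level=POSTPROCESS)
pp_logger.postprocess('finished processing %s', 'example.mkv')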

View file

@ -1,19 +1,52 @@
# coding=utf-8 # coding=utf-8
from __future__ import print_function from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import re import re
import sqlite3 import sqlite3
import time import time
from six import text_type from six import text_type, PY2
import core import core
from core import logger from core import logger
if PY2:
class Row(sqlite3.Row, object):
"""
Row factory that uses Byte Strings for keys.
The sqlite3.Row in Python 2 does not support unicode keys.
This overrides __getitem__ to attempt to encode the key to bytes first.
"""
def __getitem__(self, item):
"""
Get an item from the row by index or key.
:param item: Index or Key of item to return.
:return: An item from the sqlite3.Row.
"""
try:
# sqlite3.Row column names should be Bytes in Python 2
item = item.encode()
except AttributeError:
pass # assume item is a numeric index
return super(Row, self).__getitem__(item)
else:
from sqlite3 import Row
def db_filename(filename='nzbtomedia.db', suffix=None): def db_filename(filename='nzbtomedia.db', suffix=None):
""" """
Return the correct location of the database file.
@param filename: The sqlite database filename to use. If not specified, @param filename: The sqlite database filename to use. If not specified,
will be made to be nzbtomedia.db will be made to be nzbtomedia.db
@param suffix: The suffix to append to the filename. A '.' will be added @param suffix: The suffix to append to the filename. A '.' will be added
@ -30,10 +63,7 @@ class DBConnection(object):
self.filename = filename self.filename = filename
self.connection = sqlite3.connect(db_filename(filename), 20) self.connection = sqlite3.connect(db_filename(filename), 20)
if row_type == 'dict': self.connection.row_factory = Row
self.connection.row_factory = self._dict_factory
else:
self.connection.row_factory = sqlite3.Row
def check_db_version(self): def check_db_version(self):
result = None result = None
@ -183,9 +213,9 @@ class DBConnection(object):
'WHERE {conditions}'.format( 'WHERE {conditions}'.format(
table=table_name, table=table_name,
params=', '.join(gen_params(value_dict)), params=', '.join(gen_params(value_dict)),
conditions=' AND '.join(gen_params(key_dict)) conditions=' AND '.join(gen_params(key_dict)),
), ),
items items,
) )
if self.connection.total_changes == changes_before: if self.connection.total_changes == changes_before:
@ -194,9 +224,9 @@ class DBConnection(object):
'VALUES ({values})'.format( 'VALUES ({values})'.format(
table=table_name, table=table_name,
columns=', '.join(map(text_type, value_dict.keys())), columns=', '.join(map(text_type, value_dict.keys())),
values=', '.join(['?'] * len(value_dict.values())) values=', '.join(['?'] * len(value_dict.values())),
), ),
list(value_dict.values()) list(value_dict.values()),
) )
def table_info(self, table_name): def table_info(self, table_name):
@ -207,13 +237,6 @@ class DBConnection(object):
for column in cursor for column in cursor
} }
# http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
def _dict_factory(self, cursor, row):
return {
col[0]: row[idx]
for idx, col in enumerate(cursor.description)
}
def sanity_check_database(connection, sanity_check): def sanity_check_database(connection, sanity_check):
sanity_check(connection).check() sanity_check(connection).check()
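A self-contained sketch of what the Python 2 Row shim above buys: unicode column keys work on both interpreters, whereas plain sqlite3.Row on Python 2 does not accept them (per the docstring above). The in-memory table and column names are made up for the demo.

import sqlite3

from six import PY2

if PY2:
    class Row(sqlite3.Row, object):
        """Row factory that encodes unicode keys to bytes before lookup."""

        def __getitem__(self, item):
            try:
                item = item.encode()   # sqlite3.Row keys must be byte strings on Python 2
            except AttributeError:
                pass                   # numeric index, use as-is
            return super(Row, self).__getitem__(item)
else:
    from sqlite3 import Row

conn = sqlite3.connect(':memory:')
conn.row_factory = Row
conn.execute('CREATE TABLE downloads (input_name TEXT)')
conn.execute('INSERT INTO downloads VALUES (?)', ('Example.Torrent',))
row = conn.execute('SELECT input_name FROM downloads').fetchone()
print(row['input_name'])   # key access works under both Python 2 and Python 3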

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import core import core

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import requests import requests

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import core import core
from core.plugins.downloaders.torrent.utils import create_torrent_class from core.plugins.downloaders.torrent.utils import create_torrent_class

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from synchronousdeluge.client import DelugeClient from synchronousdeluge.client import DelugeClient
import core import core

View file

@ -1,4 +1,10 @@
from __future__ import absolute_import from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from qbittorrent import Client as qBittorrentClient from qbittorrent import Client as qBittorrentClient
@ -14,7 +20,7 @@ def configure_client():
password = core.QBITTORRENT_PASSWORD password = core.QBITTORRENT_PASSWORD
logger.debug( logger.debug(
'Connecting to {0}: http://{1}:{2}'.format(agent, host, port) 'Connecting to {0}: http://{1}:{2}'.format(agent, host, port),
) )
client = qBittorrentClient('http://{0}:{1}/'.format(host, port)) client = qBittorrentClient('http://{0}:{1}/'.format(host, port))
try: try:

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from transmissionrpc.client import Client as TransmissionClient from transmissionrpc.client import Client as TransmissionClient
import core import core

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import time import time
import core import core

View file

@ -1,4 +1,9 @@
from __future__ import absolute_import from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from utorrent.client import UTorrentClient from utorrent.client import UTorrentClient

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import requests import requests
import core import core

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from babelfish import Language from babelfish import Language
import subliminal import subliminal

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import platform import platform
import re import re
@ -25,7 +32,7 @@ media_list = [r'\.s\d{2}e\d{2}\.', r'\.1080[pi]\.', r'\.720p\.', r'\.576[pi]', r
r'\.internal\.', r'\bac3\b', r'\.ntsc\.', r'\.pal\.', r'\.secam\.', r'\bdivx\b', r'\bxvid\b'] r'\.internal\.', r'\bac3\b', r'\.ntsc\.', r'\.pal\.', r'\.secam\.', r'\bdivx\b', r'\bxvid\b']
media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE) media_pattern = re.compile('|'.join(media_list), flags=re.IGNORECASE)
garbage_name = re.compile(r'^[a-zA-Z0-9]*$') garbage_name = re.compile(r'^[a-zA-Z0-9]*$')
char_replace = [[r'(\w)1\.(\w)', r'\1i\2'] char_replace = [[r'(\w)1\.(\w)', r'\1i\2'],
] ]
@ -121,7 +128,7 @@ def reverse_filename(filename, dirname, name):
def rename_script(dirname): def rename_script(dirname):
rename_file = '' rename_file = ''
for directory, directories, files in os.walk(dirname): for directory, _, files in os.walk(dirname):
for file in files: for file in files:
if re.search(r'(rename\S*\.(sh|bat)$)', file, re.IGNORECASE): if re.search(r'(rename\S*\.(sh|bat)$)', file, re.IGNORECASE):
rename_file = os.path.join(directory, file) rename_file = os.path.join(directory, file)

View file

@ -1,8 +1,17 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import errno import errno
import json import json
import sys
import os import os
import time
import platform import platform
import re import re
import shutil import shutil
@ -66,6 +75,9 @@ def is_video_good(videofile, status):
def zip_out(file, img, bitbucket): def zip_out(file, img, bitbucket):
procin = None procin = None
if os.path.isfile(file):
cmd = ['cat', file]
else:
cmd = [core.SEVENZIP, '-so', 'e', img, file] cmd = [core.SEVENZIP, '-so', 'e', img, file]
try: try:
procin = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) procin = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
@ -97,12 +109,11 @@ def get_video_details(videofile, img=None, bitbucket=None):
result = proc.returncode result = proc.returncode
video_details = json.loads(out.decode()) video_details = json.loads(out.decode())
except Exception: except Exception:
pass try: # try this again without -show_error in case of ffmpeg limitation
if not video_details:
try:
command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', videofile] command = [core.FFPROBE, '-v', 'quiet', print_format, 'json', '-show_format', '-show_streams', videofile]
print_cmd(command)
if img: if img:
procin = zip_out(file, img) procin = zip_out(file, img, bitbucket)
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stdin=procin.stdout) proc = subprocess.Popen(command, stdout=subprocess.PIPE, stdin=procin.stdout)
procin.stdout.close() procin.stdout.close()
else: else:
@ -115,6 +126,21 @@ def get_video_details(videofile, img=None, bitbucket=None):
return video_details, result return video_details, result
def check_vid_file(video_details, result):
if result != 0:
return False
if video_details.get('error'):
return False
if not video_details.get('streams'):
return False
video_streams = [item for item in video_details['streams'] if item['codec_type'] == 'video']
audio_streams = [item for item in video_details['streams'] if item['codec_type'] == 'audio']
if len(video_streams) > 0 and len(audio_streams) > 0:
return True
else:
return False
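As a rough sketch of how the new check_vid_file() helper is meant to be used (mirroring the filtering added to build_commands() below); the stream paths are placeholders and bitbucket would normally be the usual devnull handle.

# Hypothetical pre-filter: keep only clips that ffprobe reports as having
# both a video and an audio stream; menu/extras clips get dropped.
candidates = ['/bd/BDMV/STREAM/00000.mts', '/bd/BDMV/STREAM/00001.mts']
playable = []
for vid in candidates:
    details, ret = get_video_details(vid, img=None, bitbucket=None)
    if check_vid_file(details, ret):
        playable.append(vid)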
def build_commands(file, new_dir, movie_name, bitbucket): def build_commands(file, new_dir, movie_name, bitbucket):
if isinstance(file, string_types): if isinstance(file, string_types):
input_file = file input_file = file
@ -132,9 +158,18 @@ def build_commands(file, new_dir, movie_name, bitbucket):
name = re.sub('([ ._=:-]+[cC][dD][0-9])', '', name) name = re.sub('([ ._=:-]+[cC][dD][0-9])', '', name)
if ext == core.VEXTENSION and new_dir == directory: # we need to change the name to prevent overwriting itself. if ext == core.VEXTENSION and new_dir == directory: # we need to change the name to prevent overwriting itself.
core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext' core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext'
new_file = file
else: else:
img, data = next(iteritems(file)) img, data = next(iteritems(file))
name = data['name'] name = data['name']
new_file = []
rem_vid = []
for vid in data['files']:
video_details, result = get_video_details(vid, img, bitbucket)
if not check_vid_file(video_details, result):  # let's not transcode menu or other clips that don't have both audio and video.
rem_vid.append(vid)
data['files'] = [f for f in data['files'] if f not in rem_vid]
new_file = {img: {'name': data['name'], 'files': data['files']}}
video_details, result = get_video_details(data['files'][0], img, bitbucket) video_details, result = get_video_details(data['files'][0], img, bitbucket)
input_file = '-' input_file = '-'
file = '-' file = '-'
@ -458,6 +493,8 @@ def build_commands(file, new_dir, movie_name, bitbucket):
if core.OUTPUTFASTSTART: if core.OUTPUTFASTSTART:
other_cmd.extend(['-movflags', '+faststart']) other_cmd.extend(['-movflags', '+faststart'])
if core.OTHEROPTS:
other_cmd.extend(core.OTHEROPTS)
command = [core.FFMPEG, '-loglevel', 'warning'] command = [core.FFMPEG, '-loglevel', 'warning']
@ -511,7 +548,7 @@ def build_commands(file, new_dir, movie_name, bitbucket):
command.append(newfile_path) command.append(newfile_path)
if platform.system() != 'Windows': if platform.system() != 'Windows':
command = core.NICENESS + command command = core.NICENESS + command
return command return command, new_file
def get_subs(file): def get_subs(file):
@ -519,7 +556,7 @@ def get_subs(file):
sub_ext = ['.srt', '.sub', '.idx'] sub_ext = ['.srt', '.sub', '.idx']
name = os.path.splitext(os.path.split(file)[1])[0] name = os.path.splitext(os.path.split(file)[1])[0]
path = os.path.split(file)[0] path = os.path.split(file)[0]
for directory, directories, filenames in os.walk(path): for directory, _, filenames in os.walk(path):
for filename in filenames: for filename in filenames:
filepaths.extend([os.path.join(directory, filename)]) filepaths.extend([os.path.join(directory, filename)])
subfiles = [item for item in filepaths if os.path.splitext(item)[1] in sub_ext and name in item] subfiles = [item for item in filepaths if os.path.splitext(item)[1] in sub_ext and name in item]
@ -570,7 +607,7 @@ def extract_subs(file, newfile_path, bitbucket):
result = 1 # set result to failed in case call fails. result = 1 # set result to failed in case call fails.
try: try:
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket)
proc.communicate() out, err = proc.communicate()
result = proc.returncode result = proc.returncode
except Exception: except Exception:
logger.error('Extracting subtitle has failed') logger.error('Extracting subtitle has failed')
@ -590,6 +627,7 @@ def process_list(it, new_dir, bitbucket):
new_list = [] new_list = []
combine = [] combine = []
vts_path = None vts_path = None
mts_path = None
success = True success = True
for item in it: for item in it:
ext = os.path.splitext(item)[1].lower() ext = os.path.splitext(item)[1].lower()
@ -605,6 +643,14 @@ def process_list(it, new_dir, bitbucket):
except Exception: except Exception:
vts_path = os.path.split(item)[0] vts_path = os.path.split(item)[0]
rem_list.append(item) rem_list.append(item)
elif re.match('.+BDMV[/\\]SOURCE[/\\][0-9]+[0-9].[Mm][Tt][Ss]', item) and '.mts' not in core.IGNOREEXTENSIONS:
logger.debug('Found MTS image file: {0}'.format(item), 'TRANSCODER')
if not mts_path:
try:
mts_path = re.match('(.+BDMV[/\\]SOURCE)', item).groups()[0]
except Exception:
mts_path = os.path.split(item)[0]
rem_list.append(item)
elif re.match('.+VIDEO_TS.', item) or re.match('.+VTS_[0-9][0-9]_[0-9].', item): elif re.match('.+VIDEO_TS.', item) or re.match('.+VTS_[0-9][0-9]_[0-9].', item):
rem_list.append(item) rem_list.append(item)
elif core.CONCAT and re.match('.+[cC][dD][0-9].', item): elif core.CONCAT and re.match('.+[cC][dD][0-9].', item):
@ -614,6 +660,8 @@ def process_list(it, new_dir, bitbucket):
continue continue
if vts_path: if vts_path:
new_list.extend(combine_vts(vts_path)) new_list.extend(combine_vts(vts_path))
if mts_path:
new_list.extend(combine_mts(mts_path))
if combine: if combine:
new_list.extend(combine_cd(combine)) new_list.extend(combine_cd(combine))
for file in new_list: for file in new_list:
@ -632,17 +680,53 @@ def process_list(it, new_dir, bitbucket):
return it, rem_list, new_list, success return it, rem_list, new_list, success
def mount_iso(item, new_dir, bitbucket):  # Currently only supports mounting on Linux, when permissions allow.
if platform.system() == 'Windows':
logger.error('No mounting options available under Windows for image file {0}'.format(item), 'TRANSCODER')
return []
mount_point = os.path.join(os.path.dirname(os.path.abspath(item)), 'temp')
make_dir(mount_point)
cmd = ['mount', '-o', 'loop', item, mount_point]
print_cmd(cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
out, err = proc.communicate()
core.MOUNTED = mount_point # Allows us to verify this has been done and then cleanup.
for root, dirs, files in os.walk(mount_point):
for file in files:
full_path = os.path.join(root, file)
if re.match('.+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', full_path) and '.vob' not in core.IGNOREEXTENSIONS:
logger.debug('Found VIDEO_TS image file: {0}'.format(full_path), 'TRANSCODER')
try:
vts_path = re.match('(.+VIDEO_TS)', full_path).groups()[0]
except Exception:
vts_path = os.path.split(full_path)[0]
return combine_vts(vts_path)
elif re.match('.+BDMV[/\\]STREAM[/\\][0-9]+[0-9].[Mm]', full_path) and '.mts' not in core.IGNOREEXTENSIONS:
logger.debug('Found MTS image file: {0}'.format(full_path), 'TRANSCODER')
try:
mts_path = re.match('(.+BDMV[/\\]STREAM)', full_path).groups()[0]
except Exception:
mts_path = os.path.split(full_path)[0]
return combine_mts(mts_path)
logger.error('No VIDEO_TS or BDMV/SOURCE folder found in image file {0}'.format(mount_point), 'TRANSCODER')
return ['failure'] # If we got here, nothing matched our criteria
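mount_iso() records the mount point in core.MOUNTED so the caller can lazy-unmount it later, which transcode_directory() does further down. A hedged sketch of that pairing; the paths are made up and bitbucket stands for the usual devnull handle.

import os
import subprocess

# Hypothetical caller: mount, use the returned VOB/MTS file lists, then clean up.
files = mount_iso('/downloads/example.iso', '/downloads/out', bitbucket)
try:
    pass  # ... feed the returned file lists to the transcoder here ...
finally:
    if core.MOUNTED:
        subprocess.call(['umount', '-l', core.MOUNTED])  # lazy unmount, as below
        os.rmdir(core.MOUNTED)
        core.MOUNTED = None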
def rip_iso(item, new_dir, bitbucket): def rip_iso(item, new_dir, bitbucket):
new_files = [] new_files = []
failure_dir = 'failure' failure_dir = 'failure'
# Mount the ISO in your OS and call combineVTS. # Mount the ISO in your OS and call combineVTS.
if not core.SEVENZIP: if not core.SEVENZIP:
logger.error('No 7zip installed. Can\'t extract image file {0}'.format(item), 'TRANSCODER') logger.debug('No 7zip installed. Attempting to mount image file {0}'.format(item), 'TRANSCODER')
try:
new_files = mount_iso(item, new_dir, bitbucket) # Currently only works for Linux.
except Exception:
logger.error('Failed to mount and extract from image file {0}'.format(item), 'TRANSCODER')
new_files = [failure_dir] new_files = [failure_dir]
return new_files return new_files
cmd = [core.SEVENZIP, 'l', item] cmd = [core.SEVENZIP, 'l', item]
try: try:
logger.debug('Attempting to extract .vob from image file {0}'.format(item), 'TRANSCODER') logger.debug('Attempting to extract .vob or .mts from image file {0}'.format(item), 'TRANSCODER')
print_cmd(cmd) print_cmd(cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
out, err = proc.communicate() out, err = proc.communicate()
@ -656,6 +740,7 @@ def rip_iso(item, new_dir, bitbucket):
if file_match if file_match
] ]
combined = [] combined = []
if file_list: # handle DVD
for n in range(99): for n in range(99):
concat = [] concat = []
m = 1 m = 1
@ -675,12 +760,38 @@ def rip_iso(item, new_dir, bitbucket):
name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1 name=os.path.splitext(os.path.split(item)[1])[0], x=n + 1
) )
new_files.append({item: {'name': name, 'files': concat}}) new_files.append({item: {'name': name, 'files': concat}})
else:  # check Blu-ray for BDMV/STREAM/XXXX.MTS
mts_list_gen = (
re.match(r'.+(BDMV[/\\]STREAM[/\\][0-9]+[0-9].[Mm]).', line)
for line in out.decode().splitlines()
)
mts_list = [
file_match.groups()[0]
for file_match in mts_list_gen
if file_match
]
if sys.version_info[0] == 2: # Python2 sorting
mts_list.sort(key=lambda f: int(filter(str.isdigit, f))) # Sort all .mts files in numerical order
else: # Python3 sorting
mts_list.sort(key=lambda f: int(''.join(filter(str.isdigit, f))))
n = 0
for mts_name in mts_list:
concat = []
n += 1
concat.append(mts_name)
if core.CONCAT: if core.CONCAT:
combined.extend(concat)
continue
name = '{name}.cd{x}'.format(
name=os.path.splitext(os.path.split(item)[1])[0], x=n
)
new_files.append({item: {'name': name, 'files': concat}})
if core.CONCAT and combined:
name = os.path.splitext(os.path.split(item)[1])[0] name = os.path.splitext(os.path.split(item)[1])[0]
new_files.append({item: {'name': name, 'files': combined}}) new_files.append({item: {'name': name, 'files': combined}})
if not new_files: if not new_files:
logger.error('No VIDEO_TS folder found in image file {0}'.format(item), 'TRANSCODER') logger.error('No VIDEO_TS or BDMV/SOURCE folder found in image file. Attempting to mount and scan {0}'.format(item), 'TRANSCODER')
new_files = [failure_dir] new_files = mount_iso(item, new_dir, bitbucket)
except Exception: except Exception:
logger.error('Failed to extract from image file {0}'.format(item), 'TRANSCODER') logger.error('Failed to extract from image file {0}'.format(item), 'TRANSCODER')
new_files = [failure_dir] new_files = [failure_dir]
@ -689,31 +800,69 @@ def rip_iso(item, new_dir, bitbucket):
def combine_vts(vts_path): def combine_vts(vts_path):
new_files = [] new_files = []
combined = '' combined = []
name = re.match(r'(.+)[/\\]VIDEO_TS', vts_path).groups()[0]
if os.path.basename(name) == 'temp':
name = os.path.basename(os.path.dirname(name))
else:
name = os.path.basename(name)
for n in range(99): for n in range(99):
concat = '' concat = []
m = 1 m = 1
while True: while True:
vts_name = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m) vts_name = 'VTS_{0:02d}_{1:d}.VOB'.format(n + 1, m)
if os.path.isfile(os.path.join(vts_path, vts_name)): if os.path.isfile(os.path.join(vts_path, vts_name)):
concat += '{file}|'.format(file=os.path.join(vts_path, vts_name)) concat.append(os.path.join(vts_path, vts_name))
m += 1 m += 1
else: else:
break break
if not concat: if not concat:
break break
if core.CONCAT: if core.CONCAT:
combined += '{files}|'.format(files=concat) combined.extend(concat)
continue continue
new_files.append('concat:{0}'.format(concat[:-1])) name = '{name}.cd{x}'.format(
name=name, x=n + 1
)
new_files.append({vts_path: {'name': name, 'files': concat}})
if core.CONCAT: if core.CONCAT:
new_files.append('concat:{0}'.format(combined[:-1])) new_files.append({vts_path: {'name': name, 'files': combined}})
return new_files
def combine_mts(mts_path):
new_files = []
combined = []
name = re.match(r'(.+)[/\\]BDMV[/\\]STREAM', mts_path).groups()[0]
if os.path.basename(name) == 'temp':
name = os.path.basename(os.path.dirname(name))
else:
name = os.path.basename(name)
n = 0
mts_list = [f for f in os.listdir(mts_path) if os.path.isfile(os.path.join(mts_path, f))]
if sys.version_info[0] == 2: # Python2 sorting
mts_list.sort(key=lambda f: int(filter(str.isdigit, f)))
else: # Python3 sorting
mts_list.sort(key=lambda f: int(''.join(filter(str.isdigit, f))))
for mts_name in mts_list:  # mts_list was sorted numerically above, so files are processed in order
concat = []
concat.append(os.path.join(mts_path, mts_name))
if core.CONCAT:
combined.extend(concat)
continue
name = '{name}.cd{x}'.format(
name=name, x=n + 1
)
new_files.append({mts_path: {'name': name, 'files': concat}})
n += 1
if core.CONCAT:
new_files.append({mts_path: {'name': name, 'files': combined}})
return new_files return new_files
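The numeric sort used above (and in rip_iso()) needs two forms because filter() returns a string on Python 2 but an iterator on Python 3. A quick standalone check of the key function:

import sys

mts_list = ['00010.mts', '00002.mts', '00001.mts']

if sys.version_info[0] == 2:
    # Python 2: filter() on a str returns a str, e.g. '00010.mts' -> '00010'
    mts_list.sort(key=lambda f: int(filter(str.isdigit, f)))
else:
    # Python 3: filter() returns an iterator, so join it back into a string first
    mts_list.sort(key=lambda f: int(''.join(filter(str.isdigit, f))))

print(mts_list)  # ['00001.mts', '00002.mts', '00010.mts']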
def combine_cd(combine): def combine_cd(combine):
new_files = [] new_files = []
for item in set([re.match('(.+)[cC][dD][0-9].', item).groups()[0] for item in combine]): for item in {re.match('(.+)[cC][dD][0-9].', item).groups()[0] for item in combine}:
concat = '' concat = ''
for n in range(99): for n in range(99):
files = [file for file in combine if files = [file for file in combine if
@ -761,7 +910,7 @@ def transcode_directory(dir_name):
for file in file_list: for file in file_list:
if isinstance(file, string_types) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS: if isinstance(file, string_types) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS:
continue continue
command = build_commands(file, new_dir, movie_name, bitbucket) command, file = build_commands(file, new_dir, movie_name, bitbucket)
newfile_path = command[-1] newfile_path = command[-1]
# transcoding files may remove the original file, so make sure to extract subtitles first # transcoding files may remove the original file, so make sure to extract subtitles first
@ -781,16 +930,19 @@ def transcode_directory(dir_name):
result = 1 # set result to failed in case call fails. result = 1 # set result to failed in case call fails.
try: try:
if isinstance(file, string_types): if isinstance(file, string_types):
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket) proc = subprocess.Popen(command, stdout=bitbucket, stderr=subprocess.PIPE)
else: else:
img, data = next(iteritems(file)) img, data = next(iteritems(file))
proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket, stdin=subprocess.PIPE) proc = subprocess.Popen(command, stdout=bitbucket, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
for vob in data['files']: for vob in data['files']:
procin = zip_out(vob, img, bitbucket) procin = zip_out(vob, img, bitbucket)
if procin: if procin:
logger.debug('Feeding in file: {0} to Transcoder'.format(vob))
shutil.copyfileobj(procin.stdout, proc.stdin) shutil.copyfileobj(procin.stdout, proc.stdin)
procin.stdout.close() procin.stdout.close()
proc.communicate() out, err = proc.communicate()
if err:
logger.error('Transcoder returned an error: {0}'.format(err))
result = proc.returncode result = proc.returncode
except Exception: except Exception:
logger.error('Transcoding of video {0} has failed'.format(newfile_path)) logger.error('Transcoding of video {0} has failed'.format(newfile_path))
@ -819,6 +971,15 @@ def transcode_directory(dir_name):
logger.error('Transcoding of video to {0} failed with result {1}'.format(newfile_path, result)) logger.error('Transcoding of video to {0} failed with result {1}'.format(newfile_path, result))
# this will be 0 (successful) if all are successful, else will return a positive integer for failure. # this will be 0 (successful) if all are successful, else will return a positive integer for failure.
final_result = final_result + result final_result = final_result + result
if core.MOUNTED: # In case we mounted an .iso file, unmount here.
time.sleep(5) # play it safe and avoid failing to unmount.
cmd = ['umount', '-l', core.MOUNTED]
print_cmd(cmd)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=bitbucket)
out, err = proc.communicate()
time.sleep(5)
os.rmdir(core.MOUNTED)
core.MOUNTED = None
if final_result == 0 and not core.DUPLICATE: if final_result == 0 and not core.DUPLICATE:
for file in rem_list: for file in rem_list:
try: try:

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
from subprocess import Popen from subprocess import Popen
@ -7,33 +14,46 @@ import core
from core import logger, transcoder from core import logger, transcoder
from core.plugins.subtitles import import_subs from core.plugins.subtitles import import_subs
from core.utils import list_media_files, remove_dir from core.utils import list_media_files, remove_dir
from core.auto_process.common import (
ProcessResult,
)
def external_script(output_destination, torrent_name, torrent_label, settings): def external_script(output_destination, torrent_name, torrent_label, settings):
final_result = 0 # start at 0. final_result = 0 # start at 0.
num_files = 0 num_files = 0
core.USER_SCRIPT_MEDIAEXTENSIONS = settings.get('user_script_mediaExtensions', '')
try: try:
core.USER_SCRIPT_MEDIAEXTENSIONS = settings['user_script_mediaExtensions'].lower()
if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str): if isinstance(core.USER_SCRIPT_MEDIAEXTENSIONS, str):
core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.split(',') core.USER_SCRIPT_MEDIAEXTENSIONS = core.USER_SCRIPT_MEDIAEXTENSIONS.lower().split(',')
except Exception: except Exception:
logger.error('user_script_mediaExtensions could not be set', 'USERSCRIPT')
core.USER_SCRIPT_MEDIAEXTENSIONS = [] core.USER_SCRIPT_MEDIAEXTENSIONS = []
core.USER_SCRIPT = settings.get('user_script_path') core.USER_SCRIPT = settings.get('user_script_path', '')
if not core.USER_SCRIPT or core.USER_SCRIPT == 'None': # do nothing and return success. if not core.USER_SCRIPT or core.USER_SCRIPT == 'None':
return [0, ''] # do nothing and return success. This allows the user an option to Link files only and not run a script.
return ProcessResult(
status_code=0,
message='No user script defined',
)
core.USER_SCRIPT_PARAM = settings.get('user_script_param', '')
try: try:
core.USER_SCRIPT_PARAM = settings['user_script_param']
if isinstance(core.USER_SCRIPT_PARAM, str): if isinstance(core.USER_SCRIPT_PARAM, str):
core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',') core.USER_SCRIPT_PARAM = core.USER_SCRIPT_PARAM.split(',')
except Exception: except Exception:
logger.error('user_script_params could not be set', 'USERSCRIPT')
core.USER_SCRIPT_PARAM = [] core.USER_SCRIPT_PARAM = []
core.USER_SCRIPT_SUCCESSCODES = settings.get('user_script_successCodes', 0)
try: try:
core.USER_SCRIPT_SUCCESSCODES = settings['user_script_successCodes']
if isinstance(core.USER_SCRIPT_SUCCESSCODES, str): if isinstance(core.USER_SCRIPT_SUCCESSCODES, str):
core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',') core.USER_SCRIPT_SUCCESSCODES = core.USER_SCRIPT_SUCCESSCODES.split(',')
except Exception: except Exception:
logger.error('user_script_successCodes could not be set', 'USERSCRIPT')
core.USER_SCRIPT_SUCCESSCODES = 0 core.USER_SCRIPT_SUCCESSCODES = 0
core.USER_SCRIPT_CLEAN = int(settings.get('user_script_clean', 1)) core.USER_SCRIPT_CLEAN = int(settings.get('user_script_clean', 1))
@ -47,11 +67,12 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
logger.info('Corrupt video file found {0}. Deleting.'.format(video), 'USERSCRIPT') logger.info('Corrupt video file found {0}. Deleting.'.format(video), 'USERSCRIPT')
os.unlink(video) os.unlink(video)
for dirpath, dirnames, filenames in os.walk(output_destination): for dirpath, _, filenames in os.walk(output_destination):
for file in filenames: for file in filenames:
file_path = core.os.path.join(dirpath, file) file_path = core.os.path.join(dirpath, file)
file_name, file_extension = os.path.splitext(file) file_name, file_extension = os.path.splitext(file)
logger.debug('Checking file {0} to see if this should be processed.'.format(file), 'USERSCRIPT')
if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or 'all' in core.USER_SCRIPT_MEDIAEXTENSIONS: if file_extension in core.USER_SCRIPT_MEDIAEXTENSIONS or 'all' in core.USER_SCRIPT_MEDIAEXTENSIONS:
num_files += 1 num_files += 1
@ -102,7 +123,7 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
final_result += result final_result += result
num_files_new = 0 num_files_new = 0
for dirpath, dirnames, filenames in os.walk(output_destination): for _, _, filenames in os.walk(output_destination):
for file in filenames: for file in filenames:
file_name, file_extension = os.path.splitext(file) file_name, file_extension = os.path.splitext(file)
@ -115,4 +136,7 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0: elif core.USER_SCRIPT_CLEAN == int(1) and num_files_new != 0:
logger.info('{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.'.format( logger.info('{0} files were processed, but {1} still remain. outputDirectory will not be cleaned.'.format(
num_files, num_files_new)) num_files, num_files_new))
return [final_result, ''] return ProcessResult(
status_code=final_result,
message='User Script Completed',
)
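external_script() now returns a ProcessResult instead of a bare [status, ''] list. A hedged sketch of caller-side handling, assuming ProcessResult exposes the status_code and message fields it is constructed with; the path, torrent name and settings are placeholders.

# Hypothetical caller; with an empty settings dict the function returns
# early with status_code 0 and message 'No user script defined'.
result = external_script('/downloads/Example.Torrent', 'Example.Torrent', 'movies', settings={})
if result.status_code == 0:
    logger.info('User script result: {0}'.format(result.message))
else:
    logger.error('User script failed with code {0}'.format(result.status_code))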

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import requests import requests
from core.utils import shutil_custom from core.utils import shutil_custom

View file

@ -1,3 +1,9 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os.path import os.path

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import datetime import datetime
from six import text_type from six import text_type

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
from six import text_type from six import text_type
@ -68,14 +75,14 @@ def convert_to_ascii(input_name, dir_name):
if 'NZBOP_SCRIPTDIR' in os.environ: if 'NZBOP_SCRIPTDIR' in os.environ:
print('[NZB] DIRECTORY={0}'.format(dir_name)) print('[NZB] DIRECTORY={0}'.format(dir_name))
for dirname, dirnames, filenames in os.walk(dir_name, topdown=False): for dirname, dirnames, _ in os.walk(dir_name, topdown=False):
for subdirname in dirnames: for subdirname in dirnames:
encoded, subdirname2 = char_replace(subdirname) encoded, subdirname2 = char_replace(subdirname)
if encoded: if encoded:
logger.info('Renaming directory to: {0}.'.format(subdirname2), 'ENCODER') logger.info('Renaming directory to: {0}.'.format(subdirname2), 'ENCODER')
os.rename(os.path.join(dirname, subdirname), os.path.join(dirname, subdirname2)) os.rename(os.path.join(dirname, subdirname), os.path.join(dirname, subdirname2))
for dirname, dirnames, filenames in os.walk(dir_name): for dirname, _, filenames in os.walk(dir_name):
for filename in filenames: for filename in filenames:
encoded, filename2 = char_replace(filename) encoded, filename2 = char_replace(filename)
if encoded: if encoded:

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import re import re
import shutil import shutil
@ -88,7 +95,7 @@ def is_min_size(input_name, min_size):
def is_archive_file(filename): def is_archive_file(filename):
"""Check if the filename is allowed for the Archive""" """Check if the filename is allowed for the Archive."""
for regext in core.COMPRESSED_CONTAINER: for regext in core.COMPRESSED_CONTAINER:
if regext.search(filename): if regext.search(filename):
return regext.split(filename)[0] return regext.split(filename)[0]

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import re import re
@ -5,7 +12,6 @@ import guessit
import requests import requests
from six import text_type from six import text_type
import core
from core import logger from core import logger
from core.utils.naming import sanitize_name from core.utils.naming import sanitize_name
@ -17,14 +23,14 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
# find imdbid in dirName # find imdbid in dirName
logger.info('Searching folder and file names for imdbID ...') logger.info('Searching folder and file names for imdbID ...')
m = re.search(r'(tt\d{7})', dir_name + input_name) m = re.search(r'\b(tt\d{7,8})\b', dir_name + input_name)
if m: if m:
imdbid = m.group(1) imdbid = m.group(1)
logger.info('Found imdbID [{0}]'.format(imdbid)) logger.info('Found imdbID [{0}]'.format(imdbid))
return imdbid return imdbid
if os.path.isdir(dir_name): if os.path.isdir(dir_name):
for file in os.listdir(text_type(dir_name)): for file in os.listdir(text_type(dir_name)):
m = re.search(r'(tt\d{7})', file) m = re.search(r'\b(tt\d{7,8})\b', file)
if m: if m:
imdbid = m.group(1) imdbid = m.group(1)
logger.info('Found imdbID [{0}] via file name'.format(imdbid)) logger.info('Found imdbID [{0}] via file name'.format(imdbid))
@ -90,15 +96,6 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
def category_search(input_directory, input_name, input_category, root, categories): def category_search(input_directory, input_name, input_category, root, categories):
tordir = False tordir = False
try:
input_name = input_name.encode(core.SYS_ENCODING)
except Exception:
pass
try:
input_directory = input_directory.encode(core.SYS_ENCODING)
except Exception:
pass
if input_directory is None: # =Nothing to process here. if input_directory is None: # =Nothing to process here.
return input_directory, input_name, input_category, root return input_directory, input_name, input_category, root
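The widened pattern above accepts both 7- and 8-digit IMDb ids and adds word boundaries so the id must stand alone in the name. A quick check; the release names are made up.

import re

pattern = re.compile(r'\b(tt\d{7,8})\b')

print(pattern.search('Some.Movie.2019.tt0111161.1080p').group(1))   # tt0111161  (7 digits)
print(pattern.search('Another.Movie.tt10872600.2160p').group(1))    # tt10872600 (8 digits)
print(pattern.search('No.Id.Here.2019'))                            # None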

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import shutil import shutil

View file

@ -1,9 +1,17 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import re import re
import core
def sanitize_name(name): def sanitize_name(name):
""" """
Remove bad chars from the filename.
>>> sanitize_name('a/b/c') >>> sanitize_name('a/b/c')
'a-b-c' 'a-b-c'
>>> sanitize_name('abc') >>> sanitize_name('abc')
@ -13,29 +21,22 @@ def sanitize_name(name):
>>> sanitize_name('.a.b..') >>> sanitize_name('.a.b..')
'a.b' 'a.b'
""" """
# remove bad chars from the filename
name = re.sub(r'[\\/*]', '-', name) name = re.sub(r'[\\/*]', '-', name)
name = re.sub(r'[:\'<>|?]', '', name) name = re.sub(r'[:\'<>|?]', '', name)
# remove leading/trailing periods and spaces # remove leading/trailing periods and spaces
name = name.strip(' .') name = name.strip(' .')
try:
name = name.encode(core.SYS_ENCODING)
except Exception:
pass
return name return name
def clean_file_name(filename): def clean_file_name(filename):
"""Cleans up nzb name by removing any . and _ """
characters, along with any trailing hyphens. Clean up nzb name by removing any . and _ characters and trailing hyphens.
Is basically equivalent to replacing all _ and . with a Is basically equivalent to replacing all _ and . with a
space, but handles decimal numbers in string, for example: space, but handles decimal numbers in string, for example:
""" """
filename = re.sub(r'(\D)\.(?!\s)(\D)', r'\1 \2', filename) filename = re.sub(r'(\D)\.(?!\s)(\D)', r'\1 \2', filename)
filename = re.sub(r'(\d)\.(\d{4})', r'\1 \2', filename) # if it ends in a year then don't keep the dot filename = re.sub(r'(\d)\.(\d{4})', r'\1 \2', filename) # if it ends in a year then don't keep the dot
filename = re.sub(r'(\D)\.(?!\s)', r'\1 ', filename) filename = re.sub(r'(\D)\.(?!\s)', r'\1 ', filename)

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import socket import socket
import struct import struct
import time import time

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import core import core
@ -120,7 +127,11 @@ def parse_qbittorrent(args):
except Exception: except Exception:
input_directory = '' input_directory = ''
try: try:
input_name = cur_input[1].replace('\'', '') input_name = cur_input[1]
if input_name[0] == '\'':
input_name = input_name[1:]
if input_name[-1] == '\'':
input_name = input_name[:-1]
except Exception: except Exception:
input_name = '' input_name = ''
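The old replace('\'', '') stripped every single quote from the qBittorrent torrent name, mangling titles that contain apostrophes; the new code only trims a wrapping quote pair. A small illustration with made-up names:

def strip_wrapping_quotes(name):
    # Mirrors the new behaviour above: trim only a leading/trailing quote.
    if name and name[0] == "'":
        name = name[1:]
    if name and name[-1] == "'":
        name = name[:-1]
    return name

print("'Example.Torrent'".replace("'", ''))      # old and new both yield Example.Torrent
print("Don't.Stop.2019".replace("'", ''))        # old: Dont.Stop.2019 (apostrophe lost)
print(strip_wrapping_quotes("Don't.Stop.2019"))  # new: Don't.Stop.2019 (apostrophe kept)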
try: try:

View file

@ -1,3 +1,9 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from functools import partial from functools import partial
import os import os
@ -67,14 +73,14 @@ def remote_dir(path):
def get_dir_size(input_path): def get_dir_size(input_path):
prepend = partial(os.path.join, input_path) prepend = partial(os.path.join, input_path)
return sum([ return sum(
(os.path.getsize(f) if os.path.isfile(f) else get_dir_size(f)) (os.path.getsize(f) if os.path.isfile(f) else get_dir_size(f))
for f in map(prepend, os.listdir(text_type(input_path))) for f in map(prepend, os.listdir(text_type(input_path)))
]) )
def remove_empty_folders(path, remove_root=True): def remove_empty_folders(path, remove_root=True):
"""Function to remove empty folders""" """Remove empty folders."""
if not os.path.isdir(path): if not os.path.isdir(path):
return return

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import socket import socket
import subprocess import subprocess
@ -47,7 +54,7 @@ class PosixProcess(object):
self.lasterror = False self.lasterror = False
return self.lasterror return self.lasterror
except socket.error as e: except socket.error as e:
if 'Address already in use' in e: if 'Address already in use' in str(e):
self.lasterror = True self.lasterror = True
return self.lasterror return self.lasterror
except AttributeError: except AttributeError:

View file

@ -1,3 +1,10 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from functools import partial from functools import partial
import shutil import shutil
from six import PY2 from six import PY2

View file

@ -2,6 +2,13 @@
# Author: Nic Wolfe <nic@wolfeden.ca> # Author: Nic Wolfe <nic@wolfeden.ca>
# Modified by: echel0n # Modified by: echel0n
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import platform import platform
import re import re
@ -19,9 +26,7 @@ from core import github_api as github, logger
class CheckVersion(object): class CheckVersion(object):
""" """Version checker that runs in a thread with the SB scheduler."""
Version check class meant to run as a thread object with the SB scheduler.
"""
def __init__(self): def __init__(self):
self.install_type = self.find_install_type() self.install_type = self.find_install_type()
@ -40,14 +45,13 @@ class CheckVersion(object):
def find_install_type(self): def find_install_type(self):
""" """
Determines how this copy of SB was installed. Determine how this copy of SB was installed.
returns: type of installation. Possible values are: returns: type of installation. Possible values are:
'win': any compiled windows build 'win': any compiled windows build
'git': running from source using git 'git': running from source using git
'source': running from source without git 'source': running from source without git
""" """
# check if we're a windows build # check if we're a windows build
if os.path.isdir(os.path.join(core.APP_ROOT, u'.git')): if os.path.isdir(os.path.join(core.APP_ROOT, u'.git')):
install_type = 'git' install_type = 'git'
@ -58,13 +62,12 @@ class CheckVersion(object):
def check_for_new_version(self, force=False): def check_for_new_version(self, force=False):
""" """
Checks the internet for a newer version. Check the internet for a newer version.
returns: bool, True for new version or False for no new version. returns: bool, True for new version or False for no new version.
force: if true the VERSION_NOTIFY setting will be ignored and a check will be forced force: if true the VERSION_NOTIFY setting will be ignored and a check will be forced
""" """
if not core.VERSION_NOTIFY and not force: if not core.VERSION_NOTIFY and not force:
logger.log(u'Version checking is disabled, not checking for the newest version') logger.log(u'Version checking is disabled, not checking for the newest version')
return False return False
@ -211,13 +214,12 @@ class GitUpdateManager(UpdateManager):
def _find_installed_version(self): def _find_installed_version(self):
""" """
Attempts to find the currently installed version of Sick Beard. Attempt to find the currently installed version of Sick Beard.
Uses git show to get commit version. Uses git show to get commit version.
Returns: True for success or False for failure Returns: True for success or False for failure
""" """
output, err, exit_status = self._run_git(self._git_path, 'rev-parse HEAD') # @UnusedVariable output, err, exit_status = self._run_git(self._git_path, 'rev-parse HEAD') # @UnusedVariable
if exit_status == 0 and output: if exit_status == 0 and output:
@ -244,10 +246,12 @@ class GitUpdateManager(UpdateManager):
def _check_github_for_update(self): def _check_github_for_update(self):
""" """
Uses git commands to check if there is a newer version that the provided Check Github for a new version.
commit hash. If there is a newer version it sets _num_commits_behind.
"""
Uses git commands to check if there is a newer version than
the provided commit hash. If there is a newer version it
sets _num_commits_behind.
"""
self._newest_commit_hash = None self._newest_commit_hash = None
self._num_commits_behind = 0 self._num_commits_behind = 0
self._num_commits_ahead = 0 self._num_commits_ahead = 0
@ -324,10 +328,11 @@ class GitUpdateManager(UpdateManager):
def update(self): def update(self):
""" """
Calls git pull origin <branch> in order to update Sick Beard. Returns a bool depending Check git for a new version.
on the call's success.
"""
Calls git pull origin <branch> in order to update Sick Beard.
Returns a bool depending on the call's success.
"""
output, err, exit_status = self._run_git(self._git_path, 'pull origin {branch}'.format(branch=self.branch)) # @UnusedVariable output, err, exit_status = self._run_git(self._git_path, 'pull origin {branch}'.format(branch=self.branch)) # @UnusedVariable
if exit_status == 0: if exit_status == 0:
@ -382,12 +387,14 @@ class SourceUpdateManager(UpdateManager):
def _check_github_for_update(self): def _check_github_for_update(self):
""" """
Uses pygithub to ask github if there is a newer version that the provided Check Github for a new version.
commit hash. If there is a newer version it sets Sick Beard's version text.
Uses pygithub to ask github if there is a newer version than
the provided commit hash. If there is a newer version it sets
Sick Beard's version text.
commit_hash: hash that we're checking against commit_hash: hash that we're checking against
""" """
self._num_commits_behind = 0 self._num_commits_behind = 0
self._newest_commit_hash = None self._newest_commit_hash = None
@ -435,9 +442,7 @@ class SourceUpdateManager(UpdateManager):
return return
def update(self): def update(self):
""" """Download and install latest source tarball from github."""
Downloads the latest source tarball from github and installs it over the existing version.
"""
tar_download_url = 'https://github.com/{org}/{repo}/tarball/{branch}'.format( tar_download_url = 'https://github.com/{org}/{repo}/tarball/{branch}'.format(
org=self.github_repo_user, repo=self.github_repo, branch=self.branch) org=self.github_repo_user, repo=self.github_repo, branch=self.branch)
version_path = os.path.join(core.APP_ROOT, u'version.txt') version_path = os.path.join(core.APP_ROOT, u'version.txt')
@ -489,7 +494,7 @@ class SourceUpdateManager(UpdateManager):
# walk temp folder and move files to main folder # walk temp folder and move files to main folder
logger.log(u'Moving files from {source} to {destination}'.format logger.log(u'Moving files from {source} to {destination}'.format
(source=content_dir, destination=core.APP_ROOT)) (source=content_dir, destination=core.APP_ROOT))
for dirname, dirnames, filenames in os.walk(content_dir): # @UnusedVariable for dirname, _, filenames in os.walk(content_dir): # @UnusedVariable
dirname = dirname[len(content_dir) + 1:] dirname = dirname[len(content_dir) + 1:]
for curfile in filenames: for curfile in filenames:
old_path = os.path.join(content_dir, dirname, curfile) old_path = os.path.join(content_dir, dirname, curfile)

eol.py
View file

@ -1,5 +1,12 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import datetime import datetime
import sys import sys
import warnings import warnings
@ -157,7 +164,7 @@ def print_statuses(show_expired=False):
major=python_version[0], major=python_version[0],
minor=python_version[1], minor=python_version[1],
remaining=days_left, remaining=days_left,
) ),
) )
if not show_expired: if not show_expired:
return return
@ -171,7 +178,7 @@ def print_statuses(show_expired=False):
major=python_version[0], major=python_version[0],
minor=python_version[1], minor=python_version[1],
remaining=-days_left, remaining=-days_left,
) ),
) )

View file

@ -1,4 +1,11 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import site import site
import sys import sys

View file

@ -1,4 +1,11 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import shutil import shutil
import os import os
import time import time

View file

@ -1,4 +1,11 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import libs import libs
__all__ = ['completed'] __all__ = ['completed']

View file

@ -1,7 +1,8 @@
# coding=utf-8 # coding=utf-8
"""A synchronous implementation of the Deluge RPC protocol """
based on gevent-deluge by Christopher Rosell. A synchronous implementation of the Deluge RPC protocol.
Based on gevent-deluge by Christopher Rosell:
https://github.com/chrippa/gevent-deluge https://github.com/chrippa/gevent-deluge
Example usage: Example usage:
@ -15,9 +16,16 @@ Example usage:
download_location = client.core.get_config_value("download_location").get() download_location = client.core.get_config_value("download_location").get()
""" """
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from .exceptions import DelugeRPCError from .exceptions import DelugeRPCError
__title__ = "synchronous-deluge" __title__ = 'synchronous-deluge'
__version__ = "0.1" __version__ = '0.1'
__author__ = "Christian Dale" __author__ = 'Christian Dale'

View file

@ -1,4 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os import os
import platform import platform
from collections import defaultdict from collections import defaultdict
@ -9,7 +17,7 @@ from .exceptions import DelugeRPCError
from .protocol import DelugeRPCRequest, DelugeRPCResponse from .protocol import DelugeRPCRequest, DelugeRPCResponse
from .transfer import DelugeTransfer from .transfer import DelugeTransfer
__all__ = ["DelugeClient"] __all__ = ['DelugeClient']
RPC_RESPONSE = 1 RPC_RESPONSE = 1
RPC_ERROR = 2 RPC_ERROR = 2
@ -18,41 +26,41 @@ RPC_EVENT = 3
class DelugeClient(object): class DelugeClient(object):
def __init__(self): def __init__(self):
"""A deluge client session.""" """Create a deluge client session."""
self.transfer = DelugeTransfer() self.transfer = DelugeTransfer()
self.modules = [] self.modules = []
self._request_counter = 0 self._request_counter = 0
def _get_local_auth(self): def _get_local_auth(self):
username = password = "" username = password = ''
if platform.system() in ('Windows', 'Microsoft'): if platform.system() in ('Windows', 'Microsoft'):
app_data_path = os.environ.get("APPDATA") app_data_path = os.environ.get('APPDATA')
if not app_data_path: if not app_data_path:
from six.moves import winreg from six.moves import winreg
hkey = winreg.OpenKey( hkey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER, winreg.HKEY_CURRENT_USER,
"Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders", 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders',
) )
app_data_reg = winreg.QueryValueEx(hkey, "AppData") app_data_reg = winreg.QueryValueEx(hkey, 'AppData')
app_data_path = app_data_reg[0] app_data_path = app_data_reg[0]
winreg.CloseKey(hkey) winreg.CloseKey(hkey)
auth_file = os.path.join(app_data_path, "deluge", "auth") auth_file = os.path.join(app_data_path, 'deluge', 'auth')
else: else:
from xdg.BaseDirectory import save_config_path from xdg.BaseDirectory import save_config_path
try: try:
auth_file = os.path.join(save_config_path("deluge"), "auth") auth_file = os.path.join(save_config_path('deluge'), 'auth')
except OSError: except OSError:
return username, password return username, password
if os.path.exists(auth_file): if os.path.exists(auth_file):
for line in open(auth_file): for line in open(auth_file):
if line.startswith("#"): if line.startswith('#'):
# This is a comment line # This is a comment line
continue continue
line = line.strip() line = line.strip()
try: try:
lsplit = line.split(":") lsplit = line.split(':')
except Exception: except Exception:
continue continue
@ -63,37 +71,38 @@ class DelugeClient(object):
else: else:
continue continue
if username == "localclient": if username == 'localclient':
return username, password return username, password
return "", "" return '', ''
def _create_module_method(self, module, method): def _create_module_method(self, module, method):
fullname = "{0}.{1}".format(module, method) fullname = '{0}.{1}'.format(module, method)
def func(obj, *args, **kwargs): def func(obj, *args, **kwargs):
return self.remote_call(fullname, *args, **kwargs) return self.remote_call(fullname, *args, **kwargs)
func.__name__ = method func.__name__ = str(method)
return func return func
def _introspect(self): def _introspect(self):
def splitter(value): def splitter(value):
return value.split(".") return value.split('.')
self.modules = [] self.modules = []
methods = self.remote_call("daemon.get_method_list").get() methods = self.remote_call('daemon.get_method_list').get()
methods = (x.decode() for x in methods)
methodmap = defaultdict(dict) methodmap = defaultdict(dict)
for module, method in imap(splitter, methods): for module, method in imap(splitter, methods):
methodmap[module][method] = self._create_module_method(module, method) methodmap[module][method] = self._create_module_method(module, method)
for module, methods in methodmap.items(): for module, methods in methodmap.items():
clsname = "DelugeModule{0}".format(module.capitalize()) clsname = 'DelugeModule{0}'.format(module.capitalize())
cls = type(clsname, (), methods) cls = type(str(clsname), (), methods)
setattr(self, module, cls()) setattr(self, str(module), cls())
self.modules.append(module) self.modules.append(module)
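The str() casts above matter because this module now imports unicode_literals: on Python 2, type() refuses a unicode class name, and the daemon returns method names as bytes that need decoding. A minimal illustration of the type() part:

from __future__ import unicode_literals

clsname = 'DelugeModuleCore'   # a unicode literal on Python 2 because of unicode_literals
# Without the cast, Python 2 raises:
#   TypeError: type() argument 1 must be string, not unicode
# On Python 3, str() is a no-op here.
cls = type(str(clsname), (), {})
print(cls.__name__)            # DelugeModuleCore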
def remote_call(self, method, *args, **kwargs): def remote_call(self, method, *args, **kwargs):
@ -133,24 +142,23 @@ class DelugeClient(object):
self._request_counter += 1 self._request_counter += 1
return response return response
def connect(self, host="127.0.0.1", port=58846, username="", password=""): def connect(self, host='127.0.0.1', port=58846, username='', password=''):
"""Connects to a daemon process. """Connect to a daemon process.
:param host: str, the hostname of the daemon :param host: str, the hostname of the daemon
:param port: int, the port of the daemon :param port: int, the port of the daemon
:param username: str, the username to login with :param username: str, the username to login with
:param password: str, the password to login with :param password: str, the password to login with
""" """
# Connect transport # Connect transport
self.transfer.connect((host, port)) self.transfer.connect((host, port))
# Attempt to fetch local auth info if needed # Attempt to fetch local auth info if needed
if not username and host in ("127.0.0.1", "localhost"): if not username and host in ('127.0.0.1', 'localhost'):
username, password = self._get_local_auth() username, password = self._get_local_auth()
# Authenticate # Authenticate
self.remote_call("daemon.login", username, password).get() self.remote_call('daemon.login', username, password).get()
# Introspect available methods # Introspect available methods
self._introspect() self._introspect()
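A minimal usage sketch for the client above. The import path and the daemon method name are assumptions for illustration, not part of this changeset; after _introspect() runs, each daemon module becomes an attribute whose calls go through remote_call and block on .get().

from synchronousdeluge.client import DelugeClient  # import path assumed from this repo's libs layout

client = DelugeClient()
client.connect(host='127.0.0.1', port=58846)  # falls back to the local auth file when no credentials are given
# 'core.get_torrents_status' is a standard Deluge daemon method; any introspected method works the same way.
torrents = client.core.get_torrents_status({}, ['name', 'progress']).get()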

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
class DelugeRPCError(Exception): class DelugeRPCError(Exception):
def __init__(self, name, msg, traceback): def __init__(self, name, msg, traceback):
@ -8,4 +15,4 @@ class DelugeRPCError(Exception):
self.traceback = traceback self.traceback = traceback
def __str__(self): def __str__(self):
return "{0}: {1}: {2}".format(self.__class__.__name__, self.name, self.msg) return '{0}: {1}: {2}'.format(self.__class__.__name__, self.name, self.msg)

View file

@ -1,5 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
class DelugeRPCRequest(object): class DelugeRPCRequest(object):
def __init__(self, request_id, method, *args, **kwargs): def __init__(self, request_id, method, *args, **kwargs):

View file

@ -1,4 +1,12 @@
# coding=utf-8 # coding=utf-8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import socket import socket
import ssl import ssl
import struct import struct
@ -6,7 +14,7 @@ import zlib
import rencode import rencode
__all__ = ["DelugeTransfer"] __all__ = ['DelugeTransfer']
class DelugeTransfer(object): class DelugeTransfer(object):
@ -33,7 +41,7 @@ class DelugeTransfer(object):
payload = zlib.compress(rencode.dumps(data)) payload = zlib.compress(rencode.dumps(data))
self.conn.sendall(payload) self.conn.sendall(payload)
buf = b"" buf = b''
while True: while True:
data = self.conn.recv(1024) data = self.conn.recv(1024)
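The transfer layer shown here frames each message as a zlib-compressed rencode payload and reads replies in 1024-byte chunks. A rough sketch of the receive side under that assumption (the real class also wraps the socket in SSL and is not limited to one message per call):

import zlib
import rencode

def read_message(sock):
    # Accumulate chunks until the compressed payload decompresses cleanly.
    buf = b''
    while True:
        chunk = sock.recv(1024)
        if not chunk:
            raise ConnectionError('connection closed by daemon')
        buf += chunk
        try:
            return rencode.loads(zlib.decompress(buf))
        except zlib.error:
            continue  # payload not complete yet; keep reading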

View file

@ -1,4 +1,12 @@
# coding=utf8 # coding=utf8
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import re import re
from six import StringIO, iteritems from six import StringIO, iteritems
@ -31,8 +39,7 @@ class UTorrentClient(object):
# TODO refresh token, when necessary # TODO refresh token, when necessary
def _make_opener(self, realm, base_url, username, password): def _make_opener(self, realm, base_url, username, password):
"""uTorrent API need HTTP Basic Auth and cookie support for token verify.""" """HTTP Basic Auth and cookie support for token verification."""
auth_handler = HTTPBasicAuthHandler() auth_handler = HTTPBasicAuthHandler()
auth_handler.add_password(realm=realm, auth_handler.add_password(realm=realm,
uri=base_url, uri=base_url,
@ -52,7 +59,7 @@ class UTorrentClient(object):
url = urljoin(self.base_url, 'token.html') url = urljoin(self.base_url, 'token.html')
response = self.opener.open(url) response = self.opener.open(url)
token_re = "<div id='token' style='display:none;'>([^<>]+)</div>" token_re = "<div id='token' style='display:none;'>([^<>]+)</div>"
match = re.search(token_re, response.read()) match = re.search(token_re, str(response.read()))
return match.group(1) return match.group(1)
def list(self, **kwargs): def list(self, **kwargs):
@ -61,25 +68,25 @@ class UTorrentClient(object):
return self._action(params) return self._action(params)
def start(self, *hashes): def start(self, *hashes):
params = [('action', 'start'), ] params = [('action', 'start')]
for cur_hash in hashes: for cur_hash in hashes:
params.append(('hash', cur_hash)) params.append(('hash', cur_hash))
return self._action(params) return self._action(params)
def stop(self, *hashes): def stop(self, *hashes):
params = [('action', 'stop'), ] params = [('action', 'stop')]
for cur_hash in hashes: for cur_hash in hashes:
params.append(('hash', cur_hash)) params.append(('hash', cur_hash))
return self._action(params) return self._action(params)
def pause(self, *hashes): def pause(self, *hashes):
params = [('action', 'pause'), ] params = [('action', 'pause')]
for cur_hash in hashes: for cur_hash in hashes:
params.append(('hash', cur_hash)) params.append(('hash', cur_hash))
return self._action(params) return self._action(params)
def forcestart(self, *hashes): def forcestart(self, *hashes):
params = [('action', 'forcestart'), ] params = [('action', 'forcestart')]
for cur_hash in hashes: for cur_hash in hashes:
params.append(('hash', cur_hash)) params.append(('hash', cur_hash))
return self._action(params) return self._action(params)
@ -95,8 +102,8 @@ class UTorrentClient(object):
def setprops(self, cur_hash, **kvpairs): def setprops(self, cur_hash, **kvpairs):
params = [('action', 'setprops'), ('hash', cur_hash)] params = [('action', 'setprops'), ('hash', cur_hash)]
for k, v in iteritems(kvpairs): for k, v in iteritems(kvpairs):
params.append(("s", k)) params.append(('s', k))
params.append(("v", v)) params.append(('v', v))
return self._action(params) return self._action(params)
@ -125,13 +132,13 @@ class UTorrentClient(object):
self._action(params) self._action(params)
def remove(self, *hashes): def remove(self, *hashes):
params = [('action', 'remove'), ] params = [('action', 'remove')]
for cur_hash in hashes: for cur_hash in hashes:
params.append(('hash', cur_hash)) params.append(('hash', cur_hash))
return self._action(params) return self._action(params)
def removedata(self, *hashes): def removedata(self, *hashes):
params = [('action', 'removedata'), ] params = [('action', 'removedata')]
for cur_hash in hashes: for cur_hash in hashes:
params.append(('hash', cur_hash)) params.append(('hash', cur_hash))
return self._action(params) return self._action(params)
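A minimal usage sketch for the uTorrent client above. The constructor signature (base_url, username, password) is assumed from how the authenticated opener is built, and the info-hash is a placeholder:

client = UTorrentClient('http://localhost:8080/gui/', 'admin', 'secret')  # signature assumed
client.list()  # queries the Web UI for the current torrent list
placeholder_hash = '0123456789ABCDEF0123456789ABCDEF01234567'
client.stop(placeholder_hash)
client.removedata(placeholder_hash)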

View file

@ -1,6 +1,13 @@
# coding=utf-8 # coding=utf-8
# code copied from http://www.doughellmann.com/PyMOTW/urllib2/ # code copied from http://www.doughellmann.com/PyMOTW/urllib2/
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import itertools import itertools
import mimetypes import mimetypes
from email.generator import _make_boundary as choose_boundary from email.generator import _make_boundary as choose_boundary

View file

@ -1,4 +1,11 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import subprocess import subprocess
import sys import sys
import os import os

View file

@ -4,7 +4,7 @@
############################################################################## ##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones. # Post-Process to CouchPotato.
# #
# This script sends the download to your automated media management servers. # This script sends the download to your automated media management servers.
# #
@ -113,7 +113,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
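The new niceness format accepts either a bare integer or a comma-separated command. A hypothetical helper (not part of this changeset) showing how such a value could be turned into a command prefix:

def build_nice_prefix(niceness):
    """Turn a 'niceness' option value into an argv prefix (illustrative only)."""
    if not niceness:
        return []
    if ',' in str(niceness):
        # e.g. 'nice,-n0' -> ['nice', '-n0'] (passed through as-is)
        return str(niceness).split(',')
    # e.g. '4' -> ['nice', '-n4'] (the documented default behaviour)
    return ['nice', '-n{0}'.format(int(niceness))]

# build_nice_prefix('nice,-n0') == ['nice', '-n0']
# build_nice_prefix(4) == ['nice', '-n4']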
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -202,7 +204,7 @@
# externalSubDir. set the directory where subs should be saved (if not the same directory as the video) # externalSubDir. set the directory where subs should be saved (if not the same directory as the video)
#externalSubDir= #externalSubDir=
# outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mp4-scene-release, MKV-SD). # outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mkv-bluray, mp4-scene-release, MKV-SD).
# #
# outputDefault. Loads default configs for the selected device. The remaining options below are ignored. # outputDefault. Loads default configs for the selected device. The remaining options below are ignored.
# If you want to use your own profile, set None and set the remaining options below. # If you want to use your own profile, set None and set the remaining options below.
@ -255,6 +257,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

View file

@ -4,7 +4,7 @@
############################################################################## ##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones. # Post-Process to Gamez.
# #
# This script sends the download to your automated media management servers. # This script sends the download to your automated media management servers.
# #
@ -69,7 +69,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -100,6 +102,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

View file

@ -83,7 +83,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -122,6 +124,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

nzbToLazyLibrarian.py Executable file
View file

@ -0,0 +1,126 @@
#!/usr/bin/env python
# coding=utf-8
#
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to LazyLibrarian.
#
# This script sends the download to your automated media management servers.
#
# NOTE: This script requires Python to be installed on your system.
##############################################################################
#
### OPTIONS ###
## General
# Auto Update nzbToMedia (0, 1).
#
# Set to 1 if you want nzbToMedia to automatically check for and update to the latest version
#auto_update=0
# Safe Mode protection of DestDir (0, 1).
#
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
#safe_mode=1
## LazyLibrarian
# LazyLibrarian script category.
#
# category that gets called for post-processing with LazyLibrarian.
#llCategory=books
# LazyLibrarian api key.
#llapikey=
# LazyLibrarian host.
#
# The IP address of your LazyLibrarian server, e.g. for the same system use localhost or 127.0.0.1
#llhost=localhost
# LazyLibrarian port.
#llport=5299
# LazyLibrarian uses ssl (0, 1).
#
# Set to 1 if using ssl, else set to 0.
#llssl=0
# LazyLibrarian web_root
#
# set this if using a reverse proxy.
#llweb_root=
# LazyLibrarian watch directory.
#
# set this to where your LazyLibrarian completed downloads are.
#llwatch_dir=
# LazyLibrarian and NZBGet are a different system (0, 1).
#
# Enable to replace local path with the path as per the mountPoints below.
#llremote_path=0
## Network
# Network Mount Points (Needed for remote path above)
#
# Enter Mount points as LocalPath,RemotePath and separate each pair with '|'
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
#mountPoints=
## Posix
# Niceness for external tasks Extractor and Transcoder.
#
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
# If entering an integer, e.g. 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma-separated list, e.g. 'niceness=nice,4', this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3).
#
# Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle.
#ionice_class=2
# ionice scheduling class data.
#
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
#ionice_classdata=4
## WakeOnLan
# use WOL (0, 1).
#
# set to 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) on the host and port specified.
#wolwake=0
# WOL MAC
#
# enter the mac address of the system to be woken.
#wolmac=00:01:2e:2D:64:e1
# Set the Host and Port of a server to verify system has woken.
#wolhost=192.168.1.37
#wolport=80
### NZBGET POST-PROCESSING SCRIPT ###
##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys
import nzbToMedia
section = 'LazyLibrarian'
result = nzbToMedia.main(sys.argv, section)
sys.exit(result)
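The ll* options above describe where the LazyLibrarian web interface lives. A hypothetical sketch of how they might be combined into a base API URL for post-processing calls; the '/api' path and parameter handling are assumptions, not taken from this changeset:

def lazylibrarian_base_url(host='localhost', port=5299, ssl=0, web_root=''):
    """Compose a LazyLibrarian base URL from the llhost/llport/llssl/llweb_root options (illustrative only)."""
    scheme = 'https' if int(ssl) else 'http'
    web_root = '/{0}'.format(web_root.strip('/')) if web_root else ''
    return '{0}://{1}:{2}{3}/api'.format(scheme, host, port, web_root)

# lazylibrarian_base_url() == 'http://localhost:5299/api'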

View file

@ -98,7 +98,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -187,7 +189,7 @@
# externalSubDir. set the directory where subs should be saved (if not the same directory as the video) # externalSubDir. set the directory where subs should be saved (if not the same directory as the video)
#externalSubDir = #externalSubDir =
# outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mp4-scene-release, MKV-SD). # outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mkv-bluray, mp4-scene-release, MKV-SD).
# #
# outputDefault. Loads default configs for the selected device. The remaining options below are ignored. # outputDefault. Loads default configs for the selected device. The remaining options below are ignored.
# If you want to use your own profile, set None and set the remaining options below. # If you want to use your own profile, set None and set the remaining options below.
@ -237,6 +239,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

View file

@ -4,7 +4,8 @@
############################################################################## ##############################################################################
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones. # Post-Process to CouchPotato, SickBeard, Sonarr, Mylar, Gamez, HeadPhones,
# LazyLibrarian, Radarr, Lidarr
# #
# This script sends the download to your automated media management servers. # This script sends the download to your automated media management servers.
# #
@ -410,6 +411,44 @@
# Enable to replace local path with the path as per the mountPoints below. # Enable to replace local path with the path as per the mountPoints below.
#gzremote_path=0 #gzremote_path=0
## LazyLibrarian
# LazyLibrarian script category.
#
# category that gets called for post-processing with LazyLibrarian.
#llCategory=books
# LazyLibrarian api key.
#llapikey=
# LazyLibrarian host.
#
# The IP address of your LazyLibrarian server, e.g. for the same system use localhost or 127.0.0.1
#llhost=localhost
# LazyLibrarian port.
#llport=5299
# LazyLibrarian uses ssl (0, 1).
#
# Set to 1 if using ssl, else set to 0.
#llssl=0
# LazyLibrarian web_root
#
# set this if using a reverse proxy.
#llweb_root=
# LazyLibrarian watch directory.
#
# set this to where your LazyLibrarian completed downloads are.
#llwatch_dir=
# LazyLibrarian and NZBGet are a different system (0, 1).
#
# Enable to replace local path with the path as per the mountPoints below.
#llremote_path=0
## Network ## Network
# Network Mount Points (Needed for remote path above) # Network Mount Points (Needed for remote path above)
@ -430,7 +469,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -519,7 +560,7 @@
# externalSubDir. set the directory where subs should be saved (if not the same directory as the video) # externalSubDir. set the directory where subs should be saved (if not the same directory as the video)
#externalSubDir= #externalSubDir=
# outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mp4-scene-release). # outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mkv-bluray, mp4-scene-release).
# #
# outputDefault. Loads default configs for the selected device. The remaining options below are ignored. # outputDefault. Loads default configs for the selected device. The remaining options below are ignored.
# If you want to use your own profile, set None and set the remaining options below. # If you want to use your own profile, set None and set the remaining options below.
@ -621,21 +662,26 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import print_function from __future__ import (
absolute_import,
import eol division,
eol.check() print_function,
unicode_literals,
import cleanup )
cleanup.clean(cleanup.FOLDER_STRUCTURE)
import datetime import datetime
import os import os
import sys import sys
import eol
import cleanup
eol.check()
cleanup.clean(cleanup.FOLDER_STRUCTURE)
import core import core
from core import logger, main_db from core import logger, main_db
from core.auto_process import comics, games, movies, music, tv from core.auto_process import comics, games, movies, music, tv, books
from core.auto_process.common import ProcessResult from core.auto_process.common import ProcessResult
from core.plugins.downloaders.nzb.utils import get_nzoid from core.plugins.downloaders.nzb.utils import get_nzoid
from core.plugins.plex import plex_update from core.plugins.plex import plex_update
@ -743,11 +789,11 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
) )
except Exception: except Exception:
logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format( logger.error('Remote Path {0} is not valid for {1}:{2} Please set this to either 0 to disable or 1 to enable!'.format(
core.get('remote_path'), section_name, input_category)) cfg.get('remote_path'), section_name, input_category))
input_name, input_directory = convert_to_ascii(input_name, input_directory) input_name, input_directory = convert_to_ascii(input_name, input_directory)
if extract == 1: if extract == 1 and not (status > 0 and core.NOEXTRACTFAILED):
logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory)) logger.debug('Checking for archives to extract in directory: {0}'.format(input_directory))
extract_files(input_directory) extract_files(input_directory)
@ -763,6 +809,8 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
result = comics.process(section_name, input_directory, input_name, status, client_agent, input_category) result = comics.process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == 'Gamez': elif section_name == 'Gamez':
result = games.process(section_name, input_directory, input_name, status, client_agent, input_category) result = games.process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == 'LazyLibrarian':
result = books.process(section_name, input_directory, input_name, status, client_agent, input_category)
elif section_name == 'UserScript': elif section_name == 'UserScript':
result = external_script(input_directory, input_name, input_category, section[usercat]) result = external_script(input_directory, input_name, input_category, section[usercat])
else: else:
@ -925,15 +973,7 @@ def main(args, section=None):
if client_agent and client_agent.lower() not in core.NZB_CLIENTS: if client_agent and client_agent.lower() not in core.NZB_CLIENTS:
continue continue
try:
dir_name = dir_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
input_name = os.path.basename(dir_name) input_name = os.path.basename(dir_name)
try:
input_name = input_name.encode(core.SYS_ENCODING)
except UnicodeError:
pass
results = process(dir_name, input_name, 0, client_agent=client_agent, results = process(dir_name, input_name, 0, client_agent=client_agent,
download_id=download_id or None, input_category=subsection) download_id=download_id or None, input_category=subsection)

View file

@ -74,7 +74,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -113,6 +115,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

View file

@ -103,7 +103,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -192,7 +194,7 @@
# externalSubDir. set the directory where subs should be saved (if not the same directory as the video) # externalSubDir. set the directory where subs should be saved (if not the same directory as the video)
#externalSubDir = #externalSubDir =
# outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mp4-scene-release, MKV-SD). # outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mkv-bluray, mp4-scene-release, MKV-SD).
# #
# outputDefault. Loads default configs for the selected device. The remaining options below are ignored. # outputDefault. Loads default configs for the selected device. The remaining options below are ignored.
# If you want to use your own profile, set None and set the remaining options below. # If you want to use your own profile, set None and set the remaining options below.
@ -242,6 +244,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

View file

@ -108,7 +108,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -197,7 +199,7 @@
# externalSubDir. set the directory where subs should be saved (if not the same directory as the video) # externalSubDir. set the directory where subs should be saved (if not the same directory as the video)
#externalSubDir = #externalSubDir =
# outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mp4-scene-release, MKV-SD). # outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mkv-bluray, mp4-scene-release, MKV-SD).
# #
# outputDefault. Loads default configs for the selected device. The remaining options below are ignored. # outputDefault. Loads default configs for the selected device. The remaining options below are ignored.
# If you want to use your own profile, set None and set the remaining options below. # If you want to use your own profile, set None and set the remaining options below.
@ -247,6 +249,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

View file

@ -114,7 +114,9 @@
# Niceness for external tasks Extractor and Transcoder. # Niceness for external tasks Extractor and Transcoder.
# #
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process). # Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10 # If entering an integer e.g 'niceness=4', this is added to the nice command and passed as 'nice -n4' (Default).
# If entering a comma separated list e.g. 'niceness=nice,4' this will be passed as 'nice 4' (Safer).
#niceness=nice,-n0
# ionice scheduling class (0, 1, 2, 3). # ionice scheduling class (0, 1, 2, 3).
# #
@ -203,7 +205,7 @@
# externalSubDir. set the directory where subs should be saved (if not the same directory as the video) # externalSubDir. set the directory where subs should be saved (if not the same directory as the video)
#externalSubDir= #externalSubDir=
# outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mp4-scene-release, MKV-SD). # outputDefault (None, iPad, iPad-1080p, iPad-720p, Apple-TV2, iPod, iPhone, PS3, xbox, Roku-1080p, Roku-720p, Roku-480p, mkv, mkv-bluray, mp4-scene-release, MKV-SD).
# #
# outputDefault. Loads default configs for the selected device. The remaining options below are ignored. # outputDefault. Loads default configs for the selected device. The remaining options below are ignored.
# If you want to use your own profile, set None and set the remaining options below. # If you want to use your own profile, set None and set the remaining options below.
@ -256,6 +258,13 @@
### NZBGET POST-PROCESSING SCRIPT ### ### NZBGET POST-PROCESSING SCRIPT ###
############################################################################## ##############################################################################
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import sys import sys
import nzbToMedia import nzbToMedia

View file

@ -1,6 +1,11 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- encoding: utf-8 -*- # -*- encoding: utf-8 -*-
from __future__ import absolute_import, print_function from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import io import io
import os.path import os.path
@ -11,14 +16,14 @@ from setuptools import setup
def read(*names, **kwargs): def read(*names, **kwargs):
with io.open( with io.open(
os.path.join(os.path.dirname(__file__), *names), os.path.join(os.path.dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8') encoding=kwargs.get('encoding', 'utf8'),
) as fh: ) as fh:
return fh.read() return fh.read()
setup( setup(
name='nzbToMedia', name='nzbToMedia',
version='12.0.10', version='12.1.00',
license='GPLv3', license='GPLv3',
description='Efficient on demand post processing', description='Efficient on demand post processing',
long_description=""" long_description="""
@ -53,6 +58,9 @@ setup(
author_email='fock_wulf@hotmail.com', author_email='fock_wulf@hotmail.com',
url='https://github.com/clinton-hall/nzbToMedia', url='https://github.com/clinton-hall/nzbToMedia',
packages=['core'], packages=['core'],
install_requires=[
'pywin32;platform_system=="Windows"',
],
classifiers=[ classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 5 - Production/Stable', 'Development Status :: 5 - Production/Stable',
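The new install_requires entry uses a PEP 508 environment marker so pywin32 is only pulled in on Windows. A small sketch, using the third-party packaging library (assumed to be available), of how such a marker evaluates:

from packaging.markers import Marker

marker = Marker('platform_system == "Windows"')
print(marker.evaluate())  # True on Windows, False elsewhere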

View file

@ -1 +1,8 @@
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
__author__ = 'Justin' __author__ = 'Justin'

View file

@ -1,48 +1,52 @@
#! /usr/bin/env python #! /usr/bin/env python
from __future__ import print_function from __future__ import (
import datetime absolute_import,
import os division,
import sys print_function,
unicode_literals,
)
import core
def test_eol(): def test_eol():
import eol import eol
eol.check() eol.check()
def test_cleanup(): def test_cleanup():
import cleanup import cleanup
cleanup.clean(cleanup.FOLDER_STRUCTURE) cleanup.clean(cleanup.FOLDER_STRUCTURE)
def test_import_core(): def test_import_core():
import core pass
from core import logger, main_db
def test_import_core_auto_process(): def test_import_core_auto_process():
from core.auto_process import comics, games, movies, music, tv pass
from core.auto_process.common import ProcessResult
def test_import_core_plugins(): def test_import_core_plugins():
from core.plugins.downloaders.nzb.utils import get_nzoid pass
from core.plugins.plex import plex_update
def test_import_core_user_scripts(): def test_import_core_user_scripts():
from core.user_scripts import external_script pass
def test_import_six(): def test_import_six():
from six import text_type pass
def test_import_core_utils(): def test_import_core_utils():
from core.utils import ( pass
char_replace, clean_dir, convert_to_ascii,
extract_files, get_dirs, get_download_info,
update_download_info_status, replace_links,
)
import core
from core import logger, main_db
def test_initial(): def test_initial():
core.initialize() core.initialize()
del core.MYAPP del core.MYAPP
def test_core_parameters(): def test_core_parameters():
assert core.CHECK_MEDIA == 1 assert core.CHECK_MEDIA == 1

tests/test_transcoder.py Executable file
View file

@ -0,0 +1,14 @@
#! /usr/bin/env python
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import core
from core import transcoder
def test_transcoder_check():
assert transcoder.is_video_good(core.TEST_FILE, 0) is True

tox.ini Normal file
View file

@ -0,0 +1,146 @@
; a generative tox configuration, see: https://tox.readthedocs.io/en/latest/config.html#generative-envlist
[tox]
envlist =
clean,
check,
{py27, py35, py36, py37},
report
[testenv]
basepython =
py27: {env:TOXPYTHON:python2.7}
py35: {env:TOXPYTHON:python3.5}
py36: {env:TOXPYTHON:python3.6}
py37: {env:TOXPYTHON:python3.7}
{clean,check,report,codecov}: {env:TOXPYTHON:python3}
setenv =
PYTHONPATH={toxinidir}/tests
PYTHONUNBUFFERED=yes
passenv =
*
usedevelop = false
skip_install = true
deps =
pytest
pytest-travis-fold
pytest-cov
pywin32 ; sys.platform == 'win32'
commands =
{posargs:pytest --cov --cov-report=term-missing tests}
[flake8]
max-line-length = 79
max-doc-length = 79
verbose = 2
statistics = True
min-version = 2.7
require-code = True
exclude =
.github/
.tox/
.pytest_cache/
htmlcov/
logs/
libs/common
libs/win
libs/py2
ignore =
; -- flake8 --
; E501 line too long
; W505 doc line too long
E501, W505
; -- flake8-docstrings --
; D100 Missing docstring in public module
; D101 Missing docstring in public class
; D102 Missing docstring in public method
; D103 Missing docstring in public function
; D104 Missing docstring in public package
; D105 Missing docstring in magic method
; D107 Missing docstring in __init__
; D200 One-line docstring should fit on one line with quotes
; D202 No blank lines allowed after function docstring
; D205 1 blank line required between summary line and description
; D400 First line should end with a period
; D401 First line should be in imperative mood
; D402 First line should not be the function's "signature"
D100, D101, D102, D103, D104, D105, D107
; -- flake8-future-import --
; x = 1 for missing, 5 for present
; FIx6 nested_scopes 2.2
; FIx7 generators 2.3
; FIx2 with_statement 2.6
; FIx1 absolute_import 3.0
; FIx0 division 3.0
; FIx3 print_function 3.0
; FIx4 unicode_literals 3.0
; FIx5 generator_stop 3.7
; ???? annotations 4.0
; FI90 __future__ import does not exist
FI50, FI51, FI53, FI54
per-file-ignores =
; F401 imported but unused
; E402 module level import not at top of file
nzbTo*.py: E265, E266, E402
TorrentToMedia.py: E402
core/__init__.py: E402, F401
core/utils/__init__.py: F401
core/plugins/downloaders/configuration.py: F401
core/plugins/downloaders/utils.py: F401
libs/custom/synchronousdeluge/__init__.py: F401
[testenv:check]
deps =
flake8
flake8-bugbear
flake8-commas
flake8-comprehensions
flake8-docstrings
flake8-future-import
flake8-quotes
skip_install = true
commands =
; ** PRIMARY TESTS **
; Run flake8 tests (with plugins) using default test selections
flake8
; ** SELECTIVE TESTS **
; Run flake8 tests (with plugins) for specific optional codes defined below
; -- flake8 --
; E123 closing bracket does not match indentation of opening brackets line
; E226 missing whitespace around arithmetic operator
; E241 multiple spaces after ,
; E242 tab after ,
; E704 multiple statements on one line
; W504 line break after binary operator
; W505 doc line too long
; -- flake8-bugbear --
; B902 Invalid first argument used for instance method.
; B903 Data class should be immutable or use __slots__ to save memory.
flake8 --select=B902,B903,E123,E226,E241,E242,E704,W504,W505
[coverage:run]
omit =
libs/*
[testenv:codecov]
deps =
codecov
skip_install = true
commands =
coverage xml --ignore-errors
codecov []
[testenv:report]
deps = coverage
skip_install = true
commands =
coverage report
coverage html
[testenv:clean]
commands = coverage erase
skip_install = true
deps = coverage