mirror of
https://github.com/clinton-hall/nzbToMedia.git
synced 2025-08-19 21:03:14 -07:00
Merge branch 'dev'
This commit is contained in:
commit
2cb59eb0dd
23 changed files with 1210 additions and 323 deletions
|
@ -14,7 +14,7 @@
|
||||||
|
|
||||||
# Media Extensions
|
# Media Extensions
|
||||||
#
|
#
|
||||||
# This is a list of media extensions that may be deleted if ".sample" is in the filename.
|
# This is a list of media extensions that may be deleted if a Sample_id is in the filename.
|
||||||
#mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso
|
#mediaExtensions=.mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso
|
||||||
|
|
||||||
# maxSampleSize
|
# maxSampleSize
|
||||||
|
@ -22,17 +22,30 @@
|
||||||
# This is the maximum size (in MiB) to be be considered as sample file.
|
# This is the maximum size (in MiB) to be be considered as sample file.
|
||||||
#maxSampleSize=200
|
#maxSampleSize=200
|
||||||
|
|
||||||
|
# SampleIDs
|
||||||
|
#
|
||||||
|
# This is a list of identifiers used for samples. e.g sample,-s. Use 'SizeOnly' to delete all media files less than maxSampleSize.
|
||||||
|
#SampleIDs=sample,-s.
|
||||||
|
|
||||||
### NZBGET POST-PROCESSING SCRIPT ###
|
### NZBGET POST-PROCESSING SCRIPT ###
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
def is_sample(filePath, inputName, maxSampleSize):
|
|
||||||
|
def is_sample(filePath, inputName, maxSampleSize, SampleIDs):
|
||||||
# 200 MB in bytes
|
# 200 MB in bytes
|
||||||
SIZE_CUTOFF = int(maxSampleSize) * 1024 * 1024
|
SIZE_CUTOFF = int(maxSampleSize) * 1024 * 1024
|
||||||
# Ignore 'sample' in files unless 'sample' in Torrent Name
|
if os.path.getsize(filePath) < SIZE_CUTOFF:
|
||||||
return ('sample' in filePath.lower()) and (not 'sample' in inputName) and (os.path.getsize(filePath) < SIZE_CUTOFF)
|
if 'SizeOnly' in SampleIDs:
|
||||||
|
return True
|
||||||
|
# Ignore 'sample' in files unless 'sample' in Torrent Name
|
||||||
|
for ident in SampleIDs:
|
||||||
|
if ident.lower() in filePath.lower() and not ident.lower() in inputName.lower():
|
||||||
|
return True
|
||||||
|
# Return False if none of these were met.
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
# NZBGet V11+
|
# NZBGet V11+
|
||||||
|
@ -41,7 +54,6 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
print "Script triggered from NZBGet (11.0 or later)."
|
print "Script triggered from NZBGet (11.0 or later)."
|
||||||
|
|
||||||
# NZBGet argv: all passed as environment variables.
|
# NZBGet argv: all passed as environment variables.
|
||||||
clientAgent = "nzbget"
|
|
||||||
# Exit codes used by NZBGet
|
# Exit codes used by NZBGet
|
||||||
POSTPROCESS_PARCHECK=92
|
POSTPROCESS_PARCHECK=92
|
||||||
POSTPROCESS_SUCCESS=93
|
POSTPROCESS_SUCCESS=93
|
||||||
|
@ -52,55 +64,48 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
print "Please enable option \"Unpack\" in nzbget configuration file, exiting"
|
print "Please enable option \"Unpack\" in nzbget configuration file, exiting."
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
print "Par-check successful, but Par-repair disabled, exiting"
|
print "Par-check successful, but Par-repair disabled, exiting."
|
||||||
|
print "Please check your Par-repair settings for future downloads."
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
print "Par-check failed, setting status \"failed\""
|
print "Par-repair failed, setting status \"failed\"."
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
print "Unpack failed, setting status \"failed\""
|
print "Unpack failed, setting status \"failed\"."
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
print "Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"."
|
||||||
fileExtension = os.path.splitext(file)[1]
|
print "Please check your Par-check/repair settings for future downloads."
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
print "Post-Process: Archive files exist but unpack skipped, setting status \"failed\""
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
print "Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g"
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
print "Post-Process: _brokenlog.txt exists, download is probably damaged, exiting"
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
print "Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful"
|
print "Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful."
|
||||||
|
print "Please check your Par-check/repair settings for future downloads."
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
print "Post-Process: Nothing to post-process: destination directory ", os.environ['NZBPP_DIRECTORY'], "doesn't exist"
|
print "Nothing to post-process: destination directory", os.environ['NZBPP_DIRECTORY'], "doesn't exist. Setting status \"failed\"."
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
|
|
||||||
|
if status == 1:
|
||||||
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
mediaContainer = os.environ['NZBPO_MEDIAEXTENSIONS'].split(',')
|
mediaContainer = os.environ['NZBPO_MEDIAEXTENSIONS'].split(',')
|
||||||
|
SampleIDs = os.environ['NZBPO_SAMPLEIDS'].split(',')
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
||||||
for file in filenames:
|
for file in filenames:
|
||||||
|
|
||||||
|
@ -108,7 +113,7 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
fileName, fileExtension = os.path.splitext(file)
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
|
|
||||||
if fileExtension in mediaContainer: # If the file is a video file
|
if fileExtension in mediaContainer: # If the file is a video file
|
||||||
if is_sample(filePath, os.environ['NZBPP_NZBNAME'], os.environ['NZBPO_MAXSAMPLESIZE']): # Ignore samples
|
if is_sample(filePath, os.environ['NZBPP_NZBNAME'], os.environ['NZBPO_MAXSAMPLESIZE'], SampleIDs): # Ignore samples
|
||||||
print "Deleting sample file: ", filePath
|
print "Deleting sample file: ", filePath
|
||||||
try:
|
try:
|
||||||
os.unlink(filePath)
|
os.unlink(filePath)
|
||||||
|
|
|
@ -22,7 +22,6 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
print "Script triggered from NZBGet (11.0 or later)."
|
print "Script triggered from NZBGet (11.0 or later)."
|
||||||
|
|
||||||
# NZBGet argv: all passed as environment variables.
|
# NZBGet argv: all passed as environment variables.
|
||||||
clientAgent = "nzbget"
|
|
||||||
# Exit codes used by NZBGet
|
# Exit codes used by NZBGet
|
||||||
POSTPROCESS_PARCHECK=92
|
POSTPROCESS_PARCHECK=92
|
||||||
POSTPROCESS_SUCCESS=93
|
POSTPROCESS_SUCCESS=93
|
||||||
|
@ -33,61 +32,57 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
print "Please enable option \"Unpack\" in nzbget configuration file, exiting"
|
print "Please enable option \"Unpack\" in nzbget configuration file, exiting."
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
print "Par-check successful, but Par-repair disabled, exiting"
|
print "Par-check successful, but Par-repair disabled, exiting."
|
||||||
|
print "Please check your Par-repair settings for future downloads."
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
print "Par-check failed, setting status \"failed\""
|
print "Par-repair failed, setting status \"failed\"."
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
print "Unpack failed, setting status \"failed\""
|
print "Unpack failed, setting status \"failed\"."
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
print "Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"."
|
||||||
fileExtension = os.path.splitext(file)[1]
|
print "Please check your Par-check/repair settings for future downloads."
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
print "Post-Process: Archive files exist but unpack skipped, setting status \"failed\""
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
print "Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g"
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
print "Post-Process: _brokenlog.txt exists, download is probably damaged, exiting"
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
print "Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful"
|
print "Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful."
|
||||||
|
print "Please check your Par-check/repair settings for future downloads."
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
print "Post-Process: Nothing to post-process: destination directory ", os.environ['NZBPP_DIRECTORY'], "doesn't exist"
|
print "Nothing to post-process: destination directory", os.environ['NZBPP_DIRECTORY'], "doesn't exist. Setting status \"failed\"."
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
|
|
||||||
|
if status == 1:
|
||||||
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
directory = os.path.normpath(os.environ['NZBPP_DIRECTORY'])
|
directory = os.path.normpath(os.environ['NZBPP_DIRECTORY'])
|
||||||
for dirpath, dirnames, filenames in os.walk(directory):
|
for dirpath, dirnames, filenames in os.walk(directory):
|
||||||
for file in filenames:
|
for file in filenames:
|
||||||
filepath = os.path.join(dirpath, file)
|
filepath = os.path.join(dirpath, file)
|
||||||
print "reseting datetime for file", filepath
|
print "reseting datetime for file", filepath
|
||||||
os.utime(filepath, None)
|
try:
|
||||||
continue
|
os.utime(filepath, None)
|
||||||
|
continue
|
||||||
|
except:
|
||||||
|
print "Error: unable to reset time for file", filePath
|
||||||
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
sys.exit(POSTPROCESS_SUCCESS)
|
sys.exit(POSTPROCESS_SUCCESS)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -23,6 +23,7 @@ from autoProcess.nzbToMediaEnv import *
|
||||||
from autoProcess.nzbToMediaUtil import *
|
from autoProcess.nzbToMediaUtil import *
|
||||||
from utorrent.client import UTorrentClient
|
from utorrent.client import UTorrentClient
|
||||||
from transmissionrpc.client import Client as TransmissionClient
|
from transmissionrpc.client import Client as TransmissionClient
|
||||||
|
from synchronousdeluge.client import DelugeClient
|
||||||
|
|
||||||
def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
|
|
||||||
|
@ -34,22 +35,26 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
extracted_folder = []
|
extracted_folder = []
|
||||||
extractionSuccess = False
|
extractionSuccess = False
|
||||||
copy_list = []
|
copy_list = []
|
||||||
|
useLink = useLink_in
|
||||||
|
|
||||||
Logger.debug("MAIN: Received Directory: %s | Name: %s | Category: %s", inputDirectory, inputName, inputCategory)
|
Logger.debug("MAIN: Received Directory: %s | Name: %s | Category: %s", inputDirectory, inputName, inputCategory)
|
||||||
if inputCategory in sbCategory and sbFork in SICKBEARD_TORRENT:
|
|
||||||
|
inputDirectory, inputName, inputCategory, root = category_search(inputDirectory, inputName, inputCategory, root, categories) # Confirm the category by parsing directory structure
|
||||||
|
|
||||||
|
Logger.debug("MAIN: Determined Directory: %s | Name: %s | Category: %s", inputDirectory, inputName, inputCategory)
|
||||||
|
|
||||||
|
if inputCategory in sbCategory and sbFork in SICKBEARD_TORRENT and Torrent_ForceLink != 1:
|
||||||
Logger.info("MAIN: Calling SickBeard's %s branch to post-process: %s",sbFork ,inputName)
|
Logger.info("MAIN: Calling SickBeard's %s branch to post-process: %s",sbFork ,inputName)
|
||||||
result = autoProcessTV.processEpisode(inputDirectory, inputName, int(0))
|
result = autoProcessTV.processEpisode(inputDirectory, inputName, int(0))
|
||||||
if result == 1:
|
if result == 1:
|
||||||
Logger.info("MAIN: A problem was reported in the autoProcess* script. If torrent was pasued we will resume seeding")
|
Logger.info("MAIN: A problem was reported in the autoProcess* script.")
|
||||||
Logger.info("MAIN: All done.")
|
Logger.info("MAIN: All done.")
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
|
||||||
inputDirectory, inputName, inputCategory, root = category_search(inputDirectory, inputName, inputCategory, root, categories) # Confirm the category by parsing directory structure
|
|
||||||
|
|
||||||
outputDestination = ""
|
outputDestination = ""
|
||||||
for category in categories:
|
for category in categories:
|
||||||
if category == inputCategory:
|
if category == inputCategory:
|
||||||
if os.path.basename(inputDirectory) == inputName:
|
if os.path.basename(inputDirectory) == inputName and os.path.isdir(inputDirectory):
|
||||||
Logger.info("MAIN: Download is a directory")
|
Logger.info("MAIN: Download is a directory")
|
||||||
outputDestination = os.path.normpath(os.path.join(outputDirectory, category, safeName(inputName)))
|
outputDestination = os.path.normpath(os.path.join(outputDirectory, category, safeName(inputName)))
|
||||||
else:
|
else:
|
||||||
|
@ -62,7 +67,7 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
if outputDestination == "":
|
if outputDestination == "":
|
||||||
if inputCategory == "":
|
if inputCategory == "":
|
||||||
inputCategory = "UNCAT"
|
inputCategory = "UNCAT"
|
||||||
if os.path.basename(inputDirectory) == inputName:
|
if os.path.basename(inputDirectory) == inputName and os.path.isdir(inputDirectory):
|
||||||
Logger.info("MAIN: Download is a directory")
|
Logger.info("MAIN: Download is a directory")
|
||||||
outputDestination = os.path.normpath(os.path.join(outputDirectory, inputCategory, safeName(inputName)))
|
outputDestination = os.path.normpath(os.path.join(outputDirectory, inputCategory, safeName(inputName)))
|
||||||
else:
|
else:
|
||||||
|
@ -82,7 +87,7 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
|
||||||
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
||||||
if clientAgent in ['utorrent', 'transmission'] and inputHash:
|
if clientAgent in ['utorrent', 'transmission', 'deluge'] and inputHash:
|
||||||
if clientAgent == 'utorrent':
|
if clientAgent == 'utorrent':
|
||||||
try:
|
try:
|
||||||
Logger.debug("MAIN: Connecting to %s: %s", clientAgent, uTorrentWEBui)
|
Logger.debug("MAIN: Connecting to %s: %s", clientAgent, uTorrentWEBui)
|
||||||
|
@ -97,6 +102,14 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
except:
|
except:
|
||||||
Logger.exception("MAIN: Failed to connect to Transmission")
|
Logger.exception("MAIN: Failed to connect to Transmission")
|
||||||
TransmissionClass = ""
|
TransmissionClass = ""
|
||||||
|
if clientAgent == 'deluge':
|
||||||
|
try:
|
||||||
|
Logger.debug("MAIN: Connecting to %s: http://%s:%s", clientAgent, DelugeHost, DelugePort)
|
||||||
|
delugeClient = DelugeClient()
|
||||||
|
delugeClient.connect(host = DelugeHost, port = DelugePort, username = DelugeUSR, password = DelugePWD)
|
||||||
|
except:
|
||||||
|
Logger.exception("MAIN: Failed to connect to deluge")
|
||||||
|
delugeClient = ""
|
||||||
|
|
||||||
# if we are using links with uTorrent it means we need to pause it in order to access the files
|
# if we are using links with uTorrent it means we need to pause it in order to access the files
|
||||||
Logger.debug("MAIN: Stoping torrent %s in %s while processing", inputName, clientAgent)
|
Logger.debug("MAIN: Stoping torrent %s in %s while processing", inputName, clientAgent)
|
||||||
|
@ -104,16 +117,36 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
utorrentClass.stop(inputHash)
|
utorrentClass.stop(inputHash)
|
||||||
if clientAgent == 'transmission' and TransmissionClass !="":
|
if clientAgent == 'transmission' and TransmissionClass !="":
|
||||||
TransmissionClass.stop_torrent(inputID)
|
TransmissionClass.stop_torrent(inputID)
|
||||||
|
if clientAgent == 'deluge' and delugeClient != "":
|
||||||
|
delugeClient.core.pause_torrent([inputID])
|
||||||
time.sleep(5) # Give Torrent client some time to catch up with the change
|
time.sleep(5) # Give Torrent client some time to catch up with the change
|
||||||
|
|
||||||
Logger.debug("MAIN: Scanning files in directory: %s", inputDirectory)
|
Logger.debug("MAIN: Scanning files in directory: %s", inputDirectory)
|
||||||
|
|
||||||
|
if inputCategory in hpCategory:
|
||||||
|
noFlatten.extend(hpCategory) # Make sure we preserve folder structure for HeadPhones.
|
||||||
|
if useLink in ['sym','move']: # These don't work for HeadPhones.
|
||||||
|
useLink = 'no' # default to copy.
|
||||||
|
|
||||||
|
if inputCategory in sbCategory and sbFork in SICKBEARD_TORRENT: # Don't flatten when sending to SICKBEARD_TORRENT
|
||||||
|
noFlatten.extend(sbCategory)
|
||||||
|
|
||||||
|
outputDestinationMaster = outputDestination # Save the original, so we can change this within the loop below, and reset afterwards.
|
||||||
now = datetime.datetime.now()
|
now = datetime.datetime.now()
|
||||||
for dirpath, dirnames, filenames in os.walk(inputDirectory):
|
for dirpath, dirnames, filenames in os.walk(inputDirectory):
|
||||||
|
Logger.debug("MAIN: Found %s files in %s", str(len(filenames)), dirpath)
|
||||||
for file in filenames:
|
for file in filenames:
|
||||||
|
|
||||||
filePath = os.path.join(dirpath, file)
|
filePath = os.path.join(dirpath, file)
|
||||||
fileName, fileExtension = os.path.splitext(file)
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
|
if inputCategory in noFlatten:
|
||||||
|
newDir = dirpath # find the full path
|
||||||
|
newDir = newDir.replace(inputDirectory, "") #find the extra-depth directory
|
||||||
|
if len(newDir) > 0 and newDir[0] == "/":
|
||||||
|
newDir = newDir[1:] # remove leading "/" to enable join to work.
|
||||||
|
outputDestination = os.path.join(outputDestinationMaster, newDir) # join this extra directory to output.
|
||||||
|
Logger.debug("MAIN: Setting outputDestination to %s to preserve folder structure", outputDestination)
|
||||||
|
|
||||||
targetDirectory = os.path.join(outputDestination, file)
|
targetDirectory = os.path.join(outputDestination, file)
|
||||||
|
|
||||||
if root == 1:
|
if root == 1:
|
||||||
|
@ -137,13 +170,22 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
else:
|
else:
|
||||||
continue # This file has not been recently moved or created, skip it
|
continue # This file has not been recently moved or created, skip it
|
||||||
|
|
||||||
|
if inputCategory in sbCategory and sbFork in SICKBEARD_TORRENT: # We want to link every file.
|
||||||
|
Logger.info("MAIN: Found file %s in %s", fileExtension, filePath)
|
||||||
|
try:
|
||||||
|
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
||||||
|
copy_list.append([filePath, os.path.join(outputDestination, file)])
|
||||||
|
except:
|
||||||
|
Logger.exception("MAIN: Failed to link file: %s", file)
|
||||||
|
continue
|
||||||
|
|
||||||
if fileExtension in mediaContainer: # If the file is a video file
|
if fileExtension in mediaContainer: # If the file is a video file
|
||||||
if is_sample(filePath, inputName, minSampleSize) and not inputCategory in hpCategory: # Ignore samples
|
if is_sample(filePath, inputName, minSampleSize, SampleIDs) and not inputCategory in hpCategory: # Ignore samples
|
||||||
Logger.info("MAIN: Ignoring sample file: %s ", filePath)
|
Logger.info("MAIN: Ignoring sample file: %s ", filePath)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
video = video + 1
|
video = video + 1
|
||||||
Logger.info("MAIN: Found video file %s in %s", fileExtension, filePath)
|
Logger.info("MAIN: Found media file %s in %s", fileExtension, filePath)
|
||||||
try:
|
try:
|
||||||
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
||||||
copy_list.append([filePath, os.path.join(outputDestination, file)])
|
copy_list.append([filePath, os.path.join(outputDestination, file)])
|
||||||
|
@ -192,17 +234,19 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
else:
|
else:
|
||||||
Logger.debug("MAIN: Ignoring unknown filetype %s for file %s", fileExtension, filePath)
|
Logger.debug("MAIN: Ignoring unknown filetype %s for file %s", fileExtension, filePath)
|
||||||
continue
|
continue
|
||||||
if not inputCategory in hpCategory: #don't flatten hp in case multi cd albums, and we need to copy this back later.
|
|
||||||
|
outputDestination = outputDestinationMaster # Reset here.
|
||||||
|
if not inputCategory in noFlatten: #don't flatten hp in case multi cd albums, and we need to copy this back later.
|
||||||
flatten(outputDestination)
|
flatten(outputDestination)
|
||||||
|
|
||||||
# Now check if movie files exist in destination:
|
# Now check if movie files exist in destination:
|
||||||
if inputCategory in cpsCategory + sbCategory:
|
if inputCategory in cpsCategory + sbCategory and not (inputCategory in sbCategory and sbFork in SICKBEARD_TORRENT):
|
||||||
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
||||||
for file in filenames:
|
for file in filenames:
|
||||||
filePath = os.path.join(dirpath, file)
|
filePath = os.path.join(dirpath, file)
|
||||||
fileName, fileExtension = os.path.splitext(file)
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
if fileExtension in mediaContainer: # If the file is a video file
|
if fileExtension in mediaContainer: # If the file is a video file
|
||||||
if is_sample(filePath, inputName, minSampleSize):
|
if is_sample(filePath, inputName, minSampleSize, SampleIDs):
|
||||||
Logger.debug("MAIN: Removing sample file: %s", filePath)
|
Logger.debug("MAIN: Removing sample file: %s", filePath)
|
||||||
os.unlink(filePath) # remove samples
|
os.unlink(filePath) # remove samples
|
||||||
else:
|
else:
|
||||||
|
@ -216,11 +260,16 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
else:
|
else:
|
||||||
Logger.debug("MAIN: Found %s media files in output. %s were found in input", str(video2), str(video))
|
Logger.debug("MAIN: Found %s media files in output. %s were found in input", str(video2), str(video))
|
||||||
|
|
||||||
|
if inputCategory in sbCategory and sbFork in SICKBEARD_TORRENT:
|
||||||
|
if len(copy_list) > 0:
|
||||||
|
Logger.debug("MAIN: Found and linked %s files", str(len(copy_list)))
|
||||||
|
status = int(0)
|
||||||
|
|
||||||
processCategories = cpsCategory + sbCategory + hpCategory + mlCategory + gzCategory
|
processCategories = cpsCategory + sbCategory + hpCategory + mlCategory + gzCategory
|
||||||
|
|
||||||
if (inputCategory in user_script_categories and not "NONE" in user_script_categories) or ("ALL" in user_script_categories and not inputCategory in processCategories):
|
if (inputCategory in user_script_categories and not "NONE" in user_script_categories) or ("ALL" in user_script_categories and not inputCategory in processCategories):
|
||||||
Logger.info("MAIN: Processing user script %s.", user_script)
|
Logger.info("MAIN: Processing user script %s.", user_script)
|
||||||
result = external_script(outputDestination)
|
result = external_script(outputDestination,inputName,inputCategory)
|
||||||
elif status == int(0) or (inputCategory in hpCategory + mlCategory + gzCategory): # if movies linked/extracted or for other categories.
|
elif status == int(0) or (inputCategory in hpCategory + mlCategory + gzCategory): # if movies linked/extracted or for other categories.
|
||||||
Logger.debug("MAIN: Calling autoProcess script for successful download.")
|
Logger.debug("MAIN: Calling autoProcess script for successful download.")
|
||||||
status = int(0) # hp, my, gz don't support failed.
|
status = int(0) # hp, my, gz don't support failed.
|
||||||
|
@ -234,7 +283,7 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
result = autoProcessMovie.process(outputDestination, inputName, status, clientAgent, download_id, inputCategory)
|
result = autoProcessMovie.process(outputDestination, inputName, status, clientAgent, download_id, inputCategory)
|
||||||
elif inputCategory in sbCategory:
|
elif inputCategory in sbCategory:
|
||||||
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
||||||
result = autoProcessTV.processEpisode(outputDestination, inputName, status, inputCategory)
|
result = autoProcessTV.processEpisode(outputDestination, inputName, status, clientAgent, inputCategory)
|
||||||
elif inputCategory in hpCategory:
|
elif inputCategory in hpCategory:
|
||||||
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
||||||
result = autoProcessMusic.process(inputDirectory, inputName, status, inputCategory)
|
result = autoProcessMusic.process(inputDirectory, inputName, status, inputCategory)
|
||||||
|
@ -259,11 +308,15 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
continue
|
continue
|
||||||
else: # move temp version back to allow seeding or Torrent removal.
|
else: # move temp version back to allow seeding or Torrent removal.
|
||||||
Logger.debug("MAIN: Moving %s to %s", str(item[1]), str(item[0]))
|
Logger.debug("MAIN: Moving %s to %s", str(item[1]), str(item[0]))
|
||||||
shutil.move(os.path.normpath(item[1]), os.path.normpath(item[0]))
|
newDestination = os.path.split(os.path.normpath(item[0]))
|
||||||
|
try:
|
||||||
|
copy_link(os.path.normpath(item[1]), os.path.normpath(item[0]), 'move', newDestination[0])
|
||||||
|
except:
|
||||||
|
Logger.exception("MAIN: Failed to move file: %s", file)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
||||||
if clientAgent in ['utorrent', 'transmission'] and inputHash:
|
if clientAgent in ['utorrent', 'transmission', 'deluge'] and inputHash:
|
||||||
# Delete torrent and torrentdata from Torrent client if processing was successful.
|
# Delete torrent and torrentdata from Torrent client if processing was successful.
|
||||||
if deleteOriginal == 1 and result != 1:
|
if deleteOriginal == 1 and result != 1:
|
||||||
Logger.debug("MAIN: Deleting torrent %s from %s", inputName, clientAgent)
|
Logger.debug("MAIN: Deleting torrent %s from %s", inputName, clientAgent)
|
||||||
|
@ -276,6 +329,8 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
TransmissionClass.remove_torrent(inputID, False)
|
TransmissionClass.remove_torrent(inputID, False)
|
||||||
else:
|
else:
|
||||||
TransmissionClass.remove_torrent(inputID, True)
|
TransmissionClass.remove_torrent(inputID, True)
|
||||||
|
if clientAgent == 'deluge' and delugeClient != "":
|
||||||
|
delugeClient.core.remove_torrent(inputID, True)
|
||||||
# we always want to resume seeding, for now manually find out what is wrong when extraction fails
|
# we always want to resume seeding, for now manually find out what is wrong when extraction fails
|
||||||
else:
|
else:
|
||||||
Logger.debug("MAIN: Starting torrent %s in %s", inputName, clientAgent)
|
Logger.debug("MAIN: Starting torrent %s in %s", inputName, clientAgent)
|
||||||
|
@ -283,6 +338,8 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
utorrentClass.start(inputHash)
|
utorrentClass.start(inputHash)
|
||||||
if clientAgent == 'transmission' and TransmissionClass !="":
|
if clientAgent == 'transmission' and TransmissionClass !="":
|
||||||
TransmissionClass.start_torrent(inputID)
|
TransmissionClass.start_torrent(inputID)
|
||||||
|
if clientAgent == 'deluge' and delugeClient != "":
|
||||||
|
delugeClient.core.resume_torrent([inputID])
|
||||||
time.sleep(5)
|
time.sleep(5)
|
||||||
#cleanup
|
#cleanup
|
||||||
if inputCategory in processCategories and result == 0 and os.path.isdir(outputDestination):
|
if inputCategory in processCategories and result == 0 and os.path.isdir(outputDestination):
|
||||||
|
@ -304,9 +361,9 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
Logger.debug("media/meta file found: %s", item)
|
Logger.debug("media/meta file found: %s", item)
|
||||||
Logger.info("MAIN: All done.")
|
Logger.info("MAIN: All done.")
|
||||||
|
|
||||||
def external_script(outputDestination):
|
def external_script(outputDestination,torrentName,torrentLabel):
|
||||||
|
|
||||||
result_final = int(0) # start at 0.
|
final_result = int(0) # start at 0.
|
||||||
num_files = int(0)
|
num_files = int(0)
|
||||||
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
||||||
for file in filenames:
|
for file in filenames:
|
||||||
|
@ -314,8 +371,10 @@ def external_script(outputDestination):
|
||||||
filePath = os.path.join(dirpath, file)
|
filePath = os.path.join(dirpath, file)
|
||||||
fileName, fileExtension = os.path.splitext(file)
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
|
|
||||||
if fileExtension in user_script_mediaExtensions or user_script_mediaExtensions == "ALL":
|
if fileExtension in user_script_mediaExtensions or "ALL" in user_script_mediaExtensions:
|
||||||
num_files = num_files + 1
|
num_files = num_files + 1
|
||||||
|
if user_script_runOnce == 1 and num_files > 1: # we have already run once, so just continue to get number of files.
|
||||||
|
continue
|
||||||
command = [user_script]
|
command = [user_script]
|
||||||
for param in user_script_param:
|
for param in user_script_param:
|
||||||
if param == "FN":
|
if param == "FN":
|
||||||
|
@ -324,13 +383,25 @@ def external_script(outputDestination):
|
||||||
elif param == "FP":
|
elif param == "FP":
|
||||||
command.append(filePath)
|
command.append(filePath)
|
||||||
continue
|
continue
|
||||||
|
elif param == "TN":
|
||||||
|
command.append(torrentName)
|
||||||
|
continue
|
||||||
|
elif param == "TL":
|
||||||
|
command.append(torrentLabel)
|
||||||
|
continue
|
||||||
elif param == "DN":
|
elif param == "DN":
|
||||||
command.append(dirpath)
|
if user_script_runOnce == 1:
|
||||||
|
command.append(outputDestination)
|
||||||
|
else:
|
||||||
|
command.append(dirpath)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
command.append(param)
|
command.append(param)
|
||||||
continue
|
continue
|
||||||
Logger.info("Running script %s on file %s.", command, filePath)
|
cmd = ""
|
||||||
|
for item in command:
|
||||||
|
cmd = cmd + " " + item
|
||||||
|
Logger.info("Running script %s on file %s.", cmd, filePath)
|
||||||
try:
|
try:
|
||||||
p = Popen(command)
|
p = Popen(command)
|
||||||
res = p.wait()
|
res = p.wait()
|
||||||
|
@ -390,10 +461,11 @@ if __name__ == "__main__":
|
||||||
Logger.info("MAIN: Loading config from %s", configFilename)
|
Logger.info("MAIN: Loading config from %s", configFilename)
|
||||||
config.read(configFilename)
|
config.read(configFilename)
|
||||||
# EXAMPLE VALUES:
|
# EXAMPLE VALUES:
|
||||||
clientAgent = config.get("Torrent", "clientAgent") # utorrent | deluge | transmission | other
|
clientAgent = config.get("Torrent", "clientAgent") # utorrent | deluge | transmission | rtorrent | other
|
||||||
useLink = config.get("Torrent", "useLink") # no | hard | sym
|
useLink_in = config.get("Torrent", "useLink") # no | hard | sym
|
||||||
outputDirectory = config.get("Torrent", "outputDirectory") # /abs/path/to/complete/
|
outputDirectory = config.get("Torrent", "outputDirectory") # /abs/path/to/complete/
|
||||||
categories = (config.get("Torrent", "categories")).split(',') # music,music_videos,pictures,software
|
categories = (config.get("Torrent", "categories")).split(',') # music,music_videos,pictures,software
|
||||||
|
noFlatten = (config.get("Torrent", "noFlatten")).split(',')
|
||||||
|
|
||||||
uTorrentWEBui = config.get("Torrent", "uTorrentWEBui") # http://localhost:8090/gui/
|
uTorrentWEBui = config.get("Torrent", "uTorrentWEBui") # http://localhost:8090/gui/
|
||||||
uTorrentUSR = config.get("Torrent", "uTorrentUSR") # mysecretusr
|
uTorrentUSR = config.get("Torrent", "uTorrentUSR") # mysecretusr
|
||||||
|
@ -403,6 +475,11 @@ if __name__ == "__main__":
|
||||||
TransmissionPort = config.get("Torrent", "TransmissionPort") # 8084
|
TransmissionPort = config.get("Torrent", "TransmissionPort") # 8084
|
||||||
TransmissionUSR = config.get("Torrent", "TransmissionUSR") # mysecretusr
|
TransmissionUSR = config.get("Torrent", "TransmissionUSR") # mysecretusr
|
||||||
TransmissionPWD = config.get("Torrent", "TransmissionPWD") # mysecretpwr
|
TransmissionPWD = config.get("Torrent", "TransmissionPWD") # mysecretpwr
|
||||||
|
|
||||||
|
DelugeHost = config.get("Torrent", "DelugeHost") # localhost
|
||||||
|
DelugePort = config.get("Torrent", "DelugePort") # 8084
|
||||||
|
DelugeUSR = config.get("Torrent", "DelugeUSR") # mysecretusr
|
||||||
|
DelugePWD = config.get("Torrent", "DelugePWD") # mysecretpwr
|
||||||
|
|
||||||
deleteOriginal = int(config.get("Torrent", "deleteOriginal")) # 0
|
deleteOriginal = int(config.get("Torrent", "deleteOriginal")) # 0
|
||||||
|
|
||||||
|
@ -410,10 +487,12 @@ if __name__ == "__main__":
|
||||||
mediaContainer = (config.get("Extensions", "mediaExtensions")).split(',') # .mkv,.avi,.divx
|
mediaContainer = (config.get("Extensions", "mediaExtensions")).split(',') # .mkv,.avi,.divx
|
||||||
metaContainer = (config.get("Extensions", "metaExtensions")).split(',') # .nfo,.sub,.srt
|
metaContainer = (config.get("Extensions", "metaExtensions")).split(',') # .nfo,.sub,.srt
|
||||||
minSampleSize = int(config.get("Extensions", "minSampleSize")) # 200 (in MB)
|
minSampleSize = int(config.get("Extensions", "minSampleSize")) # 200 (in MB)
|
||||||
|
SampleIDs = (config.get("Extensions", "SampleIDs")).split(',') # sample,-s.
|
||||||
|
|
||||||
cpsCategory = (config.get("CouchPotato", "cpsCategory")).split(',') # movie
|
cpsCategory = (config.get("CouchPotato", "cpsCategory")).split(',') # movie
|
||||||
sbCategory = (config.get("SickBeard", "sbCategory")).split(',') # tv
|
sbCategory = (config.get("SickBeard", "sbCategory")).split(',') # tv
|
||||||
sbFork = config.get("SickBeard", "fork") # tv
|
sbFork = config.get("SickBeard", "fork") # default
|
||||||
|
Torrent_ForceLink = int(config.get("SickBeard", "Torrent_ForceLink")) # 1
|
||||||
hpCategory = (config.get("HeadPhones", "hpCategory")).split(',') # music
|
hpCategory = (config.get("HeadPhones", "hpCategory")).split(',') # music
|
||||||
mlCategory = (config.get("Mylar", "mlCategory")).split(',') # comics
|
mlCategory = (config.get("Mylar", "mlCategory")).split(',') # comics
|
||||||
gzCategory = (config.get("Gamez", "gzCategory")).split(',') # games
|
gzCategory = (config.get("Gamez", "gzCategory")).split(',') # games
|
||||||
|
@ -431,6 +510,7 @@ if __name__ == "__main__":
|
||||||
user_script_successCodes = (config.get("UserScript", "user_script_successCodes")).split(',')
|
user_script_successCodes = (config.get("UserScript", "user_script_successCodes")).split(',')
|
||||||
user_script_clean = int(config.get("UserScript", "user_script_clean"))
|
user_script_clean = int(config.get("UserScript", "user_script_clean"))
|
||||||
user_delay = int(config.get("UserScript", "delay"))
|
user_delay = int(config.get("UserScript", "delay"))
|
||||||
|
user_script_runOnce = int(config.get("UserScript", "user_script_runOnce"))
|
||||||
|
|
||||||
transcode = int(config.get("Transcoder", "transcode"))
|
transcode = int(config.get("Transcoder", "transcode"))
|
||||||
|
|
||||||
|
|
|
@ -40,14 +40,13 @@ def get_imdb(nzbName, dirName):
|
||||||
return imdbid
|
return imdbid
|
||||||
|
|
||||||
else:
|
else:
|
||||||
Logger.warning("Could not find an imdb id in directory or name")
|
Logger.debug("Could not find an imdb id in directory or name")
|
||||||
Logger.info("Postprocessing will continue, but the movie may not be identified correctly by CouchPotato")
|
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
def get_movie_info(baseURL, imdbid, download_id):
|
def get_movie_info(baseURL, imdbid, download_id):
|
||||||
|
|
||||||
if not imdbid and not download_id:
|
if not imdbid and not download_id:
|
||||||
return ""
|
return "", None, imdbid
|
||||||
|
|
||||||
movie_id = ""
|
movie_id = ""
|
||||||
releaselist = []
|
releaselist = []
|
||||||
|
@ -55,7 +54,7 @@ def get_movie_info(baseURL, imdbid, download_id):
|
||||||
library = []
|
library = []
|
||||||
offset = int(0)
|
offset = int(0)
|
||||||
while True:
|
while True:
|
||||||
url = baseURL + "media.list/?status=active" + "&limit_offset=50," + str(offset)
|
url = baseURL + "media.list/?status=active&release_status=snatched&limit_offset=50," + str(offset)
|
||||||
|
|
||||||
Logger.debug("Opening URL: %s", url)
|
Logger.debug("Opening URL: %s", url)
|
||||||
|
|
||||||
|
@ -81,6 +80,7 @@ def get_movie_info(baseURL, imdbid, download_id):
|
||||||
break
|
break
|
||||||
offset = offset + 50
|
offset = offset + 50
|
||||||
|
|
||||||
|
result = None # reset
|
||||||
for index in range(len(movieid)):
|
for index in range(len(movieid)):
|
||||||
if not imdbid:
|
if not imdbid:
|
||||||
url = baseURL + "media.get/?id=" + str(movieid[index])
|
url = baseURL + "media.get/?id=" + str(movieid[index])
|
||||||
|
@ -89,17 +89,18 @@ def get_movie_info(baseURL, imdbid, download_id):
|
||||||
urlObj = urllib.urlopen(url)
|
urlObj = urllib.urlopen(url)
|
||||||
except:
|
except:
|
||||||
Logger.exception("Unable to open URL")
|
Logger.exception("Unable to open URL")
|
||||||
return ""
|
return "", None, imdbid
|
||||||
try:
|
try:
|
||||||
result = json.load(urlObj)
|
result = json.load(urlObj)
|
||||||
releaselist = [item["info"]["download_id"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
releaselist = [item["info"]["download_id"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
||||||
except:
|
except:
|
||||||
Logger.exception("Unable to parse json data for releases")
|
Logger.exception("Unable to parse json data for releases")
|
||||||
return ""
|
return "", None, imdbid
|
||||||
|
|
||||||
if len(releaselist) > 0:
|
if len(releaselist) > 0:
|
||||||
movie_id = str(movieid[index])
|
movie_id = str(movieid[index])
|
||||||
Logger.info("Found movie id %s in database via download_id %s", movie_id, download_id)
|
imdbid = str(library[index])
|
||||||
|
Logger.info("Found movie id %s and imdb %s in database via download_id %s", movie_id, imdbid, download_id)
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
@ -112,22 +113,24 @@ def get_movie_info(baseURL, imdbid, download_id):
|
||||||
if not movie_id:
|
if not movie_id:
|
||||||
Logger.exception("Could not parse database results to determine imdbid or movie id")
|
Logger.exception("Could not parse database results to determine imdbid or movie id")
|
||||||
|
|
||||||
return movie_id
|
return movie_id, result, imdbid
|
||||||
|
|
||||||
def get_status(baseURL, movie_id, clientAgent, download_id):
|
def get_status(baseURL, movie_id, clientAgent, download_id, result=None):
|
||||||
|
|
||||||
if not movie_id:
|
if not movie_id:
|
||||||
return "", clientAgent, "none", "none"
|
return "", clientAgent, "none", "none"
|
||||||
url = baseURL + "media.get/?id=" + str(movie_id)
|
|
||||||
Logger.debug("Looking for status of movie: %s - with release sent to clientAgent: %s and download_id: %s", movie_id, clientAgent, download_id)
|
|
||||||
Logger.debug("Opening URL: %s", url)
|
|
||||||
|
|
||||||
try:
|
Logger.debug("Looking for status of movie: %s - with release sent to clientAgent: %s and download_id: %s", movie_id, clientAgent, download_id)
|
||||||
urlObj = urllib.urlopen(url)
|
if not result: # we haven't already called media.get
|
||||||
except:
|
url = baseURL + "media.get/?id=" + str(movie_id)
|
||||||
Logger.exception("Unable to open URL")
|
Logger.debug("Opening URL: %s", url)
|
||||||
return "", clientAgent, "none", "none"
|
|
||||||
result = json.load(urlObj)
|
try:
|
||||||
|
urlObj = urllib.urlopen(url)
|
||||||
|
except:
|
||||||
|
Logger.exception("Unable to open URL")
|
||||||
|
return "", clientAgent, "none", "none"
|
||||||
|
result = json.load(urlObj)
|
||||||
try:
|
try:
|
||||||
movie_status = result["media"]["status"]["identifier"]
|
movie_status = result["media"]["status"]["identifier"]
|
||||||
Logger.debug("This movie is marked as status %s in CouchPotatoServer", movie_status)
|
Logger.debug("This movie is marked as status %s in CouchPotatoServer", movie_status)
|
||||||
|
@ -251,9 +254,9 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
|
||||||
|
|
||||||
baseURL = protocol + host + ":" + port + web_root + "/api/" + apikey + "/"
|
baseURL = protocol + host + ":" + port + web_root + "/api/" + apikey + "/"
|
||||||
|
|
||||||
movie_id = get_movie_info(baseURL, imdbid, download_id) # get the CPS database movie id this movie.
|
movie_id, result, imdbid = get_movie_info(baseURL, imdbid, download_id) # get the CPS database movie id for this movie.
|
||||||
|
|
||||||
initial_status, clientAgent, download_id, initial_release_status = get_status(baseURL, movie_id, clientAgent, download_id)
|
initial_status, clientAgent, download_id, initial_release_status = get_status(baseURL, movie_id, clientAgent, download_id, result)
|
||||||
|
|
||||||
process_all_exceptions(nzbName.lower(), dirName)
|
process_all_exceptions(nzbName.lower(), dirName)
|
||||||
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
||||||
|
@ -277,7 +280,7 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
|
||||||
if remoteCPS == 1:
|
if remoteCPS == 1:
|
||||||
command = command + "/?downloader=" + clientAgent + "&download_id=" + download_id
|
command = command + "/?downloader=" + clientAgent + "&download_id=" + download_id
|
||||||
else:
|
else:
|
||||||
command = command + "/?media_folder=" + dirName + "&downloader=" + clientAgent + "&download_id=" + download_id
|
command = command + "/?media_folder=" + urllib.quote(dirName) + "&downloader=" + clientAgent + "&download_id=" + download_id
|
||||||
|
|
||||||
url = baseURL + command
|
url = baseURL + command
|
||||||
|
|
||||||
|
|
|
@ -13,9 +13,6 @@ from nzbToMediaUtil import *
|
||||||
from nzbToMediaSceneExceptions import process_all_exceptions
|
from nzbToMediaSceneExceptions import process_all_exceptions
|
||||||
|
|
||||||
Logger = logging.getLogger()
|
Logger = logging.getLogger()
|
||||||
TimeOut = 4 * int(TimeOut) # SickBeard needs to complete all moving and renaming before returning the log sequence via url.
|
|
||||||
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
|
|
||||||
|
|
||||||
|
|
||||||
class AuthURLOpener(urllib.FancyURLopener):
|
class AuthURLOpener(urllib.FancyURLopener):
|
||||||
def __init__(self, user, pw):
|
def __init__(self, user, pw):
|
||||||
|
@ -44,7 +41,7 @@ def delete(dirName):
|
||||||
Logger.exception("Unable to delete folder %s", dirName)
|
Logger.exception("Unable to delete folder %s", dirName)
|
||||||
|
|
||||||
|
|
||||||
def processEpisode(dirName, nzbName=None, failed=False, inputCategory=None):
|
def processEpisode(dirName, nzbName=None, failed=False, clientAgent=None, inputCategory=None):
|
||||||
|
|
||||||
status = int(failed)
|
status = int(failed)
|
||||||
config = ConfigParser.ConfigParser()
|
config = ConfigParser.ConfigParser()
|
||||||
|
@ -99,15 +96,45 @@ def processEpisode(dirName, nzbName=None, failed=False, inputCategory=None):
|
||||||
delay = float(config.get(section, "delay"))
|
delay = float(config.get(section, "delay"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
delay = 0
|
delay = 0
|
||||||
|
try:
|
||||||
|
wait_for = int(config.get(section, "wait_for"))
|
||||||
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
|
wait_for = 5
|
||||||
|
try:
|
||||||
|
SampleIDs = (config.get("Extensions", "SampleIDs")).split(',')
|
||||||
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
|
SampleIDs = ['sample','-s.']
|
||||||
|
try:
|
||||||
|
nzbExtractionBy = config.get(section, "nzbExtractionBy")
|
||||||
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
|
nzbExtractionBy = "Downloader"
|
||||||
|
|
||||||
|
TimeOut = 60 * int(wait_for) # SickBeard needs to complete all moving and renaming before returning the log sequence via url.
|
||||||
|
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
|
||||||
|
|
||||||
mediaContainer = (config.get("Extensions", "mediaExtensions")).split(',')
|
mediaContainer = (config.get("Extensions", "mediaExtensions")).split(',')
|
||||||
minSampleSize = int(config.get("Extensions", "minSampleSize"))
|
minSampleSize = int(config.get("Extensions", "minSampleSize"))
|
||||||
|
|
||||||
if not fork in SICKBEARD_TORRENT:
|
if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
|
||||||
|
dirName = os.path.split(os.path.normpath(dirName))[0]
|
||||||
|
|
||||||
|
SpecificPath = os.path.join(dirName, nzbName)
|
||||||
|
cleanName = os.path.splitext(SpecificPath)
|
||||||
|
if cleanName[1] == ".nzb":
|
||||||
|
SpecificPath = cleanName[0]
|
||||||
|
if os.path.isdir(SpecificPath):
|
||||||
|
dirName = SpecificPath
|
||||||
|
|
||||||
|
SICKBEARD_TORRENT_USE = SICKBEARD_TORRENT
|
||||||
|
|
||||||
|
if clientAgent in ['nzbget','sabnzbd'] and not nzbExtractionBy == "Destination": #Assume Torrent actions (unrar and link) don't happen. We need to check for valid media here.
|
||||||
|
SICKBEARD_TORRENT_USE = []
|
||||||
|
|
||||||
|
if not fork in SICKBEARD_TORRENT_USE:
|
||||||
process_all_exceptions(nzbName.lower(), dirName)
|
process_all_exceptions(nzbName.lower(), dirName)
|
||||||
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
||||||
|
|
||||||
if nzbName != "Manual Run" and not fork in SICKBEARD_TORRENT:
|
if nzbName != "Manual Run" and not fork in SICKBEARD_TORRENT_USE:
|
||||||
# Now check if movie files exist in destination:
|
# Now check if movie files exist in destination:
|
||||||
video = int(0)
|
video = int(0)
|
||||||
for dirpath, dirnames, filenames in os.walk(dirName):
|
for dirpath, dirnames, filenames in os.walk(dirName):
|
||||||
|
@ -115,7 +142,7 @@ def processEpisode(dirName, nzbName=None, failed=False, inputCategory=None):
|
||||||
filePath = os.path.join(dirpath, file)
|
filePath = os.path.join(dirpath, file)
|
||||||
fileExtension = os.path.splitext(file)[1]
|
fileExtension = os.path.splitext(file)[1]
|
||||||
if fileExtension in mediaContainer: # If the file is a video file
|
if fileExtension in mediaContainer: # If the file is a video file
|
||||||
if is_sample(filePath, nzbName, minSampleSize):
|
if is_sample(filePath, nzbName, minSampleSize, SampleIDs):
|
||||||
Logger.debug("Removing sample file: %s", filePath)
|
Logger.debug("Removing sample file: %s", filePath)
|
||||||
os.unlink(filePath) # remove samples
|
os.unlink(filePath) # remove samples
|
||||||
else:
|
else:
|
||||||
|
@ -127,7 +154,7 @@ def processEpisode(dirName, nzbName=None, failed=False, inputCategory=None):
|
||||||
status = int(1)
|
status = int(1)
|
||||||
failed = True
|
failed = True
|
||||||
|
|
||||||
if watch_dir != "":
|
if watch_dir != "" and (not host in ['localhost', '127.0.0.1'] or nzbName == "Manual Run"):
|
||||||
dirName = watch_dir
|
dirName = watch_dir
|
||||||
|
|
||||||
params = {}
|
params = {}
|
||||||
|
|
|
@ -294,8 +294,8 @@ def addnzbget():
|
||||||
confignew.read(configFilenamenew)
|
confignew.read(configFilenamenew)
|
||||||
|
|
||||||
section = "CouchPotato"
|
section = "CouchPotato"
|
||||||
envKeys = ['CATEGORY', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'DELAY', 'METHOD', 'DELETE_FAILED', 'REMOTECPS']
|
envKeys = ['CATEGORY', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'DELAY', 'METHOD', 'DELETE_FAILED', 'REMOTECPS', 'WAIT_FOR']
|
||||||
cfgKeys = ['cpsCategory', 'apikey', 'host', 'port', 'ssl', 'web_root', 'delay', 'method', 'delete_failed', 'remoteCPS']
|
cfgKeys = ['cpsCategory', 'apikey', 'host', 'port', 'ssl', 'web_root', 'delay', 'method', 'delete_failed', 'remoteCPS', 'wait_for']
|
||||||
for index in range(len(envKeys)):
|
for index in range(len(envKeys)):
|
||||||
key = 'NZBPO_CPS' + envKeys[index]
|
key = 'NZBPO_CPS' + envKeys[index]
|
||||||
if os.environ.has_key(key):
|
if os.environ.has_key(key):
|
||||||
|
@ -305,8 +305,8 @@ def addnzbget():
|
||||||
|
|
||||||
|
|
||||||
section = "SickBeard"
|
section = "SickBeard"
|
||||||
envKeys = ['CATEGORY', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK']
|
envKeys = ['CATEGORY', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'DELAY', 'WAIT_FOR']
|
||||||
cfgKeys = ['sbCategory', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork']
|
cfgKeys = ['sbCategory', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'delay', 'wait_for']
|
||||||
for index in range(len(envKeys)):
|
for index in range(len(envKeys)):
|
||||||
key = 'NZBPO_SB' + envKeys[index]
|
key = 'NZBPO_SB' + envKeys[index]
|
||||||
if os.environ.has_key(key):
|
if os.environ.has_key(key):
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
# Make things easy and less error prone by centralising all common values
|
# Make things easy and less error prone by centralising all common values
|
||||||
|
|
||||||
# Global Constants
|
# Global Constants
|
||||||
VERSION = 'V9.1'
|
VERSION = 'V9.2'
|
||||||
TimeOut = 60
|
TimeOut = 60
|
||||||
|
|
||||||
# Constants pertinant to SabNzb
|
# Constants pertinant to SabNzb
|
||||||
|
|
|
@ -39,6 +39,9 @@ def create_destination(outputDestination):
|
||||||
sys.exit(-1)
|
sys.exit(-1)
|
||||||
|
|
||||||
def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
|
if not os.path.isdir(inputDirectory) and os.path.isfile(inputDirectory): # If the input directory is a file, assume single file downlaod and split dir/name.
|
||||||
|
inputDirectory,inputName = os.path.split(os.path.normpath(inputDirectory))
|
||||||
|
|
||||||
if inputCategory and os.path.isdir(os.path.join(inputDirectory, inputCategory)):
|
if inputCategory and os.path.isdir(os.path.join(inputDirectory, inputCategory)):
|
||||||
Logger.info("SEARCH: Found category directory %s in input directory directory %s", inputCategory, inputDirectory)
|
Logger.info("SEARCH: Found category directory %s in input directory directory %s", inputCategory, inputDirectory)
|
||||||
inputDirectory = os.path.join(inputDirectory, inputCategory)
|
inputDirectory = os.path.join(inputDirectory, inputCategory)
|
||||||
|
@ -93,8 +96,21 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
Logger.info("SEARCH: Identified Category: %s and Torrent Name: %s. We are in a unique directory, so we can proceed.", inputCategory, inputName)
|
Logger.info("SEARCH: Identified Category: %s and Torrent Name: %s. We are in a unique directory, so we can proceed.", inputCategory, inputName)
|
||||||
break # we are done
|
break # we are done
|
||||||
elif categorySearch[1] and not inputName: # assume the the next directory deep is the torrent name.
|
elif categorySearch[1] and not inputName: # assume the the next directory deep is the torrent name.
|
||||||
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], categorySearch[1]), categorySearch[0])
|
|
||||||
inputName = categorySearch[1]
|
inputName = categorySearch[1]
|
||||||
|
Logger.info("SEARCH: Found torrent name: %s", categorySearch[1])
|
||||||
|
if os.path.isdir(os.path.join(categorySearch[0], categorySearch[1])):
|
||||||
|
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], categorySearch[1]), categorySearch[0])
|
||||||
|
inputDirectory = os.path.normpath(os.path.join(categorySearch[0], categorySearch[1]))
|
||||||
|
elif os.path.isfile(os.path.join(categorySearch[0], categorySearch[1])): # Our inputdirectory is actually the full file path for single file download.
|
||||||
|
Logger.info("SEARCH: %s is a file, not a directory.", os.path.join(categorySearch[0], categorySearch[1]))
|
||||||
|
Logger.info("SEARCH: Setting input directory to %s", categorySearch[0])
|
||||||
|
root = 1
|
||||||
|
inputDirectory = os.path.normpath(categorySearch[0])
|
||||||
|
else: # The inputdirectory given can't have been valid. Start at the category directory and search for date modified.
|
||||||
|
Logger.info("SEARCH: Input Directory %s doesn't exist as a directory or file", inputDirectory)
|
||||||
|
Logger.info("SEARCH: Setting input directory to %s and checking for files by date modified.", categorySearch[0])
|
||||||
|
root = 2
|
||||||
|
inputDirectory = os.path.normpath(categorySearch[0])
|
||||||
break # we are done
|
break # we are done
|
||||||
elif ('.cp(tt' in categorySearch[1]) and (not '.cp(tt' in inputName): # if the directory was created by CouchPotato, and this tag is not in Torrent name, we want to add it.
|
elif ('.cp(tt' in categorySearch[1]) and (not '.cp(tt' in inputName): # if the directory was created by CouchPotato, and this tag is not in Torrent name, we want to add it.
|
||||||
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
|
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
|
||||||
|
@ -110,6 +126,11 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
|
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
|
||||||
inputDirectory = os.path.join(categorySearch[0], safeName(inputName)) # we only want to search this next dir up.
|
inputDirectory = os.path.join(categorySearch[0], safeName(inputName)) # we only want to search this next dir up.
|
||||||
break # we are done
|
break # we are done
|
||||||
|
elif inputName and os.path.isfile(os.path.join(categorySearch[0], inputName)) or os.path.isfile(os.path.join(categorySearch[0], safeName(inputName))): # testing for torrent name name as file inside category directory
|
||||||
|
Logger.info("SEARCH: Found torrent file %s in category directory %s", os.path.join(categorySearch[0], safeName(inputName)), categorySearch[0])
|
||||||
|
root = 1
|
||||||
|
inputDirectory = os.path.normpath(categorySearch[0])
|
||||||
|
break # we are done
|
||||||
elif inputName: # if these exists, we are ok to proceed, but we are in a root/common directory.
|
elif inputName: # if these exists, we are ok to proceed, but we are in a root/common directory.
|
||||||
Logger.info("SEARCH: Could not find a unique torrent folder in the directory structure")
|
Logger.info("SEARCH: Could not find a unique torrent folder in the directory structure")
|
||||||
Logger.info("SEARCH: The directory passed is the root directory for category %s", categorySearch2[1])
|
Logger.info("SEARCH: The directory passed is the root directory for category %s", categorySearch2[1])
|
||||||
|
@ -123,7 +144,7 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
||||||
root = 2
|
root = 2
|
||||||
break
|
break
|
||||||
elif inputName and safeName(categorySearch2[1]) == safeName(inputName): # we have identified a unique directory.
|
elif inputName and safeName(categorySearch2[1]) == safeName(inputName) and os.path.isdir(categorySearch[0]): # we have identified a unique directory.
|
||||||
Logger.info("SEARCH: Files appear to be in their own directory")
|
Logger.info("SEARCH: Files appear to be in their own directory")
|
||||||
unique = int(1)
|
unique = int(1)
|
||||||
if inputCategory: # we are ok to proceed.
|
if inputCategory: # we are ok to proceed.
|
||||||
|
@ -161,11 +182,18 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
return inputDirectory, inputName, inputCategory, root
|
return inputDirectory, inputName, inputCategory, root
|
||||||
|
|
||||||
|
|
||||||
def is_sample(filePath, inputName, minSampleSize):
|
def is_sample(filePath, inputName, minSampleSize, SampleIDs):
|
||||||
# 200 MB in bytes
|
# 200 MB in bytes
|
||||||
SIZE_CUTOFF = minSampleSize * 1024 * 1024
|
SIZE_CUTOFF = minSampleSize * 1024 * 1024
|
||||||
# Ignore 'sample' in files unless 'sample' in Torrent Name
|
if os.path.getsize(filePath) < SIZE_CUTOFF:
|
||||||
return ('sample' in filePath.lower()) and (not 'sample' in inputName) and (os.path.getsize(filePath) < SIZE_CUTOFF)
|
if 'SizeOnly' in SampleIDs:
|
||||||
|
return True
|
||||||
|
# Ignore 'sample' in files unless 'sample' in Torrent Name
|
||||||
|
for ident in SampleIDs:
|
||||||
|
if ident.lower() in filePath.lower() and not ident.lower() in inputName.lower():
|
||||||
|
return True
|
||||||
|
# Return False if none of these were met.
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
def copy_link(filePath, targetDirectory, useLink, outputDestination):
|
def copy_link(filePath, targetDirectory, useLink, outputDestination):
|
||||||
|
@ -331,8 +359,8 @@ def converto_to_ascii(nzbName, dirName):
|
||||||
nzbName2 = str(nzbName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
nzbName2 = str(nzbName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
||||||
dirName2 = str(dirName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
dirName2 = str(dirName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
||||||
if dirName != dirName2:
|
if dirName != dirName2:
|
||||||
Logger.info("Renaming directory:%s to: %s.", dirName, nzbName2)
|
Logger.info("Renaming directory:%s to: %s.", dirName, dirName2)
|
||||||
shutil.move(dirName, nzbName2)
|
shutil.move(dirName, dirName2)
|
||||||
for dirpath, dirnames, filesnames in os.walk(dirName2):
|
for dirpath, dirnames, filesnames in os.walk(dirName2):
|
||||||
for filename in filesnames:
|
for filename in filesnames:
|
||||||
filename2 = str(filename.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
filename2 = str(filename.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
||||||
|
@ -340,12 +368,34 @@ def converto_to_ascii(nzbName, dirName):
|
||||||
Logger.info("Renaming file:%s to: %s.", filename, filename2)
|
Logger.info("Renaming file:%s to: %s.", filename, filename2)
|
||||||
shutil.move(filename, filename2)
|
shutil.move(filename, filename2)
|
||||||
nzbName = nzbName2
|
nzbName = nzbName2
|
||||||
dirName = nzbName2
|
dirName = dirName2
|
||||||
return nzbName, dirName
|
return nzbName, dirName
|
||||||
|
|
||||||
def parse_other(args):
|
def parse_other(args):
|
||||||
return os.path.normpath(sys.argv[1]), '', '', '', ''
|
return os.path.normpath(args[1]), '', '', '', ''
|
||||||
|
|
||||||
|
def parse_rtorrent(args):
|
||||||
|
# rtorrent usage: system.method.set_key = event.download.finished,TorrentToMedia,
|
||||||
|
# "execute={/path/to/nzbToMedia/TorrentToMedia.py,\"$d.get_base_path=\",\"$d.get_name=\",\"$d.get_custom1=\",\"$d.get_hash=\"}"
|
||||||
|
inputDirectory = os.path.normpath(args[1])
|
||||||
|
try:
|
||||||
|
inputName = args[2]
|
||||||
|
except:
|
||||||
|
inputName = ''
|
||||||
|
try:
|
||||||
|
inputCategory = args[3]
|
||||||
|
except:
|
||||||
|
inputCategory = ''
|
||||||
|
try:
|
||||||
|
inputHash = args[4]
|
||||||
|
except:
|
||||||
|
inputHash = ''
|
||||||
|
try:
|
||||||
|
inputID = args[4]
|
||||||
|
except:
|
||||||
|
inputID = ''
|
||||||
|
|
||||||
|
return inputDirectory, inputName, inputCategory, inputHash, inputID
|
||||||
|
|
||||||
def parse_utorrent(args):
|
def parse_utorrent(args):
|
||||||
# uTorrent usage: call TorrentToMedia.py "%D" "%N" "%L" "%I"
|
# uTorrent usage: call TorrentToMedia.py "%D" "%N" "%L" "%I"
|
||||||
|
@ -389,6 +439,7 @@ def parse_transmission(args):
|
||||||
|
|
||||||
__ARG_PARSERS__ = {
|
__ARG_PARSERS__ = {
|
||||||
'other': parse_other,
|
'other': parse_other,
|
||||||
|
'rtorrent': parse_rtorrent,
|
||||||
'utorrent': parse_utorrent,
|
'utorrent': parse_utorrent,
|
||||||
'deluge': parse_deluge,
|
'deluge': parse_deluge,
|
||||||
'transmission': parse_transmission,
|
'transmission': parse_transmission,
|
||||||
|
|
|
@ -14,7 +14,7 @@ web_root =
|
||||||
delay = 65
|
delay = 65
|
||||||
method = renamer
|
method = renamer
|
||||||
delete_failed = 0
|
delete_failed = 0
|
||||||
wait_for = 2
|
wait_for = 5
|
||||||
#### Set to 1 if CouchPotatoServer is running on a different server to your NZB client
|
#### Set to 1 if CouchPotatoServer is running on a different server to your NZB client
|
||||||
remoteCPS = 0
|
remoteCPS = 0
|
||||||
|
|
||||||
|
@ -31,9 +31,12 @@ password =
|
||||||
web_root =
|
web_root =
|
||||||
ssl = 0
|
ssl = 0
|
||||||
delay = 0
|
delay = 0
|
||||||
|
wait_for = 5
|
||||||
watch_dir =
|
watch_dir =
|
||||||
fork = default
|
fork = default
|
||||||
delete_failed = 0
|
delete_failed = 0
|
||||||
|
nzbExtractionBy = Downloader
|
||||||
|
Torrent_ForceLink = 1
|
||||||
|
|
||||||
|
|
||||||
[HeadPhones]
|
[HeadPhones]
|
||||||
|
@ -76,14 +79,16 @@ web_root =
|
||||||
|
|
||||||
|
|
||||||
[Torrent]
|
[Torrent]
|
||||||
###### clientAgent - Supported clients: utorrent, transmission, deluge, other
|
###### clientAgent - Supported clients: utorrent, transmission, deluge, rtorrent, other
|
||||||
clientAgent = other
|
clientAgent = other
|
||||||
###### useLink - Set to hard for physical links, sym for symbolic links, move to move, and no to not use links (copy)
|
###### useLink - Set to hard for physical links, sym for symbolic links, move to move, and no to not use links (copy)
|
||||||
useLink = hard
|
useLink = hard
|
||||||
###### outputDirectory - Default output directory (categories will be appended as sub directory to outputDirectory)
|
###### outputDirectory - Default output directory (categories will be appended as sub directory to outputDirectory)
|
||||||
outputDirectory = /abs/path/to/complete/
|
outputDirectory = /abs/path/to/complete/
|
||||||
###### Other categories/labels defined for your downloader. Does not include CouchPotato, SickBeard, HeadPhones, Mylar categories.
|
###### Other categories/labels defined for your downloader. Does not include CouchPotato, SickBeard, HeadPhones, Mylar categories.
|
||||||
categories = music_videos,pictures,software,
|
categories = music_videos,pictures,software,manual
|
||||||
|
###### A list of categories that you don't want to be flattened (i.e preserve the directory structure when copying/linking.
|
||||||
|
noFlatten = pictures,manual
|
||||||
###### uTorrent Hardlink solution (You must edit this if your using TorrentToMedia.py with uTorrent)
|
###### uTorrent Hardlink solution (You must edit this if your using TorrentToMedia.py with uTorrent)
|
||||||
uTorrentWEBui = http://localhost:8090/gui/
|
uTorrentWEBui = http://localhost:8090/gui/
|
||||||
uTorrentUSR = your username
|
uTorrentUSR = your username
|
||||||
|
@ -93,6 +98,11 @@ TransmissionHost = localhost
|
||||||
TransmissionPort = 8084
|
TransmissionPort = 8084
|
||||||
TransmissionUSR = your username
|
TransmissionUSR = your username
|
||||||
TransmissionPWD = your password
|
TransmissionPWD = your password
|
||||||
|
#### Deluge (You must edit this if your using TorrentToMedia.py with deluge. Note that the host/port is for the deluge daemon, not the webui)
|
||||||
|
DelugeHost = localhost
|
||||||
|
DelugePort = 58846
|
||||||
|
DelugeUSR = your username
|
||||||
|
DelugePWD = your password
|
||||||
###### ADVANCED USE - ONLY EDIT IF YOU KNOW WHAT YOU'RE DOING ######
|
###### ADVANCED USE - ONLY EDIT IF YOU KNOW WHAT YOU'RE DOING ######
|
||||||
deleteOriginal = 0
|
deleteOriginal = 0
|
||||||
|
|
||||||
|
@ -102,6 +112,8 @@ mediaExtensions = .mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg,.vob,.iso,.m4v
|
||||||
metaExtensions = .nfo,.sub,.srt,.jpg,.gif
|
metaExtensions = .nfo,.sub,.srt,.jpg,.gif
|
||||||
###### minSampleSize - Minimum required size to consider a media file not a sample file (in MB, eg 200mb)
|
###### minSampleSize - Minimum required size to consider a media file not a sample file (in MB, eg 200mb)
|
||||||
minSampleSize = 200
|
minSampleSize = 200
|
||||||
|
###### SampleIDs - a list of common sample identifiers. Use SizeOnly to ignore this and delete all media files less than minSampleSize
|
||||||
|
SampleIDs = sample,-s.
|
||||||
|
|
||||||
[Transcoder]
|
[Transcoder]
|
||||||
transcode = 0
|
transcode = 0
|
||||||
|
@ -140,9 +152,11 @@ user_script_mediaExtensions = .mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg
|
||||||
#Specify the path of the script
|
#Specify the path of the script
|
||||||
user_script_path = /media/test/script/script.sh
|
user_script_path = /media/test/script/script.sh
|
||||||
#Specify the argument(s) passed to script, comma separated in order.
|
#Specify the argument(s) passed to script, comma separated in order.
|
||||||
#for example FP,FN,DN for file path (absolute file name with path), file name, absolute directory name (with path).
|
#for example FP,FN,DN, TN, TL for file path (absolute file name with path), file name, absolute directory name (with path), Torrent Name, Torrent Label/Category.
|
||||||
#So the result is /media/test/script/script.sh FP FN DN. Add other arguments as needed eg -f, -r
|
#So the result is /media/test/script/script.sh FP FN DN TN TL. Add other arguments as needed eg -f, -r
|
||||||
user_script_param = FN
|
user_script_param = FN
|
||||||
|
#Set user_script_runOnce = 0 to run for each file, or 1 to only run once (presumably on teh entire directory).
|
||||||
|
user_script_runOnce = 0
|
||||||
#Specify the successcodes returned by the user script as a comma separated list. Linux default is 0
|
#Specify the successcodes returned by the user script as a comma separated list. Linux default is 0
|
||||||
user_script_successCodes = 0
|
user_script_successCodes = 0
|
||||||
#Clean after? Note that delay function is used to prevent possible mistake :) Delay is intended as seconds
|
#Clean after? Note that delay function is used to prevent possible mistake :) Delay is intended as seconds
|
||||||
|
|
|
@ -1,5 +1,26 @@
|
||||||
Change_LOG / History
|
Change_LOG / History
|
||||||
|
|
||||||
|
V9.2 05/03/2014
|
||||||
|
|
||||||
|
Impacts All
|
||||||
|
Change default "wait_for" to 5 mins. CouchPotato can take more than 2 minutes to return on renamer.scan request.
|
||||||
|
Added SickBeard "wait_for" to bw customizable to prevent unwanted timeouts.
|
||||||
|
Fixed ascii conversion of directory name.
|
||||||
|
Added list of common sample ids and a way to set deletion of All media files less than the sample file size limit.
|
||||||
|
Added urlquote to dirName for CouchPotato (allows special characters in directory name)
|
||||||
|
|
||||||
|
Impacts NZBs
|
||||||
|
Fix Error with manual run of nzbToMedia
|
||||||
|
Make sure SickBeard receives the individula download dir.
|
||||||
|
Added option to set SickBeard extraction as either Downlaoder or Destination (SickBeard).
|
||||||
|
Fixed Health Check handling for NZBGet.
|
||||||
|
|
||||||
|
Impacts Torrents
|
||||||
|
Added option to run userscript once only (on directory).
|
||||||
|
Added Option to not flatten specific categories.
|
||||||
|
Added rtorrent integration.
|
||||||
|
Fixes for HeadPhones use (no flatten), no move/sym, and fix move back to original.
|
||||||
|
|
||||||
V9.1 24/01/2014
|
V9.1 24/01/2014
|
||||||
|
|
||||||
Impacts All
|
Impacts All
|
||||||
|
|
|
@ -53,6 +53,11 @@
|
||||||
# set to 1 to delete failed, or 0 to leave files in place.
|
# set to 1 to delete failed, or 0 to leave files in place.
|
||||||
#cpsdelete_failed=0
|
#cpsdelete_failed=0
|
||||||
|
|
||||||
|
# CouchPotato wait_for
|
||||||
|
#
|
||||||
|
# Set the number of minutes to wait before timing out. If transfering files across drives or network, increase this to longer than the time it takes to copy a movie.
|
||||||
|
#cpswait_for=5
|
||||||
|
|
||||||
# CouchPotatoServer and NZBGet are a different system (0, 1).
|
# CouchPotatoServer and NZBGet are a different system (0, 1).
|
||||||
#
|
#
|
||||||
# set to 1 if CouchPotato and NZBGet are on a different system, or 0 if on the same system.
|
# set to 1 if CouchPotato and NZBGet are on a different system, or 0 if on the same system.
|
||||||
|
@ -152,58 +157,46 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
Logger.error("Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
Logger.error("MAIN: Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
Logger.warning("Par-check successful, but Par-repair disabled, exiting")
|
Logger.warning("MAIN: Par-check successful, but Par-repair disabled, exiting")
|
||||||
|
Logger.info("MAIN: Please check your Par-repair settings for future downloads.")
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
Logger.warning("Par-check failed, setting status \"failed\"")
|
Logger.warning("MAIN: Par-repair failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
Logger.warning("Unpack failed, setting status \"failed\"")
|
Logger.warning("MAIN: Unpack failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
Logger.warning("MAIN: Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"")
|
||||||
fileExtension = os.path.splitext(file)[1]
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
Logger.warning("Post-Process: Archive files exist but unpack skipped, setting status \"failed\"")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
Logger.warning("Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
Logger.warning("Post-Process: _brokenlog.txt exists, download is probably damaged, exiting")
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
Logger.info("Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful")
|
Logger.info("MAIN: Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful")
|
||||||
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
Logger.error("Post-Process: Nothing to post-process: destination directory %s doesn't exist", os.environ['NZBPP_DIRECTORY'])
|
Logger.error("MAIN: Nothing to post-process: destination directory %s doesn't exist. Setting status \"failed\"", os.environ['NZBPP_DIRECTORY'])
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
download_id = ""
|
download_id = ""
|
||||||
if os.environ.has_key('NZBPR_COUCHPOTATO'):
|
if os.environ.has_key('NZBPR_COUCHPOTATO'):
|
||||||
download_id = os.environ['NZBPR_COUCHPOTATO']
|
download_id = os.environ['NZBPR_COUCHPOTATO']
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessMovie...")
|
Logger.info("MAIN: Script triggered from NZBGet, starting autoProcessMovie...")
|
||||||
clientAgent = "nzbget"
|
|
||||||
result = autoProcessMovie.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status, clientAgent, download_id)
|
result = autoProcessMovie.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status, clientAgent, download_id)
|
||||||
# SABnzbd Pre 0.7.17
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
|
@ -215,7 +208,7 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 5 User-defined category
|
# 5 User-defined category
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessMovie...")
|
Logger.info("MAIN: Script triggered from SABnzbd, starting autoProcessMovie...")
|
||||||
clientAgent = "sabnzbd"
|
clientAgent = "sabnzbd"
|
||||||
result = autoProcessMovie.process(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent)
|
result = autoProcessMovie.process(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent)
|
||||||
# SABnzbd 0.7.17+
|
# SABnzbd 0.7.17+
|
||||||
|
@ -229,12 +222,12 @@ elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
# 8 Failure URL
|
# 8 Failure URL
|
||||||
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessMovie...")
|
Logger.info("MAIN: Script triggered from SABnzbd 0.7.17+, starting autoProcessMovie...")
|
||||||
clientAgent = "sabnzbd"
|
clientAgent = "sabnzbd"
|
||||||
result = autoProcessMovie.process(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent)
|
result = autoProcessMovie.process(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent)
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client.")
|
Logger.warn("MAIN: Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessMovie as a manual run...")
|
Logger.info("MAIN: Running autoProcessMovie as a manual run...")
|
||||||
clientAgent = "manual"
|
clientAgent = "manual"
|
||||||
result = autoProcessMovie.process('Manual Run', 'Manual Run', 0, clientAgent)
|
result = autoProcessMovie.process('Manual Run', 'Manual Run', 0, clientAgent)
|
||||||
|
|
||||||
|
|
|
@ -97,54 +97,43 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
Logger.error("Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
Logger.error("MAIN: Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
Logger.warning("Par-check successful, but Par-repair disabled, exiting")
|
Logger.warning("MAIN: Par-check successful, but Par-repair disabled, exiting")
|
||||||
|
Logger.info("MAIN: Please check your Par-repair settings for future downloads.")
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
Logger.warning("Par-check failed, setting status \"failed\"")
|
Logger.warning("MAIN: Par-repair failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
Logger.warning("Unpack failed, setting status \"failed\"")
|
Logger.warning("MAIN: Unpack failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
Logger.warning("MAIN: Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"")
|
||||||
fileExtension = os.path.splitext(file)[1]
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
Logger.warning("Post-Process: Archive files exist but unpack skipped, setting status \"failed\"")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
Logger.warning("Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
Logger.warning("Post-Process: _brokenlog.txt exists, download is probably damaged, exiting")
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
Logger.info("Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful")
|
Logger.info("MAIN: Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful")
|
||||||
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
Logger.error("Post-Process: Nothing to post-process: destination directory %s doesn't exist", os.environ['NZBPP_DIRECTORY'])
|
Logger.error("MAIN: Nothing to post-process: destination directory %s doesn't exist. Setting status \"failed\"", os.environ['NZBPP_DIRECTORY'])
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessGames...")
|
Logger.info("MAIN: Script triggered from NZBGet, starting autoProcessGames...")
|
||||||
result = autoProcessGames.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
result = autoProcessGames.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
||||||
# SABnzbd Pre 0.7.17
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
|
@ -156,7 +145,7 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 5 User-defined category
|
# 5 User-defined category
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessGames...")
|
Logger.info("MAIN: Script triggered from SABnzbd, starting autoProcessGames...")
|
||||||
result = autoProcessGames.process(sys.argv[1], sys.argv[3], sys.argv[7])
|
result = autoProcessGames.process(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
# SABnzbd 0.7.17+
|
# SABnzbd 0.7.17+
|
||||||
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
@ -169,10 +158,10 @@ elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
# 8 Failure URL
|
# 8 Failure URL
|
||||||
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessGames...")
|
Logger.info("MAIN: Script triggered from SABnzbd 0.7.17+, starting autoProcessGames...")
|
||||||
result = autoProcessGames.process(sys.argv[1], sys.argv[3], sys.argv[7])
|
result = autoProcessGames.process(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client. Exiting")
|
Logger.warn("MAIN: Invalid number of arguments received from client. Exiting")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if result == 0:
|
if result == 0:
|
||||||
|
|
|
@ -102,54 +102,43 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
Logger.error("Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
Logger.error("MAIN: Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
Logger.warning("Par-check successful, but Par-repair disabled, exiting")
|
Logger.warning("MAIN: Par-check successful, but Par-repair disabled, exiting")
|
||||||
|
Logger.info("MAIN: Please check your Par-repair settings for future downloads.")
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
Logger.warning("Par-check failed, setting status \"failed\"")
|
Logger.warning("MAIN: Par-repair failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
Logger.warning("Unpack failed, setting status \"failed\"")
|
Logger.warning("MAIN: Unpack failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
Logger.warning("MAIN: Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"")
|
||||||
fileExtension = os.path.splitext(file)[1]
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
Logger.warning("Post-Process: Archive files exist but unpack skipped, setting status \"failed\"")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
Logger.warning("Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
Logger.warning("Post-Process: _brokenlog.txt exists, download is probably damaged, exiting")
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
Logger.info("Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful")
|
Logger.info("MAIN: Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful")
|
||||||
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
Logger.error("Post-Process: Nothing to post-process: destination directory %s doesn't exist", os.environ['NZBPP_DIRECTORY'])
|
Logger.error("MAIN: Nothing to post-process: destination directory %s doesn't exist. Setting status \"failed\"", os.environ['NZBPP_DIRECTORY'])
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script
|
# All checks done, now launching the script.
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessMusic...")
|
Logger.info("MAIN: Script triggered from NZBGet, starting autoProcessMusic...")
|
||||||
result = autoProcessMusic.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
result = autoProcessMusic.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
||||||
# SABnzbd Pre 0.7.17
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
|
@ -161,7 +150,7 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 5 User-defined category
|
# 5 User-defined category
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessMusic...")
|
Logger.info("MAIN: Script triggered from SABnzbd, starting autoProcessMusic...")
|
||||||
result = autoProcessMusic.process(sys.argv[1], sys.argv[2], sys.argv[7])
|
result = autoProcessMusic.process(sys.argv[1], sys.argv[2], sys.argv[7])
|
||||||
# SABnzbd 0.7.17+
|
# SABnzbd 0.7.17+
|
||||||
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
@ -174,11 +163,11 @@ elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
# 8 Failue URL
|
# 8 Failue URL
|
||||||
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessMusic...")
|
Logger.info("MAIN: Script triggered from SABnzbd 0.7.17+, starting autoProcessMusic...")
|
||||||
result = autoProcessMusic.process(sys.argv[1], sys.argv[2], sys.argv[7])
|
result = autoProcessMusic.process(sys.argv[1], sys.argv[2], sys.argv[7])
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client.")
|
Logger.warn("MAIN: Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessMusic as a manual run...")
|
Logger.info("MAIN: Running autoProcessMusic as a manual run...")
|
||||||
result = autoProcessMusic.process('Manual Run', 'Manual Run', 0)
|
result = autoProcessMusic.process('Manual Run', 'Manual Run', 0)
|
||||||
|
|
||||||
if result == 0:
|
if result == 0:
|
||||||
|
|
|
@ -53,6 +53,11 @@
|
||||||
# set to 1 to delete failed, or 0 to leave files in place.
|
# set to 1 to delete failed, or 0 to leave files in place.
|
||||||
#cpsdelete_failed=0
|
#cpsdelete_failed=0
|
||||||
|
|
||||||
|
# CouchPotato wait_for
|
||||||
|
#
|
||||||
|
# Set the number of minutes to wait before timing out. If transfering files across drives or network, increase this to longer than the time it takes to copy a movie.
|
||||||
|
#cpswait_for=5
|
||||||
|
|
||||||
# CouchPotatoServer and NZBGet are a different system (0, 1).
|
# CouchPotatoServer and NZBGet are a different system (0, 1).
|
||||||
#
|
#
|
||||||
# set to 1 if CouchPotato and NZBGet are on a different system, or 0 if on the same system.
|
# set to 1 if CouchPotato and NZBGet are on a different system, or 0 if on the same system.
|
||||||
|
@ -87,6 +92,16 @@
|
||||||
# set this if using a reverse proxy.
|
# set this if using a reverse proxy.
|
||||||
#sbweb_root=
|
#sbweb_root=
|
||||||
|
|
||||||
|
# SickBeard delay
|
||||||
|
#
|
||||||
|
# Set the number of seconds to wait before calling post-process in SickBeard.
|
||||||
|
#sbdelay=0
|
||||||
|
|
||||||
|
# SickBeard wait_for
|
||||||
|
#
|
||||||
|
# Set the number of minutes to wait before timing out. If transferring files across drives or network, increase this to longer than the time it takes to copy an episode.
|
||||||
|
#sbwait_for=5
|
||||||
|
|
||||||
# SickBeard watch directory.
|
# SickBeard watch directory.
|
||||||
#
|
#
|
||||||
# set this if SickBeard and nzbGet are on different systems.
|
# set this if SickBeard and nzbGet are on different systems.
|
||||||
|
@ -273,7 +288,7 @@ WakeUp()
|
||||||
config = ConfigParser.ConfigParser()
|
config = ConfigParser.ConfigParser()
|
||||||
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
||||||
if not os.path.isfile(configFilename):
|
if not os.path.isfile(configFilename):
|
||||||
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
|
Logger.error("MAIN: You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
|
||||||
sys.exit(-1)
|
sys.exit(-1)
|
||||||
# CONFIG FILE
|
# CONFIG FILE
|
||||||
Logger.info("MAIN: Loading config from %s", configFilename)
|
Logger.info("MAIN: Loading config from %s", configFilename)
|
||||||
|
@ -302,50 +317,39 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
Logger.error("Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
Logger.error("MAIN: Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
Logger.warning("Par-check successful, but Par-repair disabled, exiting")
|
Logger.warning("MAIN: Par-check successful, but Par-repair disabled, exiting")
|
||||||
|
Logger.info("MAIN: Please check your Par-repair settings for future downloads.")
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
Logger.warning("Par-check failed, setting status \"failed\"")
|
Logger.warning("MAIN: Par-repair failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
Logger.warning("Unpack failed, setting status \"failed\"")
|
Logger.warning("MAIN: Unpack failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
Logger.warning("MAIN: Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"")
|
||||||
fileExtension = os.path.splitext(file)[1]
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
Logger.warning("Post-Process: Archive files exist but unpack skipped, setting status \"failed\"")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
Logger.warning("Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
Logger.warning("Post-Process: _brokenlog.txt exists, download is probably damaged, exiting")
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
Logger.info("Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful")
|
Logger.info("MAIN: Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful")
|
||||||
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
Logger.error("Post-Process: Nothing to post-process: destination directory %s doesn't exist", os.environ['NZBPP_DIRECTORY'])
|
Logger.error("MAIN: Nothing to post-process: destination directory %s doesn't exist. Setting status \"failed\"", os.environ['NZBPP_DIRECTORY'])
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
|
@ -384,14 +388,14 @@ else: # only CPS supports this manual run for now.
|
||||||
Logger.warn("MAIN: Invalid number of arguments received from client.")
|
Logger.warn("MAIN: Invalid number of arguments received from client.")
|
||||||
Logger.info("MAIN: Running autoProcessMovie as a manual run...")
|
Logger.info("MAIN: Running autoProcessMovie as a manual run...")
|
||||||
clientAgent = "manual"
|
clientAgent = "manual"
|
||||||
nzbDir, inputName, status, inputCategory, download_id = ('Manual Run', 'Manual Run', 0, cpsCategory, '')
|
nzbDir, inputName, status, inputCategory, download_id = ('Manual Run', 'Manual Run', 0, cpsCategory[0], '')
|
||||||
|
|
||||||
if inputCategory in cpsCategory:
|
if inputCategory in cpsCategory:
|
||||||
Logger.info("MAIN: Calling CouchPotatoServer to post-process: %s", inputName)
|
Logger.info("MAIN: Calling CouchPotatoServer to post-process: %s", inputName)
|
||||||
result = autoProcessMovie.process(nzbDir, inputName, status, clientAgent, download_id, inputCategory)
|
result = autoProcessMovie.process(nzbDir, inputName, status, clientAgent, download_id, inputCategory)
|
||||||
elif inputCategory in sbCategory:
|
elif inputCategory in sbCategory:
|
||||||
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
||||||
result = autoProcessTV.processEpisode(nzbDir, inputName, status, inputCategory)
|
result = autoProcessTV.processEpisode(nzbDir, inputName, status, clientAgent, inputCategory)
|
||||||
elif inputCategory in hpCategory:
|
elif inputCategory in hpCategory:
|
||||||
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
||||||
result = autoProcessMusic.process(nzbDir, inputName, status, inputCategory)
|
result = autoProcessMusic.process(nzbDir, inputName, status, inputCategory)
|
||||||
|
|
|
@ -100,54 +100,43 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
Logger.error("Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
Logger.error("MAIN: Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
Logger.warning("Par-check successful, but Par-repair disabled, exiting")
|
Logger.warning("MAIN: Par-check successful, but Par-repair disabled, exiting")
|
||||||
|
Logger.info("MAIN: Please check your Par-repair settings for future downloads.")
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
Logger.warning("Par-check failed, setting status \"failed\"")
|
Logger.warning("MAIN: Par-repair failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
Logger.warning("Unpack failed, setting status \"failed\"")
|
Logger.warning("MAIN: Unpack failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
Logger.warning("MAIN: Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"")
|
||||||
fileExtension = os.path.splitext(file)[1]
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
Logger.warning("Post-Process: Archive files exist but unpack skipped, setting status \"failed\"")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
Logger.warning("Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
Logger.warning("Post-Process: _brokenlog.txt exists, download is probably damaged, exiting")
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
Logger.info("Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful")
|
Logger.info("MAIN: Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful")
|
||||||
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
Logger.error("Post-Process: Nothing to post-process: destination directory %s doesn't exist", os.environ['NZBPP_DIRECTORY'])
|
Logger.error("MAIN: Nothing to post-process: destination directory %s doesn't exist. Setting status \"failed\"", os.environ['NZBPP_DIRECTORY'])
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessComics...")
|
Logger.info("MAIN: Script triggered from NZBGet, starting autoProcessComics...")
|
||||||
result = autoProcessComics.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
result = autoProcessComics.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
||||||
# SABnzbd Pre 0.7.17
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
|
@ -159,7 +148,7 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 5 User-defined category
|
# 5 User-defined category
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessComics...")
|
Logger.info("MAIN: Script triggered from SABnzbd, starting autoProcessComics...")
|
||||||
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[3], sys.argv[7])
|
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
# SABnzbd 0.7.17+
|
# SABnzbd 0.7.17+
|
||||||
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
@ -172,11 +161,11 @@ elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
# 8 Failure URL
|
# 8 Failure URL
|
||||||
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessComics...")
|
Logger.info("MAIN: Script triggered from SABnzbd 0.7.17+, starting autoProcessComics...")
|
||||||
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[3], sys.argv[7])
|
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client.")
|
Logger.warn("MAIN: Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessComics as a manual run...")
|
Logger.info("MAIN: Running autoProcessComics as a manual run...")
|
||||||
result = autoProcessComics.processEpisode('Manual Run', 'Manual Run', 0)
|
result = autoProcessComics.processEpisode('Manual Run', 'Manual Run', 0)
|
||||||
|
|
||||||
if result == 0:
|
if result == 0:
|
||||||
|
|
|
@ -41,6 +41,16 @@
|
||||||
# set this if using a reverse proxy.
|
# set this if using a reverse proxy.
|
||||||
#sbweb_root=
|
#sbweb_root=
|
||||||
|
|
||||||
|
# SickBeard delay
|
||||||
|
#
|
||||||
|
# Set the number of seconds to wait before calling post-process in SickBeard.
|
||||||
|
#sbdelay=0
|
||||||
|
|
||||||
|
# SickBeard wait_for
|
||||||
|
#
|
||||||
|
# Set the number of minutes to wait before timing out. If transfering files across drives or network, increase this to longer than the time it takes to copy an episode.
|
||||||
|
#sbwait_for=5
|
||||||
|
|
||||||
# SickBeard watch directory.
|
# SickBeard watch directory.
|
||||||
#
|
#
|
||||||
# set this if SickBeard and nzbGet are on different systems.
|
# set this if SickBeard and nzbGet are on different systems.
|
||||||
|
@ -149,55 +159,45 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
if os.environ['NZBOP_UNPACK'] != 'yes':
|
if os.environ['NZBOP_UNPACK'] != 'yes':
|
||||||
Logger.error("Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
Logger.error("MAIN: Please enable option \"Unpack\" in nzbget configuration file, exiting")
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
# Check par status
|
# Check par status
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '3':
|
if os.environ['NZBPP_PARSTATUS'] == '3':
|
||||||
Logger.warning("Par-check successful, but Par-repair disabled, exiting")
|
Logger.warning("MAIN: Par-check successful, but Par-repair disabled, exiting")
|
||||||
|
Logger.info("MAIN: Please check your Par-repair settings for future downloads.")
|
||||||
sys.exit(POSTPROCESS_NONE)
|
sys.exit(POSTPROCESS_NONE)
|
||||||
|
|
||||||
if os.environ['NZBPP_PARSTATUS'] == '1':
|
if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
|
||||||
Logger.warning("Par-check failed, setting status \"failed\"")
|
Logger.warning("MAIN: Par-repair failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# Check unpack status
|
# Check unpack status
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '1':
|
||||||
Logger.warning("Unpack failed, setting status \"failed\"")
|
Logger.warning("MAIN: Unpack failed, setting status \"failed\"")
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] != '2':
|
if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
|
||||||
# Unpack is disabled or was skipped due to nzb-file properties or due to errors during par-check
|
# Unpack was skipped due to nzb-file properties or due to errors during par-check
|
||||||
|
|
||||||
for dirpath, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
|
if os.environ['NZBPP_HEALTH'] < 1000:
|
||||||
for file in filenames:
|
Logger.warning("MAIN: Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"")
|
||||||
fileExtension = os.path.splitext(file)[1]
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
if fileExtension in ['.rar', '.7z'] or os.path.splitext(fileExtension)[1] in ['.rar', '.7z']:
|
|
||||||
Logger.warning("Post-Process: Archive files exist but unpack skipped, setting status \"failed\"")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if fileExtension in ['.par2']:
|
|
||||||
Logger.warning("Post-Process: Unpack skipped and par-check skipped (although par2-files exist), setting status \"failed\"g")
|
|
||||||
status = 1
|
|
||||||
break
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(os.environ['NZBPP_DIRECTORY'], "_brokenlog.txt")) and not status == 1:
|
|
||||||
Logger.warning("Post-Process: _brokenlog.txt exists, download is probably damaged, exiting")
|
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
if not status == 1:
|
else:
|
||||||
Logger.info("Neither archive- nor par2-files found, _brokenlog.txt doesn't exist, considering download successful")
|
Logger.info("MAIN: Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful")
|
||||||
|
Logger.info("MAIN: Please check your Par-check/repair settings for future downloads.")
|
||||||
|
|
||||||
# Check if destination directory exists (important for reprocessing of history items)
|
# Check if destination directory exists (important for reprocessing of history items)
|
||||||
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
if not os.path.isdir(os.environ['NZBPP_DIRECTORY']):
|
||||||
Logger.error("Post-Process: Nothing to post-process: destination directory %s doesn't exist", os.environ['NZBPP_DIRECTORY'])
|
Logger.error("MAIN: Nothing to post-process: destination directory %s doesn't exist. Setting status \"failed\"", os.environ['NZBPP_DIRECTORY'])
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessTV...")
|
Logger.info("MAIN: Script triggered from NZBGet, starting autoProcessTV...")
|
||||||
result = autoProcessTV.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBFILENAME'], status)
|
clientAgent = "nzbget"
|
||||||
|
result = autoProcessTV.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBFILENAME'], status, clientAgent, os.environ['NZBPP_CATEGORY'])
|
||||||
# SABnzbd Pre 0.7.17
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
|
@ -208,8 +208,9 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 5 User-defined category
|
# 5 User-defined category
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessTV...")
|
Logger.info("MAIN: Script triggered from SABnzbd, starting autoProcessTV...")
|
||||||
result = autoProcessTV.processEpisode(sys.argv[1], sys.argv[2], sys.argv[7])
|
clientAgent = "sabnzbd"
|
||||||
|
result = autoProcessTV.processEpisode(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent, sys.argv[5])
|
||||||
# SABnzbd 0.7.17+
|
# SABnzbd 0.7.17+
|
||||||
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
|
@ -221,11 +222,12 @@ elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
# 8 Failure URL
|
# 8 Failure URL
|
||||||
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessTV...")
|
Logger.info("MAIN: Script triggered from SABnzbd 0.7.17+, starting autoProcessTV...")
|
||||||
result = autoProcessTV.processEpisode(sys.argv[1], sys.argv[2], sys.argv[7])
|
clientAgent = "sabnzbd"
|
||||||
|
result = autoProcessTV.processEpisode(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent, sys.argv[5])
|
||||||
else:
|
else:
|
||||||
Logger.debug("Invalid number of arguments received from client.")
|
Logger.debug("MAIN: Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessTV as a manual run...")
|
Logger.info("MAIN: Running autoProcessTV as a manual run...")
|
||||||
result = autoProcessTV.processEpisode('Manual Run', 'Manual Run', 0)
|
result = autoProcessTV.processEpisode('Manual Run', 'Manual Run', 0)
|
||||||
|
|
||||||
if result == 0:
|
if result == 0:
|
||||||
|
|
24
synchronousdeluge/__init__.py
Normal file
24
synchronousdeluge/__init__.py
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
"""A synchronous implementation of the Deluge RPC protocol
|
||||||
|
based on gevent-deluge by Christopher Rosell.
|
||||||
|
|
||||||
|
https://github.com/chrippa/gevent-deluge
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
|
||||||
|
from synchronousdeluge import DelgueClient
|
||||||
|
|
||||||
|
client = DelugeClient()
|
||||||
|
client.connect()
|
||||||
|
|
||||||
|
# Wait for value
|
||||||
|
download_location = client.core.get_config_value("download_location").get()
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
__title__ = "synchronous-deluge"
|
||||||
|
__version__ = "0.1"
|
||||||
|
__author__ = "Christian Dale"
|
||||||
|
|
||||||
|
from synchronousdeluge.client import DelugeClient
|
||||||
|
from synchronousdeluge.exceptions import DelugeRPCError
|
||||||
|
|
162
synchronousdeluge/client.py
Normal file
162
synchronousdeluge/client.py
Normal file
|
@ -0,0 +1,162 @@
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
|
||||||
|
from collections import defaultdict
|
||||||
|
from itertools import imap
|
||||||
|
|
||||||
|
from synchronousdeluge.exceptions import DelugeRPCError
|
||||||
|
from synchronousdeluge.protocol import DelugeRPCRequest, DelugeRPCResponse
|
||||||
|
from synchronousdeluge.transfer import DelugeTransfer
|
||||||
|
|
||||||
|
__all__ = ["DelugeClient"]
|
||||||
|
|
||||||
|
|
||||||
|
RPC_RESPONSE = 1
|
||||||
|
RPC_ERROR = 2
|
||||||
|
RPC_EVENT = 3
|
||||||
|
|
||||||
|
|
||||||
|
class DelugeClient(object):
|
||||||
|
def __init__(self):
|
||||||
|
"""A deluge client session."""
|
||||||
|
self.transfer = DelugeTransfer()
|
||||||
|
self.modules = []
|
||||||
|
self._request_counter = 0
|
||||||
|
|
||||||
|
def _get_local_auth(self):
|
||||||
|
auth_file = ""
|
||||||
|
username = password = ""
|
||||||
|
if platform.system() in ('Windows', 'Microsoft'):
|
||||||
|
appDataPath = os.environ.get("APPDATA")
|
||||||
|
if not appDataPath:
|
||||||
|
import _winreg
|
||||||
|
hkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders")
|
||||||
|
appDataReg = _winreg.QueryValueEx(hkey, "AppData")
|
||||||
|
appDataPath = appDataReg[0]
|
||||||
|
_winreg.CloseKey(hkey)
|
||||||
|
|
||||||
|
auth_file = os.path.join(appDataPath, "deluge", "auth")
|
||||||
|
else:
|
||||||
|
from xdg.BaseDirectory import save_config_path
|
||||||
|
try:
|
||||||
|
auth_file = os.path.join(save_config_path("deluge"), "auth")
|
||||||
|
except OSError, e:
|
||||||
|
return username, password
|
||||||
|
|
||||||
|
|
||||||
|
if os.path.exists(auth_file):
|
||||||
|
for line in open(auth_file):
|
||||||
|
if line.startswith("#"):
|
||||||
|
# This is a comment line
|
||||||
|
continue
|
||||||
|
line = line.strip()
|
||||||
|
try:
|
||||||
|
lsplit = line.split(":")
|
||||||
|
except Exception, e:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if len(lsplit) == 2:
|
||||||
|
username, password = lsplit
|
||||||
|
elif len(lsplit) == 3:
|
||||||
|
username, password, level = lsplit
|
||||||
|
else:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if username == "localclient":
|
||||||
|
return (username, password)
|
||||||
|
|
||||||
|
return ("", "")
|
||||||
|
|
||||||
|
def _create_module_method(self, module, method):
|
||||||
|
fullname = "{0}.{1}".format(module, method)
|
||||||
|
|
||||||
|
def func(obj, *args, **kwargs):
|
||||||
|
return self.remote_call(fullname, *args, **kwargs)
|
||||||
|
|
||||||
|
func.__name__ = method
|
||||||
|
|
||||||
|
return func
|
||||||
|
|
||||||
|
def _introspect(self):
|
||||||
|
self.modules = []
|
||||||
|
|
||||||
|
methods = self.remote_call("daemon.get_method_list").get()
|
||||||
|
methodmap = defaultdict(dict)
|
||||||
|
splitter = lambda v: v.split(".")
|
||||||
|
|
||||||
|
for module, method in imap(splitter, methods):
|
||||||
|
methodmap[module][method] = self._create_module_method(module, method)
|
||||||
|
|
||||||
|
for module, methods in methodmap.items():
|
||||||
|
clsname = "DelugeModule{0}".format(module.capitalize())
|
||||||
|
cls = type(clsname, (), methods)
|
||||||
|
setattr(self, module, cls())
|
||||||
|
self.modules.append(module)
|
||||||
|
|
||||||
|
def remote_call(self, method, *args, **kwargs):
|
||||||
|
req = DelugeRPCRequest(self._request_counter, method, *args, **kwargs)
|
||||||
|
message = next(self.transfer.send_request(req))
|
||||||
|
|
||||||
|
response = DelugeRPCResponse()
|
||||||
|
|
||||||
|
if not isinstance(message, tuple):
|
||||||
|
return
|
||||||
|
|
||||||
|
if len(message) < 3:
|
||||||
|
return
|
||||||
|
|
||||||
|
message_type = message[0]
|
||||||
|
|
||||||
|
# if message_type == RPC_EVENT:
|
||||||
|
# event = message[1]
|
||||||
|
# values = message[2]
|
||||||
|
#
|
||||||
|
# if event in self._event_handlers:
|
||||||
|
# for handler in self._event_handlers[event]:
|
||||||
|
# gevent.spawn(handler, *values)
|
||||||
|
#
|
||||||
|
# elif message_type in (RPC_RESPONSE, RPC_ERROR):
|
||||||
|
if message_type in (RPC_RESPONSE, RPC_ERROR):
|
||||||
|
request_id = message[1]
|
||||||
|
value = message[2]
|
||||||
|
|
||||||
|
if request_id == self._request_counter :
|
||||||
|
if message_type == RPC_RESPONSE:
|
||||||
|
response.set(value)
|
||||||
|
elif message_type == RPC_ERROR:
|
||||||
|
err = DelugeRPCError(*value)
|
||||||
|
response.set_exception(err)
|
||||||
|
|
||||||
|
self._request_counter += 1
|
||||||
|
return response
|
||||||
|
|
||||||
|
def connect(self, host="127.0.0.1", port=58846, username="", password=""):
|
||||||
|
"""Connects to a daemon process.
|
||||||
|
|
||||||
|
:param host: str, the hostname of the daemon
|
||||||
|
:param port: int, the port of the daemon
|
||||||
|
:param username: str, the username to login with
|
||||||
|
:param password: str, the password to login with
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Connect transport
|
||||||
|
self.transfer.connect((host, port))
|
||||||
|
|
||||||
|
# Attempt to fetch local auth info if needed
|
||||||
|
if not username and host in ("127.0.0.1", "localhost"):
|
||||||
|
username, password = self._get_local_auth()
|
||||||
|
|
||||||
|
# Authenticate
|
||||||
|
self.remote_call("daemon.login", username, password).get()
|
||||||
|
|
||||||
|
# Introspect available methods
|
||||||
|
self._introspect()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def connected(self):
|
||||||
|
return self.transfer.connected
|
||||||
|
|
||||||
|
def disconnect(self):
|
||||||
|
"""Disconnects from the daemon."""
|
||||||
|
self.transfer.disconnect()
|
||||||
|
|
11
synchronousdeluge/exceptions.py
Normal file
11
synchronousdeluge/exceptions.py
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
__all__ = ["DelugeRPCError"]
|
||||||
|
|
||||||
|
class DelugeRPCError(Exception):
|
||||||
|
def __init__(self, name, msg, traceback):
|
||||||
|
self.name = name
|
||||||
|
self.msg = msg
|
||||||
|
self.traceback = traceback
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "{0}: {1}: {2}".format(self.__class__.__name__, self.name, self.msg)
|
||||||
|
|
38
synchronousdeluge/protocol.py
Normal file
38
synchronousdeluge/protocol.py
Normal file
|
@ -0,0 +1,38 @@
|
||||||
|
__all__ = ["DelugeRPCRequest", "DelugeRPCResponse"]
|
||||||
|
|
||||||
|
class DelugeRPCRequest(object):
|
||||||
|
def __init__(self, request_id, method, *args, **kwargs):
|
||||||
|
self.request_id = request_id
|
||||||
|
self.method = method
|
||||||
|
self.args = args
|
||||||
|
self.kwargs = kwargs
|
||||||
|
|
||||||
|
def format(self):
|
||||||
|
return (self.request_id, self.method, self.args, self.kwargs)
|
||||||
|
|
||||||
|
class DelugeRPCResponse(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.value = None
|
||||||
|
self._exception = None
|
||||||
|
|
||||||
|
def successful(self):
|
||||||
|
return self._exception is None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def exception(self):
|
||||||
|
if self._exception is not None:
|
||||||
|
return self._exception
|
||||||
|
|
||||||
|
def set(self, value=None):
|
||||||
|
self.value = value
|
||||||
|
self._exception = None
|
||||||
|
|
||||||
|
def set_exception(self, exception):
|
||||||
|
self._exception = exception
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
if self._exception is None:
|
||||||
|
return self.value
|
||||||
|
else:
|
||||||
|
raise self._exception
|
||||||
|
|
433
synchronousdeluge/rencode.py
Normal file
433
synchronousdeluge/rencode.py
Normal file
|
@ -0,0 +1,433 @@
|
||||||
|
|
||||||
|
"""
|
||||||
|
rencode -- Web safe object pickling/unpickling.
|
||||||
|
|
||||||
|
Public domain, Connelly Barnes 2006-2007.
|
||||||
|
|
||||||
|
The rencode module is a modified version of bencode from the
|
||||||
|
BitTorrent project. For complex, heterogeneous data structures with
|
||||||
|
many small elements, r-encodings take up significantly less space than
|
||||||
|
b-encodings:
|
||||||
|
|
||||||
|
>>> len(rencode.dumps({'a':0, 'b':[1,2], 'c':99}))
|
||||||
|
13
|
||||||
|
>>> len(bencode.bencode({'a':0, 'b':[1,2], 'c':99}))
|
||||||
|
26
|
||||||
|
|
||||||
|
The rencode format is not standardized, and may change with different
|
||||||
|
rencode module versions, so you should check that you are using the
|
||||||
|
same rencode version throughout your project.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__version__ = '1.0.1'
|
||||||
|
__all__ = ['dumps', 'loads']
|
||||||
|
|
||||||
|
# Original bencode module by Petru Paler, et al.
|
||||||
|
#
|
||||||
|
# Modifications by Connelly Barnes:
|
||||||
|
#
|
||||||
|
# - Added support for floats (sent as 32-bit or 64-bit in network
|
||||||
|
# order), bools, None.
|
||||||
|
# - Allowed dict keys to be of any serializable type.
|
||||||
|
# - Lists/tuples are always decoded as tuples (thus, tuples can be
|
||||||
|
# used as dict keys).
|
||||||
|
# - Embedded extra information in the 'typecodes' to save some space.
|
||||||
|
# - Added a restriction on integer length, so that malicious hosts
|
||||||
|
# cannot pass us large integers which take a long time to decode.
|
||||||
|
#
|
||||||
|
# Licensed by Bram Cohen under the "MIT license":
|
||||||
|
#
|
||||||
|
# "Copyright (C) 2001-2002 Bram Cohen
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person
|
||||||
|
# obtaining a copy of this software and associated documentation files
|
||||||
|
# (the "Software"), to deal in the Software without restriction,
|
||||||
|
# including without limitation the rights to use, copy, modify, merge,
|
||||||
|
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||||
|
# and to permit persons to whom the Software is furnished to do so,
|
||||||
|
# subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be
|
||||||
|
# included in all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# The Software is provided "AS IS", without warranty of any kind,
|
||||||
|
# express or implied, including but not limited to the warranties of
|
||||||
|
# merchantability, fitness for a particular purpose and
|
||||||
|
# noninfringement. In no event shall the authors or copyright holders
|
||||||
|
# be liable for any claim, damages or other liability, whether in an
|
||||||
|
# action of contract, tort or otherwise, arising from, out of or in
|
||||||
|
# connection with the Software or the use or other dealings in the
|
||||||
|
# Software."
|
||||||
|
#
|
||||||
|
# (The rencode module is licensed under the above license as well).
|
||||||
|
#
|
||||||
|
|
||||||
|
import struct
|
||||||
|
import string
|
||||||
|
from threading import Lock
|
||||||
|
|
||||||
|
# Default number of bits for serialized floats, either 32 or 64 (also a parameter for dumps()).
DEFAULT_FLOAT_BITS = 32

# Maximum length of integer when written as base 10 string.
MAX_INT_LENGTH = 64

# The bencode 'typecodes' such as i, d, etc have been extended and
# relocated on the base-256 character set.
CHR_LIST = chr(59)
CHR_DICT = chr(60)
CHR_INT = chr(61)
CHR_INT1 = chr(62)
CHR_INT2 = chr(63)
CHR_INT4 = chr(64)
CHR_INT8 = chr(65)
CHR_FLOAT32 = chr(66)
# NOTE: deliberately out of the 59..69 sequence -- chr(44) is ',', kept for
# wire compatibility with other rencode implementations.
CHR_FLOAT64 = chr(44)
CHR_TRUE = chr(67)
CHR_FALSE = chr(68)
CHR_NONE = chr(69)
# Terminator byte for variable-length lists, dicts and big ints.
CHR_TERM = chr(127)

# Positive integers with value embedded in typecode.
INT_POS_FIXED_START = 0
INT_POS_FIXED_COUNT = 44

# Dictionaries with length embedded in typecode.
DICT_FIXED_START = 102
DICT_FIXED_COUNT = 25

# Negative integers with value embedded in typecode.
INT_NEG_FIXED_START = 70
INT_NEG_FIXED_COUNT = 32

# Strings with length embedded in typecode.
STR_FIXED_START = 128
STR_FIXED_COUNT = 64

# Lists with length embedded in typecode.
LIST_FIXED_START = STR_FIXED_START+STR_FIXED_COUNT
LIST_FIXED_COUNT = 64
|
||||||
|
|
||||||
|
def decode_int(x, f):
    # Decode an arbitrary-size integer: CHR_INT, base-10 digits, CHR_TERM.
    # Returns (value, index just past the terminator).
    f += 1
    newf = x.index(CHR_TERM, f)
    # Refuse huge digit strings so a malicious peer cannot make us spend
    # unbounded time on int conversion (see module header notes).
    if newf - f >= MAX_INT_LENGTH:
        raise ValueError('overflow')
    try:
        n = int(x[f:newf])
    except (OverflowError, ValueError):
        n = long(x[f:newf])  # Python 2 fallback for very large values
    # Reject non-canonical encodings: "-0" and leading zeros.
    if x[f] == '-':
        if x[f + 1] == '0':
            raise ValueError
    elif x[f] == '0' and newf != f+1:
        raise ValueError
    return (n, newf+1)
|
||||||
|
|
||||||
|
def decode_intb(x, f):
    """Decode a 1-byte signed int stored immediately after its typecode."""
    start = f + 1
    (value,) = struct.unpack('!b', x[start:start + 1])
    return (value, start + 1)
|
||||||
|
|
||||||
|
def decode_inth(x, f):
    """Decode a 2-byte signed big-endian int following its typecode."""
    start = f + 1
    (value,) = struct.unpack('!h', x[start:start + 2])
    return (value, start + 2)
|
||||||
|
|
||||||
|
def decode_intl(x, f):
    """Decode a 4-byte signed big-endian int following its typecode."""
    start = f + 1
    (value,) = struct.unpack('!l', x[start:start + 4])
    return (value, start + 4)
|
||||||
|
|
||||||
|
def decode_intq(x, f):
    """Decode an 8-byte signed big-endian int following its typecode."""
    start = f + 1
    (value,) = struct.unpack('!q', x[start:start + 8])
    return (value, start + 8)
|
||||||
|
|
||||||
|
def decode_float32(x, f):
    """Decode a 4-byte big-endian IEEE-754 float following its typecode."""
    start = f + 1
    (value,) = struct.unpack('!f', x[start:start + 4])
    return (value, start + 4)
|
||||||
|
|
||||||
|
def decode_float64(x, f):
    """Decode an 8-byte big-endian IEEE-754 double following its typecode."""
    start = f + 1
    (value,) = struct.unpack('!d', x[start:start + 8])
    return (value, start + 8)
|
||||||
|
|
||||||
|
def decode_string(x, f):
    # Decode a length-prefixed string: "<len>:<bytes>".
    colon = x.index(':', f)
    try:
        n = int(x[f:colon])
    except (OverflowError, ValueError):
        n = long(x[f:colon])  # Python 2 fallback for absurdly long lengths
    # Reject non-canonical length fields with leading zeros.
    if x[f] == '0' and colon != f+1:
        raise ValueError
    colon += 1
    s = x[colon:colon+n]
    try:
        # Python 2: promote the byte string to unicode only when UTF-8
        # decoding is genuinely multi-byte (decoded length differs).
        t = s.decode("utf8")
        if len(t) != len(s):
            s = t
    except UnicodeDecodeError:
        # Not valid UTF-8; return the raw bytes unchanged.
        pass
    return (s, colon+n)
|
||||||
|
|
||||||
|
def decode_list(x, f):
    # Decode CHR_LIST: elements until CHR_TERM.  Always returns a tuple so
    # the result is hashable and can itself be used as a dict key.
    r, f = [], f+1
    while x[f] != CHR_TERM:
        # Dispatch on the element's typecode byte.
        v, f = decode_func[x[f]](x, f)
        r.append(v)
    return (tuple(r), f + 1)
|
||||||
|
|
||||||
|
def decode_dict(x, f):
    # Decode CHR_DICT: alternating key/value encodings until CHR_TERM.
    r, f = {}, f+1
    while x[f] != CHR_TERM:
        k, f = decode_func[x[f]](x, f)
        r[k], f = decode_func[x[f]](x, f)
    return (r, f + 1)
|
||||||
|
|
||||||
|
def decode_true(x, f):
    """Decode CHR_TRUE: no payload, just step past the typecode."""
    return (True, f + 1)
|
||||||
|
|
||||||
|
def decode_false(x, f):
    """Decode CHR_FALSE: no payload, just step past the typecode."""
    return (False, f + 1)
|
||||||
|
|
||||||
|
def decode_none(x, f):
    """Decode CHR_NONE: no payload, just step past the typecode."""
    return (None, f + 1)
|
||||||
|
|
||||||
|
# Dispatch table: the first byte of an encoded value selects its decoder.
# Fixed-length entries (embedded strings/lists/ints/dicts) are registered
# below by the make_fixed_length_*_decoders() factories.
decode_func = {}
# '0'-'9' introduce a length-prefixed string of the form "<len>:<bytes>".
decode_func['0'] = decode_string
decode_func['1'] = decode_string
decode_func['2'] = decode_string
decode_func['3'] = decode_string
decode_func['4'] = decode_string
decode_func['5'] = decode_string
decode_func['6'] = decode_string
decode_func['7'] = decode_string
decode_func['8'] = decode_string
decode_func['9'] = decode_string
decode_func[CHR_LIST ] = decode_list
decode_func[CHR_DICT ] = decode_dict
decode_func[CHR_INT ] = decode_int
decode_func[CHR_INT1 ] = decode_intb
decode_func[CHR_INT2 ] = decode_inth
decode_func[CHR_INT4 ] = decode_intl
decode_func[CHR_INT8 ] = decode_intq
decode_func[CHR_FLOAT32] = decode_float32
decode_func[CHR_FLOAT64] = decode_float64
decode_func[CHR_TRUE ] = decode_true
decode_func[CHR_FALSE ] = decode_false
decode_func[CHR_NONE ] = decode_none
|
||||||
|
|
||||||
|
def make_fixed_length_string_decoders():
    # Register decoders for short strings whose length is embedded in the
    # typecode byte itself (STR_FIXED_START .. STR_FIXED_START+COUNT-1).
    def make_decoder(slen):
        # Closure captures this specific length.
        def f(x, f):
            s = x[f+1:f+1+slen]
            try:
                # Python 2: promote to unicode only for genuinely
                # multi-byte UTF-8 content (same rule as decode_string).
                t = s.decode("utf8")
                if len(t) != len(s):
                    s = t
            except UnicodeDecodeError:
                pass
            return (s, f+1+slen)
        return f
    for i in range(STR_FIXED_COUNT):
        decode_func[chr(STR_FIXED_START+i)] = make_decoder(i)

make_fixed_length_string_decoders()
|
||||||
|
|
||||||
|
def make_fixed_length_list_decoders():
    # Register decoders for short lists whose element count is embedded in
    # the typecode byte (LIST_FIXED_START .. +COUNT-1).
    def make_decoder(slen):
        def f(x, f):
            r, f = [], f+1
            for i in range(slen):
                v, f = decode_func[x[f]](x, f)
                r.append(v)
            # Tuples, not lists, so results are hashable (see module header).
            return (tuple(r), f)
        return f
    for i in range(LIST_FIXED_COUNT):
        decode_func[chr(LIST_FIXED_START+i)] = make_decoder(i)

make_fixed_length_list_decoders()
|
||||||
|
|
||||||
|
def make_fixed_length_int_decoders():
    # Register decoders for small ints whose value IS the typecode:
    # positives 0..43 and negatives -1..-32 need no payload bytes at all.
    def make_decoder(j):
        def f(x, f):
            return (j, f+1)
        return f
    for i in range(INT_POS_FIXED_COUNT):
        decode_func[chr(INT_POS_FIXED_START+i)] = make_decoder(i)
    for i in range(INT_NEG_FIXED_COUNT):
        # Typecode INT_NEG_FIXED_START+i maps to value -1-i.
        decode_func[chr(INT_NEG_FIXED_START+i)] = make_decoder(-1-i)

make_fixed_length_int_decoders()
|
||||||
|
|
||||||
|
def make_fixed_length_dict_decoders():
    # Register decoders for small dicts whose entry count is embedded in
    # the typecode byte (DICT_FIXED_START .. +COUNT-1).
    def make_decoder(slen):
        def f(x, f):
            r, f = {}, f+1
            for j in range(slen):
                # Key then value, each dispatched by its own typecode.
                k, f = decode_func[x[f]](x, f)
                r[k], f = decode_func[x[f]](x, f)
            return (r, f)
        return f
    for i in range(DICT_FIXED_COUNT):
        decode_func[chr(DICT_FIXED_START+i)] = make_decoder(i)

make_fixed_length_dict_decoders()
|
||||||
|
|
||||||
|
def encode_dict(x,r):
    # NOTE(review): dead code -- this definition is shadowed by the second
    # encode_dict further down (the fixed-length-aware version), which is
    # what gets bound by the encode_func table.  Kept verbatim; safe to
    # delete in a follow-up.
    r.append(CHR_DICT)
    for k, v in x.items():
        encode_func[type(k)](k, r)
        encode_func[type(v)](v, r)
    r.append(CHR_TERM)
|
||||||
|
|
||||||
|
|
||||||
|
def loads(x):
    """Decode one r-encoded value from string x.

    Raises ValueError for empty input, an unknown typecode, or trailing
    bytes after the first complete value.
    """
    try:
        r, l = decode_func[x[0]](x, 0)
    except (IndexError, KeyError):
        # IndexError: empty input; KeyError: unknown typecode byte.
        raise ValueError
    if l != len(x):
        # The input must be exactly one well-formed value.
        raise ValueError
    return r
|
||||||
|
|
||||||
|
from types import StringType, IntType, LongType, DictType, ListType, TupleType, FloatType, NoneType, UnicodeType
|
||||||
|
|
||||||
|
def encode_int(x, r):
    # Append x using the smallest representation: value embedded in the
    # typecode, then fixed-width 1/2/4/8-byte big-endian, then base-10
    # digits between CHR_INT and CHR_TERM.
    if 0 <= x < INT_POS_FIXED_COUNT:
        r.append(chr(INT_POS_FIXED_START+x))
    elif -INT_NEG_FIXED_COUNT <= x < 0:
        # Value -1-i is stored as typecode INT_NEG_FIXED_START+i.
        r.append(chr(INT_NEG_FIXED_START-1-x))
    elif -128 <= x < 128:
        r.extend((CHR_INT1, struct.pack('!b', x)))
    elif -32768 <= x < 32768:
        r.extend((CHR_INT2, struct.pack('!h', x)))
    elif -2147483648 <= x < 2147483648:
        r.extend((CHR_INT4, struct.pack('!l', x)))
    elif -9223372036854775808 <= x < 9223372036854775808:
        r.extend((CHR_INT8, struct.pack('!q', x)))
    else:
        s = str(x)
        # Bound the digit string so the decoder's MAX_INT_LENGTH check
        # on the other side can never be exceeded by our own output.
        if len(s) >= MAX_INT_LENGTH:
            raise ValueError('overflow')
        r.extend((CHR_INT, s, CHR_TERM))
|
||||||
|
|
||||||
|
def encode_float32(x, r):
    # Append x as CHR_FLOAT32 plus 4 big-endian IEEE-754 bytes (lossy for
    # values not representable in single precision).
    r.extend((CHR_FLOAT32, struct.pack('!f', x)))
|
||||||
|
|
||||||
|
def encode_float64(x, r):
    # Append x as CHR_FLOAT64 plus 8 big-endian IEEE-754 bytes.
    r.extend((CHR_FLOAT64, struct.pack('!d', x)))
|
||||||
|
|
||||||
|
def encode_bool(x, r):
    """Append the single-byte typecode for a boolean value."""
    r.extend(CHR_TRUE if x else CHR_FALSE)
|
||||||
|
|
||||||
|
def encode_none(x, r):
    # None carries no payload; the typecode alone encodes it (x is unused).
    r.extend(CHR_NONE)
|
||||||
|
|
||||||
|
def encode_string(x, r):
    """Append a byte string: short strings embed their length in the
    typecode, longer ones use the bencode-style "<len>:<bytes>" form."""
    n = len(x)
    if n < STR_FIXED_COUNT:
        r.extend((chr(STR_FIXED_START + n), x))
    else:
        r.extend((str(n), ':', x))
|
||||||
|
|
||||||
|
def encode_unicode(x, r):
    # Unicode is serialized as its UTF-8 bytes; the decoder's utf8
    # promotion rule restores multi-byte content to unicode.
    encode_string(x.encode("utf8"), r)
|
||||||
|
|
||||||
|
def encode_list(x, r):
    """Append a list/tuple; short sequences embed their length in the
    typecode, longer ones are CHR_LIST ... CHR_TERM delimited."""
    short = len(x) < LIST_FIXED_COUNT
    r.append(chr(LIST_FIXED_START + len(x)) if short else CHR_LIST)
    for item in x:
        # Each element dispatches to its own type's encoder.
        encode_func[type(item)](item, r)
    if not short:
        r.append(CHR_TERM)
|
||||||
|
|
||||||
|
def encode_dict(x, r):
    """Append a dict; small dicts embed their entry count in the typecode,
    larger ones are CHR_DICT ... CHR_TERM delimited."""
    short = len(x) < DICT_FIXED_COUNT
    r.append(chr(DICT_FIXED_START + len(x)) if short else CHR_DICT)
    for k, v in x.items():
        # Keys may be any serializable type, not just strings.
        encode_func[type(k)](k, r)
        encode_func[type(v)](v, r)
    if not short:
        r.append(CHR_TERM)
|
||||||
|
|
||||||
|
# Dispatch table mapping a Python type to its encoder.  The float entry is
# installed dynamically by dumps() according to the requested float width.
encode_func = {}
encode_func[IntType] = encode_int
encode_func[LongType] = encode_int
encode_func[StringType] = encode_string
encode_func[ListType] = encode_list
encode_func[TupleType] = encode_list
encode_func[DictType] = encode_dict
encode_func[NoneType] = encode_none
encode_func[UnicodeType] = encode_unicode

# Serializes dumps() calls, since dumps() mutates encode_func (float width).
lock = Lock()

try:
    from types import BooleanType
    encode_func[BooleanType] = encode_bool
except ImportError:
    # Very old Pythons (< 2.3) have no bool type; booleans fall through
    # to the int encoder instead.
    pass
|
||||||
|
|
||||||
|
def dumps(x, float_bits=DEFAULT_FLOAT_BITS):
    """
    Dump data structure to str.

    Here float_bits is either 32 or 64.
    Raises ValueError for any other float_bits value.
    """
    # The float encoder is selected by mutating the module-level encode_func
    # table, so the whole encode is serialized behind a lock to keep
    # concurrent dumps() calls from trampling each other's float width.
    lock.acquire()
    try:
        if float_bits == 32:
            encode_func[FloatType] = encode_float32
        elif float_bits == 64:
            encode_func[FloatType] = encode_float64
        else:
            raise ValueError('Float bits (%d) is not 32 or 64' % float_bits)
        r = []
        encode_func[type(x)](x, r)
    finally:
        lock.release()
    return ''.join(r)
|
||||||
|
|
||||||
|
def test():
    # Round-trip self-test: every structure below must survive
    # loads(dumps(x)) unchanged.  Exercises fixed/variable-length strings,
    # lists, dicts, all integer widths, None, bools, and both float widths.
    f1 = struct.unpack('!f', struct.pack('!f', 25.5))[0]
    f2 = struct.unpack('!f', struct.pack('!f', 29.3))[0]
    f3 = struct.unpack('!f', struct.pack('!f', -0.6))[0]
    L = (({'a':15, 'bb':f1, 'ccc':f2, '':(f3,(),False,True,'')},('a',10**20),tuple(range(-100000,100000)),'b'*31,'b'*62,'b'*64,2**30,2**33,2**62,2**64,2**30,2**33,2**62,2**64,False,False, True, -1, 2, 0),)
    assert loads(dumps(L)) == L
    d = dict(zip(range(-100000,100000),range(-100000,100000)))
    d.update({'a':20, 20:40, 40:41, f1:f2, f2:f3, f3:False, False:True, True:False})
    L = (d, {}, {5:6}, {7:7,True:8}, {9:10, 22:39, 49:50, 44: ''})
    assert loads(dumps(L)) == L
    L = ('', 'a'*10, 'a'*100, 'a'*1000, 'a'*10000, 'a'*100000, 'a'*1000000, 'a'*10000000)
    assert loads(dumps(L)) == L
    L = tuple([dict(zip(range(n),range(n))) for n in range(100)]) + ('b',)
    assert loads(dumps(L)) == L
    L = tuple([dict(zip(range(n),range(-n,0))) for n in range(100)]) + ('b',)
    assert loads(dumps(L)) == L
    L = tuple([tuple(range(n)) for n in range(100)]) + ('b',)
    assert loads(dumps(L)) == L
    L = tuple(['a'*n for n in range(1000)]) + ('b',)
    assert loads(dumps(L)) == L
    L = tuple(['a'*n for n in range(1000)]) + (None,True,None)
    assert loads(dumps(L)) == L
    assert loads(dumps(None)) == None
    assert loads(dumps({None:None})) == {None:None}
    # 32-bit floats round-trip only approximately; 64-bit near-exactly.
    assert 1e-10<abs(loads(dumps(1.1))-1.1)<1e-6
    assert 1e-10<abs(loads(dumps(1.1,32))-1.1)<1e-6
    assert abs(loads(dumps(1.1,64))-1.1)<1e-12
    assert loads(dumps(u"Hello World!!"))
|
||||||
|
# Optional: JIT-compile the hot entry points with psyco when available
# (Python 2 only; silently skipped everywhere else).
try:
    import psyco
    psyco.bind(dumps)
    psyco.bind(loads)
except ImportError:
    pass


if __name__ == '__main__':
    # Run the round-trip self-test when executed as a script.
    test()
|
57
synchronousdeluge/transfer.py
Normal file
57
synchronousdeluge/transfer.py
Normal file
|
@ -0,0 +1,57 @@
|
||||||
|
import zlib
|
||||||
|
import struct
|
||||||
|
import socket
|
||||||
|
import ssl
|
||||||
|
|
||||||
|
from synchronousdeluge import rencode
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["DelugeTransfer"]
|
||||||
|
|
||||||
|
class DelugeTransfer(object):
    """Blocking SSL transport for the Deluge daemon's rencoded RPC protocol."""

    def __init__(self):
        # Raw TCP socket, its SSL wrapper, and the connection flag.
        self.sock = None
        self.conn = None
        self.connected = False

    def connect(self, hostport):
        """Open an SSL connection to (host, port), dropping any existing one."""
        if self.connected:
            self.disconnect()

        self.sock = socket.create_connection(hostport)
        # NOTE(review): SSLv3 with CERT_NONE means no certificate validation
        # and a protocol broken by POODLE; modern ssl modules may not even
        # expose PROTOCOL_SSLv3.  Presumably matches what older Deluge
        # daemons negotiate -- confirm before changing.
        self.conn = ssl.wrap_socket(self.sock, None, None, False, ssl.CERT_NONE, ssl.PROTOCOL_SSLv3)
        self.connected = True

    def disconnect(self):
        """Close the SSL socket, if open, and mark the transfer disconnected."""
        if self.conn:
            self.conn.close()
        self.connected = False

    def send_request(self, request):
        """Send one request and yield each complete response message.

        Generator: the wire format is zlib-compressed rencode, and a
        message may arrive split across multiple recv() calls, so partial
        buffers are retried until a full message decodes.
        """
        # Deluge expects a tuple of request tuples (we send one at a time).
        data = (request.format(),)
        payload = zlib.compress(rencode.dumps(data))
        self.conn.sendall(payload)

        buf = b""

        while True:
            data = self.conn.recv(1024)

            if not data:
                # Peer closed the connection.
                self.connected = False
                break

            buf += data
            # Fresh decompressor each attempt: decompressobj state cannot
            # be rewound after a failed partial decode.
            dobj = zlib.decompressobj()

            try:
                message = rencode.loads(dobj.decompress(buf))
            except (ValueError, zlib.error, struct.error):
                # Probably incomplete data, read more
                continue
            else:
                # Keep trailing bytes that belong to the next message.
                buf = dobj.unused_data

            yield message
||||||
|
|
||||||
|
|
|
@ -138,7 +138,7 @@ class Client(object):
|
||||||
urlo = urlparse(address)
|
urlo = urlparse(address)
|
||||||
if urlo.scheme == '':
|
if urlo.scheme == '':
|
||||||
base_url = 'http://' + address + ':' + str(port)
|
base_url = 'http://' + address + ':' + str(port)
|
||||||
self.url = base_url + '/transmission/rpc'
|
self.url = base_url + '/transmission/rpc/'
|
||||||
else:
|
else:
|
||||||
if urlo.port:
|
if urlo.port:
|
||||||
self.url = urlo.scheme + '://' + urlo.hostname + ':' + str(urlo.port) + urlo.path
|
self.url = urlo.scheme + '://' + urlo.hostname + ':' + str(urlo.port) + urlo.path
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue