mirror of
https://github.com/clinton-hall/nzbToMedia.git
synced 2025-08-21 22:03:13 -07:00
Merge branch 'dev'
This commit is contained in:
commit
9fd90845e6
20 changed files with 781 additions and 308 deletions
|
@ -2,7 +2,7 @@ nzbToMedia
|
||||||
================
|
================
|
||||||
|
|
||||||
Provides an efficient way to handle postprocessing for [CouchPotatoServer](https://couchpota.to/ "CouchPotatoServer") and [SickBeard](http://sickbeard.com/ "SickBeard")
|
Provides an efficient way to handle postprocessing for [CouchPotatoServer](https://couchpota.to/ "CouchPotatoServer") and [SickBeard](http://sickbeard.com/ "SickBeard")
|
||||||
when using one of the popular NZB download clients like [SABnzbd](http://sabnzbd.org/) and [NZBGet](http://nzbget.sourceforge.net/ "NZBGet") on low performance systems like a NAS.
|
when using one of the popular NZB download clients like [SABnzbd](http://sabnzbd.org/ "SABnzbd") and [NZBGet](http://nzbget.sourceforge.net/ "NZBGet") on low performance systems like a NAS.
|
||||||
This script is based on sabToSickBeard (written by Nic Wolfe and supplied with SickBeard), with the support for NZBGet being added by [thorli](https://github.com/thorli "thorli") and further contributions by [schumi2004](https://github.com/schumi2004 "schumi2004") and [hugbug](https://sourceforge.net/apps/phpbb/nzbget/memberlist.php?mode=viewprofile&u=67 "hugbug").
|
This script is based on sabToSickBeard (written by Nic Wolfe and supplied with SickBeard), with the support for NZBGet being added by [thorli](https://github.com/thorli "thorli") and further contributions by [schumi2004](https://github.com/schumi2004 "schumi2004") and [hugbug](https://sourceforge.net/apps/phpbb/nzbget/memberlist.php?mode=viewprofile&u=67 "hugbug").
|
||||||
Torrent support added by [jkaberg](https://github.com/jkaberg "jkaberg") and [berkona](https://github.com/berkona "berkona")
|
Torrent support added by [jkaberg](https://github.com/jkaberg "jkaberg") and [berkona](https://github.com/berkona "berkona")
|
||||||
|
|
||||||
|
@ -12,12 +12,9 @@ Originally this was modifed from the SickBeard version to allow for "on-demand"
|
||||||
Later, a few failed downloads prompted me to incorporate "failed download" handling.
|
Later, a few failed downloads prompted me to incorporate "failed download" handling.
|
||||||
Failed download handling is now provided for sabnzbd, by CouchPotatoServer; however on arm processors (e.g. small NAS systems) this can be unreliable.
|
Failed download handling is now provided for sabnzbd, by CouchPotatoServer; however on arm processors (e.g. small NAS systems) this can be unreliable.
|
||||||
|
|
||||||
thorli's Synology DS211j was too weak to provide decent download rates with SABnzbd and CouchPotatoServer even by using sabToCouchPotato; His only alternative (as with many many QNAP and Synology users) was to switch to NZBGet which uses far less resources and helps to reach the full download speed.
|
|
||||||
|
|
||||||
The renamer of CouchPotatoServer caused broken downloads by interfering with NZBGet while it was still unpacking the files. Hence the solution was thorli's version of sabToCouchPotato which has now been named "nzbToCouchPotato".
|
|
||||||
|
|
||||||
Failed download handling for SickBeard is available by using the development branch from fork [SickBeard-failed](https://github.com/Tolstyak/Sick-Beard.git "SickBeard-failed")
|
Failed download handling for SickBeard is available by using the development branch from fork [SickBeard-failed](https://github.com/Tolstyak/Sick-Beard.git "SickBeard-failed")
|
||||||
To use this feature, in autoProcessTV.cfg set the parameter "failed_fork=1". Default is 0 and will work with standard version of SickBeard and just ignores failed downloads.
|
To use this feature, in autoProcessTV.cfg set the parameter "fork=failed". Default is "fork=default" and will work with standard version of SickBeard and just ignores failed downloads.
|
||||||
|
Additional forks exist for ThePirateBay (does its own extraction and linking). See [SickBeard Forks](https://github.com/clinton-hall/nzbToMedia/wiki/sickbeard-branches "SickBeard Forks") for a list of known forks.
|
||||||
|
|
||||||
Torrent support has been added with the assistance of jkaberg and berkona. Currently supports uTorrent, Transmissions, Deluge and possibly more.
|
Torrent support has been added with the assistance of jkaberg and berkona. Currently supports uTorrent, Transmissions, Deluge and possibly more.
|
||||||
To enable Torrent extraction, on Windows, you need to install [7-zip](http://www.7-zip.org/ "7-zip") or on *nix you need to install the following packages/commands.
|
To enable Torrent extraction, on Windows, you need to install [7-zip](http://www.7-zip.org/ "7-zip") or on *nix you need to install the following packages/commands.
|
||||||
|
|
|
@ -9,8 +9,7 @@ import logging
|
||||||
import datetime
|
import datetime
|
||||||
import time
|
import time
|
||||||
import re
|
import re
|
||||||
from sets import Set
|
from subprocess import call, Popen
|
||||||
from subprocess import call
|
|
||||||
|
|
||||||
# Custom imports
|
# Custom imports
|
||||||
import autoProcess.migratecfg as migratecfg
|
import autoProcess.migratecfg as migratecfg
|
||||||
|
@ -34,26 +33,78 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
foundFile = int(0)
|
foundFile = int(0)
|
||||||
extracted_folder = []
|
extracted_folder = []
|
||||||
extractionSuccess = False
|
extractionSuccess = False
|
||||||
|
copy_list = []
|
||||||
|
|
||||||
Logger.debug("MAIN: Received Directory: %s | Name: %s | Category: %s", inputDirectory, inputName, inputCategory)
|
Logger.debug("MAIN: Received Directory: %s | Name: %s | Category: %s", inputDirectory, inputName, inputCategory)
|
||||||
|
if inputCategory in sbCategory and sbFork in SICKBEARD_TORRENT:
|
||||||
|
Logger.info("MAIN: Calling SickBeard's %s branch to post-process: %s",sbFork ,inputName)
|
||||||
|
result = autoProcessTV.processEpisode(inputDirectory, inputName, int(0))
|
||||||
|
if result == 1:
|
||||||
|
Logger.info("MAIN: A problem was reported in the autoProcess* script. If torrent was pasued we will resume seeding")
|
||||||
|
Logger.info("MAIN: All done.")
|
||||||
|
sys.exit()
|
||||||
|
|
||||||
inputDirectory, inputName, inputCategory, root = category_search(inputDirectory, inputName, inputCategory, root, categories) # Confirm the category by parsing directory structure
|
inputDirectory, inputName, inputCategory, root = category_search(inputDirectory, inputName, inputCategory, root, categories) # Confirm the category by parsing directory structure
|
||||||
|
|
||||||
|
outputDestination = ""
|
||||||
for category in categories:
|
for category in categories:
|
||||||
if category == inputCategory:
|
if category == inputCategory:
|
||||||
if inputCategory == hpCategory:
|
|
||||||
outputDestination = inputDirectory #HP needs to scan the same dir as passed to downloader.
|
|
||||||
break
|
|
||||||
if os.path.basename(inputDirectory) == inputName:
|
if os.path.basename(inputDirectory) == inputName:
|
||||||
Logger.info("MAIN: Download is a directory")
|
Logger.info("MAIN: Download is a directory")
|
||||||
outputDestination = os.path.normpath(os.path.join(outputDirectory, category, safeName(inputName)))
|
outputDestination = os.path.normpath(os.path.join(outputDirectory, category, safeName(inputName)))
|
||||||
else:
|
else:
|
||||||
Logger.info("MAIN: Download is not a directory")
|
Logger.info("MAIN: Download is not a directory")
|
||||||
outputDestination = os.path.normpath(os.path.join(outputDirectory, category, os.path.splitext(safeName(inputName))[0]))
|
outputDestination = os.path.normpath(os.path.join(outputDirectory, category, os.path.splitext(safeName(inputName))[0]))
|
||||||
Logger.info("MAIN: Output directory set to: %s", outputDestination)
|
Logger.info("MAIN: Output directory set to: %s", outputDestination)
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
if outputDestination == "":
|
||||||
|
if inputCategory == "":
|
||||||
|
inputCategory = "UNCAT"
|
||||||
|
if os.path.basename(inputDirectory) == inputName:
|
||||||
|
Logger.info("MAIN: Download is a directory")
|
||||||
|
outputDestination = os.path.normpath(os.path.join(outputDirectory, inputCategory, safeName(inputName)))
|
||||||
|
else:
|
||||||
|
Logger.info("MAIN: Download is not a directory")
|
||||||
|
outputDestination = os.path.normpath(os.path.join(outputDirectory, inputCategory, os.path.splitext(safeName(inputName))[0]))
|
||||||
|
Logger.info("MAIN: Output directory set to: %s", outputDestination)
|
||||||
|
|
||||||
|
processOnly = cpsCategory + sbCategory + hpCategory + mlCategory + gzCategory
|
||||||
|
if not "NONE" in user_script_categories: # if None, we only process the 5 listed.
|
||||||
|
if "ALL" in user_script_categories: # All defined categories
|
||||||
|
processOnly = categories
|
||||||
|
processOnly.extend(user_script_categories) # Adds all categories to be processed by userscript.
|
||||||
|
|
||||||
|
if not inputCategory in processOnly:
|
||||||
|
Logger.info("MAIN: No processing to be done for category: %s. Exiting", inputCategory)
|
||||||
|
Logger.info("MAIN: All done.")
|
||||||
|
sys.exit()
|
||||||
|
|
||||||
|
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
||||||
|
if clientAgent in ['utorrent', 'transmission'] and inputHash:
|
||||||
|
if clientAgent == 'utorrent':
|
||||||
|
try:
|
||||||
|
Logger.debug("MAIN: Connecting to %s: %s", clientAgent, uTorrentWEBui)
|
||||||
|
utorrentClass = UTorrentClient(uTorrentWEBui, uTorrentUSR, uTorrentPWD)
|
||||||
|
except:
|
||||||
|
Logger.exception("MAIN: Failed to connect to uTorrent")
|
||||||
|
utorrentClass = ""
|
||||||
|
if clientAgent == 'transmission':
|
||||||
|
try:
|
||||||
|
Logger.debug("MAIN: Connecting to %s: http://%s:%s", clientAgent, TransmissionHost, TransmissionPort)
|
||||||
|
TransmissionClass = TransmissionClient(TransmissionHost, TransmissionPort, TransmissionUSR, TransmissionPWD)
|
||||||
|
except:
|
||||||
|
Logger.exception("MAIN: Failed to connect to Transmission")
|
||||||
|
TransmissionClass = ""
|
||||||
|
|
||||||
|
# if we are using links with uTorrent it means we need to pause it in order to access the files
|
||||||
|
Logger.debug("MAIN: Stoping torrent %s in %s while processing", inputName, clientAgent)
|
||||||
|
if clientAgent == 'utorrent' and utorrentClass != "":
|
||||||
|
utorrentClass.stop(inputHash)
|
||||||
|
if clientAgent == 'transmission' and TransmissionClass !="":
|
||||||
|
TransmissionClass.stop_torrent(inputID)
|
||||||
|
time.sleep(5) # Give Torrent client some time to catch up with the change
|
||||||
|
|
||||||
Logger.debug("MAIN: Scanning files in directory: %s", inputDirectory)
|
Logger.debug("MAIN: Scanning files in directory: %s", inputDirectory)
|
||||||
|
|
||||||
|
@ -66,10 +117,10 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
targetDirectory = os.path.join(outputDestination, file)
|
targetDirectory = os.path.join(outputDestination, file)
|
||||||
|
|
||||||
if root == 1:
|
if root == 1:
|
||||||
if not foundFile:
|
if foundFile == int(0):
|
||||||
Logger.debug("MAIN: Looking for %s in: %s", inputName, file)
|
Logger.debug("MAIN: Looking for %s in: %s", inputName, file)
|
||||||
if (safeName(inputName) in safeName(file)) or (safeName(os.path.splitext(file)[0]) in safeName(inputName)) and foundFile == 0:
|
if (safeName(inputName) in safeName(file)) or (safeName(fileName) in safeName(inputName)):
|
||||||
pass # This file does match the Torrent name
|
#pass # This file does match the Torrent name
|
||||||
foundFile = 1
|
foundFile = 1
|
||||||
Logger.debug("MAIN: Found file %s that matches Torrent Name %s", file, inputName)
|
Logger.debug("MAIN: Found file %s that matches Torrent Name %s", file, inputName)
|
||||||
else:
|
else:
|
||||||
|
@ -79,18 +130,15 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
Logger.debug("MAIN: Looking for files with modified/created dates less than 5 minutes old.")
|
Logger.debug("MAIN: Looking for files with modified/created dates less than 5 minutes old.")
|
||||||
mtime_lapse = now - datetime.datetime.fromtimestamp(os.path.getmtime(os.path.join(dirpath, file)))
|
mtime_lapse = now - datetime.datetime.fromtimestamp(os.path.getmtime(os.path.join(dirpath, file)))
|
||||||
ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(os.path.join(dirpath, file)))
|
ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(os.path.join(dirpath, file)))
|
||||||
if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)) and foundFile == 0:
|
if (mtime_lapse < datetime.timedelta(minutes=5)) or (ctime_lapse < datetime.timedelta(minutes=5)):
|
||||||
pass # This file does match the date time criteria
|
#pass # This file does match the date time criteria
|
||||||
foundFile = 1
|
foundFile = 1
|
||||||
Logger.debug("MAIN: Found file %s with date modifed/created less than 5 minutes ago.", file)
|
Logger.debug("MAIN: Found file %s with date modifed/created less than 5 minutes ago.", file)
|
||||||
else:
|
else:
|
||||||
continue # This file has not been recently moved or created, skip it
|
continue # This file has not been recently moved or created, skip it
|
||||||
|
|
||||||
if not (inputCategory == cpsCategory or inputCategory == sbCategory or inputCategory == hpCategory): #process all for non-video categories.
|
if fileExtension in mediaContainer: # If the file is a video file
|
||||||
Logger.info("MAIN: Found file %s for category %s", filePath, inputCategory)
|
if is_sample(filePath, inputName, minSampleSize) and not inputCategory in hpCategory: # Ignore samples
|
||||||
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
|
||||||
elif fileExtension in mediaContainer and not inputCategory == hpCategory: # If the file is a video file
|
|
||||||
if is_sample(filePath, inputName, minSampleSize): # Ignore samples
|
|
||||||
Logger.info("MAIN: Ignoring sample file: %s ", filePath)
|
Logger.info("MAIN: Ignoring sample file: %s ", filePath)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
|
@ -98,15 +146,25 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
Logger.info("MAIN: Found video file %s in %s", fileExtension, filePath)
|
Logger.info("MAIN: Found video file %s in %s", fileExtension, filePath)
|
||||||
try:
|
try:
|
||||||
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
||||||
|
copy_list.append([filePath, os.path.join(outputDestination, file)])
|
||||||
except:
|
except:
|
||||||
Logger.exception("MAIN: Failed to link file: %s", file)
|
Logger.exception("MAIN: Failed to link file: %s", file)
|
||||||
elif fileExtension in metaContainer and not inputCategory == hpCategory:
|
elif fileExtension in metaContainer:
|
||||||
Logger.info("MAIN: Found metadata file %s for file %s", fileExtension, filePath)
|
Logger.info("MAIN: Found metadata file %s for file %s", fileExtension, filePath)
|
||||||
try:
|
try:
|
||||||
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
||||||
|
copy_list.append([filePath, os.path.join(outputDestination, file)])
|
||||||
except:
|
except:
|
||||||
Logger.exception("MAIN: Failed to link file: %s", file)
|
Logger.exception("MAIN: Failed to link file: %s", file)
|
||||||
|
continue
|
||||||
elif fileExtension in compressedContainer:
|
elif fileExtension in compressedContainer:
|
||||||
|
if inputCategory in hpCategory: # We need to link all files for HP in order to move these back to support seeding.
|
||||||
|
Logger.info("MAIN: Linking compressed archive file %s for file %s", fileExtension, filePath)
|
||||||
|
try:
|
||||||
|
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
||||||
|
copy_list.append([filePath, os.path.join(outputDestination, file)])
|
||||||
|
except:
|
||||||
|
Logger.exception("MAIN: Failed to link file: %s", file)
|
||||||
# find part numbers in second "extension" from right, if we have more than 1 compressed file in the same directory.
|
# find part numbers in second "extension" from right, if we have more than 1 compressed file in the same directory.
|
||||||
if re.search(r'\d+', os.path.splitext(fileName)[1]) and os.path.dirname(filePath) in extracted_folder and not (os.path.splitext(fileName)[1] in ['.720p','.1080p']):
|
if re.search(r'\d+', os.path.splitext(fileName)[1]) and os.path.dirname(filePath) in extracted_folder and not (os.path.splitext(fileName)[1] in ['.720p','.1080p']):
|
||||||
part = int(re.search(r'\d+', os.path.splitext(fileName)[1]).group())
|
part = int(re.search(r'\d+', os.path.splitext(fileName)[1]).group())
|
||||||
|
@ -117,112 +175,195 @@ def main(inputDirectory, inputName, inputCategory, inputHash, inputID):
|
||||||
continue
|
continue
|
||||||
Logger.info("MAIN: Found compressed archive %s for file %s", fileExtension, filePath)
|
Logger.info("MAIN: Found compressed archive %s for file %s", fileExtension, filePath)
|
||||||
try:
|
try:
|
||||||
extractor.extract(filePath, outputDestination)
|
if inputCategory in hpCategory: # HP needs to scan the same dir as passed to downloader.
|
||||||
|
extractor.extract(filePath, inputDirectory)
|
||||||
|
else:
|
||||||
|
extractor.extract(filePath, outputDestination)
|
||||||
extractionSuccess = True # we use this variable to determine if we need to pause a torrent or not in uTorrent (don't need to pause archived content)
|
extractionSuccess = True # we use this variable to determine if we need to pause a torrent or not in uTorrent (don't need to pause archived content)
|
||||||
extracted_folder.append(os.path.dirname(filePath))
|
extracted_folder.append(os.path.dirname(filePath))
|
||||||
except:
|
except:
|
||||||
Logger.exception("MAIN: Extraction failed for: %s", file)
|
Logger.exception("MAIN: Extraction failed for: %s", file)
|
||||||
elif inputCategory == hpCategory:
|
continue
|
||||||
|
elif not inputCategory in cpsCategory + sbCategory: #process all for non-video categories.
|
||||||
|
Logger.info("MAIN: Found file %s for category %s", filePath, inputCategory)
|
||||||
|
copy_link(filePath, targetDirectory, useLink, outputDestination)
|
||||||
|
copy_list.append([filePath, os.path.join(outputDestination, file)])
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
Logger.debug("MAIN: Ignoring unknown filetype %s for file %s", fileExtension, filePath)
|
Logger.debug("MAIN: Ignoring unknown filetype %s for file %s", fileExtension, filePath)
|
||||||
continue
|
continue
|
||||||
flatten(outputDestination)
|
if not inputCategory in hpCategory: #don't flatten hp in case multi cd albums, and we need to copy this back later.
|
||||||
|
flatten(outputDestination)
|
||||||
|
|
||||||
# Now check if movie files exist in destination:
|
# Now check if movie files exist in destination:
|
||||||
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
if inputCategory in cpsCategory + sbCategory:
|
||||||
for file in filenames:
|
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
||||||
filePath = os.path.join(dirpath, file)
|
for file in filenames:
|
||||||
fileExtension = os.path.splitext(file)[1]
|
filePath = os.path.join(dirpath, file)
|
||||||
if fileExtension in mediaContainer: # If the file is a video file
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
if is_sample(filePath, inputName, minSampleSize):
|
if fileExtension in mediaContainer: # If the file is a video file
|
||||||
Logger.debug("MAIN: Removing sample file: %s", filePath)
|
if is_sample(filePath, inputName, minSampleSize):
|
||||||
os.unlink(filePath) # remove samples
|
Logger.debug("MAIN: Removing sample file: %s", filePath)
|
||||||
|
os.unlink(filePath) # remove samples
|
||||||
|
else:
|
||||||
|
Logger.debug("MAIN: Found media file: %s", filePath)
|
||||||
|
video2 = video2 + 1
|
||||||
else:
|
else:
|
||||||
video2 = video2 + 1
|
Logger.debug("MAIN: File %s is not a media file", filePath)
|
||||||
if video2 >= video and video2 > 0: # Check that all video files were moved
|
if video2 >= video and video2 > int(0): # Check that all video files were moved
|
||||||
status = 0
|
Logger.debug("MAIN: Found %s media files", str(video2))
|
||||||
|
status = int(0)
|
||||||
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
|
||||||
if clientAgent in ['utorrent', 'transmission'] and extractionSuccess == False and inputHash:
|
|
||||||
if clientAgent == 'utorrent':
|
|
||||||
try:
|
|
||||||
Logger.debug("MAIN: Connecting to %s: %s", clientAgent, uTorrentWEBui)
|
|
||||||
utorrentClass = UTorrentClient(uTorrentWEBui, uTorrentUSR, uTorrentPWD)
|
|
||||||
except:
|
|
||||||
Logger.exception("MAIN: Failed to connect to uTorrent")
|
|
||||||
else:
|
else:
|
||||||
try:
|
Logger.debug("MAIN: Found %s media files in output. %s were found in input", str(video2), str(video))
|
||||||
Logger.debug("MAIN: Connecting to %s: http://%s:%s", clientAgent, TransmissionHost, TransmissionPort)
|
|
||||||
TransmissionClass = TransmissionClient(TransmissionHost, TransmissionPort, TransmissionUSR, TransmissionPWD)
|
|
||||||
except:
|
|
||||||
Logger.exception("MAIN: Failed to connect to Transmission")
|
|
||||||
|
|
||||||
# if we are using links with uTorrent it means we need to pause it in order to access the files
|
processCategories = cpsCategory + sbCategory + hpCategory + mlCategory + gzCategory
|
||||||
if useLink != "no":
|
|
||||||
Logger.debug("MAIN: Stoping torrent %s in %s while processing", inputName, clientAgent)
|
|
||||||
if clientAgent == 'utorrent':
|
|
||||||
utorrentClass.stop(inputHash)
|
|
||||||
else:
|
|
||||||
TransmissionClass.stop_torrent(inputID)
|
|
||||||
time.sleep(5) # Give uTorrent some time to catch up with the change
|
|
||||||
|
|
||||||
# Delete torrent and torrentdata from uTorrent
|
if (inputCategory in user_script_categories and not "NONE" in user_script_categories) or ("ALL" in user_script_categories and not inputCategory in processCategories):
|
||||||
if deleteOriginal == 1:
|
Logger.info("MAIN: Processing user script %s.", user_script)
|
||||||
Logger.debug("MAIN: Deleting torrent %s from %s", inputName, clientAgent)
|
result = external_script(outputDestination)
|
||||||
if clientAgent == 'utorrent':
|
elif status == int(0) or (inputCategory in hpCategory + mlCategory + gzCategory): # if movies linked/extracted or for other categories.
|
||||||
utorrentClass.removedata(inputHash)
|
|
||||||
if not inputCategory == hpCategory:
|
|
||||||
utorrentClass.remove(inputHash)
|
|
||||||
else:
|
|
||||||
if inputCategory == hpCategory:
|
|
||||||
TransmissionClass.remove_torrent(inputID, False)
|
|
||||||
else:
|
|
||||||
TransmissionClass.remove_torrent(inputID, True)
|
|
||||||
time.sleep(5)
|
|
||||||
|
|
||||||
processCategories = Set([cpsCategory, sbCategory, hpCategory, mlCategory, gzCategory])
|
|
||||||
|
|
||||||
if inputCategory and not (inputCategory in processCategories): # no extra processing to be done... yet.
|
|
||||||
Logger.info("MAIN: No further processing to be done for category %s.", inputCategory)
|
|
||||||
result = 1
|
|
||||||
elif status == 0 or (inputCategory in [hpCategory, mlCategory, gzCategory]): # if movies linked/extracted or for other categories.
|
|
||||||
Logger.debug("MAIN: Calling autoProcess script for successful download.")
|
Logger.debug("MAIN: Calling autoProcess script for successful download.")
|
||||||
status = 0 # hp, my, gz don't support failed.
|
status = int(0) # hp, my, gz don't support failed.
|
||||||
else:
|
else:
|
||||||
Logger.error("MAIN: Something failed! Please check logs. Exiting")
|
Logger.error("MAIN: Something failed! Please check logs. Exiting")
|
||||||
sys.exit(-1)
|
sys.exit(-1)
|
||||||
|
|
||||||
if inputCategory == cpsCategory:
|
if inputCategory in cpsCategory:
|
||||||
Logger.info("MAIN: Calling CouchPotatoServer to post-process: %s", inputName)
|
Logger.info("MAIN: Calling CouchPotatoServer to post-process: %s", inputName)
|
||||||
download_id = inputHash
|
download_id = inputHash
|
||||||
result = autoProcessMovie.process(outputDestination, inputName, status, clientAgent, download_id)
|
result = autoProcessMovie.process(outputDestination, inputName, status, clientAgent, download_id, inputCategory)
|
||||||
elif inputCategory == sbCategory:
|
elif inputCategory in sbCategory:
|
||||||
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
||||||
result = autoProcessTV.processEpisode(outputDestination, inputName, status)
|
result = autoProcessTV.processEpisode(outputDestination, inputName, status, inputCategory)
|
||||||
elif inputCategory == hpCategory:
|
elif inputCategory in hpCategory:
|
||||||
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
||||||
result = autoProcessMusic.process(outputDestination, inputName, status)
|
result = autoProcessMusic.process(inputDirectory, inputName, status, inputCategory)
|
||||||
elif inputCategory == mlCategory:
|
elif inputCategory in mlCategory:
|
||||||
Logger.info("MAIN: Calling Mylar to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Mylar to post-process: %s", inputName)
|
||||||
result = autoProcessComics.processEpisode(outputDestination, inputName, status)
|
result = autoProcessComics.processEpisode(outputDestination, inputName, status, inputCategory)
|
||||||
elif inputCategory == gzCategory:
|
elif inputCategory in gzCategory:
|
||||||
Logger.info("MAIN: Calling Gamez to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Gamez to post-process: %s", inputName)
|
||||||
result = autoProcessGames.process(outputDestination, inputName, status)
|
result = autoProcessGames.process(outputDestination, inputName, status, inputCategory)
|
||||||
|
|
||||||
if result == 1:
|
if result == 1:
|
||||||
Logger.info("MAIN: A problem was reported in the autoProcess* script. If torrent was pasued we will resume seeding")
|
Logger.info("MAIN: A problem was reported in the autoProcess* script. If torrent was paused we will resume seeding")
|
||||||
|
|
||||||
|
if inputCategory in hpCategory:
|
||||||
|
# we need to move the output dir files back...
|
||||||
|
Logger.debug("MAIN: Moving temporary HeadPhones files back to allow seeding.")
|
||||||
|
for item in copy_list:
|
||||||
|
if os.path.isfile(os.path.normpath(item[1])): # check to ensure temp files still exist.
|
||||||
|
if os.path.isfile(os.path.normpath(item[0])): # both exist, remove temp version
|
||||||
|
Logger.debug("MAIN: File %s still present. Removing tempoary file %s", str(item[0]), str(item[1]))
|
||||||
|
os.unlink(os.path.normpath(item[1]))
|
||||||
|
continue
|
||||||
|
else: # move temp version back to allow seeding or Torrent removal.
|
||||||
|
Logger.debug("MAIN: Moving %s to %s", str(item[1]), str(item[0]))
|
||||||
|
shutil.move(os.path.normpath(item[1]), os.path.normpath(item[0]))
|
||||||
|
continue
|
||||||
|
|
||||||
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
# Hardlink solution for uTorrent, need to implent support for deluge, transmission
|
||||||
if clientAgent in ['utorrent', 'transmission'] and extractionSuccess == False and inputHash and useLink != "no" and deleteOriginal == 0: # we always want to resume seeding, for now manually find out what is wrong when extraction fails
|
if clientAgent in ['utorrent', 'transmission'] and inputHash:
|
||||||
Logger.debug("MAIN: Starting torrent %s in %s", inputName, clientAgent)
|
# Delete torrent and torrentdata from Torrent client if processing was successful.
|
||||||
if clientAgent == 'utorrent':
|
if deleteOriginal == 1 and result != 1:
|
||||||
utorrentClass.start(inputHash)
|
Logger.debug("MAIN: Deleting torrent %s from %s", inputName, clientAgent)
|
||||||
|
if clientAgent == 'utorrent' and utorrentClass != "":
|
||||||
|
utorrentClass.removedata(inputHash)
|
||||||
|
if not inputCategory in hpCategory:
|
||||||
|
utorrentClass.remove(inputHash)
|
||||||
|
if clientAgent == 'transmission' and TransmissionClass !="":
|
||||||
|
if inputCategory in hpCategory: #don't delete actual files for hp category, just remove torrent.
|
||||||
|
TransmissionClass.remove_torrent(inputID, False)
|
||||||
|
else:
|
||||||
|
TransmissionClass.remove_torrent(inputID, True)
|
||||||
|
# we always want to resume seeding, for now manually find out what is wrong when extraction fails
|
||||||
else:
|
else:
|
||||||
TransmissionClass.start_torrent(inputID)
|
Logger.debug("MAIN: Starting torrent %s in %s", inputName, clientAgent)
|
||||||
|
if clientAgent == 'utorrent' and utorrentClass != "":
|
||||||
|
utorrentClass.start(inputHash)
|
||||||
|
if clientAgent == 'transmission' and TransmissionClass !="":
|
||||||
|
TransmissionClass.start_torrent(inputID)
|
||||||
|
time.sleep(5)
|
||||||
|
#cleanup
|
||||||
|
if inputCategory in processCategories and result == 0 and os.path.isdir(outputDestination):
|
||||||
|
num_files_new = int(0)
|
||||||
|
file_list = []
|
||||||
|
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
||||||
|
for file in filenames:
|
||||||
|
filePath = os.path.join(dirpath, file)
|
||||||
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
|
if fileExtension in mediaContainer or fileExtension in metaContainer:
|
||||||
|
num_files_new = num_files_new + 1
|
||||||
|
file_list.append(file)
|
||||||
|
if num_files_new == int(0):
|
||||||
|
Logger.info("All files have been processed. Cleaning outputDirectory %s", outputDestination)
|
||||||
|
shutil.rmtree(outputDestination)
|
||||||
|
else:
|
||||||
|
Logger.info("outputDirectory %s still contains %s media and/or meta files. This directory will not be removed.", outputDestination, num_files_new)
|
||||||
|
for item in file_list:
|
||||||
|
Logger.debug("media/meta file found: %s", item)
|
||||||
Logger.info("MAIN: All done.")
|
Logger.info("MAIN: All done.")
|
||||||
|
|
||||||
|
def external_script(outputDestination):
|
||||||
|
|
||||||
|
result_final = int(0) # start at 0.
|
||||||
|
num_files = int(0)
|
||||||
|
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
||||||
|
for file in filenames:
|
||||||
|
|
||||||
|
filePath = os.path.join(dirpath, file)
|
||||||
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
|
|
||||||
|
if fileExtension in user_script_mediaExtensions or user_script_mediaExtensions == "ALL":
|
||||||
|
num_files = num_files + 1
|
||||||
|
command = [user_script]
|
||||||
|
for param in user_script_param:
|
||||||
|
if param == "FN":
|
||||||
|
command.append(file)
|
||||||
|
continue
|
||||||
|
elif param == "FP":
|
||||||
|
command.append(filePath)
|
||||||
|
continue
|
||||||
|
elif param == "DN":
|
||||||
|
command.append(dirpath)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
command.append(param)
|
||||||
|
continue
|
||||||
|
Logger.info("Running script %s on file %s.", command, filePath)
|
||||||
|
try:
|
||||||
|
p = Popen(command)
|
||||||
|
res = p.wait()
|
||||||
|
if str(res) in user_script_successCodes: # Linux returns 0 for successful.
|
||||||
|
Logger.info("UserScript %s was successfull", command[0])
|
||||||
|
result = int(0)
|
||||||
|
else:
|
||||||
|
Logger.error("UserScript %s has failed with return code: %s", command[0], res)
|
||||||
|
Logger.info("If the UserScript completed successfully you should add %s to the user_script_successCodes", res)
|
||||||
|
result = int(1)
|
||||||
|
except:
|
||||||
|
Logger.exception("UserScript %s has failed", command[0])
|
||||||
|
result = int(1)
|
||||||
|
final_result = final_result + result
|
||||||
|
|
||||||
|
time.sleep(user_delay)
|
||||||
|
num_files_new = int(0)
|
||||||
|
for dirpath, dirnames, filenames in os.walk(outputDestination):
|
||||||
|
for file in filenames:
|
||||||
|
|
||||||
|
filePath = os.path.join(dirpath, file)
|
||||||
|
fileName, fileExtension = os.path.splitext(file)
|
||||||
|
|
||||||
|
if fileExtension in user_script_mediaExtensions or user_script_mediaExtensions == "ALL":
|
||||||
|
num_files_new = num_files_new + 1
|
||||||
|
|
||||||
|
if user_script_clean == int(1) and num_files_new == int(0) and final_result == int(0):
|
||||||
|
Logger.info("All files have been processed. Cleaning outputDirectory %s", outputDestination)
|
||||||
|
shutil.rmtree(outputDestination)
|
||||||
|
elif user_script_clean == int(1) and num_files_new != int(0):
|
||||||
|
Logger.info("%s files were processed, but %s still remain. outputDirectory will not be cleaned.", num_files, num_files_new)
|
||||||
|
return final_result
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|
||||||
#check to migrate old cfg before trying to load.
|
#check to migrate old cfg before trying to load.
|
||||||
|
@ -258,8 +399,8 @@ if __name__ == "__main__":
|
||||||
uTorrentUSR = config.get("Torrent", "uTorrentUSR") # mysecretusr
|
uTorrentUSR = config.get("Torrent", "uTorrentUSR") # mysecretusr
|
||||||
uTorrentPWD = config.get("Torrent", "uTorrentPWD") # mysecretpwr
|
uTorrentPWD = config.get("Torrent", "uTorrentPWD") # mysecretpwr
|
||||||
|
|
||||||
TransmissionHost = config.get("Torrent", "TransmissionHost") # localhost
|
TransmissionHost = config.get("Torrent", "TransmissionHost") # localhost
|
||||||
TransmissionPort = config.get("Torrent", "TransmissionPort") # 8084
|
TransmissionPort = config.get("Torrent", "TransmissionPort") # 8084
|
||||||
TransmissionUSR = config.get("Torrent", "TransmissionUSR") # mysecretusr
|
TransmissionUSR = config.get("Torrent", "TransmissionUSR") # mysecretusr
|
||||||
TransmissionPWD = config.get("Torrent", "TransmissionPWD") # mysecretpwr
|
TransmissionPWD = config.get("Torrent", "TransmissionPWD") # mysecretpwr
|
||||||
|
|
||||||
|
@ -270,16 +411,26 @@ if __name__ == "__main__":
|
||||||
metaContainer = (config.get("Extensions", "metaExtensions")).split(',') # .nfo,.sub,.srt
|
metaContainer = (config.get("Extensions", "metaExtensions")).split(',') # .nfo,.sub,.srt
|
||||||
minSampleSize = int(config.get("Extensions", "minSampleSize")) # 200 (in MB)
|
minSampleSize = int(config.get("Extensions", "minSampleSize")) # 200 (in MB)
|
||||||
|
|
||||||
cpsCategory = config.get("CouchPotato", "cpsCategory") # movie
|
cpsCategory = (config.get("CouchPotato", "cpsCategory")).split(',') # movie
|
||||||
sbCategory = config.get("SickBeard", "sbCategory") # tv
|
sbCategory = (config.get("SickBeard", "sbCategory")).split(',') # tv
|
||||||
hpCategory = config.get("HeadPhones", "hpCategory") # music
|
sbFork = config.get("SickBeard", "fork") # tv
|
||||||
mlCategory = config.get("Mylar", "mlCategory") # comics
|
hpCategory = (config.get("HeadPhones", "hpCategory")).split(',') # music
|
||||||
gzCategory = config.get("Gamez", "gzCategory") # games
|
mlCategory = (config.get("Mylar", "mlCategory")).split(',') # comics
|
||||||
categories.append(cpsCategory)
|
gzCategory = (config.get("Gamez", "gzCategory")).split(',') # games
|
||||||
categories.append(sbCategory)
|
categories.extend(cpsCategory)
|
||||||
categories.append(hpCategory)
|
categories.extend(sbCategory)
|
||||||
categories.append(mlCategory)
|
categories.extend(hpCategory)
|
||||||
categories.append(gzCategory)
|
categories.extend(mlCategory)
|
||||||
|
categories.extend(gzCategory)
|
||||||
|
|
||||||
|
user_script_categories = config.get("UserScript", "user_script_categories").split(',') # NONE
|
||||||
|
if not "NONE" in user_script_categories:
|
||||||
|
user_script_mediaExtensions = (config.get("UserScript", "user_script_mediaExtensions")).split(',')
|
||||||
|
user_script = config.get("UserScript", "user_script_path")
|
||||||
|
user_script_param = (config.get("UserScript", "user_script_param")).split(',')
|
||||||
|
user_script_successCodes = (config.get("UserScript", "user_script_successCodes")).split(',')
|
||||||
|
user_script_clean = int(config.get("UserScript", "user_script_clean"))
|
||||||
|
user_delay = int(config.get("UserScript", "delay"))
|
||||||
|
|
||||||
transcode = int(config.get("Transcoder", "transcode"))
|
transcode = int(config.get("Transcoder", "transcode"))
|
||||||
|
|
||||||
|
|
|
@ -2,6 +2,7 @@ import sys
|
||||||
import os
|
import os
|
||||||
import ConfigParser
|
import ConfigParser
|
||||||
import logging
|
import logging
|
||||||
|
import errno
|
||||||
from subprocess import call
|
from subprocess import call
|
||||||
|
|
||||||
Logger = logging.getLogger()
|
Logger = logging.getLogger()
|
||||||
|
@ -49,6 +50,8 @@ def Transcode_directory(dirName):
|
||||||
outputAudioCodec = config.get("Transcoder", "outputAudioCodec").strip()
|
outputAudioCodec = config.get("Transcoder", "outputAudioCodec").strip()
|
||||||
outputAudioBitrate = config.get("Transcoder", "outputAudioBitrate").strip()
|
outputAudioBitrate = config.get("Transcoder", "outputAudioBitrate").strip()
|
||||||
outputSubtitleCodec = config.get("Transcoder", "outputSubtitleCodec").strip()
|
outputSubtitleCodec = config.get("Transcoder", "outputSubtitleCodec").strip()
|
||||||
|
outputFastStart = int(config.get("Transcoder", "outputFastStart"))
|
||||||
|
outputQualityPercent = int(config.get("Transcoder", "outputQualityPercent"))
|
||||||
if useNiceness:
|
if useNiceness:
|
||||||
niceness = int(config.get("Transcoder", "niceness"))
|
niceness = int(config.get("Transcoder", "niceness"))
|
||||||
|
|
||||||
|
@ -69,7 +72,7 @@ def Transcode_directory(dirName):
|
||||||
outputVideoExtension = '-transcoded' + outputVideoExtension # adds '-transcoded.ext'
|
outputVideoExtension = '-transcoded' + outputVideoExtension # adds '-transcoded.ext'
|
||||||
newfilePath = os.path.normpath(name + outputVideoExtension)
|
newfilePath = os.path.normpath(name + outputVideoExtension)
|
||||||
|
|
||||||
command = [ffmpeg, '-loglevel', 'warning', '-i', filePath, '-map', '0']
|
command = [ffmpeg, '-loglevel', 'warning', '-i', filePath, '-map', '0'] # -map 0 takes all input streams
|
||||||
|
|
||||||
if useNiceness:
|
if useNiceness:
|
||||||
command = ['nice', '-%d' % niceness] + command
|
command = ['nice', '-%d' % niceness] + command
|
||||||
|
@ -85,10 +88,10 @@ def Transcode_directory(dirName):
|
||||||
command.append('copy')
|
command.append('copy')
|
||||||
if len(outputVideoFramerate) > 0:
|
if len(outputVideoFramerate) > 0:
|
||||||
command.append('-r')
|
command.append('-r')
|
||||||
command.append(outputVideoFramerate)
|
command.append(str(outputVideoFramerate))
|
||||||
if len(outputVideoBitrate) > 0:
|
if len(outputVideoBitrate) > 0:
|
||||||
command.append('-b:v')
|
command.append('-b:v')
|
||||||
command.append(outputVideoBitrate)
|
command.append(str(outputVideoBitrate))
|
||||||
if len(outputAudioCodec) > 0:
|
if len(outputAudioCodec) > 0:
|
||||||
command.append('-c:a')
|
command.append('-c:a')
|
||||||
command.append(outputAudioCodec)
|
command.append(outputAudioCodec)
|
||||||
|
@ -100,16 +103,22 @@ def Transcode_directory(dirName):
|
||||||
command.append('copy')
|
command.append('copy')
|
||||||
if len(outputAudioBitrate) > 0:
|
if len(outputAudioBitrate) > 0:
|
||||||
command.append('-b:a')
|
command.append('-b:a')
|
||||||
command.append(outputAudioBitrate)
|
command.append(str(outputAudioBitrate))
|
||||||
if len(outputSubtitleCodec) > 0:
|
if outputFastStart > 0:
|
||||||
|
command.append('-movflags')
|
||||||
|
command.append('+faststart')
|
||||||
|
if outputQualityPercent > 0:
|
||||||
|
command.append('-q:a')
|
||||||
|
command.append(str(outputQualityPercent))
|
||||||
|
if len(outputSubtitleCodec) > 0: # Not every subtitle codec can be used for every video container format!
|
||||||
command.append('-c:s')
|
command.append('-c:s')
|
||||||
command.append(outputSubtitleCodec)
|
command.append(outputSubtitleCodec) # http://en.wikibooks.org/wiki/FFMPEG_An_Intermediate_Guide/subtitle_options
|
||||||
else:
|
else:
|
||||||
command.append('-sn') # Don't copy the subtitles over
|
command.append('-sn') # Don't copy the subtitles over
|
||||||
command.append(newfilePath)
|
command.append(newfilePath)
|
||||||
|
|
||||||
try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)
|
try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)
|
||||||
os.remove(newFilePath)
|
os.remove(newfilePath)
|
||||||
except OSError, e:
|
except OSError, e:
|
||||||
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
|
if e.errno != errno.ENOENT: # Ignore the error if it's just telling us that the file doesn't exist
|
||||||
Logger.debug("Error when removing transcoding target: %s", e)
|
Logger.debug("Error when removing transcoding target: %s", e)
|
||||||
|
@ -117,17 +126,21 @@ def Transcode_directory(dirName):
|
||||||
Logger.debug("Error when removing transcoding target: %s", e)
|
Logger.debug("Error when removing transcoding target: %s", e)
|
||||||
|
|
||||||
Logger.info("Transcoding video: %s", file)
|
Logger.info("Transcoding video: %s", file)
|
||||||
|
cmd = ""
|
||||||
|
for item in command:
|
||||||
|
cmd = cmd + " " + item
|
||||||
|
Logger.debug("calling command:%s", cmd)
|
||||||
result = 1 # set result to failed in case call fails.
|
result = 1 # set result to failed in case call fails.
|
||||||
try:
|
try:
|
||||||
result = call(command)
|
result = call(command)
|
||||||
except:
|
except:
|
||||||
Logger.exception("Transcoding of video %s has failed", filePath)
|
Logger.exception("Transcoding of video %s has failed", filePath)
|
||||||
if result == 0:
|
if result == 0:
|
||||||
Logger.info("Transcoding of video %s to %s succeded", filePath, newfilePath)
|
Logger.info("Transcoding of video %s to %s succeeded", filePath, newfilePath)
|
||||||
if duplicate == 0: # we get rid of the original file
|
if duplicate == 0: # we get rid of the original file
|
||||||
os.unlink(filePath)
|
os.unlink(filePath)
|
||||||
else:
|
else:
|
||||||
Logger.error("Transcoding of video %s to %s failed", filePath, newfilePath)
|
Logger.error("Transcoding of video %s to %s failed", filePath, newfilePath)
|
||||||
# this will be 0 (successful) it all are sucessful, else will return a positive integer for failure.
|
# this will be 0 (successful) it all are successful, else will return a positive integer for failure.
|
||||||
final_result = final_result + result
|
final_result = final_result + result
|
||||||
return final_result
|
return final_result
|
||||||
|
|
|
@ -4,11 +4,14 @@ import os.path
|
||||||
import time
|
import time
|
||||||
import ConfigParser
|
import ConfigParser
|
||||||
import logging
|
import logging
|
||||||
|
import socket
|
||||||
|
|
||||||
from nzbToMediaEnv import *
|
from nzbToMediaEnv import *
|
||||||
|
from nzbToMediaUtil import *
|
||||||
from nzbToMediaSceneExceptions import process_all_exceptions
|
from nzbToMediaSceneExceptions import process_all_exceptions
|
||||||
|
|
||||||
Logger = logging.getLogger()
|
Logger = logging.getLogger()
|
||||||
|
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
|
||||||
|
|
||||||
class AuthURLOpener(urllib.FancyURLopener):
|
class AuthURLOpener(urllib.FancyURLopener):
|
||||||
def __init__(self, user, pw):
|
def __init__(self, user, pw):
|
||||||
|
@ -29,7 +32,7 @@ class AuthURLOpener(urllib.FancyURLopener):
|
||||||
return urllib.FancyURLopener.open(self, url)
|
return urllib.FancyURLopener.open(self, url)
|
||||||
|
|
||||||
|
|
||||||
def processEpisode(dirName, nzbName=None, status=0):
|
def processEpisode(dirName, nzbName=None, status=0, inputCategory=None):
|
||||||
|
|
||||||
config = ConfigParser.ConfigParser()
|
config = ConfigParser.ConfigParser()
|
||||||
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
||||||
|
@ -41,26 +44,31 @@ def processEpisode(dirName, nzbName=None, status=0):
|
||||||
|
|
||||||
config.read(configFilename)
|
config.read(configFilename)
|
||||||
|
|
||||||
host = config.get("Mylar", "host")
|
section = "Mylar"
|
||||||
port = config.get("Mylar", "port")
|
if inputCategory != None and config.has_section(inputCategory):
|
||||||
username = config.get("Mylar", "username")
|
section = inputCategory
|
||||||
password = config.get("Mylar", "password")
|
host = config.get(section, "host")
|
||||||
|
port = config.get(section, "port")
|
||||||
|
username = config.get(section, "username")
|
||||||
|
password = config.get(section, "password")
|
||||||
try:
|
try:
|
||||||
ssl = int(config.get("Mylar", "ssl"))
|
ssl = int(config.get(section, "ssl"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
ssl = 0
|
ssl = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
web_root = config.get("Mylar", "web_root")
|
web_root = config.get(section, "web_root")
|
||||||
except ConfigParser.NoOptionError:
|
except ConfigParser.NoOptionError:
|
||||||
web_root = ""
|
web_root = ""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
watch_dir = config.get("Mylar", "watch_dir")
|
watch_dir = config.get(section, "watch_dir")
|
||||||
except ConfigParser.NoOptionError:
|
except ConfigParser.NoOptionError:
|
||||||
watch_dir = ""
|
watch_dir = ""
|
||||||
params = {}
|
params = {}
|
||||||
|
|
||||||
|
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
||||||
|
|
||||||
if dirName == "Manual Run" and watch_dir != "":
|
if dirName == "Manual Run" and watch_dir != "":
|
||||||
dirName = watch_dir
|
dirName = watch_dir
|
||||||
|
|
||||||
|
|
|
@ -7,12 +7,15 @@ import datetime
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import socket
|
||||||
|
|
||||||
from nzbToMediaEnv import *
|
from nzbToMediaEnv import *
|
||||||
|
from nzbToMediaUtil import *
|
||||||
|
|
||||||
Logger = logging.getLogger()
|
Logger = logging.getLogger()
|
||||||
|
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
|
||||||
|
|
||||||
def process(dirName, nzbName=None, status=0):
|
def process(dirName, nzbName=None, status=0, inputCategory=None):
|
||||||
|
|
||||||
status = int(status)
|
status = int(status)
|
||||||
config = ConfigParser.ConfigParser()
|
config = ConfigParser.ConfigParser()
|
||||||
|
@ -25,17 +28,21 @@ def process(dirName, nzbName=None, status=0):
|
||||||
|
|
||||||
config.read(configFilename)
|
config.read(configFilename)
|
||||||
|
|
||||||
host = config.get("Gamez", "host")
|
section = "Gamez"
|
||||||
port = config.get("Gamez", "port")
|
if inputCategory != None and config.has_section(inputCategory):
|
||||||
apikey = config.get("Gamez", "apikey")
|
section = inputCategory
|
||||||
|
|
||||||
|
host = config.get(section, "host")
|
||||||
|
port = config.get(section, "port")
|
||||||
|
apikey = config.get(section, "apikey")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ssl = int(config.get("Gamez", "ssl"))
|
ssl = int(config.get(section, "ssl"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
ssl = 0
|
ssl = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
web_root = config.get("Gamez", "web_root")
|
web_root = config.get(section, "web_root")
|
||||||
except ConfigParser.NoOptionError:
|
except ConfigParser.NoOptionError:
|
||||||
web_root = ""
|
web_root = ""
|
||||||
|
|
||||||
|
@ -44,6 +51,8 @@ def process(dirName, nzbName=None, status=0):
|
||||||
else:
|
else:
|
||||||
protocol = "http://"
|
protocol = "http://"
|
||||||
|
|
||||||
|
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
||||||
|
|
||||||
baseURL = protocol + host + ":" + port + web_root + "/api?api_key=" + apikey + "&mode="
|
baseURL = protocol + host + ":" + port + web_root + "/api?api_key=" + apikey + "&mode="
|
||||||
|
|
||||||
fields = nzbName.split("-")
|
fields = nzbName.split("-")
|
||||||
|
|
|
@ -7,12 +7,15 @@ import datetime
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import socket
|
||||||
|
|
||||||
import Transcoder
|
import Transcoder
|
||||||
from nzbToMediaEnv import *
|
from nzbToMediaEnv import *
|
||||||
|
from nzbToMediaUtil import *
|
||||||
from nzbToMediaSceneExceptions import process_all_exceptions
|
from nzbToMediaSceneExceptions import process_all_exceptions
|
||||||
|
|
||||||
Logger = logging.getLogger()
|
Logger = logging.getLogger()
|
||||||
|
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
|
||||||
|
|
||||||
def get_imdb(nzbName, dirName):
|
def get_imdb(nzbName, dirName):
|
||||||
|
|
||||||
|
@ -45,29 +48,42 @@ def get_movie_info(baseURL, imdbid, download_id):
|
||||||
|
|
||||||
if not imdbid and not download_id:
|
if not imdbid and not download_id:
|
||||||
return ""
|
return ""
|
||||||
url = baseURL + "movie.list/?status=active"
|
|
||||||
|
|
||||||
Logger.debug("Opening URL: %s", url)
|
|
||||||
|
|
||||||
try:
|
|
||||||
urlObj = urllib.urlopen(url)
|
|
||||||
except:
|
|
||||||
Logger.exception("Unable to open URL")
|
|
||||||
return ""
|
|
||||||
|
|
||||||
movie_id = ""
|
movie_id = ""
|
||||||
releaselist = []
|
releaselist = []
|
||||||
try:
|
movieid = []
|
||||||
result = json.load(urlObj)
|
library = []
|
||||||
movieid = [item["id"] for item in result["movies"]]
|
offset = int(0)
|
||||||
library = [item["library"]["identifier"] for item in result["movies"]]
|
while True:
|
||||||
except:
|
url = baseURL + "media.list/?status=active" + "&limit_offset=50," + str(offset)
|
||||||
Logger.exception("Unable to parse json data for movies")
|
|
||||||
return ""
|
Logger.debug("Opening URL: %s", url)
|
||||||
|
|
||||||
|
try:
|
||||||
|
urlObj = urllib.urlopen(url)
|
||||||
|
except:
|
||||||
|
Logger.exception("Unable to open URL")
|
||||||
|
break
|
||||||
|
|
||||||
|
movieid2 = []
|
||||||
|
library2 = []
|
||||||
|
try:
|
||||||
|
result = json.load(urlObj)
|
||||||
|
movieid2 = [item["id"] for item in result["movies"]]
|
||||||
|
library2 = [item["library"]["identifier"] for item in result["movies"]]
|
||||||
|
except:
|
||||||
|
Logger.exception("Unable to parse json data for movies")
|
||||||
|
break
|
||||||
|
|
||||||
|
movieid.extend(movieid2)
|
||||||
|
library.extend(library2)
|
||||||
|
if len(movieid2) < int(50): # finished parsing list of movies. Time to break.
|
||||||
|
break
|
||||||
|
offset = offset + 50
|
||||||
|
|
||||||
for index in range(len(movieid)):
|
for index in range(len(movieid)):
|
||||||
if not imdbid:
|
if not imdbid:
|
||||||
url = baseURL + "movie.get/?id=" + str(movieid[index])
|
url = baseURL + "media.get/?id=" + str(movieid[index])
|
||||||
Logger.debug("Opening URL: %s", url)
|
Logger.debug("Opening URL: %s", url)
|
||||||
try:
|
try:
|
||||||
urlObj = urllib.urlopen(url)
|
urlObj = urllib.urlopen(url)
|
||||||
|
@ -76,7 +92,7 @@ def get_movie_info(baseURL, imdbid, download_id):
|
||||||
return ""
|
return ""
|
||||||
try:
|
try:
|
||||||
result = json.load(urlObj)
|
result = json.load(urlObj)
|
||||||
releaselist = [item["info"]["download_id"] for item in result["movie"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
releaselist = [item["info"]["download_id"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
||||||
except:
|
except:
|
||||||
Logger.exception("Unable to parse json data for releases")
|
Logger.exception("Unable to parse json data for releases")
|
||||||
return ""
|
return ""
|
||||||
|
@ -102,7 +118,7 @@ def get_status(baseURL, movie_id, clientAgent, download_id):
|
||||||
|
|
||||||
if not movie_id:
|
if not movie_id:
|
||||||
return "", clientAgent, "none", "none"
|
return "", clientAgent, "none", "none"
|
||||||
url = baseURL + "movie.get/?id=" + str(movie_id)
|
url = baseURL + "media.get/?id=" + str(movie_id)
|
||||||
Logger.debug("Looking for status of movie: %s - with release sent to clientAgent: %s and download_id: %s", movie_id, clientAgent, download_id)
|
Logger.debug("Looking for status of movie: %s - with release sent to clientAgent: %s and download_id: %s", movie_id, clientAgent, download_id)
|
||||||
Logger.debug("Opening URL: %s", url)
|
Logger.debug("Opening URL: %s", url)
|
||||||
|
|
||||||
|
@ -113,7 +129,7 @@ def get_status(baseURL, movie_id, clientAgent, download_id):
|
||||||
return "", clientAgent, "none", "none"
|
return "", clientAgent, "none", "none"
|
||||||
result = json.load(urlObj)
|
result = json.load(urlObj)
|
||||||
try:
|
try:
|
||||||
movie_status = result["movie"]["status"]["identifier"]
|
movie_status = result["media"]["status"]["identifier"]
|
||||||
Logger.debug("This movie is marked as status %s in CouchPotatoServer", movie_status)
|
Logger.debug("This movie is marked as status %s in CouchPotatoServer", movie_status)
|
||||||
except: # index out of range/doesn't exist?
|
except: # index out of range/doesn't exist?
|
||||||
Logger.exception("Could not find a status for this movie")
|
Logger.exception("Could not find a status for this movie")
|
||||||
|
@ -121,8 +137,8 @@ def get_status(baseURL, movie_id, clientAgent, download_id):
|
||||||
try:
|
try:
|
||||||
release_status = "none"
|
release_status = "none"
|
||||||
if download_id != "" and download_id != "none": # we have the download id from the downloader. Let's see if it's valid.
|
if download_id != "" and download_id != "none": # we have the download id from the downloader. Let's see if it's valid.
|
||||||
release_statuslist = [item["status"]["identifier"] for item in result["movie"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
release_statuslist = [item["status"]["identifier"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
||||||
clientAgentlist = [item["info"]["download_downloader"] for item in result["movie"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
clientAgentlist = [item["info"]["download_downloader"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
|
||||||
if len(release_statuslist) == 1: # we have found a release by this id. :)
|
if len(release_statuslist) == 1: # we have found a release by this id. :)
|
||||||
release_status = release_statuslist[0]
|
release_status = release_statuslist[0]
|
||||||
clientAgent = clientAgentlist[0]
|
clientAgent = clientAgentlist[0]
|
||||||
|
@ -132,7 +148,7 @@ def get_status(baseURL, movie_id, clientAgent, download_id):
|
||||||
clients = [item for item in clientAgentlist if item.lower() == clientAgent.lower()]
|
clients = [item for item in clientAgentlist if item.lower() == clientAgent.lower()]
|
||||||
clientAgent = clients[0]
|
clientAgent = clients[0]
|
||||||
if len(clients) == 1: # ok.. a unique entry for download_id and clientAgent ;)
|
if len(clients) == 1: # ok.. a unique entry for download_id and clientAgent ;)
|
||||||
release_status = [item["status"]["identifier"] for item in result["movie"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower() and item["info"]["download_downloader"] == clientAgent][0]
|
release_status = [item["status"]["identifier"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower() and item["info"]["download_downloader"] == clientAgent][0]
|
||||||
Logger.debug("Found a single release for download_id: %s and clientAgent: %s. Release status is: %s", download_id, clientAgent, release_status)
|
Logger.debug("Found a single release for download_id: %s and clientAgent: %s. Release status is: %s", download_id, clientAgent, release_status)
|
||||||
else: # doesn't matter. only really used as secondary confirmation of movie status change. Let's continue.
|
else: # doesn't matter. only really used as secondary confirmation of movie status change. Let's continue.
|
||||||
Logger.debug("Found several releases for download_id: %s and clientAgent: %s. Cannot determine the release status", download_id, clientAgent)
|
Logger.debug("Found several releases for download_id: %s and clientAgent: %s. Cannot determine the release status", download_id, clientAgent)
|
||||||
|
@ -142,8 +158,8 @@ def get_status(baseURL, movie_id, clientAgent, download_id):
|
||||||
if download_id == "none": # if we couldn't find this initially, there is no need to check next time around.
|
if download_id == "none": # if we couldn't find this initially, there is no need to check next time around.
|
||||||
return movie_status, clientAgent, download_id, release_status
|
return movie_status, clientAgent, download_id, release_status
|
||||||
elif download_id == "": # in case we didn't get this from the downloader.
|
elif download_id == "": # in case we didn't get this from the downloader.
|
||||||
download_idlist = [item["info"]["download_id"] for item in result["movie"]["releases"] if item["status"]["identifier"] == "snatched"]
|
download_idlist = [item["info"]["download_id"] for item in result["media"]["releases"] if item["status"]["identifier"] == "snatched"]
|
||||||
clientAgentlist = [item["info"]["download_downloader"] for item in result["movie"]["releases"] if item["status"]["identifier"] == "snatched"]
|
clientAgentlist = [item["info"]["download_downloader"] for item in result["media"]["releases"] if item["status"]["identifier"] == "snatched"]
|
||||||
if len(clientAgentlist) == 1:
|
if len(clientAgentlist) == 1:
|
||||||
if clientAgent == "manual":
|
if clientAgent == "manual":
|
||||||
clientAgent = clientAgentlist[0]
|
clientAgent = clientAgentlist[0]
|
||||||
|
@ -176,7 +192,7 @@ def get_status(baseURL, movie_id, clientAgent, download_id):
|
||||||
download_id = "none"
|
download_id = "none"
|
||||||
return movie_status, clientAgent, download_id, release_status
|
return movie_status, clientAgent, download_id, release_status
|
||||||
|
|
||||||
def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id = ""):
|
def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id = "", inputCategory=None):
|
||||||
|
|
||||||
status = int(status)
|
status = int(status)
|
||||||
config = ConfigParser.ConfigParser()
|
config = ConfigParser.ConfigParser()
|
||||||
|
@ -189,21 +205,25 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
|
||||||
|
|
||||||
config.read(configFilename)
|
config.read(configFilename)
|
||||||
|
|
||||||
host = config.get("CouchPotato", "host")
|
section = "CouchPotato"
|
||||||
port = config.get("CouchPotato", "port")
|
if inputCategory != None and config.has_section(inputCategory):
|
||||||
apikey = config.get("CouchPotato", "apikey")
|
section = inputCategory
|
||||||
delay = float(config.get("CouchPotato", "delay"))
|
|
||||||
method = config.get("CouchPotato", "method")
|
host = config.get(section, "host")
|
||||||
delete_failed = int(config.get("CouchPotato", "delete_failed"))
|
port = config.get(section, "port")
|
||||||
wait_for = int(config.get("CouchPotato", "wait_for"))
|
apikey = config.get(section, "apikey")
|
||||||
|
delay = float(config.get(section, "delay"))
|
||||||
|
method = config.get(section, "method")
|
||||||
|
delete_failed = int(config.get(section, "delete_failed"))
|
||||||
|
wait_for = int(config.get(section, "wait_for"))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ssl = int(config.get("CouchPotato", "ssl"))
|
ssl = int(config.get(section, "ssl"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
ssl = 0
|
ssl = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
web_root = config.get("CouchPotato", "web_root")
|
web_root = config.get(section, "web_root")
|
||||||
except ConfigParser.NoOptionError:
|
except ConfigParser.NoOptionError:
|
||||||
web_root = ""
|
web_root = ""
|
||||||
|
|
||||||
|
@ -213,7 +233,7 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
|
||||||
transcode = 0
|
transcode = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
remoteCPS = int(config.get("CouchPotato", "remoteCPS"))
|
remoteCPS = int(config.get(section, "remoteCPS"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
remoteCPS = 0
|
remoteCPS = 0
|
||||||
|
|
||||||
|
@ -236,6 +256,7 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
|
||||||
initial_status, clientAgent, download_id, initial_release_status = get_status(baseURL, movie_id, clientAgent, download_id)
|
initial_status, clientAgent, download_id, initial_release_status = get_status(baseURL, movie_id, clientAgent, download_id)
|
||||||
|
|
||||||
process_all_exceptions(nzbName.lower(), dirName)
|
process_all_exceptions(nzbName.lower(), dirName)
|
||||||
|
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
||||||
|
|
||||||
if status == 0:
|
if status == 0:
|
||||||
if transcode == 1:
|
if transcode == 1:
|
||||||
|
@ -253,7 +274,7 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
|
||||||
if remoteCPS == 1:
|
if remoteCPS == 1:
|
||||||
command = command + "/?downloader=" + clientAgent + "&download_id=" + download_id
|
command = command + "/?downloader=" + clientAgent + "&download_id=" + download_id
|
||||||
else:
|
else:
|
||||||
command = command + "/?movie_folder=" + dirName + "&downloader=" + clientAgent + "&download_id=" + download_id
|
command = command + "/?media_folder=" + dirName + "&downloader=" + clientAgent + "&download_id=" + download_id
|
||||||
|
|
||||||
url = baseURL + command
|
url = baseURL + command
|
||||||
|
|
||||||
|
|
|
@ -7,12 +7,15 @@ import datetime
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import socket
|
||||||
|
|
||||||
from nzbToMediaEnv import *
|
from nzbToMediaEnv import *
|
||||||
|
from nzbToMediaUtil import *
|
||||||
|
|
||||||
Logger = logging.getLogger()
|
Logger = logging.getLogger()
|
||||||
|
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
|
||||||
|
|
||||||
def process(dirName, nzbName=None, status=0):
|
def process(dirName, nzbName=None, status=0, inputCategory=None):
|
||||||
|
|
||||||
status = int(status)
|
status = int(status)
|
||||||
config = ConfigParser.ConfigParser()
|
config = ConfigParser.ConfigParser()
|
||||||
|
@ -25,18 +28,22 @@ def process(dirName, nzbName=None, status=0):
|
||||||
|
|
||||||
config.read(configFilename)
|
config.read(configFilename)
|
||||||
|
|
||||||
host = config.get("HeadPhones", "host")
|
section = "HeadPhones"
|
||||||
port = config.get("HeadPhones", "port")
|
if inputCategory != None and config.has_section(inputCategory):
|
||||||
apikey = config.get("HeadPhones", "apikey")
|
section = inputCategory
|
||||||
delay = float(config.get("HeadPhones", "delay"))
|
|
||||||
|
host = config.get(section, "host")
|
||||||
|
port = config.get(section, "port")
|
||||||
|
apikey = config.get(section, "apikey")
|
||||||
|
delay = float(config.get(section, "delay"))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ssl = int(config.get("HeadPhones", "ssl"))
|
ssl = int(config.get(section, "ssl"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
ssl = 0
|
ssl = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
web_root = config.get("HeadPhones", "web_root")
|
web_root = config.get(section, "web_root")
|
||||||
except ConfigParser.NoOptionError:
|
except ConfigParser.NoOptionError:
|
||||||
web_root = ""
|
web_root = ""
|
||||||
|
|
||||||
|
@ -48,6 +55,8 @@ def process(dirName, nzbName=None, status=0):
|
||||||
if nzbName == "Manual Run":
|
if nzbName == "Manual Run":
|
||||||
delay = 0
|
delay = 0
|
||||||
|
|
||||||
|
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
||||||
|
|
||||||
baseURL = protocol + host + ":" + port + web_root + "/api?apikey=" + apikey + "&cmd="
|
baseURL = protocol + host + ":" + port + web_root + "/api?apikey=" + apikey + "&cmd="
|
||||||
|
|
||||||
if status == 0:
|
if status == 0:
|
||||||
|
@ -68,7 +77,7 @@ def process(dirName, nzbName=None, status=0):
|
||||||
return 1 # failure
|
return 1 # failure
|
||||||
|
|
||||||
result = urlObj.readlines()
|
result = urlObj.readlines()
|
||||||
Logger.info("HeaPhones returned %s", result)
|
Logger.info("HeadPhones returned %s", result)
|
||||||
if result[0] == "OK":
|
if result[0] == "OK":
|
||||||
Logger.info("%s started on HeadPhones for %s", command, nzbName)
|
Logger.info("%s started on HeadPhones for %s", command, nzbName)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -1,22 +1,3 @@
|
||||||
# Author: Nic Wolfe <nic@wolfeden.ca>
|
|
||||||
# URL: http://code.google.com/p/sickbeard/
|
|
||||||
#
|
|
||||||
# This file is part of Sick Beard.
|
|
||||||
#
|
|
||||||
# Sick Beard is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# Sick Beard is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import urllib
|
import urllib
|
||||||
import os
|
import os
|
||||||
|
@ -24,6 +5,7 @@ import ConfigParser
|
||||||
import logging
|
import logging
|
||||||
import shutil
|
import shutil
|
||||||
import time
|
import time
|
||||||
|
import socket
|
||||||
|
|
||||||
import Transcoder
|
import Transcoder
|
||||||
from nzbToMediaEnv import *
|
from nzbToMediaEnv import *
|
||||||
|
@ -31,6 +13,8 @@ from nzbToMediaUtil import *
|
||||||
from nzbToMediaSceneExceptions import process_all_exceptions
|
from nzbToMediaSceneExceptions import process_all_exceptions
|
||||||
|
|
||||||
Logger = logging.getLogger()
|
Logger = logging.getLogger()
|
||||||
|
TimeOut = 4 * int(TimeOut) # SickBeard needs to complete all moving and renaming before returning the log sequence via url.
|
||||||
|
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
|
||||||
|
|
||||||
|
|
||||||
class AuthURLOpener(urllib.FancyURLopener):
|
class AuthURLOpener(urllib.FancyURLopener):
|
||||||
|
@ -60,7 +44,7 @@ def delete(dirName):
|
||||||
Logger.exception("Unable to delete folder %s", dirName)
|
Logger.exception("Unable to delete folder %s", dirName)
|
||||||
|
|
||||||
|
|
||||||
def processEpisode(dirName, nzbName=None, failed=False):
|
def processEpisode(dirName, nzbName=None, failed=False, inputCategory=None):
|
||||||
|
|
||||||
status = int(failed)
|
status = int(failed)
|
||||||
config = ConfigParser.ConfigParser()
|
config = ConfigParser.ConfigParser()
|
||||||
|
@ -73,30 +57,34 @@ def processEpisode(dirName, nzbName=None, failed=False):
|
||||||
|
|
||||||
config.read(configFilename)
|
config.read(configFilename)
|
||||||
|
|
||||||
|
section = "SickBeard"
|
||||||
|
if inputCategory != None and config.has_section(inputCategory):
|
||||||
|
section = inputCategory
|
||||||
|
|
||||||
watch_dir = ""
|
watch_dir = ""
|
||||||
host = config.get("SickBeard", "host")
|
host = config.get(section, "host")
|
||||||
port = config.get("SickBeard", "port")
|
port = config.get(section, "port")
|
||||||
username = config.get("SickBeard", "username")
|
username = config.get(section, "username")
|
||||||
password = config.get("SickBeard", "password")
|
password = config.get(section, "password")
|
||||||
try:
|
try:
|
||||||
ssl = int(config.get("SickBeard", "ssl"))
|
ssl = int(config.get(section, "ssl"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
ssl = 0
|
ssl = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
web_root = config.get("SickBeard", "web_root")
|
web_root = config.get(section, "web_root")
|
||||||
except ConfigParser.NoOptionError:
|
except ConfigParser.NoOptionError:
|
||||||
web_root = ""
|
web_root = ""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
watch_dir = config.get("SickBeard", "watch_dir")
|
watch_dir = config.get(section, "watch_dir")
|
||||||
except ConfigParser.NoOptionError:
|
except ConfigParser.NoOptionError:
|
||||||
watch_dir = ""
|
watch_dir = ""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
failed_fork = int(config.get("SickBeard", "failed_fork"))
|
fork = config.get(section, "fork")
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except ConfigParser.NoOptionError:
|
||||||
failed_fork = 0
|
fork = "default"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
transcode = int(config.get("Transcoder", "transcode"))
|
transcode = int(config.get("Transcoder", "transcode"))
|
||||||
|
@ -104,21 +92,22 @@ def processEpisode(dirName, nzbName=None, failed=False):
|
||||||
transcode = 0
|
transcode = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
delete_failed = int(config.get("SickBeard", "delete_failed"))
|
delete_failed = int(config.get(section, "delete_failed"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
delete_failed = 0
|
delete_failed = 0
|
||||||
try:
|
try:
|
||||||
delay = float(config.get("SickBeard", "delay"))
|
delay = float(config.get(section, "delay"))
|
||||||
except (ConfigParser.NoOptionError, ValueError):
|
except (ConfigParser.NoOptionError, ValueError):
|
||||||
delay = 0
|
delay = 0
|
||||||
|
|
||||||
|
|
||||||
mediaContainer = (config.get("Extensions", "mediaExtensions")).split(',')
|
mediaContainer = (config.get("Extensions", "mediaExtensions")).split(',')
|
||||||
minSampleSize = int(config.get("Extensions", "minSampleSize"))
|
minSampleSize = int(config.get("Extensions", "minSampleSize"))
|
||||||
|
|
||||||
process_all_exceptions(nzbName.lower(), dirName)
|
if not fork in SICKBEARD_TORRENT:
|
||||||
|
process_all_exceptions(nzbName.lower(), dirName)
|
||||||
|
nzbName, dirName = converto_to_ascii(nzbName, dirName)
|
||||||
|
|
||||||
if nzbName != "Manual Run":
|
if nzbName != "Manual Run" and not fork in SICKBEARD_TORRENT:
|
||||||
# Now check if movie files exist in destination:
|
# Now check if movie files exist in destination:
|
||||||
video = int(0)
|
video = int(0)
|
||||||
for dirpath, dirnames, filenames in os.walk(dirName):
|
for dirpath, dirnames, filenames in os.walk(dirName):
|
||||||
|
@ -144,32 +133,27 @@ def processEpisode(dirName, nzbName=None, failed=False):
|
||||||
params = {}
|
params = {}
|
||||||
|
|
||||||
params['quiet'] = 1
|
params['quiet'] = 1
|
||||||
|
if fork in SICKBEARD_DIRNAME:
|
||||||
# if you have specified you are using development branch from fork https://github.com/Tolstyak/Sick-Beard.git
|
|
||||||
if failed_fork:
|
|
||||||
params['dirName'] = dirName
|
params['dirName'] = dirName
|
||||||
if nzbName != None:
|
|
||||||
params['nzbName'] = nzbName
|
|
||||||
params['failed'] = failed
|
|
||||||
if status == 0:
|
|
||||||
Logger.info("The download succeeded. Sending process request to SickBeard's failed branch")
|
|
||||||
else:
|
|
||||||
Logger.info("The download failed. Sending 'failed' process request to SickBeard's failed branch")
|
|
||||||
|
|
||||||
|
|
||||||
# this is our default behaviour to work with the standard Master branch of SickBeard
|
|
||||||
else:
|
else:
|
||||||
params['dir'] = dirName
|
params['dir'] = dirName
|
||||||
if nzbName != None:
|
|
||||||
params['nzbName'] = nzbName
|
if nzbName != None:
|
||||||
# the standard Master bamch of SickBeard cannot process failed downloads. So Exit here.
|
params['nzbName'] = nzbName
|
||||||
if status == 0:
|
|
||||||
Logger.info("The download succeeded. Sending process request to SickBeard")
|
if fork in SICKBEARD_FAILED:
|
||||||
else:
|
params['failed'] = failed
|
||||||
Logger.info("The download failed. Nothing to process")
|
|
||||||
if delete_failed and os.path.isdir(dirName) and not dirName in ['sys.argv[0]','/','']:
|
if status == 0:
|
||||||
delete(dirName)
|
Logger.info("The download succeeded. Sending process request to SickBeard's %s branch", fork)
|
||||||
return 0 # Success (as far as this script is concerned)
|
elif fork in SICKBEARD_FAILED:
|
||||||
|
Logger.info("The download failed. Sending 'failed' process request to SickBeard's %s branch", fork)
|
||||||
|
else:
|
||||||
|
Logger.info("The download failed. SickBeard's %s branch does not handle failed downloads. Nothing to process", fork)
|
||||||
|
if delete_failed and os.path.isdir(dirName) and not dirName in ['sys.argv[0]','/','']:
|
||||||
|
Logger.info("Deleting directory: %s", dirName)
|
||||||
|
delete(dirName)
|
||||||
|
return 0 # Success (as far as this script is concerned)
|
||||||
|
|
||||||
if status == 0 and transcode == 1: # only transcode successful downlaods
|
if status == 0 and transcode == 1: # only transcode successful downlaods
|
||||||
result = Transcoder.Transcode_directory(dirName)
|
result = Transcoder.Transcode_directory(dirName)
|
||||||
|
|
|
@ -12,6 +12,8 @@ def migrate():
|
||||||
configold = ConfigParser.ConfigParser()
|
configold = ConfigParser.ConfigParser()
|
||||||
configold.optionxform = str
|
configold.optionxform = str
|
||||||
|
|
||||||
|
categories = []
|
||||||
|
|
||||||
section = "CouchPotato"
|
section = "CouchPotato"
|
||||||
original = []
|
original = []
|
||||||
configFilenameold = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
configFilenameold = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
||||||
|
@ -35,6 +37,8 @@ def migrate():
|
||||||
continue
|
continue
|
||||||
if option in ["username", "password" ]: # these are no-longer needed.
|
if option in ["username", "password" ]: # these are no-longer needed.
|
||||||
continue
|
continue
|
||||||
|
if option == "cpsCategory":
|
||||||
|
categories.extend(value.split(','))
|
||||||
confignew.set(section, option, value)
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
section = "SickBeard"
|
section = "SickBeard"
|
||||||
|
@ -54,10 +58,18 @@ def migrate():
|
||||||
option, value = item
|
option, value = item
|
||||||
if option == "category": # change this old format
|
if option == "category": # change this old format
|
||||||
option = "sbCategory"
|
option = "sbCategory"
|
||||||
|
if option == "failed_fork": # change this old format
|
||||||
|
option = "fork"
|
||||||
|
if int(value) == 1:
|
||||||
|
value = "failed"
|
||||||
|
else:
|
||||||
|
value = "default"
|
||||||
if option == "outputDirectory": # move this to new location format
|
if option == "outputDirectory": # move this to new location format
|
||||||
value = os.path.split(os.path.normpath(value))[0]
|
value = os.path.split(os.path.normpath(value))[0]
|
||||||
confignew.set("Torrent", option, value)
|
confignew.set("Torrent", option, value)
|
||||||
continue
|
continue
|
||||||
|
if option == "sbCategory":
|
||||||
|
categories.extend(value.split(','))
|
||||||
confignew.set(section, option, value)
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
section = "HeadPhones"
|
section = "HeadPhones"
|
||||||
|
@ -73,6 +85,8 @@ def migrate():
|
||||||
if option in ["username", "password" ]: # these are no-longer needed.
|
if option in ["username", "password" ]: # these are no-longer needed.
|
||||||
continue
|
continue
|
||||||
option, value = item
|
option, value = item
|
||||||
|
if option == "hpCategory":
|
||||||
|
categories.extend(value.split(','))
|
||||||
confignew.set(section, option, value)
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
section = "Mylar"
|
section = "Mylar"
|
||||||
|
@ -83,6 +97,8 @@ def migrate():
|
||||||
pass
|
pass
|
||||||
for item in original:
|
for item in original:
|
||||||
option, value = item
|
option, value = item
|
||||||
|
if option == "mlCategory":
|
||||||
|
categories.extend(value.split(','))
|
||||||
confignew.set(section, option, value)
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
section = "Gamez"
|
section = "Gamez"
|
||||||
|
@ -92,11 +108,27 @@ def migrate():
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
for item in original:
|
for item in original:
|
||||||
|
option, value = item
|
||||||
if option in ["username", "password" ]: # these are no-longer needed.
|
if option in ["username", "password" ]: # these are no-longer needed.
|
||||||
continue
|
continue
|
||||||
option, value = item
|
if option == "gzCategory":
|
||||||
|
categories.extend(value.split(','))
|
||||||
confignew.set(section, option, value)
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
|
for section in categories:
|
||||||
|
original = []
|
||||||
|
try:
|
||||||
|
original = configold.items(section)
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
confignew.add_section(section)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
for item in original:
|
||||||
|
option, value = item
|
||||||
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
section = "Torrent"
|
section = "Torrent"
|
||||||
original = []
|
original = []
|
||||||
try:
|
try:
|
||||||
|
@ -149,6 +181,36 @@ def migrate():
|
||||||
option, value = item
|
option, value = item
|
||||||
confignew.set(section, option, value)
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
|
section = "UserScript"
|
||||||
|
original = []
|
||||||
|
try:
|
||||||
|
original = configold.items(section)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
for item in original:
|
||||||
|
option, value = item
|
||||||
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
|
section = "ASCII"
|
||||||
|
original = []
|
||||||
|
try:
|
||||||
|
original = configold.items(section)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
for item in original:
|
||||||
|
option, value = item
|
||||||
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
|
section = "passwords"
|
||||||
|
original = []
|
||||||
|
try:
|
||||||
|
original = configold.items(section)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
for item in original:
|
||||||
|
option, value = item
|
||||||
|
confignew.set(section, option, value)
|
||||||
|
|
||||||
section = "loggers"
|
section = "loggers"
|
||||||
original = []
|
original = []
|
||||||
try:
|
try:
|
||||||
|
@ -220,9 +282,10 @@ def migrate():
|
||||||
os.unlink(backupname)
|
os.unlink(backupname)
|
||||||
os.rename(configFilenameold, backupname)
|
os.rename(configFilenameold, backupname)
|
||||||
|
|
||||||
# rename our newly edited autoProcessMedia.cfg.sample to autoProcessMedia.cfg
|
if os.path.isfile(configFilenamenew):
|
||||||
os.rename(configFilenamenew, configFilenameold)
|
# rename our newly edited autoProcessMedia.cfg.sample to autoProcessMedia.cfg
|
||||||
return
|
os.rename(configFilenamenew, configFilenameold)
|
||||||
|
return
|
||||||
|
|
||||||
def addnzbget():
|
def addnzbget():
|
||||||
confignew = ConfigParser.ConfigParser()
|
confignew = ConfigParser.ConfigParser()
|
||||||
|
@ -242,8 +305,8 @@ def addnzbget():
|
||||||
|
|
||||||
|
|
||||||
section = "SickBeard"
|
section = "SickBeard"
|
||||||
envKeys = ['CATEGORY', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FAILED_FORK']
|
envKeys = ['CATEGORY', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK']
|
||||||
cfgKeys = ['sbCategory', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'failed_fork']
|
cfgKeys = ['sbCategory', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork']
|
||||||
for index in range(len(envKeys)):
|
for index in range(len(envKeys)):
|
||||||
key = 'NZBPO_SB' + envKeys[index]
|
key = 'NZBPO_SB' + envKeys[index]
|
||||||
if os.environ.has_key(key):
|
if os.environ.has_key(key):
|
||||||
|
|
|
@ -1,8 +1,19 @@
|
||||||
# Make things easy and less error prone by centralising all common values
|
# Make things easy and less error prone by centralising all common values
|
||||||
|
|
||||||
# Global Constants
|
# Global Constants
|
||||||
VERSION = 'V8.5'
|
VERSION = 'V9.0'
|
||||||
|
TimeOut = 60
|
||||||
|
|
||||||
# Constants pertinant to SabNzb
|
# Constants pertinant to SabNzb
|
||||||
SABNZB_NO_OF_ARGUMENTS = 8
|
SABNZB_NO_OF_ARGUMENTS = 8
|
||||||
|
SABNZB_0717_NO_OF_ARGUMENTS = 9
|
||||||
|
|
||||||
|
# Constants pertaining to SickBeard Branches:
|
||||||
|
# extend this list to include all branches/forks that use "failed" to handle failed downloads.
|
||||||
|
SICKBEARD_FAILED = ["failed", "TPB-failed", "Pistachitos", "TPB"]
|
||||||
|
# extend this list to include all branches/forks that use "dirName" not "dir"
|
||||||
|
SICKBEARD_DIRNAME = ["failed"]
|
||||||
|
# extend this list to include all branches/forks that process rar and link files for torrents and therefore skip extraction and linking in TorrentToMedia.
|
||||||
|
SICKBEARD_TORRENT = ["TPB", "TPB-failed", "Pistachitos"]
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -39,29 +39,47 @@ def create_destination(outputDestination):
|
||||||
sys.exit(-1)
|
sys.exit(-1)
|
||||||
|
|
||||||
def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
|
if inputCategory and os.path.isdir(os.path.join(inputDirectory, inputCategory)):
|
||||||
|
Logger.info("SEARCH: Found category directory %s in input directory directory %s", inputCategory, inputDirectory)
|
||||||
|
inputDirectory = os.path.join(inputDirectory, inputCategory)
|
||||||
|
Logger.info("SEARCH: Setting inputDirectory to %s", inputDirectory)
|
||||||
|
if inputName and os.path.isdir(os.path.join(inputDirectory, inputName)):
|
||||||
|
Logger.info("SEARCH: Found torrent directory %s in input directory directory %s", inputName, inputDirectory)
|
||||||
|
inputDirectory = os.path.join(inputDirectory, inputName)
|
||||||
|
Logger.info("SEARCH: Setting inputDirectory to %s", inputDirectory)
|
||||||
|
if inputName and os.path.isdir(os.path.join(inputDirectory, safeName(inputName))):
|
||||||
|
Logger.info("SEARCH: Found torrent directory %s in input directory directory %s", safeName(inputName), inputDirectory)
|
||||||
|
inputDirectory = os.path.join(inputDirectory, safeName(inputName))
|
||||||
|
Logger.info("SEARCH: Setting inputDirectory to %s", inputDirectory)
|
||||||
|
|
||||||
categorySearch = [os.path.normpath(inputDirectory), ""] # initializie
|
categorySearch = [os.path.normpath(inputDirectory), ""] # initializie
|
||||||
notfound = 0
|
notfound = 0
|
||||||
|
unique = int(0)
|
||||||
for x in range(10): # loop up through 10 directories looking for category.
|
for x in range(10): # loop up through 10 directories looking for category.
|
||||||
try:
|
try:
|
||||||
categorySearch2 = os.path.split(os.path.normpath(categorySearch[0]))
|
categorySearch2 = os.path.split(os.path.normpath(categorySearch[0]))
|
||||||
except: # this might happen when we can't go higher.
|
except: # this might happen when we can't go higher.
|
||||||
if inputCategory and inputName: # if these exists, we are ok to proceed, but assume we are in a root/common directory.
|
if unique == int(0):
|
||||||
Logger.info("SEARCH: Could not find a Torrent Name or category in the directory structure")
|
if inputCategory and inputName: # if these exists, we are ok to proceed, but assume we are in a root/common directory.
|
||||||
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
|
Logger.info("SEARCH: Could not find a category in the directory structure")
|
||||||
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
|
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
||||||
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
root = 1
|
||||||
root = 1
|
break # we are done
|
||||||
break # we are done
|
elif inputCategory: # if this exists, we are ok to proceed, but assume we are in a root/common directory and we have to check file dates.
|
||||||
elif inputCategory: # if this exists, we are ok to proceed, but assume we are in a root/common directory and we have to check file dates.
|
Logger.info("SEARCH: Could not find a torrent name or category in the directory structure")
|
||||||
Logger.info("SEARCH: Could not find a Torrent Name or Category in the directory structure")
|
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
||||||
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
|
root = 2
|
||||||
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
|
break # we are done
|
||||||
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
elif inputName: # we didn't find category after 10 loops. This is a problem.
|
||||||
root = 2
|
Logger.info("SEARCH: Could not find a category in the directory structure")
|
||||||
break # we are done
|
Logger.info("SEARCH: Files will be linked and will only be processed by the userscript if enabled for UNCAT or ALL")
|
||||||
else:
|
root = 1
|
||||||
Logger.error("SEARCH: Could not identify Category of Torrent Name in the directory structure. Please check downloader settings. Exiting")
|
break # we are done
|
||||||
sys.exit(-1)
|
else: # we didn't find this after 10 loops. This is a problem.
|
||||||
|
Logger.info("SEARCH: Could not identify category or torrent name from the directory structure.")
|
||||||
|
Logger.info("SEARCH: Files will be linked and will only be processed by the userscript if enabled for UNCAT or ALL")
|
||||||
|
root = 2
|
||||||
|
break # we are done
|
||||||
|
|
||||||
if categorySearch2[1] in categories:
|
if categorySearch2[1] in categories:
|
||||||
Logger.debug("SEARCH: Found Category: %s in directory structure", categorySearch2[1])
|
Logger.debug("SEARCH: Found Category: %s in directory structure", categorySearch2[1])
|
||||||
|
@ -73,7 +91,7 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
|
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
|
||||||
inputName = categorySearch[1]
|
inputName = categorySearch[1]
|
||||||
Logger.info("SEARCH: Identified Category: %s and Torrent Name: %s. We are in a unique directory, so we can proceed.", inputCategory, inputName)
|
Logger.info("SEARCH: Identified Category: %s and Torrent Name: %s. We are in a unique directory, so we can proceed.", inputCategory, inputName)
|
||||||
break # we are done
|
break # we are done
|
||||||
elif categorySearch[1] and not inputName: # assume the the next directory deep is the torrent name.
|
elif categorySearch[1] and not inputName: # assume the the next directory deep is the torrent name.
|
||||||
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], categorySearch[1]), categorySearch[0])
|
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], categorySearch[1]), categorySearch[0])
|
||||||
inputName = categorySearch[1]
|
inputName = categorySearch[1]
|
||||||
|
@ -82,12 +100,12 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
|
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
|
||||||
inputName = categorySearch[1]
|
inputName = categorySearch[1]
|
||||||
break # we are done
|
break # we are done
|
||||||
elif os.path.isdir(os.path.join(categorySearch[0], inputName)) and inputName: # testing for torrent name in first sub directory
|
elif inputName and os.path.isdir(os.path.join(categorySearch[0], inputName)): # testing for torrent name in first sub directory
|
||||||
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], inputName), categorySearch[0])
|
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], inputName), categorySearch[0])
|
||||||
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
|
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
|
||||||
inputDirectory = os.path.join(categorySearch[0], inputName) # we only want to search this next dir up.
|
inputDirectory = os.path.join(categorySearch[0], inputName) # we only want to search this next dir up.
|
||||||
break # we are done
|
break # we are done
|
||||||
elif os.path.isdir(os.path.join(categorySearch[0], safeName(inputName))) and inputName: # testing for torrent name in first sub directory
|
elif inputName and os.path.isdir(os.path.join(categorySearch[0], safeName(inputName))): # testing for torrent name in first sub directory
|
||||||
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], safeName(inputName)), categorySearch[0])
|
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], safeName(inputName)), categorySearch[0])
|
||||||
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
|
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
|
||||||
inputDirectory = os.path.join(categorySearch[0], safeName(inputName)) # we only want to search this next dir up.
|
inputDirectory = os.path.join(categorySearch[0], safeName(inputName)) # we only want to search this next dir up.
|
||||||
|
@ -105,8 +123,9 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
||||||
root = 2
|
root = 2
|
||||||
break
|
break
|
||||||
elif safeName(categorySearch2[1]) == safeName(inputName) and inputName: # we have identified a unique directory.
|
elif inputName and safeName(categorySearch2[1]) == safeName(inputName): # we have identified a unique directory.
|
||||||
Logger.info("SEARCH: Files appear to be in their own directory")
|
Logger.info("SEARCH: Files appear to be in their own directory")
|
||||||
|
unique = int(1)
|
||||||
if inputCategory: # we are ok to proceed.
|
if inputCategory: # we are ok to proceed.
|
||||||
break # we are done
|
break # we are done
|
||||||
else:
|
else:
|
||||||
|
@ -121,22 +140,23 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
|
||||||
categorySearch = categorySearch2 # ready for next loop
|
categorySearch = categorySearch2 # ready for next loop
|
||||||
continue # keep going
|
continue # keep going
|
||||||
|
|
||||||
if notfound == 1:
|
if notfound == 1 and not unique == int(1):
|
||||||
if inputCategory and inputName: # if these exists, we are ok to proceed, but assume we are in a root/common directory.
|
if inputCategory and inputName: # if these exists, we are ok to proceed, but assume we are in a root/common directory.
|
||||||
Logger.info("SEARCH: Could not find a category in the directory structure")
|
Logger.info("SEARCH: Could not find a category in the directory structure")
|
||||||
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
|
|
||||||
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
|
|
||||||
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
||||||
root = 1
|
root = 1
|
||||||
elif inputCategory: # if this exists, we are ok to proceed, but assume we are in a root/common directory and we have to check file dates.
|
elif inputCategory: # if this exists, we are ok to proceed, but assume we are in a root/common directory and we have to check file dates.
|
||||||
Logger.info("SEARCH: Could not find a Torrent Name or Category in the directory structure")
|
Logger.info("SEARCH: Could not find a torrent name or category in the directory structure")
|
||||||
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
|
|
||||||
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
|
|
||||||
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
Logger.info("SEARCH: We will try and determine which files to process, individually")
|
||||||
root = 2
|
root = 2
|
||||||
if not inputCategory: # we didn't find this after 10 loops. This is a problem.
|
elif inputName: # we didn't find category after 10 loops. This is a problem.
|
||||||
Logger.error("SEARCH: Could not identify category and torrent name from the directory structure. Please check downloader settings. Exiting")
|
Logger.info("SEARCH: Could not find a category in the directory structure")
|
||||||
sys.exit(-1) # Oh yeah.... WE ARE DONE!
|
Logger.info("SEARCH: Files will be linked and will only be processed by the userscript if enabled for UNCAT or ALL")
|
||||||
|
root = 1
|
||||||
|
else: # we didn't find this after 10 loops. This is a problem.
|
||||||
|
Logger.info("SEARCH: Could not identify category or torrent name from the directory structure.")
|
||||||
|
Logger.info("SEARCH: Files will be linked and will only be processed by the userscript if enabled for UNCAT or ALL")
|
||||||
|
root = 2
|
||||||
|
|
||||||
return inputDirectory, inputName, inputCategory, root
|
return inputDirectory, inputName, inputCategory, root
|
||||||
|
|
||||||
|
@ -297,6 +317,31 @@ def WakeUp():
|
||||||
else:
|
else:
|
||||||
Logger.info("System with mac: %s has been woken. Continuing with the rest of the script.", mac)
|
Logger.info("System with mac: %s has been woken. Continuing with the rest of the script.", mac)
|
||||||
|
|
||||||
|
def converto_to_ascii(nzbName, dirName):
|
||||||
|
config = ConfigParser.ConfigParser()
|
||||||
|
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
||||||
|
if not os.path.isfile(configFilename):
|
||||||
|
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
|
||||||
|
return nzbName, dirName
|
||||||
|
config.read(configFilename)
|
||||||
|
ascii_convert = int(config.get("ASCII", "convert"))
|
||||||
|
if ascii_convert == 0 or os.name == 'nt': # just return if we don't want to convert or on windows os and "\" is replaced!.
|
||||||
|
return nzbName, dirName
|
||||||
|
|
||||||
|
nzbName2 = str(nzbName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
||||||
|
dirName2 = str(dirName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
||||||
|
if dirName != dirName2:
|
||||||
|
Logger.info("Renaming directory:%s to: %s.", dirName, nzbName2)
|
||||||
|
shutil.move(dirName, nzbName2)
|
||||||
|
for dirpath, dirnames, filesnames in os.walk(dirName2):
|
||||||
|
for filename in filesnames:
|
||||||
|
filename2 = str(filename.decode('ascii', 'replace').replace(u'\ufffd', '_'))
|
||||||
|
if filename != filename2:
|
||||||
|
Logger.info("Renaming file:%s to: %s.", filename, filename2)
|
||||||
|
shutil.move(filename, filename2)
|
||||||
|
nzbName = nzbName2
|
||||||
|
dirName = nzbName2
|
||||||
|
return nzbName, dirName
|
||||||
|
|
||||||
def parse_other(args):
|
def parse_other(args):
|
||||||
return os.path.normpath(sys.argv[1]), '', '', '', ''
|
return os.path.normpath(sys.argv[1]), '', '', '', ''
|
||||||
|
@ -327,7 +372,7 @@ def parse_deluge(args):
|
||||||
inputDirectory = os.path.normpath(sys.argv[3])
|
inputDirectory = os.path.normpath(sys.argv[3])
|
||||||
inputName = sys.argv[2]
|
inputName = sys.argv[2]
|
||||||
inputCategory = '' # We dont have a category yet
|
inputCategory = '' # We dont have a category yet
|
||||||
inputHash = ''
|
inputHash = sys.argv[1]
|
||||||
inputID = sys.argv[1]
|
inputID = sys.argv[1]
|
||||||
return inputDirectory, inputName, inputCategory, inputHash, inputID
|
return inputDirectory, inputName, inputCategory, inputHash, inputID
|
||||||
|
|
||||||
|
|
|
@ -32,7 +32,7 @@ web_root =
|
||||||
ssl = 0
|
ssl = 0
|
||||||
delay = 0
|
delay = 0
|
||||||
watch_dir =
|
watch_dir =
|
||||||
failed_fork = 0
|
fork = default
|
||||||
delete_failed = 0
|
delete_failed = 0
|
||||||
|
|
||||||
|
|
||||||
|
@ -83,7 +83,7 @@ useLink = hard
|
||||||
###### outputDirectory - Default output directory (categories will be appended as sub directory to outputDirectory)
|
###### outputDirectory - Default output directory (categories will be appended as sub directory to outputDirectory)
|
||||||
outputDirectory = /abs/path/to/complete/
|
outputDirectory = /abs/path/to/complete/
|
||||||
###### Other categories/labels defined for your downloader. Does not include CouchPotato, SickBeard, HeadPhones, Mylar categories.
|
###### Other categories/labels defined for your downloader. Does not include CouchPotato, SickBeard, HeadPhones, Mylar categories.
|
||||||
categories = music_videos,pictures,software
|
categories = music_videos,pictures,software,
|
||||||
###### uTorrent Hardlink solution (You must edit this if your using TorrentToMedia.py with uTorrent)
|
###### uTorrent Hardlink solution (You must edit this if your using TorrentToMedia.py with uTorrent)
|
||||||
uTorrentWEBui = http://localhost:8090/gui/
|
uTorrentWEBui = http://localhost:8090/gui/
|
||||||
uTorrentUSR = your username
|
uTorrentUSR = your username
|
||||||
|
@ -118,6 +118,10 @@ outputVideoBitrate = 800k
|
||||||
outputAudioCodec = libmp3lame
|
outputAudioCodec = libmp3lame
|
||||||
outputAudioBitrate = 128k
|
outputAudioBitrate = 128k
|
||||||
outputSubtitleCodec =
|
outputSubtitleCodec =
|
||||||
|
# outputFastStart. 1 will use -movflags + faststart. 0 will disable this from being used.
|
||||||
|
outputFastStart = 0
|
||||||
|
# outputQualityPercent. used as -q:a value. 0 will disable this from being used.
|
||||||
|
outputQualityPercent = 0
|
||||||
|
|
||||||
[WakeOnLan]
|
[WakeOnLan]
|
||||||
###### set wake = 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) the host and port specified.
|
###### set wake = 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) the host and port specified.
|
||||||
|
@ -126,6 +130,33 @@ host = 192.168.1.37
|
||||||
port = 80
|
port = 80
|
||||||
mac = 00:01:2e:2D:64:e1
|
mac = 00:01:2e:2D:64:e1
|
||||||
|
|
||||||
|
[UserScript]
|
||||||
|
#Use user_script for uncategorized download?
|
||||||
|
#Set the categories to use external script, comma separated.
|
||||||
|
#Use "UNCAT" to process non-category downloads, and "ALL" for all. Set to "NONE" to disable external script.
|
||||||
|
user_script_categories = NONE
|
||||||
|
#What extension do you want to process? Specify all the extension, or use "ALL" to process all files.
|
||||||
|
user_script_mediaExtensions = .mkv,.avi,.divx,.xvid,.mov,.wmv,.mp4,.mpg,.mpeg
|
||||||
|
#Specify the path of the script
|
||||||
|
user_script_path = /media/test/script/script.sh
|
||||||
|
#Specify the argument(s) passed to script, comma separated in order.
|
||||||
|
#for example FP,FN,DN for file path (absolute file name with path), file name, absolute directory name (with path).
|
||||||
|
#So the result is /media/test/script/script.sh FP FN DN. Add other arguments as needed eg -f, -r
|
||||||
|
user_script_param = FN
|
||||||
|
#Specify the successcodes returned by the user script as a comma separated list. Linux default is 0
|
||||||
|
user_script_successCodes = 0
|
||||||
|
#Clean after? Note that delay function is used to prevent possible mistake :) Delay is intended as seconds
|
||||||
|
user_script_clean = 1
|
||||||
|
delay = 120
|
||||||
|
|
||||||
|
[ASCII]
|
||||||
|
#Set convert =1 if you want to convert any "foreign" characters to ASCII before passing to SB/CP etc. Default is disabled (0).
|
||||||
|
convert = 0
|
||||||
|
|
||||||
|
[passwords]
|
||||||
|
# enter the full path to a text file containing passwords to be used for extraction attempts.
|
||||||
|
# In the passwords file, every password should be on a new line
|
||||||
|
PassWordFile =
|
||||||
|
|
||||||
# Logging configuration
|
# Logging configuration
|
||||||
[loggers]
|
[loggers]
|
||||||
|
|
|
@ -1,5 +1,25 @@
|
||||||
Change_LOG / History
|
Change_LOG / History
|
||||||
|
|
||||||
|
V9.0 xx/01/2014
|
||||||
|
|
||||||
|
Impacts NZBs
|
||||||
|
SABnzbd 0.7.17+ now uses 8 arguments, not 7. These scripts now support the extra argument.
|
||||||
|
|
||||||
|
Impacts Torrents
|
||||||
|
Always pause before processing.
|
||||||
|
Moved delete to end of routine, only when succesful process occurs.
|
||||||
|
Don't flatten hp category (in case multi cd album)
|
||||||
|
Added UserScript to be called for un-categorized downloads and other defined categories.
|
||||||
|
Added Torrent Hash to Deluge to assist with movie ID.
|
||||||
|
Added passwords option to attempt extraction od passworded archives.
|
||||||
|
|
||||||
|
Impacts All
|
||||||
|
Added default socket timeout to prevent script hanging when the destination servers don't respond to http requests.
|
||||||
|
Made processing Category Centric as an option for people running multiple versions of SickBeard and CouchPotato etc.
|
||||||
|
Added TPB version of SickBeard processing. This now uses a fork pass-in instead of failed_fork.
|
||||||
|
Added new option to convert files, directories, and parameters to ASCII. To be used if you regularly download "foreign" titles and have problems with CP/SB.
|
||||||
|
Now only parse results from CouchPotato 50 at a time to prevent error with large wanted list.
|
||||||
|
|
||||||
V8.5 05/10/2013
|
V8.5 05/10/2013
|
||||||
|
|
||||||
Impacts Torrents
|
Impacts Torrents
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import ConfigParser
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]),'autoProcess/'))
|
sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]),'autoProcess/'))
|
||||||
import logging
|
import logging
|
||||||
from subprocess import call, Popen, PIPE
|
from subprocess import call, Popen, PIPE
|
||||||
|
@ -111,18 +112,48 @@ def extract(filePath, outputDestination):
|
||||||
# Create outputDestination folder
|
# Create outputDestination folder
|
||||||
create_destination(outputDestination)
|
create_destination(outputDestination)
|
||||||
|
|
||||||
|
config = ConfigParser.ConfigParser()
|
||||||
|
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
|
||||||
|
Logger.info("MAIN: Loading config from %s", configFilename)
|
||||||
|
config.read(configFilename)
|
||||||
|
passwordsfile = config.get("passwords", "PassWordFile")
|
||||||
|
if passwordsfile != "" and os.path.isfile(os.path.normpath(passwordsfile)):
|
||||||
|
passwords = [line.strip() for line in open(os.path.normpath(passwordsfile))]
|
||||||
|
else:
|
||||||
|
passwords = []
|
||||||
|
|
||||||
Logger.info("Extracting %s to %s", filePath, outputDestination)
|
Logger.info("Extracting %s to %s", filePath, outputDestination)
|
||||||
Logger.debug("Extracting %s %s %s", cmd, filePath, outputDestination)
|
Logger.debug("Extracting %s %s %s", cmd, filePath, outputDestination)
|
||||||
pwd = os.getcwd() # Get our Present Working Directory
|
pwd = os.getcwd() # Get our Present Working Directory
|
||||||
os.chdir(outputDestination) # Not all unpack commands accept full paths, so just extract into this directory
|
os.chdir(outputDestination) # Not all unpack commands accept full paths, so just extract into this directory
|
||||||
try: # now works same for nt and *nix
|
try: # now works same for nt and *nix
|
||||||
cmd.append(filePath) # add filePath to final cmd arg.
|
cmd.append(filePath) # add filePath to final cmd arg.
|
||||||
p = Popen(cmd) # should extract files fine.
|
cmd2 = cmd
|
||||||
|
cmd2.append("-p-") # don't prompt for password.
|
||||||
|
p = Popen(cmd2) # should extract files fine.
|
||||||
res = p.wait()
|
res = p.wait()
|
||||||
if res >= 0: # for windows chp returns process id if successful or -1*Error code. Linus returns 0 for successful.
|
if (res >= 0 and os.name == 'nt') or res == 0: # for windows chp returns process id if successful or -1*Error code. Linux returns 0 for successful.
|
||||||
Logger.info("EXTRACTOR: Extraction was successful for %s to %s", filePath, outputDestination)
|
Logger.info("EXTRACTOR: Extraction was successful for %s to %s", filePath, outputDestination)
|
||||||
else:
|
elif len(passwords) > 0:
|
||||||
Logger.error("EXTRACTOR: Extraction failed for %s. 7zip result was %s", filePath, res)
|
Logger.info("EXTRACTOR: Attempting to extract with passwords")
|
||||||
|
pass_success = int(0)
|
||||||
|
for password in passwords:
|
||||||
|
if password == "": # if edited in windows or otherwise if blank lines.
|
||||||
|
continue
|
||||||
|
cmd2 = cmd
|
||||||
|
#append password here.
|
||||||
|
passcmd = "-p" + password
|
||||||
|
cmd2.append(passcmd)
|
||||||
|
p = Popen(cmd2) # should extract files fine.
|
||||||
|
res = p.wait()
|
||||||
|
if (res >= 0 and os.name == 'nt') or res == 0: # for windows chp returns process id if successful or -1*Error code. Linux returns 0 for successful.
|
||||||
|
Logger.info("EXTRACTOR: Extraction was successful for %s to %s using password: %s", filePath, outputDestination, password)
|
||||||
|
pass_success = int(1)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
continue
|
||||||
|
if pass_success == int(0):
|
||||||
|
Logger.error("EXTRACTOR: Extraction failed for %s. 7zip result was %s", filePath, res)
|
||||||
except:
|
except:
|
||||||
Logger.exception("EXTRACTOR: Extraction failed for %s. Could not call command %s", filePath, cmd)
|
Logger.exception("EXTRACTOR: Extraction failed for %s. Could not call command %s", filePath, cmd)
|
||||||
os.chdir(pwd) # Go back to our Original Working Directory
|
os.chdir(pwd) # Go back to our Original Working Directory
|
||||||
|
|
|
@ -205,7 +205,7 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessMovie...")
|
Logger.info("Script triggered from NZBGet, starting autoProcessMovie...")
|
||||||
clientAgent = "nzbget"
|
clientAgent = "nzbget"
|
||||||
result = autoProcessMovie.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status, clientAgent, download_id)
|
result = autoProcessMovie.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status, clientAgent, download_id)
|
||||||
# SABnzbd
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
# 1 The final directory of the job (full path)
|
# 1 The final directory of the job (full path)
|
||||||
|
@ -218,6 +218,20 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessMovie...")
|
Logger.info("Script triggered from SABnzbd, starting autoProcessMovie...")
|
||||||
clientAgent = "sabnzbd"
|
clientAgent = "sabnzbd"
|
||||||
result = autoProcessMovie.process(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent)
|
result = autoProcessMovie.process(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent)
|
||||||
|
# SABnzbd 0.7.17+
|
||||||
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
# SABnzbd argv:
|
||||||
|
# 1 The final directory of the job (full path)
|
||||||
|
# 2 The original name of the NZB file
|
||||||
|
# 3 Clean version of the job name (no path info and ".nzb" removed)
|
||||||
|
# 4 Indexer's report number (if supported)
|
||||||
|
# 5 User-defined category
|
||||||
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
|
# 8 Failure URL
|
||||||
|
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessMovie...")
|
||||||
|
clientAgent = "sabnzbd"
|
||||||
|
result = autoProcessMovie.process(sys.argv[1], sys.argv[2], sys.argv[7], clientAgent)
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client.")
|
Logger.warn("Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessMovie as a manual run...")
|
Logger.info("Running autoProcessMovie as a manual run...")
|
||||||
|
|
|
@ -146,7 +146,7 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessGames...")
|
Logger.info("Script triggered from NZBGet, starting autoProcessGames...")
|
||||||
result = autoProcessGames.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
result = autoProcessGames.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
||||||
# SABnzbd
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
# 1 The final directory of the job (full path)
|
# 1 The final directory of the job (full path)
|
||||||
|
@ -158,6 +158,19 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessGames...")
|
Logger.info("Script triggered from SABnzbd, starting autoProcessGames...")
|
||||||
result = autoProcessGames.process(sys.argv[1], sys.argv[3], sys.argv[7])
|
result = autoProcessGames.process(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
|
# SABnzbd 0.7.17+
|
||||||
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
# SABnzbd argv:
|
||||||
|
# 1 The final directory of the job (full path)
|
||||||
|
# 2 The original name of the NZB file
|
||||||
|
# 3 Clean version of the job name (no path info and ".nzb" removed)
|
||||||
|
# 4 Indexer's report number (if supported)
|
||||||
|
# 5 User-defined category
|
||||||
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
|
# 8 Failure URL
|
||||||
|
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessGames...")
|
||||||
|
result = autoProcessGames.process(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client. Exiting")
|
Logger.warn("Invalid number of arguments received from client. Exiting")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
|
@ -151,7 +151,7 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
# All checks done, now launching the script
|
# All checks done, now launching the script
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessMusic...")
|
Logger.info("Script triggered from NZBGet, starting autoProcessMusic...")
|
||||||
result = autoProcessMusic.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
result = autoProcessMusic.process(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
||||||
# SABnzbd
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
# 1 The final directory of the job (full path)
|
# 1 The final directory of the job (full path)
|
||||||
|
@ -163,6 +163,19 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessMusic...")
|
Logger.info("Script triggered from SABnzbd, starting autoProcessMusic...")
|
||||||
result = autoProcessMusic.process(sys.argv[1], sys.argv[2], sys.argv[7])
|
result = autoProcessMusic.process(sys.argv[1], sys.argv[2], sys.argv[7])
|
||||||
|
# SABnzbd 0.7.17+
|
||||||
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
# SABnzbd argv:
|
||||||
|
# 1 The final directory of the job (full path)
|
||||||
|
# 2 The original name of the NZB file
|
||||||
|
# 3 Clean version of the job name (no path info and ".nzb" removed)
|
||||||
|
# 4 Indexer's report number (if supported)
|
||||||
|
# 5 User-defined category
|
||||||
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
|
# 8 Failue URL
|
||||||
|
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessMusic...")
|
||||||
|
result = autoProcessMusic.process(sys.argv[1], sys.argv[2], sys.argv[7])
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client.")
|
Logger.warn("Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessMusic as a manual run...")
|
Logger.info("Running autoProcessMusic as a manual run...")
|
||||||
|
|
|
@ -92,10 +92,10 @@
|
||||||
# set this if SickBeard and nzbGet are on different systems.
|
# set this if SickBeard and nzbGet are on different systems.
|
||||||
#sbwatch_dir=
|
#sbwatch_dir=
|
||||||
|
|
||||||
# SickBeard uses failed fork (0, 1).
|
# SickBeard fork.
|
||||||
#
|
#
|
||||||
# set to 1 if using the custom "failed fork". Default sickBeard uses 0.
|
# set to default or TPB or failed if using the custom "TPB" or "failed fork".
|
||||||
#sbfailed_fork=0
|
#sbfork=default
|
||||||
|
|
||||||
# SickBeard Delete Failed Downloads (0, 1)
|
# SickBeard Delete Failed Downloads (0, 1)
|
||||||
#
|
#
|
||||||
|
@ -279,11 +279,11 @@ if not os.path.isfile(configFilename):
|
||||||
Logger.info("MAIN: Loading config from %s", configFilename)
|
Logger.info("MAIN: Loading config from %s", configFilename)
|
||||||
config.read(configFilename)
|
config.read(configFilename)
|
||||||
|
|
||||||
cpsCategory = config.get("CouchPotato", "cpsCategory") # movie
|
cpsCategory = (config.get("CouchPotato", "cpsCategory")).split(',') # movie
|
||||||
sbCategory = config.get("SickBeard", "sbCategory") # tv
|
sbCategory = (config.get("SickBeard", "sbCategory")).split(',') # tv
|
||||||
hpCategory = config.get("HeadPhones", "hpCategory") # music
|
hpCategory = (config.get("HeadPhones", "hpCategory")).split(',') # music
|
||||||
mlCategory = config.get("Mylar", "mlCategory") # comics
|
mlCategory = (config.get("Mylar", "mlCategory")).split(',') # comics
|
||||||
gzCategory = config.get("Gamez", "gzCategory") # games
|
gzCategory = (config.get("Gamez", "gzCategory")).split(',') # games
|
||||||
|
|
||||||
# NZBGet V11+
|
# NZBGet V11+
|
||||||
# Check if the script is called from nzbget 11.0 or later
|
# Check if the script is called from nzbget 11.0 or later
|
||||||
|
@ -353,7 +353,7 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
if os.environ.has_key('NZBPR_COUCHPOTATO'):
|
if os.environ.has_key('NZBPR_COUCHPOTATO'):
|
||||||
download_id = os.environ['NZBPR_COUCHPOTATO']
|
download_id = os.environ['NZBPR_COUCHPOTATO']
|
||||||
nzbDir, inputName, inputCategory = (os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBFILENAME'], os.environ['NZBPP_CATEGORY'])
|
nzbDir, inputName, inputCategory = (os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBFILENAME'], os.environ['NZBPP_CATEGORY'])
|
||||||
# SABnzbd
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
# 1 The final directory of the job (full path)
|
# 1 The final directory of the job (full path)
|
||||||
|
@ -366,29 +366,43 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
Logger.info("MAIN: Script triggered from SABnzbd")
|
Logger.info("MAIN: Script triggered from SABnzbd")
|
||||||
clientAgent = "sabnzbd"
|
clientAgent = "sabnzbd"
|
||||||
nzbDir, inputName, status, inputCategory, download_id = (sys.argv[1], sys.argv[2], sys.argv[7], sys.argv[5], '')
|
nzbDir, inputName, status, inputCategory, download_id = (sys.argv[1], sys.argv[2], sys.argv[7], sys.argv[5], '')
|
||||||
|
# SABnzbd 0.7.17+
|
||||||
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
# SABnzbd argv:
|
||||||
|
# 1 The final directory of the job (full path)
|
||||||
|
# 2 The original name of the NZB file
|
||||||
|
# 3 Clean version of the job name (no path info and ".nzb" removed)
|
||||||
|
# 4 Indexer's report number (if supported)
|
||||||
|
# 5 User-defined category
|
||||||
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
|
# 8 Failure URL
|
||||||
|
Logger.info("MAIN: Script triggered from SABnzbd 0.7.17+")
|
||||||
|
clientAgent = "sabnzbd"
|
||||||
|
nzbDir, inputName, status, inputCategory, download_id = (sys.argv[1], sys.argv[2], sys.argv[7], sys.argv[5], '')
|
||||||
else: # only CPS supports this manual run for now.
|
else: # only CPS supports this manual run for now.
|
||||||
Logger.warn("MAIN: Invalid number of arguments received from client.")
|
Logger.warn("MAIN: Invalid number of arguments received from client.")
|
||||||
Logger.info("MAIN: Running autoProcessMovie as a manual run...")
|
Logger.info("MAIN: Running autoProcessMovie as a manual run...")
|
||||||
clientAgent = "manual"
|
clientAgent = "manual"
|
||||||
nzbDir, inputName, status, inputCategory, download_id = ('Manual Run', 'Manual Run', 0, cpsCategory, '')
|
nzbDir, inputName, status, inputCategory, download_id = ('Manual Run', 'Manual Run', 0, cpsCategory, '')
|
||||||
|
|
||||||
if inputCategory == cpsCategory:
|
if inputCategory in cpsCategory:
|
||||||
Logger.info("MAIN: Calling CouchPotatoServer to post-process: %s", inputName)
|
Logger.info("MAIN: Calling CouchPotatoServer to post-process: %s", inputName)
|
||||||
result = autoProcessMovie.process(nzbDir, inputName, status, clientAgent, download_id)
|
result = autoProcessMovie.process(nzbDir, inputName, status, clientAgent, download_id, inputCategory)
|
||||||
elif inputCategory == sbCategory:
|
elif inputCategory in sbCategory:
|
||||||
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Sick-Beard to post-process: %s", inputName)
|
||||||
result = autoProcessTV.processEpisode(nzbDir, inputName, status)
|
result = autoProcessTV.processEpisode(nzbDir, inputName, status, inputCategory)
|
||||||
elif inputCategory == hpCategory:
|
elif inputCategory in hpCategory:
|
||||||
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
Logger.info("MAIN: Calling HeadPhones to post-process: %s", inputName)
|
||||||
result = autoProcessMusic.process(nzbDir, inputName, status)
|
result = autoProcessMusic.process(nzbDir, inputName, status, inputCategory)
|
||||||
elif inputCategory == mlCategory:
|
elif inputCategory in mlCategory:
|
||||||
Logger.info("MAIN: Calling Mylar to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Mylar to post-process: %s", inputName)
|
||||||
result = autoProcessComics.processEpisode(nzbDir, inputName, status)
|
result = autoProcessComics.processEpisode(nzbDir, inputName, status, inputCategory)
|
||||||
elif inputCategory == gzCategory:
|
elif inputCategory in gzCategory:
|
||||||
Logger.info("MAIN: Calling Gamez to post-process: %s", inputName)
|
Logger.info("MAIN: Calling Gamez to post-process: %s", inputName)
|
||||||
result = autoProcessGames.process(nzbDir, inputName, status)
|
result = autoProcessGames.process(nzbDir, inputName, status, inputCategory)
|
||||||
else:
|
else:
|
||||||
Logger.warning("MAIN: The download category %s does not match any category defines in autoProcessMedia.cfg. Exiting.", inputCategory)
|
Logger.warning("MAIN: The download category %s does not match any category defined in autoProcessMedia.cfg. Exiting.", inputCategory)
|
||||||
sys.exit(POSTPROCESS_ERROR)
|
sys.exit(POSTPROCESS_ERROR)
|
||||||
|
|
||||||
if result == 0:
|
if result == 0:
|
||||||
|
|
|
@ -149,7 +149,7 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessComics...")
|
Logger.info("Script triggered from NZBGet, starting autoProcessComics...")
|
||||||
result = autoProcessComics.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
result = autoProcessComics.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'], status)
|
||||||
# SABnzbd
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
# 1 The final directory of the job (full path)
|
# 1 The final directory of the job (full path)
|
||||||
|
@ -161,6 +161,19 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessComics...")
|
Logger.info("Script triggered from SABnzbd, starting autoProcessComics...")
|
||||||
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[3], sys.argv[7])
|
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
|
# SABnzbd 0.7.17+
|
||||||
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
# SABnzbd argv:
|
||||||
|
# 1 The final directory of the job (full path)
|
||||||
|
# 2 The original name of the NZB file
|
||||||
|
# 3 Clean version of the job name (no path info and ".nzb" removed)
|
||||||
|
# 4 Indexer's report number (if supported)
|
||||||
|
# 5 User-defined category
|
||||||
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
|
# 8 Failure URL
|
||||||
|
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessComics...")
|
||||||
|
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[3], sys.argv[7])
|
||||||
else:
|
else:
|
||||||
Logger.warn("Invalid number of arguments received from client.")
|
Logger.warn("Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessComics as a manual run...")
|
Logger.info("Running autoProcessComics as a manual run...")
|
||||||
|
|
|
@ -46,10 +46,10 @@
|
||||||
# set this if SickBeard and nzbGet are on different systems.
|
# set this if SickBeard and nzbGet are on different systems.
|
||||||
#sbwatch_dir=
|
#sbwatch_dir=
|
||||||
|
|
||||||
# SickBeard uses failed fork (0, 1).
|
# SickBeard fork.
|
||||||
#
|
#
|
||||||
# set to 1 if using the custom "failed fork". Default sickBeard uses 0.
|
# set to default or TPB or failed if using the custom "TPB" or "failed fork".
|
||||||
#sbfailed_fork=0
|
#sbfork=default
|
||||||
|
|
||||||
# SickBeard Delete Failed Downloads (0, 1).
|
# SickBeard Delete Failed Downloads (0, 1).
|
||||||
#
|
#
|
||||||
|
@ -198,7 +198,7 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
|
||||||
# All checks done, now launching the script.
|
# All checks done, now launching the script.
|
||||||
Logger.info("Script triggered from NZBGet, starting autoProcessTV...")
|
Logger.info("Script triggered from NZBGet, starting autoProcessTV...")
|
||||||
result = autoProcessTV.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBFILENAME'], status)
|
result = autoProcessTV.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBFILENAME'], status)
|
||||||
# SABnzbd
|
# SABnzbd Pre 0.7.17
|
||||||
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# SABnzbd argv:
|
# SABnzbd argv:
|
||||||
# 1 The final directory of the job (full path)
|
# 1 The final directory of the job (full path)
|
||||||
|
@ -210,6 +210,19 @@ elif len(sys.argv) == SABNZB_NO_OF_ARGUMENTS:
|
||||||
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
Logger.info("Script triggered from SABnzbd, starting autoProcessTV...")
|
Logger.info("Script triggered from SABnzbd, starting autoProcessTV...")
|
||||||
result = autoProcessTV.processEpisode(sys.argv[1], sys.argv[2], sys.argv[7])
|
result = autoProcessTV.processEpisode(sys.argv[1], sys.argv[2], sys.argv[7])
|
||||||
|
# SABnzbd 0.7.17+
|
||||||
|
elif len(sys.argv) >= SABNZB_0717_NO_OF_ARGUMENTS:
|
||||||
|
# SABnzbd argv:
|
||||||
|
# 1 The final directory of the job (full path)
|
||||||
|
# 2 The original name of the NZB file
|
||||||
|
# 3 Clean version of the job name (no path info and ".nzb" removed)
|
||||||
|
# 4 Indexer's report number (if supported)
|
||||||
|
# 5 User-defined category
|
||||||
|
# 6 Group that the NZB was posted in e.g. alt.binaries.x
|
||||||
|
# 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
|
||||||
|
# 8 Failure URL
|
||||||
|
Logger.info("Script triggered from SABnzbd 0.7.17+, starting autoProcessTV...")
|
||||||
|
result = autoProcessTV.processEpisode(sys.argv[1], sys.argv[2], sys.argv[7])
|
||||||
else:
|
else:
|
||||||
Logger.debug("Invalid number of arguments received from client.")
|
Logger.debug("Invalid number of arguments received from client.")
|
||||||
Logger.info("Running autoProcessTV as a manual run...")
|
Logger.info("Running autoProcessTV as a manual run...")
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue