Ouch. Almost lost these :(

clinton-hall 2013-03-13 23:15:34 -07:00
parent 608a0f1eaa
commit cd470ae1ad
10 changed files with 1303 additions and 0 deletions

autoProcess/Transcoder.py Normal file

@@ -0,0 +1,93 @@
import sys
import os
import ConfigParser
import logging
from subprocess import call
Logger = logging.getLogger()
def Transcode_directory(dirName):
if os.name == 'nt':
ffmpeg = os.path.join(os.path.dirname(sys.argv[0]), 'ffmpeg\\bin\\ffmpeg.exe') # note, will need to package in this dir.
if not os.path.isfile(ffmpeg): # problem
Logger.error("ffmpeg not found. ffmpeg needs to be located at: %s", ffmpeg)
Logger.info("Cannot transcode files in folder %s", dirName)
return 1 # failure
else:
ffmpeg = 'ffmpeg' # assume ffmpeg is available on the path on non-Windows systems
if call(['which', ffmpeg]): # ffmpeg was not found; try to install it.
res = call([os.path.join(os.path.dirname(sys.argv[0]),'getffmpeg.sh')])
if res or call(['which', ffmpeg]): # did not install or ffmpeg still not found.
Logger.error("Failed to install ffmpeg. Please install manually")
Logger.info("Cannot transcode files in folder %s", dirName)
return 1 # failure
config = ConfigParser.ConfigParser()
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
Logger.info("Loading config from %s", configFilename)
if not os.path.isfile(configFilename):
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
return 1 # failure
config.read(configFilename)
mediaContainer = (config.get("Extensions", "mediaExtensions")).split(',')
duplicate = int(config.get("Transcoder", "duplicate"))
ignoreExtensions = (config.get("Transcoder", "ignoreExtensions")).split(',')
outputVideoExtension = config.get("Transcoder", "outputVideoExtension")
outputVideoCodec = config.get("Transcoder", "outputVideoCodec")
outputVideoFramerate = config.get("Transcoder", "outputVideoFramerate")
outputVideoBitrate = config.get("Transcoder", "outputVideoBitrate")
outputAudioCodec = config.get("Transcoder", "outputAudioCodec")
outputAudioBitrate = config.get("Transcoder", "outputAudioBitrate")
Logger.info("Checking for files to be transcoded")
final_result = 0 # initialize as successful
for dirpath, dirnames, filenames in os.walk(dirName):
for file in filenames:
filePath = os.path.join(dirpath, file)
name, ext = os.path.splitext(filePath)
if ext in mediaContainer: # If the file is a video file
if ext in ignoreExtensions:
Logger.info("No need to transcode video type %s", ext)
continue
newVideoExtension = outputVideoExtension
if ext == newVideoExtension: # we need to change the name to prevent overwriting itself.
newVideoExtension = '-transcoded' + newVideoExtension # adds '-transcoded.ext' for this file only, so later files are unaffected.
newfilePath = os.path.normpath(name + newVideoExtension)
command = [ffmpeg, '-i', filePath]
if outputVideoCodec:
command.append('-c:v')
command.append(outputVideoCodec)
if outputVideoFramerate:
command.append('-r')
command.append(outputVideoFramerate)
if outputVideoBitrate:
command.append('-b:v')
command.append(outputVideoBitrate)
if outputAudioCodec:
command.append('-c:a')
command.append(outputAudioCodec)
if outputAudioBitrate:
command.append('-b:a')
command.append(outputAudioBitrate)
command.append(newfilePath)
Logger.debug("Transcoding video %s to %s", filePath, newfilePath)
result = 1 # set result to failed in case call fails.
try:
result = call(command)
except Exception, e:
Logger.error("Transcoding of video %s failed due to: %s", filePath, str(e))
if result == 0:
Logger.info("Transcoding of video %s to %s succeded", filePath, newfilePath)
if duplicate == 0: # we get rid of the original file
os.unlink(filePath)
else:
Logger.error("Transcoding of video %s to %s failed", filePath, newfilePath)
# this will be 0 (successful) if all are successful, else a positive integer indicating failure.
final_result = final_result + result
return final_result
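
For illustration, a minimal sketch of the ffmpeg command list that Transcode_directory assembles for a single file; the path and codec settings below are hypothetical stand-ins for the [Transcoder] values read from autoProcessMedia.cfg:

import os
# Hypothetical input file and settings, for illustration only.
ffmpeg = 'ffmpeg'
filePath = '/downloads/Some.Movie.2012/some.movie.2012.avi'
name, ext = os.path.splitext(filePath)
newfilePath = os.path.normpath(name + '.mp4')  # outputVideoExtension = .mp4
command = [ffmpeg, '-i', filePath,
           '-c:v', 'libx264', '-r', '25', '-b:v', '800k',  # outputVideoCodec / Framerate / Bitrate
           '-c:a', 'aac', '-b:a', '128k',                  # outputAudioCodec / Bitrate
           newfilePath]
# call(command) returns ffmpeg's exit code; 0 means success and is added to final_result.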

@@ -0,0 +1,92 @@
import sys
import urllib
import os.path
import time
import ConfigParser
import logging
from nzbToMediaEnv import *
from nzbToMediaSceneExceptions import process_all_exceptions
Logger = logging.getLogger()
class AuthURLOpener(urllib.FancyURLopener):
def __init__(self, user, pw):
self.username = user
self.password = pw
self.numTries = 0
urllib.FancyURLopener.__init__(self)
def prompt_user_passwd(self, host, realm):
if self.numTries == 0:
self.numTries = 1
return (self.username, self.password)
else:
return ('', '')
def openit(self, url):
self.numTries = 0
return urllib.FancyURLopener.open(self, url)
def processEpisode(dirName, nzbName=None, status=0):
config = ConfigParser.ConfigParser()
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
Logger.info("Loading config from %s", configFilename)
if not os.path.isfile(configFilename):
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
return 1 # failure
try:
fp = open(configFilename, "r")
config.readfp(fp)
fp.close()
except IOError, e:
Logger.error("Could not read configuration file: %s", str(e))
return 1 # failure
host = config.get("Mylar", "host")
port = config.get("Mylar", "port")
username = config.get("Mylar", "username")
password = config.get("Mylar", "password")
try:
ssl = int(config.get("Mylar", "ssl"))
except (ConfigParser.NoOptionError, ValueError):
ssl = 0
try:
web_root = config.get("Mylar", "web_root")
except ConfigParser.NoOptionError:
web_root = ""
params = {}
params['nzb_folder'] = dirName
if nzbName != None:
params['nzb_name'] = nzbName
myOpener = AuthURLOpener(username, password)
if ssl:
protocol = "https://"
else:
protocol = "http://"
url = protocol + host + ":" + port + web_root + "/post_process?" + urllib.urlencode(params)
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return 1 # failure
result = urlObj.readlines()
for line in result:
Logger.info("%s", line)
time.sleep(60) #wait 1 minute for now... need to see just what gets logged and how long it takes to process
return 0 # Success
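
For illustration, the post_process request assembled above, with hypothetical host and port in place of the [Mylar] values from autoProcessMedia.cfg:

import urllib
# Hypothetical values, for illustration only.
params = {'nzb_folder': '/downloads/comics/Some.Comic.001', 'nzb_name': 'Some.Comic.001'}
url = "http://" + "localhost" + ":" + "8090" + "/post_process?" + urllib.urlencode(params)
# roughly: http://localhost:8090/post_process?nzb_name=Some.Comic.001&nzb_folder=... (parameter order may vary)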

@@ -0,0 +1,94 @@
import sys
import urllib
import os
import shutil
import ConfigParser
import datetime
import time
import json
import logging
from nzbToMediaEnv import *
Logger = logging.getLogger()
class AuthURLOpener(urllib.FancyURLopener):
def __init__(self, user, pw):
self.username = user
self.password = pw
self.numTries = 0
urllib.FancyURLopener.__init__(self)
def prompt_user_passwd(self, host, realm):
if self.numTries == 0:
self.numTries = 1
return (self.username, self.password)
else:
return ('', '')
def openit(self, url):
self.numTries = 0
return urllib.FancyURLopener.open(self, url)
def process(dirName, nzbName=None, status=0):
status = int(status)
config = ConfigParser.ConfigParser()
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
Logger.info("Loading config from %s", configFilename)
if not os.path.isfile(configFilename):
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
return 1 # failure
config.read(configFilename)
host = config.get("Gamez", "host")
port = config.get("Gamez", "port")
username = config.get("Gamez", "username")
password = config.get("Gamez", "password")
apikey = config.get("Gamez", "apikey")
try:
ssl = int(config.get("Gamez", "ssl"))
except (ConfigParser.NoOptionError, ValueError):
ssl = 0
try:
web_root = config.get("Gamez", "web_root")
except ConfigParser.NoOptionError:
web_root = ""
myOpener = AuthURLOpener(username, password)
if ssl:
protocol = "https://"
else:
protocol = "http://"
baseURL = protocol + host + ":" + port + web_root + "/api?api_key=" + apikey + "&mode="
fields = nzbName.split("-")
gamezID = fields[0].replace("[","").replace("]","").replace(" ","")
downloadStatus = 'Wanted'
if status == 0:
downloadStatus = 'Downloaded'
url = baseURL + "UPDATEREQUESTEDSTATUS&db_id=" + gamezID + "&status=" + downloadStatus
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return 1 # failure
result = json.load(urlObj)
Logger.info("Gamez returned %s", result)
if result['success']:
Logger.info("Status for %s has been set to %s in Gamez", gamezID, downloadStatus)
return 0 # Success
else:
Logger.error("Status for %s has NOT been updated in Gamez", gamezID)
return 1 # failure
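
For illustration, a sketch of how the Gamez ID is pulled out of the nzb name and substituted into the API call above; the release name, host and apikey are hypothetical:

nzbName = "[42]- Some.Game.Title-RELOADED"  # hypothetical release name
fields = nzbName.split("-")
gamezID = fields[0].replace("[", "").replace("]", "").replace(" ", "")  # -> "42"
baseURL = "http://localhost:8085/api?api_key=1234567890abcdef&mode="    # hypothetical host and apikey
url = baseURL + "UPDATEREQUESTEDSTATUS&db_id=" + gamezID + "&status=" + "Downloaded"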

@@ -0,0 +1,244 @@
import sys
import urllib
import os
import shutil
import ConfigParser
import datetime
import time
import json
import logging
import Transcoder
from nzbToMediaEnv import *
from nzbToMediaSceneExceptions import process_all_exceptions
Logger = logging.getLogger()
class AuthURLOpener(urllib.FancyURLopener):
def __init__(self, user, pw):
self.username = user
self.password = pw
self.numTries = 0
urllib.FancyURLopener.__init__(self)
def prompt_user_passwd(self, host, realm):
if self.numTries == 0:
self.numTries = 1
return (self.username, self.password)
else:
return ('', '')
def openit(self, url):
self.numTries = 0
return urllib.FancyURLopener.open(self, url)
def get_imdb(nzbName, dirName):
a=nzbName.find('.cp(')+4 # search for the .cp(tt tag in nzbName
b=nzbName[a:].find(')')+a
imdbid=nzbName[a:b]
if imdbid:
Logger.info("Found movie id %s in name", imdbid)
return imdbid
a=dirName.find('.cp(')+4 # search for the .cp(tt tag in dirName
b=dirName[a:].find(')')+a
imdbid=dirName[a:b]
if imdbid:
Logger.info("Found movie id %s in directory", imdbid)
return imdbid
else:
Logger.warning("Could not find an imdb id in directory or name")
Logger.info("Postprocessing will continue, but the movie may not be identified correctly by CouchPotato")
return ""
def get_movie_info(myOpener, baseURL, imdbid):
if not imdbid:
return ""
url = baseURL + "movie.list"
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return ""
movie_id = ""
result = json.load(urlObj)
movieid = [item["id"] for item in result["movies"]]
library = [item["library"]["identifier"] for item in result["movies"]]
for index in range(len(movieid)):
if library[index] == imdbid:
movie_id = str(movieid[index])
Logger.info("Found movie id %s in CPS database for movie %s", movie_id, imdbid)
break
return movie_id
def get_status(myOpener, baseURL, movie_id):
if not movie_id:
return ""
url = baseURL + "movie.get/?id=" + str(movie_id)
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return ""
result = json.load(urlObj)
try:
movie_status = result["movie"]["status"]["identifier"]
Logger.debug("This movie is marked as status %s in CouchPotatoServer", movie_status)
return movie_status
except Exception, e: # index out of range/doesn't exist?
Logger.error("Could not find a status for this movie due to: %s", str(e))
return ""
def process(dirName, nzbName=None, status=0):
status = int(status)
config = ConfigParser.ConfigParser()
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
Logger.info("Loading config from %s", configFilename)
if not os.path.isfile(configFilename):
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
return 1 # failure
config.read(configFilename)
host = config.get("CouchPotato", "host")
port = config.get("CouchPotato", "port")
username = config.get("CouchPotato", "username")
password = config.get("CouchPotato", "password")
apikey = config.get("CouchPotato", "apikey")
delay = float(config.get("CouchPotato", "delay"))
method = config.get("CouchPotato", "method")
delete_failed = int(config.get("CouchPotato", "delete_failed"))
try:
ssl = int(config.get("CouchPotato", "ssl"))
except (ConfigParser.NoOptionError, ValueError):
ssl = 0
try:
web_root = config.get("CouchPotato", "web_root")
except ConfigParser.NoOptionError:
web_root = ""
try:
transcode = int(config.get("Transcoder", "transcode"))
except (ConfigParser.NoOptionError, ValueError):
transcode = 0
myOpener = AuthURLOpener(username, password)
nzbName = str(nzbName) # make sure it is a string
imdbid = get_imdb(nzbName, dirName)
if ssl:
protocol = "https://"
else:
protocol = "http://"
# don't delay when we are calling this script manually.
if nzbName == "Manual Run":
delay = 0
baseURL = protocol + host + ":" + port + web_root + "/api/" + apikey + "/"
movie_id = get_movie_info(myOpener, baseURL, imdbid) # get the CPS database movie id for this movie.
initial_status = get_status(myOpener, baseURL, movie_id)
process_all_exceptions(nzbName.lower(), dirName)
if status == 0:
if transcode == 1:
result = Transcoder.Transcode_directory(dirName)
if result == 0:
Logger.debug("Transcoding succeeded for files in %s", dirName)
else:
Logger.warning("Transcoding failed for files in %s", dirName)
if method == "manage":
command = "manage.update"
else:
command = "renamer.scan"
url = baseURL + command
Logger.info("Waiting for %s seconds to allow CPS to process newly extracted files", str(delay))
time.sleep(delay)
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return 1 # failure
result = json.load(urlObj)
Logger.info("CouchPotatoServer returned %s", result)
if result['success']:
Logger.info("%s started on CouchPotatoServer for %s", command, nzbName)
else:
Logger.error("%s has NOT started on CouchPotatoServer for %s. Exiting", command, nzbName)
return 1 # failure
else:
Logger.info("Download of %s has failed.", nzbName)
Logger.info("Trying to re-cue the next highest ranked release")
if not movie_id:
Logger.warning("Cound not find a movie in the database for release %s", nzbName)
Logger.warning("Please manually ignore this release and refresh the wanted movie")
Logger.error("Exiting autoProcessMovie script")
return 1 # failure
url = baseURL + "searcher.try_next/?id=" + movie_id
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return 1 # failure
result = urlObj.readlines()
for line in result:
Logger.info("%s", line)
Logger.info("Movie %s set to try the next best release on CouchPotatoServer", movie_id)
if delete_failed:
Logger.info("Deleting failed files and folder %s", dirName)
try:
shutil.rmtree(dirName)
except Exception, e:
Logger.error("Unable to delete folder %s due to: %s", dirName, str(e))
return 0 # success
if nzbName == "Manual Run":
return 0 # success
# we will now check to see if CPS has finished renaming before returning to TorrentToMedia and unpausing.
start = datetime.datetime.now() # set time for timeout
while (datetime.datetime.now() - start) < datetime.timedelta(minutes=2): # only wait 2 minutes, then return to TorrentToMedia
movie_status = get_status(myOpener, baseURL, movie_id) # get the current status of this movie.
if movie_status != initial_status: # Something has changed. CPS must have processed this movie.
Logger.info("SUCCESS: This movie is now marked as status %s in CouchPotatoServer", movie_status)
return 0 # success
time.sleep(20) # Just stop this looping infinitely and hogging resources for 2 minutes ;)
else: # The status hasn't changed. We have waited 2 minutes, which is more than enough. uTorrent can resume seeding now.
Logger.warning("The movie does not appear to have changed status after 2 minutes. Please check CouchPotato Logs")
return 1 # failure
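
For illustration, the .cp(tt...) tag parsing that get_imdb performs on a release name; the name below is hypothetical:

nzbName = "Some.Movie.2012.720p.BluRay.x264-GRP.cp(tt1234567)"  # hypothetical
a = nzbName.find('.cp(') + 4   # index just past '.cp('
b = nzbName[a:].find(')') + a  # index of the closing ')'
imdbid = nzbName[a:b]          # -> 'tt1234567'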

@@ -0,0 +1,116 @@
import sys
import urllib
import os
import shutil
import ConfigParser
import datetime
import time
import json
import logging
from nzbToMediaEnv import *
Logger = logging.getLogger()
class AuthURLOpener(urllib.FancyURLopener):
def __init__(self, user, pw):
self.username = user
self.password = pw
self.numTries = 0
urllib.FancyURLopener.__init__(self)
def prompt_user_passwd(self, host, realm):
if self.numTries == 0:
self.numTries = 1
return (self.username, self.password)
else:
return ('', '')
def openit(self, url):
self.numTries = 0
return urllib.FancyURLopener.open(self, url)
def process(dirName, nzbName=None, status=0):
status = int(status)
config = ConfigParser.ConfigParser()
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
Logger.info("Loading config from %s", configFilename)
if not os.path.isfile(configFilename):
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
return 1 # failure
config.read(configFilename)
host = config.get("HeadPhones", "host")
port = config.get("HeadPhones", "port")
username = config.get("HeadPhones", "username")
password = config.get("HeadPhones", "password")
apikey = config.get("HeadPhones", "apikey")
delay = float(config.get("HeadPhones", "delay"))
try:
ssl = int(config.get("HeadPhones", "ssl"))
except (ConfigParser.NoOptionError, ValueError):
ssl = 0
try:
web_root = config.get("HeadPhones", "web_root")
except ConfigParser.NoOptionError:
web_root = ""
myOpener = AuthURLOpener(username, password)
if ssl:
protocol = "https://"
else:
protocol = "http://"
# don't delay when we are calling this script manually.
if nzbName == "Manual Run":
delay = 0
baseURL = protocol + host + ":" + port + web_root + "/api?apikey=" + apikey + "&cmd="
if status == 0:
command = "forceProcess"
url = baseURL + command
Logger.info("Waiting for %s seconds to allow HeadPhones to process newly extracted files", str(delay))
time.sleep(delay)
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return 1 # failure
result = json.load(urlObj)
Logger.info("HeaPhones returned %s", result)
if result == "OK":
Logger.info("%s started on HeadPhones for %s", command, nzbName)
else:
Logger.error("%s has NOT started on HeadPhones for %s. Exiting", command, nzbName)
return 1 # failure
else:
Logger.info("The download failed. Nothing to process")
return 0 # Success (as far as this script is concerned)
if nzbName == "Manual Run":
return 0 # success
# we will now wait 1 minute for this album to be processed before returning to TorrentToMedia and unpausing.
## Hopefully we can use a "getHistory" check in here to confirm processing complete...
start = datetime.datetime.now() # set time for timeout
while (datetime.datetime.now() - start) < datetime.timedelta(minutes=1): # only wait 1 minute, then return to TorrentToMedia
time.sleep(20) # Just stop this looping infinitely and hogging resources for 1 minute ;)
else: # We have waited 1 minute, which should be more than enough. uTorrent can resume seeding now.
Logger.info("This album should have completed processing. Please check HeadPhones Logs")
# Logger.warning("The album does not appear to have changed status after 2 minutes. Please check HeadPhones Logs")
# return 1 # failure
return 0 # success for now.
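
For illustration, the forceProcess request assembled above, with a hypothetical host, port and apikey and an empty web_root:

apikey = "1234567890abcdef"  # hypothetical
baseURL = "http://" + "localhost" + ":" + "8181" + "/api?apikey=" + apikey + "&cmd="
url = baseURL + "forceProcess"
# -> http://localhost:8181/api?apikey=1234567890abcdef&cmd=forceProcess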

@@ -0,0 +1,166 @@
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import sys
import urllib
import os
import ConfigParser
import logging
import Transcoder
from nzbToMediaEnv import *
from nzbToMediaSceneExceptions import process_all_exceptions
Logger = logging.getLogger()
class AuthURLOpener(urllib.FancyURLopener):
def __init__(self, user, pw):
self.username = user
self.password = pw
self.numTries = 0
urllib.FancyURLopener.__init__(self)
def prompt_user_passwd(self, host, realm):
if self.numTries == 0:
self.numTries = 1
return (self.username, self.password)
else:
return ('', '')
def openit(self, url):
self.numTries = 0
return urllib.FancyURLopener.open(self, url)
def processEpisode(dirName, nzbName=None, failed=False):
status = int(failed)
config = ConfigParser.ConfigParser()
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
Logger.info("Loading config from %s", configFilename)
if not os.path.isfile(configFilename):
Logger.error("You need an autoProcessMedia.cfg file - did you rename and edit the .sample?")
return 1 # failure
try:
fp = open(configFilename, "r")
config.readfp(fp)
fp.close()
except IOError, e:
Logger.error("Could not read configuration file: %s", str(e))
return 1 # failure
watch_dir = ""
host = config.get("SickBeard", "host")
port = config.get("SickBeard", "port")
username = config.get("SickBeard", "username")
password = config.get("SickBeard", "password")
try:
ssl = int(config.get("SickBeard", "ssl"))
except (ConfigParser.NoOptionError, ValueError):
ssl = 0
try:
web_root = config.get("SickBeard", "web_root")
except ConfigParser.NoOptionError:
web_root = ""
try:
watch_dir = config.get("SickBeard", "watch_dir")
except ConfigParser.NoOptionError:
watch_dir = ""
try:
failed_fork = int(config.get("SickBeard", "failed_fork"))
except (ConfigParser.NoOptionError, ValueError):
failed_fork = 0
try:
transcode = int(config.get("Transcoder", "transcode"))
except (ConfigParser.NoOptionError, ValueError):
transcode = 0
process_all_exceptions(nzbName.lower(), dirName)
#allows manual call of postprocess script if we have specified a watch_dir. Check that here.
if nzbName == "Manual Run" and watch_dir == "":
Logger.error("In order to run this script manually you must specify a watch_dir in autoProcessTV.cfg")
return 1 # failure
#allows us to specify the default watch directory and call the postprocessing on another PC with a different directory structure.
if watch_dir != "":
dirName = watch_dir
params = {}
params['quiet'] = 1
# if you have specified that you are using the development branch from the fork https://github.com/Tolstyak/Sick-Beard.git
if failed_fork:
params['dirName'] = dirName
if nzbName != None:
params['nzbName'] = nzbName
params['failed'] = failed
if status == 0:
Logger.info("The download succeeded. Sending process request to SickBeard's failed branch")
else:
Logger.info("The download failed. Sending 'failed' process request to SickBeard's failed branch")
# this is our default behaviour to work with the standard Master branch of SickBeard
else:
params['dir'] = dirName
if nzbName != None:
params['nzbName'] = nzbName
# the standard Master branch of SickBeard cannot process failed downloads. So exit here.
if status == 0:
Logger.info("The download succeeded. Sending process request to SickBeard")
else:
Logger.info("The download failed. Nothing to process")
return 0 # Success (as far as this script is concerned)
if status == 0 and transcode == 1: # only transcode successful downloads
result = Transcoder.Transcode_directory(dirName)
if result == 0:
Logger.debug("Transcoding succeeded for files in %s", dirName)
else:
Logger.warning("Transcoding failed for files in %s", dirName)
myOpener = AuthURLOpener(username, password)
if ssl:
protocol = "https://"
else:
protocol = "http://"
url = protocol + host + ":" + port + web_root + "/home/postprocess/processEpisode?" + urllib.urlencode(params)
Logger.debug("Opening URL: %s", url)
try:
urlObj = myOpener.openit(url)
except IOError, e:
Logger.error("Unable to open URL: %s", str(e))
return 1 # failure
result = urlObj.readlines()
for line in result:
Logger.info("%s", line)
return 0 # Success
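
For illustration, the two request shapes processEpisode builds, for the failed-download fork and for the standard master branch; host, port and paths are hypothetical and web_root is assumed empty:

import urllib
# failed_fork = 1 (Tolstyak's failed-download branch)
params = {'quiet': 1, 'dirName': '/downloads/tv/Show.S01E01', 'nzbName': 'Show.S01E01.nzb', 'failed': 0}
url = "http://localhost:8081" + "/home/postprocess/processEpisode?" + urllib.urlencode(params)
# failed_fork = 0 (standard master branch)
params = {'quiet': 1, 'dir': '/downloads/tv/Show.S01E01', 'nzbName': 'Show.S01E01.nzb'}
url = "http://localhost:8081" + "/home/postprocess/processEpisode?" + urllib.urlencode(params)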

autoProcess/migratecfg.py Normal file

@@ -0,0 +1,191 @@
#System imports
import ConfigParser
import sys
import os
def migrate():
confignew = ConfigParser.ConfigParser()
confignew.optionxform = str
configFilenamenew = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg.sample")
confignew.read(configFilenamenew)
configold = ConfigParser.ConfigParser()
configold.optionxform = str
section = "CouchPotato"
original = []
configFilenameold = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
if not os.path.isfile(configFilenameold): # lets look back for an older version.
configFilenameold = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMovie.cfg")
if not os.path.isfile(configFilenameold): # no config available
configFilenameold = ""
if configFilenameold: # read our old config.
configold.read(configFilenameold)
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
if option == "category": # change this old format
option = "cpsCategory"
if option == "outputDirectory": # move this to new location format
value = os.path.split(os.path.normpath(value))[0]
confignew.set("Torrent", option, value)
continue
confignew.set(section, option, value)
section = "SickBeard"
original = []
configFilenameold = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
if not os.path.isfile(configFilenameold): # lets look back for an older version.
configFilenameold = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessTV.cfg")
if not os.path.isfile(configFilenameold): # no config available
configFilenameold = ""
if configFilenameold: # read our old config.
configold.read(configFilenameold)
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
if option == "category": # change this old format
option = "sbCategory"
if option == "outputDirectory": # move this to new location format
value = os.path.split(os.path.normpath(value))[0]
confignew.set("Torrent", option, value)
continue
confignew.set(section, option, value)
section = "HeadPhones"
original = []
configFilenameold = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg")
if not os.path.isfile(configFilenameold):
configFilenameold = ""
if configFilenameold: # read our old config.
configold.read(configFilenameold)
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "Mylar"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "Gamez"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "Torrent"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
if option in ["compressedExtensions", "mediaExtensions", "metaExtensions"]:
section = "Extensions" # these were moved
confignew.set(section, option, value)
section = "Torrent" # reset in case extensions out of order.
section = "Transcoder"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "loggers"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "handlers"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "formatters"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "logger_root"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "handler_console"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
section = "formatter_generic"
original = []
try:
original = configold.items(section)
except:
pass
for item in original:
option, value = item
confignew.set(section, option, value)
# writing our configuration file to 'autoProcessMedia.cfg.sample'
with open(configFilenamenew, 'wb') as configFile:
confignew.write(configFile)
# create a backup of our old config
backupname = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessMedia.cfg.old")
if os.path.isfile(backupname): # remove older backups
os.unlink(backupname)
os.rename(configFilenameold, backupname)
# rename our newly edited autoProcessMedia.cfg.sample to autoProcessMedia.cfg
os.rename(configFilenamenew, configFilenameold)
return
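
The section-by-section copying above repeats a single pattern; for illustration, a condensed sketch of that pattern, assuming the .cfg.sample template already contains each section:

import ConfigParser

configold = ConfigParser.ConfigParser()
configold.optionxform = str  # preserve option case
confignew = ConfigParser.ConfigParser()
confignew.optionxform = str
configold.read("autoProcessMedia.cfg")          # old settings, if present
confignew.read("autoProcessMedia.cfg.sample")   # new template
for section in ["CouchPotato", "SickBeard", "HeadPhones", "Mylar", "Gamez", "Torrent", "Transcoder"]:
    try:
        original = configold.items(section)
    except ConfigParser.NoSectionError:
        continue  # section not present in the old config
    for option, value in original:
        confignew.set(section, option, value)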

@@ -0,0 +1,10 @@
# Make things easy and less error prone by centralising all common values
# Global Constants
VERSION = 'V6.1'
# Constants pertinent to SabNzb
SABNZB_NO_OF_ARGUMENTS = 8
# Constants pertinent to NzbGet
NZBGET_NO_OF_ARGUMENTS = 5

@@ -0,0 +1,35 @@
# System imports
import os
import logging
# Custom imports
from nzbToMediaUtil import iterate_media_files
Logger = logging.getLogger()
def process_all_exceptions(name, dirname):
for group, exception in __customgroups__.items():
if not (group in name or group in dirname):
continue
process_exception(exception, name, dirname)
def process_exception(exception, name, dirname):
for parentDir, filename in iterate_media_files(dirname):
exception(filename, parentDir)
def process_qoq(filename, dirname):
Logger.debug("Reversing the file name for a QoQ release %s", filename)
head, fileExtension = os.path.splitext(os.path.basename(filename))
newname = head[::-1]
newfile = newname + fileExtension
newfilePath = os.path.join(dirname, newfile)
os.rename(filename, newfilePath)
Logger.debug("New file name is %s", newfile)
# dict for custom groups
# we can add more to this list
__customgroups__ = {'Q o Q': process_qoq}
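
For illustration, what process_qoq does to a single reversed-name release file; the path is hypothetical:

import os
filename = "/downloads/Some.Show.S01E01 Q o Q/10E10S.wohS.emoS.avi"  # hypothetical QoQ release
head, fileExtension = os.path.splitext(os.path.basename(filename))
newfile = head[::-1] + fileExtension  # -> 'Some.Show.S01E01.avi'
# process_qoq then renames the original file to this name inside its directory.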

@@ -0,0 +1,262 @@
import logging
import logging.config
import os
import re
import sys
import shutil
import linktastic.linktastic as linktastic
Logger = logging.getLogger()
def safeName(name):
safename = re.sub(r"[\/\\\:\*\?\"\<\>\|]", "", name) #make this name safe for use in directories for windows etc.
return safename
def nzbtomedia_configure_logging(dirname):
logFile = os.path.join(dirname, "postprocess.log")
logging.config.fileConfig(os.path.join(dirname, "autoProcessMedia.cfg"))
fileHandler = logging.FileHandler(logFile, encoding='utf-8', delay=True)
fileHandler.formatter = logging.Formatter('%(asctime)s|%(levelname)-7.7s %(message)s', '%H:%M:%S')
fileHandler.level = logging.DEBUG
logging.getLogger().addHandler(fileHandler)
def create_destination(outputDestination):
if os.path.exists(outputDestination):
return
try:
Logger.info("CREATE DESTINATION: Creating destination folder: %s", outputDestination)
os.makedirs(outputDestination)
except Exception, e:
Logger.error("CREATE DESTINATION: Not possible to create destination folder: %s. Exiting", e)
sys.exit(-1)
def category_search(inputDirectory, inputName, inputCategory, root, categories):
categorySearch = [os.path.normpath(inputDirectory), ""] # initialize
notfound = 0
for x in range(10): # loop up through 10 directories looking for category.
try:
categorySearch2 = os.path.split(os.path.normpath(categorySearch[0]))
except: # this might happen when we can't go higher.
if inputCategory and inputName: # if these exist, we are ok to proceed, but assume we are in a root/common directory.
Logger.info("SEARCH: Could not find a Torrent Name or category in the directory structure")
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
Logger.info("SEARCH: We will try and determine which files to process, individually")
root = 1
break # we are done
elif inputCategory: # if this exists, we are ok to proceed, but assume we are in a root/common directory and we have to check file dates.
Logger.info("SEARCH: Could not find a Torrent Name or Category in the directory structure")
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
Logger.info("SEARCH: We will try and determine which files to process, individually")
root = 2
break # we are done
else:
Logger.error("SEARCH: Could not identify Category of Torrent Name in the directory structure. Please check downloader settings. Exiting")
sys.exit(-1)
if categorySearch2[1] in categories:
Logger.debug("SEARCH: Found Category: %s in directory structure", categorySearch2[1])
if not inputCategory:
Logger.info("SEARCH: Determined Category to be: %s", categorySearch2[1])
inputCategory = categorySearch2[1]
if inputName and categorySearch[0] != os.path.normpath(inputDirectory): # if we are not in the root directory and we have inputName we can continue.
if ('.cp(tt' in categorySearch[1]) and (not '.cp(tt' in inputName): # if the directory was created by CouchPotato, and this tag is not in Torrent name, we want to add it.
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
inputName = categorySearch[1]
Logger.info("SEARCH: Identified Category: %s and Torrent Name: %s. We are in a unique directory, so we can proceed.", inputCategory, inputName)
break # we are done
elif categorySearch[1] and not inputName: # assume the next directory deep is the torrent name.
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], categorySearch[1]), categorySearch[0])
inputName = categorySearch[1]
break # we are done
elif ('.cp(tt' in categorySearch[1]) and (not '.cp(tt' in inputName): # if the directory was created by CouchPotato, and this tag is not in Torrent name, we want to add it.
Logger.info("SEARCH: Changing Torrent Name to %s to preserve imdb id.", categorySearch[1])
inputName = categorySearch[1]
break # we are done
elif os.path.isdir(os.path.join(categorySearch[0], inputName)) and inputName: # testing for torrent name in first sub directory
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], inputName), categorySearch[0])
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
inputDirectory = os.path.join(categorySearch[0], inputName) # we only want to search this next dir up.
break # we are done
elif os.path.isdir(os.path.join(categorySearch[0], safeName(inputName))) and inputName: # testing for torrent name in first sub directory
Logger.info("SEARCH: Found torrent directory %s in category directory %s", os.path.join(categorySearch[0], safeName(inputName)), categorySearch[0])
if categorySearch[0] == os.path.normpath(inputDirectory): # only true on first pass, x =0
inputDirectory = os.path.join(categorySearch[0], safeName(inputName)) # we only want to search this next dir up.
break # we are done
elif inputName: # if this exists, we are ok to proceed, but we are in a root/common directory.
Logger.info("SEARCH: Could not find a unique torrent folder in the directory structure")
Logger.info("SEARCH: The directory passed is the root directory for category %s", categorySearch2[1])
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
Logger.info("SEARCH: We will try and determine which files to process, individually")
root = 1
break # we are done
else: # this is a problem! if we don't have Torrent name and are in the root category dir, we can't proceed.
Logger.warn("SEARCH: Could not identify a torrent name and the directory passed is common to all downloads for category %s.", categorySearch[1])
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
Logger.info("SEARCH: We will try and determine which files to process, individually")
root = 2
break
elif safeName(categorySearch2[1]) == safeName(inputName) and inputName: # we have identified a unique directory.
Logger.info("SEARCH: Files appear to be in their own directory")
if inputCategory: # we are ok to proceed.
break # we are done
else:
Logger.debug("SEARCH: Continuing scan to determin category.")
categorySearch = categorySearch2 # ready for next loop
continue # keep going
else:
if x == 9: # This is the last pass in the loop and we didn't find anything.
notfound = 1
break # we are done
else:
categorySearch = categorySearch2 # ready for next loop
continue # keep going
if notfound == 1:
if inputCategory and inputName: # if these exist, we are ok to proceed, but assume we are in a root/common directory.
Logger.info("SEARCH: Could not find a category in the directory structure")
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
Logger.info("SEARCH: We will try and determine which files to process, individually")
root = 1
elif inputCategory: # if this exists, we are ok to proceed, but assume we are in a root/common directory and we have to check file dates.
Logger.info("SEARCH: Could not find a Torrent Name or Category in the directory structure")
Logger.info("SEARCH: We assume the directory passed is the root directory for your downlaoder")
Logger.warn("SEARCH: You should change settings to download torrents to their own directory if possible")
Logger.info("SEARCH: We will try and determine which files to process, individually")
root = 2
if not inputCategory: # we didn't find this after 10 loops. This is a problem.
Logger.error("SEARCH: Could not identify category and torrent name from the directory structure. Please check downloader settings. Exiting")
sys.exit(-1) # Oh yeah.... WE ARE DONE!
return inputDirectory, inputName, inputCategory, root
def is_sample(filePath, inputName, minSampleSize):
# minSampleSize is given in MB; convert it to bytes
SIZE_CUTOFF = minSampleSize * 1024 * 1024
# Ignore 'sample' in files unless 'sample' in Torrent Name
return ('sample' in filePath.lower()) and (not 'sample' in inputName) and (os.path.getsize(filePath) < SIZE_CUTOFF)
def copy_link(filePath, targetDirectory, useLink, outputDestination):
create_destination(outputDestination)
if useLink != 0:
try:
Logger.info("COPYLINK: Linking %s to %s", filePath, targetDirectory)
linktastic.link(filePath, targetDirectory)
except:
if os.path.isfile(targetDirectory):
Logger.info("COPYLINK: Something went wrong in linktastic.link, but the destination file was created")
else:
Logger.info("COPYLINK: Something went wrong in linktastic.link, copying instead")
Logger.debug("COPYLINK: Copying %s to %s", filePath, targetDirectory)
shutil.copy(filePath, targetDirectory)
else:
Logger.debug("Copying %s to %s", filePath, targetDirectory)
shutil.copy(filePath, targetDirectory)
return True
def flatten(outputDestination):
Logger.info("FLATTEN: Flattening directory: %s", outputDestination)
for dirpath, dirnames, filenames in os.walk(outputDestination): # Flatten out the directory to make postprocessing easier
if dirpath == outputDestination:
continue # No need to try and move files in the root destination directory
for filename in filenames:
source = os.path.join(dirpath, filename)
target = os.path.join(outputDestination, filename)
try:
shutil.move(source, target)
except OSError:
Logger.error("FLATTEN: Could not flatten %s", source)
removeEmptyFolders(outputDestination) # Cleanup empty directories
def removeEmptyFolders(path):
Logger.info("REMOVER: Removing empty folders in: %s", path)
if not os.path.isdir(path):
return
# Remove empty subfolders
files = os.listdir(path)
if len(files):
for f in files:
fullpath = os.path.join(path, f)
if os.path.isdir(fullpath):
removeEmptyFolders(fullpath)
# If folder empty, delete it
files = os.listdir(path)
if len(files) == 0:
Logger.debug("REMOVER: Removing empty folder: %s", path)
os.rmdir(path)
def iterate_media_files(dirname):
mediaContainer = [ '.mkv', '.avi', '.divx', '.xvid', '.mov', '.wmv',
'.mp4', '.mpg', '.mpeg', '.iso' ]
for dirpath, dirnames, filenames in os.walk(dirname):
for filename in filenames:
fileExtension = os.path.splitext(filename)[1]
if not (fileExtension in mediaContainer):
continue
yield dirpath, os.path.join(dirpath, filename)
def parse_other(args):
return os.path.normpath(sys.argv[1]), '', '', ''
def parse_utorrent(args):
# uTorrent usage: call TorrentToMedia.py "%D" "%N" "%L" "%I"
inputDirectory = os.path.normpath(args[1])
inputName = args[2]
try:
inputCategory = args[3]
except:
inputCategory = ''
try:
inputHash = args[4]
except:
inputHash = ''
return inputDirectory, inputName, inputCategory, inputHash
def parse_deluge(args):
# Deluge usage: call TorrentToMedia.py TORRENT_ID TORRENT_NAME TORRENT_DIR
inputDirectory = os.path.normpath(sys.argv[3])
inputName = sys.argv[2]
inputCategory = '' # We don't have a category yet
inputHash = ''
return inputDirectory, inputName, inputCategory, inputHash
def parse_transmission(args):
# Transmission usage: call TorrentToMedia.py (%TR_TORRENT_DIR% and %TR_TORRENT_NAME% are passed as environment variables)
inputDirectory = os.path.normpath(os.getenv('TR_TORRENT_DIR'))
inputName = os.getenv('TR_TORRENT_NAME')
inputCategory = '' # We don't have a category yet
inputHash = ''
return inputDirectory, inputName, inputCategory, inputHash
__ARG_PARSERS__ = {
'other': parse_other,
'utorrent': parse_utorrent,
'deluge': parse_deluge,
'transmission': parse_transmission,
}
def parse_args(clientAgent):
parseFunc = __ARG_PARSERS__.get(clientAgent, None)
if not parseFunc:
raise RuntimeError("Could not find client-agent")
return parseFunc(sys.argv)
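
For illustration, how a uTorrent invocation maps onto the tuple parse_args returns, using parse_utorrent as defined above; the paths and hash are hypothetical:

# uTorrent is configured to run: TorrentToMedia.py "%D" "%N" "%L" "%I"
args = ['TorrentToMedia.py', '/downloads/movies/Some.Movie.2012', 'Some.Movie.2012', 'movies', 'HASH123']
inputDirectory, inputName, inputCategory, inputHash = parse_utorrent(args)
# -> ('/downloads/movies/Some.Movie.2012', 'Some.Movie.2012', 'movies', 'HASH123')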