mirror of
https://github.com/clinton-hall/nzbToMedia.git
synced 2025-08-21 05:43:16 -07:00
Added database support: we now store all variables passed into our scripts, so that if something fails and you need to perform a manual run, the script can find those variables with a database lookup and re-use them.
This commit is contained in:
parent c6e4ab442e
commit 3f137ae3b6
16 changed files with 514 additions and 81 deletions
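In practice, the commit wires a small SQLite "downloads" table (created by the new nzbtomedia/nzbToMediaDB.py and nzbtomedia/databases/mainDB.py shown below) into TorrentToMedia.py and nzbToMedia.py. A condensed sketch of the write path, using only names introduced in this commit; the directory, hash and id values are illustrative:

```python
# Condensed sketch of the write path this commit adds; the directory, hash
# and id values are illustrative only.
import datetime
from nzbtomedia import nzbToMediaDB

myDB = nzbToMediaDB.DBConnection()

# rows are keyed by the download's input directory
controlValueDict = {"input_directory": "/downloads/complete/movie/Some.Release"}
newValueDict = {"input_name": "Some.Release",
                "input_hash": "<torrent info hash>",
                "input_id": "<client download id>",
                "client_agent": "utorrent",
                "status": 0,  # 0 = not yet processed successfully
                "last_update": datetime.date.today().toordinal()}
myDB.upsert("downloads", newValueDict, controlValueDict)
```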
3  .gitignore  (vendored)
@@ -5,4 +5,5 @@
*.log
/userscripts/
/logs/
/.idea/
/.idea/
*.db
@@ -15,8 +15,8 @@ from nzbtomedia.autoProcess.autoProcessMusic import autoProcessMusic
from nzbtomedia.autoProcess.autoProcessTV import autoProcessTV
from nzbtomedia.nzbToMediaUtil import category_search, sanitizeFileName, copy_link, parse_args, flatten, get_dirnames, \
    remove_read_only, pause_torrent, resume_torrent, listMediaFiles, joinPath, \
    extractFiles, cleanProcDirs, append_downloadID
from nzbtomedia import logger
    extractFiles, cleanProcDirs, update_downloadInfoStatus, get_downloadInfo
from nzbtomedia import logger, nzbToMediaDB

def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent):
    status = 1  # 1 = failed | 0 = success
@@ -25,12 +25,28 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
    foundFile = 0
    copy_list = []

    if clientAgent != 'manual':
        logger.debug('Adding TORRENT download info for directory %s to database' % (inputDirectory))

        myDB = nzbToMediaDB.DBConnection()

        controlValueDict = {"input_directory": inputDirectory}
        newValueDict = {"input_name": inputName,
                        "input_hash": inputHash,
                        "input_id": inputID,
                        "client_agent": clientAgent,
                        "status": 0,
                        "last_update": datetime.date.today().toordinal()
                        }
        myDB.upsert("downloads", newValueDict, controlValueDict)

    logger.debug("Received Directory: %s | Name: %s | Category: %s" % (inputDirectory, inputName, inputCategory))

    inputDirectory, inputName, inputCategory, root, single = category_search(inputDirectory, inputName, inputCategory, root, nzbtomedia.CATEGORIES)  # Confirm the category by parsing directory structure

    logger.debug("Determined Directory: %s | Name: %s | Category: %s" % (inputDirectory, inputName, inputCategory))

    # Add torrent info hash to folder name incase we need it later on
    section = nzbtomedia.CFG.findsection(inputCategory)
    if not section:
        logger.error(
@@ -49,11 +65,6 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
        inputCategory = "UNCAT"
    outputDestination = os.path.normpath(joinPath(nzbtomedia.OUTPUTDIRECTORY, inputCategory, sanitizeFileName(inputName)))

    # Add torrent info hash to folder name incase we need it later on
    if clientAgent != 'manual':
        logger.debug('Added torrent info hash %s to output directory %s' % (inputHash, outputDestination))
        outputDestination = append_downloadID(outputDestination, inputHash)

    logger.info("Output directory set to: %s" % (outputDestination))

    processOnly = nzbtomedia.CFG[nzbtomedia.SECTIONS].sections
@@ -185,6 +196,9 @@ def processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID,
        logger.error("A problem was reported in the autoProcess* script. If torrent was paused we will resume seeding")
        resume_torrent(clientAgent, inputHash, inputID, result, inputName)
    else:
        # update download status in our DB
        update_downloadInfoStatus(inputDirectory, 1)

    # cleanup our processing folders of any misc unwanted files and empty directories
    cleanProcDirs()

@@ -289,16 +303,26 @@ def main(args):
    if inputDirectory and inputName and inputHash and inputID:
        result = processTorrent(inputDirectory, inputName, inputCategory, inputHash, inputID, clientAgent)
    else:
        # Perform Manual Run
        # Perform Manual Post-Processing
        logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")

        # Loop and auto-process
        clientAgent = 'manual'
        for section, subsection in nzbtomedia.SUBSECTIONS.items():
            for category in subsection:
                if nzbtomedia.CFG[section][category].isenabled():
                    dirNames = get_dirnames(section, category)
                    for dirName in dirNames:
                        clientAgent = 'manual'
                        inputHash = None
                        inputID = None

                        logger.info("Checking database for download info ...")
                        downloadInfo = get_downloadInfo(dirName, 0)
                        if downloadInfo:
                            clientAgent = downloadInfo['client_agent']
                            inputHash = downloadInfo['input_hash']
                            inputID = downloadInfo['input_id']
                            logger.info("Found download info for directory %s, setting variables now ..." % (dirName))

                        logger.info("Running %s:%s as a manual run for folder %s ..." % (section, category, dirName))
                        results = processTorrent(dirName, os.path.basename(dirName), category, inputHash, inputID, clientAgent)
                        if results != 0:
@@ -15,6 +15,8 @@
force_clean = 0
# Enable/Disable logging debug messages to nzbtomedia.log
log_debug = 0
# Enable/Disable logging database messages to nzbtomedia.log
log_db = 0
# Set to where your ffmpeg/ffprobe executables are located
ffmpeg_path =
@@ -145,7 +145,7 @@ Impacts Torrents
Fixed an import error when extracting

Impacts NZBs
Fixed passthrough of nzbName from NZBGet to pass the .nzb extension (required for SickBeard's failed fork)
Fixed passthrough of inputName from NZBGet to pass the .nzb extension (required for SickBeard's failed fork)


V8.0 28/04/2013
@@ -275,36 +275,52 @@
##############################################################################
import os
import sys
import datetime
import nzbtomedia
from nzbtomedia.autoProcess.autoProcessComics import autoProcessComics
from nzbtomedia.autoProcess.autoProcessGames import autoProcessGames
from nzbtomedia.autoProcess.autoProcessMovie import autoProcessMovie
from nzbtomedia.autoProcess.autoProcessMusic import autoProcessMusic
from nzbtomedia.autoProcess.autoProcessTV import autoProcessTV
from nzbtomedia.nzbToMediaUtil import get_dirnames, extractFiles, cleanProcDirs
from nzbtomedia import logger
from nzbtomedia.nzbToMediaUtil import get_dirnames, extractFiles, cleanProcDirs, update_downloadInfoStatus, get_downloadInfo
from nzbtomedia import logger, nzbToMediaDB

# post-processing
def process(nzbDir, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None):
def process(inputDirectory, inputName=None, status=0, clientAgent='manual', download_id=None, inputCategory=None):
    if clientAgent != 'manual':
        logger.debug('Adding NZB download info for directory %s to database' % (inputDirectory))

        myDB = nzbToMediaDB.DBConnection()

        controlValueDict = {"input_directory": inputDirectory}
        newValueDict = {"input_name": inputName,
                        "input_hash": download_id,
                        "input_id": download_id,
                        "client_agent": clientAgent,
                        "status": 0,
                        "last_update": datetime.date.today().toordinal()
                        }
        myDB.upsert("downloads", newValueDict, controlValueDict)

    # auto-detect section
    section = nzbtomedia.CFG.findsection(inputCategory)
    if section:
        if nzbtomedia.CFG[section][inputCategory]['extract']:
            logger.debug('Checking for archives to extract in directory: %s' % (nzbDir))
            extractFiles(nzbDir)
            logger.debug('Checking for archives to extract in directory: %s' % (inputDirectory))
            extractFiles(inputDirectory)

        logger.info("Sending %s to %s for post-processing ..." % (inputName, str(section).upper()))

        if nzbtomedia.CFG["CouchPotato"][inputCategory]:
            result = autoProcessMovie().process(nzbDir, inputName, status, clientAgent, download_id, inputCategory)
            result = autoProcessMovie().process(inputDirectory, inputName, status, clientAgent, download_id, inputCategory)
        elif nzbtomedia.CFG["SickBeard", "NzbDrone"][inputCategory]:
            result = autoProcessTV().processEpisode(nzbDir, inputName, status, clientAgent, inputCategory)
            result = autoProcessTV().processEpisode(inputDirectory, inputName, status, clientAgent, inputCategory)
        elif nzbtomedia.CFG["HeadPhones"][inputCategory]:
            result = autoProcessMusic().process(nzbDir, inputName, status, clientAgent, inputCategory)
            result = autoProcessMusic().process(inputDirectory, inputName, status, clientAgent, inputCategory)
        elif nzbtomedia.CFG["Mylar"][inputCategory]:
            result = autoProcessComics().processEpisode(nzbDir, inputName, status, clientAgent, inputCategory)
            result = autoProcessComics().processEpisode(inputDirectory, inputName, status, clientAgent, inputCategory)
        elif nzbtomedia.CFG["Gamez"][inputCategory]:
            result = autoProcessGames().process(nzbDir, inputName, status, clientAgent, inputCategory)
            result = autoProcessGames().process(inputDirectory, inputName, status, clientAgent, inputCategory)
        else:
            result = -1
    else:
@@ -312,6 +328,9 @@ def process(nzbDir, inputName=None, status=0, clientAgent='manual', download_id=
        result = -1

    if result == 0:
        # update download status in our DB
        update_downloadInfoStatus(inputDirectory, 1)

    # cleanup our processing folders of any misc unwanted files and empty directories
    cleanProcDirs()

@@ -412,18 +431,26 @@ def main(args, section=None):
        logger.info("Script triggered from SABnzbd 0.7.17+")
        result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent, download_id='')
    else:
        # Perform Manual Run
        # Perform Manual Post-Processing
        logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")

        # Loop and auto-process
        clientAgent = 'manual'
        for section, subsection in nzbtomedia.SUBSECTIONS.items():
            for category in subsection:
                if nzbtomedia.CFG[section][category].isenabled():
                    dirNames = get_dirnames(section, category)
                    for dirName in dirNames:
                        clientAgent = 'manual'
                        download_id = None

                        logger.info("Checking database for download info ...")
                        downloadInfo = get_downloadInfo(dirName, 0)
                        if downloadInfo:
                            clientAgent = downloadInfo['client_agent']
                            download_id = downloadInfo['input_id']
                            logger.info("Found download info for directory %s, setting variables now ..." % (dirName))

                        logger.info("Starting manual run for %s:%s - Folder:%s" % (section, category, dirName))
                        results = process(dirName, os.path.basename(dirName), 0, clientAgent=clientAgent, inputCategory=category)
                        results = process(dirName, os.path.basename(dirName), 0, clientAgent=clientAgent, download_id=download_id, inputCategory=category)
                        if results != 0:
                            logger.error("A problem was reported when trying to perform a manual run for %s:%s." % (section, category))
                            result = results
@@ -20,10 +20,11 @@ CONFIG_TV_FILE = os.path.join(PROGRAM_DIR, 'autoProcessTv.cfg')
# add our custom libs to the system path
sys.path.insert(0, LIBS_DIR)

from nzbtomedia import logger, versionCheck
from nzbtomedia import logger, versionCheck, nzbToMediaDB
from nzbtomedia.nzbToMediaConfig import config
from nzbtomedia.nzbToMediaUtil import WakeUp, makeDir, joinPath, cleanProcDirs, create_torrent_class, listMediaFiles
from nzbtomedia.transcoder import transcoder
from nzbtomedia.databases import mainDB

# sabnzbd constants
SABNZB_NO_OF_ARGUMENTS = 8
@@ -48,6 +49,7 @@ NZBGET_POSTPROCESS_NONE = 95

CFG = None
LOG_DEBUG = None
LOG_DB = None
SYS_ENCODING = None

AUTO_UPDATE = None
@@ -139,7 +141,7 @@ def initialize(section=None):
    SECTIONS, SUBSECTIONS, USER_SCRIPT_CATEGORIES, __INITIALIZED__, AUTO_UPDATE, APP_FILENAME, USER_DELAY, USER_SCRIPT_RUNONCE, \
    APP_NAME,USER_SCRIPT_MEDIAEXTENSIONS, USER_SCRIPT, USER_SCRIPT_PARAM, USER_SCRIPT_SUCCESSCODES, USER_SCRIPT_CLEAN, \
    TRANSCODE, GIT_PATH, GIT_USER, GIT_BRANCH, GIT_REPO, SYS_ENCODING, NZB_CLIENTAGENT, SABNZBDHOST, SABNZBDPORT, SABNZBDAPIKEY, \
    DUPLICATE, IGNOREEXTENSIONS, OUTPUTVIDEOEXTENSION, OUTPUTVIDEOCODEC, OUTPUTVIDEOPRESET, OUTPUTVIDEOFRAMERATE, \
    DUPLICATE, IGNOREEXTENSIONS, OUTPUTVIDEOEXTENSION, OUTPUTVIDEOCODEC, OUTPUTVIDEOPRESET, OUTPUTVIDEOFRAMERATE, LOG_DB, \
    OUTPUTVIDEOBITRATE, OUTPUTAUDIOCODEC, OUTPUTAUDIOBITRATE, OUTPUTSUBTITLECODEC, OUTPUTFASTSTART, OUTPUTQUALITYPERCENT, \
    NICENESS, LOG_DEBUG, FORCE_CLEAN, FFMPEG_PATH, FFMPEG, FFPROBE, AUDIOCONTAINER, EXTCONTAINER, TORRENT_CLASS, DELETE_ORIGINAL

@@ -184,13 +186,16 @@ def initialize(section=None):
        if not config.addnzbget():
            logger.error("Unable to migrate NzbGet config file %s, exiting ..." % (CONFIG_FILE))
            sys.exit(-1)

    # load newly migrated config
    logger.info("Loading config from [%s]" % (CONFIG_FILE))
    CFG = config()

    # Enable/Disable DEBUG Logging
    LOG_DEBUG = int(CFG['General']['log_debug'])
    LOG_DB = int(CFG['General']['log_db'])

    # initialize the main SB database
    nzbToMediaDB.upgradeDatabase(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)

    # Set Version and GIT variables
    NZBTOMEDIA_VERSION = '9.3'
@@ -6,7 +6,7 @@ from nzbtomedia.nzbToMediaUtil import convert_to_ascii, joinPath
from nzbtomedia import logger

class autoProcessComics:
    def processEpisode(self, dirName, nzbName=None, status=0, clientAgent='manual', inputCategory=None):
    def processEpisode(self, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
        # auto-detect correct section
        section = nzbtomedia.CFG.findsection(inputCategory)
        if not section:
@@ -34,15 +34,15 @@ class autoProcessComics:
        except:
            remote_path = None

        nzbName, dirName = convert_to_ascii(nzbName, dirName)
        inputName, dirName = convert_to_ascii(inputName, dirName)

        params = {}
        params['nzb_folder'] = dirName
        if remote_path:
            params['nzb_folder'] = joinPath(remote_path, os.path.basename(dirName))

        if nzbName != None:
            params['nzb_name'] = nzbName
        if inputName != None:
            params['nzb_name'] = inputName

        if ssl:
            protocol = "https://"
@@ -4,7 +4,7 @@ from nzbtomedia.nzbToMediaUtil import convert_to_ascii
from nzbtomedia import logger

class autoProcessGames:
    def process(self, dirName, nzbName=None, status=0, clientAgent='manual', inputCategory=None):
    def process(self, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
        if dirName is None:
            logger.error("No directory was given!")
            return 1 # failure
@@ -37,11 +37,11 @@ class autoProcessGames:
        else:
            protocol = "http://"

        nzbName, dirName = convert_to_ascii(nzbName, dirName)
        inputName, dirName = convert_to_ascii(inputName, dirName)

        url = "%s%s:%s%s/api" % (protocol, host, port, web_root)

        fields = nzbName.split("-")
        fields = inputName.split("-")

        gamezID = fields[0].replace("[","").replace("]","").replace(" ","")
@@ -87,7 +87,7 @@ class autoProcessMovie:

        return results

    def process(self, dirName, nzbName=None, status=0, clientAgent="manual", download_id="", inputCategory=None):
    def process(self, dirName, inputName=None, status=0, clientAgent="manual", download_id="", inputCategory=None):
        # auto-detect correct section
        section = nzbtomedia.CFG.findsection(inputCategory)
        if not section:
@@ -128,7 +128,7 @@ class autoProcessMovie:

        baseURL = "%s%s:%s%s/api/%s" % (protocol, host, port, web_root, apikey)

        imdbid = find_imdbid(dirName, nzbName)
        imdbid = find_imdbid(dirName, inputName)
        release = self.get_release(baseURL, imdbid, download_id)

        # pull info from release found if available
@@ -146,8 +146,8 @@ class autoProcessMovie:
        except:
            pass

        process_all_exceptions(nzbName.lower(), dirName)
        nzbName, dirName = convert_to_ascii(nzbName, dirName)
        process_all_exceptions(inputName.lower(), dirName)
        inputName, dirName = convert_to_ascii(inputName, dirName)

        if status == 0:
            if nzbtomedia.TRANSCODE == 1:
@@ -175,7 +175,7 @@ class autoProcessMovie:

            logger.debug("Opening URL: %s" % (url), section)

            logger.postprocess("Starting %s scan for %s" % (method, nzbName), section)
            logger.postprocess("Starting %s scan for %s" % (method, inputName), section)

            try:
                r = requests.get(url, params=params)
@@ -195,18 +195,18 @@ class autoProcessMovie:
            if not release:
                return 0
        else:
            logger.postprocess("FAILED DOWNLOAD DETECTED FOR %s" % (nzbName), section)
            logger.postprocess("FAILED DOWNLOAD DETECTED FOR %s" % (inputName), section)

            if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
                logger.postprocess("Deleting failed files and folder %s" % dirName, section)
                rmDir(dirName)

            if not download_id:
                logger.error("Could not find a downloaded movie in the database matching %s, exiting!" % nzbName,
                logger.error("Could not find a downloaded movie in the database matching %s, exiting!" % inputName,
                             section)
                return 1 # failure

            logger.postprocess("Setting failed release %s to ignored ..." % (nzbName), section)
            logger.postprocess("Setting failed release %s to ignored ..." % (inputName), section)

            url = baseURL + "/release.ignore"
            logger.debug("Opening URL: %s" % (url), section)
@@ -219,9 +219,9 @@ class autoProcessMovie:

            result = r.json()
            if result['success']:
                logger.postprocess("SUCCESS: %s has been set to ignored ..." % (nzbName), section)
                logger.postprocess("SUCCESS: %s has been set to ignored ..." % (inputName), section)
            else:
                logger.warning("FAILED: Unable to set %s to ignored!" % (nzbName), section)
                logger.warning("FAILED: Unable to set %s to ignored!" % (inputName), section)

            logger.postprocess("Trying to snatch the next highest ranked release.", section)

@@ -239,7 +239,7 @@ class autoProcessMovie:
                logger.postprocess("SUCCESS: Snatched the next highest release ...", section)
                return 0
            else:
                logger.postprocess("FAILED: Unable to find a higher ranked release then %s to snatch!" % (nzbName),
                logger.postprocess("FAILED: Unable to find a higher ranked release then %s to snatch!" % (inputName),
                                   section)
                return 1

@@ -253,7 +253,7 @@ class autoProcessMovie:
                release_status_new = release[release_id]['status']
                if release_status_new != release_status_old:
                    logger.postprocess("SUCCESS: Release %s has now been marked with a status of [%s]" % (
                        nzbName, str(release_status_new).upper()), section)
                        inputName, str(release_status_new).upper()), section)
                    return 0 # success
            except:
                pass
@@ -263,6 +263,6 @@ class autoProcessMovie:

        # The status hasn't changed. we have waited 2 minutes which is more than enough. uTorrent can resule seeding now.
        logger.warning(
            "%s does not appear to have changed status after %s minutes, Please check your logs." % (nzbName, wait_for),
            "%s does not appear to have changed status after %s minutes, Please check your logs." % (inputName, wait_for),
            section)
        return 1 # failure
@@ -30,7 +30,7 @@ class autoProcessMusic:
            return album["Status"].lower()
        except:pass

    def process(self, dirName, nzbName=None, status=0, clientAgent="manual", inputCategory=None):
    def process(self, dirName, inputName=None, status=0, clientAgent="manual", inputCategory=None):
        # auto-detect correct section
        section = nzbtomedia.CFG.findsection(inputCategory)
        if len(section) == 0:
@@ -64,7 +64,7 @@ class autoProcessMusic:
        else:
            protocol = "http://"

        nzbName, dirName = convert_to_ascii(nzbName, dirName)
        inputName, dirName = convert_to_ascii(inputName, dirName)

        url = "%s%s:%s%s/api" % (protocol,host,port,web_root)

@@ -82,10 +82,10 @@ class autoProcessMusic:

        if release_status:
            if release_status not in ["unprocessed", "snatched"]:
                logger.warning("%s is marked with a status of %s, skipping ..." % (nzbName, release_status),section)
                logger.warning("%s is marked with a status of %s, skipping ..." % (inputName, release_status),section)
                return 0
        else:
            logger.error("Could not find a status for %s" % (nzbName),section)
            logger.error("Could not find a status for %s" % (inputName),section)
            return 1

        logger.debug("Opening URL: %s" % (url),section)

@@ -98,9 +98,9 @@ class autoProcessMusic:

        logger.debug("Result: %s" % (r.text),section)
        if r.text == "OK":
            logger.postprocess("SUCCESS: Post-Processing started for %s in folder %s ..." % (nzbName, dirName),section)
            logger.postprocess("SUCCESS: Post-Processing started for %s in folder %s ..." % (inputName, dirName),section)
        else:
            logger.error("FAILED: Post-Processing has NOT started for %s in folder %s. exiting!" % (nzbName, dirName),section)
            logger.error("FAILED: Post-Processing has NOT started for %s in folder %s. exiting!" % (inputName, dirName),section)
            return 1 # failure

        else:
@@ -10,7 +10,7 @@ from nzbtomedia import logger
from nzbtomedia.transcoder import transcoder

class autoProcessTV:
    def processEpisode(self, dirName, nzbName=None, failed=False, clientAgent = "manual", inputCategory=None):
    def processEpisode(self, dirName, inputName=None, failed=False, clientAgent = "manual", inputCategory=None):
        # auto-detect correct section
        section = nzbtomedia.CFG.findsection(inputCategory)
        if not section:
@@ -64,7 +64,7 @@ class autoProcessTV:
        if not os.path.isdir(dirName) and os.path.isfile(dirName):  # If the input directory is a file, assume single file download and split dir/name.
            dirName = os.path.split(os.path.normpath(dirName))[0]

        SpecificPath = joinPath(dirName, str(nzbName))
        SpecificPath = joinPath(dirName, str(inputName))
        cleanName = os.path.splitext(SpecificPath)
        if cleanName[1] == ".nzb":
            SpecificPath = cleanName[0]
@@ -72,9 +72,9 @@ class autoProcessTV:
            dirName = SpecificPath

        if fork not in nzbtomedia.SICKBEARD_TORRENT or (clientAgent in ['nzbget','sabnzbd'] and nzbExtractionBy != "Destination"):
            if nzbName:
                process_all_exceptions(nzbName.lower(), dirName)
                nzbName, dirName = convert_to_ascii(nzbName, dirName)
            if inputName:
                process_all_exceptions(inputName.lower(), dirName)
                inputName, dirName = convert_to_ascii(inputName, dirName)

        # Now check if tv files exist in destination. Eventually extraction may be done here if nzbExtractionBy == TorrentToMedia
        video = 0
@@ -98,8 +98,8 @@ class autoProcessTV:

        # configure SB params to pass
        fork_params['quiet'] = 1
        if nzbName is not None:
            fork_params['nzbName'] = nzbName
        if inputName is not None:
            fork_params['inputName'] = inputName

        for param in copy.copy(fork_params):
            if param == "failed":
1  nzbtomedia/databases/__init__.py  (Normal file)
@@ -0,0 +1 @@
__all__ = ["mainDB"]
49  nzbtomedia/databases/mainDB.py  (Normal file)
@@ -0,0 +1,49 @@
import nzbtomedia
from nzbtomedia import logger, nzbToMediaDB
from nzbtomedia.nzbToMediaUtil import backupVersionedFile

MIN_DB_VERSION = 1 # oldest db version we support migrating from
MAX_DB_VERSION = 1

def backupDatabase(version):
    logger.info("Backing up database before upgrade")
    if not backupVersionedFile(nzbToMediaDB.dbFilename(), version):
        logger.log_error_and_exit("Database backup failed, abort upgrading database")
    else:
        logger.info("Proceeding with upgrade")

# ======================
# = Main DB Migrations =
# ======================
# Add new migrations at the bottom of the list; subclass the previous migration.

class InitialSchema(nzbToMediaDB.SchemaUpgrade):
    def test(self):
        return self.hasTable("db_version")

    def execute(self):
        if not self.hasTable("history") and not self.hasTable("db_version"):
            queries = [
                "CREATE TABLE db_version (db_version INTEGER);",
                "CREATE TABLE downloads (input_directory TEXT PRIMARY KEY, input_name TEXT, input_hash TEXT, input_id TEXT, client_agent TEXT, status INTEGER, last_update NUMERIC);",
                "INSERT INTO db_version (db_version) VALUES (1);"
            ]
            for query in queries:
                self.connection.action(query)

        else:
            cur_db_version = self.checkDBVersion()

            if cur_db_version < MIN_DB_VERSION:
                logger.log_error_and_exit("Your database version (" + str(
                    cur_db_version) + ") is too old to migrate from what this version of nzbToMedia supports (" + \
                    str(MIN_DB_VERSION) + ").\n" + \
                    "Please remove nzbtomedia.db file to begin fresh."
                )

            if cur_db_version > MAX_DB_VERSION:
                logger.log_error_and_exit("Your database version (" + str(
                    cur_db_version) + ") has been incremented past what this version of nzbToMedia supports (" + \
                    str(MAX_DB_VERSION) + ").\n" + \
                    "If you have used other forks of nzbToMedia, your database may be unusable due to their modifications."
                )
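Per the comment above, future schema changes are meant to be added as subclasses of the previous migration, and upgradeDatabase() walks the subclass chain. A hypothetical follow-up migration (not part of this commit, the column name is purely illustrative) using the SchemaUpgrade helpers from nzbToMediaDB.py might look like:

```python
# Hypothetical example only - not part of this commit. It sketches how a
# later migration would be added, following the "subclass the previous
# migration" convention above and using the SchemaUpgrade helpers
# (hasColumn, addColumn, incDBVersion) defined in nzbToMediaDB.py.
class AddTorrentClassColumn(InitialSchema):
    def test(self):
        # already applied once the downloads table has the new column
        return self.hasColumn("downloads", "torrent_class")

    def execute(self):
        if not self.hasColumn("downloads", "torrent_class"):
            self.addColumn("downloads", "torrent_class", "TEXT", "")
        self.incDBVersion()
```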
|
@ -17,12 +17,14 @@ WARNING = logging.WARNING
|
|||
MESSAGE = logging.INFO
|
||||
DEBUG = logging.DEBUG
|
||||
POSTPROCESS = 21
|
||||
DB = 5
|
||||
|
||||
reverseNames = {u'ERROR': ERROR,
|
||||
u'WARNING': WARNING,
|
||||
u'INFO': MESSAGE,
|
||||
u'DEBUG': DEBUG,
|
||||
u'POSTPROCESS': POSTPROCESS}
|
||||
u'POSTPROCESS': POSTPROCESS,
|
||||
u'DB': DB}
|
||||
|
||||
class NTMRotatingLogHandler(object):
|
||||
def __init__(self, log_file, num_files, num_bytes):
|
||||
|
@ -63,8 +65,9 @@ class NTMRotatingLogHandler(object):
|
|||
if self.cur_handler:
|
||||
old_handler = self.cur_handler
|
||||
else:
|
||||
#Add a new logging level POSTPROCESS
|
||||
#Add a new logging levels
|
||||
logging.addLevelName(21, 'POSTPROCESS')
|
||||
logging.addLevelName(5, 'DB')
|
||||
|
||||
# only start consoleLogging on first initialize
|
||||
if self.console_logging:
|
||||
|
@ -77,13 +80,15 @@ class NTMRotatingLogHandler(object):
|
|||
# set a format which is simpler for console use
|
||||
console.setFormatter(DispatchingFormatter(
|
||||
{'nzbtomedia': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
|
||||
'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S')
|
||||
'postprocess': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S'),
|
||||
'db': logging.Formatter('[%(asctime)s] [%(levelname)s]::%(message)s', '%H:%M:%S')
|
||||
},
|
||||
logging.Formatter('%(message)s'), ))
|
||||
|
||||
# add the handler to the root logger
|
||||
logging.getLogger('nzbtomedia').addHandler(console)
|
||||
logging.getLogger('postprocess').addHandler(console)
|
||||
logging.getLogger('db').addHandler(console)
|
||||
|
||||
self.log_file_path = os.path.join(nzbtomedia.LOG_DIR, self.log_file)
|
||||
|
||||
|
@ -91,9 +96,11 @@ class NTMRotatingLogHandler(object):
|
|||
|
||||
logging.getLogger('nzbtomedia').addHandler(self.cur_handler)
|
||||
logging.getLogger('postprocess').addHandler(self.cur_handler)
|
||||
logging.getLogger('db').addHandler(self.cur_handler)
|
||||
|
||||
logging.getLogger('nzbtomedia').setLevel(logging.DEBUG)
|
||||
logging.getLogger('postprocess').setLevel(POSTPROCESS)
|
||||
logging.getLogger('db').setLevel(POSTPROCESS)
|
||||
|
||||
# already logging in new log folder, close the old handler
|
||||
if old_handler:
|
||||
|
@ -110,7 +117,8 @@ class NTMRotatingLogHandler(object):
|
|||
|
||||
file_handler.setFormatter(DispatchingFormatter(
|
||||
{'nzbtomedia': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S')
|
||||
'postprocess': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S'),
|
||||
'db': logging.Formatter('%(asctime)s %(levelname)-8s::%(message)s', '%Y-%m-%d %H:%M:%S')
|
||||
},
|
||||
logging.Formatter('%(message)s'), ))
|
||||
|
||||
|
@ -142,6 +150,7 @@ class NTMRotatingLogHandler(object):
|
|||
|
||||
ntm_logger = logging.getLogger('nzbtomedia')
|
||||
pp_logger = logging.getLogger('postprocess')
|
||||
db_logger = logging.getLogger('db')
|
||||
|
||||
# delete the old handler
|
||||
if self.cur_handler:
|
||||
|
@ -184,7 +193,9 @@ class NTMRotatingLogHandler(object):
|
|||
|
||||
ntm_logger = logging.getLogger('nzbtomedia')
|
||||
pp_logger = logging.getLogger('postprocess')
|
||||
db_logger = logging.getLogger('db')
|
||||
setattr(pp_logger, 'postprocess', lambda *args: pp_logger.log(POSTPROCESS, *args))
|
||||
setattr(db_logger, 'db', lambda *args: db_logger.log(DB, *args))
|
||||
|
||||
try:
|
||||
if logLevel == DEBUG:
|
||||
|
@ -198,6 +209,8 @@ class NTMRotatingLogHandler(object):
|
|||
ntm_logger.error(out_line)
|
||||
elif logLevel == POSTPROCESS:
|
||||
pp_logger.postprocess(out_line)
|
||||
elif logLevel == DB:
|
||||
db_logger.db(out_line)
|
||||
else:
|
||||
ntm_logger.info(logLevel, out_line)
|
||||
except ValueError:
|
||||
|
@ -237,9 +250,12 @@ def warning(toLog, section='MAIN'):
|
|||
def debug(toLog, section='MAIN'):
|
||||
log(toLog, DEBUG, section)
|
||||
|
||||
def postprocess(toLog, section='MAIN'):
|
||||
def postprocess(toLog, section='POSTPROCESS'):
|
||||
log(toLog, POSTPROCESS, section)
|
||||
|
||||
def db(toLog, section='DB'):
|
||||
log(toLog, DB, section)
|
||||
|
||||
def log_error_and_exit(error_msg):
|
||||
ntm_log_instance.log_error_and_exit(error_msg)
|
||||
|
||||
|
|
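With the new DB level (5) and the 'db' logger registered above, database activity gets its own module-level helper; a log_db option is also added to autoProcessMedia.cfg and read into LOG_DB in nzbtomedia/__init__.py, though how that flag is consumed is not shown in this diff. A minimal usage sketch (the message text is illustrative):

```python
from nzbtomedia import logger

# emitted at the new DB level through the 'db' logger registered above
logger.db("nzbtomedia.db: SELECT * FROM downloads WHERE input_directory=?  (example)")
```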
266  nzbtomedia/nzbToMediaDB.py  (Normal file)
@@ -0,0 +1,266 @@
from __future__ import with_statement

import re
import sqlite3
import time

import nzbtomedia
from nzbtomedia import logger

def dbFilename(filename="nzbtomedia.db", suffix=None):
    """
    @param filename: The sqlite database filename to use. If not specified,
                     will be made to be nzbtomedia.db
    @param suffix: The suffix to append to the filename. A '.' will be added
                   automatically, i.e. suffix='v0' will make dbfile.db.v0
    @return: the correct location of the database file.
    """
    if suffix:
        filename = "%s.%s" % (filename, suffix)
    return nzbtomedia.joinPath(nzbtomedia.PROGRAM_DIR, filename)


class DBConnection:
    def __init__(self, filename="nzbtomedia.db", suffix=None, row_type=None):

        self.filename = filename
        self.connection = sqlite3.connect(dbFilename(filename), 20)
        if row_type == "dict":
            self.connection.row_factory = self._dict_factory
        else:
            self.connection.row_factory = sqlite3.Row

    def checkDBVersion(self):
        result = None
        try:
            result = self.select("SELECT db_version FROM db_version")
        except sqlite3.OperationalError, e:
            if "no such table: db_version" in e.args[0]:
                return 0

        if result:
            return int(result[0]["db_version"])
        else:
            return 0

    def fetch(self, query, args=None):
        if query == None:
            return

        sqlResult = None
        attempt = 0

        while attempt < 5:
            try:
                if args == None:
                    logger.log(self.filename + ": " + query, logger.DB)
                    cursor = self.connection.cursor()
                    cursor.execute(query)
                    sqlResult = cursor.fetchone()[0]
                else:
                    logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB)
                    cursor = self.connection.cursor()
                    cursor.execute(query, args)
                    sqlResult = cursor.fetchone()[0]

                # get out of the connection attempt loop since we were successful
                break
            except sqlite3.OperationalError, e:
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.log(u"DB error: " + str(e), logger.WARNING)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.log(u"DB error: " + str(e), logger.ERROR)
                    raise
            except sqlite3.DatabaseError, e:
                logger.log(u"Fatal error executing query: " + str(e), logger.ERROR)
                raise

        return sqlResult

    def mass_action(self, querylist, logTransaction=False):
        if querylist == None:
            return

        sqlResult = []
        attempt = 0

        while attempt < 5:
            try:
                for qu in querylist:
                    if len(qu) == 1:
                        if logTransaction:
                            logger.log(qu[0], logger.DEBUG)
                        sqlResult.append(self.connection.execute(qu[0]))
                    elif len(qu) > 1:
                        if logTransaction:
                            logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
                        sqlResult.append(self.connection.execute(qu[0], qu[1]))
                self.connection.commit()
                logger.log(u"Transaction with " + str(len(querylist)) + u" query's executed", logger.DEBUG)
                return sqlResult
            except sqlite3.OperationalError, e:
                sqlResult = []
                if self.connection:
                    self.connection.rollback()
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.log(u"DB error: " + str(e), logger.WARNING)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.log(u"DB error: " + str(e), logger.ERROR)
                    raise
            except sqlite3.DatabaseError, e:
                sqlResult = []
                if self.connection:
                    self.connection.rollback()
                logger.log(u"Fatal error executing query: " + str(e), logger.ERROR)
                raise

        return sqlResult

    def action(self, query, args=None):
        if query == None:
            return

        sqlResult = None
        attempt = 0

        while attempt < 5:
            try:
                if args == None:
                    logger.log(self.filename + ": " + query, logger.DB)
                    sqlResult = self.connection.execute(query)
                else:
                    logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB)
                    sqlResult = self.connection.execute(query, args)
                self.connection.commit()
                # get out of the connection attempt loop since we were successful
                break
            except sqlite3.OperationalError, e:
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.log(u"DB error: " + str(e), logger.WARNING)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.log(u"DB error: " + str(e), logger.ERROR)
                    raise
            except sqlite3.DatabaseError, e:
                logger.log(u"Fatal error executing query: " + str(e), logger.ERROR)
                raise

        return sqlResult


    def select(self, query, args=None):

        sqlResults = self.action(query, args).fetchall()

        if sqlResults == None:
            return []

        return sqlResults

    def upsert(self, tableName, valueDict, keyDict):

        changesBefore = self.connection.total_changes

        genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]

        query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(
            genParams(keyDict))

        self.action(query, valueDict.values() + keyDict.values())

        if self.connection.total_changes == changesBefore:
            query = "INSERT INTO " + tableName + " (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" + \
                    " VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
            self.action(query, valueDict.values() + keyDict.values())

    def tableInfo(self, tableName):
        # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
        cursor = self.connection.execute("PRAGMA table_info(%s)" % tableName)
        columns = {}
        for column in cursor:
            columns[column['name']] = {'type': column['type']}
        return columns

    # http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
    def _dict_factory(self, cursor, row):
        d = {}
        for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
        return d


def sanityCheckDatabase(connection, sanity_check):
    sanity_check(connection).check()


class DBSanityCheck(object):
    def __init__(self, connection):
        self.connection = connection

    def check(self):
        pass


# ===============
# = Upgrade API =
# ===============

def upgradeDatabase(connection, schema):
    logger.log(u"Checking database structure...", logger.MESSAGE)
    _processUpgrade(connection, schema)


def prettyName(class_name):
    return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])


def _processUpgrade(connection, upgradeClass):
    instance = upgradeClass(connection)
    logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
    if not instance.test():
        logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.MESSAGE)
        try:
            instance.execute()
        except sqlite3.DatabaseError, e:
            print "Error in " + str(upgradeClass.__name__) + ": " + str(e)
            raise
        logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG)
    else:
        logger.log(upgradeClass.__name__ + " upgrade not required", logger.DEBUG)

    for upgradeSubClass in upgradeClass.__subclasses__():
        _processUpgrade(connection, upgradeSubClass)


# Base migration class. All future DB changes should be subclassed from this class
class SchemaUpgrade(object):
    def __init__(self, connection):
        self.connection = connection

    def hasTable(self, tableName):
        return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, )).fetchall()) > 0

    def hasColumn(self, tableName, column):
        return column in self.connection.tableInfo(tableName)

    def addColumn(self, table, column, type="NUMERIC", default=0):
        self.connection.action("ALTER TABLE %s ADD %s %s" % (table, column, type))
        self.connection.action("UPDATE %s SET %s = ?" % (table, column), (default,))

    def checkDBVersion(self):
        result = self.connection.select("SELECT db_version FROM db_version")
        if result:
            return int(result[0]["db_version"])
        else:
            return 0

    def incDBVersion(self):
        new_version = self.checkDBVersion() + 1
        self.connection.action("UPDATE db_version SET db_version = ?", [new_version])
        return new_version
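DBConnection.upsert() above is an UPDATE-then-INSERT: it first issues an UPDATE keyed on keyDict and, if total_changes did not move, falls back to an INSERT of both dictionaries. A small sketch of how the rest of this commit uses it (table and column names are from this commit, the values are illustrative):

```python
from nzbtomedia import nzbToMediaDB

myDB = nzbToMediaDB.DBConnection()

# The first call inserts the row; a second call with the same key only
# updates it, because "input_directory" is the PRIMARY KEY of downloads.
myDB.upsert("downloads",
            {"status": 0, "client_agent": "sabnzbd"},         # valueDict -> SET ...
            {"input_directory": "/downloads/complete/tv/x"})  # keyDict   -> WHERE ...

row = myDB.select("SELECT * FROM downloads WHERE input_directory=?",
                  ["/downloads/complete/tv/x"])[0]
print row["status"]  # default row_factory is sqlite3.Row, so access by column name
```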
@@ -5,18 +5,18 @@ import stat
import struct
import shutil
import time
import datetime
import nzbtomedia

from lib import requests
from lib import guessit
from nzbtomedia.extractor import extractor
from nzbtomedia.linktastic import linktastic
from nzbtomedia import logger
from nzbtomedia import logger, nzbToMediaDB
from nzbtomedia.synchronousdeluge.client import DelugeClient
from nzbtomedia.utorrent.client import UTorrentClient
from nzbtomedia.transmissionrpc.client import Client as TransmissionClient


def sanitizeFileName(name):
    '''
    >>> sanitizeFileName('a/b/c')
@@ -268,12 +268,12 @@ def WakeUp():
    logger.info("System with mac: %s has been woken. Continuing with the rest of the script." % (mac))


def convert_to_ascii(nzbName, dirName):
def convert_to_ascii(inputName, dirName):
    ascii_convert = int(nzbtomedia.CFG["ASCII"]["convert"])
    if ascii_convert == 0 or os.name == 'nt':  # just return if we don't want to convert or on windows os and "\" is replaced!.
        return nzbName, dirName
        return inputName, dirName

    nzbName2 = str(nzbName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
    inputName2 = str(inputName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
    dirName2 = str(dirName.decode('ascii', 'replace').replace(u'\ufffd', '_'))
    if dirName != dirName2:
        logger.info("Renaming directory:%s to: %s." % (dirName, dirName2))
@@ -284,9 +284,9 @@ def convert_to_ascii(nzbName, dirName):
        if filename != filename2:
            logger.info("Renaming file:%s to: %s." % (filename, filename2))
            shutil.move(filename, filename2)
    nzbName = nzbName2
    inputName = inputName2
    dirName = dirName2
    return nzbName, dirName
    return inputName, dirName


def parse_other(args):
@@ -616,21 +616,21 @@ def listMediaFiles(path, media=True, audio=True, meta=True, archives=True, ignor

    return files

def find_imdbid(dirName, nzbName):
def find_imdbid(dirName, inputName):
    imdbid = None

    logger.info('Attemping imdbID lookup for %s' % (nzbName))
    logger.info('Attemping imdbID lookup for %s' % (inputName))

    # find imdbid in dirName
    logger.info('Searching folder and file names for imdbID ...')
    m = re.search('(tt\d{7})', dirName+nzbName)
    m = re.search('(tt\d{7})', dirName+inputName)
    if m:
        imdbid = m.group(1)
        logger.info("Found imdbID [%s]" % imdbid)
        return imdbid

    logger.info('Searching IMDB for imdbID ...')
    guess = guessit.guess_movie_info(nzbName)
    guess = guessit.guess_movie_info(inputName)
    if guess:
        # Movie Title
        title = None
@@ -663,7 +663,7 @@ def find_imdbid(dirName, nzbName):
        logger.info("Found imdbID [%s]" % imdbid)
        return imdbid

    logger.warning('Unable to find a imdbID for %s' % (nzbName))
    logger.warning('Unable to find a imdbID for %s' % (inputName))

def extractFiles(src, dst=None):
    extracted_folder = []
@@ -702,5 +702,47 @@ def extractFiles(src, dst=None):
    except:
        logger.debug("Unable to remove file %s" % (inputFile))

def append_downloadID(dirName, download_id):
    return '%s.downloadID(%s)' % (dirName,download_id)
def backupVersionedFile(old_file, version):
    numTries = 0

    new_file = old_file + '.' + 'v' + str(version)

    while not os.path.isfile(new_file):
        if not os.path.isfile(old_file):
            logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
            break

        try:
            logger.log(u"Trying to back up " + old_file + " to " + new_file, logger.DEBUG)
            shutil.copy(old_file, new_file)
            logger.log(u"Backup done", logger.DEBUG)
            break
        except Exception, e:
            logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + str(e), logger.WARNING)
            numTries += 1
            time.sleep(1)
            logger.log(u"Trying again.", logger.DEBUG)

        if numTries >= 10:
            logger.log(u"Unable to back up " + old_file + " to " + new_file + " please do it manually.", logger.ERROR)
            return False

    return True


def update_downloadInfoStatus(inputDirectory, status):
    logger.db("Updating status of our download in the DB to %s" % (status))

    myDB = nzbToMediaDB.DBConnection()
    myDB.action("UPDATE downloads SET status=?, last_update=? WHERE input_directory=?",
                [status, datetime.date.today().toordinal(), inputDirectory])


def get_downloadInfo(inputDirectory, status):
    logger.db("Getting download info from the DB for directory %s" % (inputDirectory))

    myDB = nzbToMediaDB.DBConnection()
    sqlResults = myDB.select("SELECT * FROM downloads WHERE input_directory=? AND status=?",
                             [inputDirectory, status])

    return sqlResults
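The manual-run read path then closes the loop: get_downloadInfo() returns rows still marked status 0 for a directory, the caller reuses the stored client_agent/input_hash/input_id, and update_downloadInfoStatus() marks the row processed once post-processing succeeds. A condensed sketch mirroring the loops in the torrent and NZB entry scripts above (the directory value is illustrative; note that get_downloadInfo() returns a list of rows):

```python
from nzbtomedia.nzbToMediaUtil import get_downloadInfo, update_downloadInfoStatus

dirName = "/downloads/complete/movie/Some.Release"  # illustrative

clientAgent, inputHash, inputID = 'manual', None, None
downloadInfo = get_downloadInfo(dirName, 0)   # status 0 = not yet processed
if downloadInfo:
    clientAgent = downloadInfo[0]['client_agent']
    inputHash = downloadInfo[0]['input_hash']
    inputID = downloadInfo[0]['input_id']

# ... run processTorrent()/process() with the recovered variables ...

update_downloadInfoStatus(dirName, 1)         # mark as successfully processed
```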