New API call for CouchPotato

This commit is contained in:
clinton-hall 2014-03-18 16:20:59 +10:30
parent c2f10b832c
commit 0a9c8030e5
2 changed files with 62 additions and 105 deletions
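
For orientation, here is a minimal sketch (not part of the commit) of the paginated media.list call the updated script relies on, written in the same Python 2 style as the script itself. The base URL and API key are placeholders; only the endpoint, query parameters, and field names mirror what the diff below reads from the response.

import json
import urllib

# Placeholder connection details -- substitute your own CouchPotatoServer host, port and API key.
baseURL = "http://localhost:5050/api/APIKEY/"

offset = 0
movies = []
while True:
    # Active movies with snatched releases, paged 50 at a time via limit_offset,
    # the same URL the updated get_movie_info() builds.
    url = baseURL + "media.list/?status=active&release_status=snatched&limit_offset=50," + str(offset)
    result = json.load(urllib.urlopen(url))
    batch = result.get("movies", [])
    movies.extend(batch)
    if len(batch) < 50:  # short page means no more results
        break
    offset += 50

for movie in movies:
    # New response layout: "_id" replaces "id", the imdb id sits directly under
    # "identifier", and releases/status are embedded in the listing itself.
    print movie["_id"], movie["identifier"], movie["status"], len(movie["releases"])

Paging in blocks of 50 and breaking on a short page matches the loop in the diff and avoids pulling the entire wanted list in a single request.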

@@ -1,4 +1,4 @@
import sys
import urllib
import os
import shutil
@@ -44,14 +44,19 @@ def get_imdb(nzbName, dirName):
return ""
def get_movie_info(baseURL, imdbid, download_id):
if not imdbid and not download_id:
return "", None, imdbid
movie_id = ""
if not imdbid and not download_id:
return movie_id, imdbid, download_id, None, None
releaselist = []
movieid = []
moviestatus = []
library = []
release = []
movie_status = None
release_status = None
offset = int(0)
while True:
url = baseURL + "media.list/?status=active&release_status=snatched&limit_offset=50," + str(offset)
@@ -66,59 +71,65 @@ def get_movie_info(baseURL, imdbid, download_id):
movieid2 = []
library2 = []
release2 = []
moviestatus2 = []
try:
result = json.load(urlObj)
movieid2 = [item["id"] for item in result["movies"]]
library2 = [item["library"]["identifier"] for item in result["movies"]]
movieid2 = [item["_id"] for item in result["movies"]]
library2 = [item["identifier"] for item in result["movies"]]
release2 = [item["releases"] for item in result["movies"]]
moviestatus2 = [item["status"] for item in result["movies"]]
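# new CPS response layout: the movie id is "_id", the imdb id sits directly under "identifier", and each movie embeds its "releases" and "status"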
except:
Logger.exception("Unable to parse json data for movies")
break
movieid.extend(movieid2)
moviestatus.extend(moviestatus2)
library.extend(library2)
release.extend(release2)
if len(movieid2) < int(50): # finished parsing list of movies. Time to break.
break
offset = offset + 50
result = None # reset
for index in range(len(movieid)):
if not imdbid:
url = baseURL + "media.get/?id=" + str(movieid[index])
Logger.debug("Opening URL: %s", url)
try:
urlObj = urllib.urlopen(url)
except:
Logger.exception("Unable to open URL")
return "", None, imdbid
try:
result = json.load(urlObj)
releaselist = [item["info"]["download_id"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
except:
Logger.exception("Unable to parse json data for releases")
return "", None, imdbid
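# releases are now embedded in the media.list payload, so the new code filters them locally instead of calling media.get for every movie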
releaselist1 = [item for item in release[index] if item["status"] == "snatched" and "download_info" in item]
if download_id:
releaselist = [item for item in releaselist1 if item["download_info"]["id"].lower() == download_id.lower()]
else:
releaselist = releaselist1
if len(releaselist) > 0:
movie_id = str(movieid[index])
imdbid = str(library[index])
Logger.info("Found movie id %s and imdb %s in database via download_id %s", movie_id, imdbid, download_id)
break
else:
continue
if library[index] == imdbid:
if imdbid and library[index] == imdbid:
movie_id = str(movieid[index])
Logger.info("Found movie id %s in CPS database for movie %s", movie_id, imdbid)
break
movie_status = str(moviestatus[index])
Logger.info("Found movie id %s with status %s in CPS database for movie %s", movie_id, movie_status, imdbid)
if not download_id and len(releaselist) == 1:
download_id = releaselist[0]["download_info"]["id"]
elif not imdbid and download_id and len(releaselist) > 0:
movie_id = str(movieid[index])
movie_status = str(moviestatus[index])
imdbid = str(library[index])
Logger.info("Found movie id %s and imdb %s with status %s in CPS database via download_id %s", movie_id, imdbid, movie_status, download_id)
else:
continue
if len(releaselist) == 1:
release_status = releaselist[0]["status"]
Logger.debug("Found a single release with download_id: %s. Release status is: %s", download_id, release_status)
break
if not movie_id:
Logger.exception("Could not parse database results to determine imdbid or movie id")
return movie_id, result, imdbid
return movie_id, imdbid, download_id, movie_status, release_status
def get_status(baseURL, movie_id, clientAgent, download_id, result=None):
def get_status(baseURL, movie_id, download_id):
if not movie_id:
return "", clientAgent, "none", "none"
return None, None
Logger.debug("Looking for status of movie: %s - with release sent to clientAgent: %s and download_id: %s", movie_id, clientAgent, download_id)
if not result: # we haven't already called media.get
@@ -129,71 +140,18 @@ def get_status(baseURL, movie_id, clientAgent, download_id, result=None):
urlObj = urllib.urlopen(url)
except:
Logger.exception("Unable to open URL")
return "", clientAgent, "none", "none"
result = json.load(urlObj)
return None, None
try:
movie_status = result["media"]["status"]["identifier"]
result = json.load(urlObj)
movie_status = str(result["media"]["status"])
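# the status field now comes back as a plain string rather than a nested object with an "identifier" key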
release_status = None # for now... keep this as a place holder.
Logger.debug("This movie is marked as status %s in CouchPotatoServer", movie_status)
except: # index out of range/doesn't exist?
Logger.exception("Could not find a status for this movie")
movie_status = ""
try:
release_status = "none"
if download_id != "" and download_id != "none": # we have the download id from the downloader. Let's see if it's valid.
release_statuslist = [item["status"]["identifier"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
clientAgentlist = [item["info"]["download_downloader"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower()]
if len(release_statuslist) == 1: # we have found a release by this id. :)
release_status = release_statuslist[0]
clientAgent = clientAgentlist[0]
Logger.debug("Found a single release with download_id: %s for clientAgent: %s. Release status is: %s", download_id, clientAgent, release_status)
return movie_status, clientAgent, download_id, release_status
elif len(release_statuslist) > 1: # we have found many releases by this id. Check for snatched status
clients = [item for item in clientAgentlist if item.lower() == clientAgent.lower()]
clientAgent = clients[0]
if len(clients) == 1: # ok.. a unique entry for download_id and clientAgent ;)
release_status = [item["status"]["identifier"] for item in result["media"]["releases"] if "download_id" in item["info"] and item["info"]["download_id"].lower() == download_id.lower() and item["info"]["download_downloader"] == clientAgent][0]
Logger.debug("Found a single release for download_id: %s and clientAgent: %s. Release status is: %s", download_id, clientAgent, release_status)
else: # doesn't matter. only really used as secondary confirmation of movie status change. Let's continue.
Logger.debug("Found several releases for download_id: %s and clientAgent: %s. Cannot determine the release status", download_id, clientAgent)
return movie_status, clientAgent, download_id, release_status
else: # clearly the id we were passed doesn't match the database. Reset it and search all snatched releases.... hence the next if (not elif ;) )
download_id = ""
if download_id == "none": # if we couldn't find this initially, there is no need to check next time around.
return movie_status, clientAgent, download_id, release_status
elif download_id == "": # in case we didn't get this from the downloader.
download_idlist = [item["info"]["download_id"] for item in result["media"]["releases"] if item["status"]["identifier"] == "snatched"]
clientAgentlist = [item["info"]["download_downloader"] for item in result["media"]["releases"] if item["status"]["identifier"] == "snatched"]
if len(clientAgentlist) == 1:
if clientAgent == "manual":
clientAgent = clientAgentlist[0]
download_id = download_idlist[0]
release_status = "snatched"
elif clientAgent.lower() == clientAgentlist[0].lower():
download_id = download_idlist[0]
clientAgent = clientAgentlist[0]
release_status = "snatched"
Logger.debug("Found a single download_id: %s and clientAgent: %s. Release status is: %s", download_id, clientAgent, release_status)
elif clientAgent == "manual":
download_id = "none"
release_status = "none"
else:
index = [index for index in range(len(clientAgentlist)) if clientAgentlist[index].lower() == clientAgent.lower()]
if len(index) == 1:
download_id = download_idlist[index[0]]
clientAgent = clientAgentlist[index[0]]
release_status = "snatched"
Logger.debug("Found download_id: %s for clientAgent: %s. Release status is: %s", download_id, clientAgent, release_status)
else:
Logger.info("Found a total of %s releases snatched for clientAgent: %s. Cannot determine download_id. Will perform a renamer scan to try and process.", len(index), clientAgent)
download_id = "none"
release_status = "none"
else: #something went wrong here.... we should never get to this.
Logger.info("Could not find a download_id in the database for this movie")
release_status = "none"
except: # index out of range/doesn't exist?
Logger.exception("Could not find a download_id for this movie")
download_id = "none"
return movie_status, clientAgent, download_id, release_status
movie_status = None
release_status = None
return movie_status, release_status
def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id = "", inputCategory=None):
@@ -254,9 +212,7 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
baseURL = protocol + host + ":" + port + web_root + "/api/" + apikey + "/"
movie_id, result, imdbid = get_movie_info(baseURL, imdbid, download_id) # get the CPS database movie id for this movie.
initial_status, clientAgent, download_id, initial_release_status = get_status(baseURL, movie_id, clientAgent, download_id, result)
movie_id, imdbid, download_id, initial_status, initial_release_status = get_movie_info(baseURL, imdbid, download_id) # get the CPS database movie id for this movie.
process_all_exceptions(nzbName.lower(), dirName)
nzbName, dirName = convert_to_ascii(nzbName, dirName)
@@ -276,7 +232,7 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
command = "manage.update"
else:
command = "renamer.scan"
if clientAgent != "manual" and download_id != "none":
if clientAgent != "manual" and download_id != None:
if remoteCPS == 1:
command = command + "/?downloader=" + clientAgent + "&download_id=" + download_id
else:
@@ -337,10 +293,8 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
Logger.exception("Unable to delete folder %s", dirName)
return 0 # success
if nzbName == "Manual Run":
if nzbName == "Manual Run" or download_id == "none":
return 0 # success
if download_id == "none":
return 1 # just to be sure TorrentToMedia doesn't start deleting files, as we haven't verified the changed status.
# we will now check to see if CPS has finished renaming before returning to TorrentToMedia and unpausing.
socket.setdefaulttimeout(int(TimeOut)) #initialize socket timeout.
@@ -348,13 +302,13 @@ def process(dirName, nzbName=None, status=0, clientAgent = "manual", download_id
start = datetime.datetime.now() # set time for timeout
pause_for = int(wait_for) * 10 # keep this so we only ever have 6 complete loops. This may not be necessary now?
while (datetime.datetime.now() - start) < datetime.timedelta(minutes=wait_for): # only wait 2 (default) minutes, then return.
movie_status, clientAgent, download_id, release_status = get_status(baseURL, movie_id, clientAgent, download_id) # get the current status of this movie.
if movie_status != initial_status: # Something has changed. CPS must have processed this movie.
movie_status, release_status = get_status(baseURL, movie_id, download_id) # get the current status of this movie.
if movie_status and initial_status and movie_status != initial_status: # Something has changed. CPS must have processed this movie.
Logger.info("SUCCESS: This movie is now marked as status %s in CouchPotatoServer", movie_status)
return 0 # success
time.sleep(pause_for) # Just stop this looping infinitely and hogging resources for 2 minutes ;)
else:
if release_status != initial_release_status and release_status != "none": # Something has changed. CPS must have processed this movie.
if release_status and initial_release_status and release_status != initial_release_status: # Something has changed. CPS must have processed this movie.
Logger.info("SUCCESS: This release is now marked as status %s in CouchPotatoServer", release_status)
return 0 # success
else: # The status hasn't changed. We have waited 2 minutes, which is more than enough. uTorrent can resume seeding now.