Mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-21 13:53:15 -07:00)

Merge branch 'nightly' into dev
Commit: 9ac74615af
14 changed files with 1185 additions and 29 deletions
@@ -51,7 +51,7 @@ from core.databases import mainDB
 # Client Agents
 NZB_CLIENTS = ['sabnzbd', 'nzbget', 'manual']
-TORRENT_CLIENTS = ['transmission', 'deluge', 'utorrent', 'rtorrent', 'other', 'manual']
+TORRENT_CLIENTS = ['transmission', 'deluge', 'utorrent', 'rtorrent', 'qbittorrent', 'other', 'manual']

 # sabnzbd constants
 SABNZB_NO_OF_ARGUMENTS = 8
@@ -62,11 +62,11 @@ FORKS = {}
 FORK_DEFAULT = "default"
 FORK_FAILED = "failed"
 FORK_FAILED_TORRENT = "failed-torrent"
-FORK_SICKRAGETV = "sickragetv"
-FORK_SICKRAGE = "sickrage"
-FORK_SICKRAGE_API = "sickrage-api"
-FORK_MEDUSA = "medusa"
-FORK_SICKGEAR = "sickgear"
+FORK_SICKRAGETV = "SickRageTV"
+FORK_SICKRAGE = "SickRage"
+FORK_SICKRAGE_API = "SiCKRAGE-api"
+FORK_MEDUSA = "Medusa"
+FORK_SICKGEAR = "SickGear"
 FORKS[FORK_DEFAULT] = {"dir": None}
 FORKS[FORK_FAILED] = {"dirName": None, "failed": None}
 FORKS[FORK_FAILED_TORRENT] = {"dir": None, "failed": None, "process_method": None}
@@ -137,6 +137,11 @@ DELUGEPORT = None
 DELUGEUSR = None
 DELUGEPWD = None

+QBITTORRENTHOST = None
+QBITTORRENTPORT = None
+QBITTORRENTUSR = None
+QBITTORRENTPWD = None
+
 PLEXSSL = None
 PLEXHOST = None
 PLEXPORT = None
@@ -236,7 +241,7 @@ def initialize(section=None):
         DELETE_ORIGINAL, TORRENT_CHMOD_DIRECTORY, PASSWORDSFILE, USER_DELAY, USER_SCRIPT, USER_SCRIPT_CLEAN, USER_SCRIPT_MEDIAEXTENSIONS, \
         USER_SCRIPT_PARAM, USER_SCRIPT_RUNONCE, USER_SCRIPT_SUCCESSCODES, DOWNLOADINFO, CHECK_MEDIA, SAFE_MODE, \
         TORRENT_DEFAULTDIR, TORRENT_RESUME_ON_FAILURE, NZB_DEFAULTDIR, REMOTEPATHS, LOG_ENV, PID_FILE, MYAPP, ACHANNELS, ACHANNELS2, ACHANNELS3, \
-        PLEXSSL, PLEXHOST, PLEXPORT, PLEXTOKEN, PLEXSEC, TORRENT_RESUME, PAR2CMD
+        PLEXSSL, PLEXHOST, PLEXPORT, PLEXTOKEN, PLEXSEC, TORRENT_RESUME, PAR2CMD, QBITTORRENTHOST, QBITTORRENTPORT, QBITTORRENTUSR, QBITTORRENTPWD

     if __INITIALIZED__:
         return False
@@ -359,7 +364,7 @@ def initialize(section=None):
     if GROUPS == ['']:
         GROUPS = None

-    TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"]  # utorrent | deluge | transmission | rtorrent | vuze |other
+    TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"]  # utorrent | deluge | transmission | rtorrent | vuze | qbittorrent |other
     USELINK = CFG["Torrent"]["useLink"]  # no | hard | sym
     OUTPUTDIRECTORY = CFG["Torrent"]["outputDirectory"]  # /abs/path/to/complete/
     TORRENT_DEFAULTDIR = CFG["Torrent"]["default_downloadDirectory"]
@@ -387,6 +392,11 @@ def initialize(section=None):
     DELUGEUSR = CFG["Torrent"]["DelugeUSR"]  # mysecretusr
     DELUGEPWD = CFG["Torrent"]["DelugePWD"]  # mysecretpwr

+    QBITTORRENTHOST = CFG["Torrent"]["qBittorrenHost"]  # localhost
+    QBITTORRENTPORT = int(CFG["Torrent"]["qBittorrentPort"])  # 8080
+    QBITTORRENTUSR = CFG["Torrent"]["qBittorrentUSR"]  # mysecretusr
+    QBITTORRENTPWD = CFG["Torrent"]["qBittorrentPWD"]  # mysecretpwr
+
     REMOTEPATHS = CFG["Network"]["mount_points"] or []
     if REMOTEPATHS:
         if isinstance(REMOTEPATHS, list):
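For context, the reads above expect a [Torrent] block in autoProcessMedia.cfg. A minimal sketch that writes one, with illustrative values and the key spellings (including qBittorrenHost) taken verbatim from the code:

    from configobj import ConfigObj  # the config parser this project builds on

    cfg = ConfigObj("autoProcessMedia.cfg")
    if "Torrent" not in cfg:
        cfg["Torrent"] = {}
    cfg["Torrent"]["qBittorrenHost"] = "localhost"    # key spelling as read above
    cfg["Torrent"]["qBittorrentPort"] = "8080"
    cfg["Torrent"]["qBittorrentUSR"] = "mysecretusr"  # illustrative credentials
    cfg["Torrent"]["qBittorrentPWD"] = "mysecretpwr"
    cfg.write()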
@@ -75,7 +75,7 @@ class autoProcessMovie(object):
             if release['status'] not in ['snatched', 'downloaded', 'done']:
                 continue
             if download_id:
-                if download_id != release['download_info']['id']:
+                if download_id.lower() != release['download_info']['id'].lower():
                     continue

             id = release['_id']
@@ -185,7 +185,7 @@ class autoProcessMovie(object):
         media_id = None
         downloader = None
         release_status_old = None
-        if release and imdbid:
+        if release:
             try:
                 release_id = release.keys()[0]
                 media_id = release[release_id]['media_id']
@@ -4,6 +4,7 @@ import os
 import time
 import requests
 import core
+import json

 from core.nzbToMediaUtil import convert_to_ascii, remoteDir, listMediaFiles, server_responding
 from core.nzbToMediaSceneExceptions import process_all_exceptions
@@ -13,6 +14,39 @@ requests.packages.urllib3.disable_warnings()


 class autoProcessMusic(object):
+    def command_complete(self, url, params, headers, section):
+        try:
+            r = requests.get(url, params=params, headers=headers, stream=True, verify=False, timeout=(30, 60))
+        except requests.ConnectionError:
+            logger.error("Unable to open URL: {0}".format(url), section)
+            return None
+        if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+            logger.error("Server returned status {0}".format(r.status_code), section)
+            return None
+        else:
+            try:
+                return r.json()['state']
+            except (ValueError, KeyError):
+                # ValueError catches simplejson's JSONDecodeError and json's ValueError
+                logger.error("{0} did not return expected json data.".format(section), section)
+                return None
+
+    def CDH(self, url2, headers, section="MAIN"):
+        try:
+            r = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
+        except requests.ConnectionError:
+            logger.error("Unable to open URL: {0}".format(url2), section)
+            return False
+        if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
+            logger.error("Server returned status {0}".format(r.status_code), section)
+            return False
+        else:
+            try:
+                return r.json().get("enableCompletedDownloadHandling", False)
+            except ValueError:
+                # ValueError catches simplejson's JSONDecodeError and json's ValueError
+                return False
+
     def get_status(self, url, apikey, dirName):
         logger.debug("Attempting to get current status for release:{0}".format(os.path.basename(dirName)))

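A minimal sketch of how these helpers are driven (the import path, API key, and command id are illustrative; the polling pattern mirrors the Lidarr branch added further down):

    # illustrative import path and endpoint values
    from core.autoProcess.autoProcessMusic import autoProcessMusic

    headers = {"X-Api-Key": "0123456789abcdef"}  # placeholder API key
    base = "http://localhost:8686/api/v1"        # Lidarr's default port

    proc = autoProcessMusic()
    state = proc.command_complete("{0}/command/42".format(base), {}, headers, "Lidarr")
    if state in ["completed", "failed"]:
        print("command finished with state: {0}".format(state))
    if proc.CDH("{0}/config/downloadClient".format(base), headers, section="Lidarr"):
        print("Completed Download Handling is enabled")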
@@ -96,7 +130,10 @@ class autoProcessMusic(object):
         else:
             extract = int(cfg.get("extract", 0))

-        url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root)
+        if section == "Lidarr":
+            url = "{0}{1}:{2}{3}/api/v1".format(protocol, host, port, web_root)
+        else:
+            url = "{0}{1}:{2}{3}/api".format(protocol, host, port, web_root)
         if not server_responding(url):
             logger.error("Server did not respond. Exiting", section)
             return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]
@@ -119,11 +156,11 @@ class autoProcessMusic(object):
             core.extractFiles(dirName)
         inputName, dirName = convert_to_ascii(inputName, dirName)

-        if listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and status:
-            logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section)
-            status = 0
+        #if listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and status:
+        #    logger.info("Status shown as failed from Downloader, but valid video files found. Setting as successful.", section)
+        #    status = 0

-        if status == 0:
+        if status == 0 and section == "HeadPhones":

             params = {
                 'apikey': apikey,
@@ -149,6 +186,74 @@ class autoProcessMusic(object):
             logger.warning("The music album does not appear to have changed status after {0} minutes. Please check your Logs".format(wait_for), section)
             return [1, "{0}: Failed to post-process - No change in wanted status".format(section)]

+        elif status == 0 and section == "Lidarr":
+            url = "{0}{1}:{2}{3}/api/v1/command".format(protocol, host, port, web_root)
+            url2 = "{0}{1}:{2}{3}/api/v1/config/downloadClient".format(protocol, host, port, web_root)
+            headers = {"X-Api-Key": apikey}
+            if remote_path:
+                logger.debug("remote_path: {0}".format(remoteDir(dirName)), section)
+                data = {"name": "DownloadedAlbumScan", "path": remoteDir(dirName), "downloadClientId": download_id, "importMode": "Move"}
+            else:
+                logger.debug("path: {0}".format(dirName), section)
+                data = {"name": "DownloadedAlbumScan", "path": dirName, "downloadClientId": download_id, "importMode": "Move"}
+            if not download_id:
+                data.pop("downloadClientId")
+            data = json.dumps(data)
+            try:
+                logger.debug("Opening URL: {0} with data: {1}".format(url, data), section)
+                r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
+            except requests.ConnectionError:
+                logger.error("Unable to open URL: {0}".format(url), section)
+                return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
+
+            Success = False
+            Queued = False
+            Started = False
+            try:
+                res = json.loads(r.content)
+                scan_id = int(res['id'])
+                logger.debug("Scan started with id: {0}".format(scan_id), section)
+                Started = True
+            except Exception as e:
+                logger.warning("No scan id was returned due to: {0}".format(e), section)
+                scan_id = None
+                Started = False
+                return [1, "{0}: Failed to post-process - Unable to start scan".format(section)]
+
+            n = 0
+            params = {}
+            url = "{0}/{1}".format(url, scan_id)
+            while n < 6:  # set up wait_for minutes to see if command completes..
+                time.sleep(10 * wait_for)
+                command_status = self.command_complete(url, params, headers, section)
+                if command_status and command_status in ['completed', 'failed']:
+                    break
+                n += 1
+            if command_status:
+                logger.debug("The Scan command return status: {0}".format(command_status), section)
+            if not os.path.exists(dirName):
+                logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section)
+                return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+            elif command_status and command_status in ['completed']:
+                logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
+                return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
+            elif command_status and command_status in ['failed']:
+                logger.debug("The Scan command has failed. Renaming was not successful.", section)
+                # return [1, "%s: Failed to post-process %s" % (section, inputName) ]
+            if self.CDH(url2, headers, section=section):
+                logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
+                return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
+            else:
+                logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section)
+                return [1, "{0}: Failed to post-process {1}".format(section, inputName)]
+
         else:
-            logger.warning("FAILED DOWNLOAD DETECTED", section)
-            return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)]
+            if section == "Lidarr":
+                logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(section), section)
+                return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)]  # Return as failed to flag this in the downloader.
+            else:
+                logger.warning("FAILED DOWNLOAD DETECTED", section)
+                if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
+                    logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
+                    rmDir(dirName)
+                return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)]  # Return as failed to flag this in the downloader.
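For reference, the scan request body the block above produces; a sketch with illustrative path and download id (the command name and importMode value come straight from the code):

    import json

    # values are illustrative
    data = {"name": "DownloadedAlbumScan",
            "path": "/downloads/complete/music/Some.Album.2018",
            "downloadClientId": "SABnzbd_nzo_abc123",
            "importMode": "Move"}
    print(json.dumps(data))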
@@ -285,6 +285,9 @@ class autoProcessTV(object):
         if not apikey and username and password:
             login = "{0}{1}:{2}{3}/login".format(protocol, host, port, web_root)
             login_params = {'username': username, 'password': password}
+            r = s.get(login, verify=False, timeout=(30, 60))
+            if r.status_code == 401 and r.cookies.get('_xsrf'):
+                login_params['_xsrf'] = r.cookies.get('_xsrf')
             s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
         r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800))
     elif section == "NzbDrone":
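The added _xsrf handling mirrors how tornado-based forks such as SickRage protect their login form; a standalone sketch with an illustrative host and credentials:

    import requests

    s = requests.Session()
    login = "http://localhost:8081/login"  # illustrative SickRage-style host
    login_params = {'username': 'user', 'password': 'pass'}
    r = s.get(login, verify=False, timeout=(30, 60))
    if r.status_code == 401 and r.cookies.get('_xsrf'):
        # echo the XSRF cookie back as a form field, as the diff above does
        login_params['_xsrf'] = r.cookies.get('_xsrf')
    s.post(login, data=login_params, verify=False, timeout=(30, 60))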
@@ -21,8 +21,10 @@ def autoFork(section, inputCategory):
     apikey = cfg.get("apikey")
     ssl = int(cfg.get("ssl", 0))
     web_root = cfg.get("web_root", "")
+    replace = {'sickrage': 'SickRage', 'sickragetv': 'SickRageTV', 'sickgear': 'SickGear', 'medusa': 'Medusa'}
+    f1 = replace[cfg.get("fork", "auto")] if cfg.get("fork", "auto") in replace else cfg.get("fork", "auto")
     try:
-        fork = core.FORKS.items()[core.FORKS.keys().index(cfg.get("fork", "auto"))]
+        fork = core.FORKS.items()[core.FORKS.keys().index(f1)]
     except:
         fork = "auto"
     protocol = "https://" if ssl else "http://"
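A quick check of the normalization added above: only the lowercase spellings are remapped, everything else passes through untouched:

    replace = {'sickrage': 'SickRage', 'sickragetv': 'SickRageTV', 'sickgear': 'SickGear', 'medusa': 'Medusa'}
    for configured in ('sickrage', 'Medusa', 'auto'):
        f1 = replace[configured] if configured in replace else configured
        print(configured, '->', f1)
    # sickrage -> SickRage
    # Medusa -> Medusa
    # auto -> auto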
@@ -67,6 +69,9 @@ def autoFork(section, inputCategory):
             login = "{protocol}{host}:{port}{root}/login".format(
                 protocol=protocol, host=host, port=port, root=web_root)
             login_params = {'username': username, 'password': password}
+            r = s.get(login, verify=False, timeout=(30, 60))
+            if r.status_code == 401 and r.cookies.get('_xsrf'):
+                login_params['_xsrf'] = r.cookies.get('_xsrf')
             s.post(login, data=login_params, stream=True, verify=False)
             r = s.get(url, auth=(username, password), verify=False)
         except requests.ConnectionError:
@@ -263,6 +263,11 @@ class ConfigObj(configobj.ConfigObj, Section):
                 logger.warning("{x} category is set for CouchPotato and Radarr. "
                                "Please check your config in NZBGet".format
                                (x=os.environ['NZBPO_RACATEGORY']))
+        if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ:
+            if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']:
+                logger.warning("{x} category is set for HeadPhones and Lidarr. "
+                               "Please check your config in NZBGet".format
+                               (x=os.environ['NZBPO_LICATEGORY']))
         section = "Nzb"
         key = 'NZBOP_DESTDIR'
         if key in os.environ:
@@ -413,6 +418,25 @@ class ConfigObj(configobj.ConfigObj, Section):
             if os.environ[envCatKey] in CFG_NEW['CouchPotato'].sections:
                 CFG_NEW['CouchPotato'][envCatKey]['enabled'] = 0

+        section = "Lidarr"
+        envCatKey = 'NZBPO_LICATEGORY'
+        envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
+                   'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
+        cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
+                   'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
+        if envCatKey in os.environ:
+            for index in range(len(envKeys)):
+                key = 'NZBPO_LI{index}'.format(index=envKeys[index])
+                if key in os.environ:
+                    option = cfgKeys[index]
+                    value = os.environ[key]
+                    if os.environ[envCatKey] not in CFG_NEW[section].sections:
+                        CFG_NEW[section][os.environ[envCatKey]] = {}
+                    CFG_NEW[section][os.environ[envCatKey]][option] = value
+            CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
+            if os.environ[envCatKey] in CFG_NEW['CouchPotato'].sections:
+                CFG_NEW['CouchPotato'][envCatKey]['enabled'] = 0
+
         section = "Extensions"
         envKeys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
         cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
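To make the env-to-config mapping concrete, a simplified sketch with illustrative NZBGet option values (CFG_NEW reduced to plain dicts):

    import os

    # illustrative NZBGet options for a Lidarr category
    os.environ.update({'NZBPO_LICATEGORY': 'music', 'NZBPO_LIHOST': 'localhost', 'NZBPO_LIPORT': '8686'})

    envKeys = ['HOST', 'PORT']
    cfgKeys = ['host', 'port']
    cfg = {'Lidarr': {}}
    cat = os.environ['NZBPO_LICATEGORY']
    for index in range(len(envKeys)):
        key = 'NZBPO_LI{index}'.format(index=envKeys[index])
        if key in os.environ:
            cfg['Lidarr'].setdefault(cat, {})[cfgKeys[index]] = os.environ[key]
    print(cfg)  # {'Lidarr': {'music': {'host': 'localhost', 'port': '8686'}}}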
@@ -23,6 +23,7 @@ from core.linktastic import linktastic
 from core.synchronousdeluge.client import DelugeClient
 from core.utorrent.client import UTorrentClient
 from core.transmissionrpc.client import Client as TransmissionClient
+from core.qbittorrent.client import Client as qBittorrentClient
 from core import logger, nzbToMediaDB

 requests.packages.urllib3.disable_warnings()
@@ -588,6 +589,34 @@ def parse_vuze(args):

     return inputDirectory, inputName, inputCategory, inputHash, inputID

+
+def parse_qbittorrent(args):
+    # qbittorrent usage: C:\full\path\to\nzbToMedia\TorrentToMedia.py "%D|%N|%L|%I"
+    try:
+        input = args[1].split('|')
+    except:
+        input = []
+    try:
+        inputDirectory = os.path.normpath(input[0].replace('"', ''))
+    except:
+        inputDirectory = ''
+    try:
+        inputName = input[1].replace('"', '')
+    except:
+        inputName = ''
+    try:
+        inputCategory = input[2].replace('"', '')
+    except:
+        inputCategory = ''
+    try:
+        inputHash = input[3].replace('"', '')
+    except:
+        inputHash = ''
+    try:
+        inputID = input[3].replace('"', '')
+    except:
+        inputID = ''
+
+    return inputDirectory, inputName, inputCategory, inputHash, inputID
+
+
 def parse_args(clientAgent, args):
     clients = {
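To see the splitting concretely (note the hash doubles as the ID, since qBittorrent passes only %I); values are illustrative:

    # a single quoted argument in the "%D|%N|%L|%I" shape shown above
    args = ['TorrentToMedia.py', '"C:\\Downloads\\Some.Album"|"Some.Album"|"music"|abcdef0123456789']
    print(parse_qbittorrent(args))
    # ('C:\\Downloads\\Some.Album', 'Some.Album', 'music', 'abcdef0123456789', 'abcdef0123456789')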
@@ -596,6 +625,7 @@ def parse_args(clientAgent, args):
         'utorrent': parse_utorrent,
         'deluge': parse_deluge,
         'transmission': parse_transmission,
+        'qbittorrent': parse_qbittorrent,
         'vuze': parse_vuze,
     }

@@ -796,6 +826,14 @@ def create_torrent_class(clientAgent):
         except:
             logger.error("Failed to connect to Deluge")

+    if clientAgent == 'qbittorrent':
+        try:
+            logger.debug("Connecting to {0}: http://{1}:{2}".format(clientAgent, core.QBITTORRENTHOST, core.QBITTORRENTPORT))
+            tc = qBittorrentClient("http://{0}:{1}/".format(core.QBITTORRENTHOST, core.QBITTORRENTPORT))
+            tc.login(core.QBITTORRENTUSR, core.QBITTORRENTPWD)
+        except:
+            logger.error("Failed to connect to qBittorrent")
+
     return tc

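The same connection flow, standalone, using the wrapper class this commit adds; host, port, and credentials are illustrative:

    from core.qbittorrent.client import Client as qBittorrentClient

    tc = qBittorrentClient("http://localhost:8080/")  # qBittorrent WebUI default port
    tc.login("admin", "adminadmin")                   # placeholder credentials
    for torrent in tc.torrents(filter='downloading'):
        print(torrent['name'], torrent['hash'])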
@@ -808,6 +846,8 @@ def pause_torrent(clientAgent, inputHash, inputID, inputName):
             core.TORRENT_CLASS.stop_torrent(inputID)
         if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
             core.TORRENT_CLASS.core.pause_torrent([inputID])
+        if clientAgent == 'qbittorrent' and core.TORRENT_CLASS != "":
+            core.TORRENT_CLASS.pause(inputHash)
         time.sleep(5)
     except:
         logger.warning("Failed to stop torrent {0} in {1}".format(inputName, clientAgent))
@@ -824,6 +864,8 @@ def resume_torrent(clientAgent, inputHash, inputID, inputName):
             core.TORRENT_CLASS.start_torrent(inputID)
         if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
             core.TORRENT_CLASS.core.resume_torrent([inputID])
+        if clientAgent == 'qbittorrent' and core.TORRENT_CLASS != "":
+            core.TORRENT_CLASS.resume(inputHash)
         time.sleep(5)
     except:
         logger.warning("Failed to start torrent {0} in {1}".format(inputName, clientAgent))
@@ -840,6 +882,8 @@ def remove_torrent(clientAgent, inputHash, inputID, inputName):
             core.TORRENT_CLASS.remove_torrent(inputID, True)
         if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
             core.TORRENT_CLASS.core.remove_torrent(inputID, True)
+        if clientAgent == 'qbittorrent' and core.TORRENT_CLASS != "":
+            core.TORRENT_CLASS.delete(inputHash)
         time.sleep(5)
     except:
         logger.warning("Failed to delete torrent {0} in {1}".format(inputName, clientAgent))
@@ -862,6 +906,11 @@ def find_download(clientAgent, download_id):
         return True
     if clientAgent == 'deluge':
         return False
+    if clientAgent == 'qbittorrent':
+        torrents = core.TORRENT_CLASS.torrents()
+        for torrent in torrents:
+            if torrent['hash'] == download_id:
+                return True
     if clientAgent == 'sabnzbd':
         if "http" in core.SABNZBDHOST:
             baseURL = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
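The qbittorrent branch above amounts to a hash lookup. In isolation, reusing the tc client from the sketch earlier (the hash is an illustrative placeholder):

    download_id = "abcdef0123456789"  # illustrative infohash
    found = any(torrent['hash'] == download_id for torrent in tc.torrents())
    print(found)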
core/qbittorrent/__init__.py (new file, 1 addition)
@@ -0,0 +1 @@
+# coding=utf-8
core/qbittorrent/client.py (new file, 633 additions)
@@ -0,0 +1,633 @@
import requests
import json


class LoginRequired(Exception):
    def __str__(self):
        return 'Please login first.'


class Client(object):
    """class to interact with qBittorrent WEB API"""
    def __init__(self, url):
        if not url.endswith('/'):
            url += '/'
        self.url = url

        session = requests.Session()
        check_prefs = session.get(url + 'query/preferences')

        if check_prefs.status_code == 200:
            self._is_authenticated = True
            self.session = session

        elif check_prefs.status_code == 404:
            self._is_authenticated = False
            raise RuntimeError("""
                This wrapper only supports qBittorrent applications
                with version higher than 3.1.x.
                Please use the latest qBittorrent release.
                """)

        else:
            self._is_authenticated = False
    def _get(self, endpoint, **kwargs):
        """
        Method to perform GET request on the API.

        :param endpoint: Endpoint of the API.
        :param kwargs: Other keyword arguments for requests.

        :return: Response of the GET request.
        """
        return self._request(endpoint, 'get', **kwargs)

    def _post(self, endpoint, data, **kwargs):
        """
        Method to perform POST request on the API.

        :param endpoint: Endpoint of the API.
        :param data: POST DATA for the request.
        :param kwargs: Other keyword arguments for requests.

        :return: Response of the POST request.
        """
        return self._request(endpoint, 'post', data, **kwargs)

    def _request(self, endpoint, method, data=None, **kwargs):
        """
        Method to handle both GET and POST requests.

        :param endpoint: Endpoint of the API.
        :param method: Method of HTTP request.
        :param data: POST DATA for the request.
        :param kwargs: Other keyword arguments.

        :return: Response for the request.
        """
        final_url = self.url + endpoint

        if not self._is_authenticated:
            raise LoginRequired

        rq = self.session
        if method == 'get':
            request = rq.get(final_url, **kwargs)
        else:
            request = rq.post(final_url, data, **kwargs)

        request.raise_for_status()
        request.encoding = 'utf_8'

        if len(request.text) == 0:
            data = json.loads('{}')
        else:
            try:
                data = json.loads(request.text)
            except ValueError:
                data = request.text

        return data
    def login(self, username='admin', password='admin'):
        """
        Method to authenticate the qBittorrent Client.

        Declares a class attribute named ``session`` which
        stores the authenticated session if the login is correct.
        Else, shows the login error.

        :param username: Username.
        :param password: Password.

        :return: Response to login request to the API.
        """
        self.session = requests.Session()
        login = self.session.post(self.url + 'login',
                                  data={'username': username,
                                        'password': password})
        if login.text == 'Ok.':
            self._is_authenticated = True
        else:
            return login.text

    def logout(self):
        """
        Logout the current session.
        """
        response = self._get('logout')
        self._is_authenticated = False
        return response
    @property
    def qbittorrent_version(self):
        """
        Get qBittorrent version.
        """
        return self._get('version/qbittorrent')

    @property
    def api_version(self):
        """
        Get WEB API version.
        """
        return self._get('version/api')

    @property
    def api_min_version(self):
        """
        Get minimum WEB API version.
        """
        return self._get('version/api_min')

    def shutdown(self):
        """
        Shutdown qBittorrent.
        """
        return self._get('command/shutdown')
    def torrents(self, **filters):
        """
        Returns a list of torrents matching the supplied filters.

        :param filter: Current status of the torrents.
        :param category: Fetch all torrents with the supplied label.
        :param sort: Sort torrents by.
        :param reverse: Enable reverse sorting.
        :param limit: Limit the number of torrents returned.
        :param offset: Set offset (if less than 0, offset from end).

        :return: list() of torrent with matching filter.
        """
        params = {}
        for name, value in filters.items():
            # make sure that old 'status' argument still works
            name = 'filter' if name == 'status' else name
            params[name] = value

        return self._get('query/torrents', params=params)
    def get_torrent(self, infohash):
        """
        Get details of the torrent.

        :param infohash: INFO HASH of the torrent.
        """
        return self._get('query/propertiesGeneral/' + infohash.lower())

    def get_torrent_trackers(self, infohash):
        """
        Get trackers for the torrent.

        :param infohash: INFO HASH of the torrent.
        """
        return self._get('query/propertiesTrackers/' + infohash.lower())

    def get_torrent_webseeds(self, infohash):
        """
        Get webseeds for the torrent.

        :param infohash: INFO HASH of the torrent.
        """
        return self._get('query/propertiesWebSeeds/' + infohash.lower())

    def get_torrent_files(self, infohash):
        """
        Get list of files for the torrent.

        :param infohash: INFO HASH of the torrent.
        """
        return self._get('query/propertiesFiles/' + infohash.lower())
    @property
    def global_transfer_info(self):
        """
        Get JSON data of the global transfer info of qBittorrent.
        """
        return self._get('query/transferInfo')
    @property
    def preferences(self):
        """
        Get the current qBittorrent preferences.
        Can also be used to assign individual preferences.
        For setting multiple preferences at once,
        see ``set_preferences`` method.

        Note: Even if this is a ``property``,
        to fetch the current preferences dict, you are required
        to call it like a bound method.

        Wrong::

            qb.preferences

        Right::

            qb.preferences()

        """
        prefs = self._get('query/preferences')

        class Proxy(Client):
            """
            Proxy class to allow assignment of individual preferences.
            This class overrides some methods to ease things.

            Because of this, settings can be assigned like::

                In [5]: prefs = qb.preferences()

                In [6]: prefs['autorun_enabled']
                Out[6]: True

                In [7]: prefs['autorun_enabled'] = False

                In [8]: prefs['autorun_enabled']
                Out[8]: False

            """

            def __init__(self, url, prefs, auth, session):
                super(Proxy, self).__init__(url)
                self.prefs = prefs
                self._is_authenticated = auth
                self.session = session

            def __getitem__(self, key):
                return self.prefs[key]

            def __setitem__(self, key, value):
                kwargs = {key: value}
                return self.set_preferences(**kwargs)

            def __call__(self):
                return self.prefs

        return Proxy(self.url, prefs, self._is_authenticated, self.session)
    def sync(self, rid=0):
        """
        Sync the torrents by supplied LAST RESPONSE ID.
        Read more @ http://git.io/vEgXr

        :param rid: Response ID of last request.
        """
        return self._get('sync/maindata', params={'rid': rid})
    def download_from_link(self, link, **kwargs):
        """
        Download torrent using a link.

        :param link: URL Link or list of.
        :param savepath: Path to download the torrent.
        :param category: Label or Category of the torrent(s).

        :return: Empty JSON data.
        """
        # old:new format
        old_arg_map = {'save_path': 'savepath'}  # , 'label': 'category'}

        # convert old option names to new option names
        options = kwargs.copy()
        for old_arg, new_arg in old_arg_map.items():
            if options.get(old_arg) and not options.get(new_arg):
                options[new_arg] = options[old_arg]

        options['urls'] = link

        # workaround to send multipart/form-data request
        # http://stackoverflow.com/a/23131823/4726598
        dummy_file = {'_dummy': (None, '_dummy')}

        return self._post('command/download', data=options, files=dummy_file)
    def download_from_file(self, file_buffer, **kwargs):
        """
        Download torrent using a file.

        :param file_buffer: Single file() buffer or list of.
        :param save_path: Path to download the torrent.
        :param label: Label of the torrent(s).

        :return: Empty JSON data.
        """
        if isinstance(file_buffer, list):
            torrent_files = {}
            for i, f in enumerate(file_buffer):
                torrent_files.update({'torrents%s' % i: f})
        else:
            torrent_files = {'torrents': file_buffer}

        data = kwargs.copy()

        if data.get('save_path'):
            data.update({'savepath': data['save_path']})
        return self._post('command/upload', data=data, files=torrent_files)
    def add_trackers(self, infohash, trackers):
        """
        Add trackers to a torrent.

        :param infohash: INFO HASH of torrent.
        :param trackers: Trackers.
        """
        data = {'hash': infohash.lower(),
                'urls': trackers}
        return self._post('command/addTrackers', data=data)
    @staticmethod
    def _process_infohash_list(infohash_list):
        """
        Method to convert the infohash_list to qBittorrent API friendly values.

        :param infohash_list: List of infohash.
        """
        if isinstance(infohash_list, list):
            data = {'hashes': '|'.join([h.lower() for h in infohash_list])}
        else:
            data = {'hashes': infohash_list.lower()}
        return data
    def pause(self, infohash):
        """
        Pause a torrent.

        :param infohash: INFO HASH of torrent.
        """
        return self._post('command/pause', data={'hash': infohash.lower()})

    def pause_all(self):
        """
        Pause all torrents.
        """
        return self._get('command/pauseAll')

    def pause_multiple(self, infohash_list):
        """
        Pause multiple torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/pauseAll', data=data)
    def set_label(self, infohash_list, label):
        """
        Set the label on multiple torrents.
        IMPORTANT: OLD API method, kept as it is to avoid breaking stuff.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        data['label'] = label
        return self._post('command/setLabel', data=data)

    def set_category(self, infohash_list, category):
        """
        Set the category on multiple torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        data['category'] = category
        return self._post('command/setCategory', data=data)
    def resume(self, infohash):
        """
        Resume a paused torrent.

        :param infohash: INFO HASH of torrent.
        """
        return self._post('command/resume', data={'hash': infohash.lower()})

    def resume_all(self):
        """
        Resume all torrents.
        """
        return self._get('command/resumeAll')

    def resume_multiple(self, infohash_list):
        """
        Resume multiple paused torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/resumeAll', data=data)
    def delete(self, infohash_list):
        """
        Delete torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/delete', data=data)

    def delete_permanently(self, infohash_list):
        """
        Permanently delete torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/deletePerm', data=data)

    def recheck(self, infohash_list):
        """
        Recheck torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/recheck', data=data)
    def increase_priority(self, infohash_list):
        """
        Increase priority of torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/increasePrio', data=data)

    def decrease_priority(self, infohash_list):
        """
        Decrease priority of torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/decreasePrio', data=data)

    def set_max_priority(self, infohash_list):
        """
        Set torrents to maximum priority level.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/topPrio', data=data)

    def set_min_priority(self, infohash_list):
        """
        Set torrents to minimum priority level.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/bottomPrio', data=data)
    def set_file_priority(self, infohash, file_id, priority):
        """
        Set file of a torrent to a supplied priority level.

        :param infohash: INFO HASH of torrent.
        :param file_id: ID of the file to set priority.
        :param priority: Priority level of the file.
        """
        if priority not in [0, 1, 2, 7]:
            raise ValueError("Invalid priority, refer WEB-UI docs for info.")
        elif not isinstance(file_id, int):
            raise TypeError("File ID must be an int")

        data = {'hash': infohash.lower(),
                'id': file_id,
                'priority': priority}

        return self._post('command/setFilePrio', data=data)
    # Get-set global download and upload speed limits.

    def get_global_download_limit(self):
        """
        Get global download speed limit.
        """
        return self._get('command/getGlobalDlLimit')

    def set_global_download_limit(self, limit):
        """
        Set global download speed limit.

        :param limit: Speed limit in bytes.
        """
        return self._post('command/setGlobalDlLimit', data={'limit': limit})

    global_download_limit = property(get_global_download_limit,
                                     set_global_download_limit)

    def get_global_upload_limit(self):
        """
        Get global upload speed limit.
        """
        return self._get('command/getGlobalUpLimit')

    def set_global_upload_limit(self, limit):
        """
        Set global upload speed limit.

        :param limit: Speed limit in bytes.
        """
        return self._post('command/setGlobalUpLimit', data={'limit': limit})

    global_upload_limit = property(get_global_upload_limit,
                                   set_global_upload_limit)
    # Get-set download and upload speed limits of the torrents.
    def get_torrent_download_limit(self, infohash_list):
        """
        Get download speed limit of the supplied torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/getTorrentsDlLimit', data=data)

    def set_torrent_download_limit(self, infohash_list, limit):
        """
        Set download speed limit of the supplied torrents.

        :param infohash_list: Single or list() of infohashes.
        :param limit: Speed limit in bytes.
        """
        data = self._process_infohash_list(infohash_list)
        data.update({'limit': limit})
        return self._post('command/setTorrentsDlLimit', data=data)

    def get_torrent_upload_limit(self, infohash_list):
        """
        Get upload speed limit of the supplied torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/getTorrentsUpLimit', data=data)

    def set_torrent_upload_limit(self, infohash_list, limit):
        """
        Set upload speed limit of the supplied torrents.

        :param infohash_list: Single or list() of infohashes.
        :param limit: Speed limit in bytes.
        """
        data = self._process_infohash_list(infohash_list)
        data.update({'limit': limit})
        return self._post('command/setTorrentsUpLimit', data=data)
    # setting preferences
    def set_preferences(self, **kwargs):
        """
        Set preferences of qBittorrent.
        Read all possible preferences @ http://git.io/vEgDQ

        :param kwargs: set preferences in kwargs form.
        """
        json_data = "json={}".format(json.dumps(kwargs))
        headers = {'content-type': 'application/x-www-form-urlencoded'}
        return self._post('command/setPreferences', data=json_data,
                          headers=headers)

    def get_alternative_speed_status(self):
        """
        Get Alternative speed limits. (1/0)
        """
        return self._get('command/alternativeSpeedLimitsEnabled')

    alternative_speed_status = property(get_alternative_speed_status)

    def toggle_alternative_speed(self):
        """
        Toggle alternative speed limits.
        """
        return self._get('command/toggleAlternativeSpeedLimits')
    def toggle_sequential_download(self, infohash_list):
        """
        Toggle sequential download in supplied torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/toggleSequentialDownload', data=data)

    def toggle_first_last_piece_priority(self, infohash_list):
        """
        Toggle first/last piece priority of supplied torrents.

        :param infohash_list: Single or list() of infohashes.
        """
        data = self._process_infohash_list(infohash_list)
        return self._post('command/toggleFirstLastPiecePrio', data=data)

    def force_start(self, infohash_list, value=True):
        """
        Force start selected torrents.

        :param infohash_list: Single or list() of infohashes.
        :param value: Force start value (bool)
        """
        data = self._process_infohash_list(infohash_list)
        data.update({'value': json.dumps(value)})
        return self._post('command/setForceStart', data=data)
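Taken together, the wrapper reads naturally in use. A minimal sketch against an illustrative local instance (URL, credentials, infohash, and the preference key are placeholders, not taken from this commit):

    from core.qbittorrent.client import Client

    qb = Client("http://localhost:8080/")
    qb.login("admin", "adminadmin")  # placeholder credentials

    # add a torrent, then adjust it by its (placeholder) infohash
    qb.download_from_link("magnet:?xt=urn:btih:abcdef0123456789", savepath="/downloads")
    qb.pause("abcdef0123456789")
    qb.set_torrent_download_limit("abcdef0123456789", 1024 * 1024)  # 1 MiB/s
    qb.resume("abcdef0123456789")

    # preferences round-trip via the Proxy returned by the property
    prefs = qb.preferences()
    prefs['max_ratio'] = 2.0  # proxies to set_preferences(max_ratio=2.0)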