Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-20 21:33:18 -07:00)

Initial Commit
Commit 88daa3fb91
1311 changed files with 256240 additions and 0 deletions
lib/pygazelle/__init__.py (new file, 0 lines)
lib/pygazelle/api.py (new file, 419 lines)

@@ -0,0 +1,419 @@
#!/usr/bin/env python
#
# PyGazelle - https://github.com/cohena/pygazelle
# A Python implementation of the What.cd Gazelle JSON API
#
# Loosely based on the API implementation from 'whatbetter', by Zachary Denton
# See https://github.com/zacharydenton/whatbetter
from HTMLParser import HTMLParser

import sys
import json
import time
import requests as requests

from .user import User
from .artist import Artist
from .tag import Tag
from .request import Request
from .torrent_group import TorrentGroup
from .torrent import Torrent
from .category import Category
from .inbox import Mailbox

class LoginException(Exception):
    pass

class RequestException(Exception):
    pass

class GazelleAPI(object):
    last_request = time.time() # share amongst all api objects
    default_headers = {
        'Connection': 'keep-alive',
        'Cache-Control': 'max-age=0',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3)'\
                      'AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.79'\
                      'Safari/535.11',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9'\
                  ',*/*;q=0.8',
        'Accept-Encoding': 'gzip,deflate,sdch',
        'Accept-Language': 'en-US,en;q=0.8',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'}


    def __init__(self, username=None, password=None):
        self.session = requests.session()
        self.session.headers = self.default_headers
        self.username = username
        self.password = password
        self.authkey = None
        self.passkey = None
        self.userid = None
        self.logged_in_user = None
        self.default_timeout = 30
        self.cached_users = {}
        self.cached_artists = {}
        self.cached_tags = {}
        self.cached_torrent_groups = {}
        self.cached_torrents = {}
        self.cached_requests = {}
        self.cached_categories = {}
        self.site = "https://what.cd/"
        self.past_request_timestamps = []

    def wait_for_rate_limit(self):
        # maximum is 5 requests within 10 secs
        time_frame = 10
        max_reqs = 5

        slice_point = 0

        while len(self.past_request_timestamps) >= max_reqs:
            for i, timestamp in enumerate(self.past_request_timestamps):
                if timestamp < time.time() - time_frame:
                    slice_point = i + 1
                else:
                    break

            if slice_point:
                self.past_request_timestamps = self.past_request_timestamps[slice_point:]
            else:
                time.sleep(0.1)

    def logged_in(self):
        return self.logged_in_user is not None and self.logged_in_user.id == self.userid

    def _login(self):
        """
        Private method.
        Logs in user and gets authkey from server.
        """

        if self.logged_in():
            return

        self.wait_for_rate_limit()

        loginpage = 'https://what.cd/login.php'
        data = {'username': self.username,
                'password': self.password,
                'keeplogged': '1'}
        r = self.session.post(loginpage, data=data, timeout=self.default_timeout, headers=self.default_headers)
        self.past_request_timestamps.append(time.time())
        if r.status_code != 200:
            raise LoginException("Login returned status code %s" % r.status_code)

        try:
            accountinfo = self.request('index', autologin=False)
        except RequestException as e:
            raise LoginException("Login probably incorrect")
        if not accountinfo or 'id' not in accountinfo:
            raise LoginException("Login probably incorrect")
        self.userid = accountinfo['id']
        self.authkey = accountinfo['authkey']
        self.passkey = accountinfo['passkey']
        self.logged_in_user = User(self.userid, self)
        self.logged_in_user.set_index_data(accountinfo)

    def request(self, action, autologin=True, **kwargs):
        """
        Makes an AJAX request at a given action.
        Pass an action and relevant arguments for that action.
        """
        def make_request(action, **kwargs):
            ajaxpage = 'ajax.php'
            content = self.unparsed_request(ajaxpage, action, **kwargs)
            try:
                if not isinstance(content, text_type):
                    content = content.decode('utf-8')
                parsed = json.loads(content)
                if parsed['status'] != 'success':
                    raise RequestException
                return parsed['response']
            except ValueError:
                raise RequestException

        try:
            return make_request(action, **kwargs)
        except Exception as e:
            if autologin and not self.logged_in():
                self._login()
                return make_request(action, **kwargs)
            else:
                raise e

    def unparsed_request(self, sitepage, action, **kwargs):
        """
        Makes a generic HTTP request at a given page with a given action.
        Also pass relevant arguments for that action.
        """
        self.wait_for_rate_limit()

        url = "%s%s" % (self.site, sitepage)
        params = {'action': action}
        if self.authkey:
            params['auth'] = self.authkey
        params.update(kwargs)
        r = self.session.get(url, params=params, allow_redirects=False, timeout=self.default_timeout)

        if r.status_code == 302 and r.raw.headers['location'] == 'login.php':
            self.logged_in_user = None
            raise LoginException("User login expired")

        self.past_request_timestamps.append(time.time())
        return r.content

    def get_user(self, id):
        """
        Returns a User for the passed ID, associated with this API object. If the ID references the currently logged in
        user, the user returned will be pre-populated with the information from an 'index' API call. Otherwise, you'll
        need to call User.update_user_data(). This is done on demand to reduce unnecessary API calls.
        """
        id = int(id)
        if id == self.userid:
            return self.logged_in_user
        elif id in self.cached_users.keys():
            return self.cached_users[id]
        else:
            return User(id, self)

    def search_users(self, search_query):
        """
        Returns a list of users returned for the search query. You can search by name, part of name, and ID number. If
        one of the returned users is the currently logged-in user, that user object will be pre-populated with the
        information from an 'index' API call. Otherwise only the limited info returned by the search will be pre-pop'd.
        You can query more information with User.update_user_data(). This is done on demand to reduce unnecessary API calls.
        """
        response = self.request(action='usersearch', search=search_query)
        results = response['results']

        found_users = []
        for result in results:
            user = self.get_user(result['userId'])
            user.set_search_result_data(result)
            found_users.append(user)

        return found_users

    def get_inbox(self, page='1', sort='unread'):
        """
        Returns the inbox Mailbox for the logged in user
        """
        return Mailbox(self, 'inbox', page, sort)

    def get_sentbox(self, page='1', sort='unread'):
        """
        Returns the sentbox Mailbox for the logged in user
        """
        return Mailbox(self, 'sentbox', page, sort)

    def get_artist(self, id=None, name=None):
        """
        Returns an Artist for the passed ID, associated with this API object. You'll need to call Artist.update_data()
        if the artist hasn't already been cached. This is done on demand to reduce unnecessary API calls.
        """
        if id:
            id = int(id)
            if id in self.cached_artists.keys():
                artist = self.cached_artists[id]
            else:
                artist = Artist(id, self)
            if name:
                artist.name = HTMLParser().unescape(name)
        elif name:
            artist = Artist(-1, self)
            artist.name = HTMLParser().unescape(name)
        else:
            raise Exception("You must specify either an ID or a Name to get an artist.")

        return artist

    def get_tag(self, name):
        """
        Returns a Tag for the passed name, associated with this API object. If you know the count value for this tag,
        pass it to update the object. There is no way to query the count directly from the API, but it can be retrieved
        from other calls such as 'artist', however.
        """
        if name in self.cached_tags.keys():
            return self.cached_tags[name]
        else:
            return Tag(name, self)

    def get_request(self, id):
        """
        Returns a Request for the passed ID, associated with this API object. You'll need to call Request.update_data()
        if the request hasn't already been cached. This is done on demand to reduce unnecessary API calls.
        """
        id = int(id)
        if id in self.cached_requests.keys():
            return self.cached_requests[id]
        else:
            return Request(id, self)

    def get_torrent_group(self, id):
        """
        Returns a TorrentGroup for the passed ID, associated with this API object.
        """
        id = int(id)
        if id in self.cached_torrent_groups.keys():
            return self.cached_torrent_groups[id]
        else:
            return TorrentGroup(id, self)

    def get_torrent(self, id):
        """
        Returns a Torrent for the passed ID, associated with this API object.
        """
        id = int(id)
        if id in self.cached_torrents.keys():
            return self.cached_torrents[id]
        else:
            return Torrent(id, self)

    def get_torrent_from_info_hash(self, info_hash):
        """
        Returns a Torrent for the passed info hash (if one exists), associated with this API object.
        """
        try:
            response = self.request(action='torrent', hash=info_hash.upper())
        except RequestException:
            return None

        id = int(response['torrent']['id'])
        if id in self.cached_torrents.keys():
            torrent = self.cached_torrents[id]
        else:
            torrent = Torrent(id, self)

        torrent.set_torrent_complete_data(response)
        return torrent

    def get_category(self, id, name=None):
        """
        Returns a Category for the passed ID, associated with this API object.
        """
        id = int(id)
        if id in self.cached_categories.keys():
            cat = self.cached_categories[id]
        else:
            cat = Category(id, self)
        if name:
            cat.name = name
        return cat

    def get_top_10(self, type="torrents", limit=25):
        """
        Lists the top <limit> items of <type>. Type can be "torrents", "tags", or "users". Limit MUST be
        10, 25, or 100...it can't just be an arbitrary number (unfortunately). Results are organized into a list of hashes.
        Each hash contains the results for a specific time frame, like 'day', or 'week'. In the hash, the 'results' key
        contains a list of objects appropriate to the passed <type>.
        """

        response = self.request(action='top10', type=type, limit=limit)
        top_items = []
        if not response:
            raise RequestException
        for category in response:
            results = []
            if type == "torrents":
                for item in category['results']:
                    torrent = self.get_torrent(item['torrentId'])
                    torrent.set_torrent_top_10_data(item)
                    results.append(torrent)
            elif type == "tags":
                for item in category['results']:
                    tag = self.get_tag(item['name'])
                    results.append(tag)
            elif type == "users":
                for item in category['results']:
                    user = self.get_user(item['id'])
                    results.append(user)
            else:
                raise Exception("%s is an invalid type argument for GazelleAPI.get_top_ten()" % type)

            top_items.append({
                "caption": category['caption'],
                "tag": category['tag'],
                "limit": category['limit'],
                "results": results
            })

        return top_items

    def search_torrents(self, **kwargs):
        """
        Searches based on the args you pass and returns torrent groups filled with torrents.
        Pass strings unless otherwise specified.
        Valid search args:
            searchstr (any arbitrary string to search for)
            page (page to display -- default: 1)
            artistname (self explanatory)
            groupname (torrent group name, equivalent to album)
            recordlabel (self explanatory)
            cataloguenumber (self explanatory)
            year (self explanatory)
            remastertitle (self explanatory)
            remasteryear (self explanatory)
            remasterrecordlabel (self explanatory)
            remastercataloguenumber (self explanatory)
            filelist (can search for filenames found in torrent...unsure of formatting for multiple files)
            encoding (use constants in pygazelle.Encoding module)
            format (use constants in pygazelle.Format module)
            media (use constants in pygazelle.Media module)
            releasetype (use constants in pygazelle.ReleaseType module)
            haslog (int 1 or 0 to represent boolean, 100 for 100% only, -1 for < 100% / unscored)
            hascue (int 1 or 0 to represent boolean)
            scene (int 1 or 0 to represent boolean)
            vanityhouse (int 1 or 0 to represent boolean)
            freetorrent (int 1 or 0 to represent boolean)
            taglist (comma separated tag names)
            tags_type (0 for 'any' matching, 1 for 'all' matching)
            order_by (use constants in pygazelle.order module that start with by_ in their name)
            order_way (use way_ascending or way_descending constants in pygazelle.order)
            filter_cat (for each category you want to search, the param name must be filter_cat[catnum] and the value 1)
                ex. filter_cat[1]=1 turns on Music.
                    filter_cat[1]=1, filter_cat[2]=1 turns on music and applications. (two separate params and vals!)
                Category object ids return the correct int value for these. (verify?)

        Returns a dict containing keys 'curr_page', 'pages', and 'results'. Results contains a matching list of Torrents
        (they have a reference to their parent TorrentGroup).
        """

        response = self.request(action='browse', **kwargs)
        results = response['results']
        if len(results):
            curr_page = response['currentPage']
            pages = response['pages']
        else:
            curr_page = 1
            pages = 1

        matching_torrents = []
        for torrent_group_dict in results:
            torrent_group = self.get_torrent_group(torrent_group_dict['groupId'])
            torrent_group.set_torrent_search_data(torrent_group_dict)

            for torrent_dict in torrent_group_dict['torrents']:
                torrent_dict['groupId'] = torrent_group.id
                torrent = self.get_torrent(torrent_dict['torrentId'])
                torrent.set_torrent_search_data(torrent_dict)
                matching_torrents.append(torrent)

        return {'curr_page': curr_page, 'pages': pages, 'results': matching_torrents}

    def generate_torrent_link(self, id):
        url = "%storrents.php?action=download&id=%s&authkey=%s&torrent_pass=%s" %\
            (self.site, id, self.logged_in_user.authkey, self.logged_in_user.passkey)
        return url

    def save_torrent_file(self, id, dest):
        file_data = self.unparsed_request("torrents.php", 'download',
            id=id, authkey=self.logged_in_user.authkey, torrent_pass=self.logged_in_user.passkey)
        with open(dest, 'w+') as dest_file:
            dest_file.write(file_data)

if sys.version_info[0] == 3:
    text_type = str
else:
    text_type = unicode
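A minimal usage sketch for the client above (illustrative only, not part of this commit). It assumes lib/ is on sys.path and that the Gazelle endpoint is reachable; the credentials and search string are placeholders.

from pygazelle.api import GazelleAPI
from pygazelle import format as gazelle_format

api = GazelleAPI(username='example_user', password='example_pass')  # placeholder credentials

# The first request() call logs in automatically (autologin=True), and
# wait_for_rate_limit() keeps traffic to 5 requests per 10 seconds.
page = api.search_torrents(searchstr='some album', format=gazelle_format.FLAC)
print("page %s/%s, %d torrents" % (page['curr_page'], page['pages'], len(page['results'])))

for torrent in page['results']:
    # Each Torrent keeps a reference to its parent TorrentGroup.
    print("%s [%s] %s" % (torrent.group.name, torrent.encoding, api.generate_torrent_link(torrent.id)))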
lib/pygazelle/artist.py (new file, 85 lines)

@@ -0,0 +1,85 @@
from HTMLParser import HTMLParser

class InvalidArtistException(Exception):
    pass

class Artist(object):
    """
    This class represents an Artist. It is created knowing only its ID. To reduce API accesses, load information using
    Artist.update_data() only as needed.
    """
    def __init__(self, id, parent_api):
        self.id = id
        self.parent_api = parent_api
        self.name = None
        self.notifications_enabled = None
        self.has_bookmarked = None
        self.image = None
        self.body = None
        self.vanity_house = None
        self.tags = []
        self.similar_artists_and_score = {}
        self.statistics = None
        self.torrent_groups = []
        self.requests = []

        self.parent_api.cached_artists[self.id] = self # add self to cache of known Artist objects

    def update_data(self):
        if self.id > 0:
            response = self.parent_api.request(action='artist', id=self.id)
        elif self.name:
            self.name = HTMLParser().unescape(self.name)
            try:
                response = self.parent_api.request(action='artist', artistname=self.name)
            except Exception:
                self.name = self.name.split(" & ")[0]
                response = self.parent_api.request(action='artist', artistname=self.name)
        else:
            raise InvalidArtistException("Neither ID or Artist Name is valid, can't update data.")
        self.set_data(response)

    def set_data(self, artist_json_response):
        if self.id > 0 and self.id != artist_json_response['id']:
            raise InvalidArtistException("Tried to update an artists's information from an 'artist' API call with a different id." +
                                         " Should be %s, got %s" % (self.id, artist_json_response['id']) )
        elif self.name:
            self.id = artist_json_response['id']
            self.parent_api.cached_artists[self.id] = self

        self.name = HTMLParser().unescape(artist_json_response['name'])
        self.notifications_enabled = artist_json_response['notificationsEnabled']
        self.has_bookmarked = artist_json_response['hasBookmarked']
        self.image = artist_json_response['image']
        self.body = artist_json_response['body']
        self.vanity_house = artist_json_response['vanityHouse']

        self.tags = []
        for tag_dict in artist_json_response['tags']:
            tag = self.parent_api.get_tag(tag_dict['name'])
            tag.set_artist_count(self, tag_dict['count'])
            self.tags.append(tag)

        self.similar_artists_and_score = {}
        for similar_artist_dict in artist_json_response['similarArtists']:
            similar_artist = self.parent_api.get_artist(similar_artist_dict['artistId'])
            similar_artist.name = similar_artist_dict['name']
            self.similar_artists_and_score[similar_artist] = similar_artist_dict['score']

        self.statistics = artist_json_response['statistics']

        self.torrent_groups = []
        for torrent_group_item in artist_json_response['torrentgroup']:
            torrent_group = self.parent_api.get_torrent_group(torrent_group_item['groupId'])
            torrent_group.set_artist_group_data(torrent_group_item)
            self.torrent_groups.append(torrent_group)

        self.requests = []
        for request_json_item in artist_json_response['requests']:
            request = self.parent_api.get_request(request_json_item['requestId'])
            request.set_data(request_json_item)
            self.requests.append(request)

    def __repr__(self):
        return "Artist: %s - ID: %s" % (self.name, self.id)
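The same create-cheap, fetch-on-demand pattern the docstring describes, as a short sketch (illustrative only; 123 is a made-up ID and api is the object from the earlier sketch).

artist = api.get_artist(id=123)   # cached stub, no HTTP request yet
artist.update_data()              # a single 'artist' API call fills name, tags, torrent_groups, requests
print("%r: %d torrent groups, %d open requests" % (artist, len(artist.torrent_groups), len(artist.requests)))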
lib/pygazelle/category.py (new file, 13 lines)

@@ -0,0 +1,13 @@
class InvalidCategoryException(Exception):
    pass

class Category(object):
    def __init__(self, id, parent_api):
        self.id = id
        self.parent_api = parent_api
        self.name = None

        self.parent_api.cached_categories[self.id] = self # add self to cache of known Category objects

    def __repr__(self):
        return "Category: %s - id: %s" % (self.name, self.id)
lib/pygazelle/encoding.py (new file, 13 lines)

@@ -0,0 +1,13 @@
C192 = "192"
APS = "APS (VBR)"
V2 = "V2 (VBR)"
V1 = "V1 (VBR)"
C256 = "256"
APX = "APX (VBR)"
V0 = "V0 (VBR)"
C320 = "320"
LOSSLESS = "Lossless"
LOSSLESS_24 = "24bit Lossless"
V8 = "V8 (VBR)"

ALL_ENCODINGS = [C192, APS, V2, V1, C256, APX, V0, C320, LOSSLESS, LOSSLESS_24, V8]
lib/pygazelle/format.py (new file, 8 lines)

@@ -0,0 +1,8 @@
MP3 = "MP3"
FLAC = "FLAC"
AAC = "AAC"
AC3 = "AC3"
DTS = "DTS"
OGG_VORBIS = "Ogg Vorbis"

ALL_FORMATS = [MP3, FLAC, AAC, AC3, DTS, OGG_VORBIS]
lib/pygazelle/inbox.py (new file, 107 lines)

@@ -0,0 +1,107 @@
class MailboxMessage(object):
    def __init__(self, api, message):
        self.id = message['convId']
        self.conv = Conversation(api, self.id)
        self.subject = message['subject']
        self.unread = message['unread']
        self.sticky = message['sticky']
        self.fwd_id = message['forwardedId']
        self.fwd_name = message['forwardedName']
        self.sender_id = message['senderId']
        self.username = message['username']
        self.donor = message['donor']
        self.warned = message['warned']
        self.enabled = message['enabled']
        self.date = message['date']

    def __repr__(self):
        return "MailboxMessage ID %s - %s %s %s" % (self.id, self.subject, self.sender_id, self.username)


class ConversationMessage(object):
    def __init__(self, msg_resp):
        self.id = msg_resp['messageId']
        self.sender_id = msg_resp['senderId']
        self.sender_name = msg_resp['senderName']
        self.sent_date = msg_resp['sentDate']
        self.bb_body = msg_resp['bbBody']
        self.body = msg_resp['body']

    def __repr__(self):
        return "ConversationMessage ID %s - %s %s" % (self.id, self.sender_name, self.sent_date)


class Conversation(object):
    def __init__(self, api, conv_id):
        self.id = conv_id
        self.parent_api = api
        self.subject = None
        self.sticky = None
        self.messages = []

    def __repr__(self):
        return "Conversation ID %s - %s" % (self.id, self.subject)

    def set_conv_data(self, conv_resp):
        assert self.id == conv_resp['convId']
        self.subject = conv_resp['subject']
        self.sticky = conv_resp['sticky']
        self.messages = [ConversationMessage(m) for m in conv_resp['messages']]

    def update_conv_data(self):
        response = self.parent_api.request(action='inbox',
            type='viewconv', id=self.id)
        self.set_conv_data(response)


class Mailbox(object):
    """
    This class represents the logged in user's inbox/sentbox
    """
    def __init__(self, parent_api, boxtype='inbox', page='1', sort='unread'):
        self.parent_api = parent_api
        self.boxtype = boxtype
        self.current_page = page
        self.total_pages = None
        self.sort = sort
        self.messages = None

    def set_mbox_data(self, mbox_resp):
        """
        Takes parsed JSON response from 'inbox' action on api
        and updates the available subset of mailbox information.
        """
        self.current_page = mbox_resp['currentPage']
        self.total_pages = mbox_resp['pages']
        self.messages = \
            [MailboxMessage(self.parent_api, m) for m in mbox_resp['messages']]

    def update_mbox_data(self):
        response = self.parent_api.request(action='inbox',
            type=self.boxtype, page=self.current_page, sort=self.sort)
        self.set_mbox_data(response)

    def next_page(self):
        if not self.total_pages:
            raise ValueError("call update_mbox_data() first")
        total_pages = int(self.total_pages)
        cur_page = int(self.current_page)
        if cur_page < total_pages:
            return Mailbox(self.parent_api, self.boxtype,
                str(cur_page + 1), self.sort)
        raise ValueError("Already at page %d/%d" % (cur_page, total_pages))

    def prev_page(self):
        if not self.total_pages:
            raise ValueError("call update_mbox_data() first")
        total_pages = int(self.total_pages)
        cur_page = int(self.current_page)
        if cur_page > 1:
            return Mailbox(self.parent_api, self.boxtype,
                str(cur_page - 1), self.sort)
        raise ValueError("Already at page %d/%d" % (cur_page, total_pages))

    def __repr__(self):
        return "Mailbox: %s %s Page %s/%s" \
            % (self.boxtype, self.sort,
               self.current_page, self.total_pages)
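A short paging sketch for the Mailbox class above (illustrative only; reuses the api object from the earlier sketch). Note that next_page() returns a fresh, empty Mailbox that must be filled with another update_mbox_data() call.

mbox = api.get_inbox()          # defaults: page '1', sorted by unread
mbox.update_mbox_data()         # one 'inbox' API call per page
while True:
    for msg in mbox.messages:
        print("%r unread=%s" % (msg, msg.unread))
    try:
        mbox = mbox.next_page() # raises ValueError on the last page
    except ValueError:
        break
    mbox.update_mbox_data()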
lib/pygazelle/media.py (new file, 11 lines)

@@ -0,0 +1,11 @@
CD = "CD"
DVD = "DVD"
VINYL = "Vinyl"
SOUNDBOARD = "Soundboard"
SACD = "SACD"
DAT = "DAT"
CASETTE = "Casette"
WEB = "WEB"
BLU_RAY = "Blu-ray"

ALL_MEDIAS = [CD, DVD, VINYL, SOUNDBOARD, SACD, DAT, CASETTE, WEB, BLU_RAY]
lib/pygazelle/release_type.py (new file, 19 lines)

@@ -0,0 +1,19 @@
ALBUM = "Album"
SOUNDTRACK = "Soundtrack"
EP = "EP"
ANTHOLOGY = "Anthology"
COMPILATION = "Compilation"
DJ_MIX = "DJ Mix"
SINGLE = "Single"
LIVE_ALBUM = "Live album"
REMIX = "Remix"
BOOTLEG = "Bootleg"
INTERVIEW = "Interview"
MIXTAPE = "Mixtape"
UNKNOWN = "Unknown"

ALL_RELEASE_TYPES = [ALBUM, SOUNDTRACK, EP, ANTHOLOGY, COMPILATION, DJ_MIX, SINGLE, LIVE_ALBUM, REMIX, BOOTLEG,
                     INTERVIEW, MIXTAPE, UNKNOWN]

def get_int_val(release_type):
    return ALL_RELEASE_TYPES.index(release_type) + 1
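The helper above maps a release-type string to its 1-based position in ALL_RELEASE_TYPES. A quick check (illustrative only; whether these positions match Gazelle's numeric releasetype codes is an assumption, as the commit shows no caller):

from pygazelle import release_type

assert release_type.get_int_val(release_type.ALBUM) == 1   # first entry
assert release_type.get_int_val(release_type.EP) == 3      # third entry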
lib/pygazelle/request.py (new file, 29 lines)

@@ -0,0 +1,29 @@
class InvalidRequestException(Exception):
    pass

class Request(object):
    def __init__(self, id, parent_api):
        self.id = id
        self.parent_api = parent_api
        self.category = None
        self.title = None
        self.year = None
        self.time_added = None
        self.votes = None
        self.bounty = None

        self.parent_api.cached_requests[self.id] = self # add self to cache of known Request objects

    def set_data(self, request_item_json_data):
        if self.id != request_item_json_data['requestId']:
            raise InvalidRequestException("Tried to update a Request's information from a request JSON item with a different id." +
                                          " Should be %s, got %s" % (self.id, request_item_json_data['requestId']) )
        self.category = self.parent_api.get_category(request_item_json_data['categoryId'])
        self.title = request_item_json_data['title']
        self.year = request_item_json_data['year']
        self.time_added = request_item_json_data['timeAdded']
        self.votes = request_item_json_data['votes']
        self.bounty = request_item_json_data['bounty']

    def __repr__(self):
        return "Request: %s - ID: %s" % (self.title, self.id)
lib/pygazelle/tag.py (new file, 17 lines)

@@ -0,0 +1,17 @@
class Tag(object):
    def __init__(self, name, parent_api):
        self.name = name
        self.artist_counts = {}
        self.parent_api = parent_api

        self.parent_api.cached_tags[self.name] = self # add self to cache of known Tag objects

    def set_artist_count(self, artist, count):
        """
        Adds an artist to the known list of artists tagged with this tag (if necessary), and sets the count of times
        that that artist has been known to be tagged with this tag.
        """
        self.artist_counts[artist] = count

    def __repr__(self):
        return "Tag: %s" % self.name
lib/pygazelle/torrent.py (new file, 183 lines)

@@ -0,0 +1,183 @@
from HTMLParser import HTMLParser
import re

class InvalidTorrentException(Exception):
    pass

class Torrent(object):
    def __init__(self, id, parent_api):
        self.id = id
        self.parent_api = parent_api
        self.group = None
        self.media = None
        self.format = None
        self.encoding = None
        self.remaster_year = None
        self.remastered = None
        self.remaster_title = None
        self.remaster_record_label = None
        self.remaster_catalogue_number = None
        self.scene = None
        self.has_log = None
        self.has_cue = None
        self.log_score = None
        self.file_count = None
        self.free_torrent = None
        self.size = None
        self.leechers = None
        self.seeders = None
        self.snatched = None
        self.time = None
        self.has_file = None
        self.description = None
        self.file_list = []
        self.file_path = None
        self.user = None

        self.parent_api.cached_torrents[self.id] = self

    def set_torrent_complete_data(self, torrent_json_response):
        if self.id != torrent_json_response['torrent']['id']:
            raise InvalidTorrentException("Tried to update a Torrent's information from an 'artist' API call with a different id." +
                                          " Should be %s, got %s" % (self.id, torrent_json_response['id']) )

        self.group = self.parent_api.get_torrent_group(torrent_json_response['group']['id'])
        had_complete_list = self.group.has_complete_torrent_list
        self.group.set_group_data(torrent_json_response)
        self.group.has_complete_torrent_list = had_complete_list

        self.media = torrent_json_response['torrent']['media']
        self.format = torrent_json_response['torrent']['format']
        self.encoding = torrent_json_response['torrent']['encoding']
        self.remaster_year = torrent_json_response['torrent']['remasterYear']
        self.remastered = torrent_json_response['torrent']['remastered']
        self.remaster_title = torrent_json_response['torrent']['remasterTitle']
        self.remaster_record_label = torrent_json_response['torrent']['remasterRecordLabel']
        self.scene = torrent_json_response['torrent']['scene']
        self.has_log = torrent_json_response['torrent']['hasLog']
        self.has_cue = torrent_json_response['torrent']['hasCue']
        self.log_score = torrent_json_response['torrent']['logScore']
        self.file_count = torrent_json_response['torrent']['fileCount']
        self.free_torrent = torrent_json_response['torrent']['freeTorrent']
        self.size = torrent_json_response['torrent']['size']
        self.leechers = torrent_json_response['torrent']['leechers']
        self.seeders = torrent_json_response['torrent']['seeders']
        self.snatched = torrent_json_response['torrent']['snatched']
        self.time = torrent_json_response['torrent']['time']
        self.description = torrent_json_response['torrent']['description']
        self.file_list = [ re.match("(.+){{{(\d+)}}}", item).groups()
                           for item in torrent_json_response['torrent']['fileList'].split("|||") ] # tuple ( filename, filesize )
        self.file_path = torrent_json_response['torrent']['filePath']
        self.user = self.parent_api.get_user(torrent_json_response['torrent']['userId'])

    def set_torrent_artist_data(self, artist_torrent_json_response):
        if self.id != artist_torrent_json_response['id']:
            raise InvalidTorrentException("Tried to update a Torrent's information from an 'artist' API call with a different id." +
                                          " Should be %s, got %s" % (self.id, artist_torrent_json_response['id']) )

        self.group = self.parent_api.get_torrent_group(artist_torrent_json_response['groupId'])
        self.media = artist_torrent_json_response['media']
        self.format = artist_torrent_json_response['format']
        self.encoding = artist_torrent_json_response['encoding']
        self.remaster_year = artist_torrent_json_response['remasterYear']
        self.remastered = artist_torrent_json_response['remastered']
        self.remaster_title = artist_torrent_json_response['remasterTitle']
        self.remaster_record_label = artist_torrent_json_response['remasterRecordLabel']
        self.scene = artist_torrent_json_response['scene']
        self.has_log = artist_torrent_json_response['hasLog']
        self.has_cue = artist_torrent_json_response['hasCue']
        self.log_score = artist_torrent_json_response['logScore']
        self.file_count = artist_torrent_json_response['fileCount']
        self.free_torrent = artist_torrent_json_response['freeTorrent']
        self.size = artist_torrent_json_response['size']
        self.leechers = artist_torrent_json_response['leechers']
        self.seeders = artist_torrent_json_response['seeders']
        self.snatched = artist_torrent_json_response['snatched']
        self.time = artist_torrent_json_response['time']
        self.has_file = artist_torrent_json_response['hasFile']

    def set_torrent_group_data(self, group_torrent_json_response):
        if self.id != group_torrent_json_response['id']:
            raise InvalidTorrentException("Tried to update a Torrent's information from a 'torrentgroup' API call with a different id." +
                                          " Should be %s, got %s" % (self.id, group_torrent_json_response['id']) )

        self.group = self.parent_api.get_torrent_group(group_torrent_json_response['groupId'])
        self.media = group_torrent_json_response['media']
        self.format = group_torrent_json_response['format']
        self.encoding = group_torrent_json_response['encoding']
        self.remastered = group_torrent_json_response['remastered']
        self.remaster_year = group_torrent_json_response['remasterYear']
        self.remaster_title = group_torrent_json_response['remasterTitle']
        self.remaster_record_label = group_torrent_json_response['remasterRecordLabel']
        self.remaster_catalogue_number = group_torrent_json_response['remasterCatalogueNumber']
        self.scene = group_torrent_json_response['scene']
        self.has_log = group_torrent_json_response['hasLog']
        self.has_cue = group_torrent_json_response['hasCue']
        self.log_score = group_torrent_json_response['logScore']
        self.file_count = group_torrent_json_response['fileCount']
        self.size = group_torrent_json_response['size']
        self.seeders = group_torrent_json_response['seeders']
        self.leechers = group_torrent_json_response['leechers']
        self.snatched = group_torrent_json_response['snatched']
        self.free_torrent = group_torrent_json_response['freeTorrent']
        self.time = group_torrent_json_response['time']
        self.description = group_torrent_json_response['description']
        self.file_list = [ re.match("(.+){{{(\d+)}}}", item).groups()
                           for item in group_torrent_json_response['fileList'].split("|||") ] # tuple ( filename, filesize )
        self.file_path = group_torrent_json_response['filePath']
        self.user = self.parent_api.get_user(group_torrent_json_response['userId'])

    def set_torrent_search_data(self, search_torrent_json_response):
        if self.id != search_torrent_json_response['torrentId']:
            raise InvalidTorrentException("Tried to update a Torrent's information from a 'browse'/search API call with a different id." +
                                          " Should be %s, got %s" % (self.id, search_torrent_json_response['torrentId']) )

        # TODO: Add conditionals to handle torrents that aren't music
        self.group = self.parent_api.get_torrent_group(search_torrent_json_response['groupId'])
        self.remastered = search_torrent_json_response['remastered']
        self.remaster_year = search_torrent_json_response['remasterYear']
        self.remaster_title = search_torrent_json_response['remasterTitle']
        self.remaster_catalogue_number = search_torrent_json_response['remasterCatalogueNumber']
        self.media = search_torrent_json_response['media']
        self.format = search_torrent_json_response['format']
        self.encoding = search_torrent_json_response['encoding']
        self.has_log = search_torrent_json_response['hasLog']
        self.has_cue = search_torrent_json_response['hasCue']
        self.log_score = search_torrent_json_response['logScore']
        self.scene = search_torrent_json_response['scene']
        self.file_count = search_torrent_json_response['fileCount']
        self.size = search_torrent_json_response['size']
        self.seeders = search_torrent_json_response['seeders']
        self.leechers = search_torrent_json_response['leechers']
        self.snatched = search_torrent_json_response['snatches']
        self.free_torrent = search_torrent_json_response['isFreeleech'] or search_torrent_json_response['isPersonalFreeleech']
        self.time = search_torrent_json_response['time']

    def set_torrent_top_10_data(self, top_10_json_response):
        if self.id != top_10_json_response['torrentId']:
            raise InvalidTorrentException("Tried to update a Torrent's information from a 'browse'/search API call with a different id." +
                                          " Should be %s, got %s" % (self.id, top_10_json_response['torrentId']) )

        # TODO: Add conditionals to handle torrents that aren't music
        self.group = self.parent_api.get_torrent_group(top_10_json_response['groupId'])
        self.group.name = top_10_json_response['groupName']
        if not self.group.music_info and top_10_json_response['artist']:
            self.group.music_info = {'artists': [self.parent_api.get_artist(name=HTMLParser().unescape(top_10_json_response['artist']))]}
        self.remaster_title = top_10_json_response['remasterTitle']
        self.media = top_10_json_response['media']
        self.format = top_10_json_response['format']
        self.encoding = top_10_json_response['encoding']
        self.has_log = top_10_json_response['hasLog']
        self.has_cue = top_10_json_response['hasCue']
        self.scene = top_10_json_response['scene']
        self.seeders = top_10_json_response['seeders']
        self.leechers = top_10_json_response['leechers']
        self.snatched = top_10_json_response['snatched']


    def __repr__(self):
        if self.group:
            groupname = self.group.name
        else:
            groupname = "Unknown Group"
        return "Torrent: %s - %s - ID: %s" % (groupname, self.encoding, self.id)
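How the fileList pattern used above behaves, shown on a made-up value (illustrative only): Gazelle packs the listing as "name{{{size}}}" entries joined by "|||", and the regex splits each entry into a (filename, size-as-string) tuple.

import re

file_list = "01 - Intro.flac{{{12345678}}}|||folder.jpg{{{23456}}}"   # made-up example value
parsed = [re.match(r"(.+){{{(\d+)}}}", item).groups() for item in file_list.split("|||")]
# parsed == [('01 - Intro.flac', '12345678'), ('folder.jpg', '23456')]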
lib/pygazelle/torrent_group.py (new file, 139 lines)

@@ -0,0 +1,139 @@
from .torrent import Torrent

class InvalidTorrentGroupException(Exception):
    pass

class TorrentGroup(object):
    """
    Represents a Torrent Group (usually an album). Note that TorrentGroup.torrents may not be comprehensive if you
    haven't called TorrentGroup.update_group_data()...it may have only been populated with filtered search results.
    Check TorrentGroup.has_complete_torrent_list (boolean) to be sure.
    """
    def __init__(self, id, parent_api):
        self.id = id
        self.parent_api = parent_api
        self.name = None
        self.wiki_body = None
        self.wiki_image = None
        self.year = None
        self.record_label = None
        self.catalogue_number = None
        self.tags = []
        self.release_type = None
        self.vanity_house = None
        self.has_bookmarked = None
        self.category = None
        self.time = None
        self.music_info = None
        self.torrents = []
        self.has_complete_torrent_list = False

        self.parent_api.cached_torrent_groups[self.id] = self

    def update_group_data(self):
        response = self.parent_api.request(action='torrentgroup', id=self.id)
        self.set_group_data(response)

    def set_group_data(self, torrent_group_json_response):
        """
        Takes parsed JSON response from 'torrentgroup' action on api, and updates relevant information.
        To avoid problems, only pass in data from an API call that used this torrentgroup's ID as an argument.
        """
        if self.id != torrent_group_json_response['group']['id']:
            raise InvalidTorrentGroupException("Tried to update a TorrentGroup's information from an 'artist' API call with a different id." +
                                               " Should be %s, got %s" % (self.id, torrent_group_json_response['group']['groupId']) )

        self.name = torrent_group_json_response['group']['name']
        self.year = torrent_group_json_response['group']['year']
        self.wiki_body = torrent_group_json_response['group']['wikiBody']
        self.wiki_image = torrent_group_json_response['group']['wikiImage']
        self.record_label = torrent_group_json_response['group']['recordLabel']
        self.catalogue_number = torrent_group_json_response['group']['catalogueNumber']

        self.release_type = torrent_group_json_response['group']['releaseType']
        self.category = self.parent_api.get_category(torrent_group_json_response['group']['categoryId'],
                                                     torrent_group_json_response['group']['categoryName'])
        self.time = torrent_group_json_response['group']['time']
        self.vanity_house = torrent_group_json_response['group']['vanityHouse']

        self.music_info = torrent_group_json_response['group']['musicInfo']
        self.music_info['artists'] = [ self.parent_api.get_artist(artist['id'], artist['name'])
                                       for artist in self.music_info['artists'] ]
        self.music_info['with'] = [ self.parent_api.get_artist(artist['id'], artist['name'])
                                    for artist in self.music_info['with'] ]

        if 'torrents' in torrent_group_json_response:
            self.torrents = []
            for torrent_dict in torrent_group_json_response['torrents']:
                torrent_dict['groupId'] = self.id
                torrent = self.parent_api.get_torrent(torrent_dict['id'])
                torrent.set_torrent_group_data(torrent_dict)
                self.torrents.append(torrent)
            self.has_complete_torrent_list = True
        elif 'torrent' in torrent_group_json_response:
            torrent = self.parent_api.get_torrent(torrent_group_json_response['torrent']['id'])
            self.torrents.append(torrent)

    def set_artist_group_data(self, artist_group_json_response):
        """
        Takes torrentgroup section from parsed JSON response from 'artist' action on api, and updates relevant information.
        """
        if self.id != artist_group_json_response['groupId']:
            raise InvalidTorrentGroupException("Tried to update a TorrentGroup's information from an 'artist' API call with a different id." +
                                               " Should be %s, got %s" % (self.id, artist_group_json_response['groupId']) )

        self.name = artist_group_json_response['groupName']
        self.year = artist_group_json_response['groupYear']
        self.record_label = artist_group_json_response['groupRecordLabel']
        self.catalogue_number = artist_group_json_response['groupCatalogueNumber']

        self.tags = []
        for tag_name in artist_group_json_response['tags']:
            tag = self.parent_api.get_tag(tag_name)
            self.tags.append(tag)

        self.release_type = artist_group_json_response['releaseType']
        self.has_bookmarked = artist_group_json_response['hasBookmarked']

        self.torrents = []
        for torrent_dict in artist_group_json_response['torrent']:
            torrent = self.parent_api.get_torrent(torrent_dict['id'])
            torrent.set_torrent_artist_data(torrent_dict)
            self.torrents.append(torrent)
        self.has_complete_torrent_list = True

    def set_torrent_search_data(self, search_json_response):
        if self.id != search_json_response['groupId']:
            raise InvalidTorrentGroupException("Tried to update a TorrentGroup's information from an 'browse'/search API call with a different id." +
                                               " Should be %s, got %s" % (self.id, search_json_response['groupId']) )

        self.name = search_json_response['groupName']
        # purposefully ignoring search_json_response['artist']...the other data updates don't include it, would just get confusing
        self.tags = []
        for tag_name in search_json_response['tags']:
            tag = self.parent_api.get_tag(tag_name)
            self.tags.append(tag)
        # some of the below keys aren't in things like comics...should probably watch out for this elsewhere
        if 'bookmarked' in search_json_response.keys():
            self.has_bookmarked = search_json_response['bookmarked']
        if 'vanityHouse' in search_json_response.keys():
            self.vanity_house = search_json_response['vanityHouse']
        if 'groupYear' in search_json_response.keys():
            self.year = search_json_response['groupYear']
        if 'releaseType' in search_json_response.keys():
            self.release_type = search_json_response['releaseType']
        self.time = search_json_response['groupTime']
        if 'torrentId' in search_json_response.keys():
            search_json_response['torrents'] = [{'torrentId': search_json_response['torrentId']}]

        new_torrents = []
        for torrent_dict in search_json_response['torrents']:
            torrent_dict['groupId'] = self.id
            torrent = self.parent_api.get_torrent(torrent_dict['torrentId'])
            new_torrents.append(torrent)
        # torrent information gets populated in API search call, no need to duplicate that here
        self.torrents = self.torrents + new_torrents


    def __repr__(self):
        return "TorrentGroup: %s - ID: %s" % (self.name, self.id)
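Honoring has_complete_torrent_list before trusting .torrents, per the class docstring (illustrative only; 45678 is a made-up ID and api is the object from the earlier sketch).

group = api.get_torrent_group(45678)   # may have been populated only from filtered search results
if not group.has_complete_torrent_list:
    group.update_group_data()          # one 'torrentgroup' API call fetches the full listing
print("%r: %d torrents" % (group, len(group.torrents)))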
lib/pygazelle/user.py (new file, 108 lines)

@@ -0,0 +1,108 @@
class InvalidUserException(Exception):
    pass

class User(object):
    """
    This class represents a User, whether your own or someone else's. It is created knowing only its ID. To reduce
    API accesses, load information using User.update_index_data() or User.update_user_data only as needed.
    """
    def __init__(self, id, parent_api):
        self.id = id
        self.parent_api = parent_api
        self.username = None
        self.authkey = None
        self.passkey = None
        self.avatar = None
        self.is_friend = None
        self.profile_text = None
        self.notifications = None
        self.stats = None
        self.ranks = None
        self.personal = None
        self.community = None

        self.parent_api.cached_users[self.id] = self # add self to cache of known User objects

    def update_index_data(self):
        """
        Calls 'index' API action, then updates this User objects information with it.
        NOTE: Only call if this user is the logged-in user...throws InvalidUserException otherwise.
        """
        response = self.parent_api.request(action='index')
        self.set_index_data(response)

    def set_index_data(self, index_json_response):
        """
        Takes parsed JSON response from 'index' action on api, and updates the available subset of user information.
        ONLY callable if this User object represents the currently logged in user. Throws InvalidUserException otherwise.
        """
        if self.id != index_json_response['id']:
            raise InvalidUserException("Tried to update non-logged-in User's information from 'index' API call." +
                                       " Should be %s, got %s" % (self.id, index_json_response['id']) )

        self.username = index_json_response['username']

        self.authkey = index_json_response['authkey']
        self.passkey = index_json_response['passkey']
        self.notifications = index_json_response['notifications']
        if self.stats:
            self.stats = dict(self.stats.items() + index_json_response['userstats'].items()) # merge in new info
        else:
            self.stats = index_json_response['userstats']

        # cross pollinate some data that is located in multiple locations in API
        if self.personal:
            self.personal['class'] = self.stats['class']
            self.personal['passkey'] = self.passkey


    def update_user_data(self):
        response = self.parent_api.request(action='user', id=self.id)
        self.set_user_data(response)

    def set_user_data(self, user_json_response):
        """
        Takes parsed JSON response from 'user' action on api, and updates relevant user information.
        To avoid problems, only pass in user data from an API call that used this user's ID as an argument.
        """
        if self.username and self.username != user_json_response['username']:
            raise InvalidUserException("Tried to update a user's information from a 'user' API call with a different username." +
                                       " Should be %s, got %s" % (self.username, user_json_response['username']) )

        self.username = user_json_response['username']
        self.avatar = user_json_response['avatar']
        self.is_friend = user_json_response['isFriend']
        self.profile_text = user_json_response['profileText']
        if self.stats:
            self.stats = dict(self.stats.items() + user_json_response['stats'].items()) # merge in new info
        else:
            self.stats = user_json_response['stats']
        self.ranks = user_json_response['ranks']
        self.personal = user_json_response['personal']
        self.community = user_json_response['community']

        # cross pollinate some data that is located in multiple locations in API
        self.stats['class'] = self.personal['class']
        self.passkey = self.personal['passkey']

    def set_search_result_data(self, search_result_item):
        """
        Takes a single user result item from a 'usersearch' API call and updates user info.
        """
        if self.id != search_result_item['userId']:
            raise InvalidUserException("Tried to update existing user with another user's search result data (IDs don't match).")

        self.username = search_result_item['username']

        if not self.personal:
            self.personal = {}

        self.personal['donor'] = search_result_item['donor']
        self.personal['warned'] = search_result_item['warned']
        self.personal['enabled'] = search_result_item['enabled']
        self.personal['class'] = search_result_item['class']

    def __repr__(self):
        return "User: %s - ID: %s" % (self.username, self.id)