diff --git a/data/interfaces/default/home_stats.html b/data/interfaces/default/home_stats.html
new file mode 100644
index 00000000..fc625316
--- /dev/null
+++ b/data/interfaces/default/home_stats.html
@@ -0,0 +1,67 @@
+% if stats[0]['rows']:
+% else:
+    No stats for selected period.
+% endif
diff --git a/data/interfaces/default/index.html b/data/interfaces/default/index.html
index c718e476..f9300864 100644
--- a/data/interfaces/default/index.html
+++ b/data/interfaces/default/index.html
@@ -26,6 +26,24 @@
+    Statistics
+    | 7 days
+    | 30 days
+    | 90 days
+    Loading stats...
@@ -45,6 +63,19 @@ <%def name="javascriptIncludes()"> diff --git a/plexpy/plexwatch.py b/plexpy/plexwatch.py index e4eb0e22..be78218b 100644 --- a/plexpy/plexwatch.py +++ b/plexpy/plexwatch.py @@ -15,6 +15,7 @@ from plexpy import logger, helpers, request, datatables, config, db from xml.dom import minidom +from collections import defaultdict, Counter import plexpy import json @@ -562,4 +563,134 @@ class PlexWatch(object): user_info = {'user_id': user_id, 'user_thumb': user_thumb} - return user_info \ No newline at end of file + return user_info + + def get_home_stats(self, time_range='30'): + myDB = db.DBConnection() + + if not time_range.isdigit(): + time_range = '30' + + stats_queries = ["top_tv", "top_users", "top_platforms"] + home_stats = [] + + for stat in stats_queries: + if 'top_tv' in stat: + top_tv = [] + query = 'SELECT orig_title, COUNT(orig_title) as total_plays, grandparentRatingKey, MAX(time) as last_watch, xml ' \ + 'FROM %s ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'AND episode != "" ' \ + 'GROUP BY orig_title ' \ + 'ORDER BY total_plays DESC LIMIT 10' % (self.get_user_table_name(), time_range) + result = myDB.select(query) + + for item in result: + xml_data = helpers.latinToAscii(item[4]) + + try: + xml_parse = minidom.parseString(xml_data) + except: + logger.warn("Error parsing XML for Plexwatch database.") + return None + + xml_head = xml_parse.getElementsByTagName('opt') + if not xml_head: + logger.warn("Error parsing XML for Plexwatch database.") + return None + + for a in xml_head: + grandparent_thumb = self.get_xml_attr(a, 'grandparentThumb') + + row = {'orig_title': item[0], + 'total_plays': item[1], + 'rating_key': item[2], + 'last_play': item[3], + 'grandparent_thumb': grandparent_thumb + } + top_tv.append(row) + + home_stats.append({'stat_id': stat, + 'rows': top_tv}) + + elif 'top_users' in stat: + top_users = [] + query = 'SELECT user, COUNT(id) as total_plays, MAX(time) as last_watch ' \ + 'FROM %s ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'GROUP BY user ' \ + 'ORDER BY total_plays DESC LIMIT 10' % (self.get_user_table_name(), time_range) + result = myDB.select(query) + + for item in result: + thumb = self.get_user_gravatar_image(item[0]) + row = {'user': item[0], + 'total_plays': item[1], + 'last_play': item[2], + 'thumb': thumb['user_thumb'] + } + top_users.append(row) + + home_stats.append({'stat_id': stat, + 'rows': top_users}) + + elif 'top_platforms' in stat: + top_platform = [] + query = 'SELECT platform, COUNT(id) as total_plays, MAX(time) as last_watch, xml ' \ + 'FROM %s ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'GROUP BY platform ' \ + 'ORDER BY total_plays DESC' % (self.get_user_table_name(), time_range) + result = myDB.select(query) + + for item in result: + xml_data = helpers.latinToAscii(item[3]) + + try: + xml_parse = minidom.parseString(xml_data) + except: + logger.warn("Error parsing XML for Plexwatch database.") + return None + + xml_head = xml_parse.getElementsByTagName('Player') + if not xml_head: + logger.warn("Error parsing XML for Plexwatch database.") + return None + + for a in xml_head: + platform_type = self.get_xml_attr(a, 'platform') + + row = {'platform': item[0], + 'total_plays': item[1], + 'last_play': item[2], + 'platform_type': platform_type + } + top_platform.append(row) + + top_platform_aggr = self.group_and_sum_dataset( + 
+                    top_platform, 'platform_type', ['total_plays'], 'total_plays')
+
+                home_stats.append({'stat_id': stat,
+                                   'rows': top_platform_aggr})
+
+        return home_stats
+
+    # Taken from:
+    # https://stackoverflow.com/questions/18066269/group-by-and-aggregate-the-values-of-a-list-of-dictionaries-in-python
+    @staticmethod
+    def group_and_sum_dataset(dataset, group_by_key, sum_value_keys, sort_by_key):
+
+        container = defaultdict(Counter)
+
+        for item in dataset:
+            key = item[group_by_key]
+            values = {k: item[k] for k in sum_value_keys}
+            container[key].update(values)
+
+        new_dataset = [
+            dict([(group_by_key, item[0])] + item[1].items())
+            for item in container.items()
+        ]
+        new_dataset.sort(key=lambda item: item[sort_by_key], reverse=True)
+
+        return new_dataset
\ No newline at end of file
diff --git a/plexpy/webserve.py b/plexpy/webserve.py
index 23b331d6..b231f4de 100644
--- a/plexpy/webserve.py
+++ b/plexpy/webserve.py
@@ -83,6 +83,13 @@ class WebInterface(object):
         cherrypy.response.headers['Content-type'] = 'application/json'
         return json.dumps(formats)
 
+    @cherrypy.expose
+    def home_stats(self, time_range='30', **kwargs):
+        plex_watch = plexwatch.PlexWatch()
+        stats_data = plex_watch.get_home_stats(time_range)
+
+        return serve_template(templatename="home_stats.html", title="Stats", stats=stats_data)
+
     @cherrypy.expose
     def history(self):
         return serve_template(templatename="history.html", title="History")
@@ -671,6 +678,18 @@ class WebInterface(object):
         plex_watch = plexwatch.PlexWatch()
         result = plex_watch.get_user_gravatar_image(user)
 
+        if result:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps(result)
+        else:
+            logger.warn('Unable to retrieve data.')
+
+    @cherrypy.expose
+    def get_home_stats(self, time_range='30', **kwargs):
+
+        plex_watch = plexwatch.PlexWatch()
+        result = plex_watch.get_home_stats(time_range)
+
         if result:
             cherrypy.response.headers['Content-type'] = 'application/json'
             return json.dumps(result)
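
For reference, a minimal sketch of how the group_and_sum_dataset helper above folds the per-player rows from the 'top_platforms' branch into one entry per platform_type. The sample rows and printed result are illustrative only (made up for this example, not taken from a real PlexWatch database), and the call assumes the Python 2 environment the rest of plexwatch.py targets:

    from plexpy.plexwatch import PlexWatch

    # Rows shaped like the ones built in the 'top_platforms' branch of
    # get_home_stats(), before aggregation.
    sample = [
        {'platform_type': 'Chrome', 'total_plays': 4},
        {'platform_type': 'Roku', 'total_plays': 9},
        {'platform_type': 'Chrome', 'total_plays': 3},
    ]

    # Groups by 'platform_type', sums 'total_plays', and sorts descending
    # by 'total_plays'. Expected result:
    # [{'platform_type': 'Roku', 'total_plays': 9},
    #  {'platform_type': 'Chrome', 'total_plays': 7}]
    print(PlexWatch.group_and_sum_dataset(sample, 'platform_type',
                                          ['total_plays'], 'total_plays'))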