Try caching metadata for sessions

JonnyWong16 2018-01-03 13:36:26 -08:00
parent 1ae8544f2d
commit 12c9aa3d6a
3 changed files with 57 additions and 16 deletions
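At a glance, the diff adds a small per-session metadata cache: get_metadata_details() grows a cache_key argument, the processed metadata is written to metadata-sessionKey-<sessionKey>.json under the Tautulli cache directory with a _cache_time stamp, a cached copy younger than 30 minutes is returned without another lookup, and the file is deleted once the session leaves the queue. The following is a minimal standalone sketch of that pattern; CACHE_DIR, get_metadata_cached, _cache_path and the fetch callback are illustrative names, not part of the Tautulli code.

    import json
    import os
    import time

    CACHE_DIR = '/tmp/metadata-cache'   # illustrative stand-in for plexpy.CONFIG.CACHE_DIR
    CACHE_TTL = 30 * 60                 # same 30-minute window used in the diff

    def _cache_path(session_key):
        # One JSON file per active session, keyed by the Plex sessionKey
        return os.path.join(CACHE_DIR, 'metadata-sessionKey-%s.json' % session_key)

    def get_metadata_cached(session_key, fetch):
        """Return cached metadata if fresh enough, otherwise fetch and cache it."""
        try:
            with open(_cache_path(session_key), 'r') as in_file:
                cached = json.load(in_file)
        except (IOError, ValueError):
            cached = {}

        if cached:
            cache_time = cached.pop('_cache_time', 0)
            if int(time.time()) - cache_time <= CACHE_TTL:
                return cached

        metadata = fetch()  # e.g. a full metadata lookup against the Plex server
        if metadata:
            stamped = dict(metadata, _cache_time=int(time.time()))
            try:
                with open(_cache_path(session_key), 'w') as out_file:
                    json.dump(stamped, out_file)
            except IOError:
                pass
        return metadata

    def delete_metadata_cache(session_key):
        # Mirrors the cleanup helper the commit adds to the activity handler below
        try:
            os.remove(_cache_path(session_key))
        except OSError:
            pass

Keeping the timestamp inside the JSON payload avoids any separate index; staleness falls out of a single pop-and-compare on read.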

plexpy/activity_handler.py

@@ -14,7 +14,7 @@
 # along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
 
 import datetime
-import threading
+import os
 import time
 
 from apscheduler.schedulers.background import BackgroundScheduler
@@ -26,7 +26,6 @@ import datafactory
 import helpers
 import logger
 import notification_handler
-import notifiers
 import pmsconnect
@@ -75,9 +74,12 @@ class ActivityHandler(object):
         monitor_proc.write_session(session=session, notify=False)
 
     def on_start(self):
-        if self.is_valid_session() and self.get_live_session():
+        if self.is_valid_session():
             session = self.get_live_session()
 
+            if not session:
+                return
+
             # Some DLNA clients create a new session temporarily when browsing the library
             # Wait and get session again to make sure it is an actual session
             if session['platform'] == 'DLNA':
@@ -124,6 +126,7 @@ class ActivityHandler(object):
             logger.debug(u"Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue"
                          % (str(self.get_session_key()), str(self.get_rating_key())))
             ap.delete_session(session_key=self.get_session_key())
+            delete_metadata_cache(self.get_session_key())
 
     def on_pause(self, still_paused=False):
         if self.is_valid_session():
@@ -449,12 +452,13 @@ def force_stop_stream(session_key):
                                             args=[session_key], seconds=30)
 
     else:
-        logger.warn(u"Tautulli Monitor :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
+        logger.warn(u"Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
                     "Removing session from the database. Write attempt %s."
                     % (session['session_key'], session['rating_key'], str(session['write_attempts'])))
-        logger.info(u"Tautulli Monitor :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
+        logger.info(u"Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
                     % (session['session_key'], session['rating_key']))
         ap.delete_session(session_key=session_key)
+        delete_metadata_cache(session_key)
 
 
 def clear_recently_added_queue(rating_key):
@@ -519,3 +523,11 @@ def on_created(rating_key, **kwargs):
     else:
         logger.error(u"Tautulli TimelineHandler :: Unable to retrieve metadata for rating_key %s" % str(rating_key))
+
+
+def delete_metadata_cache(session_key):
+    try:
+        os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, 'metadata-sessionKey-%s.json' % session_key))
+    except IOError as e:
+        logger.error(u"Tautulli ActivityHandler :: Failed to remove metadata cache file (sessionKey %s): %s"
+                     % (session_key, e))
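The cleanup helper above runs both when a session stops normally and from the stale-stream path in force_stop_stream, so a session's cache file should not outlive the session itself. The filename convention it relies on can be checked in isolation; the cache directory below is only an example, Tautulli resolves the real one from plexpy.CONFIG.CACHE_DIR.

    import os

    cache_dir = '/opt/Tautulli/cache'   # example stand-in for plexpy.CONFIG.CACHE_DIR
    session_key = '87'                  # example Plex sessionKey

    print(os.path.join(cache_dir, 'metadata-sessionKey-%s.json' % session_key))
    # -> /opt/Tautulli/cache/metadata-sessionKey-87.json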

plexpy/http_handler.py

@@ -154,7 +154,7 @@ class HTTPHandler(object):
         try:
             if self.output_format == 'text':
                 output = response_content.decode('utf-8', 'ignore')
-            if self.output_format == 'dict':
+            elif self.output_format == 'dict':
                 output = helpers.convert_xml_to_dict(response_content)
             elif self.output_format == 'json':
                 output = helpers.convert_xml_to_json(response_content)
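The http_handler change is a one-word control-flow tidy-up: the 'dict' comparison used to be an independent if in front of an elif chain, so it was evaluated again even after the 'text' branch had matched. With elif, the three output formats form one mutually exclusive dispatch. A stripped-down, self-contained rendering of the fixed shape, with the conversion helpers stubbed for illustration:

    def parse_output(output_format, response_content):
        # With elif, exactly one branch is considered once a format matches
        if output_format == 'text':
            return response_content.decode('utf-8', 'ignore')
        elif output_format == 'dict':
            return {'xml': response_content}      # stand-in for helpers.convert_xml_to_dict
        elif output_format == 'json':
            return '{"xml": "..."}'               # stand-in for helpers.convert_xml_to_json
        return response_content

    print(parse_output('text', b'<MediaContainer size="1"/>'))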

plexpy/pmsconnect.py

@@ -13,6 +13,9 @@
 # You should have received a copy of the GNU General Public License
 # along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
 
+import json
+import os
+import time
 import urllib
 
 import plexpy
@@ -519,7 +522,7 @@ class PmsConnect(object):
 
         return output
 
-    def get_metadata_details(self, rating_key='', sync_id=''):
+    def get_metadata_details(self, rating_key='', sync_id='', cache_key=None):
         """
         Return processed and validated metadata list for requested item.
 
@@ -527,19 +530,33 @@ class PmsConnect(object):
         Output: array
         """
+        metadata = {}
+
+        if cache_key:
+            in_file_path = os.path.join(plexpy.CONFIG.CACHE_DIR, 'metadata-sessionKey-%s.json' % cache_key)
+            try:
+                with open(in_file_path, 'r') as inFile:
+                    metadata = json.load(inFile)
+            except IOError as e:
+                pass
+
+            if metadata:
+                _cache_time = metadata.pop('_cache_time', 0)
+                # Return cached metadata if less than 30 minutes ago
+                if int(time.time()) - _cache_time <= 1800:
+                    return metadata
+
         if rating_key:
-            metadata = self.get_metadata(str(rating_key), output_format='xml')
+            metadata_xml = self.get_metadata(str(rating_key), output_format='xml')
         elif sync_id:
-            metadata = self.get_sync_item(str(sync_id), output_format='xml')
+            metadata_xml = self.get_sync_item(str(sync_id), output_format='xml')
 
         try:
-            xml_head = metadata.getElementsByTagName('MediaContainer')
+            xml_head = metadata_xml.getElementsByTagName('MediaContainer')
         except Exception as e:
             logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_metadata_details: %s." % e)
             return {}
 
-        metadata = {}
 
         for a in xml_head:
             if a.getAttribute('size'):
                 if a.getAttribute('size') != '1':
@@ -1138,6 +1155,17 @@ class PmsConnect(object):
             metadata['media_info'] = medias
 
         if metadata:
+            metadata['_cache_time'] = int(time.time())
+
+            if cache_key:
+                out_file_path = os.path.join(plexpy.CONFIG.CACHE_DIR, 'metadata-sessionKey-%s.json' % cache_key)
+                try:
+                    with open(out_file_path, 'w') as outFile:
+                        json.dump(metadata, outFile)
+                except IOError as e:
+                    logger.error(u"Tautulli Pmsconnect :: Unable to create cache file for metadata (sessionKey %s): %s"
+                                 % (cache_key, e))
+
             return metadata
         else:
             return {}
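The two pmsconnect hunks are two halves of the same cache: the write side stamps the metadata dict with a _cache_time Unix timestamp before dumping it to JSON, and the read side pops that key back out and compares it against an 1800-second (30-minute) window before trusting the file. A small self-contained rendering of that freshness test, with example values only:

    import time

    cached = {'rating_key': '12345', '_cache_time': int(time.time()) - 600}  # cached 10 minutes ago
    cache_time = cached.pop('_cache_time', 0)

    # Same rule as get_metadata_details: reuse the cache if it is under 30 minutes old
    if int(time.time()) - cache_time <= 1800:
        print('cache hit: %s' % cached)
    else:
        print('cache stale: fetch fresh metadata and rewrite the file')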
@@ -1299,6 +1327,7 @@ class PmsConnect(object):
         # Get the source media type
         media_type = helpers.get_xml_attr(session, 'type')
         rating_key = helpers.get_xml_attr(session, 'ratingKey')
+        session_key = helpers.get_xml_attr(session, 'sessionKey')
 
         # Get the user details
         user_info = session.getElementsByTagName('User')[0]
@@ -1613,9 +1642,9 @@ class PmsConnect(object):
             part_id = helpers.get_xml_attr(stream_media_parts_info, 'id')
 
             if sync_id:
-                metadata_details = self.get_metadata_details(sync_id=sync_id)
+                metadata_details = self.get_metadata_details(sync_id=sync_id, cache_key=session_key)
             else:
-                metadata_details = self.get_metadata_details(rating_key=rating_key)
+                metadata_details = self.get_metadata_details(rating_key=rating_key, cache_key=session_key)
 
             # Get the media info, fallback to first item if match id is not found
             source_medias = metadata_details.pop('media_info', [])
@@ -1728,7 +1757,7 @@ class PmsConnect(object):
                 optimized_version_profile = ''
 
             # Entire session output (single dict for backwards compatibility)
-            session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
+            session_output = {'session_key': session_key,
                               'media_type': media_type,
                               'view_offset': view_offset,
                               'progress_percent': str(helpers.get_percent(view_offset, stream_details['stream_duration'])),