Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-06 05:01:14 -07:00)
Fix calculating media info file sizes

commit eb7a4fb4bf, parent 18110206d6
3 changed files with 94 additions and 93 deletions
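In outline, the commit moves the duplicated JSON cache handling in the Libraries class into two private helpers and threads media_type/section_id through to PmsConnect so that file sizes can also be calculated for music artists. As a sketch, the helper interfaces look like this (signatures only; the full bodies appear in the hunk at @@ -754,28 +695,77 @@ below):

class Libraries(object):
    # Signatures of the helpers added by this commit; bodies are in the diff below.
    def _load_media_info_cache(self, section_id=None, rating_key=None):
        """Read the cached media info JSON and return (cache_time, rows, library_count)."""

    def _save_media_info_cache(self, section_id=None, rating_key=None, rows=None):
        """Write {'last_refreshed': <timestamp>, 'rows': rows} to the cache JSON file."""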
@@ -514,39 +514,8 @@ class Libraries(object):
                 watched_list[str(item[group_by])] = {'last_played': item['last_played'],
                                                      'play_count': item['play_count']}
 
-        cache_time = None
-        rows = []
         # Import media info cache from json file
-        if rating_key:
-            try:
-                inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
-                with open(inFilePath, 'r') as inFile:
-                    data = json.load(inFile)
-                    if isinstance(data, dict):
-                        cache_time = data['last_refreshed']
-                        rows = data['rows']
-                    else:
-                        rows = data
-                    library_count = len(rows)
-            except IOError as e:
-                #logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
-                #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
-                pass
-        elif section_id:
-            try:
-                inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
-                with open(inFilePath, 'r') as inFile:
-                    data = json.load(inFile)
-                    if isinstance(data, dict):
-                        cache_time = data['last_refreshed']
-                        rows = data['rows']
-                    else:
-                        rows = data
-                    library_count = len(rows)
-            except IOError as e:
-                #logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
-                #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
-                pass
+        cache_time, rows, library_count = self._load_media_info_cache(section_id=section_id, rating_key=rating_key)
 
         # If no cache was imported, get all library children items
         cached_items = {d['rating_key']: d['file_size'] for d in rows} if not refresh else {}
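For reference, the cached_items map built from the loader's rows is what lets previously computed sizes be reused: unless a refresh is forced, each row's file_size is keyed by rating_key and looked up instead of being fetched from Plex again. A minimal illustration with made-up values:

# Made-up rows, shaped like the entries returned by _load_media_info_cache.
rows = [{'rating_key': 1001, 'file_size': 734003200},
        {'rating_key': 1002, 'file_size': 0}]
refresh = False

# Same dict comprehension as the context line above: rating_key -> file_size.
cached_items = {d['rating_key']: d['file_size'] for d in rows} if not refresh else {}

assert cached_items.get(1001) == 734003200   # size reused from the cache
assert cached_items.get(9999) is None        # unknown items still need a lookup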
@@ -606,22 +575,7 @@ class Libraries(object):
             return default_return
 
         # Cache the media info to a json file
-        cache_time = helpers.timestamp()
-
-        if rating_key:
-            try:
-                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
-                with open(outFilePath, 'w') as outFile:
-                    json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile)
-            except IOError as e:
-                logger.debug("Tautulli Libraries :: Unable to create cache file for rating_key %s." % rating_key)
-        elif section_id:
-            try:
-                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
-                with open(outFilePath, 'w') as outFile:
-                    json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile)
-            except IOError as e:
-                logger.debug("Tautulli Libraries :: Unable to create cache file for section_id %s." % section_id)
+        self._save_media_info_cache(section_id=section_id, rating_key=rating_key, rows=rows)
 
         # Update the last_played and play_count
         for item in rows:
@@ -707,30 +661,15 @@ class Libraries(object):
         if library_details['section_type'] == 'photo':
             return False
 
-        rows = []
         # Import media info cache from json file
-        if rating_key:
-            #logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
-            try:
-                inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
-                with open(inFilePath, 'r') as inFile:
-                    rows = json.load(inFile)
-            except IOError as e:
-                #logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
-                #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
-                pass
-        elif section_id:
-            logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." % section_id)
-            try:
-                inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
-                with open(inFilePath, 'r') as inFile:
-                    rows = json.load(inFile)
-            except IOError as e:
-                #logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
-                #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
-                pass
+        _, rows, _ = self._load_media_info_cache(section_id=section_id, rating_key=rating_key)
 
         # Get the total file size for each item
+        if rating_key:
+            logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
+        elif section_id:
+            logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." % section_id)
+
         pms_connect = pmsconnect.PmsConnect()
 
         for item in rows:
@@ -738,7 +677,9 @@ class Libraries(object):
             file_size = 0
 
             metadata = pms_connect.get_metadata_children_details(rating_key=item['rating_key'],
-                                                                 get_children=True)
+                                                                 get_children=True,
+                                                                 media_type=item['media_type'],
+                                                                 section_id=section_id)
 
             for child_metadata in metadata:
                 ## TODO: Check list of media info items, currently only grabs first item
@@ -754,28 +695,77 @@ class Libraries(object):
             item['file_size'] = file_size
 
         # Cache the media info to a json file
-        if rating_key:
-            try:
-                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
-                with open(outFilePath, 'w') as outFile:
-                    json.dump(rows, outFile)
-            except IOError as e:
-                logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
-        elif section_id:
-            try:
-                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
-                with open(outFilePath, 'w') as outFile:
-                    json.dump(rows, outFile)
-            except IOError as e:
-                logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)
+        self._save_media_info_cache(section_id=section_id, rating_key=rating_key, rows=rows)
 
         if rating_key:
-            #logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
-            pass
+            logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
         elif section_id:
             logger.debug("Tautulli Libraries :: File sizes updated for section_id %s." % section_id)
 
         return True
 
+    def _load_media_info_cache(self, section_id=None, rating_key=None):
+        cache_time = None
+        rows = []
+        library_count = 0
+
+        # Import media info cache from json file
+        if rating_key:
+            try:
+                inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
+                with open(inFilePath, 'r') as inFile:
+                    data = json.load(inFile)
+                    if isinstance(data, dict):
+                        cache_time = data['last_refreshed']
+                        rows = data['rows']
+                    else:
+                        rows = data
+                    library_count = len(rows)
+                logger.debug("Tautulli Libraries :: Loaded media info from cache for rating_key %s (%s items)." % (rating_key, library_count))
+            except IOError as e:
+                logger.debug("Tautulli Libraries :: No media info cache for rating_key %s." % rating_key)
+
+        elif section_id:
+            try:
+                inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
+                with open(inFilePath, 'r') as inFile:
+                    data = json.load(inFile)
+                    if isinstance(data, dict):
+                        cache_time = data['last_refreshed']
+                        rows = data['rows']
+                    else:
+                        rows = data
+                    library_count = len(rows)
+                logger.debug("Tautulli Libraries :: Loaded media info from cache for section_id %s (%s items)." % (section_id, library_count))
+            except IOError as e:
+                logger.debug("Tautulli Libraries :: No media info cache for section_id %s." % section_id)
+
+        return cache_time, rows, library_count
+
+    def _save_media_info_cache(self, section_id=None, rating_key=None, rows=None):
+        cache_time = helpers.timestamp()
+
+        if rows is None:
+            rows = []
+
+        if rating_key:
+            try:
+                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
+                with open(outFilePath, 'w') as outFile:
+                    json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile)
+                logger.debug("Tautulli Libraries :: Saved media info cache for rating_key %s." % rating_key)
+            except IOError as e:
+                logger.debug("Tautulli Libraries :: Unable to create cache file for rating_key %s." % rating_key)
+
+        elif section_id:
+            try:
+                outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
+                with open(outFilePath, 'w') as outFile:
+                    json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile)
+                logger.debug("Tautulli Libraries :: Saved media info cache for section_id %s." % section_id)
+            except IOError as e:
+                logger.debug("Tautulli Libraries :: Unable to create cache file for section_id %s." % section_id)
+
     def set_config(self, section_id=None, custom_thumb='', custom_art='',
                    do_notify=1, keep_history=1, do_notify_created=1):
         if section_id:
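Taken together, the two helpers above centralize the cache file handling. A rough round-trip sketch of the behaviour, using a temporary directory in place of plexpy.CONFIG.CACHE_DIR and made-up identifiers:

import json
import os
import tempfile
import time

cache_dir = tempfile.mkdtemp()      # stand-in for plexpy.CONFIG.CACHE_DIR
section_id, rating_key = 2, 51331   # made-up identifiers
rows = [{'rating_key': 51331, 'media_type': 'show', 'file_size': 0}]

# Save side: same filename pattern and payload as _save_media_info_cache.
out_path = os.path.join(cache_dir, 'media_info_%s-%s.json' % (section_id, rating_key))
with open(out_path, 'w') as out_file:
    json.dump({'last_refreshed': int(time.time()), 'rows': rows}, out_file)

# Load side: same dict-or-list fallback as _load_media_info_cache, which also
# keeps older cache files (a bare list of rows) readable.
with open(out_path, 'r') as in_file:
    data = json.load(in_file)
if isinstance(data, dict):
    cache_time, loaded_rows = data['last_refreshed'], data['rows']
else:
    cache_time, loaded_rows = None, data

print(cache_time, len(loaded_rows))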
@@ -1617,7 +1617,7 @@ class PmsConnect(object):
         else:
             return metadata
 
-    def get_metadata_children_details(self, rating_key='', get_children=False):
+    def get_metadata_children_details(self, rating_key='', get_children=False, media_type=None, section_id=None):
         """
         Return processed and validated metadata list for all children of requested item.
 
@@ -1625,6 +1625,14 @@
 
         Output: array
         """
-        metadata = self.get_metadata_children(str(rating_key), output_format='xml')
+        if media_type == 'artist':
+            sort_type = '&artist.id={}&type=9'.format(rating_key)
+            xml_head = self.fetch_library_list(
+                section_id=str(section_id),
+                sort_type=sort_type,
+                output_format='xml'
+            )
+        else:
+            metadata = self.get_metadata_children(str(rating_key), output_format='xml')
 
         try:
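The new media_type and section_id parameters give music artists their own path: rather than asking for the artist's children directly, the method fetches a filtered library listing for that artist (type 9 appears to be Plex's numeric type for albums). A simplified, illustrative mirror of that branching, with made-up values and no network calls:

# Illustrative only; mirrors the branch added to get_metadata_children_details.
def children_query(rating_key, media_type=None, section_id=None):
    if media_type == 'artist':
        # Albums belonging to this artist, fetched as a filtered library listing.
        sort_type = '&artist.id={}&type=9'.format(rating_key)
        return ('fetch_library_list', str(section_id), sort_type)
    # Every other media type keeps using the item's own children lookup.
    return ('get_metadata_children', str(rating_key), None)

print(children_query(12345, media_type='artist', section_id=2))
print(children_query(67890, media_type='show', section_id=1))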
@@ -931,6 +931,9 @@ class WebInterface(object):
         section_ids = set(get_file_sizes_hold['section_ids'])
         rating_keys = set(get_file_sizes_hold['rating_keys'])
 
+        section_id = helpers.cast_to_int(section_id)
+        rating_key = helpers.cast_to_int(rating_key)
+
         if (section_id and section_id not in section_ids) or (rating_key and rating_key not in rating_keys):
             if section_id:
                 section_ids.add(section_id)
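The added cast_to_int calls matter because section_id and rating_key typically arrive from the HTTP request as strings, while the hold sets are meant to track integers; without a consistent type, the membership checks above could never match and the same file-size job could be queued twice. A quick illustration with made-up values:

section_ids = {1, 2, 3}   # ints, as tracked in the hold set
section_id = '2'          # string, as it would arrive with the request

print(section_id in section_ids)        # False: '2' is not the same key as 2
print(int(section_id) in section_ids)   # True once cast to an int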