Refactor exporter into Export class

JonnyWong16 2020-09-20 20:34:31 -07:00
parent ca06154805
commit 7eedb14834
No known key found for this signature in database
GPG key ID: B1F1F9807184697A
3 changed files with 1285 additions and 1081 deletions


@@ -23,7 +23,6 @@ import json
import os
import threading
from copy import deepcopy
from functools import partial, reduce
from io import open
from multiprocessing.dummy import Pool as ThreadPool
@@ -43,9 +42,22 @@ else:
from plexpy.plex import Plex
MOVIE_ATTRS = {
class Export(object):
MEDIA_TYPES = (
'movie',
'show', 'season', 'episode',
'artist', 'album', 'track',
'photo album', 'photo',
'collection',
'playlist'
)
def return_attrs(self, media_type):
def movie_attrs():
_movie_attrs = {
'addedAt': helpers.datetime_to_iso,
'art': None,
'artFile': lambda i: get_image(i, 'artUrl', self.filename),
'audienceRating': None,
'audienceRatingImage': None,
'chapters': {
@@ -111,7 +123,7 @@ MOVIE_ATTRS = {
'videoProfile': None,
'videoResolution': None,
'width': None,
'hdr': lambda i: get_any_hdr(i, MOVIE_ATTRS['media']),
'hdr': lambda i: get_any_hdr(i, self.return_attrs('movie')['media']),
'parts': {
'accessible': None,
'audioProfile': None,
@@ -236,6 +248,7 @@ MOVIE_ATTRS = {
'summary': None,
'tagline': None,
'thumb': None,
'thumbFile': lambda i: get_image(i, 'thumbUrl', self.filename),
'title': None,
'titleSort': None,
'type': None,
@@ -248,8 +261,10 @@ MOVIE_ATTRS = {
},
'year': None
}
return _movie_attrs
SHOW_ATTRS = {
def show_attrs():
_show_attrs = {
'addedAt': helpers.datetime_to_iso,
'art': None,
'banner': None,
@@ -303,10 +318,13 @@ SHOW_ATTRS = {
'viewCount': None,
'viewedLeafCount': None,
'year': None,
'seasons': lambda e: helpers.get_attrs_to_dict(e, MEDIA_TYPES[e.type][0])
'seasons': lambda e: helpers.get_attrs_to_dict(e.reload() if e.isPartialObject() else e,
self.return_attrs(e.type)[0])
}
return _show_attrs
SEASON_ATTRS = {
def season_attrs():
_season_attrs = {
'addedAt': helpers.datetime_to_iso,
'art': None,
'fields': {
@@ -338,10 +356,13 @@ SEASON_ATTRS = {
'userRating': None,
'viewCount': None,
'viewedLeafCount': None,
'episodes': lambda e: helpers.get_attrs_to_dict(e, MEDIA_TYPES[e.type][0])
'episodes': lambda e: helpers.get_attrs_to_dict(e.reload() if e.isPartialObject() else e,
self.return_attrs(e.type)[0])
}
return _season_attrs
EPISODE_ATTRS = {
def episode_attrs():
_episode_attrs = {
'addedAt': helpers.datetime_to_iso,
'art': None,
'chapterSource': None,
@@ -391,7 +412,7 @@ EPISODE_ATTRS = {
'videoProfile': None,
'videoResolution': None,
'width': None,
'hdr': lambda i: get_any_hdr(i, EPISODE_ATTRS['media']),
'hdr': lambda i: get_any_hdr(i, self.return_attrs('episode')['media']),
'parts': {
'accessible': None,
'audioProfile': None,
@@ -520,8 +541,10 @@ EPISODE_ATTRS = {
},
'year': None
}
return _episode_attrs
ARTIST_ATTRS = {
def artist_attrs():
_artist_attrs = {
'addedAt': helpers.datetime_to_iso,
'art': None,
'collections': {
@@ -566,10 +589,13 @@ ARTIST_ATTRS = {
'updatedAt': helpers.datetime_to_iso,
'userRating': None,
'viewCount': None,
'albums': lambda e: helpers.get_attrs_to_dict(e, MEDIA_TYPES[e.type][0])
'albums': lambda e: helpers.get_attrs_to_dict(e.reload() if e.isPartialObject() else e,
self.return_attrs(e.type))
}
return _artist_attrs
ALBUM_ATTRS = {
def album_attrs():
_album_attrs = {
'addedAt': helpers.datetime_to_iso,
'art': None,
'collections': {
@@ -622,10 +648,13 @@ ALBUM_ATTRS = {
'userRating': None,
'viewCount': None,
'viewedLeafCount': None,
'tracks': lambda e: helpers.get_attrs_to_dict(e, MEDIA_TYPES[e.type][0])
'tracks': lambda e: helpers.get_attrs_to_dict(e.reload() if e.isPartialObject() else e,
self.return_attrs(e.type))
}
return _album_attrs
TRACK_ATTRS = {
def track_attrs():
_track_attrs = {
'addedAt': helpers.datetime_to_iso,
'art': None,
'duration': None,
@@ -738,12 +767,15 @@ TRACK_ATTRS = {
'viewCount': None,
'year': None,
}
return _track_attrs
PHOTO_ALBUM_ATTRS = {
def photo_album_attrs():
_photo_album_attrs = {
# For some reason photos needs to be first,
# otherwise the photo album ratingKey gets
# clobbered by the first photo's ratingKey
'photos': lambda e: helpers.get_attrs_to_dict(e, MEDIA_TYPES[e.type][0]),
'photos': lambda e: helpers.get_attrs_to_dict(e.reload() if e.isPartialObject() else e,
self.return_attrs(e.type)),
'addedAt': helpers.datetime_to_iso,
'art': None,
'composite': None,
@@ -760,8 +792,10 @@ PHOTO_ALBUM_ATTRS = {
'type': None,
'updatedAt': helpers.datetime_to_iso
}
return _photo_album_attrs
PHOTO_ATTRS = {
def photo_attrs():
_photo_attrs = {
'addedAt': helpers.datetime_to_iso,
'createdAtAccuracy': None,
'createdAtTZOffset': None,
@@ -813,8 +847,10 @@ PHOTO_ATTRS = {
'title': None
}
}
return _photo_attrs
COLLECTION_ATTRS = {
def collection_attrs():
_collection_attrs = {
'addedAt': helpers.datetime_to_iso,
'childCount': None,
'collectionMode': None,
@@ -839,10 +875,13 @@ COLLECTION_ATTRS = {
'title': None,
'type': None,
'updatedAt': helpers.datetime_to_iso,
'children': lambda e: helpers.get_attrs_to_dict(e, MEDIA_TYPES[e.type][0])
'children': lambda e: helpers.get_attrs_to_dict(e.reload() if e.isPartialObject() else e,
self.return_attrs(e.type))
}
return _collection_attrs
PLAYLIST_ATTRS = {
def playlist_attrs():
_playlist_attrs = {
'addedAt': helpers.datetime_to_iso,
'composite': None,
'duration': None,
@@ -857,15 +896,35 @@ PLAYLIST_ATTRS = {
'title': None,
'type': None,
'updatedAt': helpers.datetime_to_iso,
'items': lambda e: helpers.get_attrs_to_dict(e, MEDIA_TYPES[e.type][0])
'items': lambda e: helpers.get_attrs_to_dict(e.reload() if e.isPartialObject() else e,
self.return_attrs(e.type))
}
return _playlist_attrs
_media_types = {
'movie': movie_attrs,
'show': show_attrs,
'season': season_attrs,
'episode': episode_attrs,
'artist': artist_attrs,
'album': album_attrs,
'track': track_attrs,
'photo album': photo_album_attrs,
'photo': photo_attrs,
'collection': collection_attrs,
'playlist': playlist_attrs,
}
return _media_types[media_type]()
MOVIE_LEVELS = [
def return_levels(self, media_type):
def movie_levels():
_movie_levels = [
{
1: [
'ratingKey', 'title', 'titleSort', 'originalTitle', 'originallyAvailableAt', 'year',
'ratingKey', 'title', 'titleSort', 'originalTitle', 'originallyAvailableAt', 'year', 'addedAt',
'rating', 'ratingImage', 'audienceRating', 'audienceRatingImage', 'userRating', 'contentRating',
'studio', 'tagline', 'summary', 'guid', 'duration', 'durationHuman', 'type'
'studio', 'tagline', 'summary', 'guid', 'duration', 'durationHuman', 'type', 'artFile', 'thumbFile'
],
2: [
'directors.tag', 'writers.tag', 'producers.tag', 'roles.tag', 'roles.role',
@@ -875,7 +934,8 @@ MOVIE_LEVELS = [
'art', 'thumb', 'key', 'chapterSource',
'chapters.tag', 'chapters.index', 'chapters.start', 'chapters.end', 'chapters.thumb',
'updatedAt', 'lastViewedAt', 'viewCount'
],
9: [k for k in self.return_attrs('movie') if k != 'media']
},
{
1: [
@@ -912,43 +972,382 @@ MOVIE_LEVELS = [
'media.parts.subtitleStreams.title', 'media.parts.subtitleStreams.displayTitle',
'media.parts.subtitleStreams.extendedDisplayTitle', 'media.parts.subtitleStreams.forced',
'media.parts.subtitleStreams.default'
],
9: [
'locations', 'media'
]
}
]
return _movie_levels
SHOW_LEVELS = {}
SEASON_LEVELS = {}
EPISODE_LEVELS = {}
ARTIST_LEVELS = {}
ALBUM_LEVELS = {}
TRACK_LEVELS = {}
PHOTO_ALBUM_LEVELS = {}
PHOTO_LEVELS = {}
COLLECTION_LEVELS = {}
PLAYLIST_LEVELS = {}
MEDIA_TYPES = {
'movie': (MOVIE_ATTRS, MOVIE_LEVELS),
'show': (SHOW_ATTRS, SHOW_LEVELS),
'season': (SEASON_ATTRS, SEASON_LEVELS),
'episode': (EPISODE_ATTRS, EPISODE_LEVELS),
'artist': (ARTIST_ATTRS, ARTIST_LEVELS),
'album': (ALBUM_ATTRS, ALBUM_LEVELS),
'track': (TRACK_ATTRS, TRACK_LEVELS),
'photo album': (PHOTO_ALBUM_ATTRS, PHOTO_ALBUM_LEVELS),
'photo': (PHOTO_ATTRS, PHOTO_LEVELS),
'collection': (COLLECTION_ATTRS, COLLECTION_LEVELS),
'playlist': (PLAYLIST_ATTRS, PLAYLIST_LEVELS)
}
def show_levels():
_show_levels = [
{
1: [
'ratingKey', 'title', 'titleSort', 'originallyAvailableAt', 'year', 'addedAt',
'rating', 'userRating', 'contentRating',
'studio', 'summary', 'guid', 'duration', 'durationHuman', 'type', 'childCount'
] + ['seasons.' + attr for attr in self.return_levels('season')[0][1]],
2: [
'roles.tag', 'roles.role',
'genres.tag', 'collections.tag', 'labels.tag', 'fields.name', 'fields.locked'
] + ['seasons.' + attr for attr in self.return_levels('season')[0][2]],
3: [
'art', 'thumb', 'banner', 'theme', 'key',
'updatedAt', 'lastViewedAt', 'viewCount'
] + ['seasons.' + attr for attr in self.return_levels('season')[0][3]],
9: [k for k in self.return_attrs('show') if k != 'media']
},
{
1: ['seasons.' + attr for attr in self.return_levels('season')[1][1]],
2: ['seasons.' + attr for attr in self.return_levels('season')[1][2]],
3: ['seasons.' + attr for attr in self.return_levels('season')[1][3]],
9: ['seasons.' + attr for attr in self.return_levels('season')[1][9]]
}
]
return _show_levels
def season_levels():
_season_levels = [
{
1: [
'ratingKey', 'title', 'titleSort', 'addedAt',
'userRating',
'summary', 'guid', 'type', 'index',
'parentTitle', 'parentRatingKey', 'parentGuid'
] + ['episodes.' + attr for attr in self.return_levels('episode')[0][1]],
2: [
'fields.name', 'fields.locked'
] + ['episodes.' + attr for attr in self.return_levels('episode')[0][2]],
3: [
'art', 'thumb', 'key',
'updatedAt', 'lastViewedAt', 'viewCount',
'parentKey', 'parentTheme', 'parentThumb'
] + ['episodes.' + attr for attr in self.return_levels('episode')[0][3]],
9: [k for k in self.return_attrs('season') if k != 'media']
},
{
1: ['episodes.' + attr for attr in self.return_levels('episode')[1][1]],
2: ['episodes.' + attr for attr in self.return_levels('episode')[1][2]],
3: ['episodes.' + attr for attr in self.return_levels('episode')[1][3]],
9: ['episodes.' + attr for attr in self.return_levels('episode')[1][9]]
}
]
return _season_levels
def episode_levels():
_episode_levels = [
{
1: [
'ratingKey', 'title', 'titleSort', 'originallyAvailableAt', 'year', 'addedAt',
'rating', 'userRating', 'contentRating',
'summary', 'guid', 'duration', 'durationHuman', 'type', 'index',
'parentTitle', 'parentRatingKey', 'parentGuid', 'parentIndex',
'grandparentTitle', 'grandparentRatingKey', 'grandparentGuid'
],
2: [
'directors.tag', 'writers.tag',
'fields.name', 'fields.locked'
],
3: [
'art', 'thumb', 'key', 'chapterSource',
'updatedAt', 'lastViewedAt', 'viewCount',
'parentThumb', 'parentKey',
'grandparentArt', 'grandparentThumb', 'grandparentTheme', 'grandparentKey'
],
9: [k for k in self.return_attrs('episode') if k != 'media']
},
{
1: [
'locations', 'media.aspectRatio', 'media.audioChannels', 'media.audioCodec', 'media.audioProfile',
'media.bitrate', 'media.container', 'media.duration', 'media.height', 'media.width',
'media.videoCodec', 'media.videoFrameRate', 'media.videoProfile', 'media.videoResolution',
'media.optimizedVersion', 'media.hdr'
],
2: [
'media.parts.accessible', 'media.parts.exists', 'media.parts.file', 'media.parts.duration',
'media.parts.container', 'media.parts.indexes', 'media.parts.size', 'media.parts.sizeHuman',
'media.parts.audioProfile', 'media.parts.videoProfile',
'media.parts.optimizedForStreaming', 'media.parts.deepAnalysisVersion'
],
3: [
'media.parts.videoStreams.codec', 'media.parts.videoStreams.bitrate',
'media.parts.videoStreams.language', 'media.parts.videoStreams.languageCode',
'media.parts.videoStreams.title', 'media.parts.videoStreams.displayTitle',
'media.parts.videoStreams.extendedDisplayTitle', 'media.parts.videoStreams.hdr',
'media.parts.videoStreams.bitDepth', 'media.parts.videoStreams.colorSpace',
'media.parts.videoStreams.frameRate', 'media.parts.videoStreams.level',
'media.parts.videoStreams.profile', 'media.parts.videoStreams.refFrames',
'media.parts.videoStreams.scanType', 'media.parts.videoStreams.default',
'media.parts.videoStreams.height', 'media.parts.videoStreams.width',
'media.parts.audioStreams.codec', 'media.parts.audioStreams.bitrate',
'media.parts.audioStreams.language', 'media.parts.audioStreams.languageCode',
'media.parts.audioStreams.title', 'media.parts.audioStreams.displayTitle',
'media.parts.audioStreams.extendedDisplayTitle', 'media.parts.audioStreams.bitDepth',
'media.parts.audioStreams.channels', 'media.parts.audioStreams.audioChannelLayout',
'media.parts.audioStreams.profile', 'media.parts.audioStreams.samplingRate',
'media.parts.audioStreams.default',
'media.parts.subtitleStreams.codec', 'media.parts.subtitleStreams.format',
'media.parts.subtitleStreams.language', 'media.parts.subtitleStreams.languageCode',
'media.parts.subtitleStreams.title', 'media.parts.subtitleStreams.displayTitle',
'media.parts.subtitleStreams.extendedDisplayTitle', 'media.parts.subtitleStreams.forced',
'media.parts.subtitleStreams.default'
],
9: [
'locations', 'media'
]
}
]
return _episode_levels
def artist_levels():
_artist_levels = []
return _artist_levels
def album_levels():
_album_levels = []
return _album_levels
def track_levels():
_track_levels = []
return _track_levels
def photo_album_levels():
_photo_album_levels = []
return _photo_album_levels
def photo_levels():
_photo_levels = []
return _photo_levels
def collection_levels():
_collection_levels = []
return _collection_levels
def playlist_levels():
_playlist_levels = []
return _playlist_levels
_media_types = {
'movie': movie_levels,
'show': show_levels,
'season': season_levels,
'episode': episode_levels,
'artist': artist_levels,
'album': album_levels,
'track': track_levels,
'photo album': photo_album_levels,
'photo': photo_levels,
'collection': collection_levels,
'playlist': playlist_levels
}
return _media_types[media_type]()
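As with return_attrs() above, return_levels() hands back per-media-type tables keyed by level number, and export() below unions every tier at or below the requested level, with 9 reserved for the full attribute set. A standalone sketch of that union rule (the table below is a made-up subset, not the real movie table):

```
# Made-up subset of a metadata levels table, just to show the union rule
# used by export(): every tier at or below the requested level contributes
# its attribute names.
metadata_levels = {
    1: ['ratingKey', 'title', 'year'],
    2: ['genres.tag', 'labels.tag'],
    3: ['art', 'thumb', 'updatedAt'],
    9: ['every', 'remaining', 'attribute'],  # placeholder for the full set
}

requested = 2
selected = set()
for level, attrs in metadata_levels.items():
    if level <= requested:
        selected.update(attrs)

print(sorted(selected))
# ['genres.tag', 'labels.tag', 'ratingKey', 'title', 'year']
```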
def __init__(self, section_id=None, rating_key=None, file_format='json',
metadata_level=1, media_info_level=1):
self.section_id = helpers.cast_to_int(section_id)
self.rating_key = helpers.cast_to_int(rating_key)
self.file_format = file_format
self.metadata_level = helpers.cast_to_int(metadata_level)
self.media_info_level = helpers.cast_to_int(media_info_level)
self.timestamp = helpers.timestamp()
self.media_type = None
self.items = []
self.filename = None
self.filename_ext = None
self.export_id = None
self.file_size = None
self.success = False
def export(self):
if not self.section_id and not self.rating_key:
logger.error("Tautulli Exporter :: Export called but no section_id or rating_key provided.")
return
elif self.rating_key and not str(self.rating_key).isdigit():
logger.error("Tautulli Exporter :: Export called with invalid rating_key '%s'.", self.rating_key)
return
elif self.section_id and not str(self.section_id).isdigit():
logger.error("Tautulli Exporter :: Export called with invalid section_id '%s'.", self.section_id)
return
elif not self.metadata_level:
logger.error("Tautulli Exporter :: Export called with invalid metadata_level '%s'.", self.metadata_level)
return
elif not self.media_info_level:
logger.error("Tautulli Exporter :: Export called with invalid media_info_level '%s'.", self.media_info_level)
return
elif self.file_format not in ('json', 'csv'):
logger.error("Tautulli Exporter :: Export called but invalid file_format '%s' provided.", self.file_format)
return
plex = Plex(plexpy.CONFIG.PMS_URL, plexpy.CONFIG.PMS_TOKEN)
if self.rating_key:
logger.debug(
"Tautulli Exporter :: Export called with rating_key %s, metadata_level %d, media_info_level %d",
self.rating_key, self.metadata_level, self.media_info_level)
item = plex.get_item(self.rating_key)
self.media_type = item.type
if self.media_type != 'playlist':
self.section_id = item.librarySectionID
if self.media_type in ('season', 'episode', 'album', 'track'):
item_title = item._defaultSyncTitle()
else:
item_title = item.title
if self.media_type == 'photo' and item.TAG == 'Directory':
self.media_type = 'photo album'
filename = '{} - {} [{}].{}'.format(
self.media_type.title(), item_title, self.rating_key,
helpers.timestamp_to_YMDHMS(self.timestamp))
self.items = [item]
elif self.section_id:
logger.debug(
"Tautulli Exporter :: Export called with section_id %s, metadata_level %d, media_info_level %d",
self.section_id, self.metadata_level, self.media_info_level)
library = plex.get_library(str(self.section_id))
self.media_type = library.type
library_title = library.title
filename = 'Library - {} [{}].{}'.format(
library_title, self.section_id,
helpers.timestamp_to_YMDHMS(self.timestamp))
self.items = library.all()
else:
return
if self.media_type not in self.MEDIA_TYPES:
logger.error("Tautulli Exporter :: Cannot export media type '%s'", self.media_type)
return
media_attrs = self.return_attrs(self.media_type)
metadata_level_attrs, media_info_level_attrs = self.return_levels(self.media_type)
if self.metadata_level not in metadata_level_attrs:
logger.error("Tautulli Exporter :: Export called with invalid metadata_level '%s'.", self.metadata_level)
return
elif self.media_info_level not in media_info_level_attrs:
logger.error("Tautulli Exporter :: Export called with invalid media_info_level '%s'.", self.media_info_level)
return
export_attrs_list = []
export_attrs_set = set()
for level, attrs in metadata_level_attrs.items():
if level <= self.metadata_level:
export_attrs_set.update(attrs)
for level, attrs in media_info_level_attrs.items():
if level <= self.media_info_level:
export_attrs_set.update(attrs)
for attr in export_attrs_set:
try:
value = helpers.get_dict_value_by_path(media_attrs, attr)
except KeyError:
logger.warn("Tautulli Exporter :: Unknown export attribute '%s', skipping...", attr)
continue
except Exception as e:
print(e)
continue
export_attrs_list.append(value)
export_attrs = reduce(helpers.dict_merge, export_attrs_list)
self.filename = helpers.clean_filename(filename)
self.filename_ext = '{}.{}'.format(self.filename, self.file_format)
self.export_id = self.add_export()
if not self.export_id:
logger.error("Tautulli Exporter :: Failed to export '%s'", self.filename)
return
threading.Thread(target=self._real_export,
kwargs={'attrs': export_attrs}).start()
return True
def _real_export(self, attrs):
logger.info("Tautulli Exporter :: Starting export for '%s'...", self.filename_ext)
filepath = get_export_filepath(self.filename_ext)
part = partial(helpers.get_attrs_to_dict, attrs=attrs)
pool = ThreadPool(processes=4)
try:
result = pool.map(part, self.items)
if self.file_format == 'json':
json_data = json.dumps(result, indent=4, ensure_ascii=False, sort_keys=True)
with open(filepath, 'w', encoding='utf-8') as outfile:
outfile.write(json_data)
elif self.file_format == 'csv':
flatten_result = helpers.flatten_dict(result)
flatten_attrs = set().union(*flatten_result)
with open(filepath, 'w', encoding='utf-8', newline='') as outfile:
writer = csv.DictWriter(outfile, sorted(flatten_attrs))
writer.writeheader()
writer.writerows(flatten_result)
self.file_size = os.path.getsize(filepath)
self.success = True
logger.info("Tautulli Exporter :: Successfully exported to '%s'", filepath)
except Exception as e:
logger.error("Tautulli Exporter :: Failed to export '%s': %s", self.filename_ext, e)
import traceback
traceback.print_exc()
finally:
pool.close()
pool.join()
self.set_export_state()
def add_export(self):
keys = {'timestamp': self.timestamp,
'section_id': self.section_id,
'rating_key': self.rating_key,
'media_type': self.media_type}
values = {'file_format': self.file_format,
'filename': self.filename_ext}
db = database.MonitorDatabase()
try:
db.upsert(table_name='exports', key_dict=keys, value_dict=values)
return db.last_insert_id()
except Exception as e:
logger.error("Tautulli Exporter :: Unable to save export to database: %s", e)
return False
def set_export_state(self):
if self.success:
complete = 1
else:
complete = -1
keys = {'id': self.export_id}
values = {'complete': complete,
'file_size': self.file_size}
db = database.MonitorDatabase()
db.upsert(table_name='exports', key_dict=keys, value_dict=values)
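For reference, a minimal usage sketch of the refactored class, mirroring the WebInterface call at the end of this diff; the rating key and section id below are made-up placeholders:

```
# Hypothetical values throughout; export() validates its arguments, queues the
# real work on a background thread, and returns True on success.
result = Export(rating_key=12345,          # a single movie, show, album, ...
                file_format='json',
                metadata_level=2,
                media_info_level=1).export()

# ...or export an entire library section to CSV.
result = Export(section_id=1, file_format='csv').export()

if result:
    print('Metadata export has started.')
```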
def get_any_hdr(obj, root):
@@ -957,179 +1356,19 @@ def get_any_hdr(obj, root):
return any(vs.get('hdr') for p in media.get('parts', []) for vs in p.get('videoStreams', []))
def get_image(item, prop, export_filename):
url = getattr(item, prop)
media_type = item.type
if media_type in ('season', 'episode', 'album', 'track'):
item_title = item._defaultSyncTitle()
else:
item_title = item.title
filename = os.path.join('{}.images'.format(export_filename), '{}.png'.format(item_title))
file = get_export_filepath(filename)
return file
def export(section_id=None, rating_key=None, file_format='json', metadata_level=1, media_info_level=1):
timestamp = helpers.timestamp()
metadata_level = helpers.cast_to_int(metadata_level)
media_info_level = helpers.cast_to_int(media_info_level)
if not section_id and not rating_key:
logger.error("Tautulli Exporter :: Export called but no section_id or rating_key provided.")
return
elif rating_key and not str(rating_key).isdigit():
logger.error("Tautulli Exporter :: Export called with invalid rating_key '%s'.", rating_key)
return
elif section_id and not str(section_id).isdigit():
logger.error("Tautulli Exporter :: Export called with invalid section_id '%s'.", section_id)
return
elif not metadata_level:
logger.error("Tautulli Exporter :: Export called with invalid metadata_level '%s'.", metadata_level)
return
elif not media_info_level:
logger.error("Tautulli Exporter :: Export called with invalid media_info_level '%s'.", media_info_level)
return
elif file_format not in ('json', 'csv'):
logger.error("Tautulli Exporter :: Export called but invalid file_format '%s' provided.", file_format)
return
plex = Plex(plexpy.CONFIG.PMS_URL, plexpy.CONFIG.PMS_TOKEN)
if rating_key:
logger.debug("Tautulli Exporter :: Export called with rating_key %s, metadata_level %d, media_info_level %d",
rating_key, metadata_level, media_info_level)
item = plex.get_item(helpers.cast_to_int(rating_key))
media_type = item.type
if media_type != 'playlist':
section_id = item.librarySectionID
if media_type in ('season', 'episode', 'album', 'track'):
item_title = item._defaultSyncTitle()
else:
item_title = item.title
if media_type == 'photo' and item.TAG == 'Directory':
media_type = 'photo album'
filename = '{} - {} [{}].{}.{}'.format(
media_type.title(), item_title, rating_key, helpers.timestamp_to_YMDHMS(timestamp), file_format)
items = [item]
elif section_id:
logger.debug("Tautulli Exporter :: Export called with section_id %s, metadata_level %d, media_info_level %d",
rating_key, metadata_level, media_info_level)
library = plex.get_library(section_id)
media_type = library.type
library_title = library.title
filename = 'Library - {} [{}].{}.{}'.format(
library_title, section_id, helpers.timestamp_to_YMDHMS(timestamp), file_format)
items = library.all()
else:
return
if media_type not in MEDIA_TYPES:
logger.error("Tautulli Exporter :: Cannot export media type '%s'", media_type)
return
media_attrs, (metadata_level_attrs, media_info_level_attrs) = MEDIA_TYPES[media_type]
if metadata_level != 9 and metadata_level not in metadata_level_attrs:
logger.error("Tautulli Exporter :: Export called with invalid metadata_level '%s'.", metadata_level)
return
elif media_info_level != 9 and media_info_level not in media_info_level_attrs:
logger.error("Tautulli Exporter :: Export called with invalid media_info_level '%s'.", media_info_level)
return
export_attrs_list = []
export_attrs_set = set()
if metadata_level == 9:
metadata_export_attrs = deepcopy(media_attrs)
del metadata_export_attrs['media']
export_attrs_list.append(metadata_export_attrs)
else:
_metadata_levels = sorted(metadata_level_attrs.keys())
for _metadata_level in _metadata_levels[:_metadata_levels.index(metadata_level) + 1]:
export_attrs_set.update(metadata_level_attrs[_metadata_level])
if media_info_level == 9:
media_info_export_attrs = {'media': deepcopy(media_attrs['media'])}
export_attrs_list.append(media_info_export_attrs)
else:
_media_info_levels = sorted(media_info_level_attrs.keys())
for _media_info_level in _media_info_levels[:_media_info_levels.index(media_info_level) + 1]:
export_attrs_set.update(media_info_level_attrs[_media_info_level])
for attr in export_attrs_set:
try:
value = helpers.get_dict_value_by_path(media_attrs, attr)
except KeyError:
logger.warn("Tautulli Exporter :: Unknown export attribute '%s', skipping...", attr)
continue
export_attrs_list.append(value)
export_attrs = reduce(helpers.dict_merge, export_attrs_list)
filename = helpers.clean_filename(filename)
export_id = add_export(timestamp=timestamp,
section_id=section_id,
rating_key=rating_key,
media_type=media_type,
file_format=file_format,
filename=filename)
if not export_id:
logger.error("Tautulli Exporter :: Failed to export '%s'", filename)
return
threading.Thread(target=_real_export,
kwargs={'export_id': export_id,
'items': items,
'attrs': export_attrs,
'file_format': file_format,
'filename': filename}).start()
return True
def _real_export(export_id, items, attrs, file_format, filename):
logger.info("Tautulli Exporter :: Starting export for '%s'...", filename)
filepath = get_export_filepath(filename)
part = partial(helpers.get_attrs_to_dict, attrs=attrs)
pool = ThreadPool(processes=4)
success = True
try:
result = pool.map(part, items)
if file_format == 'json':
json_data = json.dumps(result, indent=4, ensure_ascii=False, sort_keys=True)
with open(filepath, 'w', encoding='utf-8') as outfile:
outfile.write(json_data)
elif file_format == 'csv':
flatten_result = helpers.flatten_dict(result)
flatten_attrs = set().union(*flatten_result)
with open(filepath, 'w', encoding='utf-8', newline='') as outfile:
writer = csv.DictWriter(outfile, sorted(flatten_attrs))
writer.writeheader()
writer.writerows(flatten_result)
file_size = os.path.getsize(filepath)
except Exception as e:
set_export_state(export_id=export_id, success=False)
logger.error("Tautulli Exporter :: Failed to export '%s': %s", filename, e)
import traceback
traceback.print_exc()
success = False
finally:
pool.close()
pool.join()
if not success:
return
set_export_state(export_id=export_id, file_size=file_size)
logger.info("Tautulli Exporter :: Successfully exported to '%s'", filepath)
def get_export(export_id):
@@ -1144,38 +1383,6 @@ def get_export(export_id):
return result
def add_export(timestamp, section_id, rating_key, media_type, file_format, filename):
keys = {'timestamp': timestamp,
'section_id': section_id,
'rating_key': rating_key,
'media_type': media_type}
values = {'file_format': file_format,
'filename': filename}
db = database.MonitorDatabase()
try:
db.upsert(table_name='exports', key_dict=keys, value_dict=values)
return db.last_insert_id()
except Exception as e:
logger.error("Tautulli Exporter :: Unable to save export to database: %s", e)
return False
def set_export_state(export_id, file_size=None, success=True):
if success:
complete = 1
else:
complete = -1
keys = {'id': export_id}
values = {'complete': complete,
'file_size': file_size}
db = database.MonitorDatabase()
db.upsert(table_name='exports', key_dict=keys, value_dict=values)
def delete_export(export_id):
db = database.MonitorDatabase()
if str(export_id).isdigit():
@@ -1189,6 +1396,7 @@ def delete_export(export_id):
logger.info("Tautulli Exporter :: Deleting exported file from '%s'.", filepath)
try:
os.remove(filepath)
# TODO: Delete images as well
except OSError as e:
logger.error("Tautulli Exporter :: Failed to delete exported file '%s': %s", filepath, e)
return True
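The attribute selection in Export.export() relies on helpers.get_dict_value_by_path() and helpers.dict_merge(), neither of which appears in this diff. A simplified sketch with stand-in implementations, assuming path resolution returns just the requested branch of the nested attrs dict before everything is merged back together:

```
# Illustrative only: simplified stand-ins for helpers.get_dict_value_by_path
# and helpers.dict_merge, showing how dotted level entries like
# 'media.parts.file' select a branch of the attrs dict and are merged.
from functools import reduce

def get_dict_value_by_path(root, attr):
    # Walk 'a.b.c' down the nested dict, then rebuild just that branch.
    keys = attr.split('.')
    value = root
    for key in keys:
        value = value[key]
    branch = value
    for key in reversed(keys):
        branch = {key: branch}
    return branch

def dict_merge(a, b):
    # Recursively merge b into a copy of a.
    merged = dict(a)
    for key, value in b.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = dict_merge(merged[key], value)
        else:
            merged[key] = value
    return merged

attrs = {'title': None,
         'media': {'bitrate': None,
                   'parts': {'file': None, 'size': None}}}

selected = ['title', 'media.parts.file']
export_attrs = reduce(dict_merge, [get_dict_value_by_path(attrs, a) for a in selected])
print(export_attrs)
# {'title': None, 'media': {'parts': {'file': None}}}
```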


@@ -1267,15 +1267,11 @@ def flatten_tree(obj, key=''):
# https://stackoverflow.com/a/14692747
def get_by_path(root, items):
"""Access a nested object in root by item sequence."""
if isinstance(items, str):
items = items.split('.')
return reduce(operator.getitem, items, root)
def set_by_path(root, items, value):
"""Set a value in a nested object in root by item sequence."""
if isinstance(items, str):
items = items.split('.')
get_by_path(root, items[:-1])[items[-1]] = value
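With the isinstance() checks removed, get_by_path() and set_by_path() now expect an already-split key sequence, so callers have to split dotted paths like 'media.parts.file' themselves. A small self-contained example (the sample data and path are made up):

```
import operator
from functools import reduce

def get_by_path(root, items):
    """Access a nested object in root by item sequence."""
    return reduce(operator.getitem, items, root)

def set_by_path(root, items, value):
    """Set a value in a nested object in root by item sequence."""
    get_by_path(root, items[:-1])[items[-1]] = value

data = {'media': {'parts': {'file': None}}}
set_by_path(data, 'media.parts.file'.split('.'), '/movies/example.mkv')
assert get_by_path(data, ['media', 'parts', 'file']) == '/movies/example.mkv'
```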


@@ -6511,11 +6511,11 @@ class WebInterface(object):
}
```
"""
result = exporter.export(section_id=section_id,
rating_key=rating_key,
file_format=file_format,
metadata_level=metadata_level,
media_info_level=media_info_level)
result = exporter.Export(section_id=section_id,
rating_key=rating_key,
file_format=file_format,
metadata_level=metadata_level,
media_info_level=media_info_level).export()
if result:
return {'result': 'success', 'message': 'Metadata export has started.'}