diff --git a/data/interfaces/default/export_modal.html b/data/interfaces/default/export_modal.html
index 758ef7bc..300d6a16 100644
--- a/data/interfaces/default/export_modal.html
+++ b/data/interfaces/default/export_modal.html
@@ -56,6 +56,14 @@ DOCUMENTATION :: END

Select the export data file format.

+ % if not rating_key: +
+ +

Enable to export one file for each ${media_type}, otherwise only export a single file containing all ${media_type}s.

+
+ % endif
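
The checkbox added above only sets a flag that is posted along with the export request; the server-side handling appears in the webserve.py and exporter.py hunks further down. The following is a minimal sketch of that flow, condensed for illustration (the real handler is WebInterface.export_metadata(); export_metadata_sketch is a hypothetical name, while exporter.Export, helpers.bool_true, and the parameters shown all come from this diff):

```python
# Hypothetical condensed view of the request path for the new option; the real
# handler is WebInterface.export_metadata() in plexpy/webserve.py (see below).
from plexpy import exporter, helpers

def export_metadata_sketch(section_id=None, user_id=None, rating_key=None,
                           file_format='csv', export_type=None,
                           individual_files=False, **kwargs):
    # The checkbox value arrives as a string, so it is normalized to a bool
    # before being handed to the exporter (mirrors helpers.bool_true() below).
    individual_files = helpers.bool_true(individual_files)

    result = exporter.Export(section_id=section_id,
                             user_id=user_id,
                             rating_key=rating_key,
                             file_format=file_format,
                             export_type=export_type,
                             individual_files=individual_files).export()

    # Export.export() returns True on success, otherwise an error message,
    # e.g. when individual_files is combined with a single rating_key.
    if result is True:
        return {'result': 'success', 'message': 'Metadata export has started.'}
    return {'result': 'error', 'message': result}
```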
@@ -249,6 +257,7 @@ DOCUMENTATION :: END $('#export_custom_media_info_fields').val() ].filter(Boolean).join(','); var export_type = $('#export_export_type').val() + var individual_files = $('#export_individual_files').is(':checked') $.ajax({ url: 'export_metadata', @@ -262,7 +271,8 @@ DOCUMENTATION :: END thumb_level: thumb_level, art_level: art_level, custom_fields: custom_fields, - export_type: export_type + export_type: export_type, + individual_files: individual_files }, async: true, success: function (data) { diff --git a/data/interfaces/default/js/tables/export_table.js b/data/interfaces/default/js/tables/export_table.js index c078045a..c7e172be 100644 --- a/data/interfaces/default/js/tables/export_table.js +++ b/data/interfaces/default/js/tables/export_table.js @@ -40,7 +40,8 @@ export_table_options = { } }, "width": "8%", - "className": "no-wrap" + "className": "no-wrap", + "searchable": false }, { "targets": [1], @@ -66,13 +67,23 @@ export_table_options = { }, { "targets": [3], - "data": "filename", + "data": "title", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== '') { - if (rowData['complete'] === 1 && rowData['exists']) { - $(td).html('' + cellData + ''); + var tooltip; + var filename; + if (!rowData['individual_files']) { + tooltip = ''; + filename = cellData + '.' + rowData['file_format'] } else { - $(td).html(cellData); + tooltip = ''; + filename = cellData + } + + if (rowData['complete'] === 1 && rowData['exists'] && !rowData['individual_files']) { + $(td).html('' + tooltip + ' ' + filename + ''); + } else { + $(td).html(tooltip + ' ' + filename); } } }, @@ -136,14 +147,25 @@ export_table_options = { } }, "width": "6%", - "className": "no-wrap" + "className": "no-wrap", + "searchable": false }, { "targets": [9], "data": "complete", "createdCell": function (td, cellData, rowData, row, col) { if (cellData === 1 && rowData['exists']) { - $(td).html(''); + var tooltip_title = ''; + var icon = ''; + if (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) { + tooltip_title = 'Zip Archive'; + icon = 'fa-file-archive'; + } else { + tooltip_title = rowData['file_format'].toUpperCase() + ' File'; + icon = 'fa-file-download'; + } + var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download'; + $(td).html(''); } else if (cellData === 0) { $(td).html(' Processing'); } else if (cellData === -1) { @@ -153,7 +175,8 @@ export_table_options = { } }, "width": "7%", - "className": "export_download" + "className": "export_download", + "searchable": false }, { "targets": [10], @@ -166,7 +189,8 @@ export_table_options = { } }, "width": "7%", - "className": "export_delete" + "className": "export_delete", + "searchable": false } ], "drawCallback": function (settings) { @@ -174,6 +198,12 @@ export_table_options = { //$('html,body').scrollTop(0); $('#ajaxMsg').fadeOut(); + // Create the tooltips. + $('body').tooltip({ + selector: '[data-toggle="tooltip"]', + container: 'body' + }); + if (export_processing_timer) { clearTimeout(export_processing_timer); } @@ -208,7 +238,7 @@ $('.export_table').on('click', '> tbody > tr > td.export_delete > button', funct var row = export_table.row(tr); var rowData = row.data(); - var msg = 'Are you sure you want to delete the following export?

' + rowData['filename'] + ''; + var msg = 'Are you sure you want to delete the following export?

' + rowData['title'] + ''; var url = 'delete_export?export_id=' + rowData['export_id']; confirmAjaxCall(url, msg, null, null, redrawExportTable); }); diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 37510837..a7e4a252 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -798,10 +798,10 @@ def dbcheck(): c_db.execute( 'CREATE TABLE IF NOT EXISTS exports (id INTEGER PRIMARY KEY AUTOINCREMENT, ' 'timestamp INTEGER, section_id INTEGER, user_id INTEGER, rating_key INTEGER, media_type TEXT, ' - 'filename TEXT, file_format TEXT, ' + 'title TEXT, file_format TEXT, ' 'metadata_level INTEGER, media_info_level INTEGER, ' 'thumb_level INTEGER DEFAULT 0, art_level INTEGER DEFAULT 0, ' - 'custom_fields TEXT, ' + 'custom_fields TEXT, individual_files INTEGER DEFAULT 0, ' 'file_size INTEGER DEFAULT 0, complete INTEGER DEFAULT 0)' ) @@ -2179,6 +2179,18 @@ def dbcheck(): 'UPDATE exports SET art_level = 9 WHERE include_art = 1' ) + # Upgrade exports table from earlier versions + try: + c_db.execute('SELECT title FROM exports') + except sqlite3.OperationalError: + logger.debug("Altering database. Updating database table exports.") + c_db.execute( + 'ALTER TABLE exports ADD COLUMN title TEXT' + ) + c_db.execute( + 'ALTER TABLE exports ADD COLUMN individual_files INTEGER DEFAULT 0' + ) + # Add "Local" user to database as default unauthenticated user. result = c_db.execute('SELECT id FROM users WHERE username = "Local"') if not result.fetchone(): diff --git a/plexpy/exporter.py b/plexpy/exporter.py index c5ad276f..2bbb7e2e 100644 --- a/plexpy/exporter.py +++ b/plexpy/exporter.py @@ -101,7 +101,7 @@ class Export(object): def __init__(self, section_id=None, user_id=None, rating_key=None, file_format='csv', metadata_level=1, media_info_level=1, thumb_level=0, art_level=0, - custom_fields='', export_type=None): + custom_fields='', export_type=None, individual_files=False): self.section_id = helpers.cast_to_int(section_id) or None self.user_id = helpers.cast_to_int(user_id) or None self.rating_key = helpers.cast_to_int(rating_key) or None @@ -112,17 +112,22 @@ class Export(object): self.art_level = helpers.cast_to_int(art_level) self.custom_fields = custom_fields.replace(' ', '') self._custom_fields = {} - self.export_type = export_type or 'all' + self.export_type = str(export_type).lower() or 'all' + self.individual_files = individual_files self.timestamp = helpers.timestamp() self.media_type = None self.obj = None - self.title = '' + self.obj_title = '' + self.directory = None self.filename = None + self.title = None self.export_id = None - self.file_size = None + self.file_size = 0 + self.exported_thumb = False + self.exported_art = False self.success = False # Reset export options for m3u8 @@ -416,7 +421,7 @@ class Export(object): 'viewCount': None, 'viewedLeafCount': None, 'year': None, - 'seasons': lambda e: self._export_obj(e) + 'seasons': lambda e: self.export_obj(e) } return _show_attrs @@ -455,7 +460,7 @@ class Export(object): 'userRating': None, 'viewCount': None, 'viewedLeafCount': None, - 'episodes': lambda e: self._export_obj(e) + 'episodes': lambda e: self.export_obj(e) } return _season_attrs @@ -700,7 +705,7 @@ class Export(object): 'updatedAt': helpers.datetime_to_iso, 'userRating': None, 'viewCount': None, - 'albums': lambda e: self._export_obj(e) + 'albums': lambda e: self.export_obj(e) } return _artist_attrs @@ -760,7 +765,7 @@ class Export(object): 'userRating': None, 'viewCount': None, 'viewedLeafCount': None, - 'tracks': lambda e: self._export_obj(e) + 'tracks': lambda e: 
self.export_obj(e) } return _album_attrs @@ -907,9 +912,9 @@ class Export(object): 'titleSort': None, 'type': lambda e: 'photoalbum' if e == 'photo' else e, 'updatedAt': helpers.datetime_to_iso, - 'photoalbums': lambda o: [self._export_obj(e) for e in getattr(o, 'albums')()], - 'photos': lambda e: self._export_obj(e), - 'clips': lambda e: self._export_obj(e) + 'photoalbums': lambda o: [self.export_obj(e) for e in getattr(o, 'albums')()], + 'photos': lambda e: self.export_obj(e), + 'clips': lambda e: self.export_obj(e) } return _photo_album_attrs @@ -1007,7 +1012,7 @@ class Export(object): 'titleSort': None, 'type': None, 'updatedAt': helpers.datetime_to_iso, - 'children': lambda e: self._export_obj(e) + 'children': lambda e: self.export_obj(e) } return _collection_attrs @@ -1027,7 +1032,7 @@ class Export(object): 'title': None, 'type': None, 'updatedAt': helpers.datetime_to_iso, - 'items': lambda e: self._export_obj(e) + 'items': lambda e: self.export_obj(e) } return _playlist_attrs @@ -1500,6 +1505,8 @@ class Export(object): elif self.user_id and self.export_type != 'playlist': msg = "Export called with invalid export_type '{}'. " \ "Only export_type 'playlist' is allowed for user export." + elif self.individual_files and self.rating_key: + msg = "Individual file export is only allowed for library or user export." if msg: logger.error("Tautulli Exporter :: %s", msg) @@ -1518,49 +1525,42 @@ class Export(object): if self.rating_key: logger.debug( "Tautulli Exporter :: Export called with rating_key %s, " - "metadata_level %d, media_info_level %d, thumb_level %s, art_level %s", + "metadata_level %d, media_info_level %d, thumb_level %s, art_level %s, " + "file_format %s", self.rating_key, self.metadata_level, self.media_info_level, - self.thumb_level, self.art_level) + self.thumb_level, self.art_level, self.file_format) self.obj = plex.get_item(self.rating_key) - self.media_type = 'photoalbum' if self.is_photoalbum(self.obj) else self.obj.type + self.media_type = self._media_type(self.obj) if self.media_type != 'playlist': self.section_id = self.obj.librarySectionID if self.media_type in ('season', 'episode', 'album', 'track'): - self.title = self.obj._defaultSyncTitle() + self.obj_title = self.obj._defaultSyncTitle() else: - self.title = self.obj.title - - filename = '{} - {} [{}].{}'.format( - self.media_type.capitalize(), self.title, self.rating_key, - helpers.timestamp_to_YMDHMS(self.timestamp)) + self.obj_title = self.obj.title elif self.user_id: logger.debug( "Tautulli Exporter :: Export called with user_id %s, " "metadata_level %d, media_info_level %d, thumb_level %s, art_level %s, " - "export_type %s", + "export_type %s, file_format %s", self.user_id, self.metadata_level, self.media_info_level, - self.thumb_level, self.art_level, self.export_type) + self.thumb_level, self.art_level, self.export_type, self.file_format) self.obj = plex.plex self.media_type = self.export_type - self.title = user_info['username'] - - filename = 'User - {} - {} [{}].{}'.format( - self.title, self.export_type.capitalize(), self.user_id, - helpers.timestamp_to_YMDHMS(self.timestamp)) + self.obj_title = user_info['username'] elif self.section_id: logger.debug( "Tautulli Exporter :: Export called with section_id %s, " "metadata_level %d, media_info_level %d, thumb_level %s, art_level %s, " - "export_type %s", + "export_type %s, file_format %s", self.section_id, self.metadata_level, self.media_info_level, - self.thumb_level, self.art_level, self.export_type) + self.thumb_level, self.art_level, self.export_type, 
self.file_format) self.obj = plex.get_library(str(self.section_id)) if self.export_type == 'all': @@ -1568,11 +1568,7 @@ class Export(object): else: self.media_type = self.export_type - self.title = self.obj.title - - filename = 'Library - {} - {} [{}].{}'.format( - self.title, self.export_type.capitalize(), self.section_id, - helpers.timestamp_to_YMDHMS(self.timestamp)) + self.obj_title = self.obj.title else: msg = "Export called but no section_id, user_id, or rating_key provided." @@ -1591,10 +1587,13 @@ class Export(object): self._process_custom_fields() - self.filename = '{}.{}'.format(helpers.clean_filename(filename), self.file_format) + self.directory = self._filename(directory=True) + self.filename = self._filename() + self.title = self._filename(extension=False) + self.export_id = self.add_export() if not self.export_id: - msg = "Failed to export '{}'.".format(self.filename) + msg = "Failed to export '{}'.".format(self.directory) logger.error("Tautulli Exporter :: %s", msg) return msg @@ -1603,19 +1602,24 @@ class Export(object): return True def add_export(self): - keys = {'timestamp': self.timestamp, - 'section_id': self.section_id, - 'user_id': self.user_id, - 'rating_key': self.rating_key, - 'media_type': self.media_type} + keys = { + 'timestamp': self.timestamp, + 'section_id': self.section_id, + 'user_id': self.user_id, + 'rating_key': self.rating_key, + 'media_type': self.media_type + } - values = {'file_format': self.file_format, - 'filename': self.filename, - 'metadata_level': self.metadata_level, - 'media_info_level': self.media_info_level, - 'thumb_level': self.thumb_level, - 'art_level': self.art_level, - 'custom_fields': self.custom_fields} + values = { + 'title': self.title, + 'file_format': self.file_format, + 'metadata_level': self.metadata_level, + 'media_info_level': self.media_info_level, + 'thumb_level': self.thumb_level, + 'art_level': self.art_level, + 'custom_fields': self.custom_fields, + 'individual_files': self.individual_files + } db = database.MonitorDatabase() try: @@ -1631,20 +1635,21 @@ class Export(object): else: complete = -1 - keys = {'id': self.export_id} - values = {'complete': complete, - 'file_size': self.file_size, - 'thumb_level': self.thumb_level, - 'art_level': self.art_level} + keys = { + 'id': self.export_id + } + values = { + 'thumb_level': self.thumb_level, + 'art_level': self.art_level, + 'complete': complete, + 'file_size': self.file_size + } db = database.MonitorDatabase() db.upsert(table_name='exports', key_dict=keys, value_dict=values) def _real_export(self): - logger.info("Tautulli Exporter :: Starting export for '%s'...", self.filename) - - filepath = get_export_filepath(self.filename) - images_folder = get_export_filepath(self.filename, images=True) + logger.info("Tautulli Exporter :: Starting export for '%s'...", self.title) if self.rating_key: items = [self.obj] @@ -1656,71 +1661,120 @@ class Export(object): items = method() pool = ThreadPool(processes=4) + items = [ExportObject(self, item) for item in items] try: result = pool.map(self._export_obj, items) - if self.file_format == 'csv': - csv_data = helpers.flatten_dict(result) - csv_headers = sorted(set().union(*csv_data), key=helpers.sort_attrs) - with open(filepath, 'w', encoding='utf-8', newline='') as outfile: - writer = csv.DictWriter(outfile, csv_headers) - writer.writeheader() - writer.writerows(csv_data) + if self.individual_files: + for item, item_result in zip(items, result): + self._save_file([item_result], item.filename) + self._exported_images(item.title) - elif 
self.file_format == 'json': - json_data = json.dumps(helpers.sort_obj(result), - indent=4, ensure_ascii=False) - with open(filepath, 'w', encoding='utf-8') as outfile: - outfile.write(json_data) + else: + self._save_file(result, self.filename) + self._exported_images(self.title) - elif self.file_format == 'xml': - xml_data = helpers.dict_to_xml({self.media_type: helpers.sort_obj(result)}, - root_node='export', indent=4) - with open(filepath, 'w', encoding='utf-8') as outfile: - outfile.write(xml_data) + self.thumb_level = self.thumb_level or 10 if self.exported_thumb else 0 + self.art_level = self.art_level or 10 if self.exported_art else 0 - elif self.file_format == 'm3u8': - m3u8_data = self.dict_to_m3u8(result) - with open(filepath, 'w', encoding='utf-8') as outfile: - outfile.write(m3u8_data) - - self.file_size = os.path.getsize(filepath) - - exported_thumb = exported_art = False - if os.path.exists(images_folder): - for f in os.listdir(images_folder): - if f.endswith('.thumb.jpg'): - exported_thumb = True - elif f.endswith('.art.jpg'): - exported_art = True - - image_path = os.path.join(images_folder, f) - if os.path.isfile(image_path): - self.file_size += os.path.getsize(image_path) - - self.thumb_level = self.thumb_level if exported_thumb else 0 - self.art_level = self.art_level if exported_art else 0 + self.file_size += sum(item.file_size for item in items) self.success = True - logger.info("Tautulli Exporter :: Successfully exported to '%s'", filepath) + + dirpath = get_export_dirpath(self.directory) + logger.info("Tautulli Exporter :: Successfully exported to '%s'", dirpath) except Exception as e: - logger.exception("Tautulli Exporter :: Failed to export '%s': %s", self.filename, e) + logger.exception("Tautulli Exporter :: Failed to export '%s': %s", self.title, e) finally: pool.close() pool.join() self.set_export_state() - def _export_obj(self, obj): - # Reload ~plexapi.base.PlexPartialObject - if hasattr(obj, 'isPartialObject') and obj.isPartialObject(): - obj = obj.reload() + @staticmethod + def _export_obj(export_obj): + return export_obj.export_obj(export_obj) - media_type = 'photoalbum' if self.is_photoalbum(obj) else obj.type - export_attrs = self._get_export_attrs(media_type) - return helpers.get_attrs_to_dict(obj, attrs=export_attrs) + def _save_file(self, result, filename): + dirpath = get_export_dirpath(self.directory) + filepath = os.path.join(dirpath, filename) + + if not os.path.exists(dirpath): + os.makedirs(dirpath) + + if self.file_format == 'csv': + csv_data = helpers.flatten_dict(result) + csv_headers = sorted(set().union(*csv_data), key=helpers.sort_attrs) + with open(filepath, 'w', encoding='utf-8', newline='') as outfile: + writer = csv.DictWriter(outfile, csv_headers) + writer.writeheader() + writer.writerows(csv_data) + + elif self.file_format == 'json': + json_data = json.dumps(helpers.sort_obj(result), + indent=4, ensure_ascii=False) + with open(filepath, 'w', encoding='utf-8') as outfile: + outfile.write(json_data) + + elif self.file_format == 'xml': + xml_data = helpers.dict_to_xml({self.media_type: helpers.sort_obj(result)}, + root_node='export', indent=4) + with open(filepath, 'w', encoding='utf-8') as outfile: + outfile.write(xml_data) + + elif self.file_format == 'm3u8': + m3u8_data = self.dict_to_m3u8(result) + with open(filepath, 'w', encoding='utf-8') as outfile: + outfile.write(m3u8_data) + + self.file_size += os.path.getsize(filepath) + + def _exported_images(self, title): + images_dirpath = get_export_dirpath(self.directory, 
images_directory=title) + + if os.path.exists(images_dirpath): + for f in os.listdir(images_dirpath): + if f.endswith('.thumb.jpg'): + self.exported_thumb = True + elif f.endswith('.art.jpg'): + self.exported_art = True + + def _media_type(self, obj): + return 'photoalbum' if self.is_photoalbum(obj) else obj.type + + def _filename(self, obj=None, directory=False, extension=True): + if obj: + media_type = self._media_type(obj) + if media_type in ('season', 'episode', 'album', 'track'): + title = obj._defaultSyncTitle() + else: + title = obj.title + filename = '{} - {} [{}]'.format( + media_type.capitalize(), title, obj.ratingKey) + + elif self.rating_key: + filename = '{} - {} [{}]'.format( + self.media_type.capitalize(), self.obj_title, self.rating_key) + + elif self.user_id: + filename = 'User - {} - {} [{}]'.format( + self.obj_title, self.export_type.capitalize(), self.user_id) + + elif self.section_id: + filename = 'Library - {} - {} [{}]'.format( + self.obj_title, self.export_type.capitalize(), self.section_id) + + else: + filename = 'Export - Unknown' + + filename = helpers.clean_filename(filename) + if directory: + return format_export_directory(filename, self.timestamp) + elif extension: + return format_export_filename(filename, self.file_format) + return filename def _process_custom_fields(self): if self.custom_fields: @@ -1798,61 +1852,6 @@ class Export(object): return reduce(helpers.dict_merge, export_attrs_list, {}) - def get_any_hdr(self, item, media_type): - root = self.return_attrs(media_type)['media'] - attrs = helpers.get_dict_value_by_path(root, 'parts.videoStreams.hdr') - media = helpers.get_attrs_to_dict(item, attrs) - return any(vs.get('hdr') for p in media.get('parts', []) for vs in p.get('videoStreams', [])) - - def get_image(self, item, image): - media_type = item.type - rating_key = item.ratingKey - - export_image = True - if self.thumb_level == 1 or self.art_level == 1: - posters = item.arts() if image == 'art' else item.posters() - export_image = any(poster.selected and poster.ratingKey.startswith('upload://') - for poster in posters) - elif self.thumb_level == 2 or self.art_level == 2: - export_image = any(field.locked and field.name == image - for field in item.fields) - elif self.thumb_level == 9 or self.art_level == 9: - export_image = True - - if not export_image and image + 'File' in self._custom_fields.get(media_type, set()): - export_image = True - - if not export_image: - return - - image_url = None - if image == 'thumb': - image_url = item.thumbUrl - elif image == 'art': - image_url = item.artUrl - - if not image_url: - return - - if media_type in ('season', 'episode', 'album', 'track'): - item_title = item._defaultSyncTitle() - else: - item_title = item.title - - folder = get_export_filepath(self.filename, images=True) - filename = helpers.clean_filename('{} [{}].{}.jpg'.format(item_title, rating_key, image)) - filepath = os.path.join(folder, filename) - - os.makedirs(folder, exist_ok=True) - - r = requests.get(image_url, stream=True) - if r.status_code == 200: - with open(filepath, 'wb') as outfile: - for chunk in r: - outfile.write(chunk) - - return os.path.join(os.path.basename(folder), filename) - @staticmethod def is_media_info_attr(attr): return attr.startswith('media.') or attr == 'locations' @@ -1863,10 +1862,8 @@ class Export(object): def dict_to_m3u8(self, data): items = self._get_m3u8_items(data) - m3u8_metadata = { - 'filename': self.filename, - 'type': self.media_type - } + + m3u8_metadata = {'type': self.media_type} if self.rating_key: 
m3u8_metadata['ratingKey'] = self.rating_key if self.user_id: @@ -1916,63 +1913,156 @@ class Export(object): return items + def export_obj(self, export_obj): + pass + + def get_any_hdr(self, item, media_type): + pass + + def get_image(self, item, image): + pass + + +class ExportObject(Export): + def __init__(self, export, obj): + super(ExportObject, self).__init__() + self.__dict__.update(export.__dict__) + + self.obj = obj + self.filename = self._filename(obj=self.obj) + self.title = self._filename(obj=self.obj, extension=False) + + def export_obj(self, export_obj): + if isinstance(export_obj, ExportObject): + obj = export_obj.obj + else: + obj = export_obj + + # Reload ~plexapi.base.PlexPartialObject + if hasattr(obj, 'isPartialObject') and obj.isPartialObject(): + obj = obj.reload() + + media_type = self._media_type(obj) + export_attrs = self._get_export_attrs(media_type) + return helpers.get_attrs_to_dict(obj, attrs=export_attrs) + + def get_any_hdr(self, item, media_type): + root = self.return_attrs(media_type)['media'] + attrs = helpers.get_dict_value_by_path(root, 'parts.videoStreams.hdr') + media = helpers.get_attrs_to_dict(item, attrs) + return any(vs.get('hdr') for p in media.get('parts', []) for vs in p.get('videoStreams', [])) + + def get_image(self, item, image): + media_type = item.type + rating_key = item.ratingKey + + export_image = True + if self.thumb_level == 1 or self.art_level == 1: + posters = item.arts() if image == 'art' else item.posters() + export_image = any(poster.selected and poster.ratingKey.startswith('upload://') + for poster in posters) + elif self.thumb_level == 2 or self.art_level == 2: + export_image = any(field.locked and field.name == image + for field in item.fields) + elif self.thumb_level == 9 or self.art_level == 9: + export_image = True + + if not export_image and image + 'File' in self._custom_fields.get(media_type, set()): + export_image = True + + if not export_image: + return + + image_url = None + if image == 'thumb': + image_url = item.thumbUrl + elif image == 'art': + image_url = item.artUrl + + if not image_url: + return + + r = requests.get(image_url, stream=True) + if r.status_code != 200: + return + + if media_type in ('season', 'episode', 'album', 'track'): + item_title = item._defaultSyncTitle() + else: + item_title = item.title + + dirpath = get_export_dirpath(self.directory, images_directory=self.title) + filename = helpers.clean_filename('{} [{}].{}.jpg'.format(item_title, rating_key, image)) + filepath = os.path.join(dirpath, filename) + + if not os.path.exists(dirpath): + os.makedirs(dirpath) + + with open(filepath, 'wb') as outfile: + for chunk in r: + outfile.write(chunk) + + self.file_size += os.path.getsize(filepath) + + return os.path.join(os.path.basename(dirpath), filename) + def get_export(export_id): db = database.MonitorDatabase() - result = db.select_single('SELECT filename, file_format, thumb_level, art_level, complete ' + result = db.select_single('SELECT timestamp, title, file_format, thumb_level, art_level, ' + 'individual_files, complete ' 'FROM exports WHERE id = ?', [export_id]) if result: - result['exists'] = check_export_exists(result['filename']) + if result['individual_files']: + result['filename'] = None + result['exists'] = check_export_exists(result['title'], result['timestamp']) + else: + result['filename'] = '{}.{}'.format(result['title'], result['file_format']) + result['exists'] = check_export_exists(result['title'], result['timestamp'], result['filename']) return result def delete_export(export_id): 
- db = database.MonitorDatabase() if str(export_id).isdigit(): - export_data = get_export(export_id=export_id) + deleted = True - logger.info("Tautulli Exporter :: Deleting export_id %s from the database.", export_id) - result = db.action('DELETE FROM exports WHERE id = ?', args=[export_id]) - - if export_data and export_data['exists']: - filepath = get_export_filepath(export_data['filename']) - logger.info("Tautulli Exporter :: Deleting exported file from '%s'.", filepath) + result = get_export(export_id=export_id) + if result and check_export_exists(result['title'], result['timestamp']): # Only check if folder exists + dirpath = get_export_dirpath(result['title'], result['timestamp']) + logger.info("Tautulli Exporter :: Deleting export '%s'.", dirpath) try: - os.remove(filepath) - images_folder = get_export_filepath(export_data['filename'], images=True) - shutil.rmtree(images_folder, ignore_errors=True) + shutil.rmtree(dirpath, ignore_errors=True) except OSError as e: - logger.error("Tautulli Exporter :: Failed to delete exported file '%s': %s", filepath, e) - return True + logger.error("Tautulli Exporter :: Failed to delete export '%s': %s", dirpath, e) + deleted = False + + if deleted: + logger.info("Tautulli Exporter :: Deleting export_id %s from the database.", export_id) + db = database.MonitorDatabase() + result = db.action('DELETE FROM exports WHERE id = ?', args=[export_id]) + + return deleted else: return False def delete_all_exports(): - db = database.MonitorDatabase() - result = db.select('SELECT filename FROM exports') + logger.info("Tautulli Exporter :: Deleting all exports from the export directory.") - logger.info("Tautulli Exporter :: Deleting all exports from the database.") + export_dir = plexpy.CONFIG.EXPORT_DIR + try: + shutil.rmtree(export_dir, ignore_errors=True) + except OSError as e: + logger.error("Tautulli Exporter :: Failed to delete export directory '%s': %s", export_dir, e) - deleted_files = True - for row in result: - if check_export_exists(row['filename']): - filepath = get_export_filepath(row['filename']) - try: - os.remove(filepath) - images_folder = get_export_filepath(row['filename'], images=True) - shutil.rmtree(images_folder, ignore_errors=True) - except OSError as e: - logger.error("Tautulli Exporter :: Failed to delete exported file '%s': %s", filepath, e) - deleted_files = False - break + if not os.path.exists(export_dir): + os.makedirs(export_dir) - if deleted_files: - database.delete_exports() - return True + database.delete_exports() + return True def cancel_exports(): @@ -2003,13 +2093,17 @@ def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs= 'exports.user_id', 'exports.rating_key', 'exports.media_type', - 'exports.filename', + 'CASE WHEN exports.media_type = "photoalbum" THEN "Photo Album" ELSE ' + 'UPPER(SUBSTR(exports.media_type, 1, 1)) || SUBSTR(exports.media_type, 2) END ' + 'AS media_type_title', + 'exports.title', 'exports.file_format', 'exports.metadata_level', 'exports.media_info_level', 'exports.thumb_level', 'exports.art_level', 'exports.custom_fields', + 'exports.individual_files', 'exports.file_size', 'exports.complete' ] @@ -2030,8 +2124,12 @@ def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs= rows = [] for item in result: - media_type_title = item['media_type'].title() - exists = helpers.cast_to_int(check_export_exists(item['filename'])) + if item['individual_files']: + filename = None + exists = check_export_exists(item['title'], item['timestamp']) + else: + filename = 
format_export_filename(item['title'], item['file_format']) + exists = check_export_exists(item['title'], item['timestamp'], filename) row = {'export_id': item['export_id'], 'timestamp': item['timestamp'], @@ -2039,14 +2137,16 @@ def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs= 'user_id': item['user_id'], 'rating_key': item['rating_key'], 'media_type': item['media_type'], - 'media_type_title': media_type_title, - 'filename': item['filename'], + 'media_type_title': item['media_type_title'], + 'title': item['title'], + 'filename': filename, 'file_format': item['file_format'], 'metadata_level': item['metadata_level'], 'media_info_level': item['media_info_level'], 'thumb_level': item['thumb_level'], 'art_level': item['art_level'], 'custom_fields': item['custom_fields'], + 'individual_files': item['individual_files'], 'file_size': item['file_size'], 'complete': item['complete'], 'exists': exists @@ -2063,15 +2163,32 @@ def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs= return result -def get_export_filepath(filename, images=False): - if images: - images_folder = '{}.images'.format(os.path.splitext(filename)[0]) - return os.path.join(plexpy.CONFIG.EXPORT_DIR, images_folder) - return os.path.join(plexpy.CONFIG.EXPORT_DIR, filename) +def format_export_directory(title, timestamp): + return '{}.{}'.format(title, helpers.timestamp_to_YMDHMS(timestamp)) -def check_export_exists(filename): - return os.path.isfile(get_export_filepath(filename)) +def format_export_filename(title, file_format): + return '{}.{}'.format(title, file_format) + + +def get_export_dirpath(title, timestamp=None, images_directory=None): + if timestamp: + title = format_export_directory(title, timestamp) + dirpath = os.path.join(plexpy.CONFIG.EXPORT_DIR, title) + if images_directory: + dirpath = os.path.join(dirpath, '{}.images'.format(images_directory)) + return dirpath + + +def get_export_filepath(title, timestamp, filename): + dirpath = get_export_dirpath(title, timestamp) + return os.path.join(dirpath, filename) + + +def check_export_exists(title, timestamp=None, filename=None): + if filename: + return os.path.isfile(get_export_filepath(title, timestamp, filename)) + return os.path.isdir(get_export_dirpath(title, timestamp)) def get_custom_fields(media_type, sub_media_type=None): diff --git a/plexpy/helpers.py b/plexpy/helpers.py index 31b8d87c..55cba024 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -1474,6 +1474,16 @@ def version_to_tuple(version): return tuple(cast_to_int(v) for v in version.strip('v').split('.')) +# https://stackoverflow.com/a/1855118 +def zipdir(path, ziph): + # ziph is zipfile handle + for root, dirs, files in os.walk(path): + for file in files: + ziph.write(os.path.join(root, file), + arcname=os.path.relpath(os.path.join(root, file), + os.path.join(path, '.'))) + + def page(endpoint, *args, **kwargs): endpoints = { 'pms_image_proxy': pms_image_proxy, diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 67eba4f6..64c376d7 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -6578,8 +6578,12 @@ class WebInterface(object): dt_columns = [("timestamp", True, False), ("media_type_title", True, True), ("rating_key", True, True), + ("title", True, True), ("file_format", True, True), - ("filename", True, True), + ("metadata_level", True, True), + ("media_info_level", True, True), + ("custom_fields", True, True), + ("file_size", True, False), ("complete", True, False)] kwargs['json_data'] = build_datatables_json(kwargs, 
dt_columns, "timestamp") @@ -6646,7 +6650,7 @@ class WebInterface(object): def export_metadata(self, section_id=None, user_id=None, rating_key=None, file_format='csv', metadata_level=1, media_info_level=1, thumb_level=0, art_level=0, - custom_fields='', export_type=None, **kwargs): + custom_fields='', export_type=None, individual_files=False, **kwargs): """ Export library or media metadata to a file ``` @@ -6663,8 +6667,9 @@ class WebInterface(object): art_level (int): The level of background artwork images to export (default 0) custom_fields (str): Comma separated list of custom fields to export in addition to the export level selected - export_type (str): collection or playlist for library/user export, + export_type (str): 'collection' or 'playlist' for library/user export, otherwise default to all library items + individual_files (bool): Export each item as an individual file for library/user export. Returns: json: @@ -6673,6 +6678,7 @@ class WebInterface(object): } ``` """ + individual_files = helpers.bool_true(individual_files) result = exporter.Export(section_id=section_id, user_id=user_id, rating_key=rating_key, @@ -6682,7 +6688,8 @@ class WebInterface(object): thumb_level=thumb_level, art_level=art_level, custom_fields=custom_fields, - export_type=export_type).export() + export_type=export_type, + individual_files=individual_files).export() if result is True: return {'result': 'success', 'message': 'Metadata export has started.'} @@ -6707,8 +6714,8 @@ class WebInterface(object): """ result = exporter.get_export(export_id=export_id) - if result and result['complete'] == 1 and result['exists']: - filepath = exporter.get_export_filepath(result['filename']) + if result and result['complete'] == 1 and result['exists'] and not result['individual_files']: + filepath = exporter.get_export_filepath(result['title'], result['timestamp'], result['filename']) if result['file_format'] == 'csv': with open(filepath, 'r', encoding='utf-8') as infile: @@ -6769,28 +6776,23 @@ class WebInterface(object): result = exporter.get_export(export_id=export_id) if result and result['complete'] == 1 and result['exists']: - export_filepath = exporter.get_export_filepath(result['filename']) + if result['thumb_level'] or result['art_level'] or result['individual_files']: + directory = exporter.format_export_directory(result['title'], result['timestamp']) + dirpath = exporter.get_export_dirpath(directory) + zip_filename = '{}.zip'.format(directory) - if result['thumb_level'] or result['art_level']: - zip_filename = '{}.zip'.format(os.path.splitext(result['filename'])[0]) - images_folder = exporter.get_export_filepath(result['filename'], images=True) + buffer = BytesIO() + temp_zip = zipfile.ZipFile(buffer, 'w') + helpers.zipdir(dirpath, temp_zip) + temp_zip.close() - if os.path.exists(images_folder): - buffer = BytesIO() - temp_zip = zipfile.ZipFile(buffer, 'w') - temp_zip.write(export_filepath, arcname=result['filename']) + return serve_fileobj(buffer.getvalue(), content_type='application/zip', + disposition='attachment', name=zip_filename) - _images_folder = os.path.basename(images_folder) + else: + filepath = exporter.get_export_filepath(result['title'], result['timestamp'], result['filename']) + return serve_download(filepath, name=result['filename']) - for f in os.listdir(images_folder): - image_path = os.path.join(images_folder, f) - temp_zip.write(image_path, arcname=os.path.join(_images_folder, f)) - - temp_zip.close() - return serve_fileobj(buffer.getvalue(), content_type='application/zip', - 
disposition='attachment', name=zip_filename) - - return serve_download(exporter.get_export_filepath(result['filename']), name=result['filename']) else: if result and result.get('complete') == 0: msg = 'Export is still being processed.'
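
Taken together, the exporter changes replace the old flat "filename plus images folder" layout with one directory per export. As a rough sketch of how the new helpers fit together when a finished export is served for download (paraphrasing the download_export hunk above; package_export is an illustrative name, not part of the patch):

```python
# Illustrative only: condenses the download_export logic above using the new
# exporter helpers. `result` is a row as returned by exporter.get_export().
import zipfile
from io import BytesIO

from plexpy import exporter, helpers

def package_export(result):
    if result['thumb_level'] or result['art_level'] or result['individual_files']:
        # Images or per-item files present: zip the whole
        # "<title>.<YYYYMMDDHHMMSS>" export directory for download.
        directory = exporter.format_export_directory(result['title'], result['timestamp'])
        dirpath = exporter.get_export_dirpath(directory)

        buffer = BytesIO()
        with zipfile.ZipFile(buffer, 'w') as temp_zip:
            helpers.zipdir(dirpath, temp_zip)  # walks dirpath, keeps paths relative to it

        return buffer.getvalue(), '{}.zip'.format(directory)

    # Single data file: serve "<title>.<file_format>" from inside the export directory.
    filename = exporter.format_export_filename(result['title'], result['file_format'])
    filepath = exporter.get_export_filepath(result['title'], result['timestamp'], filename)
    return filepath, filename
```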